diff --git a/.gitattributes b/.gitattributes
index a6344aac8c09253b3b630fb776ae94478aa0275b..4e86a1d05e648d0adf1095c7ec0a53273aa4ff2f 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+model/sentence-transformer/unigram.json filter=lfs diff=lfs merge=lfs -text
diff --git a/README.md b/README.md
index e875eb33f11c20bb976082ca1649d55ba96bc33f..1a3a7f560989931b7971cc5b76658ac01d969702 100644
--- a/README.md
+++ b/README.md
@@ -1,8 +1,8 @@
 ---
 title: LlamaIndexRAG
-emoji: 🚀
-colorFrom: blue
-colorTo: blue
+emoji: 🔥
+colorFrom: red
+colorTo: green
 sdk: streamlit
 sdk_version: 1.41.1
 app_file: app.py
diff --git a/app.py b/app.py
new file mode 100644
index 0000000000000000000000000000000000000000..83190403ae03119d8943063979c66a45f07bfcaa
--- /dev/null
+++ b/app.py
@@ -0,0 +1,83 @@
+import streamlit as st
+from llama_index.core import VectorStoreIndex, SimpleDirectoryReader, Settings
+from llama_index.embeddings.huggingface import HuggingFaceEmbedding
+from llama_index.legacy.callbacks import CallbackManager
+from llama_index.llms.openai_like import OpenAILike
+
+# Create an instance of CallbackManager
+callback_manager = CallbackManager()
+
+api_base_url = "https://internlm-chat.intern-ai.org.cn/puyu/api/v1/"
+model = "internlm2.5-latest"
+api_key = "eyJ0eXBlIjoiSldUIiwiYWxnIjoiSFM1MTIifQ.eyJqdGkiOiIxNzAwMzA3OCIsInJvbCI6IlJPTEVfUkVHSVNURVIiLCJpc3MiOiJPcGVuWExhYiIsImlhdCI6MTczMzc1OTE4OCwiY2xpZW50SWQiOiJlYm1ydm9kNnlvMG5semFlazF5cCIsInBob25lIjoiMTg0MDY1MDk1NTgiLCJ1dWlkIjoiNzJhNWJhZmEtYjI3MC00NTVmLWJlYTgtYzViYmNiNDM3YWYxIiwiZW1haWwiOiIiLCJleHAiOjE3NDkzMTExODh9.D1Z-XG0-ZY7ZsSRAGq6V6hrzV8Pk9EgDHthSZZYCcK30-yiHPdRuB4kR4_96azWKbEO_G_nvhrNmKlHHCVAV1w"
+
+# api_base_url = "https://api.siliconflow.cn/v1"
+# model = "internlm/internlm2_5-7b-chat"
+# api_key = "Please fill in your API key here"
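+
+# Note: hardcoding an API key in source code leaks the credential to anyone
+# who can read the repo. A minimal, safer sketch: read the key from the
+# environment instead (the variable name INTERNLM_API_KEY below is an
+# assumption, not an official name):
+#
+#   import os
+#   api_key = os.environ["INTERNLM_API_KEY"]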
+
+llm = OpenAILike(model=model, api_base=api_base_url, api_key=api_key, is_chat_model=True, callback_manager=callback_manager)
+
+
+st.set_page_config(page_title="llama_index_demo", page_icon="🦜🔗")
+st.title("llama_index_demo")
+
+# Initialize the models
+@st.cache_resource
+def init_models():
+    embed_model = HuggingFaceEmbedding(
+        model_name="model/sentence-transformer"
+    )
+    Settings.embed_model = embed_model
+
+    # Use the initialized LLM
+    Settings.llm = llm
+
+    documents = SimpleDirectoryReader("data").load_data()
+    index = VectorStoreIndex.from_documents(documents)
+    query_engine = index.as_query_engine()
+
+    return query_engine
+
+# Check whether the models need to be initialized
+if 'query_engine' not in st.session_state:
+    st.session_state['query_engine'] = init_models()
+
+def greet2(question):
+    response = st.session_state['query_engine'].query(question)
+    return response
+
+
+# Store LLM generated responses
+if "messages" not in st.session_state.keys():
+    st.session_state.messages = [{"role": "assistant", "content": "你好,我是你的助手,有什么我可以帮助你的吗?"}]
+
+# Display or clear chat messages
+for message in st.session_state.messages:
+    with st.chat_message(message["role"]):
+        st.write(message["content"])
+
+def clear_chat_history():
+    st.session_state.messages = [{"role": "assistant", "content": "你好,我是你的助手,有什么我可以帮助你的吗?"}]
+
+st.sidebar.button('Clear Chat History', on_click=clear_chat_history)
+
+# Function for generating a response from the query engine
+def generate_llama_index_response(prompt_input):
+    return greet2(prompt_input)
+
+# User-provided prompt
+if prompt := st.chat_input():
+    st.session_state.messages.append({"role": "user", "content": prompt})
+    with st.chat_message("user"):
+        st.write(prompt)
+
+# Generate a new response if the last message is not from the assistant
+if st.session_state.messages[-1]["role"] != "assistant":
+    with st.chat_message("assistant"):
+        with st.spinner("Thinking..."):
+            response = generate_llama_index_response(prompt)
+            placeholder = st.empty()
+            placeholder.markdown(response)
+    message = {"role": "assistant", "content": response}
+    st.session_state.messages.append(message)
diff --git a/data/README_zh-CN.md b/data/README_zh-CN.md
new file mode 100644
index 0000000000000000000000000000000000000000..f4f0b4b48d20bde7d916f534194671be09ca7d30
--- /dev/null
+++ b/data/README_zh-CN.md
@@ -0,0 +1,304 @@
+<div align="center">
+ +

+ +[![GitHub Repo stars](https://img.shields.io/github/stars/InternLM/xtuner?style=social)](https://github.com/InternLM/xtuner/stargazers) +[![license](https://img.shields.io/github/license/InternLM/xtuner.svg)](https://github.com/InternLM/xtuner/blob/main/LICENSE) +[![PyPI](https://img.shields.io/pypi/v/xtuner)](https://pypi.org/project/xtuner/) +[![Downloads](https://static.pepy.tech/badge/xtuner)](https://pypi.org/project/xtuner/) +[![issue resolution](https://img.shields.io/github/issues-closed-raw/InternLM/xtuner)](https://github.com/InternLM/xtuner/issues) +[![open issues](https://img.shields.io/github/issues-raw/InternLM/xtuner)](https://github.com/InternLM/xtuner/issues) + +👋 加入我们:[![Static Badge](https://img.shields.io/badge/-grey?style=social&logo=wechat&label=微信)](https://cdn.vansin.top/internlm/xtuner.jpg) +[![Static Badge](https://img.shields.io/badge/-grey?style=social&logo=twitter&label=推特)](https://twitter.com/intern_lm) +[![Static Badge](https://img.shields.io/badge/-grey?style=social&logo=discord&label=Discord)](https://discord.gg/xa29JuW87d) + +🔍 探索我们的模型: +[![Static Badge](https://img.shields.io/badge/-gery?style=social&label=🤗%20Huggingface)](https://huggingface.co/xtuner) +[![Static Badge](https://img.shields.io/badge/-gery?style=social&label=🤖%20ModelScope)](https://www.modelscope.cn/organization/xtuner) +[![Static Badge](https://img.shields.io/badge/-gery?style=social&label=🧰%20OpenXLab)](https://openxlab.org.cn/usercenter/xtuner) +[![Static Badge](https://img.shields.io/badge/-gery?style=social&label=🧠%20WiseModel)](https://www.wisemodel.cn/organization/xtuner) + +[English](README.md) | 简体中文 + +
+ +## 🚀 Speed Benchmark + +- XTuner 与 LLaMA-Factory 在 Llama2-7B 模型上的训练效率对比 + +
+ +
+ +- XTuner 与 LLaMA-Factory 在 Llama2-70B 模型上的训练效率对比 + +
+ +
+ +## 🎉 更新 +- **\[2024/07\]** 支持 [MiniCPM](xtuner/configs/minicpm/) 模型! +- **\[2024/07\]** 支持训练 [DPO](https://github.com/InternLM/xtuner/tree/main/xtuner/configs/dpo), [ORPO](https://github.com/InternLM/xtuner/tree/main/xtuner/configs/orpo) 还有 [Reward Model](https://github.com/InternLM/xtuner/tree/main/xtuner/configs/reward_model) ! 并且能够支持打包数据以及序列并行功能! 请参考 [文档](https://xtuner.readthedocs.io/zh-cn/latest/dpo/overview.html) 了解更多信息。 +- **\[2024/07\]** 支持 [InternLM 2.5](xtuner/configs/internlm/internlm2_5_chat_7b/) 模型! +- **\[2024/06\]** 支持 [DeepSeek V2](xtuner/configs/deepseek/deepseek_v2_chat/) models! **训练速度提升一倍!** +- **\[2024/04\]** 多模态大模型 [LLaVA-Phi-3-mini](https://huggingface.co/xtuner/llava-phi-3-mini-hf) 发布!快速开始请查阅此[文档](xtuner/configs/llava/phi3_mini_4k_instruct_clip_vit_large_p14_336)! +- **\[2024/04\]** 多模态大模型 [LLaVA-Llama-3-8B](https://huggingface.co/xtuner/llava-llama-3-8b) 和 [LLaVA-Llama-3-8B-v1.1](https://huggingface.co/xtuner/llava-llama-3-8b-v1_1) 发布!快速开始请查阅此[文档](xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336)! +- **\[2024/04\]** 支持 [Llama 3](xtuner/configs/llama) 模型! +- **\[2024/04\]** 支持序列并行训练策略以实现语言模型超长上下文训练!\[[文档](https://github.com/InternLM/xtuner/blob/docs/docs/zh_cn/acceleration/train_extreme_long_sequence.rst)\] \[[速度基准](https://github.com/InternLM/xtuner/blob/docs/docs/zh_cn/acceleration/benchmark.rst)\] +- **\[2024/02\]** 支持 [Gemma](xtuner/configs/gemma) 模型! +- **\[2024/02\]** 支持 [Qwen1.5](xtuner/configs/qwen/qwen1_5) 模型! +- **\[2024/01\]** 支持 [InternLM2](xtuner/configs/internlm) 模型!同时,最新版的多模态大模型 [LLaVA-Internlm2-7B](https://huggingface.co/xtuner/llava-internlm2-7b) / [20B](https://huggingface.co/xtuner/llava-internlm2-20b) 发布,其表现出强大的性能! +- **\[2024/01\]** 支持 [DeepSeek-MoE](https://huggingface.co/deepseek-ai/deepseek-moe-16b-chat) 模型!20GB 显存即可实现 QLoRA 微调,4x80GB 即可实现全参数微调。快速开始请查阅相关[配置文件](xtuner/configs/deepseek/)! +- **\[2023/12\]** 🔥 支持多模态模型 VLM([LLaVA-v1.5](https://github.com/haotian-liu/LLaVA))预训练和指令微调!快速开始请查阅此[文档](xtuner/configs/llava/README_zh-CN.md)! +- **\[2023/12\]** 🔥 支持 [Mixtral 8x7B](https://huggingface.co/mistralai/Mixtral-8x7B-Instruct-v0.1) 模型!快速开始请查阅此[文档](xtuner/configs/mixtral/README.md)! +- **\[2023/11\]** 支持 [ChatGLM3-6B](xtuner/configs/chatglm) 模型! +- **\[2023/10\]** 支持 [MSAgent-Bench](https://modelscope.cn/datasets/damo/MSAgent-Bench) 数据集,并且微调所得大语言模型可应用至 [Lagent](https://github.com/InternLM/lagent) 框架! +- **\[2023/10\]** 优化数据处理逻辑以兼容 `system` 字段,相关细节请查阅[文档](docs/zh_cn/user_guides/dataset_format.md)! +- **\[2023/09\]** 支持 [InternLM-20B](xtuner/configs/internlm) 系列模型! +- **\[2023/09\]** 支持 [Baichuan2](xtuner/configs/baichuan) 系列模型! +- **\[2023/08\]** XTuner 正式发布!众多微调模型已上传至 [HuggingFace](https://huggingface.co/xtuner)! 
+
+## 📖 介绍
+
+XTuner 是一个高效、灵活、全能的轻量化大模型微调工具库。
+
+**高效**
+
+- 支持大语言模型 LLM、多模态图文模型 VLM 的预训练及轻量级微调。XTuner 支持在 8GB 显存下微调 7B 模型,同时也支持多节点跨设备微调更大尺度模型(70B+)。
+- 自动分发高性能算子(如 FlashAttention、Triton kernels 等)以加速训练吞吐。
+- 兼容 [DeepSpeed](https://github.com/microsoft/DeepSpeed) 🚀,轻松应用各种 ZeRO 训练优化策略。
+
+**灵活**
+
+- 支持多种大语言模型,包括但不限于 [InternLM](https://huggingface.co/internlm)、[Mixtral-8x7B](https://huggingface.co/mistralai)、[Llama 2](https://huggingface.co/meta-llama)、[ChatGLM](https://huggingface.co/THUDM)、[Qwen](https://huggingface.co/Qwen)、[Baichuan](https://huggingface.co/baichuan-inc)。
+- 支持多模态图文模型 LLaVA 的预训练与微调。利用 XTuner 训得模型 [LLaVA-InternLM2-20B](https://huggingface.co/xtuner/llava-internlm2-20b) 表现优异。
+- 精心设计的数据管道,兼容任意数据格式,开源数据或自定义数据皆可快速上手。
+- 支持 [QLoRA](http://arxiv.org/abs/2305.14314)、[LoRA](http://arxiv.org/abs/2106.09685)、全量参数微调等多种微调算法,支撑用户根据具体需求作出最优选择。
+
+**全能**
+
+- 支持增量预训练、指令微调与 Agent 微调。
+- 预定义众多开源对话模版,支持与开源或训练所得模型进行对话。
+- 训练所得模型可无缝接入部署工具库 [LMDeploy](https://github.com/InternLM/lmdeploy)、大规模评测工具库 [OpenCompass](https://github.com/open-compass/opencompass) 及 [VLMEvalKit](https://github.com/open-compass/VLMEvalKit)。
+
+## 🔥 支持列表
+
+| 模型 | 数据集 | 数据格式 | 微调算法 |
+| ---- | ------ | -------- | -------- |
+
+## 🛠️ 快速上手
+
+### 安装
+
+- 推荐使用 conda 先构建一个 Python-3.10 的虚拟环境
+
+  ```bash
+  conda create --name xtuner-env python=3.10 -y
+  conda activate xtuner-env
+  ```
+
+- 通过 pip 安装 XTuner:
+
+  ```shell
+  pip install -U xtuner
+  ```
+
+  亦可集成 DeepSpeed 安装:
+
+  ```shell
+  pip install -U 'xtuner[deepspeed]'
+  ```
+
+- 从源码安装 XTuner:
+
+  ```shell
+  git clone https://github.com/InternLM/xtuner.git
+  cd xtuner
+  pip install -e '.[all]'
+  ```
+
+### 微调
+
+XTuner 支持微调大语言模型。数据集预处理指南请查阅[文档](./docs/zh_cn/user_guides/dataset_prepare.md)。
+
+- **步骤 0**,准备配置文件。XTuner 提供多个开箱即用的配置文件,用户可以通过下列命令查看:
+
+  ```shell
+  xtuner list-cfg
+  ```
+
+  或者,如果所提供的配置文件不能满足使用需求,请导出所提供的配置文件并进行相应更改:
+
+  ```shell
+  xtuner copy-cfg ${CONFIG_NAME} ${SAVE_PATH}
+  vi ${SAVE_PATH}/${CONFIG_NAME}_copy.py
+  ```
+
+- **步骤 1**,开始微调。
+
+  ```shell
+  xtuner train ${CONFIG_NAME_OR_PATH}
+  ```
+
+  例如,我们可以利用 QLoRA 算法在 oasst1 数据集上微调 InternLM2.5-Chat-7B:
+
+  ```shell
+  # 单卡
+  xtuner train internlm2_5_chat_7b_qlora_oasst1_e3 --deepspeed deepspeed_zero2
+  # 多卡
+  (DIST) NPROC_PER_NODE=${GPU_NUM} xtuner train internlm2_5_chat_7b_qlora_oasst1_e3 --deepspeed deepspeed_zero2
+  (SLURM) srun ${SRUN_ARGS} xtuner train internlm2_5_chat_7b_qlora_oasst1_e3 --launcher slurm --deepspeed deepspeed_zero2
+  ```
+
+  - `--deepspeed` 表示使用 [DeepSpeed](https://github.com/microsoft/DeepSpeed) 🚀 来优化训练过程。XTuner 内置了多种策略,包括 ZeRO-1、ZeRO-2、ZeRO-3 等。如果用户期望关闭此功能,请直接移除此参数。
+
+  - 更多示例,请查阅[文档](./docs/zh_cn/user_guides/finetune.md)。
+
+- **步骤 2**,将保存的 PTH 模型(如果使用了 DeepSpeed,则将会是一个文件夹)转换为 HuggingFace 模型:
+
+  ```shell
+  xtuner convert pth_to_hf ${CONFIG_NAME_OR_PATH} ${PTH} ${SAVE_PATH}
+  ```
+
+### 对话
+
+XTuner 提供与大语言模型对话的工具。
+
+```shell
+xtuner chat ${NAME_OR_PATH_TO_LLM} --adapter ${NAME_OR_PATH_TO_ADAPTER} [optional arguments]
+```
+
+例如:
+
+与 InternLM2.5-Chat-7B 对话:
+
+```shell
+xtuner chat internlm/internlm2_5-chat-7b --prompt-template internlm2_chat
+```
+
+更多示例,请查阅[文档](./docs/zh_cn/user_guides/chat.md)。
+
+### 部署
+
+- **步骤 0**,将 HuggingFace adapter 合并到大语言模型:
+
+  ```shell
+  xtuner convert merge \
+      ${NAME_OR_PATH_TO_LLM} \
+      ${NAME_OR_PATH_TO_ADAPTER} \
+      ${SAVE_PATH} \
+      --max-shard-size 2GB
+  ```
+
+- **步骤 1**,使用任意推理框架部署微调后的大语言模型,例如 [LMDeploy](https://github.com/InternLM/lmdeploy) 🚀:
+
+  ```shell
+  pip install lmdeploy
+  python -m lmdeploy.pytorch.chat ${NAME_OR_PATH_TO_LLM} \
+      --max_new_tokens 256 \
+      --temperature 0.8 \
+      --top_p 0.95 \
+      --seed 0
+  ```
+
+  🔥 追求速度更快、显存占用更低的推理?欢迎体验 [LMDeploy](https://github.com/InternLM/lmdeploy) 提供的 4-bit 量化!使用指南请见[文档](https://github.com/InternLM/lmdeploy/tree/main#quantization)。
+
+### 评测
+
+- 推荐使用一站式平台 [OpenCompass](https://github.com/InternLM/opencompass) 来评测大语言模型,其目前已涵盖 50+ 数据集的约 30 万条题目。
+
+## 🤝 贡献指南
+
+我们感谢所有的贡献者为改进和提升 XTuner 所作出的努力。请参考[贡献指南](.github/CONTRIBUTING.md)来了解参与项目贡献的相关指引。
+
+## 🎖️ 致谢
+
+- [Llama 2](https://github.com/facebookresearch/llama)
+- [DeepSpeed](https://github.com/microsoft/DeepSpeed)
+- [QLoRA](https://github.com/artidoro/qlora)
+- [LMDeploy](https://github.com/InternLM/lmdeploy)
+- [LLaVA](https://github.com/haotian-liu/LLaVA)
+
+## 🖊️ 引用
+
+```bibtex
+@misc{2023xtuner,
+  title={XTuner: A Toolkit for Efficiently Fine-tuning LLM},
+  author={XTuner Contributors},
+  howpublished = {\url{https://github.com/InternLM/xtuner}},
+  year={2023}
+}
+```
+
+## 开源许可证
+
+该项目采用 [Apache License 2.0 开源许可证](LICENSE)。同时,请遵守所使用的模型与数据集的许可证。
diff --git a/data/xtuner/.github/CONTRIBUTING.md b/data/xtuner/.github/CONTRIBUTING.md
new file mode 100644
index 0000000000000000000000000000000000000000..09eab9a11f2729b5bdebf211cc77fa44c62c104f
--- /dev/null
+++ b/data/xtuner/.github/CONTRIBUTING.md
@@ -0,0 +1,258 @@
+## Contributing to XTuner
+
+Welcome to the XTuner community! All kinds of contributions are welcome, including but not limited to
+
+**Fix bug**
+
+You can directly post a pull request to fix typos in code or documents.
+
+The steps to fix a bug in the code implementation are as follows.
+
+1. If the modification involves significant changes, you should create an issue first that describes the error and how to trigger the bug. Other developers will discuss it with you and propose a proper solution.
+
+2. Post a pull request after fixing the bug and adding the corresponding unit test.
+
+**New Feature or Enhancement**
+
+1. If the modification involves significant changes, you should create an issue to discuss with our developers and propose a proper design.
+2. Post a pull request after implementing the new feature or enhancement and add the corresponding unit test.
+
+**Document**
+
+You can directly post a pull request to fix documents. If you want to add a document, you should first create an issue to check whether it is reasonable.
+
+### Pull Request Workflow
+
+If you're not familiar with pull requests, don't worry! The following guidance will tell you how to create a pull request step by step. If you want to dive into the development mode of pull requests, you can refer to the [official documents](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests).
+
+#### 1. Fork and clone
+
+If you are posting a pull request for the first time, you should fork the XTuner repository by clicking the **Fork** button in the top right corner of the GitHub page, and the forked repository will appear under your GitHub profile.
+
+Then, you can clone the repository to your local machine:
+
+```shell
+git clone git@github.com:{username}/xtuner.git
+```
+
+After that, you should add the official repository as the upstream repository:
+
+```bash
+git remote add upstream git@github.com:InternLM/xtuner.git
+```
+
+Check whether the remote repository has been added successfully with `git remote -v`:
+
+```bash
+origin	git@github.com:{username}/xtuner.git (fetch)
+origin	git@github.com:{username}/xtuner.git (push)
+upstream	git@github.com:InternLM/xtuner.git (fetch)
+upstream	git@github.com:InternLM/xtuner.git (push)
+```
+
+> Here's a brief introduction to origin and upstream. When we use "git clone", we create an "origin" remote by default, which points to the repository we cloned from. As for "upstream", we add it ourselves to point to the target repository. Of course, if you don't like the name "upstream", you could name it as you wish. Usually, we'll push the code to "origin". If the pushed code conflicts with the latest code in the official repository ("upstream"), we should pull the latest code from upstream to resolve the conflicts, and then push to "origin" again. The posted pull request will be updated automatically.
+
+#### 2. Configure pre-commit
+
+You should configure [pre-commit](https://pre-commit.com/#intro) in the local development environment to make sure the code style matches that of InternLM. **Note**: The following code should be executed under the XTuner directory.
+
+```shell
+pip install -U pre-commit
+pre-commit install
+```
+
+Check that pre-commit is configured successfully, and install the hooks defined in `.pre-commit-config.yaml`.
+
+```shell
+pre-commit run --all-files
+```
+
+If the installation process is interrupted, you can repeatedly run `pre-commit run ...` to continue the installation.
+
+If the code does not conform to the code style specification, pre-commit will raise a warning and automatically fix some of the errors.
+
+If we want to commit our code bypassing the pre-commit hook, we can use the `--no-verify` option (**only for a temporary commit**):
+
+```shell
+git commit -m "xxx" --no-verify
+```
+
+#### 3. Create a development branch
+
+After configuring pre-commit, we should create a branch based on the master branch to develop the new feature or fix the bug. The proposed branch name is `username/pr_name`:
+
+```shell
+git checkout -b yhc/refactor_contributing_doc
+```
+
+In subsequent development, if the master branch of the local repository is behind the master branch of "upstream", we need to pull the upstream for synchronization, and then execute the above command:
+
+```shell
+git pull upstream master
+```
+
+#### 4. Commit the code and pass the unit test
+
+- XTuner introduces mypy to do static type checking to increase the robustness of the code. Therefore, we need to add type hints to our code and pass the mypy check. If you are not familiar with type hints, you can refer to [this tutorial](https://docs.python.org/3/library/typing.html).
+
+- The committed code should pass the unit tests:
+
+  ```shell
+  # Pass all unit tests
+  pytest tests
+
+  # Pass the unit test of runner
+  pytest tests/test_runner/test_runner.py
+  ```
+
+  If the unit tests fail because of missing dependencies, you can install the dependencies referring to the [guidance](#unit-test).
+
+- If the documents are modified/added, we should check the rendering result referring to the [guidance](#document-rendering).
+
+#### 5. Push the code to remote
+
+We can push the local commits to the remote repository after passing the unit tests and the pre-commit check. You can associate the local branch with a remote branch by adding the `-u` option:
+
+```shell
+git push -u origin {branch_name}
+```
+
+This will allow you to use the `git push` command to push code directly next time, without having to specify a branch or the remote repository.
+
+#### 6. Create a Pull Request
+
+(1) Create a pull request in GitHub's Pull request interface.
+
+(2) Modify the PR description according to the guidelines so that other developers can better understand your changes.
+
+Find more details about the pull request description in the [pull request guidelines](#pr-specs).
+
+**Note**
+
+(a) The pull request description should contain the reason for the change, the content of the change, and the impact of the change, and be associated with the relevant issue (see the [documentation](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue)).
+
+(b) If it is your first contribution, please sign the CLA.
+
+(c) Check whether the pull request passes the CI.
+
+XTuner will run unit tests for the posted pull request on different platforms (Linux, Windows, Mac), based on different versions of Python, PyTorch, and CUDA to make sure the code is correct. We can see the specific test information by clicking `Details` in the above image so that we can modify the code.
+
+(3) If the pull request passes the CI, then you can wait for the review from other developers. You'll modify the code based on the reviewers' comments, and repeat steps [4](#4-commit-the-code-and-pass-the-unit-test)-[5](#5-push-the-code-to-remote) until all reviewers approve it. Then, we will merge it ASAP.
+
+#### 7. Resolve conflicts
+
+If your local branch conflicts with the latest master branch of "upstream", you'll need to resolve them. There are two ways to do this:
+
+```shell
+git fetch --all --prune
+git rebase upstream/master
+```
+
+or
+
+```shell
+git fetch --all --prune
+git merge upstream/master
+```
+
+If you are very good at handling conflicts, then you can use rebase to resolve conflicts, as this will keep your commit logs tidy. If you are not familiar with `rebase`, then you can use `merge` to resolve conflicts.
+
+### Guidance
+
+#### Unit test
+
+If you cannot run the unit tests of some modules because some dependencies are missing, such as the [video](https://github.com/open-mmlab/mmcv/tree/master/mmcv/video) module, you can try to install the following dependencies:
+
+```shell
+# Linux
+sudo apt-get update -y
+sudo apt-get install -y libturbojpeg
+sudo apt-get install -y ffmpeg
+
+# Windows
+conda install ffmpeg
+```
+
+We should also make sure the committed code does not decrease the coverage of the unit tests. We can run the following command to check the coverage:
+
+```shell
+python -m coverage run -m pytest /path/to/test_file
+python -m coverage html
+# check file in htmlcov/index.html
+```
+
+#### Document rendering
+
+If the documents are modified/added, we should check the rendering result. We can install the dependencies and run the following commands to render the documents and check the results:
+
+```shell
+pip install -r requirements/docs.txt
+cd docs/zh_cn/
+# or docs/en
+make html
+# check file in ./docs/zh_cn/_build/html/index.html
+```
+
+### Code style
+
+#### Python
+
+We adopt [PEP8](https://www.python.org/dev/peps/pep-0008/) as the preferred code style.
+
+We use the following tools for linting and formatting:
+
+- [flake8](https://github.com/PyCQA/flake8): A wrapper around some linter tools.
+- [isort](https://github.com/timothycrosley/isort): A Python utility to sort imports.
+- [yapf](https://github.com/google/yapf): A formatter for Python files.
+- [codespell](https://github.com/codespell-project/codespell): A Python utility to fix common misspellings in text files.
+- [mdformat](https://github.com/executablebooks/mdformat): Mdformat is an opinionated Markdown formatter that can be used to enforce a consistent style in Markdown files.
+- [docformatter](https://github.com/myint/docformatter): A formatter to format docstrings.
+
+Style configurations of yapf and isort can be found in [setup.cfg](../setup.cfg).
+
+We use a [pre-commit hook](https://pre-commit.com/) that checks and formats `flake8`, `yapf`, `isort`, `trailing whitespaces`, and `markdown files`,
+fixes `end-of-files`, `double-quoted-strings`, `python-encoding-pragma`, and `mixed-line-ending`, and sorts `requirements.txt` automatically on every commit.
+The config for a pre-commit hook is stored in [.pre-commit-config](../.pre-commit-config.yaml).
+
+#### C++ and CUDA
+
+We follow the [Google C++ Style Guide](https://google.github.io/styleguide/cppguide.html).
+
+### PR Specs
+
+1. Use the [pre-commit](https://pre-commit.com) hook to avoid code-style issues
+
+2. One short-lived branch should be matched with only one PR
+
+3. Accomplish a detailed change in one PR. Avoid large PRs
+
+   - Bad: Support Faster R-CNN
+   - Acceptable: Add a box head to Faster R-CNN
+   - Good: Add a parameter to box head to support custom conv-layer number
+
+4. Provide a clear and significant commit message
+
+5. Provide a clear and meaningful PR description
+
+   - The task name should be clarified in the title.
The general format is: \[Prefix\] Short description of the PR (Suffix) + - Prefix: add new feature \[Feature\], fix bug \[Fix\], related to documents \[Docs\], in developing \[WIP\] (which will not be reviewed temporarily) + - Introduce main changes, results and influences on other modules in short description + - Associate related issues and pull requests with a milestone diff --git a/data/xtuner/.github/workflows/deploy.yml b/data/xtuner/.github/workflows/deploy.yml new file mode 100644 index 0000000000000000000000000000000000000000..b2c6f0bc208ca0f3d2aba1d4dc04d97fb51cacbd --- /dev/null +++ b/data/xtuner/.github/workflows/deploy.yml @@ -0,0 +1,26 @@ +name: deploy + +on: push + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + build-n-publish: + runs-on: ubuntu-latest + if: startsWith(github.event.ref, 'refs/tags') + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.8 + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: Build XTuner + run: | + pip install wheel + python setup.py sdist bdist_wheel + - name: Publish distribution to PyPI + run: | + pip install twine + twine upload dist/* -u __token__ -p ${{ secrets.pypi_password }} diff --git a/data/xtuner/.github/workflows/lint.yml b/data/xtuner/.github/workflows/lint.yml new file mode 100644 index 0000000000000000000000000000000000000000..74a733eb81e8e3e3b7c6ca1c08de8856d6cfb81e --- /dev/null +++ b/data/xtuner/.github/workflows/lint.yml @@ -0,0 +1,23 @@ +name: lint + +on: [push, pull_request] + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.8 + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: Install pre-commit hook + run: | + pip install pre-commit + pre-commit install + - name: Linting + run: pre-commit run --all-files diff --git a/data/xtuner/.gitignore b/data/xtuner/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..ffe3444b8cdb2ec3e6791d047d0593fcf9d20d41 --- /dev/null +++ b/data/xtuner/.gitignore @@ -0,0 +1,124 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/*/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ + +# custom +data/ +data +.vscode +.idea +.DS_Store +*.pkl +*.pkl.json +*.log.json +work_dirs/ + +# Pytorch +*.pth +*.py~ +*.sh~ + +# srun +*.out +batchscript-* diff --git a/data/xtuner/.owners.yml b/data/xtuner/.owners.yml new file mode 100644 index 0000000000000000000000000000000000000000..996ae4c69c03821b2b79a1b7a4233988cf0623ee --- /dev/null +++ b/data/xtuner/.owners.yml @@ -0,0 +1,8 @@ +assign: + issues: disabled + pull_requests: disabled + strategy: + random + # daily-shift-based + schedule: + '*/1 * * * *' diff --git a/data/xtuner/.pre-commit-config-zh-cn.yaml b/data/xtuner/.pre-commit-config-zh-cn.yaml new file mode 100644 index 0000000000000000000000000000000000000000..4b9f51976e4b46db4db69952f437e43d72581070 --- /dev/null +++ b/data/xtuner/.pre-commit-config-zh-cn.yaml @@ -0,0 +1,51 @@ +exclude: ^tests/data/|^xtuner/tools/model_converters/modeling_internlm2_reward/ +repos: + - repo: https://gitee.com/openmmlab/mirrors-flake8 + rev: 5.0.4 + hooks: + - id: flake8 + args: ["--exclude=xtuner/model/transformers_models/*"] + - repo: https://gitee.com/openmmlab/mirrors-isort + rev: 5.11.5 + hooks: + - id: isort + - repo: https://gitee.com/openmmlab/mirrors-yapf + rev: v0.32.0 + hooks: + - id: yapf + - repo: https://gitee.com/openmmlab/mirrors-pre-commit-hooks + rev: v4.3.0 + hooks: + - id: trailing-whitespace + - id: check-yaml + - id: end-of-file-fixer + - id: requirements-txt-fixer + - id: double-quote-string-fixer + - id: check-merge-conflict + - id: fix-encoding-pragma + args: ["--remove"] + - id: mixed-line-ending + args: ["--fix=lf"] + - repo: https://gitee.com/openmmlab/mirrors-codespell + rev: v2.2.1 + hooks: + - id: codespell + - repo: https://gitee.com/openmmlab/mirrors-mdformat + rev: 0.7.9 + hooks: + - id: mdformat + args: ["--number"] + additional_dependencies: + - mdformat-openmmlab + - mdformat_frontmatter + - linkify-it-py + - repo: https://gitee.com/openmmlab/mirrors-docformatter + rev: v1.3.1 + hooks: + - id: docformatter + args: ["--in-place", "--wrap-descriptions", "79"] + - repo: https://github.com/asottile/pyupgrade + rev: v3.0.0 + hooks: + - id: pyupgrade + args: ["--py36-plus"] diff --git a/data/xtuner/.pre-commit-config.yaml b/data/xtuner/.pre-commit-config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..f6bbfd6339aeba49dbae8a0edc425a6e3f0c8eb2 --- /dev/null +++ b/data/xtuner/.pre-commit-config.yaml @@ -0,0 +1,53 @@ +exclude: ^tests/data/|^xtuner/tools/model_converters/modeling_internlm2_reward/ +repos: + - repo: https://github.com/PyCQA/flake8 + rev: 5.0.4 + hooks: + - id: flake8 + args: ["--exclude=xtuner/model/transformers_models/*"] + - repo: https://github.com/PyCQA/isort + rev: 5.11.5 + hooks: + 
- id: isort + - repo: https://github.com/pre-commit/mirrors-yapf + rev: v0.32.0 + hooks: + - id: yapf + exclude: 'xtuner/parallel/sequence/__init__.py' + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.3.0 + hooks: + - id: trailing-whitespace + - id: check-yaml + - id: end-of-file-fixer + - id: requirements-txt-fixer + - id: double-quote-string-fixer + - id: check-merge-conflict + - id: fix-encoding-pragma + args: ["--remove"] + - id: mixed-line-ending + args: ["--fix=lf"] + - repo: https://github.com/codespell-project/codespell + rev: v2.2.1 + hooks: + - id: codespell + - repo: https://github.com/executablebooks/mdformat + rev: 0.7.9 + hooks: + - id: mdformat + args: ["--number"] + additional_dependencies: + - mdformat-openmmlab + - mdformat_frontmatter + - linkify-it-py + exclude: 'docs/zh_cn/user_guides/sequence_parallel.md' + - repo: https://github.com/myint/docformatter + rev: v1.3.1 + hooks: + - id: docformatter + args: ["--in-place", "--wrap-descriptions", "79"] + - repo: https://github.com/asottile/pyupgrade + rev: v3.0.0 + hooks: + - id: pyupgrade + args: ["--py36-plus"] diff --git a/data/xtuner/LICENSE b/data/xtuner/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64 --- /dev/null +++ b/data/xtuner/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/data/xtuner/MANIFEST.in b/data/xtuner/MANIFEST.in new file mode 100644 index 0000000000000000000000000000000000000000..36e1610bf8093a8355a58d7d9779697a64931313 --- /dev/null +++ b/data/xtuner/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include xtuner/configs *.py *.yml *.json +recursive-include xtuner/tools *.sh *.py diff --git a/data/xtuner/README.md b/data/xtuner/README.md new file mode 100644 index 0000000000000000000000000000000000000000..263d300c7a17778e3be4ff6f64cd262995f98527 --- /dev/null +++ b/data/xtuner/README.md @@ -0,0 +1,302 @@ +
+ +

+ +[![GitHub Repo stars](https://img.shields.io/github/stars/InternLM/xtuner?style=social)](https://github.com/InternLM/xtuner/stargazers) +[![license](https://img.shields.io/github/license/InternLM/xtuner.svg)](https://github.com/InternLM/xtuner/blob/main/LICENSE) +[![PyPI](https://img.shields.io/pypi/v/xtuner)](https://pypi.org/project/xtuner/) +[![Downloads](https://static.pepy.tech/badge/xtuner)](https://pypi.org/project/xtuner/) +[![issue resolution](https://img.shields.io/github/issues-closed-raw/InternLM/xtuner)](https://github.com/InternLM/xtuner/issues) +[![open issues](https://img.shields.io/github/issues-raw/InternLM/xtuner)](https://github.com/InternLM/xtuner/issues) + +👋 join us on [![Static Badge](https://img.shields.io/badge/-grey?style=social&logo=wechat&label=WeChat)](https://cdn.vansin.top/internlm/xtuner.jpg) +[![Static Badge](https://img.shields.io/badge/-grey?style=social&logo=twitter&label=Twitter)](https://twitter.com/intern_lm) +[![Static Badge](https://img.shields.io/badge/-grey?style=social&logo=discord&label=Discord)](https://discord.gg/xa29JuW87d) + +🔍 Explore our models on +[![Static Badge](https://img.shields.io/badge/-gery?style=social&label=🤗%20Huggingface)](https://huggingface.co/xtuner) +[![Static Badge](https://img.shields.io/badge/-gery?style=social&label=🤖%20ModelScope)](https://www.modelscope.cn/organization/xtuner) +[![Static Badge](https://img.shields.io/badge/-gery?style=social&label=🧰%20OpenXLab)](https://openxlab.org.cn/usercenter/xtuner) +[![Static Badge](https://img.shields.io/badge/-gery?style=social&label=🧠%20WiseModel)](https://www.wisemodel.cn/organization/xtuner) + +English | [简体中文](README_zh-CN.md) + +
+ +## 🚀 Speed Benchmark + +- Llama2 7B Training Speed + +
+ +
+ +- Llama2 70B Training Speed + +
+ +
+ +## 🎉 News +- **\[2024/07\]** Support [MiniCPM](xtuner/configs/minicpm/) models! +- **\[2024/07\]** Support [DPO](https://github.com/InternLM/xtuner/tree/main/xtuner/configs/dpo), [ORPO](https://github.com/InternLM/xtuner/tree/main/xtuner/configs/orpo) and [Reward Model](https://github.com/InternLM/xtuner/tree/main/xtuner/configs/reward_model) training with packed data and sequence parallel! See [documents](https://xtuner.readthedocs.io/en/latest/dpo/overview.html) for more details. +- **\[2024/07\]** Support [InternLM 2.5](xtuner/configs/internlm/internlm2_5_chat_7b/) models! +- **\[2024/06\]** Support [DeepSeek V2](xtuner/configs/deepseek/deepseek_v2_chat/) models! **2x faster!** +- **\[2024/04\]** [LLaVA-Phi-3-mini](https://huggingface.co/xtuner/llava-phi-3-mini-hf) is released! Click [here](xtuner/configs/llava/phi3_mini_4k_instruct_clip_vit_large_p14_336) for details! +- **\[2024/04\]** [LLaVA-Llama-3-8B](https://huggingface.co/xtuner/llava-llama-3-8b) and [LLaVA-Llama-3-8B-v1.1](https://huggingface.co/xtuner/llava-llama-3-8b-v1_1) are released! Click [here](xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336) for details! +- **\[2024/04\]** Support [Llama 3](xtuner/configs/llama) models! +- **\[2024/04\]** Support Sequence Parallel for enabling highly efficient and scalable LLM training with extremely long sequence lengths! \[[Usage](https://github.com/InternLM/xtuner/blob/docs/docs/zh_cn/acceleration/train_extreme_long_sequence.rst)\] \[[Speed Benchmark](https://github.com/InternLM/xtuner/blob/docs/docs/zh_cn/acceleration/benchmark.rst)\] +- **\[2024/02\]** Support [Gemma](xtuner/configs/gemma) models! +- **\[2024/02\]** Support [Qwen1.5](xtuner/configs/qwen/qwen1_5) models! +- **\[2024/01\]** Support [InternLM2](xtuner/configs/internlm) models! The latest VLM [LLaVA-Internlm2-7B](https://huggingface.co/xtuner/llava-internlm2-7b) / [20B](https://huggingface.co/xtuner/llava-internlm2-20b) models are released, with impressive performance! +- **\[2024/01\]** Support [DeepSeek-MoE](https://huggingface.co/deepseek-ai/deepseek-moe-16b-chat) models! 20GB GPU memory is enough for QLoRA fine-tuning, and 4x80GB for full-parameter fine-tuning. Click [here](xtuner/configs/deepseek/) for details! +- **\[2023/12\]** 🔥 Support multi-modal VLM pretraining and fine-tuning with [LLaVA-v1.5](https://github.com/haotian-liu/LLaVA) architecture! Click [here](xtuner/configs/llava/README.md) for details! +- **\[2023/12\]** 🔥 Support [Mixtral 8x7B](https://huggingface.co/mistralai/Mixtral-8x7B-Instruct-v0.1) models! Click [here](xtuner/configs/mixtral/README.md) for details! +- **\[2023/11\]** Support [ChatGLM3-6B](xtuner/configs/chatglm) model! +- **\[2023/10\]** Support [MSAgent-Bench](https://modelscope.cn/datasets/damo/MSAgent-Bench) dataset, and the fine-tuned LLMs can be applied by [Lagent](https://github.com/InternLM/lagent)! +- **\[2023/10\]** Optimize the data processing to accommodate `system` context. More information can be found on [Docs](docs/en/user_guides/dataset_format.md)! +- **\[2023/09\]** Support [InternLM-20B](xtuner/configs/internlm) models! +- **\[2023/09\]** Support [Baichuan2](xtuner/configs/baichuan) models! +- **\[2023/08\]** XTuner is released, with multiple fine-tuned adapters on [Hugging Face](https://huggingface.co/xtuner). + +## 📖 Introduction + +XTuner is an efficient, flexible and full-featured toolkit for fine-tuning large models. + +**Efficient** + +- Support LLM, VLM pre-training / fine-tuning on almost all GPUs. 
XTuner is capable of fine-tuning 7B LLM on a single 8GB GPU, as well as multi-node fine-tuning of models exceeding 70B.
+- Automatically dispatch high-performance operators such as FlashAttention and Triton kernels to increase training throughput.
+- Compatible with [DeepSpeed](https://github.com/microsoft/DeepSpeed) 🚀, easily utilizing a variety of ZeRO optimization techniques.
+
+**Flexible**
+
+- Support various LLMs ([InternLM](https://huggingface.co/internlm), [Mixtral-8x7B](https://huggingface.co/mistralai), [Llama 2](https://huggingface.co/meta-llama), [ChatGLM](https://huggingface.co/THUDM), [Qwen](https://huggingface.co/Qwen), [Baichuan](https://huggingface.co/baichuan-inc), ...).
+- Support VLM ([LLaVA](https://github.com/haotian-liu/LLaVA)). The performance of [LLaVA-InternLM2-20B](https://huggingface.co/xtuner/llava-internlm2-20b) is outstanding.
+- Well-designed data pipeline, accommodating datasets in any format, including but not limited to open-source and custom formats.
+- Support various training algorithms ([QLoRA](http://arxiv.org/abs/2305.14314), [LoRA](http://arxiv.org/abs/2106.09685), full-parameter fine-tuning), allowing users to choose the most suitable solution for their requirements.
+
+**Full-featured**
+
+- Support continuous pre-training, instruction fine-tuning, and agent fine-tuning.
+- Support chatting with large models with pre-defined templates.
+- The output models can seamlessly integrate with the deployment and serving toolkit ([LMDeploy](https://github.com/InternLM/lmdeploy)), and large-scale evaluation toolkits ([OpenCompass](https://github.com/open-compass/opencompass), [VLMEvalKit](https://github.com/open-compass/VLMEvalKit)).
+
+## 🔥 Supports
+
+| Models | SFT Datasets | Data Pipelines | Algorithms |
+| ------ | ------------ | -------------- | ---------- |
+
+## 🛠️ Quick Start
+
+### Installation
+
+- It is recommended to build a Python-3.10 virtual environment using conda
+
+  ```bash
+  conda create --name xtuner-env python=3.10 -y
+  conda activate xtuner-env
+  ```
+
+- Install XTuner via pip
+
+  ```shell
+  pip install -U xtuner
+  ```
+
+  or with DeepSpeed integration
+
+  ```shell
+  pip install -U 'xtuner[deepspeed]'
+  ```
+
+- Install XTuner from source
+
+  ```shell
+  git clone https://github.com/InternLM/xtuner.git
+  cd xtuner
+  pip install -e '.[all]'
+  ```
+
+### Fine-tune
+
+XTuner supports efficient fine-tuning (*e.g.*, QLoRA) of LLMs. Dataset preparation guides can be found in [dataset_prepare.md](./docs/en/user_guides/dataset_prepare.md).
+
+- **Step 0**, prepare the config. XTuner provides many ready-to-use configs, and we can view all of them by
+
+  ```shell
+  xtuner list-cfg
+  ```
+
+  Or, if the provided configs cannot meet the requirements, please copy the provided config to the specified directory and make specific modifications by
+
+  ```shell
+  xtuner copy-cfg ${CONFIG_NAME} ${SAVE_PATH}
+  vi ${SAVE_PATH}/${CONFIG_NAME}_copy.py
+  ```
+
+- **Step 1**, start fine-tuning.
+
+  ```shell
+  xtuner train ${CONFIG_NAME_OR_PATH}
+  ```
+
+  For example, we can start the QLoRA fine-tuning of InternLM2.5-Chat-7B with the oasst1 dataset by
+
+  ```shell
+  # On a single GPU
+  xtuner train internlm2_5_chat_7b_qlora_oasst1_e3 --deepspeed deepspeed_zero2
+  # On multiple GPUs
+  (DIST) NPROC_PER_NODE=${GPU_NUM} xtuner train internlm2_5_chat_7b_qlora_oasst1_e3 --deepspeed deepspeed_zero2
+  (SLURM) srun ${SRUN_ARGS} xtuner train internlm2_5_chat_7b_qlora_oasst1_e3 --launcher slurm --deepspeed deepspeed_zero2
+  ```
+
+  - `--deepspeed` means using [DeepSpeed](https://github.com/microsoft/DeepSpeed) 🚀 to optimize the training. XTuner comes with several integrated strategies, including ZeRO-1, ZeRO-2, and ZeRO-3. If you wish to disable this feature, simply remove this argument.
+
+  - For more examples, please see [finetune.md](./docs/en/user_guides/finetune.md).
+
+- **Step 2**, convert the saved PTH model (if using DeepSpeed, it will be a directory) to a Hugging Face model, by
+
+  ```shell
+  xtuner convert pth_to_hf ${CONFIG_NAME_OR_PATH} ${PTH} ${SAVE_PATH}
+  ```
+
+### Chat
+
+XTuner provides tools to chat with pretrained / fine-tuned LLMs.
+
+```shell
+xtuner chat ${NAME_OR_PATH_TO_LLM} --adapter ${NAME_OR_PATH_TO_ADAPTER} [optional arguments]
+```
+
+For example, we can start a chat with InternLM2.5-Chat-7B:
+
+```shell
+xtuner chat internlm/internlm2_5-chat-7b --prompt-template internlm2_chat
+```
+
+For more examples, please see [chat.md](./docs/en/user_guides/chat.md).
+
+### Deployment
+
+- **Step 0**, merge the Hugging Face adapter to the pretrained LLM, by
+
+  ```shell
+  xtuner convert merge \
+      ${NAME_OR_PATH_TO_LLM} \
+      ${NAME_OR_PATH_TO_ADAPTER} \
+      ${SAVE_PATH} \
+      --max-shard-size 2GB
+  ```
+
+- **Step 1**, deploy the fine-tuned LLM with any other framework, such as [LMDeploy](https://github.com/InternLM/lmdeploy) 🚀.
+
+  ```shell
+  pip install lmdeploy
+  python -m lmdeploy.pytorch.chat ${NAME_OR_PATH_TO_LLM} \
+      --max_new_tokens 256 \
+      --temperature 0.8 \
+      --top_p 0.95 \
+      --seed 0
+  ```
+
+  🔥 Seeking efficient inference with less GPU memory? Try 4-bit quantization from [LMDeploy](https://github.com/InternLM/lmdeploy)! For more details, see [here](https://github.com/InternLM/lmdeploy/tree/main#quantization).
+ +### Evaluation + +- We recommend using [OpenCompass](https://github.com/InternLM/opencompass), a comprehensive and systematic LLM evaluation library, which currently supports 50+ datasets with about 300,000 questions. + +## 🤝 Contributing + +We appreciate all contributions to XTuner. Please refer to [CONTRIBUTING.md](.github/CONTRIBUTING.md) for the contributing guideline. + +## 🎖️ Acknowledgement + +- [Llama 2](https://github.com/facebookresearch/llama) +- [DeepSpeed](https://github.com/microsoft/DeepSpeed) +- [QLoRA](https://github.com/artidoro/qlora) +- [LMDeploy](https://github.com/InternLM/lmdeploy) +- [LLaVA](https://github.com/haotian-liu/LLaVA) + +## 🖊️ Citation + +```bibtex +@misc{2023xtuner, + title={XTuner: A Toolkit for Efficiently Fine-tuning LLM}, + author={XTuner Contributors}, + howpublished = {\url{https://github.com/InternLM/xtuner}}, + year={2023} +} +``` + +## License + +This project is released under the [Apache License 2.0](LICENSE). Please also adhere to the Licenses of models and datasets being used. diff --git a/data/xtuner/docs/en/.readthedocs.yaml b/data/xtuner/docs/en/.readthedocs.yaml new file mode 100644 index 0000000000000000000000000000000000000000..67b9c44e72a1945134a22796a17df026ce24c27c --- /dev/null +++ b/data/xtuner/docs/en/.readthedocs.yaml @@ -0,0 +1,16 @@ +version: 2 + +build: + os: ubuntu-22.04 + tools: + python: "3.8" + +formats: + - epub + +python: + install: + - requirements: requirements/docs.txt + +sphinx: + configuration: docs/en/conf.py diff --git a/data/xtuner/docs/en/Makefile b/data/xtuner/docs/en/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..d4bb2cbb9eddb1bb1b4f366623044af8e4830919 --- /dev/null +++ b/data/xtuner/docs/en/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
+%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/data/xtuner/docs/en/_static/css/readthedocs.css b/data/xtuner/docs/en/_static/css/readthedocs.css new file mode 100644 index 0000000000000000000000000000000000000000..34ed824ba96141ae07eb484df49f93bdbc7832ec --- /dev/null +++ b/data/xtuner/docs/en/_static/css/readthedocs.css @@ -0,0 +1,6 @@ +.header-logo { + background-image: url("../image/logo.png"); + background-size: 177px 40px; + height: 40px; + width: 177px; +} diff --git a/data/xtuner/docs/en/_static/image/logo.png b/data/xtuner/docs/en/_static/image/logo.png new file mode 100644 index 0000000000000000000000000000000000000000..0d6b754c98ae1d2c39de384d51b84d4c2f94c373 Binary files /dev/null and b/data/xtuner/docs/en/_static/image/logo.png differ diff --git a/data/xtuner/docs/en/acceleration/benchmark.rst b/data/xtuner/docs/en/acceleration/benchmark.rst new file mode 100644 index 0000000000000000000000000000000000000000..813fc7d5a900ae34213559b4f971b637dc067e91 --- /dev/null +++ b/data/xtuner/docs/en/acceleration/benchmark.rst @@ -0,0 +1,2 @@ +Benchmark +========= diff --git a/data/xtuner/docs/en/acceleration/deepspeed.rst b/data/xtuner/docs/en/acceleration/deepspeed.rst new file mode 100644 index 0000000000000000000000000000000000000000..e3dcaccc05429e664d5987d97c959be4ecff9c85 --- /dev/null +++ b/data/xtuner/docs/en/acceleration/deepspeed.rst @@ -0,0 +1,2 @@ +DeepSpeed +========= diff --git a/data/xtuner/docs/en/acceleration/flash_attn.rst b/data/xtuner/docs/en/acceleration/flash_attn.rst new file mode 100644 index 0000000000000000000000000000000000000000..a080373ef49a2a311ca375207437fd56fc40b297 --- /dev/null +++ b/data/xtuner/docs/en/acceleration/flash_attn.rst @@ -0,0 +1,2 @@ +Flash Attention +=============== diff --git a/data/xtuner/docs/en/acceleration/hyper_parameters.rst b/data/xtuner/docs/en/acceleration/hyper_parameters.rst new file mode 100644 index 0000000000000000000000000000000000000000..04b82b7e6189ce1a7bb24b2848b214f8462a7aff --- /dev/null +++ b/data/xtuner/docs/en/acceleration/hyper_parameters.rst @@ -0,0 +1,2 @@ +HyperParameters +=============== diff --git a/data/xtuner/docs/en/acceleration/length_grouped_sampler.rst b/data/xtuner/docs/en/acceleration/length_grouped_sampler.rst new file mode 100644 index 0000000000000000000000000000000000000000..2fc723212a40c3d3e76539b50ccea49940da4640 --- /dev/null +++ b/data/xtuner/docs/en/acceleration/length_grouped_sampler.rst @@ -0,0 +1,2 @@ +Length Grouped Sampler +====================== diff --git a/data/xtuner/docs/en/acceleration/pack_to_max_length.rst b/data/xtuner/docs/en/acceleration/pack_to_max_length.rst new file mode 100644 index 0000000000000000000000000000000000000000..aaddd36aa6ade763959917621fa161015058bd94 --- /dev/null +++ b/data/xtuner/docs/en/acceleration/pack_to_max_length.rst @@ -0,0 +1,2 @@ +Pack to Max Length +================== diff --git a/data/xtuner/docs/en/acceleration/train_extreme_long_sequence.rst b/data/xtuner/docs/en/acceleration/train_extreme_long_sequence.rst new file mode 100644 index 0000000000000000000000000000000000000000..d326bd690119ce91e5ac7b0dd664f0c5ceb11ab8 --- /dev/null +++ b/data/xtuner/docs/en/acceleration/train_extreme_long_sequence.rst @@ -0,0 +1,2 @@ +Train Extreme Long Sequence +=========================== diff --git a/data/xtuner/docs/en/acceleration/train_large_scale_dataset.rst b/data/xtuner/docs/en/acceleration/train_large_scale_dataset.rst new file mode 100644 index 
0000000000000000000000000000000000000000..026ce9dae2ce292d2e34f2c2eafa2b51b4cc9ad1 --- /dev/null +++ b/data/xtuner/docs/en/acceleration/train_large_scale_dataset.rst @@ -0,0 +1,2 @@ +Train Large-scale Dataset +========================= diff --git a/data/xtuner/docs/en/acceleration/varlen_flash_attn.rst b/data/xtuner/docs/en/acceleration/varlen_flash_attn.rst new file mode 100644 index 0000000000000000000000000000000000000000..2fad725f35bbdd0bc492d5c0d569080d5f362522 --- /dev/null +++ b/data/xtuner/docs/en/acceleration/varlen_flash_attn.rst @@ -0,0 +1,2 @@ +Varlen Flash Attention +====================== diff --git a/data/xtuner/docs/en/chat/agent.md b/data/xtuner/docs/en/chat/agent.md new file mode 100644 index 0000000000000000000000000000000000000000..1da3ebc104432a8be76908998d6d4b1178232854 --- /dev/null +++ b/data/xtuner/docs/en/chat/agent.md @@ -0,0 +1 @@ +# Chat with Agent diff --git a/data/xtuner/docs/en/chat/llm.md b/data/xtuner/docs/en/chat/llm.md new file mode 100644 index 0000000000000000000000000000000000000000..5c556180c87e83f6511bbe58beeb49126567e740 --- /dev/null +++ b/data/xtuner/docs/en/chat/llm.md @@ -0,0 +1 @@ +# Chat with LLM diff --git a/data/xtuner/docs/en/chat/lmdeploy.md b/data/xtuner/docs/en/chat/lmdeploy.md new file mode 100644 index 0000000000000000000000000000000000000000..f4114a3a50fa76f4d57c49b59f418c9882599b1b --- /dev/null +++ b/data/xtuner/docs/en/chat/lmdeploy.md @@ -0,0 +1 @@ +# Accelerate chat by LMDeploy diff --git a/data/xtuner/docs/en/chat/vlm.md b/data/xtuner/docs/en/chat/vlm.md new file mode 100644 index 0000000000000000000000000000000000000000..54101dcbc0b8888247f49f2d4be325e5d35722ae --- /dev/null +++ b/data/xtuner/docs/en/chat/vlm.md @@ -0,0 +1 @@ +# Chat with VLM diff --git a/data/xtuner/docs/en/conf.py b/data/xtuner/docs/en/conf.py new file mode 100644 index 0000000000000000000000000000000000000000..457ca52327054b8b6306772ff28c5ec65fa3c6f3 --- /dev/null +++ b/data/xtuner/docs/en/conf.py @@ -0,0 +1,109 @@ +# Configuration file for the Sphinx documentation builder. +# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. + +import os +import sys + +from sphinx.ext import autodoc + +sys.path.insert(0, os.path.abspath('../..')) + +# -- Project information ----------------------------------------------------- + +project = 'XTuner' +copyright = '2024, XTuner Contributors' +author = 'XTuner Contributors' + +# The full version, including alpha/beta/rc tags +version_file = '../../xtuner/version.py' +with open(version_file) as f: + exec(compile(f.read(), version_file, 'exec')) +__version__ = locals()['__version__'] +# The short X.Y version +version = __version__ +# The full version, including alpha/beta/rc tags +release = __version__ + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. 
+extensions = [ + 'sphinx.ext.napoleon', + 'sphinx.ext.viewcode', + 'sphinx.ext.intersphinx', + 'sphinx_copybutton', + 'sphinx.ext.autodoc', + 'sphinx.ext.autosummary', + 'myst_parser', + 'sphinxarg.ext', +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + +# Exclude the prompt "$" when copying code +copybutton_prompt_text = r'\$ ' +copybutton_prompt_is_regexp = True + +language = 'en' + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'sphinx_book_theme' +html_logo = '_static/image/logo.png' +html_theme_options = { + 'path_to_docs': 'docs/en', + 'repository_url': 'https://github.com/InternLM/xtuner', + 'use_repository_button': True, +} +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +# html_static_path = ['_static'] + +# Mock out external dependencies here. +autodoc_mock_imports = [ + 'cpuinfo', + 'torch', + 'transformers', + 'psutil', + 'prometheus_client', + 'sentencepiece', + 'vllm.cuda_utils', + 'vllm._C', + 'numpy', + 'tqdm', +] + + +class MockedClassDocumenter(autodoc.ClassDocumenter): + """Remove note about base class when a class is derived from object.""" + + def add_line(self, line: str, source: str, *lineno: int) -> None: + if line == ' Bases: :py:class:`object`': + return + super().add_line(line, source, *lineno) + + +autodoc.ClassDocumenter = MockedClassDocumenter + +navigation_with_keys = False diff --git a/data/xtuner/docs/en/dpo/modify_settings.md b/data/xtuner/docs/en/dpo/modify_settings.md new file mode 100644 index 0000000000000000000000000000000000000000..d78cc40e6e67e2cd99da4172923ebf1fd5b799b4 --- /dev/null +++ b/data/xtuner/docs/en/dpo/modify_settings.md @@ -0,0 +1,83 @@ +## Modify DPO Training Configuration + +This section introduces config parameters related to DPO (Direct Preference Optimization) training. For more details on XTuner config files, please refer to [Modifying Training Configuration](https://xtuner.readthedocs.io/zh-cn/latest/training/modify_settings.html). + +### Loss Function + +In DPO training, you can choose different types of loss functions according to your needs. XTuner provides various loss function options, such as `sigmoid`, `hinge`, `ipo`, etc. You can select the desired loss function type by setting the `dpo_loss_type` parameter. + +Additionally, you can control the temperature coefficient in the loss function by adjusting the `loss_beta` parameter. The `label_smoothing` parameter can be used for smoothing labels. + +```python +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +dpo_loss_type = 'sigmoid' # One of ['sigmoid', 'hinge', 'ipo', 'kto_pair', 'sppo_hard', 'nca_pair', 'robust'] +loss_beta = 0.1 +label_smoothing = 0.0 +``` + +### Modifying the Model + +Users can modify `pretrained_model_name_or_path` to change the pretrained model. 
+
+```python
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'internlm/internlm2-chat-1_8b-sft'
+```
+
+### Training Data
+
+In DPO training, you can specify the maximum number of tokens for a single sample sequence using the `max_length` parameter. XTuner will automatically truncate or pad the data.
+
+```python
+# Data
+max_length = 2048
+```
+
+In the configuration file, we use the `train_dataset` field to specify the training dataset. You can specify the dataset loading method using the `dataset` field and the dataset mapping function using the `dataset_map_fn` field.
+
+```python
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+sampler = SequenceParallelSampler \
+    if sequence_parallel_size > 1 else DefaultSampler
+
+train_dataset = dict(
+    type=build_preference_dataset,
+    dataset=dict(type=load_dataset, path='mlabonne/orpo-dpo-mix-40k'),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=orpo_dpo_mix_40k_map_fn,
+    is_dpo=True,
+    is_reward=False,
+    reward_token_id=-1,
+    num_proc=32,
+    use_varlen_attn=use_varlen_attn,
+    max_packed_length=max_packed_length,
+    shuffle_before_pack=True,
+)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=sampler, shuffle=True),
+    collate_fn=dict(
+        type=preference_collate_fn, use_varlen_attn=use_varlen_attn))
+```
+
+In the above configuration, we use `load_dataset` to load the `mlabonne/orpo-dpo-mix-40k` dataset from Hugging Face and use `orpo_dpo_mix_40k_map_fn` as the dataset mapping function.
+
+For more information on handling datasets and writing dataset mapping functions, please refer to the [Preference Dataset Section](../reward_model/preference_data.md).
+
+### Accelerating Training
+
+When training with preference data, we recommend enabling the [Variable-Length Attention Mechanism](https://xtuner.readthedocs.io/zh-cn/latest/acceleration/varlen_flash_attn.html) to avoid memory waste caused by length differences between the chosen and rejected samples within a single preference pair. You can enable the variable-length attention mechanism by setting `use_varlen_attn=True`.
+
+XTuner also supports many training acceleration methods. For details on how to use them, please refer to the [Acceleration Strategies Section](https://xtuner.readthedocs.io/zh-cn/latest/acceleration/hyper_parameters.html).
diff --git a/data/xtuner/docs/en/dpo/overview.md b/data/xtuner/docs/en/dpo/overview.md
new file mode 100644
index 0000000000000000000000000000000000000000..0c20946e3470eafe96f292b48cb2efc1eb036c1d
--- /dev/null
+++ b/data/xtuner/docs/en/dpo/overview.md
@@ -0,0 +1,27 @@
+## Introduction to DPO
+
+### Overview
+
+DPO (Direct Preference Optimization) is a method used in large language model training to optimize directly for human preferences. Unlike traditional reinforcement learning methods, DPO uses human preference data to optimize the model directly, improving the quality of generated content so that it aligns better with human preferences. DPO also eliminates the need to train a Reward Model and a Critic Model, avoiding the complexity of reinforcement learning algorithms, reducing training overhead, and enhancing training efficiency.
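+
+For reference, the objective that standard DPO optimizes (from the original DPO paper; restated here for convenience) is:
+
+```math
+\mathcal{L}_{\mathrm{DPO}}(\pi_\theta; \pi_{\mathrm{ref}}) = -\mathbb{E}_{(x, y_w, y_l) \sim \mathcal{D}}\left[\log \sigma\left(\beta \log \frac{\pi_\theta(y_w \mid x)}{\pi_{\mathrm{ref}}(y_w \mid x)} - \beta \log \frac{\pi_\theta(y_l \mid x)}{\pi_{\mathrm{ref}}(y_l \mid x)}\right)\right]
+```
+
+where $y_w$ and $y_l$ are the chosen and rejected responses to prompt $x$, $\sigma$ is the sigmoid function, $\pi_{\mathrm{ref}}$ is the frozen reference model, and $\beta$ is the temperature coefficient (exposed as `loss_beta` in XTuner's DPO configs).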
+
+Many algorithms have proposed refinements to DPO's loss function. In XTuner, besides DPO, we have also implemented loss functions from papers such as [Identity Preference Optimization (IPO)](https://huggingface.co/papers/2310.12036). To use these algorithms, please refer to the [Modify DPO Settings](./modify_settings.md) section. We also provide some [example configurations](https://github.com/InternLM/xtuner/tree/main/xtuner/configs/dpo) for reference.
+
+In addition to DPO, there are alignment algorithms like [ORPO](https://arxiv.org/abs/2403.07691) that do not require a reference model. ORPO uses the odds ratio to penalize rejected samples during training, thereby fitting the chosen samples more effectively. Because it eliminates the dependence on a reference model, ORPO makes the training process simpler and more efficient. The training method for ORPO in XTuner is very similar to DPO, and we provide some [example configurations](https://github.com/InternLM/xtuner/tree/main/xtuner/configs/orpo). Users can refer to the DPO tutorial to modify the configuration.
+
+### Features of DPO Training in XTuner
+
+DPO training in XTuner offers the following significant advantages:
+
+1. **Latest Algorithms**: In addition to standard DPO, XTuner supports improved DPO variants as well as memory-efficient algorithms such as ORPO that do not rely on a reference model.
+
+2. **Reducing Memory Waste**: Due to the length differences between chosen and rejected data in preference datasets, padding tokens during data concatenation can cause memory waste. In XTuner, by utilizing the variable-length attention feature from Flash Attention 2, preference pairs are packed into the same sequence during training, significantly reducing memory waste caused by padding tokens. This not only improves memory efficiency but also allows for training larger models or handling more data under the same hardware conditions.
+
+   ![img](../../zh_cn/reward_model/images/var_len_atten.png)
+
+3. **Efficient Training**: Leveraging XTuner's QLoRA training capabilities, the policy model with its LoRA adapter removed can serve as the reference model, eliminating the memory overhead of separate reference model weights and significantly reducing DPO training costs.
+
+4. **Long Text Training**: With XTuner's sequence parallel functionality, long text data can be trained efficiently.
+
+### Getting Started
+
+Refer to the [Quick Start Guide](./quick_start.md) to understand the basic concepts. For more information on configuring training parameters, please see the [Modify DPO Settings](./modify_settings.md) section.
diff --git a/data/xtuner/docs/en/dpo/quick_start.md b/data/xtuner/docs/en/dpo/quick_start.md
new file mode 100644
index 0000000000000000000000000000000000000000..19fffbf8b4eeb45ea8b457608a78630fa4d9bade
--- /dev/null
+++ b/data/xtuner/docs/en/dpo/quick_start.md
@@ -0,0 +1,71 @@
+## Quick Start with DPO
+
+In this section, we will introduce how to use XTuner to train a 1.8B DPO (Direct Preference Optimization) model to help you get started quickly.
+
+### Preparing Pretrained Model Weights
+
+We use [InternLM2-chat-1.8b-sft](https://huggingface.co/internlm/internlm2-chat-1_8b-sft) as the initial model for DPO training to align with human preferences.
+
+Set `pretrained_model_name_or_path = 'internlm/internlm2-chat-1_8b-sft'` in the training configuration file, and the model files will be automatically downloaded when training starts.
If you need to download the model weights manually, please refer to the section [Preparing Pretrained Model Weights](https://xtuner.readthedocs.io/zh-cn/latest/preparation/pretrained_model.html), which provides detailed instructions on how to download model weights from HuggingFace or ModelScope. Here are the links to the models on HuggingFace and ModelScope:
+
+- HuggingFace link: https://huggingface.co/internlm/internlm2-chat-1_8b-sft
+- ModelScope link: https://modelscope.cn/models/Shanghai_AI_Laboratory/internlm2-chat-1_8b-sft/summary
+
+### Preparing Training Data
+
+In this tutorial, we use the [mlabonne/orpo-dpo-mix-40k](https://huggingface.co/datasets/mlabonne/orpo-dpo-mix-40k) dataset from HuggingFace as an example.
+
+```python
+train_dataset = dict(
+    type=build_preference_dataset,
+    dataset=dict(
+        type=load_dataset,
+        path='mlabonne/orpo-dpo-mix-40k'),
+    dataset_map_fn=orpo_dpo_mix_40k_map_fn,
+    is_dpo=True,
+    is_reward=False,
+)
+```
+
+Using the above configuration in the configuration file will automatically download and process this dataset. If you want to use other open-source datasets from HuggingFace or custom datasets, please refer to the [Preference Dataset](../reward_model/preference_data.md) section.
+
+### Preparing the Configuration File
+
+XTuner provides several ready-to-use configuration files, which can be viewed using `xtuner list-cfg`. Execute the following command to copy a configuration file to the current directory.
+
+```bash
+xtuner copy-cfg internlm2_chat_1_8b_dpo_full .
+```
+
+Open the copied configuration file. If you choose to download the model and dataset automatically, no modifications are needed. If you want to specify paths to your pre-downloaded model and dataset, modify `pretrained_model_name_or_path` and the `path` parameter in `dataset` under `train_dataset`.
+
+For more training parameter configurations, please refer to the [Modifying DPO Training Configuration](./modify_settings.md) section.
+
+### Starting the Training
+
+After completing the above steps, you can start the training task using the following commands.
+
+```bash
+# Single machine, single GPU
+xtuner train ./internlm2_chat_1_8b_dpo_full_copy.py
+# Single machine, multiple GPUs
+NPROC_PER_NODE=${GPU_NUM} xtuner train ./internlm2_chat_1_8b_dpo_full_copy.py
+# Slurm cluster
+srun ${SRUN_ARGS} xtuner train ./internlm2_chat_1_8b_dpo_full_copy.py --launcher slurm
+```
+
+### Model Conversion
+
+XTuner provides integrated tools to convert models to HuggingFace format. Simply execute the following commands:
+
+```bash
+# Create a directory for HuggingFace format parameters
+mkdir work_dirs/internlm2_chat_1_8b_dpo_full_copy/iter_15230_hf
+
+# Convert format
+xtuner convert pth_to_hf internlm2_chat_1_8b_dpo_full_copy.py \
+                         work_dirs/internlm2_chat_1_8b_dpo_full_copy/iter_15230.pth \
+                         work_dirs/internlm2_chat_1_8b_dpo_full_copy/iter_15230_hf
+```
+
+This will convert XTuner's checkpoint to the HuggingFace format.
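+
+As a quick sanity check, the converted directory can be loaded directly with `transformers`. The sketch below is illustrative only (the path assumes the conversion command above; `trust_remote_code=True` is required for InternLM2 models):
+
+```python
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+path = 'work_dirs/internlm2_chat_1_8b_dpo_full_copy/iter_15230_hf'
+tokenizer = AutoTokenizer.from_pretrained(path, trust_remote_code=True)
+model = AutoModelForCausalLM.from_pretrained(path, trust_remote_code=True)
+
+# Generate a short completion to confirm the converted weights load and run
+inputs = tokenizer('Hello, who are you?', return_tensors='pt')
+outputs = model.generate(**inputs, max_new_tokens=32)
+print(tokenizer.decode(outputs[0], skip_special_tokens=True))
+```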
diff --git a/data/xtuner/docs/en/evaluation/hook.md b/data/xtuner/docs/en/evaluation/hook.md
new file mode 100644
index 0000000000000000000000000000000000000000..de9e98c88665b4cb2741edb3c6e5adaef39e7116
--- /dev/null
+++ b/data/xtuner/docs/en/evaluation/hook.md
@@ -0,0 +1 @@
+# Evaluation during training
diff --git a/data/xtuner/docs/en/evaluation/mmbench.md b/data/xtuner/docs/en/evaluation/mmbench.md
new file mode 100644
index 0000000000000000000000000000000000000000..5421b1c96ac973f7a47839cb2478d63997473d94
--- /dev/null
+++ b/data/xtuner/docs/en/evaluation/mmbench.md
@@ -0,0 +1 @@
+# MMBench (VLM)
diff --git a/data/xtuner/docs/en/evaluation/mmlu.md b/data/xtuner/docs/en/evaluation/mmlu.md
new file mode 100644
index 0000000000000000000000000000000000000000..4bfabff8fa0c0492fe376413ab68dd4382f14cd4
--- /dev/null
+++ b/data/xtuner/docs/en/evaluation/mmlu.md
@@ -0,0 +1 @@
+# MMLU (LLM)
diff --git a/data/xtuner/docs/en/evaluation/opencompass.md b/data/xtuner/docs/en/evaluation/opencompass.md
new file mode 100644
index 0000000000000000000000000000000000000000..eb24da882f1ab04691e1bc87cf74a62809184d69
--- /dev/null
+++ b/data/xtuner/docs/en/evaluation/opencompass.md
@@ -0,0 +1 @@
+# Evaluate with OpenCompass
diff --git a/data/xtuner/docs/en/get_started/installation.md b/data/xtuner/docs/en/get_started/installation.md
new file mode 100644
index 0000000000000000000000000000000000000000..007e61553cc9c487db4639fe832d28b7835d22b8
--- /dev/null
+++ b/data/xtuner/docs/en/get_started/installation.md
@@ -0,0 +1,52 @@
+# Installation
+
+In this section, we will show you how to install XTuner.
+
+## Installation Process
+
+We recommend that users follow our best practices for installing XTuner: use a conda virtual environment with Python 3.10.
+
+### Best Practices
+
+**Step 0.** Create a Python 3.10 virtual environment using conda.
+
+```shell
+conda create --name xtuner-env python=3.10 -y
+conda activate xtuner-env
+```
+
+**Step 1.** Install XTuner.
+
+Case a: Install XTuner via pip:
+
+```shell
+pip install -U xtuner
+```
+
+Case b: Install XTuner with DeepSpeed integration:
+
+```shell
+pip install -U 'xtuner[deepspeed]'
+```
+
+Case c: Install XTuner from the source code:
+
+```shell
+git clone https://github.com/InternLM/xtuner.git
+cd xtuner
+pip install -e '.[all]'
+# "-e" indicates installing the project in editable mode, so any local modifications to the code will take effect without reinstalling.
+```
+
+## Verify the Installation
+
+To verify whether XTuner is installed correctly, we can print the available configuration files.
+
+**Print configuration files:** Run `xtuner list-cfg` on the command line to verify that the configuration files can be listed.
+
+```shell
+xtuner list-cfg
+```
+
+You should see a list of XTuner configuration files, corresponding to the ones in [xtuner/configs](https://github.com/InternLM/xtuner/tree/main/xtuner/configs) in the source code.
diff --git a/data/xtuner/docs/en/get_started/overview.md b/data/xtuner/docs/en/get_started/overview.md
new file mode 100644
index 0000000000000000000000000000000000000000..c257c83c6a3aabd31401cd49343d86a2ee89c899
--- /dev/null
+++ b/data/xtuner/docs/en/get_started/overview.md
@@ -0,0 +1,5 @@
+# Overview
+
+This chapter introduces the framework and workflow of XTuner and provides detailed tutorial links.
+
+## What is XTuner
diff --git a/data/xtuner/docs/en/get_started/quickstart.md b/data/xtuner/docs/en/get_started/quickstart.md
new file mode 100644
index 0000000000000000000000000000000000000000..23198bf3b7cae45461148c04560ca5e80c0b0e80
--- /dev/null
+++ b/data/xtuner/docs/en/get_started/quickstart.md
@@ -0,0 +1,308 @@
+# Quickstart
+
+In this section, we will show you how to use XTuner to fine-tune a model to help you get started quickly.
+
+After installing XTuner successfully, we can start fine-tuning the model. In this section, we will demonstrate how to use XTuner to apply the QLoRA algorithm to fine-tune InternLM2-Chat-7B on the Colorist dataset.
+
+The Colorist dataset ([HuggingFace link](https://huggingface.co/datasets/burkelibbey/colors); [ModelScope link](https://www.modelscope.cn/datasets/fanqiNO1/colors/summary)) provides color choices and suggestions based on color descriptions. A model fine-tuned on this dataset can be used to give a hexadecimal color code based on the user's description of a color. For example, when the user enters "a calming but fairly bright light sky blue, between sky blue and baby blue, with a hint of fluorescence due to its brightness", the model will output ![#66ccff](https://img.shields.io/badge/%2366ccff-66CCFF), which matches the user's description. Here are a few samples from this dataset:
+
+| English Description | Chinese Description | Color |
+| ------------------- | ------------------- | ----- |
+| Light Sky Blue: A calming, fairly bright color that falls between sky blue and baby blue, with a hint of slight fluorescence due to its brightness. | 浅天蓝色:一种介于天蓝和婴儿蓝之间的平和、相当明亮的颜色,由于明亮而带有一丝轻微的荧光。 | #66ccff: ![#66ccff](https://img.shields.io/badge/%2366ccff-66CCFF) |
+| Bright red: This is a very vibrant, saturated and vivid shade of red, resembling the color of ripe apples or fresh blood. It is as red as you can get on a standard RGB color palette, with no elements of either blue or green. | 鲜红色: 这是一种非常鲜艳、饱和、生动的红色,类似成熟苹果或新鲜血液的颜色。它是标准 RGB 调色板上的红色,不含任何蓝色或绿色元素。 | #ee0000: ![#ee0000](https://img.shields.io/badge/%23ee0000-EE0000) |
+| Bright Turquoise: This color mixes the freshness of bright green with the tranquility of light blue, leading to a vibrant shade of turquoise. It is reminiscent of tropical waters. | 明亮的绿松石色:这种颜色融合了鲜绿色的清新和淡蓝色的宁静,呈现出一种充满活力的绿松石色调。它让人联想到热带水域。 | #00ffcc: ![#00ffcc](https://img.shields.io/badge/%2300ffcc-00FFCC) |
+
+## Prepare the model weights
+
+Before fine-tuning the model, we first need to prepare the weights of the model.
+
+### Download from HuggingFace
+
+```bash
+pip install -U huggingface_hub
+
+# Download the model weights to Shanghai_AI_Laboratory/internlm2-chat-7b
+huggingface-cli download internlm/internlm2-chat-7b \
+                         --local-dir Shanghai_AI_Laboratory/internlm2-chat-7b \
+                         --local-dir-use-symlinks False \
+                         --resume-download
+```
+
+### Download from ModelScope
+
+Since pulling model weights from HuggingFace may suffer from an unstable connection, slow download speed, and other problems, we can choose to download the weights of InternLM2-Chat-7B from ModelScope when experiencing network issues.
+
+```bash
+pip install -U modelscope
+
+# Download the model weights to the current directory
+python -c "from modelscope import snapshot_download; snapshot_download('Shanghai_AI_Laboratory/internlm2-chat-7b', cache_dir='.')"
+```
+
+After completing the download, we can start to prepare the dataset for fine-tuning.
+
+The HuggingFace and ModelScope links are attached here:
+
+- HuggingFace link: https://huggingface.co/internlm/internlm2-chat-7b
+- ModelScope link: https://modelscope.cn/models/Shanghai_AI_Laboratory/internlm2-chat-7b/summary
+
+## Prepare the fine-tuning dataset
+
+### Download from HuggingFace
+
+```bash
+git clone https://huggingface.co/datasets/burkelibbey/colors
+```
+
+### Download from ModelScope
+
+For the same reason, we can choose to download the dataset from ModelScope.
+
+```bash
+git clone https://www.modelscope.cn/datasets/fanqiNO1/colors.git
+```
+
+The HuggingFace and ModelScope links are attached here:
+
+- HuggingFace link: https://huggingface.co/datasets/burkelibbey/colors
+- ModelScope link: https://modelscope.cn/datasets/fanqiNO1/colors
+
+## Prepare the config
+
+XTuner provides several configs out-of-the-box, which can be viewed via `xtuner list-cfg`. We can use the following command to copy a config to the current directory.
+
+```bash
+xtuner copy-cfg internlm2_7b_qlora_colorist_e5 .
+```
+
+Explanation of the config name:
+
+| Config Name | internlm2_7b_qlora_colorist_e5 |
+| ----------- | ------------------------------ |
+| Model Name  | internlm2_7b                   |
+| Algorithm   | qlora                          |
+| Dataset     | colorist                       |
+| Epochs      | 5                              |
+
+The directory structure at this point should look like this:
+
+```bash
+.
+├── colors
+│   ├── colors.json
+│   ├── dataset_infos.json
+│   ├── README.md
+│   └── train.jsonl
+├── internlm2_7b_qlora_colorist_e5_copy.py
+└── Shanghai_AI_Laboratory
+    └── internlm2-chat-7b
+        ├── config.json
+        ├── configuration_internlm2.py
+        ├── configuration.json
+        ├── generation_config.json
+        ├── modeling_internlm2.py
+        ├── pytorch_model-00001-of-00008.bin
+        ├── pytorch_model-00002-of-00008.bin
+        ├── pytorch_model-00003-of-00008.bin
+        ├── pytorch_model-00004-of-00008.bin
+        ├── pytorch_model-00005-of-00008.bin
+        ├── pytorch_model-00006-of-00008.bin
+        ├── pytorch_model-00007-of-00008.bin
+        ├── pytorch_model-00008-of-00008.bin
+        ├── pytorch_model.bin.index.json
+        ├── README.md
+        ├── special_tokens_map.json
+        ├── tokenization_internlm2_fast.py
+        ├── tokenization_internlm2.py
+        ├── tokenizer_config.json
+        └── tokenizer.model
+```
+
+## Modify the config
+
+In this step, we need to change the model path and dataset path to local paths and modify the dataset loading method.
+In addition, since the copied config is based on the Base model, we also need to modify the `prompt_template` to adapt to the Chat model.
+
+```diff
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+- pretrained_model_name_or_path = 'internlm/internlm2-7b'
++ pretrained_model_name_or_path = './Shanghai_AI_Laboratory/internlm2-chat-7b'
+
+# Data
+- data_path = 'burkelibbey/colors'
++ data_path = './colors/train.jsonl'
+- prompt_template = PROMPT_TEMPLATE.default
++ prompt_template = PROMPT_TEMPLATE.internlm2_chat
+
+...
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+-   dataset=dict(type=load_dataset, path=data_path),
++   dataset=dict(type=load_dataset, path='json', data_files=dict(train=data_path)),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=colors_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length)
+```
+
+In summary, `pretrained_model_name_or_path`, `data_path`, `prompt_template`, and the `dataset` field in `train_dataset` are modified.
+
+## Start fine-tuning
+
+Once the above steps are done, we can start fine-tuning using the following command.
+
+```bash
+# Single GPU
+xtuner train ./internlm2_7b_qlora_colorist_e5_copy.py
+# Multiple GPUs
+NPROC_PER_NODE=${GPU_NUM} xtuner train ./internlm2_7b_qlora_colorist_e5_copy.py
+# Slurm
+srun ${SRUN_ARGS} xtuner train ./internlm2_7b_qlora_colorist_e5_copy.py --launcher slurm
+```
+
+The correct training log may look similar to the one shown below:
+
+```text
+01/29 21:35:34 - mmengine - INFO - Iter(train) [ 10/720] lr: 9.0001e-05 eta: 0:31:46 time: 2.6851 data_time: 0.0077 memory: 12762 loss: 2.6900
+01/29 21:36:02 - mmengine - INFO - Iter(train) [ 20/720] lr: 1.9000e-04 eta: 0:32:01 time: 2.8037 data_time: 0.0071 memory: 13969 loss: 2.6049 grad_norm: 0.9361
+01/29 21:36:29 - mmengine - INFO - Iter(train) [ 30/720] lr: 1.9994e-04 eta: 0:31:24 time: 2.7031 data_time: 0.0070 memory: 13969 loss: 2.5795 grad_norm: 0.9361
+01/29 21:36:57 - mmengine - INFO - Iter(train) [ 40/720] lr: 1.9969e-04 eta: 0:30:55 time: 2.7247 data_time: 0.0069 memory: 13969 loss: 2.3352 grad_norm: 0.8482
+01/29 21:37:24 - mmengine - INFO - Iter(train) [ 50/720] lr: 1.9925e-04 eta: 0:30:28 time: 2.7286 data_time: 0.0068 memory: 13969 loss: 2.2816 grad_norm: 0.8184
+01/29 21:37:51 - mmengine - INFO - Iter(train) [ 60/720] lr: 1.9863e-04 eta: 0:29:58 time: 2.7048 data_time: 0.0069 memory: 13969 loss: 2.2040 grad_norm: 0.8184
+01/29 21:38:18 - mmengine - INFO - Iter(train) [ 70/720] lr: 1.9781e-04 eta: 0:29:31 time: 2.7302 data_time: 0.0068 memory: 13969 loss: 2.1912 grad_norm: 0.8460
+01/29 21:38:46 - mmengine - INFO - Iter(train) [ 80/720] lr: 1.9681e-04 eta: 0:29:05 time: 2.7338 data_time: 0.0069 memory: 13969 loss: 2.1512 grad_norm: 0.8686
+01/29 21:39:13 - mmengine - INFO - Iter(train) [ 90/720] lr: 1.9563e-04 eta: 0:28:36 time: 2.7047 data_time: 0.0068 memory: 13969 loss: 2.0653 grad_norm: 0.8686
+01/29 21:39:40 - mmengine - INFO - Iter(train) [100/720] lr: 1.9426e-04 eta: 0:28:09 time: 2.7383 data_time: 0.0070 memory: 13969 loss: 1.9819 grad_norm: 0.9127
+```
+
+Before training begins, the output of the model is as shown below:
+
+```text
+2024/01/29 21:34:58 - mmengine - INFO - before_train in EvaluateChatHook.
+2024/01/29 21:35:03 - mmengine - INFO - Sample output:
+<|im_start|>system
+You are a professional color designer. Please provide the corresponding colors based on the description of Human.
+<|im_end|>
+<|im_start|>user
+请给我一个像天空一样清澈透明的蓝色。<|im_end|>
+<|im_start|>assistant
+为了匹配您所描述的“像天空一样清澈透明的蓝色”,我建议您选择一种名为“天蓝”(Cerulean)的颜色。这种颜色通常被用来代表天空、海洋和清澈的水域,它具有明亮
+
+2024/01/29 21:35:07 - mmengine - INFO - Sample output:
+<|im_start|>system
+You are a professional color designer. Please provide the corresponding colors based on the description of Human.
+<|im_end|>
+<|im_start|>user
+Please give me a clear blue like the sky.<|im_end|>
+<|im_start|>assistant
+The color you're describing is a clear, bright blue that resembles the color of the sky on a sunny day. This color is often referred to as "sky blue" or "cerulean blue." Here are some specific color codes and names that
+```
+
+After training, the output of the model is as shown below:
+
+```text
+2024/01/29 22:08:07 - mmengine - INFO - Sample output:
+<|im_start|>system
+You are a professional color designer. Please provide the corresponding colors based on the description of Human.
+<|im_end|>
+<|im_start|>user
+请给我一个像天空一样清澈透明的蓝色。<|im_end|>
+<|im_start|>assistant
+#1099ee<|im_end|>
+
+2024/01/29 22:08:08 - mmengine - INFO - Sample output:
+<|im_start|>system
+You are a professional color designer. Please provide the corresponding colors based on the description of Human.
+<|im_end|>
+<|im_start|>user
+Please give me a clear blue like the sky.<|im_end|>
+<|im_start|>assistant
+#0066dd<|im_end|>
+```
+
+The colors of the model output are shown below:
+
+- 天空一样清澈透明的蓝色 (a blue as clear and transparent as the sky): ![天空一样清澈透明的蓝色](https://img.shields.io/badge/天空一样清澈透明的蓝色-1099EE)
+- A clear blue like the sky: ![A clear blue like the sky](https://img.shields.io/badge/A_clear_blue_like_the_sky-0066DD)
+
+It is clear that after training, the model's output is fully aligned with the content of the dataset.
+
+## Model Convert + LoRA Merge
+
+After training, we will get several `.pth` files that do **NOT** contain all the parameters of the model, but only the parameters updated by the QLoRA algorithm during training. Therefore, we need to convert these `.pth` files to HuggingFace format and merge them into the original LLM weights.
+
+### Model Convert
+
+XTuner provides an integrated tool for converting models to HuggingFace format. We can use the following command to convert the model.
+
+```bash
+# Create the directory to store parameters in hf format
+mkdir work_dirs/internlm2_7b_qlora_colorist_e5_copy/iter_720_hf
+
+# Convert the model to hf format
+xtuner convert pth_to_hf internlm2_7b_qlora_colorist_e5_copy.py \
+                         work_dirs/internlm2_7b_qlora_colorist_e5_copy/iter_720.pth \
+                         work_dirs/internlm2_7b_qlora_colorist_e5_copy/iter_720_hf
+```
+
+This command will convert `work_dirs/internlm2_7b_qlora_colorist_e5_copy/iter_720.pth` to hf format based on the contents of the config `internlm2_7b_qlora_colorist_e5_copy.py` and will save it in `work_dirs/internlm2_7b_qlora_colorist_e5_copy/iter_720_hf`.
+
+### LoRA Merge
+
+XTuner also provides an integrated tool for merging LoRA weights; we just need to execute the following command:
+
+```bash
+# Create the directory to store the merged weights
+mkdir work_dirs/internlm2_7b_qlora_colorist_e5_copy/merged
+
+# Merge the weights
+xtuner convert merge Shanghai_AI_Laboratory/internlm2-chat-7b \
+                     work_dirs/internlm2_7b_qlora_colorist_e5_copy/iter_720_hf \
+                     work_dirs/internlm2_7b_qlora_colorist_e5_copy/merged \
+                     --max-shard-size 2GB
+```
+
+Similar to the command above, this command reads the original parameter path `Shanghai_AI_Laboratory/internlm2-chat-7b` and the path of the parameters converted to hf format `work_dirs/internlm2_7b_qlora_colorist_e5_copy/iter_720_hf`, merges the two parts, and saves them in `work_dirs/internlm2_7b_qlora_colorist_e5_copy/merged`, where the maximum size of each parameter shard file is 2GB.
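+
+Conceptually, merging folds the low-rank LoRA update back into the base weights. If you prefer to do this in Python, a rough equivalent using the `peft` library looks like the sketch below; note that this is an illustration, not the exact code path that `xtuner convert merge` takes:
+
+```python
+import torch
+from peft import PeftModel
+from transformers import AutoModelForCausalLM
+
+# Load the original base model downloaded earlier
+base = AutoModelForCausalLM.from_pretrained(
+    'Shanghai_AI_Laboratory/internlm2-chat-7b',
+    torch_dtype=torch.float16, trust_remote_code=True)
+# Attach the HF-format adapter produced by `xtuner convert pth_to_hf`
+model = PeftModel.from_pretrained(
+    base, 'work_dirs/internlm2_7b_qlora_colorist_e5_copy/iter_720_hf')
+# Fold the LoRA weights into the base model and save sharded weights
+merged = model.merge_and_unload()
+merged.save_pretrained(
+    'work_dirs/internlm2_7b_qlora_colorist_e5_copy/merged',
+    max_shard_size='2GB')
+```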
+
+## Chat with the model
+
+To better appreciate the model's capabilities after merging the weights, we can chat with the model. XTuner also integrates a tool for chatting with models. We can start a simple demo to chat with the model with the following command:
+
+```bash
+xtuner chat work_dirs/internlm2_7b_qlora_colorist_e5_copy/merged \
+            --prompt-template internlm2_chat \
+            --system-template colorist
+```
+
+Of course, we can also choose not to merge the weights and instead chat directly with the LLM + LoRA Adapter; we just need to execute the following command:
+
+```bash
+xtuner chat Shanghai_AI_Laboratory/internlm2-chat-7b \
+            --adapter work_dirs/internlm2_7b_qlora_colorist_e5_copy/iter_720_hf \
+            --prompt-template internlm2_chat \
+            --system-template colorist
+```
+
+Here, `work_dirs/internlm2_7b_qlora_colorist_e5_copy/merged` is the path to the merged weights, `--prompt-template internlm2_chat` specifies that the chat template is InternLM2-Chat, and `--system-template colorist` specifies that the System Prompt for conversations with the model is the template required by the Colorist dataset.
+
+There is an example below:
+
+```text
+double enter to end input (EXIT: exit chat, RESET: reset history) >>> A calming but fairly bright light sky blue, between sky blue and baby blue, with a hint of fluorescence due to its brightness.
+
+#66ccff<|im_end|>
+```
+
+The color of the model output is shown below:
+
+A calming but fairly bright light sky blue, between sky blue and baby blue, with a hint of fluorescence due to its brightness: ![#66ccff](https://img.shields.io/badge/A_calming_but_fairly_bright_light_sky_blue_between_sky_blue_and_baby_blue_with_a_hint_of_fluorescence_due_to_its_brightness-66CCFF).
diff --git a/data/xtuner/docs/en/index.rst b/data/xtuner/docs/en/index.rst
new file mode 100644
index 0000000000000000000000000000000000000000..c4c18d31ab03f6f9bf91f3d40f5cfeb626735f66
--- /dev/null
+++ b/data/xtuner/docs/en/index.rst
@@ -0,0 +1,123 @@
+.. xtuner documentation master file, created by
+   sphinx-quickstart on Tue Jan 9 16:33:06 2024.
+   You can adapt this file completely to your liking, but it should at least
+   contain the root `toctree` directive.
+
+Welcome to XTuner's documentation!
+==================================
+
+.. figure:: ./_static/image/logo.png
+   :align: center
+   :alt: xtuner
+   :class: no-scaled-link
+
+.. raw:: html

+   All-IN-ONE toolbox for LLM
+
+   (GitHub Star / Watch / Fork buttons)

+ + + +Documentation +------------- +.. toctree:: + :maxdepth: 2 + :caption: Get Started + + get_started/overview.md + get_started/installation.md + get_started/quickstart.md + +.. toctree:: + :maxdepth: 2 + :caption: Preparation + + preparation/pretrained_model.rst + preparation/prompt_template.rst + +.. toctree:: + :maxdepth: 2 + :caption: Training + + training/modify_settings.rst + training/custom_sft_dataset.rst + training/custom_pretrain_dataset.rst + training/custom_agent_dataset.rst + training/multi_modal_dataset.rst + training/open_source_dataset.rst + training/visualization.rst + +.. toctree:: + :maxdepth: 2 + :caption: DPO + + dpo/overview.md + dpo/quick_start.md + dpo/modify_settings.md + +.. toctree:: + :maxdepth: 2 + :caption: Reward Model + + reward_model/overview.md + reward_model/quick_start.md + reward_model/modify_settings.md + reward_model/preference_data.md + +.. toctree:: + :maxdepth: 2 + :caption: Acceleration + + acceleration/deepspeed.rst + acceleration/pack_to_max_length.rst + acceleration/flash_attn.rst + acceleration/varlen_flash_attn.rst + acceleration/hyper_parameters.rst + acceleration/length_grouped_sampler.rst + acceleration/train_large_scale_dataset.rst + acceleration/train_extreme_long_sequence.rst + acceleration/benchmark.rst + +.. toctree:: + :maxdepth: 2 + :caption: Chat + + chat/llm.md + chat/agent.md + chat/vlm.md + chat/lmdeploy.md + +.. toctree:: + :maxdepth: 2 + :caption: Evaluation + + evaluation/hook.md + evaluation/mmlu.md + evaluation/mmbench.md + evaluation/opencompass.md + +.. toctree:: + :maxdepth: 2 + :caption: Models + + models/supported.md + +.. toctree:: + :maxdepth: 2 + :caption: InternEvo Migration + + internevo_migration/internevo_migration.rst + internevo_migration/ftdp_dataset/ftdp.rst + internevo_migration/ftdp_dataset/Case1.rst + internevo_migration/ftdp_dataset/Case2.rst + internevo_migration/ftdp_dataset/Case3.rst + internevo_migration/ftdp_dataset/Case4.rst diff --git a/data/xtuner/docs/en/internevo_migration/ftdp_dataset/Case1.rst b/data/xtuner/docs/en/internevo_migration/ftdp_dataset/Case1.rst new file mode 100644 index 0000000000000000000000000000000000000000..c8eb0c76afa4c5630d910c3fce05eea62e2a9a08 --- /dev/null +++ b/data/xtuner/docs/en/internevo_migration/ftdp_dataset/Case1.rst @@ -0,0 +1,2 @@ +Case 1 +====== diff --git a/data/xtuner/docs/en/internevo_migration/ftdp_dataset/Case2.rst b/data/xtuner/docs/en/internevo_migration/ftdp_dataset/Case2.rst new file mode 100644 index 0000000000000000000000000000000000000000..74069f68f830fe2de5ee641266b4a9aad585ea7a --- /dev/null +++ b/data/xtuner/docs/en/internevo_migration/ftdp_dataset/Case2.rst @@ -0,0 +1,2 @@ +Case 2 +====== diff --git a/data/xtuner/docs/en/internevo_migration/ftdp_dataset/Case3.rst b/data/xtuner/docs/en/internevo_migration/ftdp_dataset/Case3.rst new file mode 100644 index 0000000000000000000000000000000000000000..d963b538b55c70a12978e738e1f3d6db399f445f --- /dev/null +++ b/data/xtuner/docs/en/internevo_migration/ftdp_dataset/Case3.rst @@ -0,0 +1,2 @@ +Case 3 +====== diff --git a/data/xtuner/docs/en/internevo_migration/ftdp_dataset/Case4.rst b/data/xtuner/docs/en/internevo_migration/ftdp_dataset/Case4.rst new file mode 100644 index 0000000000000000000000000000000000000000..1f7626933c512221449355c3eae138d9ea681955 --- /dev/null +++ b/data/xtuner/docs/en/internevo_migration/ftdp_dataset/Case4.rst @@ -0,0 +1,2 @@ +Case 4 +====== diff --git a/data/xtuner/docs/en/internevo_migration/ftdp_dataset/ftdp.rst 
b/data/xtuner/docs/en/internevo_migration/ftdp_dataset/ftdp.rst
new file mode 100644
index 0000000000000000000000000000000000000000..613568f151b54848f747c0740161d01e905359a2
--- /dev/null
+++ b/data/xtuner/docs/en/internevo_migration/ftdp_dataset/ftdp.rst
@@ -0,0 +1,2 @@
+ftdp
+====
diff --git a/data/xtuner/docs/en/internevo_migration/internevo_migration.rst b/data/xtuner/docs/en/internevo_migration/internevo_migration.rst
new file mode 100644
index 0000000000000000000000000000000000000000..869206508d772d8503003f7669a134a1d44fce7e
--- /dev/null
+++ b/data/xtuner/docs/en/internevo_migration/internevo_migration.rst
@@ -0,0 +1,2 @@
+InternEVO Migration
+===================
diff --git a/data/xtuner/docs/en/make.bat b/data/xtuner/docs/en/make.bat
new file mode 100644
index 0000000000000000000000000000000000000000..954237b9b9f2b248bb1397a15c055c0af1cad03e
--- /dev/null
+++ b/data/xtuner/docs/en/make.bat
@@ -0,0 +1,35 @@
+@ECHO OFF
+
+pushd %~dp0
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+	set SPHINXBUILD=sphinx-build
+)
+set SOURCEDIR=.
+set BUILDDIR=_build
+
+%SPHINXBUILD% >NUL 2>NUL
+if errorlevel 9009 (
+	echo.
+	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+	echo.installed, then set the SPHINXBUILD environment variable to point
+	echo.to the full path of the 'sphinx-build' executable. Alternatively you
+	echo.may add the Sphinx directory to PATH.
+	echo.
+	echo.If you don't have Sphinx installed, grab it from
+	echo.https://www.sphinx-doc.org/
+	exit /b 1
+)
+
+if "%1" == "" goto help
+
+%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+goto end
+
+:help
+%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+
+:end
+popd
diff --git a/data/xtuner/docs/en/models/supported.md b/data/xtuner/docs/en/models/supported.md
new file mode 100644
index 0000000000000000000000000000000000000000..c61546e5209d69ef0824b54bada46c18de3d8f72
--- /dev/null
+++ b/data/xtuner/docs/en/models/supported.md
@@ -0,0 +1 @@
+# Supported Models
diff --git a/data/xtuner/docs/en/notes/changelog.md b/data/xtuner/docs/en/notes/changelog.md
new file mode 100644
index 0000000000000000000000000000000000000000..2c9678539d213e5bbca90bbf4449cfbe4dfd7936
--- /dev/null
+++ b/data/xtuner/docs/en/notes/changelog.md
@@ -0,0 +1,25 @@
+
+
+# Changelog
+
+## v0.1.0 (2023.08.30)
+
+XTuner is released! 🔥🔥🔥
+
+### Highlights
+
+- XTuner supports LLM fine-tuning on consumer-grade GPUs. The minimum GPU memory required for 7B LLM fine-tuning is only **8GB**.
+- XTuner supports various LLMs, datasets, algorithms and training pipelines.
+- Several fine-tuned adapters are released simultaneously, including various use cases such as the colorist LLM, the plugin-based LLM, and many more. For further details, please visit [XTuner on HuggingFace](https://huggingface.co/xtuner)!
diff --git a/data/xtuner/docs/en/preparation/pretrained_model.rst b/data/xtuner/docs/en/preparation/pretrained_model.rst new file mode 100644 index 0000000000000000000000000000000000000000..a3ac291ac1e74801c032a581b9e0b2afaf180a91 --- /dev/null +++ b/data/xtuner/docs/en/preparation/pretrained_model.rst @@ -0,0 +1,2 @@ +Pretrained Model +================ diff --git a/data/xtuner/docs/en/preparation/prompt_template.rst b/data/xtuner/docs/en/preparation/prompt_template.rst new file mode 100644 index 0000000000000000000000000000000000000000..43ccb98e31eaca7c05368628475613f515371810 --- /dev/null +++ b/data/xtuner/docs/en/preparation/prompt_template.rst @@ -0,0 +1,2 @@ +Prompt Template +=============== diff --git a/data/xtuner/docs/en/reward_model/modify_settings.md b/data/xtuner/docs/en/reward_model/modify_settings.md new file mode 100644 index 0000000000000000000000000000000000000000..4f41ca300865bc83bd02b727cc6b61696f8617fb --- /dev/null +++ b/data/xtuner/docs/en/reward_model/modify_settings.md @@ -0,0 +1,100 @@ +## Modify Reward Model Training Configuration + +This section introduces the config related to Reward Model training. For more details on XTuner config files, please refer to [Modify Settings](https://xtuner.readthedocs.io/zh-cn/latest/training/modify_settings.html). + +### Loss Function + +XTuner uses the [Bradley–Terry Model](https://en.wikipedia.org/wiki/Bradley%E2%80%93Terry_model) for preference modeling in the Reward Model. You can specify `loss_type="ranking"` to use ranking loss. XTuner also implements the focal loss function proposed in InternLM2, which adjusts the weights of difficult and easy samples to avoid overfitting. You can set `loss_type="focal"` to use this loss function. For a detailed explanation of this loss function, please refer to the [InternLM2 Technical Report](https://arxiv.org/abs/2403.17297). + +Additionally, to maintain stable reward model output scores, we have added a constraint term in the loss. You can specify `penalty_type='log_barrier'` or `penalty_type='L2'` to enable log barrier or L2 constraints, respectively. + +```python +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +loss_type = 'focal' # 'ranking' or 'focal' +penalty_type = 'log_barrier' # 'log_barrier' or 'L2' +``` + +### Modifying the Model + +Users can modify `pretrained_model_name_or_path` to change the pretrained model. + +Note that XTuner calculates reward scores by appending a special token at the end of the data. Therefore, when switching models with different vocabularies, the ID of this special token also needs to be modified accordingly. We usually use an unused token at the end of the vocabulary as the reward token. 
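+
+A quick way to double-check the id of a candidate reward token is to query the tokenizer directly. Below is a small sanity-check sketch (assuming `transformers` is installed and the model files are accessible):
+
+```python
+from transformers import AutoTokenizer
+
+tokenizer = AutoTokenizer.from_pretrained(
+    'internlm/internlm2-chat-1_8b-sft', trust_remote_code=True)
+# Should print 92527, matching the reward_token_id used below
+print(tokenizer.convert_tokens_to_ids('[UNUSED_TOKEN_130]'))
+```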
+
+For example, in InternLM2, we use `[UNUSED_TOKEN_130]` as the reward token:
+
+```python
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'internlm/internlm2-chat-1_8b-sft'
+reward_token_id = 92527  # use [UNUSED_TOKEN_130] as reward token
+```
+
+If the user switches to the Llama 3 model, we can use `<|reserved_special_token_0|>` as the reward token:
+
+```python
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'meta-llama/Meta-Llama-3-8B-Instruct'
+reward_token_id = 128002  # use <|reserved_special_token_0|> as reward token
+```
+
+### Training Data
+
+In Reward Model training, you can specify the maximum number of tokens for a single sample sequence using `max_length`. XTuner will automatically truncate or pad the data.
+
+```python
+# Data
+max_length = 2048
+```
+
+In the configuration file, we use the `train_dataset` field to specify the training dataset. You can specify the dataset loading method using the `dataset` field and the dataset mapping function using the `dataset_map_fn` field.
+
+```python
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+sampler = SequenceParallelSampler \
+    if sequence_parallel_size > 1 else DefaultSampler
+
+train_dataset = dict(
+    type=build_preference_dataset,
+    dataset=dict(
+        type=load_dataset,
+        path='argilla/ultrafeedback-binarized-preferences-cleaned'),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=orpo_dpo_mix_40k_map_fn,
+    is_dpo=False,
+    is_reward=True,
+    reward_token_id=reward_token_id,
+    num_proc=32,
+    use_varlen_attn=use_varlen_attn,
+    max_packed_length=max_packed_length,
+    shuffle_before_pack=True,
+)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=sampler, shuffle=True),
+    collate_fn=dict(
+        type=preference_collate_fn, use_varlen_attn=use_varlen_attn))
+```
+
+In the above configuration, we use `load_dataset` to load the `argilla/ultrafeedback-binarized-preferences-cleaned` dataset from Hugging Face, using `orpo_dpo_mix_40k_map_fn` as the dataset mapping function (`orpo_dpo_mix_40k` and `ultrafeedback-binarized-preferences-cleaned` have the same format, so the same mapping function can be used).
+
+For more information on handling datasets and writing dataset mapping functions, please refer to the [Preference Data Section](./preference_data.md).
+
+### Accelerating Training
+
+When training with preference data, we recommend enabling the [Variable-Length Attention Mechanism](https://xtuner.readthedocs.io/zh-cn/latest/acceleration/varlen_flash_attn.html) to avoid memory waste caused by length differences between the chosen and rejected samples within a single preference pair. You can enable the variable-length attention mechanism by setting `use_varlen_attn=True`.
+
+XTuner also supports many training acceleration methods. For details on how to use them, please refer to the [Acceleration Strategies Section](https://xtuner.readthedocs.io/zh-cn/latest/acceleration/hyper_parameters.html).
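+
+As a footnote to the loss functions described at the top of this page, the `ranking` loss is essentially the negative log-likelihood of the Bradley–Terry model. A minimal PyTorch sketch, for intuition only (not XTuner's exact implementation):
+
+```python
+import torch.nn.functional as F
+
+def ranking_loss(chosen_scores, rejected_scores):
+    # Bradley-Terry: maximize the probability sigma(r_chosen - r_rejected)
+    return -F.logsigmoid(chosen_scores - rejected_scores).mean()
+```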
diff --git a/data/xtuner/docs/en/reward_model/overview.md b/data/xtuner/docs/en/reward_model/overview.md
new file mode 100644
index 0000000000000000000000000000000000000000..eb210140c7e88df9912429d900709f54cfa3be5b
--- /dev/null
+++ b/data/xtuner/docs/en/reward_model/overview.md
@@ -0,0 +1,43 @@
+## Introduction to Reward Model
+
+### Overview
+
+The Reward Model is a crucial component in the reinforcement learning process. Its primary task is to predict reward values based on given inputs, guiding the direction of the learning algorithm. In RLHF (Reinforcement Learning from Human Feedback), the Reward Model acts as a proxy for human preferences, helping the reinforcement learning algorithm optimize policies more effectively.
+
+In large language model training, the Reward Model typically refers to the Preference Model. It is trained on good and bad (chosen & rejected) responses to the same prompts to fit human preferences, and at inference time it predicts a reward value that guides the optimization of the Actor model in the RLHF process.
+
+Applications of the Reward Model include but are not limited to:
+
+- **RLHF Training**: During RLHF training with algorithms such as Proximal Policy Optimization (PPO), the Reward Model provides reward signals that improve the quality of generated content and align it more closely with human preferences.
+- **BoN Sampling**: In the Best-of-N (BoN) sampling process, users can use the Reward Model to score multiple responses to the same prompt and select the highest-scoring generated result, thereby enhancing the model's output.
+- **Data Construction**: The Reward Model can be used to evaluate and filter training data, or to replace manual annotation when constructing DPO training data.
+
+### Features of Reward Model Training in XTuner
+
+The Reward Model training in XTuner offers the following significant advantages:
+
+1. **Latest Training Techniques**: XTuner integrates the Reward Model training loss function from InternLM2, which stabilizes the numerical range of reward scores and reduces overfitting on simple samples (see the [InternLM2 Technical Report](https://arxiv.org/abs/2403.17297) for details).
+
+2. **Reducing Memory Waste**: Due to the length differences between chosen and rejected data in preference datasets, padding tokens during data concatenation can cause memory waste. In XTuner, by utilizing the variable-length attention feature from Flash Attention 2, preference pairs are packed into the same sequence during training, significantly reducing memory waste caused by padding tokens. This not only improves memory efficiency but also allows for training larger models or handling more data under the same hardware conditions.
+
+![img](../../zh_cn/reward_model/images/var_len_atten.png)
+
+3. **Efficient Training**: Leveraging XTuner's QLoRA training capabilities, we can perform full parameter training only on the Reward Model's Value Head, while using QLoRA fine-tuning on the language model itself, substantially reducing the memory overhead of model training.
+
+4. **Long Text Training**: With XTuner's sequence parallel functionality, long text data can be trained efficiently.
+
+![img](../../zh_cn/reward_model/images/sequence_parallel.png)
+
+### Getting Started
+
+Refer to the [Quick Start Guide](./quick_start.md) to understand the basic concepts. For more information on configuring training parameters, please see the [Modifying Reward Model Settings](./modify_settings.md) section.
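+
+As a small illustration of the BoN sampling application mentioned above, the sketch below shows the selection logic; `score` is a hypothetical stand-in for whatever reward model inference backend is used:
+
+```python
+def best_of_n(prompt, candidates, score):
+    """Pick the candidate response with the highest reward score.
+
+    `score(prompt, response)` is a placeholder for reward model
+    inference; any scoring backend can be plugged in here.
+    """
+    return max(candidates, key=lambda response: score(prompt, response))
+```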
+
+### Open-source Models
+
+We used XTuner to train the InternLM2 Reward Models from the InternLM2 Technical Report; you are welcome to download and use them:
+
+| Model | Transformers(HF) | ModelScope(HF) | OpenXLab(HF) | RewardBench Score |
+| ----- | ---------------- | -------------- | ------------ | ----------------- |
+| **InternLM2-1.8B-Reward** | [🤗internlm2-1_8b-reward](https://huggingface.co/internlm/internlm2-1_8b-reward) | [internlm2-1_8b-reward](https://modelscope.cn/models/Shanghai_AI_Laboratory/internlm2-1_8b-reward/summary) | [![Open in OpenXLab](https://cdn-static.openxlab.org.cn/header/openxlab_models.svg)](https://openxlab.org.cn/models/detail/OpenLMLab/internlm2-1_8b-reward) | 80.6 |
+| **InternLM2-7B-Reward** | [🤗internlm2-7b-reward](https://huggingface.co/internlm/internlm2-7b-reward) | [internlm2-7b-reward](https://modelscope.cn/models/Shanghai_AI_Laboratory/internlm2-7b-reward/summary) | [![Open in OpenXLab](https://cdn-static.openxlab.org.cn/header/openxlab_models.svg)](https://openxlab.org.cn/models/detail/OpenLMLab/internlm2-7b-reward) | 86.6 |
+| **InternLM2-20B-Reward** | [🤗internlm2-20b-reward](https://huggingface.co/internlm/internlm2-20b-reward) | [internlm2-20b-reward](https://modelscope.cn/models/Shanghai_AI_Laboratory/internlm2-20b-reward/summary) | [![Open in OpenXLab](https://cdn-static.openxlab.org.cn/header/openxlab_models.svg)](https://openxlab.org.cn/models/detail/OpenLMLab/internlm2-20b-reward) | 89.5 |
diff --git a/data/xtuner/docs/en/reward_model/preference_data.md b/data/xtuner/docs/en/reward_model/preference_data.md
new file mode 100644
index 0000000000000000000000000000000000000000..2f304e627a29bc8e6acb73705a15f676551c5d24
--- /dev/null
+++ b/data/xtuner/docs/en/reward_model/preference_data.md
@@ -0,0 +1,110 @@
+## Preference Dataset
+
+### Overview
+
+XTuner's Reward Model, DPO, ORPO, and other algorithms that train on preference data all adopt the same data format. Each training sample in the preference dataset needs to contain the following three fields: `prompt`, `chosen`, and `rejected`. The values for each field follow the [OpenAI chat message](https://platform.openai.com/docs/api-reference/chat/create) format. A specific example is as follows:
+
+```json
+{
+  "prompt": [
+    {
+      "role": "system",
+      "content": "You are a helpful assistant."
+    },
+    {
+      "role": "user",
+      "content": "Who won the world series in 2020?"
+    },
+    {
+      "role": "assistant",
+      "content": "The Los Angeles Dodgers won the World Series in 2020."
+    },
+    {
+      "role": "user",
+      "content": "Where was it played?"
+    }
+  ],
+  "chosen": [
+    {
+      "role": "assistant",
+      "content": "The 2020 World Series was played at Globe Life Field in Arlington, Texas."
+    }
+  ],
+  "rejected": [
+    {
+      "role": "assistant",
+      "content": "I don't know."
+    }
+  ]
+}
+```
+
+When conducting Reward Model training or DPO training, XTuner processes the preference dataset into different training labels based on the type of training task.
+ +![img](../../zh_cn/reward_model/images/preference_data.png) + +As shown in the above image, for Reward Model training, we follow the ChatGPT training method by adding a special `<|reward|>` token at the end of the conversation data and calculating the loss only on the logits output by this token. For DPO series algorithm training, we mask the tokens in the prompt part and calculate the loss only on the chosen and rejected responses. In the configuration file, we control the dataset type through the `is_reward` and `is_dpo` fields in the dataset. + +### Training with Custom Data + +After understanding the data format of a single sample, you only need to save the converted data in [JSON Lines](https://jsonlines.org/) format. Each line in the file is a sample. + +```json +{"prompt": [{"content": "How can I learn to better regulate my emotions as a grown adult?", "role": "user"}], "chosen": [{"content": "You could do emotional regulation exercises with me, and try to notice and practice ways that you could better control your emotions in real life.", "role": "assistant"}], "rejected": [{"content": "Do you mean how can you learn to be more emotionally mature?", "role": "assistant"}]} +{"prompt": [{"content": "What are some interesting things I can add to a grilled cheese sandwich?", "role": "user"}, {"content": "A tomato, a slice of pickle, some chopped pickles, some chopped green onions. Also, some mayonnaise if you like.", "role": "assistant"}, {"content": "Sounds great. Anything else?", "role": "user"}], "chosen": [{"content": "If you’d like something a little different, I recommend grating some horseradish on top of the sandwich.", "role": "assistant"}], "rejected": [{"content": "Maybe some nice fresh basil?", "role": "assistant"}]} +{"prompt": [{"content": "How do I bob for apples?", "role": "user"}], "chosen": [{"content": "Ah! Do you mean the game of “applesauce,” or do you mean the idea of “to bob for apples”?", "role": "assistant"}], "rejected": [{"content": "Sorry, I don’t know that term.", "role": "assistant"}]} +...... +``` + +After preparing the custom dataset, you need to fill in the path to your saved data in the `data_files` field in the configuration file. You can load multiple JSONL files simultaneously for training. + +```python +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=build_preference_dataset, + dataset=dict( + type=load_jsonl_dataset, + data_files=[ + '/your/jsonl/path/here.jsonl', + '/your/another/jsonl/path/here.jsonl' + ]), +) +``` + +### Training with Open Source Datasets + +Similar to configuring SFT data in XTuner, when using open-source datasets from Hugging Face, you only need to define a mapping function `map_fn` to process the dataset format into XTuner's data format. + +Taking `Intel/orca_dpo_pairs` as an example, this dataset has `system`, `question`, `chosen`, and `rejected` fields, with each field's value in text format instead of the [OpenAI chat message](https://platform.openai.com/docs/api-reference/chat/create) format. 
Therefore, we need to define a mapping function for this dataset: + +```python +def intel_orca_dpo_map_fn(example): + prompt = [{ + 'role': 'system', + 'content': example['system'] + }, { + 'role': 'user', + 'content': example['question'] + }] + chosen = [{'role': 'assistant', 'content': example['chosen']}] + rejected = [{'role': 'assistant', 'content': example['rejected']}] + return {'prompt': prompt, 'chosen': chosen, 'rejected': rejected} +``` + +As shown in the code, `intel_orca_dpo_map_fn` processes the four fields in the original data, converting them into `prompt`, `chosen`, and `rejected` fields, and ensures each field follows the [OpenAI chat message](https://platform.openai.com/docs/api-reference/chat/create) format, maintaining uniformity in subsequent data processing flows. + +After defining the mapping function, you need to import it in the configuration file and configure it in the `dataset_map_fn` field. + +```python +train_dataset = dict( + type=build_preference_dataset, + dataset=dict( + type=load_dataset, + path='Intel/orca_dpo_pairs'), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=intel_orca_dpo_map_fn, +) +``` diff --git a/data/xtuner/docs/en/reward_model/quick_start.md b/data/xtuner/docs/en/reward_model/quick_start.md new file mode 100644 index 0000000000000000000000000000000000000000..5c802be2f33f9c25d1bb018de07c38ea09d86c69 --- /dev/null +++ b/data/xtuner/docs/en/reward_model/quick_start.md @@ -0,0 +1,85 @@ +## Quick Start Guide for Reward Model + +In this section, we will introduce how to use XTuner to train a 1.8B Reward Model, helping you get started quickly. + +### Preparing Pretrained Model Weights + +According to the paper [Training language models to follow instructions with human feedback](https://arxiv.org/abs/2203.02155), we use a language model fine-tuned with SFT as the initialization model for the Reward Model. Here, we use [InternLM2-chat-1.8b-sft](https://huggingface.co/internlm/internlm2-chat-1_8b-sft) as the initialization model. + +Set `pretrained_model_name_or_path = 'internlm/internlm2-chat-1_8b-sft'` in the training configuration file, and the model files will be automatically downloaded when training starts. If you need to download the model weights manually, please refer to the section [Preparing Pretrained Model Weights](https://xtuner.readthedocs.io/zh-cn/latest/preparation/pretrained_model.html), which provides detailed instructions on how to download model weights from Huggingface or Modelscope. Here are the links to the models on HuggingFace and ModelScope: + +- HuggingFace link: https://huggingface.co/internlm/internlm2-chat-1_8b-sft +- ModelScope link: https://modelscope.cn/models/Shanghai_AI_Laboratory/internlm2-chat-1_8b-sft/summary + +### Preparing Training Data + +In this tutorial, we use the [UltraFeedback](https://arxiv.org/abs/2310.01377) dataset as an example. For convenience, we use the preprocessed [argilla/ultrafeedback-binarized-preferences-cleaned](https://huggingface.co/datasets/argilla/ultrafeedback-binarized-preferences-cleaned) dataset from Huggingface. + +```python +train_dataset = dict( + type=build_preference_dataset, + dataset=dict( + type=load_dataset, + path='argilla/ultrafeedback-binarized-preferences-cleaned'), + dataset_map_fn=orpo_dpo_mix_40k_map_fn, + is_dpo=False, + is_reward=True, +) +``` + +Using the above configuration in the configuration file will automatically download and process this dataset. 
If you want to use other open-source datasets from Huggingface or custom datasets, please refer to the [Preference Dataset](./preference_data.md) section. + +### Preparing Configuration Files + +XTuner provides several ready-to-use configuration files, which can be viewed using `xtuner list-cfg`. Execute the following command to copy a configuration file to the current directory. + +```bash +xtuner copy-cfg internlm2_chat_1_8b_reward_full_ultrafeedback . +``` + +Open the copied configuration file. If you choose to download the model and dataset automatically, no modifications are needed. If you want to specify paths to your pre-downloaded model and dataset, modify the `pretrained_model_name_or_path` and the `path` parameter in `dataset` under `train_dataset`. + +For more training parameter configurations, please refer to the section [Modifying Reward Training Configuration](./modify_settings.md). + +### Starting the Training + +After completing the above steps, you can start the training task using the following commands. + +```bash +# Single node single GPU +xtuner train ./internlm2_chat_1_8b_reward_full_ultrafeedback_copy.py +# Single node multiple GPUs +NPROC_PER_NODE=${GPU_NUM} xtuner train ./internlm2_chat_1_8b_reward_full_ultrafeedback_copy.py +# Slurm cluster +srun ${SRUN_ARGS} xtuner train ./internlm2_chat_1_8b_reward_full_ultrafeedback_copy.py --launcher slurm +``` + +The correct training log should look like the following (running on a single A800 GPU): + +``` +06/06 16:12:11 - mmengine - INFO - Iter(train) [ 10/15230] lr: 3.9580e-07 eta: 2:59:41 time: 0.7084 data_time: 0.0044 memory: 18021 loss: 0.6270 acc: 0.0000 chosen_score_mean: 0.0000 rejected_score_mean: 0.0000 num_samples: 4.0000 num_tokens: 969.0000 +06/06 16:12:17 - mmengine - INFO - Iter(train) [ 20/15230] lr: 8.3536e-07 eta: 2:45:25 time: 0.5968 data_time: 0.0034 memory: 42180 loss: 0.6270 acc: 0.5000 chosen_score_mean: 0.0013 rejected_score_mean: 0.0010 num_samples: 4.0000 num_tokens: 1405.0000 +06/06 16:12:22 - mmengine - INFO - Iter(train) [ 30/15230] lr: 1.2749e-06 eta: 2:37:18 time: 0.5578 data_time: 0.0024 memory: 32121 loss: 0.6270 acc: 0.7500 chosen_score_mean: 0.0016 rejected_score_mean: 0.0011 num_samples: 4.0000 num_tokens: 932.0000 +06/06 16:12:28 - mmengine - INFO - Iter(train) [ 40/15230] lr: 1.7145e-06 eta: 2:36:05 time: 0.6033 data_time: 0.0025 memory: 42186 loss: 0.6270 acc: 0.7500 chosen_score_mean: 0.0027 rejected_score_mean: 0.0016 num_samples: 4.0000 num_tokens: 994.0000 +06/06 16:12:35 - mmengine - INFO - Iter(train) [ 50/15230] lr: 2.1540e-06 eta: 2:41:03 time: 0.7166 data_time: 0.0027 memory: 42186 loss: 0.6278 acc: 0.5000 chosen_score_mean: 0.0031 rejected_score_mean: 0.0032 num_samples: 4.0000 num_tokens: 2049.0000 +06/06 16:12:40 - mmengine - INFO - Iter(train) [ 60/15230] lr: 2.5936e-06 eta: 2:33:37 time: 0.4627 data_time: 0.0023 memory: 30238 loss: 0.6262 acc: 1.0000 chosen_score_mean: 0.0057 rejected_score_mean: 0.0030 num_samples: 4.0000 num_tokens: 992.0000 +06/06 16:12:46 - mmengine - INFO - Iter(train) [ 70/15230] lr: 3.0331e-06 eta: 2:33:18 time: 0.6018 data_time: 0.0025 memory: 42186 loss: 0.6247 acc: 0.7500 chosen_score_mean: 0.0117 rejected_score_mean: 0.0055 num_samples: 4.0000 num_tokens: 815.0000 +``` + +### Model Conversion + +XTuner provides integrated tools to convert models to HuggingFace format. 
Simply execute the following commands:

```bash
# Create a directory to store HF format parameters
mkdir work_dirs/internlm2_chat_1_8b_reward_full_ultrafeedback_copy/iter_15230_hf

# Convert the format
xtuner convert pth_to_hf internlm2_chat_1_8b_reward_full_ultrafeedback_copy.py \
                         work_dirs/internlm2_chat_1_8b_reward_full_ultrafeedback_copy/iter_15230.pth \
                         work_dirs/internlm2_chat_1_8b_reward_full_ultrafeedback_copy/iter_15230_hf
```

This will convert XTuner's checkpoint to the HuggingFace format.

Note: Since the Reward Model type is not integrated into the official transformers library, only the Reward Models trained with InternLM2 will be converted to the `InternLM2ForRewardModel` type. Other models will default to the `SequenceClassification` type (for example, LLaMa3 will be converted to the `LlamaForSequenceClassification` type).

diff --git a/data/xtuner/docs/en/switch_language.md b/data/xtuner/docs/en/switch_language.md new file mode 100644 index 0000000000000000000000000000000000000000..ff7c4c42502846c4fe3fc52f0bc2c2aec09c4f02 --- /dev/null +++ b/data/xtuner/docs/en/switch_language.md @@ -0,0 +1,3 @@

## English

## 简体中文

diff --git a/data/xtuner/docs/en/training/custom_agent_dataset.rst b/data/xtuner/docs/en/training/custom_agent_dataset.rst new file mode 100644 index 0000000000000000000000000000000000000000..b4ad82f0196b547767922df9e72bbc2224cbac72 --- /dev/null +++ b/data/xtuner/docs/en/training/custom_agent_dataset.rst @@ -0,0 +1,2 @@

Custom Agent Dataset
====================

diff --git a/data/xtuner/docs/en/training/custom_pretrain_dataset.rst b/data/xtuner/docs/en/training/custom_pretrain_dataset.rst new file mode 100644 index 0000000000000000000000000000000000000000..00ef0e0cb5c65524ed895691a09e0daa6c03a9e1 --- /dev/null +++ b/data/xtuner/docs/en/training/custom_pretrain_dataset.rst @@ -0,0 +1,2 @@

Custom Pretrain Dataset
=======================

diff --git a/data/xtuner/docs/en/training/custom_sft_dataset.rst b/data/xtuner/docs/en/training/custom_sft_dataset.rst new file mode 100644 index 0000000000000000000000000000000000000000..39a0f7c33713aafe429a5d069aa4fc6794dc8d36 --- /dev/null +++ b/data/xtuner/docs/en/training/custom_sft_dataset.rst @@ -0,0 +1,2 @@

Custom SFT Dataset
==================

diff --git a/data/xtuner/docs/en/training/modify_settings.rst b/data/xtuner/docs/en/training/modify_settings.rst new file mode 100644 index 0000000000000000000000000000000000000000..382aca87221142ee1aae4a08657b31f419084093 --- /dev/null +++ b/data/xtuner/docs/en/training/modify_settings.rst @@ -0,0 +1,2 @@

Modify Settings
===============

diff --git a/data/xtuner/docs/en/training/multi_modal_dataset.rst b/data/xtuner/docs/en/training/multi_modal_dataset.rst new file mode 100644 index 0000000000000000000000000000000000000000..e3d174a1bc5319b6b68aa753c984bd2d6b70a023 --- /dev/null +++ b/data/xtuner/docs/en/training/multi_modal_dataset.rst @@ -0,0 +1,2 @@

Multi-modal Dataset
===================

diff --git a/data/xtuner/docs/en/training/open_source_dataset.rst b/data/xtuner/docs/en/training/open_source_dataset.rst new file mode 100644 index 0000000000000000000000000000000000000000..8627b439d5a031c42db99503491547706cbc6b2b --- /dev/null +++ b/data/xtuner/docs/en/training/open_source_dataset.rst @@ -0,0 +1,2 @@

Open Source Datasets
====================

diff --git a/data/xtuner/docs/en/training/visualization.rst b/data/xtuner/docs/en/training/visualization.rst new file mode 100644 index
0000000000000000000000000000000000000000..255c7e88f1d30566d26434cf144b482a79202184 --- /dev/null +++ b/data/xtuner/docs/en/training/visualization.rst @@ -0,0 +1,2 @@ +Visualization +============= diff --git a/data/xtuner/docs/en/user_guides/chat.md b/data/xtuner/docs/en/user_guides/chat.md new file mode 100644 index 0000000000000000000000000000000000000000..82c8ee7230cd76bf547bfdac084c8af0ff26ed76 --- /dev/null +++ b/data/xtuner/docs/en/user_guides/chat.md @@ -0,0 +1,128 @@ +# Chat with fine-tuned LLMs + +## Chat with [InternLM](https://github.com/InternLM/InternLM) + +### InternLM-7B + +- InternLM-7B, oasst1 + + ```shell + xtuner chat internlm/internlm-7b --adapter xtuner/internlm-7b-qlora-oasst1 --prompt-template internlm_chat + ``` + +- InternLM-7B, Arxiv Gentitle + + ```shell + xtuner chat internlm/internlm-7b --adapter xtuner/internlm-7b-qlora-arxiv-gentitle --prompt-template internlm_chat --system-template arxiv_gentile + ``` + +- InternLM-7B, Colorist + + ```shell + xtuner chat internlm/internlm-7b --adapter xtuner/internlm-7b-qlora-colorist --prompt-template internlm_chat --system-template colorist + ``` + +- InternLM-7B, Alpaca-enzh + + ```shell + xtuner chat internlm/internlm-7b --adapter xtuner/internlm-7b-qlora-alpaca-enzh --prompt-template internlm_chat --system-template alpaca + ``` + +- InternLM-7B, MSAgent **(Lagent ReAct!)** + + ```shell + export SERPER_API_KEY="xxx" # Please get the key from https://serper.dev to support google search! + xtuner chat internlm/internlm-7b --adapter xtuner/internlm-7b-qlora-msagent-react --lagent + ``` + +### InternLM-Chat-7B + +- InternLM-Chat-7B, oasst1 + + ```shell + xtuner chat internlm/internlm-chat-7b --adapter xtuner/internlm-chat-7b-qlora-oasst1 --prompt-template internlm_chat + ``` + +- InternLM-Chat-7B, Alpaca-enzh + + ```shell + xtuner chat internlm/internlm-chat-7b --adapter xtuner/internlm-chat-7b-qlora-alpaca-enzh --prompt-template internlm_chat --system-template alpaca + ``` + +### InternLM-20B + +- InternLM-20B, oasst1 + + ```shell + xtuner chat internlm/internlm-20b --adapter xtuner/internlm-20b-qlora-oasst1 --prompt-template internlm_chat + ``` + +- InternLM-20B, Arxiv Gentitle + + ```shell + xtuner chat internlm/internlm-20b --adapter xtuner/internlm-20b-qlora-arxiv-gentitle --prompt-template internlm_chat --system-template arxiv_gentile + ``` + +- InternLM-20B, Colorist + + ```shell + xtuner chat internlm/internlm-20b --adapter xtuner/internlm-20b-qlora-colorist --prompt-template internlm_chat --system-template colorist + ``` + +- InternLM-20B, Alpaca-enzh + + ```shell + xtuner chat internlm/internlm-20b --adapter xtuner/internlm-20b-qlora-alpaca-enzh --prompt-template internlm_chat --system-template alpaca + ``` + +- InternLM-20B, MSAgent **(Lagent ReAct!)** + + ```shell + export SERPER_API_KEY="xxx" # Please get the key from https://serper.dev to support google search! 
+ xtuner chat internlm/internlm-20b --adapter xtuner/internlm-20b-qlora-msagent-react --lagent + ``` + +### InternLM-Chat-20B + +- InternLM-Chat-20B, oasst1 + + ```shell + xtuner chat internlm/internlm-chat-20b --adapter xtuner/internlm-chat-20b-qlora-oasst1 --prompt-template internlm_chat + ``` + +- InternLM-Chat-20B, Alpaca-enzh + + ```shell + xtuner chat internlm/internlm-chat-20b --adapter xtuner/internlm-chat-20b-qlora-alpaca-enzh --prompt-template internlm_chat --system-template alpaca + ``` + +## Chat with [Llama2](https://github.com/facebookresearch/llama) + +> Don't forget to use `huggingface-cli login` and input your access token first to access Llama2! See [here](https://huggingface.co/docs/hub/security-tokens#user-access-tokens) to learn how to obtain your access token. + +### Llama-2-7B + +- Llama-2-7B, MOSS-003-SFT **(plugins!)** + + ```shell + export SERPER_API_KEY="xxx" # Please get the key from https://serper.dev to support google search! + xtuner chat meta-llama/Llama-2-7b-hf --adapter xtuner/Llama-2-7b-qlora-moss-003-sft --bot-name Llama2 --prompt-template moss_sft --system-template moss_sft --with-plugins calculate solve search --no-streamer + ``` + +- Llama-2-7B, MSAgent **(Lagent ReAct!)** + + ```shell + export SERPER_API_KEY="xxx" # Please get the key from https://serper.dev to support google search! + xtuner chat meta-llama/Llama-2-7b-hf --adapter xtuner/Llama-2-7b-qlora-msagent-react --lagent + ``` + +## Chat with [Qwen](https://github.com/QwenLM) + +### Qwen-7B + +- Qwen-7B, MOSS-003-SFT **(plugins!)** + + ```shell + export SERPER_API_KEY="xxx" # Please get the key from https://serper.dev to support google search! + xtuner chat Qwen/Qwen-7B --adapter xtuner/Qwen-7B-qlora-moss-003-sft --bot-name Qwen --prompt-template moss_sft --system-template moss_sft --with-plugins calculate solve search + ``` diff --git a/data/xtuner/docs/en/user_guides/dataset_format.md b/data/xtuner/docs/en/user_guides/dataset_format.md new file mode 100644 index 0000000000000000000000000000000000000000..46e3d6f80ae58930554f178779f0fc0f1d7b433e --- /dev/null +++ b/data/xtuner/docs/en/user_guides/dataset_format.md @@ -0,0 +1,193 @@ +# Dataset Format + +- [Incremental Pre-training Dataset Format](#incremental-pre-training-dataset-format) +- [Single-turn Dialogue Dataset Format](#single-turn-dialogue-dataset-format) +- [Multi-turn Dialogue Dataset Format](#multi-turn-dialogue-dataset-format) + - [Method 1](#method-1) + - [Method 2](#method-2) + - [Method in XTuner](#method-in-xtuner) + +The Supervised Finetune (SFT) of large language models aims to improve the performance of pre-trained models on specific tasks through supervised fine-tuning. To support as many downstream tasks as possible, XTuner supports three dataset formats: incremental pre-training, single-turn dialogue, and multi-turn dialogue. + +- The incremental pre-training dataset is used to enhance the model's capabilities in a specific domain or task. +- Single-turn and multi-turn dialogue datasets are often used in the instruction tuning stage to enhance the model's ability to respond to specific instructions. + +In the instruction tuning phase, our goal is to train the language model to answer based on human instructions. **Therefore, generally only the loss of the response part (Output) is used for gradient backpropagation, while the loss of the instruction part (System, Input) is not used for weight updates.** Based on this, we introduce "system", "input" and "output" fields when preprocessing the dataset. 
The "system", "input" fields are used to save fields that do not need to compute loss, such as system and user instructions, whereas the "output" field is used to save fields that do need to compute loss, such as the GroundTruth answers corresponding to input instructions. + +To unify the incremental pre-training, single-turn dialogue, and multi-turn dialogue dataset formats, we set the dataset format to the following form: + +```json +[{ + "conversation":[ + { + "system": "xxx", + "input": "xxx", + "output": "xxx" + } + ] +}, +{ + "conversation":[ + { + "system": "xxx", + "input": "xxx", + "output": "xxx" + }, + { + "input": "xxx", + "output": "xxx" + } + ] +}] +``` + +Throughout the training phase, we amalgamate several "system", "input" and "output" pairs from a single data instance, which we then feed into the model. Loss is computed concurrently at each position, yet only the loss associated with the "output" component participates in the gradient backpropagation process. This process is elucidated in the figure below. + +
*(figure: the "system"/"input" parts are masked out of the loss; only "output" tokens are used for backpropagation)*
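To make the masking concrete, the following framework-agnostic sketch builds training labels for one such "conversation": positions covered by the "system" and "input" text get the ignore index -100 (the value PyTorch's cross-entropy loss skips), so only "output" tokens contribute gradients. The `encode` callable is a schematic stand-in for a real tokenizer, not XTuner's actual implementation:

```python
IGNORE_INDEX = -100  # targets with this value are skipped by PyTorch's CrossEntropyLoss

def build_labels(conversation, encode):
    """Flatten one 'conversation' list into (input_ids, labels).

    `conversation` holds {"system", "input", "output"} dicts; `encode` is any
    text -> list-of-token-ids function (a stand-in for a real tokenizer).
    """
    input_ids, labels = [], []
    for turn in conversation:
        context = encode(turn.get("system", "") + turn.get("input", ""))
        response = encode(turn["output"])
        input_ids += context + response
        # context tokens are masked out of the loss; response tokens are supervised
        labels += [IGNORE_INDEX] * len(context) + response
    return input_ids, labels

# Toy usage: byte values stand in for token ids
encode = lambda text: list(text.encode("utf-8"))
ids, labels = build_labels(
    [{"system": "", "input": "Hello?", "output": "Hi!"}], encode)
assert labels[:6] == [IGNORE_INDEX] * 6  # the 6 input bytes carry no loss
```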
Note that the `<s>` token and `</s>` token are used to indicate the start and end of a sentence or text.

## Incremental Pre-training Dataset Format

As incremental pre-training is intended to help the model learn language knowledge and expressive abilities tailored for specific downstream tasks, the loss corresponding to the entire content of the dataset should be used for gradient backpropagation. Therefore, the "system" and "input" of the dataset are left empty, while the "output" consists of an entire piece of corpus data. The dataset format corresponding to the incremental pre-training task is shown as follows:

```json
[{
    "conversation":[
        {
            "system": "",
            "input": "",
            "output": "I am an artificial intelligence (AI) assistant named Puyu. I was created by the Shanghai AI Laboratory and my purpose is to assist users with various tasks through natural language processing technology."
        }
    ]
},
{
    "conversation":[
        {
            "system": "",
            "input": "",
            "output": "I am an artificial intelligence programmed to assist with various types of tasks, including answering questions, providing information, and performing automated processes."
        }
    ]
}]
```
*(figure: incremental pre-training sample, with loss computed over the entire corpus text)*
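If the raw corpus sits in a plain-text file with one document per line, converting it into this format takes only a few lines of Python. Both file names below are placeholders for your own paths, not paths XTuner expects:

```python
import json

# One raw document per line; both file names are illustrative placeholders
with open("corpus.txt", encoding="utf-8") as src:
    records = [
        {"conversation": [{"system": "", "input": "", "output": line.strip()}]}
        for line in src if line.strip()
    ]

with open("pretrain_data.json", "w", encoding="utf-8") as dst:
    json.dump(records, dst, ensure_ascii=False, indent=2)
```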
## Single-turn Dialogue Dataset Format

The single-turn dialogue dataset typically consists of a single instruction (or question) and its corresponding GroundTruth answer. Since only the answer part should be used for gradient backpropagation, the "system" and "input" fields of the dataset are the input instruction, and the "output" field is the corresponding answer. The format of the single-turn dialogue dataset is shown as follows:

```json
[{
    "conversation":[
        {
            "system": "You are an AI assistant.",
            "input": "Give three tips for staying healthy.",
            "output": "1. Eat a balanced diet. 2. Exercise regularly. 3. Get enough sleep."
        }
    ]
},
{
    "conversation":[
        {
            "system": "You are an AI assistant.",
            "input": "How to study English?",
            "output": "1. Set clear goals. 2. Create a study plan. 3. Build vocabulary. 4. Practice speaking."
        }
    ]
}]
```
*(figure: single-turn dialogue sample, with loss computed only on the answer)*
## Multi-turn Dialogue Dataset Format

The multi-turn dialogue dataset typically consists of multiple rounds of instructions (or questions) and their corresponding GroundTruth answers. Suppose we have a piece of multi-turn dialogue data. For ease of introduction, we denote the user's input and the assistant's output in the nth round as UserN and AssistantN.

```text
System: You are an AI assistant.
User1: Hello?
Assistant1: Hello! How can I help you?
User2: What's the date today?
Assistant2: Today is Monday, August 14, 2023.
User3: Thank you!
Assistant3: You are welcome.
```

How can we use the above multi-turn dialogue data to train large models? Currently, there are two mainstream methods.

### Method 1

The text of System, User1, Assistant1, User2, Assistant2, and User3 is all considered as the input part of the model, while the text of Assistant3 is viewed as the prediction part of the model. Only the loss from the Assistant3 part is involved in the weight update.
*(figure: Method 1, only the Assistant3 text contributes to the loss)*
The downside of this method is that it does not fully utilize the multi-turn dialogue training data: the content of Assistant1 and Assistant2 never participates in training, so much of each sample goes unused.

### Method 2

Split a piece of multi-turn dialogue data into multiple pieces of data. For example, the above instance can be split into the following three pieces of data.
*(figure: Method 2, one n-turn dialogue split into n separate samples)*
Compared to Method 1, Method 2 can fully utilize the data from each round of dialogue, but it requires splitting one piece of data containing n rounds of dialogue into n pieces of data, which reduces the training efficiency to roughly 1/n.

### Method in XTuner

When XTuner trains multi-turn dialogue models, it adopts a more comprehensive and efficient method, as shown in the figure below.
*(figure: XTuner's method, loss computed on every Output part of one concatenated sample)*
We concatenate multi-turn dialogues, then input them into the model. The loss at each position is computed in parallel, but only the loss from the Output part participates in backpropagation. Therefore, the format of the multi-turn dialogue dataset in XTuner is shown as follows:

```json
[{
    "conversation":[
        {
            "system": "You are an AI assistant.",
            "input": "Hello?",
            "output": "Hello! How can I help you?"
        },
        {
            "input": "What's the date today?",
            "output": "Today is Monday, August 14, 2023."
        },
        {
            "input": "Thank you!",
            "output": "You are welcome."
        }
    ]
},
{
    "conversation":[
        {
            "system": "You are an AI assistant.",
            "input": "Hello?",
            "output": "Hello! How can I help you?"
        },
        {
            "input": "How's the weather today in Rosso?",
            "output": "The weather in Rosso on Wednesday, August 16th, is going to be cloudy for most of the day, together with moderate rain around noon."
        },
        {
            "input": "Thank you!",
            "output": "You are welcome."
        }
    ]
}]
```

The value corresponding to the "conversation" key in the dataset is a list used to save the instructions and actual answers (GroundTruth) for each round of dialogue. To maintain uniformity in the format, the value corresponding to the "conversation" key in both incremental pre-training datasets and single-turn dialogue datasets is also a list, albeit with a length of 1. In multi-turn dialogue datasets, the length of the "conversation" list is n to accommodate n rounds of dialogue content.

diff --git a/data/xtuner/docs/en/user_guides/dataset_prepare.md b/data/xtuner/docs/en/user_guides/dataset_prepare.md new file mode 100644 index 0000000000000000000000000000000000000000..86a7ae178c4822fd4254d72832a05a8e8334f77d --- /dev/null +++ b/data/xtuner/docs/en/user_guides/dataset_prepare.md @@ -0,0 +1,180 @@

# Dataset Prepare

- [Dataset Prepare](#dataset-prepare)
  - [HuggingFace datasets](#huggingface-datasets)
  - [Others](#others)
    - [Arxiv Gentitle](#arxiv-gentitle)
    - [MOSS-003-SFT](#moss-003-sft)
    - [Chinese Lawyer](#chinese-lawyer)
    - [LLaVA dataset](#llava-dataset)
      - [File structure](#file-structure)
      - [Pretrain](#pretrain)
      - [Finetune](#finetune)
    - [RefCOCO dataset](#refcoco-dataset)
      - [File structure](#file-structure-1)

## HuggingFace datasets

For datasets on HuggingFace Hub, such as [alpaca](https://huggingface.co/datasets/tatsu-lab/alpaca), you can quickly utilize them. For more details, please refer to [single_turn_conversation.md](./single_turn_conversation.md) and [multi_turn_conversation.md](./multi_turn_conversation.md).

## Others

### Arxiv Gentitle

The Arxiv dataset is not released on HuggingFace Hub, but you can download it from Kaggle.

**Step 0**, download raw data from https://kaggle.com/datasets/Cornell-University/arxiv.

**Step 1**, process data by `xtuner preprocess arxiv ${DOWNLOADED_DATA} ${SAVE_DATA_PATH} [optional arguments]`.

For example, get all `cs.AI`, `cs.CL`, `cs.CV` papers from `2020-01-01`:

```shell
xtuner preprocess arxiv ${DOWNLOADED_DATA} ${SAVE_DATA_PATH} --categories cs.AI cs.CL cs.CV --start-date 2020-01-01
```

**Step 2**, all Arxiv Gentitle configs assume the dataset path to be `./data/arxiv_data.json`. You can move and rename your data, or make changes to these configs.

### MOSS-003-SFT

The MOSS-003-SFT dataset can be downloaded from https://huggingface.co/datasets/fnlp/moss-003-sft-data.

**Step 0**, download data.
```shell
# Make sure you have git-lfs installed (https://git-lfs.com)
git lfs install
git clone https://huggingface.co/datasets/fnlp/moss-003-sft-data
```

**Step 1**, unzip.

```shell
cd moss-003-sft-data
unzip moss-003-sft-no-tools.jsonl.zip
unzip moss-003-sft-with-tools-no-text2image.zip
```

**Step 2**, all moss-003-sft configs assume the dataset path to be `./data/moss-003-sft-no-tools.jsonl` and `./data/conversations_with_tools_with_inner_instruction_no_text2image_train_all_random_meta0.5_0.1_0.01_moss_0709.jsonl`. You can move and rename your data, or make changes to these configs.

### Chinese Lawyer

The Chinese Lawyer dataset has two sub-datasets and can be downloaded from https://github.com/LiuHC0428/LAW-GPT.

All lawyer configs assume the dataset path to be `./data/CrimeKgAssitant清洗后_52k.json` and `./data/训练数据_带法律依据_92k.json`. You can move and rename your data, or make changes to these configs.

### LLaVA dataset

#### File structure

```
./data/llava_data
├── LLaVA-Pretrain
│   ├── blip_laion_cc_sbu_558k.json
│   ├── blip_laion_cc_sbu_558k_meta.json
│   └── images
├── LLaVA-Instruct-150K
│   └── llava_v1_5_mix665k.json
└── llava_images
    ├── coco
    │   └── train2017
    ├── gqa
    │   └── images
    ├── ocr_vqa
    │   └── images
    ├── textvqa
    │   └── train_images
    └── vg
        ├── VG_100K
        └── VG_100K_2
```

#### Pretrain

LLaVA-Pretrain

```shell
# Make sure you have git-lfs installed (https://git-lfs.com)
git lfs install
git clone https://huggingface.co/datasets/liuhaotian/LLaVA-Pretrain --depth=1
```

#### Finetune

1. Text data

   1. LLaVA-Instruct-150K

      ```shell
      # Make sure you have git-lfs installed (https://git-lfs.com)
      git lfs install
      git clone https://huggingface.co/datasets/liuhaotian/LLaVA-Instruct-150K --depth=1
      ```

2. Image data

   1. COCO (coco): [train2017](http://images.cocodataset.org/zips/train2017.zip)

   2. GQA (gqa): [images](https://downloads.cs.stanford.edu/nlp/data/gqa/images.zip)

   3. OCR-VQA (ocr_vqa): [download script](https://drive.google.com/drive/folders/1_GYPY5UkUy7HIcR0zq3ZCFgeZN7BAfm_?usp=sharing)

      1. ⚠️ Modify the name of OCR-VQA's images to keep the extension as `.jpg`!

         ```shell
         #!/bin/bash
         ocr_vqa_path=""  # fill in the directory that holds the OCR-VQA images

         find "$ocr_vqa_path" -type f | while read file; do
             extension="${file##*.}"
             if [ "$extension" != "jpg" ]
             then
                 cp -- "$file" "${file%.*}.jpg"
             fi
         done
         ```

   4. TextVQA (textvqa): [train_val_images](https://dl.fbaipublicfiles.com/textvqa/images/train_val_images.zip)

   5. VisualGenome (VG): [part1](https://cs.stanford.edu/people/rak248/VG_100K_2/images.zip), [part2](https://cs.stanford.edu/people/rak248/VG_100K_2/images2.zip)

### RefCOCO dataset

#### File structure

```
./data
├── refcoco_annotations
│   ├── refcoco
│   │   ├── instances.json
│   │   ├── refs(google).p
│   │   └── refs(unc).p
│   ├── refcoco+
│   │   ├── instances.json
│   │   └── refs(unc).p
│   └── refcocog
│       ├── instances.json
│       ├── refs(google).p
│       └── refs(umd).p
├── coco_images
│   ├── *.jpg
...
```

Download the RefCOCO, RefCOCO+, and RefCOCOg annotation files using the links below. Both the COCO train2017 and train2014 image sets are valid for `coco_images`.
+ +| Image source | Download path | +| ------------ | :------------------------------------------------------------------------------------------: | +| RefCOCO | annotations | +| RefCOCO+ | annotations | +| RefCOCOg | annotations | + +After downloading the annotations, unzip the files and place them in the `./data/refcoco_annotations` directory. +Then, we convert the annotations to json format using the below command. This command saves the converted json files in the `./data/llava_data/RefCOCOJson/` directory. + +```shell +xtuner preprocess refcoco --ann-path $RefCOCO_ANN_PATH --image-path $COCO_IMAGE_PATH \ +--save-path $SAVE_PATH # ./data/llava_data/RefCOCOJson/ +``` diff --git a/data/xtuner/docs/en/user_guides/finetune.md b/data/xtuner/docs/en/user_guides/finetune.md new file mode 100644 index 0000000000000000000000000000000000000000..e8c04fe453b2e9e2371af697a22d9ff3d22a0134 --- /dev/null +++ b/data/xtuner/docs/en/user_guides/finetune.md @@ -0,0 +1,121 @@ +# Fine-tune the pretrained LLMs + +## QLoRA Fine-tune [InternLM](https://github.com/InternLM/InternLM) + +- InternLM-7B, oasst1 + + ```shell + xtuner train internlm_7b_qlora_oasst1_e3 + ``` + +- InternLM-7B, Arxiv Gentitle + + ```shell + xtuner train internlm_7b_qlora_arxiv_gentitle_e3 + ``` + +- InternLM-7B, Colorist + + ```shell + xtuner train internlm_7b_qlora_colorist_e5 + ``` + +- InternLM-7B, Coder + + ```shell + xtuner train internlm_7b_qlora_code_alpaca_e3 + ``` + +- InternLM-7B, SQL + + ```shell + xtuner train internlm_7b_qlora_sql_e3 + ``` + +- InternLM-7B, Lawyer + + ```shell + xtuner train internlm_7b_qlora_lawyer_e3 + ``` + +- InternLM-7B, Open-Platypus + + ```shell + xtuner train internlm_7b_qlora_open_platypus_e3 + ``` + +- InternLM-7B, Alpaca-enzh + + ```shell + xtuner train internlm_7b_qlora_alpaca_enzh_e3 + ``` + +## QLoRA Fine-tune [Llama2](https://github.com/facebookresearch/llama) + +> Don't forget to use `huggingface-cli login` and input your access token first to access Llama2! See [here](https://huggingface.co/docs/hub/security-tokens#user-access-tokens) to learn how to obtain your access token. + +- Llama2-7B, MOSS-003-SFT **(plugins!)** + + ```shell + NPROC_PER_NODE=8 xtuner train llama2_7b_qlora_moss_sft_all_e2_gpu8 # Recommended! + xtuner train llama2_7b_qlora_moss_sft_all_e1 + ``` + +- Llama2-7B, Arxiv Gentitle + + ```shell + xtuner train llama2_7b_qlora_arxiv_gentitle_e3 + ``` + +- Llama2-7B, Colorist + + ```shell + xtuner train llama2_7b_qlora_colorist_e5 + ``` + +## QLoRA Fine-tune [Qwen](https://github.com/QwenLM) + +- Qwen-7B, MOSS-003-SFT **(plugins!)** + + ```shell + NPROC_PER_NODE=8 xtuner train qwen_7b_qlora_moss_sft_all_e2_gpu8 # Recommended! 
+ xtuner train qwen_7b_qlora_moss_sft_all_e1 + ``` + +- Qwen-7B, oasst1 + + ```shell + xtuner train qwen_7b_qlora_oasst1_e3 + ``` + +- Qwen-7B, Arxiv Gentitle + + ```shell + xtuner train qwen_7b_qlora_arxiv_gentitle_e3 + ``` + +- Qwen-7B, Alpaca-enzh + + ```shell + xtuner train qwen_7b_qlora_alpaca_enzh_e3 + ``` + +## QLoRA Fine-tune [Baichuan](https://github.com/baichuan-inc) + +- Baichuan-7B, oasst1 + + ```shell + xtuner train baichuan_7b_qlora_oasst1_e3 + ``` + +- Baichuan-7B, Arxiv Gentitle + + ```shell + xtuner train baichuan_7b_qlora_arxiv_gentitle_e3 + ``` + +- Baichuan-7B, Alpaca-enzh + + ```shell + xtuner train baichuan_7b_qlora_alpaca_enzh_e3 + ``` diff --git a/data/xtuner/docs/en/user_guides/incremental_pretraining.md b/data/xtuner/docs/en/user_guides/incremental_pretraining.md new file mode 100644 index 0000000000000000000000000000000000000000..cf00137eb4031877e0a2c291a1290e0d99dbaa72 --- /dev/null +++ b/data/xtuner/docs/en/user_guides/incremental_pretraining.md @@ -0,0 +1,261 @@ +# Incremental Pre-training Data Pipeline + +- [Using Dataset in HuggingFace Hub](#using-dataset-in-huggingface-hub) +- [Using Custom Datasets](#using-custom-datasets) + +Incremental pre-training aims to enhance the model's capability in a specific domain or task. + +XTuner supports using HuggingFace Hub datasets or custom datasets for SFT (Supervised FineTune). The main difference between them is that when using HuggingFace Hub datasets, it is necessary to map the original data to the [incremental pre-training data format](./dataset_format.md#incremental-pre-training-dataset-format)defined by XTuner. For custom datasets, users are recommended to construct the dataset according to the [incremental pre-training data format](./dataset_format.md#incremental-pre-training-dataset-format). + +## Using Dataset in HuggingFace Hub + +### Step 1, Map Original Dataset to Standard Format + +Since different datasets have different formats, it is necessary to map the original data to the [incremental pre-training data format](./dataset_format.md#incremental-pre-training-dataset-format) defined by XTuner. XTuner supports the implementation of format mapping through the map function. The following uses the [oasst1 dataset](https://huggingface.co/datasets/OpenAssistant/oasst1) as an example to explain how to implement data mapping. + +The format of the oasst1 dataset is shown below: + +```python +>>> from datasets import load_dataset + +>>> ds = load_dataset(path='timdettmers/openassistant-guanaco') +>>> ds['train'] +Dataset({ + features: ['text'], + num_rows: 9846 +}) +``` + +As you can see, the oasst1 train dataset has 9846 rows, 1 column, the column name is 'text'. This 'text' column is the text data needed for incremental pre-training. 
The [incremental pre-training data format](./dataset_format.md#incremental-pre-training-dataset-format) describes that, during incremental pre-training, the data format should be:

```json
[{
    "conversation":[
        {
            "input": "",
            "output": "xxx"
        }
    ]
}]
```

Therefore, you can map the original data to the standard format using the following map function:

```python
# Suppose the function is stored in ./map_fn.py
def custom_map_fn(example):
    """
    >>> train_ds = ds['train'].map(custom_map_fn)
    >>> train_ds
    Dataset({
        features: ['text', 'conversation'],
        num_rows: 9846
    })
    >>> train_ds[0]['conversation']
    [{'input': '', 'output': 'xxx'}]
    """
    return {'conversation': [{'input': '', 'output': example['text']}]}
```

### Step 2, List Candidate Model Names

XTuner provides several ready-to-use configuration files. Users can view them with the following command:

```bash
xtuner list-cfg -p internlm
```

`-p` is used for fuzzy search. If you want to train other models, you can replace `internlm` with other model names supported by XTuner.

### Step 3, Export the Config File

If the provided configuration file does not meet your needs, please export the provided configuration file and make corresponding changes:

```bash
xtuner copy-cfg ${CONFIG_NAME} ${SAVE_DIR}
```

For example, you can export the config named `internlm_7b_qlora_oasst1_e3` to the current directory using the following command:

```bash
xtuner copy-cfg internlm_7b_qlora_oasst1_e3 .
```

### Step 4, Modify the Config File

The following modifications need to be made to the config file copied in Step 3:

1. Import the mapping function `custom_map_fn` implemented in Step 1.
2. Replace the `dataset_map_fn` in `train_dataset` with `custom_map_fn`.
3. Set the `template_map_fn` in `train_dataset` to `None` (because there is no need to add the dialogue template to the incremental pre-training dataset).
4. Adjust the path of the original dataset. For operations related to `load_dataset`, refer to the [user documentation](https://huggingface.co/docs/datasets/loading).
5. Disable the `EvaluateChatHook`, since during incremental pre-training the model can only continue text and has no conversational ability.

```diff
from xtuner.dataset import process_hf_dataset
from datasets import load_dataset
- from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory
+ from mmengine.config import read_base
+ with read_base():
+     from .map_fn import custom_map_fn
...
#######################################################################
#                          PART 1  Settings                           #
#######################################################################
- data_path = 'timdettmers/openassistant-guanaco'
- prompt_template = PROMPT_TEMPLATE.internlm_chat
+ data_path = 'path/to/your/data'
#######################################################################
#                     STEP 3  Dataset & Dataloader                    #
#######################################################################
train_dataset = dict(
    type=process_hf_dataset,
    dataset=dict(type=load_dataset, path=data_path),
    tokenizer=tokenizer,
    max_length=max_length,
-   dataset_map_fn=oasst1_map_fn,
+   dataset_map_fn=custom_map_fn,
-   template_map_fn=dict(
-       type=template_map_fn_factory, template=prompt_template),
+   template_map_fn=None,
    remove_unused_columns=True,
    shuffle_before_pack=True,
    pack_to_max_length=pack_to_max_length)
...
+####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), +- dict( +- type=EvaluateChatHook, +- tokenizer=tokenizer, +- every_n_iters=evaluation_freq, +- evaluation_inputs=evaluation_inputs, +- system=SYSTEM, +- instruction=prompt_template.INSTRUCTION) +] +... +``` + +### Step 5, Check custom Dataset (Optional) + +After modifying the config file, you can execute the 'xtuner/tools/check_custom_dataset.py' script to verify the correct construction of the dataset. + +```bash +xtuner check-custom-dataset $CONFIG +``` + +`$CONFIG` represents the file path of the modified configuration file in Step 4. + +## Using Custom Datasets + +When using custom datasets for incremental pre-training, we recommend constructing the dataset according to the [incremental pre-training data format](./dataset_format.md#incremental-pre-training-dataset-format) defined by XTuner. If the custom dataset is in other formats such as oasst1, refer to the section on [Using Dataset in HuggingFace Hub](#using-dataset-in-huggingface-hub). + +### Step 1, Data Preparation + +Prepare custom data according to the [incremental pre-training data format](./dataset_format.md#incremental-pre-training-dataset-format) defined by XTuner: + +```json +[ + { + "conversation":[ + { + "input": "", + "output": "xxx" + }, + ] + }, + { + "conversation":[ + { + "input": "", + "output": "xxx" + }, + ] + } +] +``` + +### Step 2, List Candidate Model Names + +```bash +xtuner list-cfg -p internlm +``` + +The `-p` option is for fuzzy search. If you want to train other models, you can replace internlm with the name of any other model supported by XTuner. + +### Step 3, Export the Config File + +```bash +xtuner copy-cfg internlm_7b_qlora_oasst1_e3 . +``` + +### Step 4, Modify the config file + +Modifications need to be made to the config file obtained in Step 3 as follows: + +1. Adjust the path of the original dataset +2. Since the dataset format is already standardized, set `dataset_map_fn` in `train_dataset` to `None` +3. Set `template_map_fn` in `train_dataset` to `None`, because there is no need to add conversation templates to the incremental pre-training dataset +4. Close the `EvaluateChatHook`, since the model only has a continuation function during incremental pre-training and doesn't have the conversation function. + +```diff +from xtuner.dataset import process_hf_dataset +from datasets import load_dataset +- from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +... +####################################################################### +# PART 1 Settings # +####################################################################### +- data_path = 'timdettmers/openassistant-guanaco' +- prompt_template = PROMPT_TEMPLATE.internlm_chat ++ data_path = 'path/to/your/json/data' +... 
+####################################################################### +# STEP 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, +- dataset=dict(type=load_dataset, path=data_path), ++ dataset=dict( ++ type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, +- dataset_map_fn=oasst1_map_fn, ++ dataset_map_fn=None, +- template_map_fn=dict( +- type=template_map_fn_factory, template=prompt_template), ++ template_map_fn=None, + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) +... +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), +- dict( +- type=EvaluateChatHook, +- tokenizer=tokenizer, +- every_n_iters=evaluation_freq, +- evaluation_inputs=evaluation_inputs, +- system=SYSTEM, +- instruction=prompt_template.INSTRUCTION) +] +... +``` + +### Step 5, Check custom Dataset (Optional) + +After modifying the config file, you can execute the 'xtuner/tools/check_custom_dataset.py' script to verify the correct construction of the dataset. + +```bash +xtuner check-custom-dataset $CONFIG +``` + +`$CONFIG` represents the file path of the modified configuration file in Step 4. diff --git a/data/xtuner/docs/en/user_guides/intern_repo_dataset.md b/data/xtuner/docs/en/user_guides/intern_repo_dataset.md new file mode 100644 index 0000000000000000000000000000000000000000..407810d449ddb9aefd33400bf3b98a9c8abef2b0 --- /dev/null +++ b/data/xtuner/docs/en/user_guides/intern_repo_dataset.md @@ -0,0 +1,92 @@ +**Note: The primary aim of this document is to provide detailed instructions on how to train models based on the data format provided by the InternLM repository, rather than to train the InternLM model itself.** + +## Tutorial + +### Step 1, Export the Template Config File + +you can export the config named \`internlm_7b_full_intern_repo_dataset_template\`\` to the current directory using the following command: + +```bash +xtuner copy-cfg internlm_7b_full_intern_repo_dataset_template . +``` + +### Step 2, Modify the Template Config File + +You only need to modify the corresponding part of the above interface in the Config file. + +```diff +... + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-7b' +use_varlen_attn = True + +# Data +- dataset_folder = '/path/to/your/dataset' ++ dataset_folder = '/real/dataset/path' +max_length = 8192 +pack_to_max_length = True +... 
+``` + +### Step 3, Start training + +Slurm: + +``` +srun ${SRUN_ARGS} xtuner train internlm_7b_full_intern_repo_dataset_template_copy.py --launcher slurm --deepspeed deepspeed_zero1 +``` + +Aliyun DLC: + +```diff +export NCCL_IB_TC=136 +export NCCL_IB_SL=5 +export NCCL_IB_GID_INDEX=3 +export NCCL_SOCKET_IFNAME=bond0 +export NCCL_DEBUG=INFO +export NCCL_IB_HCA=mlx5 +export NCCL_IB_TIMEOUT=22 +export NCCL_IB_QPS_PER_CONNECTION=8 +export NCCL_NET_PLUGIN=none + +export NCCL_BUFFSIZE=2097152 +export PYTORCH_CUDA_ALLOC_CONF=max_split_size_mb:512 +- export EXP_NAME=debug ++ export EXP_NAME=your_exp_name +export PYTHONPATH='.':$PYTHONPATH +source ~/.bashrc ++ cd /path/to/xtuner ++ conda activate conda_env_name + +echo ${KUBERNETES_CONTAINER_RESOURCE_GPU} +echo ${WORLD_SIZE} +echo ${MASTER_PORT} +echo ${MASTER_ADDR} +echo ${RANK} +python -m torch.distributed.launch \ + --nproc_per_node=${KUBERNETES_CONTAINER_RESOURCE_GPU} \ + --master_addr=${MASTER_ADDR} \ + --master_port=${MASTER_PORT} \ + --nnodes=${WORLD_SIZE} \ + --node_rank=${RANK} \ + xtuner/tools/train.py \ + internlm_7b_full_intern_repo_dataset_template_copy.py \ + --deepspeed deepspeed_zero1 \ + --launcher pytorch \ + --work-dir work_dirs/${EXP_NAME} +``` + +## Dataset Format + +The training dataset of [InternLM](https://github.com/InternLM/InternLM) is pre-tokenized, and is formatted as follows: + +``` +{"tokens": [1, -333, -352, -1621, ..., 103028, 13, 2]} +{"tokens": [1, -333, -352, -1621, ..., 103028, 13, 2]} +``` + +Among them, tokens with negative values are not involved in the calculation of loss during the training process. diff --git a/data/xtuner/docs/en/user_guides/multi_turn_conversation.md b/data/xtuner/docs/en/user_guides/multi_turn_conversation.md new file mode 100644 index 0000000000000000000000000000000000000000..783d2ec687ca21cc95a989be594b182f172619a9 --- /dev/null +++ b/data/xtuner/docs/en/user_guides/multi_turn_conversation.md @@ -0,0 +1,284 @@ +# Multi-turn Dialogue Data Pipeline + +- [Using Dataset in HuggingFace Hub](#using-dataset-in-huggingface-hub) +- [Using Custom Datasets](#using-custom-datasets) + +The purpose of multi-turn dialogue command fine-tuning is to enhance the model's ability for multi-turn dialogues. + +XTuner supports the use of HuggingFace Hub datasets or custom datasets for SFT (Supervised FineTune). The main difference between them is that when using the HuggingFace Hub dataset, the original data needs to be mapped to the [multi-turn dialogue data format](./dataset_format.md#multi-turn-dialogue-dataset-format) defined by XTuner. For custom datasets, it is recommended that users construct the dataset according to the [multi-turn dialogue data format](./dataset_format.md#multi-turn-dialogue-dataset-format). + +## Using Dataset in HuggingFace Hub + +### Step 1, Map Original Dataset to Standard Format + +Since the formats of different datasets vary, the original data needs to be transformed into the [multi-turn dialogue data format](./dataset_format.md#multi-turn-dialogue-dataset-format) defined by XTuner. XTuner supports the use of a map function to achieve format mapping. The following example uses the [oasst1 dataset](https://huggingface.co/datasets/OpenAssistant/oasst1) to illustrate how to implement data mapping. 
+ +The oasst1 dataset format is as follows: + +```python +>>> from datasets import load_dataset + +>>> ds = load_dataset(path='timdettmers/openassistant-guanaco') +>>> ds['train'] +Dataset({ + features: ['text'], + num_rows: 9846 +}) +>>> ds['train'][0]['text'] +'### Human: xxx ### Assistant: xxx ###Human: xxx ###Assistant: xxx' +``` + +It's clear that the oasst1 dataset can not only be used as an incremental pre-training dataset for the model to learn some basic language knowledge, but also, after some processing, serve as a multi-turn dialogue dataset to cultivate the model's multi-turn conversation capabilities. The [multi-turn dialogue data format](./dataset_format.md#multi-turn-dialogue-dataset-format) introduces that in the fine-tuning process of multi-turn dialogue instructions, the data format should be: + +```json +[{ + "conversation":[ + { + "system": "xxx", + "input": "xxx", + "output": "xxx" + }, + { + "input": "xxx", + "output": "xxx" + } + ] +}, +{ + "conversation":[ + { + "system": "xxx", + "input": "xxx", + "output": "xxx" + }, + { + "input": "xxx", + "output": "xxx" + } + ] +}] +``` + +Therefore, the original data can be mapped to a standard format using the following map function: + +```python +# Suppose the function is stored in ./map_fn.py +SYSTEM_OASST1 = '' # oasst1 does not set the system text +def custom_map_fn(example): + r""" + Example before preprocessing: + example['text'] = '### Human: Can you explain xxx' + '### Assistant: Sure! xxx' + '### Human: I didn't understand how xxx' + '### Assistant: It has to do with a process xxx.' + + Example after preprocessing: + example['conversation'] = [ + { + 'input': 'Can you explain xxx', + 'output': 'Sure! xxx' + }, + { + 'input': 'I didn't understand how xxx', + 'output': 'It has to do with a process xxx.' + } + ] + """ + data = [] + for sentence in example['text'].strip().split('###'): + sentence = sentence.strip() + if sentence[:6] == 'Human:': + data.append(sentence[6:].strip()) + elif sentence[:10] == 'Assistant:': + data.append(sentence[10:].strip()) + if len(data) % 2: + # The last round of conversation solely consists of input + # without any output. + # Discard the input part of the last round, as this part is ignored in + # the loss calculation. + data.pop() + conversation = [] + for i in range(0, len(data), 2): + system = SYSTEM_OASST1 if i == 0 else '' + single_turn_conversation = { + 'system': system, + 'input': data[i], + 'output': data[i + 1]} + conversation.append(single_turn_conversation) + return {'conversation': conversation} +``` + +### Step 2, List Candidate Model Names + +XTuner provides several ready-to-use configuration files. Users can view them using the following command: + +```bash +xtuner list-cfg -p internlm +``` + +`-p` is used for fuzzy search. If you want to train other models, you can replace `internlm` with other model names supported by XTuner. + +### Step 3, Export the Config File + +If the provided configuration file does not meet your needs, please export the offered configuration file and make appropriate changes: + +```bash +xtuner copy-cfg ${CONFIG_NAME} ${SAVE_DIR} +``` + +For example, use the following command to export the config named `internlm_7b_qlora_oasst1_e3` to the current directory: + +```bash +xtuner copy-cfg internlm_7b_qlora_oasst1_e3 . +``` + +### Step 4, Modify Config Files + +The config file copied in Step 3 needs to be modified as follows: + +1. Import the map function `custom_map_fn` implemented in Step 1. +2. 
Replace `dataset_map_fn` in `train_dataset` with `custom_map_fn`. +3. Adjust the path of the original dataset. You can refer to the [user documentation](https://huggingface.co/docs/datasets/loading) for operations related to `load_dataset`. + +```diff +from xtuner.dataset import process_hf_dataset +from datasets import load_dataset +- from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory ++ from xtuner.dataset.map_fns import template_map_fn_factory ++ from mmengine.config import read_base ++ with read_base(): ++ from .map_fn import custom_map_fn +... +####################################################################### +# PART 1 Settings # +####################################################################### +- data_path = 'timdettmers/openassistant-guanaco' ++ data_path = 'path/to/your/data' +... +####################################################################### +# STEP 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, +- dataset_map_fn=oasst1_map_fn, ++ dataset_map_fn=custom_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) +... +``` + +### Step 5, Check custom Dataset (Optional) + +After modifying the config file, you can execute the 'xtuner/tools/check_custom_dataset.py' script to verify the correct construction of the dataset. + +```bash +xtuner check-custom-dataset $CONFIG +``` + +`$CONFIG` represents the file path of the modified configuration file in Step 4. + +## Using Custom Datasets + +When using a custom multi-turn dialogue dataset for command fine-tuning, we recommend constructing the dataset in the [multi-turn dialogue data format](./dataset_format.md#multi-turn-dialogue-dataset-format) as defined by XTuner. If the custom dataset format is oasst1 or other formats, you can refer to the section on [Using Datasets in HuggingFace Hub](#using-dataset-in-huggingface-hub). + +### Step 1, Dataset Preparation + +Prepare your custom data according to the [multi-turn dialogue data format](./dataset_format.md#multi-turn-dialogue-dataset-format) defined by XTuner: + +```json +[{ + "conversation":[ + { + "system": "xxx", + "input": "xxx", + "output": "xxx" + }, + { + "input": "xxx", + "output": "xxx" + } + ] +}, +{ + "conversation":[ + { + "system": "xxx", + "input": "xxx", + "output": "xxx" + }, + { + "input": "xxx", + "output": "xxx" + } + ] +}] +``` + +### Step 2, List Candidate Model Names + +```bash +xtuner list-cfg -p internlm +``` + +`-p` is for fuzzy search. If you want to train other models, you can replace `internlm` with other model names supported by XTuner. + +### Step 3, Export the Config File + +```bash +xtuner copy-cfg internlm_7b_qlora_oasst1_e3 . +``` + +### Step 4, Modify Config File + +The config file copied in Step 3 needs to be modified as follows: + +1. Adjust the path of the original dataset +2. Since the dataset format is already in the standard format, set `dataset_map_fn` in `train_dataset` to `None` + +```diff +from xtuner.dataset import process_hf_dataset +from datasets import load_dataset +- from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory ++ from xtuner.dataset.map_fns import template_map_fn_factory +... 
#######################################################################
#                          PART 1  Settings                           #
#######################################################################
- data_path = 'timdettmers/openassistant-guanaco'
+ data_path = 'path/to/your/json/data'
...
#######################################################################
#                     STEP 3  Dataset & Dataloader                    #
#######################################################################
train_dataset = dict(
    type=process_hf_dataset,
-   dataset=dict(type=load_dataset, path=data_path),
+   dataset=dict(
+       type=load_dataset, path='json', data_files=dict(train=data_path)),
    tokenizer=tokenizer,
    max_length=max_length,
-   dataset_map_fn=oasst1_map_fn,
+   dataset_map_fn=None,
    template_map_fn=dict(
        type=template_map_fn_factory, template=prompt_template),
    remove_unused_columns=True,
    shuffle_before_pack=True,
    pack_to_max_length=pack_to_max_length)
...
```

### Step 5, Check custom Dataset (Optional)

After modifying the config file, you can execute the `xtuner/tools/check_custom_dataset.py` script to verify the correct construction of the dataset.

```bash
xtuner check-custom-dataset $CONFIG
```

`$CONFIG` represents the file path of the modified configuration file in Step 4.

diff --git a/data/xtuner/docs/en/user_guides/prompt_template.md b/data/xtuner/docs/en/user_guides/prompt_template.md new file mode 100644 index 0000000000000000000000000000000000000000..2d5a37ad02ef786fcd8f379bbc259cbdbca3060e --- /dev/null +++ b/data/xtuner/docs/en/user_guides/prompt_template.md @@ -0,0 +1,115 @@

# Prompt Template

The prompt template of XTuner ensures consistency with the LLMs' official templates. Below, we will elaborate on its logic using the example of the InternLM-Chat model (`internlm_chat`).

## Structure

```python
internlm_chat=dict(
    SYSTEM='<|System|>:{system}\n',
    INSTRUCTION='<|User|>:{input}\n<|Bot|>:',
    SUFFIX='<eoa>',
    SUFFIX_AS_EOS=True,
    SEP='\n',
    STOP_WORDS=['<eoa>'])
```

- `SYSTEM`: The template for the "system" field during Q&A, where `{system}` represents the "system" text. It's worth noting that this field only appears once in multi-turn dialogues, specifically in the first turn.

- `INSTRUCTION`: The template for the "instruction" field during Q&A, where `{input}` represents the user instruction text.

- `SUFFIX`: The suffix for the "instruction" field, which will be appended to the "response" of each Q&A turn. Typically, this also serves as a special ending symbol (*i.e.*, `eos`). Defaults to `''`.

- `SUFFIX_AS_EOS`: Represents whether the aforementioned suffix acts as an ending symbol. If set to `True`, it will replace the `eos_token` of the `tokenizer`. Otherwise, the `eos_token` of the `tokenizer` will still be used to denote the end of sequence. Defaults to `False`.

- `SEP`: Used to separate multi-turn dialogues, it will be appended after the `INSTRUCTION` and `SUFFIX`. Defaults to `''`.

- `STOP_WORDS`: Used to specify the stop words, this information will be utilized during the text generation stage. It's worth noting that the `eos_token` of the `tokenizer` is automatically added to `STOP_WORDS`, without the need for manual setting.
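Putting these fields together, the sketch below shows how such a template dict expands into the final prompt string for one exchange. It illustrates only the assembly logic described above (generation-time fields such as `SUFFIX_AS_EOS` and `STOP_WORDS` are ignored); it is not XTuner's internal implementation:

```python
internlm_chat = dict(
    SYSTEM='<|System|>:{system}\n',
    INSTRUCTION='<|User|>:{input}\n<|Bot|>:',
    SUFFIX='<eoa>',
    SEP='\n')

def render(template, system, turns):
    """Assemble a prompt: SYSTEM once, then INSTRUCTION/response/SUFFIX per turn."""
    text = template['SYSTEM'].format(system=system)
    for user_input, response in turns:
        text += template['INSTRUCTION'].format(input=user_input)
        text += response + template['SUFFIX'] + template['SEP']
    return text

print(render(internlm_chat, 'You are an AI assistant.',
             [('Hello?', 'Hello! How can I help you?')]))
# <|System|>:You are an AI assistant.
# <|User|>:Hello?
# <|Bot|>:Hello! How can I help you?<eoa>
```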
## Results

**Single-turn**

```
<|System|>:{system}
<|User|>:{input}
<|Bot|>:{output}<eoa>
```

**Multi-turn**

```
<|System|>:{system}
<|User|>:{input}
<|Bot|>:{output}<eoa>
<|User|>:{input}
<|Bot|>:{output}<eoa>
<|User|>:{input}
<|Bot|>:{output}<eoa>
```

## Choosing the prompt template

| Model | Prompt Template |
| ---------------------------------------- | --------------- |
| baichuan-inc/Baichuan-7B | default\* |
| baichuan-inc/Baichuan-13B-Base | default\* |
| baichuan-inc/Baichuan-13B-Chat | baichuan_chat |
| baichuan-inc/Baichuan2-7B-Base | default\* |
| baichuan-inc/Baichuan2-7B-Chat | baichuan2_chat |
| baichuan-inc/Baichuan2-13B-Base | default\* |
| baichuan-inc/Baichuan2-13B-Chat | baichuan2_chat |
| THUDM/chatglm2-6b | chatglm2 |
| THUDM/chatglm3-6b | chatglm3 |
| THUDM/chatglm3-6b-base | chatglm3 |
| deepseek-ai/deepseek-coder-6.7b-base | deepseek_coder |
| deepseek-ai/deepseek-coder-6.7b-instruct | deepseek_coder |
| internlm/internlm-7b | default\* |
| internlm/internlm-20b | default\* |
| internlm/internlm-chat-7b | internlm_chat |
| internlm/internlm-chat-20b | internlm_chat |
| huggyllama/llama-7b | default |
| meta-llama/Llama-2-7b-hf | llama2_chat |
| meta-llama/Llama-2-7b-chat-hf | llama2_chat |
| meta-llama/Llama-2-70b-hf | llama2_chat |
| lmsys/vicuna-7b-v1.5 | vicuna |
| lmsys/vicuna-13b-v1.5 | vicuna |
| mistralai/Mistral-7B-v0.1 | mistral |
| mistralai/Mixtral-8x7B-v0.1 | mixtral |
| mistralai/Mixtral-8x7B-Instruct-v0.1 | mixtral |
| Qwen/Qwen-1_8B | default\* |
| Qwen/Qwen-1_8B-Chat | qwen_chat |
| Qwen/Qwen-7B | default\* |
| Qwen/Qwen-7B-Chat | qwen_chat |
| Qwen/Qwen-72B | default\* |
| Qwen/Qwen-72B-Chat | qwen_chat |
| bigcode/starcoder | default |
| 01-ai/Yi-6B | default |
| 01-ai/Yi-34B | default |
| HuggingFaceH4/zephyr-7b-beta | zephyr |
| deepseek-ai/deepseek-moe-16b-base | deepseek_moe |
| deepseek-ai/deepseek-moe-16b-chat | deepseek_moe |
| internlm/internlm2-1_8b | default\* |
| internlm/internlm2-7b | default\* |
| internlm/internlm2-20b | default\* |
| internlm/internlm2-chat-1_8b | internlm2_chat |
| internlm/internlm2-chat-7b | internlm2_chat |
| internlm/internlm2-chat-20b | internlm2_chat |
| Qwen/Qwen1.5-0.5B | default\* |
| Qwen/Qwen1.5-0.5B-Chat | qwen_chat |
| Qwen/Qwen1.5-1.8B | default\* |
| Qwen/Qwen1.5-1.8B-Chat | qwen_chat |
| Qwen/Qwen1.5-4B | default\* |
| Qwen/Qwen1.5-4B-Chat | qwen_chat |
| Qwen/Qwen1.5-7B | default\* |
| Qwen/Qwen1.5-7B-Chat | qwen_chat |
| Qwen/Qwen1.5-14B | default\* |
| Qwen/Qwen1.5-14B-Chat | qwen_chat |
| Qwen/Qwen1.5-72B | default\* |
| Qwen/Qwen1.5-72B-Chat | qwen_chat |
| google/gemma-2b | default\* |
| google/gemma-2b-it | gemma\* |
| google/gemma-7b | default\* |
| google/gemma-7b-it | gemma\* |

\*: The official template has special tokens (like `<|im_start|>`, `<|im_end|>`) that were not trained during the pre-training phase. Therefore, these models utilize the `default` template.
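To apply one of these templates, XTuner configs reference it by name from `xtuner.utils`. A minimal sketch of the relevant config lines (the model choice here is just an example):

```python
from xtuner.utils import PROMPT_TEMPLATE

# Pick the template row matching the base model,
# e.g. internlm/internlm2-chat-7b -> internlm2_chat
prompt_template = PROMPT_TEMPLATE.internlm2_chat
```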
diff --git a/data/xtuner/docs/en/user_guides/single_turn_conversation.md b/data/xtuner/docs/en/user_guides/single_turn_conversation.md
new file mode 100644
index 0000000000000000000000000000000000000000..8e5f98f7a5e31e020e88166c676c7ac5f2e78bec
--- /dev/null
+++ b/data/xtuner/docs/en/user_guides/single_turn_conversation.md
@@ -0,0 +1,301 @@
+# Single-turn Dialogue Data Pipeline
+
+- [Using Dataset in HuggingFace Hub](#using-dataset-in-huggingface-hub)
+- [Using Custom Datasets](#using-custom-datasets)
+  - [Using Alpaca Format Custom Datasets](#using-alpaca-format-custom-datasets)
+  - [Using Other Format Custom Datasets](#using-other-format-custom-datasets)
+
+Single-turn dialogue instruction fine-tuning aims to enhance the model's ability to respond to specific instructions.
+
+XTuner supports SFT (Supervised Fine-Tuning) with HuggingFace Hub datasets, Alpaca-format custom datasets, and custom datasets in other formats. The main differences between these options are as follows:
+
+1. When using a HuggingFace Hub dataset for SFT, the original data must be mapped to the XTuner-defined [single-turn dialogue data format](./dataset_format.md#single-turn-dialogue-dataset-format).
+2. When using an Alpaca-format custom dataset for SFT, the dataset must include at least three columns: 'instruction', 'input', and 'output'.
+3. When working with custom datasets in other formats, it is recommended to construct the dataset directly in the single-turn dialogue data format, as this significantly reduces the time required for data preprocessing.
+
+## Using Dataset in HuggingFace Hub
+
+### Step 1, Map the Original Dataset to Standard Format
+
+Since different datasets have different formats, the original data must be mapped to the XTuner-defined [single-turn dialogue data format](./dataset_format.md#single-turn-dialogue-dataset-format). XTuner supports such mapping through a map function. Below we use the [alpaca dataset](https://huggingface.co/datasets/tatsu-lab/alpaca) as an example to show how to implement data mapping.
+
+The alpaca dataset format is shown below:
+
+```python
+>>> from datasets import load_dataset
+
+>>> ds = load_dataset(path='tatsu-lab/alpaca')
+>>> ds['train']
+Dataset({
+    features: ['instruction', 'input', 'output', 'text'],
+    num_rows: 52002
+})
+```
+
+The alpaca train split comprises 52,002 records organized into four columns: 'instruction', 'input', 'output', and 'text'. The 'instruction' and 'input' columns describe the task posed to the model, while the 'output' column holds the corresponding ground-truth response. Before single-turn instruction fine-tuning, this data must be mapped to the XTuner-defined [single-turn dialogue data format](./dataset_format.md#single-turn-dialogue-dataset-format), which is prescribed as follows:
+
+```json
+[{
+    "conversation":[
+        {
+            "system": "xxx",
+            "input": "xxx",
+            "output": "xxx"
+        }
+    ]
+},
+{
+    "conversation":[
+        {
+            "system": "xxx",
+            "input": "xxx",
+            "output": "xxx"
+        }
+    ]
+}]
+```
+
+Therefore, the original data can be mapped to the standard format using the following map function:
+
+```python
+# Suppose the function is stored in ./map_fn.py
+SYSTEM_ALPACA = ('Below is an instruction that describes a task. 
' + 'Write a response that appropriately completes the request.\n') +def custom_map_fn(example): + if example.get('output') == '': + return {'conversation': []} + else: + return { + 'conversation': [{ + 'system': SYSTEM_ALPACA, + 'input': f"{example['instruction']}\n{example['input']}", + 'output': example['output'] + }] + } +``` + +### Step 2, List Candidate Model Names + +XTuner provides several ready-to-use configuration files. Users can view them using the following command: + +```bash +xtuner list-cfg -p internlm +``` + +`-p` is used for fuzzy search. If you want to train other models, you can replace `internlm` with other model names supported by XTuner. + +### Step 3, Export the Config File + +If the provided configuration file does not meet your needs, please export the offered configuration file and make appropriate changes: + +```bash +xtuner copy-cfg ${CONFIG_NAME} ${SAVE_DIR} +``` + +For example, use the following command to export the config named `internlm_7b_qlora_alpaca_e3` to the current directory: + +```bash +xtuner copy-cfg internlm_7b_qlora_alpaca_e3 . +``` + +### Step 4, Modify Config Files + +The config file copied in Step 3 needs to be modified as follows: + +1. Import the map function `custom_map_fn` implemented in Step 1. +2. Replace `dataset_map_fn` in `train_dataset` with `custom_map_fn`. +3. Adjust the path of the original dataset. You can refer to the [user documentation](https://huggingface.co/docs/datasets/loading) for operations related to `load_dataset`. + +```diff +from xtuner.dataset import process_hf_dataset +from datasets import load_dataset +- from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory ++ from xtuner.dataset.map_fns import template_map_fn_factory ++ from mmengine.config import read_base ++ with read_base(): ++ from .map_fn import custom_map_fn +... +####################################################################### +# PART 1 Settings # +####################################################################### +- data_path = 'tatsu-lab/alpaca' ++ data_path = 'path/to/your/data' +... +####################################################################### +# STEP 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, +- dataset_map_fn=alpaca_map_fn, ++ dataset_map_fn=custom_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) +... +``` + +### Step 5, Check custom Dataset (Optional) + +After modifying the config file, you can execute the 'xtuner/tools/check_custom_dataset.py' script to verify the correct construction of the dataset. + +```bash +xtuner check-custom-dataset $CONFIG +``` + +`$CONFIG` represents the file path of the modified configuration file in Step 4. + +## Using Custom Datasets + +### Using Alpaca Format Custom Datasets + +If the data format of the custom dataset meets the 'alpaca' format, you can refer to the following steps for SFT training. + +#### Step 1, List Candidate Model Names + +```bash +xtuner list-cfg -p internlm +``` + +`-p` is for fuzzy search. If you want to train other models, you can replace `internlm` with other model names supported by XTuner. 
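+
+Before exporting a config, it can help to confirm that your JSON file parses and actually carries the three required columns. A quick check (the data path is a placeholder for your own file):
+
+```python
+from datasets import load_dataset
+
+ds = load_dataset('json', data_files=dict(train='path/to/your/json/data'))
+assert {'instruction', 'input', 'output'} <= set(ds['train'].column_names)
+```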
+ +#### Step 2, Export the Config File + +```bash +xtuner copy-cfg ${CONFIG_NAME} ${SAVE_DIR} +``` + +As the custom dataset follows the Alpaca format, 'CONFIG_NAME' should select the ALPACA-related candidate model names listed in Step 1. For example, execute the following command to export the 'internlm_7b_qlora_alpaca_e3' config to the current directory: + +```bash +xtuner copy-cfg internlm_7b_qlora_alpaca_e3 . +``` + +#### Step 3, Modify Config File + +The config copied in Step 2 needs to be modified as follows: + +```diff +from xtuner.dataset import process_hf_dataset +from datasets import load_dataset +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.utils import PROMPT_TEMPLATE +... +####################################################################### +# PART 1 Settings # +####################################################################### +- data_path = 'tatsu-lab/alpaca' ++ data_path = 'path/to/your/json/data' +... +####################################################################### +# STEP 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, +- dataset=dict(type=load_dataset, path=data_path), ++ dataset=dict( ++ type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) +... +``` + +### Using Other Format Custom Datasets + +#### Step 1, Dataset Preparation + +Prepare your custom data according to the [single-turn dialogue data format](./dataset_format.md#single-turn-dialogue-dataset-format) defined by XTuner: + +```json +[{ + "conversation":[ + { + "system": "xxx", + "input": "xxx", + "output": "xxx" + } + ] +}, +{ + "conversation":[ + { + "system": "xxx", + "input": "xxx", + "output": "xxx" + } + ] +}] +``` + +#### Step 2, List Candidate Model Names + +```bash +xtuner list-cfg -p internlm +``` + +`-p` is for fuzzy search. If you want to train other models, you can replace `internlm` with other model names supported by XTuner. + +#### Step 3, Export the Config File + +```bash +xtuner copy-cfg internlm_7b_qlora_alpaca_e3 . +``` + +#### Step 4, Modify Config File + +The config file copied in Step 3 needs to be modified as follows: + +1. Adjust the path of the original dataset +2. Since the dataset format is already in the standard format, set `dataset_map_fn` in `train_dataset` to `None` + +```diff +from xtuner.dataset import process_hf_dataset +from datasets import load_dataset +- from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory ++ from xtuner.dataset.map_fns import template_map_fn_factory +... +####################################################################### +# PART 1 Settings # +####################################################################### +- data_path = 'tatsu-lab/alpaca' ++ data_path = 'path/to/your/json/data' +... 
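+# NOTE: with an Alpaca-format JSON file, only the `dataset` line in STEP 3 below
+# needs to change; `alpaca_map_fn` is kept because the column names already match.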
+####################################################################### +# STEP 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, +- dataset=dict(type=load_dataset, path=data_path), ++ dataset=dict( ++ type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, +- dataset_map_fn=alpaca_map_fn, ++ dataset_map_fn=None, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) +... +``` + +#### Step 5, Check custom Dataset (Optional) + +After modifying the config file, you can execute the 'xtuner/tools/check_custom_dataset.py' script to verify the correct construction of the dataset. + +```bash +xtuner check-custom-dataset $CONFIG +``` + +`$CONFIG` represents the file path of the modified configuration file in Step 4. diff --git a/data/xtuner/docs/zh_cn/.readthedocs.yaml b/data/xtuner/docs/zh_cn/.readthedocs.yaml new file mode 100644 index 0000000000000000000000000000000000000000..8d00802c5581d2e60a8060e2042fc59f8c6b81a1 --- /dev/null +++ b/data/xtuner/docs/zh_cn/.readthedocs.yaml @@ -0,0 +1,16 @@ +version: 2 + +build: + os: ubuntu-22.04 + tools: + python: "3.8" + +formats: + - epub + +python: + install: + - requirements: requirements/docs.txt + +sphinx: + configuration: docs/zh_cn/conf.py diff --git a/data/xtuner/docs/zh_cn/Makefile b/data/xtuner/docs/zh_cn/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..d4bb2cbb9eddb1bb1b4f366623044af8e4830919 --- /dev/null +++ b/data/xtuner/docs/zh_cn/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/data/xtuner/docs/zh_cn/_static/image/logo.png b/data/xtuner/docs/zh_cn/_static/image/logo.png new file mode 100644 index 0000000000000000000000000000000000000000..0d6b754c98ae1d2c39de384d51b84d4c2f94c373 Binary files /dev/null and b/data/xtuner/docs/zh_cn/_static/image/logo.png differ diff --git a/data/xtuner/docs/zh_cn/acceleration/benchmark.rst b/data/xtuner/docs/zh_cn/acceleration/benchmark.rst new file mode 100644 index 0000000000000000000000000000000000000000..5a1c80804ad207c46e91e6e1dea703397bd5cc54 --- /dev/null +++ b/data/xtuner/docs/zh_cn/acceleration/benchmark.rst @@ -0,0 +1,199 @@ +速度基准 +======== + +我们在训练速度方面与 +`LLaMA-Factory `__ +进行了对比。对比所使用的 LLaMA-Factory commit id 为 +`8e04794 `__\ 。使用 +`Alpaca `__ +作为训练数据集测试速度。 + +硬件 +---- + +- NVIDIA A100-SXM4-80GB GPUs + +- Intel(R) Xeon(R) Gold 6348 CPU @ 2.60GHz + +软件环境 +-------- + +- Python 3.10 + +- PyTorch 1.13 + +- CUDA 11.7 + +- CUDNN 8.5 + +- NCCL 2.14.3 + +速度 +---- + +|image1| + +|image2| + +|image3| + +.. tip:: + TGS 全称是 Tokens per GPU per Second,每张 GPU 每秒训练的 Token 数 + +.. raw:: html + +
+ +.. list-table:: + :widths: 30 15 20 20 20 50 + :header-rows: 1 + + * - 模型 + - GPUs + - 序列长度 + - TGS + - TFLOPs + - Config + * - Llama2-7B + - 8 + - 8k + - 3028.3 + - 185.3 + - `llama2_70b_full_alpaca_enzh_8k_sp1.py `_ + * - Llama2-7B + - 8 + - 32k + - 2234.2 + - 193.0 + - `llama2_7b_full_alpaca_enzh_32k_sp1.py `_ + * - Llama2-7B + - 8 + - 128k + - 948.6 + - 180.3 + - `llama2_7b_full_alpaca_enzh_128k_sp8.py `_ + * - Llama2-7B + - 8 + - 256k + - 540.1 + - 176.9 + - `llama2_7b_full_alpaca_enzh_256k_sp8.py `_ + * - Llama2-7B + - 32 + - 1M + - 133.6 + - 153.9 + - `llama2_7b_full_alpaca_enzh_1M_sp16.py `_ + +.. list-table:: + :widths: 30 15 20 20 20 50 + :header-rows: 1 + + * - 模型 + - GPUs + - 序列长度 + - TGS + - TFLOPs + - Config + * - Yi-34B-200K + - 32 + - 8k + - 485.1 + - 165.6 + - `yi_34b_200k_full_alpaca_enzh_8k_sp1.py `_ + * - Yi-34B-200K + - 32 + - 32k + - 491.5 + - 209.1 + - `yi_34b_200k_full_alpaca_enzh_32k_sp2.py `_ + * - Yi-34B-200K + - 32 + - 128k + - 251.1 + - 191.8 + - `yi_34b_200k_full_alpaca_enzh_128k_sp8.py `_ + * - Yi-34B-200K + - 32 + - 256k + - 119.7 + - 145.3 + - `yi_34b_200k_full_alpaca_enzh_256k_sp8.py `_ + +.. list-table:: + :widths: 30 15 20 20 20 50 + :header-rows: 1 + + * - 模型 + - GPUs + - 序列长度 + - TGS + - TFLOPs + - Config + * - Llama2-70B + - 32 + - 8k + - 216.8 + - 144.7 + - `llama2_70b_full_alpaca_enzh_8k_sp1.py `_ + * - Llama2-70B + - 32 + - 32k + - 300.9 + - 239.6 + - `llama2_70b_full_alpaca_enzh_32k_sp4.py `_ + * - Llama2-70B + - 32 + - 128k + - 144.7 + - 189.7 + - `llama2_70b_full_alpaca_enzh_128k_sp8.py `_ + * - Llama2-70B + - 32 + - 256k + - 63.8 + - 127.6 + - `llama2_70b_full_alpaca_enzh_256k_sp16.py `_ + * - Llama2-70B + - 64 + - 1M + - 21.8 + - 133.5 + - `llama2_70b_full_alpaca_enzh_1M_sp64.py `_ + +.. note:: + 所有实验都会将 Alpaca 数据集拼接为最大长度。由于 Alpaca 数据集所含 + token 数较少,无法拼接成超长序列(如 1M + 长度),因此当序列长度较长时,会对 XTuner 代码进行如下修改: + + .. code:: diff + + # xtuner/dataset/huggingface.py + def build_origin_dataset(dataset, split): + ... + + # 6 times larger dataset (for speed testing purposes only) + + dataset = concatenate_datasets([dataset for _ in range(6)]) + return dataset + + def pack_dataset(dataset, max_length, use_varlen_attn, shuffle_before_pack, + map_num_proc): + dataset = dataset.map( + Packer(max_length, use_varlen_attn=use_varlen_attn), + batched=True, + - num_proc=map_num_proc + + batch_size=25000, + + num_proc=1 + ) + return dataset + + +.. note:: + 由于 Alpaca 数据量较小,因此做了第一处修改将数据集大小扩大了 6 + 倍,以保证拥有足够的训练 iter 数(保证速度测试的稳定性)。另外,由于 + Alpaca + 数据集每条数据的长度较短,因此在数据拼接的时候做了第二处修改以保证拥有足够多的数据,足以拼接为 + ``max_length`` 最大长度。 + +.. |image1| image:: https://github.com/InternLM/xtuner/assets/41630003/c9c05dbd-0806-4fb2-9da9-62f04b150f7c +.. |image2| image:: https://github.com/InternLM/xtuner/assets/41630003/3ef6308c-595b-4624-b56d-a8737a1f2261 +.. 
|image3| image:: https://github.com/InternLM/xtuner/assets/41630003/ba16368e-e5f7-41eb-89ed-1140a8633134 diff --git a/data/xtuner/docs/zh_cn/acceleration/deepspeed.rst b/data/xtuner/docs/zh_cn/acceleration/deepspeed.rst new file mode 100644 index 0000000000000000000000000000000000000000..2794dc72b051683af781e81c0ecf50873f73509d --- /dev/null +++ b/data/xtuner/docs/zh_cn/acceleration/deepspeed.rst @@ -0,0 +1,103 @@ +============================ +DeepSpeed +============================ + +借助 DeepSpeed 中的 ZeRO 技术(零冗余优化器),可以大幅降低 LLM 训练所消耗的显存 + +如何选择 ZeRO 策略 +==================== + +模型训练阶段,每张卡中显存占用可以分为两类: + +模型状态 + 模型参数(fp16)、模型梯度(fp16)和 Adam 优化器状态(fp32 的模型参数备份,fp32 的 momentum 和 fp32 的 variance )。 + 假设模型参数量 :math:`x` ,则共需要 :math:`2x + 2x + (4x + 4x + 4x) = 16x` 字节存储。 + +.. tip:: + 全量微调时,每增加 **1B** 参数,需要增加 **16GB** 的显存来存储模型状态 + +剩余状态 + 除了模型状态之外的显存占用,包括激活值、各种临时缓冲区以及无法使用的显存碎片。 + +**ZeRO 策略只优化模型状态显存占用,** 从 ZeRO-1 到 ZeRO-3 优化等级越来越高。 + +- ZeRO-1 策略针对优化器状态进行分片,模型参数和梯度仍旧是每张卡保持一份,此时,每张卡的模型状态所需显存是 :math:`4x + \frac{12x}{N}` ( N 为 GPU 数目) +- ZeRO-2 策略针对模型梯度进行分片,模型参数仍旧是每张卡保持一份,此时,每张卡的模型状态所需显存是 :math:`2x + \frac{14x}{N}` ( N 为 GPU 数目) +- ZeRO-3 策略针对模型参数进行分片,此时每张卡的模型状态所需显存是 :math:`\frac{16x}{N}` ( N 为 GPU 数目) + + +.. tip:: + 以 7B 模型 + 8 GPUs 全量微调为例: + + - ZeRO-1 模式下,每张卡上模型状态显存占用约为 :math:`2*7 + 2*7 + \frac{4*7 + 4*7 + 4*7}{8} = 38.5` GB + - ZeRO-2 模式下,每张卡上模型状态显存占用约为 :math:`2*7 + \frac{2*7 + 4*7 + 4*7 + 4*7}{8} = 26.25` GB + - ZeRO-3 模式下,每张卡上模型状态显存占用约为 :math:`\frac{2*7 + 2*7 + 4*7 + 4*7 + 4*7}{8} = 14` GB + +.. tip:: + 由于不同的优化方案不会影响模型训练结果,因此在不会导致 OOM 的前提下,建议使用优化等级较低的 ZeRO 策略。 + + +使用 ZeRO 策略训练 +=================== + +XTuner 内置 ZeRO 配置 +--------------------- + +XTuner 内置了五种 DeepSpeed ZeRO 配置: + +- deepspeed_zero1 +- deepspeed_zero2 +- deepspeed_zero2_offload +- deepspeed_zero3 +- deepspeed_zero3_offload + +可一键启动 DeepSpeed 进行训练,通过 ``--deepspeed`` 来选择不同的 ZeRO 配置: + +.. code-block:: console + + $ # 以下命令根据需要任选其一 + $ xtuner train xxx --deepspeed deepspeed_zero1 + $ xtuner train xxx --deepspeed deepspeed_zero2 + $ xtuner train xxx --deepspeed deepspeed_zero2_offload + $ xtuner train xxx --deepspeed deepspeed_zero3 + $ xtuner train xxx --deepspeed deepspeed_zero3_offload + +例如若想使用 DeepSpeed ZeRO2 显存优化算法运行 QLoRA 算法在 oasst1 数据集上微调 InternLM2-Chat-7B,可使用以下命令: + +.. code-block:: console + + $ # single gpu + $ xtuner train internlm2_chat_7b_qlora_oasst1_e3 --deepspeed deepspeed_zero2 + $ # multi gpus(torchrun) + $ NPROC_PER_NODE=${GPU_NUM} xtuner train internlm2_chat_7b_qlora_oasst1_e3 --deepspeed deepspeed_zero2 + $ # multi gpus(slurm) + $ srun ${SRUN_ARGS} xtuner train internlm2_chat_7b_qlora_oasst1_e3 --launcher slurm --deepspeed deepspeed_zero2 + + +自定义 ZeRO 配置 +------------------------------------ + + +可使用以下命令使用自定义 DeepSpeed 配置文件(需要是一个 json 文件): + +.. code-block:: console + + $ # single gpu + $ xtuner train internlm2_chat_7b_qlora_oasst1_e3 --deepspeed ${PATH_TO_DEEPSPEED_CONFIG} + $ # multi gpus(torchrun) + $ NPROC_PER_NODE=${GPU_NUM} xtuner train internlm2_chat_7b_qlora_oasst1_e3 --deepspeed ${PATH_TO_DEEPSPEED_CONFIG} + $ # multi gpus(slurm) + $ srun ${SRUN_ARGS} xtuner train internlm2_chat_7b_qlora_oasst1_e3 --launcher slurm --deepspeed ${PATH_TO_DEEPSPEED_CONFIG} + + +.. warning:: + DeepSpeed Config 中的 ``gradient_accumulation_steps`` 会被 XTuner config 中的 ``accumulative_counts`` 设置覆盖 + +.. warning:: + DeepSpeed Config 中的 ``train_micro_batch_size_per_gpu`` 会被 XTuner config 中的 ``train_dataloader.batch_size`` 设置覆盖 + +.. 
warning:: + DeepSpeed Config 中的 ``gradient_clipping`` 会被 XTuner config 中的 ``optim_wrapper.clip_grad.max_norm`` 设置覆盖 + +.. warning:: + XTuner 会根据所使用的 GPU 架构自动选择 ``fp16`` 或 ``bf16`` 训练,不受 diff --git a/data/xtuner/docs/zh_cn/acceleration/flash_attn.rst b/data/xtuner/docs/zh_cn/acceleration/flash_attn.rst new file mode 100644 index 0000000000000000000000000000000000000000..94bdcec62e86dd58c008c173faac3d01e3760c77 --- /dev/null +++ b/data/xtuner/docs/zh_cn/acceleration/flash_attn.rst @@ -0,0 +1,56 @@ +.. _flash_attn: + +Flash Attention +================================================== + +Flash Attention (Flash Attention 2) 是一种用于加速 Transformer 模型中 Attention 计算,并减少其显存消耗的算法。XTuner 中 Flash Attention (Flash Attention 2) 的支持情况如下表所示: + +.. list-table:: + :widths: 25 50 + :header-rows: 1 + + * - 模型 + - Flash Attention 支持情况 + * - baichuan 1/2 + - ❌ + * - chatglm 2/3 + - ❌ + * - deepseek + - ✅ + * - gemma + - ❌ + * - internlm 1/2 + - ✅ + * - llama 2 + - ✅ + * - mistral + - ✅ + * - qwen 1/1.5 + - ✅ + * - starcoder + - ✅ + * - yi + - ✅ + * - zephyr + - ✅ + +.. note:: + XTuner 会根据运行环境自动控制 Flash Attention 的使用情况 (见 `dispatch_modules `_): + + .. list-table:: + :widths: 50 50 + :header-rows: 1 + + * - 环境 + - Flash Attention 使用情况 + * - 安装 `flash attn `_ + - Flash Attention 2 + * - 未安装 `flash attn `_ 且 PyTorch Version <= 1.13 + - No Flash Attention + * - 未安装 `flash attn `_ 且 2.0 <= PyTorch Version <= 2.1 + - Flash Attention 1 + * - 未安装 `flash attn `_ 且 PyTorch Version >= 2.2 + - Flash Attention 2 + +.. note:: + 使用 XTuner 训练 QWen1/1.5 时若想使用 Flash Attention 加速,需要先安装 `flash attn `_ (参考 `flash attn 安装 `_,需要 cuda ) diff --git a/data/xtuner/docs/zh_cn/acceleration/hyper_parameters.rst b/data/xtuner/docs/zh_cn/acceleration/hyper_parameters.rst new file mode 100644 index 0000000000000000000000000000000000000000..39a4377fa14a0e734fe99cbfbc79c38d0e33028c --- /dev/null +++ b/data/xtuner/docs/zh_cn/acceleration/hyper_parameters.rst @@ -0,0 +1,49 @@ +===================== +调整加速策略 +===================== + +本节将会列举 XTuner 中会影响训练速度的配置项。 + + +max_length +------------------- + +``max_length`` 表示在数据预处理过程中,单条数据长度超过 ``max_length`` 的部分会被截断,基本所有实验都会设置该项。 + +pack_to_max_length +--------------------------- + +``pack_to_max_length`` 用于配置是否进行\ :ref:`数据集拼接 ` \ 。 + +``pack_to_max_length = True`` 表示在数据预处理过程中将多条短数据拼接为一条长度为 ``max_length`` 的长数据,该配置可以大幅提升训练速度。 + +若 ``pack_to_max_length = False``,则推荐将 ``batch_size`` 适度调大以保证训练的稳定性。 + +use_varlen_attn +--------------------------- + +``use_varlen_attn`` 用于配置是否在训练过程中使用\ :ref:`Varlen Flash Attention ` \ 。 + +当 ``use_varlen_attn = True`` 时,要求 ``pack_to_max_length`` 也要设置为 True。在此情况下,每个 token 在注意力计算阶段仅会关注其所在短数据中的所有 tokens (而非整个序列)。 + +当 ``use_varlen_attn = False`` 时,每个 token 在注意力计算阶段会关注整个序列。 + +max_position_embeddings +--------------------------------- + +当需要扩展模型上下文窗口的大小时,需要将 ``max_position_embeddings`` 设置为期望的上下文长度。 **需要保证 max_position_embeddings 不大于 max_length。**\ + +假设需要将 Llama2-7B 模型支持的上下文长度自 4k 拓展为 32k: + +1. 若训练数据集中存在较多长度接近 32k 的数据,则推荐 ``max_length = 32k, pack_to_max_length = False, use_varlen_attn = False, max_position_embeddings = 32k`` 这一配置 +2. 
若训练数据集中长度接近 32k 的数据量较少甚至没有时,则推荐 ``max_length = 32k, pack_to_max_length = True, use_varlen_attn = False, max_position_embeddings = 32k`` 这一配置
+
+sequence_parallel_size
+-------------------------------------------
+
+在使用序列并行策略训练超长序列时, ``sequence_parallel_size`` 个 GPUs 会共同计算一条长序列,而模型参数的更新频率则由 ``accumulative_counts`` 控制(见下节)。
+
+accumulative_counts
+----------------------------------------------
+
+用于控制模型参数更新的频率。假设需要在 N 块 GPUs 上执行 ``batch_size_per_device = 1, max_length = 128k`` 的训练策略:当设置序列并行维度为 ``sequence_parallel_size`` 后,128k 长度的序列会被切分为 ``sequence_parallel_size`` 份,分发给 ``sequence_parallel_size`` 个 GPUs 共同训练,此时 ``data_parallel_world_size`` 会变为原来的 :math:`\frac{1}{sequence\_parallel\_size}`;因此,为了保证训练的等价性, ``accumulative_counts`` 需要设置为原来的 ``sequence_parallel_size`` 倍。
diff --git a/data/xtuner/docs/zh_cn/acceleration/length_grouped_sampler.rst b/data/xtuner/docs/zh_cn/acceleration/length_grouped_sampler.rst
new file mode 100644
index 0000000000000000000000000000000000000000..72c5bc7e3a41f7ed8595fa8af5cc0fe0c71d34dc
--- /dev/null
+++ b/data/xtuner/docs/zh_cn/acceleration/length_grouped_sampler.rst
@@ -0,0 +1,67 @@
+.. _length_grouped_sampler:
+
+数据分组
+========================
+
+.. raw:: html
+
+   <!-- 图:随机采样(左)与按长度分组采样(右)的填充效率对比 -->
+ +生成式大模型(例如LLM)的训练数据往往是不定长的,这就导致同一批次(batch)内的数据长短不一。为实现并行化训练,一种常见的做法是将同一批次的数据填充到最长长度。然而,这一填充(Pad)操作会导致训练的低效。如上图,假设数据内各样本的长度分别为 +2、3、7、9,期望分为2个批次进行训练,那么如果使用默认的随机采样器(左侧),数据处理阶段会引入过多的填充数据,实际效率只有65.6%。 + +现阶段有两种技术方案可以解决 / 缓解这一问题(两者选其一即可,优先考虑 +**数据拼接技术**\ ): + +1. 利用 + **数据拼接技术**\ ,将多条数据拼接至训练支持的最大长度。这一做法可以确保同一批次内的数据长度完全一致,进而避免了填充数据所导致的训练效率降低。具体可参考 + \ :ref:`数据拼接文档 ` \ 。 + + :优点: 可以合并多个数据样本,显著降低训练 iter 数,加速效果好。 + + :缺点: 随机合并的多个数据样本间会互相影响,进而影响训练效果(实际影响程度未知);数据进行了合并,丢失了一定数据随机性。 + +2. (本文)利用 + **基于数据长度分组的采样器**\ ,在构建批次数据时,基于实际长度进行排序,确保同一批次内的数据长度尽可能相近,进而尽可能减少填充的长度。如上图右侧,利用该采样器后,同样的数据效率将提升至87.5%。 + + :优点: 每条数据依然独立存在(独立计算 + attention),避免数据拼接技术导致的数据样本间的互相影响;数据进行了分组,丢失了一定数据随机性。 + + :缺点: 在数据样本长度比较一致的情况下,加速效果一般。 + +使用 ``LengthGroupedSampler`` +----------------------------------------- + +XTuner 中基于数据长度分组的采样器 的实现在 +`这里 `__\ 。用户可以通过在配置文件中修改 +``train_dataloader`` 的 ``sampler`` 参数进行配置。以 +`internlm2_chat_7b_qlora_oasst1_512_e3 `__ +配置文件为例,其默认是使用随机的采样器,我们可以通过下列修改使其使用 +基于数据长度分组的采样器: + +.. code:: diff + + - from mmengine.dataset import DefaultSampler + + from xtuner.dataset.samplers import LengthGroupedSampler + + batch_size = 16 # per_device + accumulative_counts = 1 + + train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + - sampler=dict(type=DefaultSampler, shuffle=True), + + sampler=dict( + + type=LengthGroupedSampler, + + length_property='length', + + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +.. note:: + 其中,\ ``length_property`` + 需要传入获取数据集长度的“属性”,这一数值在通过 ``process_hf_dataset`` + 构建数据集时会自动设置为 + ``'length'``\ (因此,如果使用自定义的数据类,请确保这一属性的正确设置)。 diff --git a/data/xtuner/docs/zh_cn/acceleration/pack_to_max_length.rst b/data/xtuner/docs/zh_cn/acceleration/pack_to_max_length.rst new file mode 100644 index 0000000000000000000000000000000000000000..e08c109c3cf90be2aef4b87193ebea097bd041cb --- /dev/null +++ b/data/xtuner/docs/zh_cn/acceleration/pack_to_max_length.rst @@ -0,0 +1,70 @@ +.. _pack_to_max_length: + +数据拼接 +========================= + +简介 +--------- + +对于大型语言模型(LLM)的输入而言,“数据集拼接” 这一概念指的是将多个 token 序列拼接成一个单独的输入。大量的数据集都存在一个特点,即其长度分布严重偏向较短的序列,而 Transformers 模型接收固定长度的输入。因此,在模型训练过程中,通常需要将每条数据 "Pad" 至当前 batch 最长序列的长度,而 "Pad Token" 往往是某个特定的无意义的 token。 + +将多条数据打包在一起可以不再需要使用 "Pad Token" 进行无意义的填充,减少计算资源的浪费,同时还可以保持模型作为具有固定大小输入的静态图表示的优点。 + +下表展示了 InternLM2 7B 模型在 Alpaca 数据集上使用不同数据集拼接策略进行训练的速度对比,如表所示,“数据集拼接”会大幅度提升训练效率: + +.. list-table:: + :widths: 25 25 15 + :header-rows: 1 + + * - 拼接策略 + - 每秒处理 token 数 + - 加速比 + * - 不使用 + - 362.9 + - + * - 拼接至 2k + - 2677.1 + - 7.38x + * - 拼接至 4k + - 3124.3 + - 8.61x + * - 拼接至 8k + - 3173.9 + - 8.76x + * - 拼接至 16k + - 2864.4 + - 7.89x + * - 拼接至 32k + - 2965.4 + - 8.17x + +使用数据拼接 +--------------------------- + +XTuner 中提供的 config 文件中默认使用了“数据集拼接”这一功能,可以通过设置 ``max_length`` 字段来调整数据拼接长度。例如可通过以下方式将拼接长度调整为 32k : + +.. code-block:: diff + + ####################################################################### + # PART 1 Settings # + ####################################################################### + - max_length = 2048 + + max_length = 32768 + pack_to_max_length = True + + ####################################################################### + # PART 3 Dataset & Dataloader # + ####################################################################### + train_dataset = dict( + max_length=max_length, + pack_to_max_length=pack_to_max_length, + ...) + +.. 
tip::
+   若不想使用数据拼接,在 config 中将 ``pack_to_max_length`` 设为 False 即可,
+   此时 config 中的 ``max_length`` 字段表示单条数据最长的 token 数,整个 batch 会被 pad 成当前 batch 内最长的一条数据的长度。
+
+.. tip::
+   在不使用数据拼接策略时,XTuner 还提供了一种数据集采样策略 (``LengthGroupedSampler``),可以保证在一个 batch 中的数据长度尽可能接近,
+   以减少 Pad 对计算资源的浪费。详细用法请参考
+   \ :ref:`LengthGroupedSampler 文档 <length_grouped_sampler>` \ 。
diff --git a/data/xtuner/docs/zh_cn/acceleration/train_extreme_long_sequence.rst b/data/xtuner/docs/zh_cn/acceleration/train_extreme_long_sequence.rst
new file mode 100644
index 0000000000000000000000000000000000000000..65b364ad89494f617ced5f3f7aa480b74d881bf7
--- /dev/null
+++ b/data/xtuner/docs/zh_cn/acceleration/train_extreme_long_sequence.rst
@@ -0,0 +1,322 @@
+========
+序列并行
+========
+
+在生成式 AI 领域,长文档摘要和视频生成等任务都需要模型具有超长上下文的能力。
+如何训练超长上下文的模型,既是生成式 AI 算法领域的研究热点,也是 AI Infra 领域的难点。
+随着 AI 模型参数量的不断增大,为了能够训练超长上下文,通常需要使用一些复杂的并行策略,如 Nvidia Megatron、DeepSpeed Ulysses 等。这些工作虽然解决了超长上下文的训练问题,但需要开发者具有一定的 AI Infra 知识,对生成式 AI 的研究人员很不友好。
+为了让研究人员能够更加便捷地训练超长上下文模型,促进生成式 AI 领域的发展,XTuner 开发了一套超长上下文训练解决方案:
+
+- 支持全量训练 **超过百万个 tokens** 的超长序列
+- 支持 **百 B 级** 模型训练:XTuner 的序列并行不仅支持长序列训练,还可结合 ZeRO3 显存优化策略训练大尺寸模型
+- 开箱即用:可直接训练 Transformers 算法库内和 HF Hub 上的模型
+- 完全通用的序列并行 API 抽象
+
+.. raw:: html
+
+   <!-- 图:XTuner -->
+ + +优化目标 +======== + +尽管开源模型支持的序列长度不断被刷新,但主流的显存优化策略(如 ZeRO 系列)却不足以解决大模型、长序列训练问题。 +如表 1 所示,使用 ZeRO-3 显存优化策略训练超长序列时,单纯增加 GPU 数量无法解决超长序列带来的 OOM 问题; +这是因为,ZeRO-3 只能优化模型参数和优化器状态占用的显存, **超长训列训练过程中的显存开销主要来自激活值,而非模型参数和优化器状态**。 + + +.. list-table:: **表 1 不同序列长度时,使用 ZeRO-3 训练 128k 上下文 yi-34B 模型的训练情况** + :widths: 25 15 10 15 25 + :header-rows: 1 + + * - SP + - Model + - ZeRO + - GPUs + - TGS + * - 1 + - yi-34B + - ZeRO-3 + - 16 + - OOM + * - 1 + - yi-34B + - ZeRO-3 + - 32 + - OOM + * - 1 + - yi-34B + - ZeRO-3 + - 64 + - OOM + * - 8 + - yi-34B + - ZeRO-3 + - 16 + - 227 + + +为解决长序列训练过程中的显存问题,Megatron-LM 团队和 DeepSpeed 团队分别提出了两种序列并行算法,通过对长序列进行切分的方法来降低单 GPU 上计算的序列长度。XTuner 中的序列并行设计思路参考了 DeepSpeed 的工作 `DeepSpeed Ulysses `_,并加以优化, **以实现一键开启序列并行策略** 。三者的对比如下: + +.. list-table:: **表 2 Megatron-LM、DeepSpeed Ulysses 与 XTuner 的序列并行实现对比** + :widths: 50 50 50 + :header-rows: 1 + + * - + - Attention 通信量 + - 代码侵入 + * - Megatron-LM + - O(N) + - 较高 + * - DeepSpeed Ulysses + - O(N / P) + - 较高 + * - XTuner + - O(N / P) + - 无 + + + +支持情况 +======== + +.. list-table:: + :widths: 25 25 + :header-rows: 1 + + * - 模型 + - 序列并行支持情况 + * - baichuan 1/2 + - ❌ + * - chatglm 2/3 + - ❌ + * - deepseek + - ✅ + * - gemma + - ❌ + * - internlm 2 + - ✅ + * - llama 2 + - ✅ + * - mistral + - ✅ + * - qwen 1/1.5 + - ✅ + * - starcoder + - ❌ + * - yi + - ✅ + * - zephyr + - ✅ + +其他模型的序列并行功能尚在开发中。 + +训练 +==== + +.. note:: + 使用序列并行策略需要首先安装 `flash attn `_ (参考 `flash attn 安装 `_ ,安装过程需要 cuda) + +步骤1:修改 config +------------------ + +可以通过运行以下命令查看 XTuner 提供的训练不同模型的配置文件: + +.. code-block:: console + + $ xtuner list-cfg + +针对任一 config 修改 `sequence_parallel_size` 即可使用序列并行策略: + +.. code-block:: diff + + # parallel + - sequence_parallel_size = 1 + + sequence_parallel_size = 4 # take `sequence_parallel_size = 4`` as an example + +另外,若需要进一步拓展模型的长文本处理能力,需要进一步修改 config 中的 `max_position_embeddings` 字段。例如需要将模型的上下文长度拓展为 64K 时,可进行如下修改: + +.. code-block:: diff + + + max_position_embeddings = 65536 + + ####################################################################### + # PART 2 Model & Tokenizer # + ####################################################################### + model = dict( + type=SupervisedFinetune, + + max_position_embeddings = max_position_embeddings, + ...) + +步骤2:开始训练 +---------------- + +需要使用 DeepSpeed 进行训练: + +.. code-block:: console + + $ # torchrun + $ NPROC_PER_NODE=${GPU_NUM} xtuner train ${CONFIG_PATH} --deepspeed deepspeed_zero2 + $ # slurm + $ srun ${SRUN_ARGS} xtuner train ${CONFIG_PATH} --launcher slurm --deepspeed deepspeed_zero2 + + +.. tip:: + ``${CONFIG_PATH}`` 为步骤 1 中修改得到的 config 文件路径 + +.. tip:: + 可根据实际情况选择使用不同的 zero 策略 + + +实现方案 +========= + +XTuner 中的序列并行设计思路参考了 DeepSpeed 的工作 `DeepSpeed Ulysses `_,并加以优化,以达到直接基于 transformers 算法库或 Huggingface Hub 上的开源模型训练 1M 以上超长序列的目标。 + +.. raw:: html + +

+   <!-- 图:XTuner 序列并行实现方案 -->
+
+.. raw:: html
+
+   <p align="center">图 1 序列并行实现方案</p>

+ +图 1 展示了序列并行策略的实现方案。由于 Transformer 结构较为规整,除 attention 计算外,其他计算过程中 token 之间不会互相影响(即每个 token 的计算是独立的),这一条件为序列并行提供了有利条件。上图展示了序列并行的核心设计。设由 P 个 GPUs 共同计算一个长度为 N 的长序列,在 Attention 计算的第一阶段,长度为 N / P 的子序列会通过线性层投影为 Query、Key、Value。接下来, QKV Tensor 会在参与序列并行计算的多个 GPUs 之间通过高度优化的 all-to-all 通信算子汇聚,得到序列长度为 N ,但更少注意力头的子序列。注意力计算后,通过另一个 all-to-all 通信算子将其转换为长度为 N / P 的子序列,进行后续计算。伪代码如下所示。 + +.. code-block:: python + + # Pseudo code for an Attention Layer + # Input: hidden_states with shape (bs, seq_len, dim) + # Output: attn_out with shape (bs, seq_len, dim) + def attn_forward(hidden_states): + q, k, v = qkv_proj(hidden_states) + q, k, v = reshape(q, k, v) # (bs, q_len, dim) -> (bs, q_len, nhead, hdim) + q, k = apply_rotary_pos_emb(q, k, cos, sin) + sp_size = get_sequence_parallel_world_size() + # (bs, q_len, nhead, hdim) -> (bs, q_len * sp_size, nhead / sp_size, hdim) + q, k, v = all_to_all(q, k, v, sp_size) + attn_out = local_attn(q, k, v) + # (bs, q_len * sp_size, nhead / sp_size, hdim) -> (bs, q_len, nhead, hdim) + attn_out = all_to_all(attn_out) + attn_out = reshape(attn_out) # (bs, q_len, nhead, hdim) -> (bs, q_len, dim) + attn_out = o_proj(attn_out) + return attn_out + + +序列并行 API +============= + +为了方便在其他 repo 中使用序列并行策略,XTuner 中抽象出了序列并行所必须的五个 API 接口: + +- 序列并行分布式环境初始化 (init_sequence_parallel) +- 适配序列并行的 Data Sampler (SequenceParallelSampler) +- 数据 Pad 与切分 (pad_for_sequence_parallel, split_for_sequence_parallel) +- 适配序列并行的 Attention (dispatch_modules) +- reduce loss 以正确打印训练损失 (reduce_sequence_parallel_loss) + +分布式环境初始化 +------------------- + +由于序列并行算法会将长序列切分为 `sequence_parallel_world_size` 块,并将每个子序列分发给对应的 GPU 独立进行计算。因此需要在训练开始前初始化序列并行分布式环境,以指定哪几块 GPU 共同负责一个长序列输入的计算。 + +一个 `sequence_parallel_world_size = 4` 的示例如下: + +.. code-block:: python + + # We have to initialize the distributed training environment first. + # Here is an example when training on slurm scheduler + # from xtuner.parallel.sequence import init_dist + # init_dist('slurm', 'nccl', init_backend='deepspeed') + from xtuner.parallel.sequence import init_sequence_parallel + sequence_parallel_world_size = 4 + init_sequence_parallel(sequence_parallel_world_size) + +.. tip:: + 上述过程在 ``xtuner/engine/_strategy/deepspeed.py`` 中实现。 + +Data Sampler +-------------- + +在使用序列并行后,Dataloader 的采样策略需要进一步调整。例如当 `sequence_parallel_world_size = 4` 时,4 块 GPU 从 Dataloader 拿到的数据需要是完全一样的。 + +在构建 Dataloader 时搭配 XTuner 中提供的 `SequenceParallelSampler` 使用即可: + +.. code-block:: python + + from xtuner.parallel.sequence import SequenceParallelSampler + dataloader = DataLoader( + train_dataset, sampler=SequenceParallelSampler(train_dataset), + **other_dataloader_params) + +数据 Pad 与切分 +--------------- + +由于每条训练数据的长度可能不尽相同,我们需要将数据进行 Pad 以使得序列长度可以被 `sequence_parallel_world_size` 整除,这样一条长数据才能被均等地分发给不同的 GPU 上。 + +训练过程中需要被 Pad 的 Tensor 往往有 input_ids, labels, position_ids, attention_mask 四个,pad 的过程可以通过以下方式实现: + +.. code-block:: python + + from xtuner.parallel.sequence import pad_for_sequence_parallel + input_ids, labels, position_ids, attention_mask = pad_for_sequence_parallel( + input_ids, labels, position_ids, attention_mask) + +如果训练过程用不到 attention_mask,那么可以: + +.. code-block:: python + + input_ids, labels, position_ids, _ = pad_for_sequence_parallel( + input_ids, labels, position_ids) + +Pad 后,我们需要对长序列均等切分: + +.. code-block:: python + + from xtuner.parallel.sequence import split_for_sequence_parallel + # attention mask should not be split + input_ids, labels, position_ids = split_for_sequence_parallel( + input_ids, labels, position_ids) + +.. 
tip:: + 以上两步在 ``xtuner/dataset/collate_fns/default_collate_fn.py`` 中实现。 + +Attention +----------- + +在 Attention 的计算过程中,序列中的不同 token 是不能独立运算的,但不同的 attention head 之间的计算却是独立的。因此,如第一节所述,需要在计算 Attention 前后(即 qkv_proj 后和 o_proj 前)分别插入一个 all-to-all 操作。 + +XTuner 提供了 dispatch_modules 接口以支持修改模型 Attention 的计算方式: + +.. code-block:: python + + from xtuner.model.modules import dispatch_modules + model: AutoModelForCausalLM + dispatch_modules(model) + +.. tip:: + 上述过程在 ``xtuner/model/sft.py`` 中实现。 + +Reduce Loss +------------- + +这个 API 对于保证训练的正确性不是必须的,但对于观测模型训练状态,打印训练 loss 是非常有用的。 + +.. code-block:: python + + from xtuner.parallel.sequence import reduce_sequence_parallel_loss + outputs = llm(input_ids=input_ids, labels=labels, **kwargs) + num_tokens_per_rank = (labels != -100).sum() + # Suppose sequence parallel world size equals to 4, + # losses on rank0, rank1, rank2, rank3 are different. + loss = reduce_sequence_parallel_loss(outputs.loss, num_tokens_per_rank) + # After loss reduction, losses on rank0, rank1, rank2, rank3 are the same. + +.. tip:: + 上述过程在 ``xtuner/model/sft.py`` 中实现。 diff --git a/data/xtuner/docs/zh_cn/acceleration/train_large_scale_dataset.rst b/data/xtuner/docs/zh_cn/acceleration/train_large_scale_dataset.rst new file mode 100644 index 0000000000000000000000000000000000000000..f0925f050833f65442262ac7933fecbcd2775436 --- /dev/null +++ b/data/xtuner/docs/zh_cn/acceleration/train_large_scale_dataset.rst @@ -0,0 +1,205 @@ +================ +超大规模数据集 +================ + +在线数据处理 +=============== + +XTuner +默认采用在线数据预处理的策略,这样可以降低用户使用门槛,以达到“开箱即用”的要求。然而,在线数据处理的弊端在于,当数据集过大时,数据处理过程耗时相对较多,可能会触发 +``nccl timeout`` 报错。 + +为什么会出现 ``nccl timeout``? +------------------------------------ + +使用 XTuner 训练模型时,在训练开始前会首先通过 +`process_hf_dataset `__ +函数对整个训练集进行数据预处理,得到模型训练所需要的 ``input_ids``, +``labels`` 等数据。 + +由于数据预处理操作是一个 CPU 任务,因此在分布式训练过程中,如果多个 rank +各自执行预处理任务,会造成 CPU 资源抢占,拖慢数据处理速度。因此 XTuner +中采用的策略是统一由 rank0 处理,完成后通过 +``torch.distributed.broadcast_object_list`` 接口广播至其他 +rank。这样,不同 rank 就会得到一份完全一样的数据集。 + +然而,当使用 ``nccl`` +通信策略时,\ ``torch.distributed.broadcast_object_list`` +广播操作的超时时间与 ``nccl`` 通信超时时间相同(默认为 30 +分钟)。当训练数据集较大时,rank0 可能无法在 30 +分钟内处理完全部数据,这样就会导致 ``nccl timeout`` 报错。若修改 +``nccl`` 通信超时时间,则除数据预处理外的其他涉及 ``nccl`` +通信的超时时间设置都会被修改。 + +解决方案 +----------- + +为解决上述问题,可以在训练开始前设置环境变量 ``XTUNER_DATASET_TIMEOUT`` +为一个更大的数(默认为 30 分钟超时,可以酌情将其调大,如:120): + +.. code:: console + + $ # On multiple GPUs(torchrun) + $ XTUNER_DATASET_TIMEOUT=120 NPROC_PER_NODE=${GPU_NUM} xtuner train ${CONFIG_NAME_OR_PATH} --deepspeed deepspeed_zero1 + $ # On multiple GPUs(slurm) + $ XTUNER_DATASET_TIMEOUT=120 srun ${SRUN_ARGS} xtuner train ${CONFIG_NAME_OR_PATH} --launcher slurm --deepspeed deepspeed_zero1 + +.. note:: + 该超时设置只针对数据预处理阶段的广播操作生效。 + +离线数据处理 +=============== + +当训练数据量非常大时,每次训练的时候都先在线处理数据可能会极为耗时。我们可以先对原始数据进行离线处理并保存至本地,随后的多次训练可以读入本地离线处理好的数据后直接开始训练。 + +第一小节介绍如何针对纯语言模型训练所使用的文本数据进行离线处理,第二小节将会介绍如何离线处理 +Llava 训练数据。 + +.. warning:: + + 当切换了 tokenizer 或修改了数据处理中的超参数(如:单条数据的最大长度 ``max_length`` 等)时,需要重新离线处理数据,否则会导致训练报错。 + +语言模型训练数据离线处理 +------------------------- + +为便于介绍,本节以 +`internlm2_7b_qlora_alpaca_e3.py `__ +配置文件为基础,介绍如何离线处理数据集,并使用离线处理的数据集进行训练。 + +步骤 1:导出目标 config 文件 +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +``internlm2_7b_qlora_alpaca_e3.py`` 是 XTuner 提供的使用 QLora 算法在 +Alpaca 数据集上微调 Internlm2-7B 模型的配置文件。通过以下命令可以将该 +config 拷贝至当前目录下: + +.. code:: + + xtuner copy-cfg internlm2_7b_qlora_alpaca_e3 . + +.. 
tip:: + 执行以上命令后,当前目录下会新增一个名为 + ``internlm2_7b_qlora_alpaca_e3_copy.py`` 的配置文件(与 + `internlm2_7b_qlora_alpaca_e3.py `__ + 完全一样)。 + +步骤 2:离线处理数据集 +^^^^^^^^^^^^^^^^^^^^^^ + +使用以下命令可离线预处理原始数据: + +.. code:: + + python xtuner/tools/process_untokenized_datasets.py \ + internlm2_7b_qlora_alpaca_e3_copy.py \ + --save-folder /folder/to/save/processed/dataset + +.. note:: + 这里的第一个参数为 Step 1 中修改过的 config + 文件,第二个参数为预处理过的数据集的保存路径。 + +.. note:: + + 上述命令会在 internlm2_7b_qlora_alpaca_e3_copy.py + 同级目录下新建一个 internlm2_7b_qlora_alpaca_e3_copy_modified.py + 文件,后续训练中需要使用该配置文件,而非 + ``internlm2_7b_qlora_alpaca_e3_copy.py`` 。 + +步骤 3:启动训练 +^^^^^^^^^^^^^^^^ + +可以通过以下命令启动训练: + +.. code:: console + + $ # On multiple GPUs(torchrun) + $ NPROC_PER_NODE=${GPU_NUM} xtuner train internlm2_7b_qlora_alpaca_e3_copy_modified.py --deepspeed deepspeed_zero1 + $ # On multiple GPUs(slurm) + $ srun ${SRUN_ARGS} xtuner train internlm2_7b_qlora_alpaca_e3_copy_modified.py --launcher slurm --deepspeed deepspeed_zero1 + + +.. note:: + 训练中需要使用步骤 2 新生成的 + internlm2_7b_qlora_alpaca_e3_copy_modified.py 文件,而非 + internlm2_7b_qlora_alpaca_e3_copy.py 文件。 + +Llava 训练数据离线处理 +--------------------------- + +为便于介绍,本节以 +`llava_internlm2_chat_7b_clip_vit_large_p14_336_e1_gpu8_pretrain.py `__ +配置文件为基础,介绍如何离线处理数据集,并使用离线处理的数据集进行训练。 + + +步骤 1:导出目标 config 文件 +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +``llava_internlm2_chat_7b_clip_vit_large_p14_336_e1_gpu8_pretrain.py`` +是 XTuner 提供的基于 internlm2-chat-7b 训练 Llava +模型配置文件。可以通过以下命令将该 config 拷贝至当前目录下: + +.. code:: console + + $ xtuner copy-cfg llava_internlm2_chat_7b_clip_vit_large_p14_336_e1_gpu8_pretrain . + +.. note:: + 执行以上命令后,当前目录下会新增一个名为 + ``llava_internlm2_chat_7b_clip_vit_large_p14_336_e1_gpu8_pretrain_copy.py`` + 的配置文件(与 + `llava_internlm2_chat_7b_clip_vit_large_p14_336_e1_gpu8_pretrain.py `__ + 完全一样)。 + + + +步骤 2:离线处理数据集 +^^^^^^^^^^^^^^^^^^^^^^ + +使用以下命令可离线预处理原始数据: + +.. code:: console + + $ python xtuner/tools/process_untokenized_llava_data.py llava_internlm2_chat_7b_clip_vit_large_p14_336_e1_gpu8_pretrain_copy.py \ + $ --save-folder /folder/to/save/processed/llava/data + +处理后可以读取离线处理后的数据集查看是否符合预期: + +.. code:: python + + from datasets import load_from_disk + ds = load_from_disk('/folder/to/save/processed/llava/data') + print(ds) + +步骤 3:修改 config 文件 +^^^^^^^^^^^^^^^^^^^^^^^^ + +修改 config 文件以便程序运行时直接读取预处理的 Llava 数据: + +.. code:: diff + + ####################################################################### + # PART 3 Dataset & Dataloader # + ####################################################################### + llava_dataset = dict( + - data_path=data_path, + - tokenizer=tokenizer, + + offline_processed_text_folder=/folder/to/save/processed/llava/data + ...) + +.. note:: + 其中,\ ``/folder/to/save/processed/llava/data`` 为步骤 2 + 保存的离线处理数据路径。 + +步骤 4:开始训练 +^^^^^^^^^^^^^^^^ + +使用步骤 3 修改得到的 config 训练即可: + +.. 
code:: console + + $ # On a single GPU + $ xtuner train llava_internlm2_chat_7b_clip_vit_large_p14_336_e1_gpu8_pretrain_copy.py --deepspeed deepspeed_zero2 + + $ # On multiple GPUs (torchrun) + $ NPROC_PER_NODE=${GPU_NUM} xtuner train llava_internlm2_chat_7b_clip_vit_large_p14_336_e1_gpu8_pretrain_copy.py --deepspeed deepspeed_zero2 + $ # On multiple GPUs (slurm) + $ srun ${SRUN_ARGS} xtuner train llava_internlm2_chat_7b_clip_vit_large_p14_336_e1_gpu8_pretrain_copy.py --launcher slurm --deepspeed deepspeed_zero2 diff --git a/data/xtuner/docs/zh_cn/acceleration/varlen_flash_attn.rst b/data/xtuner/docs/zh_cn/acceleration/varlen_flash_attn.rst new file mode 100644 index 0000000000000000000000000000000000000000..2667394234cf539d5faa497edaa3620473ecb69b --- /dev/null +++ b/data/xtuner/docs/zh_cn/acceleration/varlen_flash_attn.rst @@ -0,0 +1,162 @@ +=============================================== +Varlen Attention +=============================================== + +\ :ref:`数据集拼接 ` \ 一节中,我们讨论了“数据集拼接”策略对模型训练效率的显著提升。 +理论上,数据集拼接可能会对注意力(Attention)机制的计算过程产生影响。这是因为,在未采用数据拼接策略的情况下, +每条数据在计算注意力时仅与自身相关联。然而,当采用数据拼接策略后,由多条短数据拼接成的长数据在计算注意力时会相互关联。 +以一个由若干短数据拼接成长度为 4096 的数据为例,如果不采用变长注意力机制,在注意力计算阶段,每个 token 将会关注全部 4096 个 tokens ,如图左侧所示。 + +相反,在使用变长注意力机制的情况下,每个 token 在注意力计算阶段仅会关注其所在短数据中的所有 tokens ,如图右侧所示。因此, **变长注意力机制确保了无论是否采用“数据集拼接”策略,模型训练的行为保持一致性。** + +.. raw:: html + +

+   <!-- 图:变长注意力计算原理(拷贝自 https://github.com/InternLM/InternEvo/blob/develop/doc/usage.md) -->
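+
+下面用一段示意代码(并非 XTuner 源码,序列长度为假设值)展示变长注意力所对应的注意力掩码:打包后的长序列在掩码上呈块对角结构,且每个块内部仍保持因果性。
+
+.. code-block:: python
+
+   import torch
+
+   # 假设打包序列由三条短数据拼接而成,长度分别为 2、3、4
+   seq_lens = [2, 3, 4]
+   total = sum(seq_lens)
+
+   # 每个 token 只关注其所在短数据内、位置不晚于自身的 tokens
+   mask = torch.zeros(total, total, dtype=torch.bool)
+   start = 0
+   for n in seq_lens:
+       mask[start:start + n, start:start + n] = torch.ones(
+           n, n, dtype=torch.bool).tril()
+       start += n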

+ +支持列表 +===================== + +.. note:: + + 使用变长注意力需要首先安装 `flash attn `_ ( + 参考 `flash attn 安装 `_ ) + +.. list-table:: + :widths: 25 50 + :header-rows: 1 + + * - 模型 + - Flash Attention 支持情况 + * - baichuan 1/2 + - ❌ + * - chatglm 2/3 + - ❌ + * - deepseek + - ✅ + * - gemma + - ❌ + * - internlm 1/2 + - ✅ + * - llama 2 + - ✅ + * - mistral + - ✅ + * - qwen 1/1.5 + - ❌ + * - starcoder + - ❌ + * - yi + - ✅ + * - zephyr + - ✅ + +使用变长注意力机制训练 +========================= + +步骤 1:安装 flash_attn +-------------------------- + +XTuner 中实现的变长注意力需要依赖 Flash Attention 2,可通过以下命令安装(需要 cuda): + +.. code:: console + + $ MAX_JOBS=4 pip install flash-attn --no-build-isolation + +.. tip:: + 更多安装方式请参考 `flash attn 安装 `_ + +步骤 2:查找模板 config +--------------------------- + +XTuner 提供多个开箱即用的配置文件,用户可以通过下列命令查看: + +.. code-block:: console + + $ xtuner list-cfg -p internlm + +.. tip:: + ``-p`` 为模糊查找,若想训练其他模型,可以修改 ``internlm`` 为 XTuner 支持的其他模型名称。 + +步骤 3:复制 config 文件 +----------------------------- + +导出需要使用的 config : + +.. code-block:: bash + + xtuner copy-cfg ${CONFIG_NAME} ${SAVE_DIR} + +例如通过下列命令将名为 ``internlm_7b_full_oasst1_e3`` 的 config 导出至当前目录下: + +.. code-block:: console + + $ xtuner copy-cfg internlm_7b_full_oasst1_e3 . + +.. note:: + + 当前目录下会存在一个新 config + ``internlm_7b_full_oasst1_e3_copy.py`` 。 + +步骤 4:修改 config 文件 +------------------------------- + +将步骤 3 复制得到的 config 文件中的 ``use_varlen_attn`` 属性由 False 改为 True 即可激活变长注意力训练机制: + +.. code-block:: diff + + ... + ####################################################################### + # PART 1 Settings # + ####################################################################### + # Model + pretrained_model_name_or_path = 'internlm/internlm-7b' + - use_varlen_attn = False + + use_varlen_attn = True + ... + +.. warning:: + + 当设置 ``use_varlen_attn = True`` 后, ``batch_size = 2, max_length = 2k`` 的配置与 ``batch_size = 1, max_length = 4k`` 的配置训练行为是近似的, + 因此 XTuner 目前只支持了 ``batch_size = 1`` 的情况。另外, ``use_varlen_attn = True`` 时 ``pack_to_max_length`` 也需设置为 True。 + +步骤 5:开始训练 +----------------------- + +.. code-block:: bash + + xtuner train ${CONFIG_NAME_OR_PATH} + +例如,我们可以基于步骤 4 中修改得到的 `internlm_7b_full_oasst1_e3_copy.py` 进行训练: + +.. code-block:: console + + $ # On a single GPU + $ xtuner train internlm_7b_full_oasst1_e3_copy.py --deepspeed deepspeed_zero1 + $ # On multiple GPUs(torchrun) + $ NPROC_PER_NODE=${GPU_NUM} xtuner train internlm_7b_full_oasst1_e3_copy.py --deepspeed deepspeed_zero1 + $ # On multiple GPUs(slurm) + $ srun ${SRUN_ARGS} xtuner train internlm_7b_full_oasst1_e3_copy.py --launcher slurm --deepspeed deepspeed_zero1 + +.. tip:: + ``--deepspeed`` 表示使用 `DeepSpeed `_ 🚀 来优化训练过程。若未安装 DeepSpeed ,可通过 ``pip install deepspeed>=0.12.3`` 进行安装。XTuner 内置了多种策略,包括 ZeRO-1、ZeRO-2、ZeRO-3 等。如果用户期望关闭此功能,请直接移除此参数。 + +步骤 6:模型转换 +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +将保存的 PTH 模型(如果使用的DeepSpeed,则将会是一个文件夹)转换为 HuggingFace 模型: + +.. code-block:: bash + + xtuner convert pth_to_hf ${CONFIG_NAME_OR_PATH} ${PTH} ${SAVE_PATH} + +对应上面的例子,模型转换脚本为: + +.. code-block:: bash + + xtuner convert pth_to_hf internlm_7b_full_oasst1_e3_copy.py ${PTH} ${SAVE_PATH} + +.. 
note:: + 其中 ``${PTH}`` 为训练权重保存的路径,若训练时未指定,默认保存在 ``./work_dirs/internlm_7b_full_oasst1_e3_copy`` 路径下。 diff --git a/data/xtuner/docs/zh_cn/chat/agent.md b/data/xtuner/docs/zh_cn/chat/agent.md new file mode 100644 index 0000000000000000000000000000000000000000..c3b0d7a6fad8dbd585fd56bf3062d751d1e46866 --- /dev/null +++ b/data/xtuner/docs/zh_cn/chat/agent.md @@ -0,0 +1 @@ +# 智能体模型对话 diff --git a/data/xtuner/docs/zh_cn/chat/llm.md b/data/xtuner/docs/zh_cn/chat/llm.md new file mode 100644 index 0000000000000000000000000000000000000000..336e1b014eadd438e48b0c7cfa8ed06213c55896 --- /dev/null +++ b/data/xtuner/docs/zh_cn/chat/llm.md @@ -0,0 +1 @@ +# 语言模型对话 diff --git a/data/xtuner/docs/zh_cn/chat/lmdeploy.md b/data/xtuner/docs/zh_cn/chat/lmdeploy.md new file mode 100644 index 0000000000000000000000000000000000000000..36d9bf3f9a08dbbeb757e8c5769abe941d9345ec --- /dev/null +++ b/data/xtuner/docs/zh_cn/chat/lmdeploy.md @@ -0,0 +1 @@ +# 使用 LMDeploy 优化推理速度 diff --git a/data/xtuner/docs/zh_cn/chat/vlm.md b/data/xtuner/docs/zh_cn/chat/vlm.md new file mode 100644 index 0000000000000000000000000000000000000000..3a84a3c7eef684d0d53a425cf5dbe48e5c57b2cc --- /dev/null +++ b/data/xtuner/docs/zh_cn/chat/vlm.md @@ -0,0 +1 @@ +# 视觉-语言模型对话 diff --git a/data/xtuner/docs/zh_cn/conf.py b/data/xtuner/docs/zh_cn/conf.py new file mode 100644 index 0000000000000000000000000000000000000000..f64d7ea52eb78d8a933a2cf08dd9534f5726725b --- /dev/null +++ b/data/xtuner/docs/zh_cn/conf.py @@ -0,0 +1,109 @@ +# Configuration file for the Sphinx documentation builder. +# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. + +import os +import sys + +from sphinx.ext import autodoc + +sys.path.insert(0, os.path.abspath('../..')) + +# -- Project information ----------------------------------------------------- + +project = 'XTuner' +copyright = '2024, XTuner Contributors' +author = 'XTuner Contributors' + +# The full version, including alpha/beta/rc tags +version_file = '../../xtuner/version.py' +with open(version_file) as f: + exec(compile(f.read(), version_file, 'exec')) +__version__ = locals()['__version__'] +# The short X.Y version +version = __version__ +# The full version, including alpha/beta/rc tags +release = __version__ + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.napoleon', + 'sphinx.ext.viewcode', + 'sphinx.ext.intersphinx', + 'sphinx_copybutton', + 'sphinx.ext.autodoc', + 'sphinx.ext.autosummary', + 'myst_parser', + 'sphinxarg.ext', +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. 
+exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + +# Exclude the prompt "$" when copying code +copybutton_prompt_text = r'\$ ' +copybutton_prompt_is_regexp = True + +language = 'zh_CN' + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'sphinx_book_theme' +html_logo = '_static/image/logo.png' +html_theme_options = { + 'path_to_docs': 'docs/zh_cn', + 'repository_url': 'https://github.com/InternLM/xtuner', + 'use_repository_button': True, +} +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +# html_static_path = ['_static'] + +# Mock out external dependencies here. +autodoc_mock_imports = [ + 'cpuinfo', + 'torch', + 'transformers', + 'psutil', + 'prometheus_client', + 'sentencepiece', + 'vllm.cuda_utils', + 'vllm._C', + 'numpy', + 'tqdm', +] + + +class MockedClassDocumenter(autodoc.ClassDocumenter): + """Remove note about base class when a class is derived from object.""" + + def add_line(self, line: str, source: str, *lineno: int) -> None: + if line == ' Bases: :py:class:`object`': + return + super().add_line(line, source, *lineno) + + +autodoc.ClassDocumenter = MockedClassDocumenter + +navigation_with_keys = False diff --git a/data/xtuner/docs/zh_cn/dpo/modify_settings.md b/data/xtuner/docs/zh_cn/dpo/modify_settings.md new file mode 100644 index 0000000000000000000000000000000000000000..2365be25cb78e47376bbb5298be1834c25cfbd94 --- /dev/null +++ b/data/xtuner/docs/zh_cn/dpo/modify_settings.md @@ -0,0 +1,83 @@ +## 修改 DPO 训练配置 + +本章节仅介绍与 DPO(Direct Preference Optimization)训练相关的配置参数,更多 XTuner 配置文件的细节,请参考[修改训练配置](https://xtuner.readthedocs.io/zh-cn/latest/training/modify_settings.html) + +### 损失函数 + +在 DPO 训练中,你可以根据需求选择不同的损失函数类型。XTuner 提供了多种损失函数选项,如 `sigmoid`、`hinge`、`ipo` 等。可以通过设置 `dpo_loss_type` 参数来选择使用的损失函数类型。 + +此外,你还可以通过调整 `loss_beta` 参数来控制损失函数中的温度系数。同时,`label_smoothing` 参数可以用于平滑标签。 + +```python +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +dpo_loss_type = 'sigmoid' # One of ['sigmoid', 'hinge', 'ipo', 'kto_pair', 'sppo_hard', 'nca_pair', 'robust'] +loss_beta = 0.1 +label_smoothing = 0.0 +``` + +### 修改模型 + +用户可以修改 `pretrained_model_name_or_path` 对预训练模型进行修改。 + +```python +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-chat-1_8b-sft' +``` + +### 训练数据 + +在 DPO 训练中,你可以通过 `max_length` 来指定单个样本序列的最大 token 数,XTuner 会自动对数据进行截断或是填充。 + +```python +# Data +max_length = 2048 +``` + +在配置文件中,我们通过 `train_dataset` 字段来指定训练数据集,你可以通过 `dataset` 字段指定数据集的加载方式,通过 `dataset_map_fn` 字段指定数据集的映射函数。 + +```python +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataset = dict( + type=build_preference_dataset, + dataset=dict(type=load_dataset, path='mlabonne/orpo-dpo-mix-40k'), + tokenizer=tokenizer, + max_length=max_length, + 
dataset_map_fn=orpo_dpo_mix_40k_map_fn, + is_dpo=True, + is_reward=False, + reward_token_id=-1, + num_proc=32, + use_varlen_attn=use_varlen_attn, + max_packed_length=max_packed_length, + shuffle_before_pack=True, +) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict( + type=preference_collate_fn, use_varlen_attn=use_varlen_attn)) +``` + +上述配置中,我们使用了 `load_dataset` 来加载 huggingface 上的 `mlabonne/orpo-dpo-mix-40k` 数据集,使用 `orpo_dpo_mix_40k_map_fn` 作为数据集映射函数。 + +关于如何处理数据集以及如何编写数据集映射函数,请参考[偏好数据集章节](../reward_model/preference_data.md)。 + +### 加速训练 + +在使用偏好数据训练时,我们推荐您开启[变长注意力机制](https://xtuner.readthedocs.io/zh-cn/latest/acceleration/varlen_flash_attn.html), 以避免单个偏好内的 chosen 和 rejected 的样本长度差异造成的显存浪费。你可以通过 `use_varlen_attn=True` 来开启变长注意力机制。 + +XTuner 中还支持了大量的训练加速方法,关于它们的使用方法,请参考[加速策略章节](https://xtuner.readthedocs.io/zh-cn/latest/acceleration/hyper_parameters.html)。 diff --git a/data/xtuner/docs/zh_cn/dpo/overview.md b/data/xtuner/docs/zh_cn/dpo/overview.md new file mode 100644 index 0000000000000000000000000000000000000000..d3c3a7aadbe91cdcac94e339cee2ffa27544bcf4 --- /dev/null +++ b/data/xtuner/docs/zh_cn/dpo/overview.md @@ -0,0 +1,27 @@ +## DPO 介绍 + +### 简介 + +DPO(Direct Preference Optimization,直接偏好优化)是一种在大语言模型训练中用于直接优化模型偏好的方法。与传统的强化学习方法不同,DPO 直接使用人类偏好数据进行模型优化,从而提高生成内容的质量,使其更符合人类偏好。DPO 利用人类偏好数据,直接对模型进行优化,省略了训练 Reward Model 的训练过程,与 PPO 相比进一步省去了 Critic Model,不但避免了复杂的强化学习算法,减少了训练开销,同时还提高了训练效率。 + +DPO 拥有大量的衍生算法,它们对 DPO 的损失函数进行了一定程度上的改进,我们在 XTuner 中除了 DPO 还实现了[Identity Preference Optimisation (IPO)](https://huggingface.co/papers/2310.12036),[Kahneman-Tversky Optimisation (KTO)](https://github.com/ContextualAI/HALOs)等论文中的损失函数,如需使用这些算法,请参考[修改 DPO 配置](./modify_settings.md)章节。我们也提供了一些[示例配置](https://github.com/InternLM/xtuner/tree/main/xtuner/configs/dpo)用于参考。 + +除了 DPO 之外,还出现了如 [ORPO](https://arxiv.org/abs/2403.07691) 等无需参考模型的对齐算法。ORPO 采用了对数比值(odds ratio)的概念来优化模型,通过在模型训练过程中惩罚那些被拒绝的样本,从而更有效地适应被选择的样本。ORPO 消除了对参考模型的依赖,使得训练过程更加简化且高效。XTuner 中 ORPO 的训练方式与 DPO 非常类似,我们提供了一些 ORPO 的[示例配置](https://github.com/InternLM/xtuner/tree/main/xtuner/configs/orpo),用户可以参考 DPO 的教程对配置进行修改。 + +### XTuner 中 DPO 训练的优势 + +XTuner 中的 DPO 训练具备以下显著优势: + +1. **支持最新的算法**:XTuner除了支持标准的 DPO 之外,还支持了大量的衍生算法,同时也支持ORPO等不依赖参考模型的高效算法。 + +2. **减少显存浪费**:由于偏好数据中的 chosen 和 rejected 数据通常存在长度上的差异,因此在训练数据的拼接时会存在填充(padding token),造成显存浪费。在 XTuner 中,基于 Flash Attention2 中的[变长注意力](https://xtuner.readthedocs.io/zh-cn/latest/acceleration/varlen_flash_attn.html)功能,我们在训练过程中通过将偏好数据打包到同一个序列中,显著减少了由于 padding token 带来的显存浪费。这不仅提高了显存的利用效率,还使得在相同硬件条件下可以训练更大的模型或处理更多的数据。 + +![img](../reward_model/images/var_len_atten.png) + +3. **高效训练**:借助 XTuner 的 QLoRA 训练功能,参考模型能够被转化为移除LoRA适配器的语言模型,从而省去了参考模型权重的显存占用,大幅降低了 DPO 的训练开销。 + +4. 
**长文本训练**: 借助 XTuner 的序列并行功能,能够对长文本数据进行训练。 + +### 开始训练 + +请参阅[快速上手](./quick_start.md)来了解最基本的概念,若希望了解更多训练参数配置相关的内容,请参考[修改DPO配置](./modify_settings.md)章节。 diff --git a/data/xtuner/docs/zh_cn/dpo/quick_start.md b/data/xtuner/docs/zh_cn/dpo/quick_start.md new file mode 100644 index 0000000000000000000000000000000000000000..a92152b0f7e5f764631a06f03056eedaab4daa00 --- /dev/null +++ b/data/xtuner/docs/zh_cn/dpo/quick_start.md @@ -0,0 +1,71 @@ +## DPO 快速上手 + +在本章节中,我们将介绍如何使用 XTuner 训练 1.8B 的 DPO(Direct Preference Optimization)模型,以帮助您快速上手。 + +### 准备预训练模型权重 + +我们使用经过 SFT 的语言模型[InternLM2-chat-1.8b-sft](https://huggingface.co/internlm/internlm2-chat-1_8b-sft)作为 DPO 模型的初始化模型来进行偏好对齐。 + +在训练配置文件中设置`pretrained_model_name_or_path = 'internlm/internlm2-chat-1_8b-sft'`,则会在启动训练时自动下载模型文件。若您需要手动下载模型权重,那么请参考[准备预训练模型权重](https://xtuner.readthedocs.io/zh-cn/latest/preparation/pretrained_model.html)章节,其中详细说明了如何从 Huggingface 或者是 Modelscope 下载模型权重的方法。这里我们附上模型的 HuggingFace 链接与 ModelScope 链接: + +- HuggingFace 链接位于:https://huggingface.co/internlm/internlm2-chat-1_8b-sft +- ModelScope 链接位于:https://modelscope.cn/models/Shanghai_AI_Laboratory/internlm2-chat-1_8b-sft/summary + +### 准备训练数据 + +在本教程中使用 Huggingface 上的[mlabonne/orpo-dpo-mix-40k](https://huggingface.co/datasets/mlabonne/orpo-dpo-mix-40k)数据集作为演示, + +```python +train_dataset = dict( + type=build_preference_dataset, + dataset=dict( + type=load_dataset, + path='mlabonne/orpo-dpo-mix-40k'), + dataset_map_fn=orpo_dpo_mix_40k_map_fn, + is_dpo=True, + is_reward=False, +) +``` + +在配置文件中使用以上配置,即可自动下载并处理该数据集。如果您希望使用其他 Huggingface 上的开源数据集或是使用自定义的数据集,请参阅[偏好数据集](../reward_model/preference_data.md)章节。 + +### 准备配置文件 + +XTuner 提供了多个开箱即用的配置文件,可以通过 `xtuner list-cfg` 查看。我们执行如下指令,以复制一个配置文件到当前目录。 + +```bash +xtuner copy-cfg internlm2_chat_1_8b_dpo_full . 
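+# 复制完成后,当前目录下会生成 internlm2_chat_1_8b_dpo_full_copy.py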
+```
+
+打开复制后的配置文件。如果您选择自动下载模型和数据集,则无需修改配置;若您希望填入预先下载的模型路径和数据集路径,请修改配置中的 `pretrained_model_name_or_path` 以及 `train_dataset` 中 `dataset` 的 `path` 参数。
+
+更多的训练参数配置,请参阅[修改 DPO 训练配置](./modify_settings.md)章节。
+
+### 启动训练
+
+在完成上述操作后,便可以使用下面的指令启动训练任务了:
+
+```bash
+# 单机单卡
+xtuner train ./internlm2_chat_1_8b_dpo_full_copy.py
+# 单机多卡
+NPROC_PER_NODE=${GPU_NUM} xtuner train ./internlm2_chat_1_8b_dpo_full_copy.py
+# slurm 集群
+srun ${SRUN_ARGS} xtuner train ./internlm2_chat_1_8b_dpo_full_copy.py --launcher slurm
+```
+
+### 模型转换
+
+XTuner 已经集成好了将模型转换为 HuggingFace 格式的工具,我们只需要执行:
+
+```bash
+# 创建存放 hf 格式参数的目录
+mkdir work_dirs/internlm2_chat_1_8b_dpo_full_copy/iter_15230_hf
+
+# 转换格式
+xtuner convert pth_to_hf internlm2_chat_1_8b_dpo_full_copy.py \
+    work_dirs/internlm2_chat_1_8b_dpo_full_copy/iter_15230.pth \
+    work_dirs/internlm2_chat_1_8b_dpo_full_copy/iter_15230_hf
+```
+
+即可将 XTuner 训练得到的 ckpt 转换为 HuggingFace 格式的模型。
diff --git a/data/xtuner/docs/zh_cn/evaluation/hook.md b/data/xtuner/docs/zh_cn/evaluation/hook.md
new file mode 100644
index 0000000000000000000000000000000000000000..80d36f10ad10e9b4d14a6ce48ecc3979150e2b4f
--- /dev/null
+++ b/data/xtuner/docs/zh_cn/evaluation/hook.md
@@ -0,0 +1 @@
+# 训练过程中评测
diff --git a/data/xtuner/docs/zh_cn/evaluation/mmbench.md b/data/xtuner/docs/zh_cn/evaluation/mmbench.md
new file mode 100644
index 0000000000000000000000000000000000000000..5421b1c96ac973f7a47839cb2478d63997473d94
--- /dev/null
+++ b/data/xtuner/docs/zh_cn/evaluation/mmbench.md
@@ -0,0 +1 @@
+# MMBench (VLM)
diff --git a/data/xtuner/docs/zh_cn/evaluation/mmlu.md b/data/xtuner/docs/zh_cn/evaluation/mmlu.md
new file mode 100644
index 0000000000000000000000000000000000000000..4bfabff8fa0c0492fe376413ab68dd4382f14cd4
--- /dev/null
+++ b/data/xtuner/docs/zh_cn/evaluation/mmlu.md
@@ -0,0 +1 @@
+# MMLU (LLM)
diff --git a/data/xtuner/docs/zh_cn/evaluation/opencompass.md b/data/xtuner/docs/zh_cn/evaluation/opencompass.md
new file mode 100644
index 0000000000000000000000000000000000000000..dbd7a49502c1ebc7d341c550f40563904b9522c2
--- /dev/null
+++ b/data/xtuner/docs/zh_cn/evaluation/opencompass.md
@@ -0,0 +1 @@
+# 使用 OpenCompass 评测
diff --git a/data/xtuner/docs/zh_cn/get_started/installation.rst b/data/xtuner/docs/zh_cn/get_started/installation.rst
new file mode 100644
index 0000000000000000000000000000000000000000..b5eedbf1018d93c35fa35f2850142fc92017fe71
--- /dev/null
+++ b/data/xtuner/docs/zh_cn/get_started/installation.rst
@@ -0,0 +1,49 @@
+==================================
+安装
+==================================
+
+本节中,我们将演示如何安装 XTuner。
+
+最佳实践
+========
+
+我们推荐参照以下最佳实践,使用 Python-3.10 的 conda 虚拟环境安装 XTuner。
+
+**步骤 0.** 先使用 conda 构建一个 Python-3.10 的虚拟环境
+
+.. code-block:: console
+
+   $ conda create --name xtuner-env python=3.10 -y
+   $ conda activate xtuner-env
+
+**步骤 1.** 安装 XTuner
+
+方案 a:通过 pip 直接安装
+
+.. code-block:: console
+
+   $ pip install -U 'xtuner[deepspeed]'
+
+方案 b:从源码安装
+
+.. code-block:: console
+
+   $ git clone https://github.com/InternLM/xtuner.git
+   $ cd xtuner
+   $ pip install -e '.[deepspeed]'
+
+.. note::
+
+   ``-e`` 表示以可编辑模式安装项目,因此对代码所做的任何本地修改都会直接生效
+
+验证
+========
+
+为了验证 XTuner 是否安装正确,我们将使用命令打印配置文件。
+
+**打印配置文件:** 在命令行中使用 ``xtuner list-cfg`` 验证是否能打印配置文件列表。
+
+.. 
code-block:: console + + $ xtuner list-cfg diff --git a/data/xtuner/docs/zh_cn/get_started/quickstart.rst b/data/xtuner/docs/zh_cn/get_started/quickstart.rst new file mode 100644 index 0000000000000000000000000000000000000000..4bec2a5ace2796662303cd5a09001492366c95be --- /dev/null +++ b/data/xtuner/docs/zh_cn/get_started/quickstart.rst @@ -0,0 +1,415 @@ +快速上手 +======== + +本节中,我们将演示如何使用 XTuner 微调模型,帮助您快速上手 XTuner。 + +在成功安装 XTuner +后,便可以开始进行模型的微调。在本节中,我们将演示如何使用 XTuner,应用 +QLoRA 算法在 Colorist 数据集上微调 InternLM2-Chat-7B。 + +Colorist 数据集(\ `HuggingFace +链接 `__\ ;\ `ModelScope +链接 `__\ )是一个根据颜色描述提供颜色选择与建议的数据集,经过该数据集微调的模型可以做到根据用户对于颜色的描述,从而给出16进制下的颜色编码,如用户输入“宁静而又相当明亮的浅天蓝色,介于天蓝色和婴儿蓝之间,因其亮度而带有一丝轻微的荧光感。”,模型输出 +|image1|\ ,该颜色很符合用户的描述。以下是该数据集的几条样例数据: + ++-----------------------+-----------------------+-------------------+ +| 英文描述 | 中文描述 | 颜色 | ++=======================+=======================+===================+ +| Light Sky Blue: A | 浅天蓝色 | #66ccff: |image8| | +| calming, fairly | :一种介于天蓝和婴儿 | | +| bright color that | 蓝之间的平和、相当明 | | +| falls between sky | 亮的颜色,由于明亮而 | | +| blue and baby blue, | 带有一丝轻微的荧光。 | | +| with a hint of slight | | | +| fluorescence due to | | | +| its brightness. | | | ++-----------------------+-----------------------+-------------------+ +| Bright red: This is a | 鲜红色: | #ee0000: |image9| | +| very vibrant, | 这是一种非常鲜 | | +| saturated and vivid | 艳、饱和、生动的红色 | | +| shade of red, | ,类似成熟苹果或新鲜 | | +| resembling the color | 血液的颜色。它是标准 | | +| of ripe apples or | RGB | | +| fresh blood. It is as | 调色板上的红色,不含 | | +| red as you can get on | 任何蓝色或绿色元素。 | | +| a standard RGB color | | | +| palette, with no | | | +| elements of either | | | +| blue or green. | | | ++-----------------------+-----------------------+-------------------+ +| Bright Turquoise: | 明亮的绿松石 | #00ffcc: | +| This color mixes the | 色:这种颜色融合了鲜 | |image10| | +| freshness of bright | 绿色的清新和淡蓝色的 | | +| green with the | 宁静,呈现出一种充满 | | +| tranquility of light | 活力的绿松石色调。它 | | +| blue, leading to a | 让人联想到热带水域。 | | +| vibrant shade of | | | +| turquoise. It is | | | +| reminiscent of | | | +| tropical waters. | | | ++-----------------------+-----------------------+-------------------+ + +准备模型权重 +------------ + +在微调模型前,首先要准备待微调模型的权重。 + +.. _从-huggingface-下载-1: + +从 HuggingFace 下载 +~~~~~~~~~~~~~~~~~~~ + +.. code:: bash + + pip install -U huggingface_hub + + # 拉取模型至 Shanghai_AI_Laboratory/internlm2-chat-7b + huggingface-cli download internlm/internlm2-chat-7b \ + --local-dir Shanghai_AI_Laboratory/internlm2-chat-7b \ + --local-dir-use-symlinks False \ + --resume-download + +.. _从-modelscope-下载-1: + +从 ModelScope 下载 +~~~~~~~~~~~~~~~~~~ + +由于从 HuggingFace +拉取模型权重,可能存在下载过程不稳定、下载速度过慢等问题。因此在下载过程遇到网络问题时,我们则可以选择从 +ModelScope 下载 InternLM2-Chat-7B 的权重。 + +.. code:: bash + + pip install -U modelscope + + # 拉取模型至当前目录 + python -c "from modelscope import snapshot_download; snapshot_download('Shanghai_AI_Laboratory/internlm2-chat-7b', cache_dir='.')" + +在完成下载后,便可以开始准备微调数据集了。 + +此处附上 HuggingFace 链接与 ModelScope 链接: + +- HuggingFace + 链接位于:\ https://huggingface.co/internlm/internlm2-chat-7b + +- ModelScope + 链接位于:\ https://modelscope.cn/models/Shanghai_AI_Laboratory/internlm2-chat-7b/summary + +准备微调数据集 +-------------- + +接下来,我们需要准备微调数据集。 + +.. _从-huggingface-下载-2: + +从 HuggingFace 下载 +~~~~~~~~~~~~~~~~~~~ + +.. code:: bash + + git clone https://huggingface.co/datasets/burkelibbey/colors + +.. _从-modelscope-下载-2: + +从 ModelScope 下载 +~~~~~~~~~~~~~~~~~~ + +由于相同的问题,因此我们可以选择从 ModelScope 下载所需要的微调数据集。 + +.. 
code:: bash + + git clone https://www.modelscope.cn/datasets/fanqiNO1/colors.git + +此处附上 HuggingFace 链接与 ModelScope 链接: + +- HuggingFace + 链接位于:\ https://huggingface.co/datasets/burkelibbey/colors + +- ModelScope 链接位于:\ https://modelscope.cn/datasets/fanqiNO1/colors + +准备配置文件 +------------ + +XTuner 提供了多个开箱即用的配置文件,可以通过 ``xtuner list-cfg`` +查看。我们执行如下指令,以复制一个配置文件到当前目录。 + +.. code:: bash + + xtuner copy-cfg internlm2_7b_qlora_colorist_e5 . + +配置文件名的解释: + +======== ============================== +配置文件 internlm2_7b_qlora_colorist_e5 +======== ============================== +模型名 internlm2_7b +使用算法 qlora +数据集 colorist +训练时长 5 epochs +======== ============================== + +此时该目录文件结构应如下所示: + +.. code:: bash + + . + ├── colors + │ ├── colors.json + │ ├── dataset_infos.json + │ ├── README.md + │ └── train.jsonl + ├── internlm2_7b_qlora_colorist_e5_copy.py + └── Shanghai_AI_Laboratory + └── internlm2-chat-7b + ├── config.json + ├── configuration_internlm2.py + ├── configuration.json + ├── generation_config.json + ├── modeling_internlm2.py + ├── pytorch_model-00001-of-00008.bin + ├── pytorch_model-00002-of-00008.bin + ├── pytorch_model-00003-of-00008.bin + ├── pytorch_model-00004-of-00008.bin + ├── pytorch_model-00005-of-00008.bin + ├── pytorch_model-00006-of-00008.bin + ├── pytorch_model-00007-of-00008.bin + ├── pytorch_model-00008-of-00008.bin + ├── pytorch_model.bin.index.json + ├── README.md + ├── special_tokens_map.json + ├── tokenization_internlm2_fast.py + ├── tokenization_internlm2.py + ├── tokenizer_config.json + └── tokenizer.model + +修改配置文件 +------------ + +| 在这一步中,我们需要修改待微调模型路径和数据路径为本地路径,并且修改数据集加载方式。 +| 此外,由于复制得到的配置文件是基于基座(Base)模型的,所以还需要修改 + ``prompt_template`` 以适配对话(Chat)模型。 + +.. code:: diff + + ####################################################################### + # PART 1 Settings # + ####################################################################### + # Model + - pretrained_model_name_or_path = 'internlm/internlm2-7b' + + pretrained_model_name_or_path = './Shanghai_AI_Laboratory/internlm2-chat-7b' + + # Data + - data_path = 'burkelibbey/colors' + + data_path = './colors/train.jsonl' + - prompt_template = PROMPT_TEMPLATE.default + + prompt_template = PROMPT_TEMPLATE.internlm2_chat + + ... + ####################################################################### + # PART 3 Dataset & Dataloader # + ####################################################################### + train_dataset = dict( + type=process_hf_dataset, + - dataset=dict(type=load_dataset, path=data_path), + + dataset=dict(type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=colors_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + +因此在这一步中,修改了 +``pretrained_model_name_or_path``\ 、\ ``data_path``\ 、\ ``prompt_template`` +以及 ``train_dataset`` 中的 ``dataset`` 字段。 + +启动微调 +-------- + +在完成上述操作后,便可以使用下面的指令启动微调任务了。 + +.. code:: bash + + # 单机单卡 + xtuner train ./internlm2_7b_qlora_colorist_e5_copy.py + # 单机多卡 + NPROC_PER_NODE=${GPU_NUM} xtuner train ./internlm2_7b_qlora_colorist_e5_copy.py + # slurm 情况 + srun ${SRUN_ARGS} xtuner train ./internlm2_7b_qlora_colorist_e5_copy.py --launcher slurm + +正确输出的训练日志应类似如下所示: + +.. 
code:: text + + 01/29 21:35:34 - mmengine - INFO - Iter(train) [ 10/720] lr: 9.0001e-05 eta: 0:31:46 time: 2.6851 data_time: 0.0077 memory: 12762 loss: 2.6900 + 01/29 21:36:02 - mmengine - INFO - Iter(train) [ 20/720] lr: 1.9000e-04 eta: 0:32:01 time: 2.8037 data_time: 0.0071 memory: 13969 loss: 2.6049 grad_norm: 0.9361 + 01/29 21:36:29 - mmengine - INFO - Iter(train) [ 30/720] lr: 1.9994e-04 eta: 0:31:24 time: 2.7031 data_time: 0.0070 memory: 13969 loss: 2.5795 grad_norm: 0.9361 + 01/29 21:36:57 - mmengine - INFO - Iter(train) [ 40/720] lr: 1.9969e-04 eta: 0:30:55 time: 2.7247 data_time: 0.0069 memory: 13969 loss: 2.3352 grad_norm: 0.8482 + 01/29 21:37:24 - mmengine - INFO - Iter(train) [ 50/720] lr: 1.9925e-04 eta: 0:30:28 time: 2.7286 data_time: 0.0068 memory: 13969 loss: 2.2816 grad_norm: 0.8184 + 01/29 21:37:51 - mmengine - INFO - Iter(train) [ 60/720] lr: 1.9863e-04 eta: 0:29:58 time: 2.7048 data_time: 0.0069 memory: 13969 loss: 2.2040 grad_norm: 0.8184 + 01/29 21:38:18 - mmengine - INFO - Iter(train) [ 70/720] lr: 1.9781e-04 eta: 0:29:31 time: 2.7302 data_time: 0.0068 memory: 13969 loss: 2.1912 grad_norm: 0.8460 + 01/29 21:38:46 - mmengine - INFO - Iter(train) [ 80/720] lr: 1.9681e-04 eta: 0:29:05 time: 2.7338 data_time: 0.0069 memory: 13969 loss: 2.1512 grad_norm: 0.8686 + 01/29 21:39:13 - mmengine - INFO - Iter(train) [ 90/720] lr: 1.9563e-04 eta: 0:28:36 time: 2.7047 data_time: 0.0068 memory: 13969 loss: 2.0653 grad_norm: 0.8686 + 01/29 21:39:40 - mmengine - INFO - Iter(train) [100/720] lr: 1.9426e-04 eta: 0:28:09 time: 2.7383 data_time: 0.0070 memory: 13969 loss: 1.9819 grad_norm: 0.9127 + +在训练开始前,可以看到模型的输出如下所示: + +.. code:: text + + 2024/01/29 21:34:58 - mmengine - INFO - before_train in EvaluateChatHook. + 2024/01/29 21:35:03 - mmengine - INFO - Sample output: + <|im_start|>system + You are a professional color designer. Please provide the corresponding colors based on the description of Human. + <|im_end|> + <|im_start|>user + 请给我一个像天空一样清澈透明的蓝色。<|im_end|> + <|im_start|>assistant + 为了匹配您所描述的“像天空一样清澈透明的蓝色”,我建议您选择一种名为“天蓝”(Cerulean)的颜色。这种颜色通常被用来代表天空、海洋和清澈的水域,它具有明亮 + + 2024/01/29 21:35:07 - mmengine - INFO - Sample output: + <|im_start|>system + You are a professional color designer. Please provide the corresponding colors based on the description of Human. + <|im_end|> + <|im_start|>user + Please give me a clear blue like the sky.<|im_end|> + <|im_start|>assistant + The color you're describing is a clear, bright blue that resembles the color of the sky on a sunny day. This color is often referred to as "sky blue" or "cerulean blue." Here are some specific color codes and names that + +而在训练完全完成后,可以观察到模型的输出如下所示: + +.. code:: text + + 2024/01/29 22:08:07 - mmengine - INFO - Sample output: + <|im_start|>system + You are a professional color designer. Please provide the corresponding colors based on the description of Human. + <|im_end|> + <|im_start|>user + 请给我一个像天空一样清澈透明的蓝色。<|im_end|> + <|im_start|>assistant + #1099ee<|im_end|> + + 2024/01/29 22:08:08 - mmengine - INFO - Sample output: + <|im_start|>system + You are a professional color designer. Please provide the corresponding colors based on the description of Human. + <|im_end|> + <|im_start|>user + Please give me a clear blue like the sky.<|im_end|> + <|im_start|>assistant + #0066dd<|im_end|> + +模型输出的颜色如下所示: + +- 天空一样清澈透明的蓝色:\ |image11| + +- A clear blue like the sky: |image12| + +不难发现,模型在经过训练后,其输出已经完全与数据集内容所对齐了。 + +.. 
_模型转换--lora-合并:
+
+模型转换 + LoRA 合并
+--------------------
+
+在训练完成后,我们会得到几个 ``.pth`` 文件,这些文件存储了 QLoRA 算法训练过程所更新的参数,而\ **不是**\ 模型的全部参数。因此我们需要将这些 ``.pth`` 文件转换为 HuggingFace 格式,并合并入原始的语言模型权重中。
+
+模型转换
+~~~~~~~~
+
+XTuner 已经集成好了将模型转换为 HuggingFace 格式的工具,我们只需要执行:
+
+.. code:: bash
+
+   # 创建存放 hf 格式参数的目录
+   mkdir work_dirs/internlm2_7b_qlora_colorist_e5_copy/iter_720_hf
+
+   # 转换格式
+   xtuner convert pth_to_hf internlm2_7b_qlora_colorist_e5_copy.py \
+       work_dirs/internlm2_7b_qlora_colorist_e5_copy/iter_720.pth \
+       work_dirs/internlm2_7b_qlora_colorist_e5_copy/iter_720_hf
+
+该条转换命令将会根据配置文件 ``internlm2_7b_qlora_colorist_e5_copy.py`` 的内容,将 ``work_dirs/internlm2_7b_qlora_colorist_e5_copy/iter_720.pth`` 转换为 hf 格式,并保存在 ``work_dirs/internlm2_7b_qlora_colorist_e5_copy/iter_720_hf`` 位置。
+
+LoRA 合并
+~~~~~~~~~
+
+XTuner 也已经集成好了合并 LoRA 权重的工具,我们只需执行如下指令:
+
+.. code:: bash
+
+   # 创建存放合并后的参数的目录
+   mkdir work_dirs/internlm2_7b_qlora_colorist_e5_copy/merged
+
+   # 合并参数
+   xtuner convert merge Shanghai_AI_Laboratory/internlm2-chat-7b \
+       work_dirs/internlm2_7b_qlora_colorist_e5_copy/iter_720_hf \
+       work_dirs/internlm2_7b_qlora_colorist_e5_copy/merged \
+       --max-shard-size 2GB
+
+与转换命令类似,该条合并参数命令会读取原始参数路径 ``Shanghai_AI_Laboratory/internlm2-chat-7b`` 以及转换为 hf 格式的部分参数路径 ``work_dirs/internlm2_7b_qlora_colorist_e5_copy/iter_720_hf``\ ,将两部分参数合并后保存于 ``work_dirs/internlm2_7b_qlora_colorist_e5_copy/merged``\ ,其中每个参数切片的最大文件大小为 2GB。
+
+与模型对话
+----------
+
+在合并权重后,为了更好地体会到模型的能力,XTuner 也集成了与模型对话的工具。通过如下命令,便可以启动一个与模型对话的简易 Demo:
+
+.. code:: bash
+
+   xtuner chat work_dirs/internlm2_7b_qlora_colorist_e5_copy/merged \
+       --prompt-template internlm2_chat \
+       --system-template colorist
+
+当然,我们也可以选择不合并权重,而是直接与 LLM + LoRA Adapter 进行对话,只需要执行如下指令:
+
+.. code:: bash
+
+   xtuner chat Shanghai_AI_Laboratory/internlm2-chat-7b \
+       --adapter work_dirs/internlm2_7b_qlora_colorist_e5_copy/iter_720_hf \
+       --prompt-template internlm2_chat \
+       --system-template colorist
+
+其中 ``work_dirs/internlm2_7b_qlora_colorist_e5_copy/merged`` 是合并后的权重路径,\ ``--prompt-template internlm2_chat`` 指定了对话模板为 InternLM2-Chat,\ ``--system-template colorist`` 则是指定了与模型对话时的 System Prompt 为 Colorist 数据集所要求的模板。
+
+以下是一个例子:
+
+.. code:: text
+
+   double enter to end input (EXIT: exit chat, RESET: reset history) >>> 宁静而又相当明亮的浅天蓝色,介于天蓝色和婴儿蓝之间,因其亮度而带有一丝轻微的荧光感。
+
+   #66ccff<|im_end|>
+
+其颜色如下所示:
+
+宁静而又相当明亮的浅天蓝色,介于天蓝色和婴儿蓝之间,因其亮度而带有一丝轻微的荧光感。:\ |image13|
+
+.. |image1| image:: https://img.shields.io/badge/%2366ccff-66CCFF
+.. |image8| image:: https://img.shields.io/badge/%2366ccff-66CCFF
+.. |image9| image:: https://img.shields.io/badge/%23ee0000-EE0000
+.. |image10| image:: https://img.shields.io/badge/%2300ffcc-00FFCC
+.. |image11| image:: https://img.shields.io/badge/天空一样清澈透明的蓝色-1099EE
+.. |image12| image:: https://img.shields.io/badge/A_clear_blue_like_the_sky-0066DD
+.. 
|image13| image:: https://img.shields.io/badge/宁静而又相当明亮的浅天蓝色,介于天蓝色和婴儿蓝之间,因其亮度而带有一丝轻微的荧光感。-66CCFF diff --git a/data/xtuner/docs/zh_cn/index.rst b/data/xtuner/docs/zh_cn/index.rst new file mode 100644 index 0000000000000000000000000000000000000000..4acf0e8829c8d86a643038d0f30b1772bfdbca00 --- /dev/null +++ b/data/xtuner/docs/zh_cn/index.rst @@ -0,0 +1,97 @@ +.. xtuner documentation master file, created by + sphinx-quickstart on Tue Jan 9 16:33:06 2024. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +欢迎来到 XTuner 的中文文档 +================================== + +.. figure:: ./_static/image/logo.png + :align: center + :alt: xtuner + :class: no-scaled-link + +.. raw:: html + +

+   <p align="center"><strong>LLM 一站式工具箱</strong></p>
+   <p align="center">Star | Watch | Fork</p>

+ + + +文档 +------------- +.. toctree:: + :maxdepth: 2 + :caption: 开始使用 + + get_started/installation.rst + get_started/quickstart.rst + +.. toctree:: + :maxdepth: 2 + :caption: 准备 + + preparation/pretrained_model.rst + preparation/prompt_template.rst + +.. toctree:: + :maxdepth: 2 + :caption: 训练 + + training/open_source_dataset.rst + training/custom_sft_dataset.rst + training/custom_pretrain_dataset.rst + training/multi_modal_dataset.rst + acceleration/train_large_scale_dataset.rst + training/modify_settings.rst + training/visualization.rst + +.. toctree:: + :maxdepth: 2 + :caption: DPO + + dpo/overview.md + dpo/quick_start.md + dpo/modify_settings.md + +.. toctree:: + :maxdepth: 2 + :caption: Reward Model + + reward_model/overview.md + reward_model/quick_start.md + reward_model/modify_settings.md + reward_model/preference_data.md + +.. toctree:: + :maxdepth: 2 + :caption: 加速训练 + + acceleration/deepspeed.rst + acceleration/flash_attn.rst + acceleration/varlen_flash_attn.rst + acceleration/pack_to_max_length.rst + acceleration/length_grouped_sampler.rst + acceleration/train_extreme_long_sequence.rst + acceleration/hyper_parameters.rst + acceleration/benchmark.rst + + +.. toctree:: + :maxdepth: 1 + :caption: InternEvo 迁移 + + internevo_migration/differences.rst + internevo_migration/ftdp_dataset/tokenized_and_internlm2.rst + internevo_migration/ftdp_dataset/processed_and_internlm2.rst + internevo_migration/ftdp_dataset/processed_and_others.rst + internevo_migration/ftdp_dataset/processed_normal_chat.rst diff --git a/data/xtuner/docs/zh_cn/internevo_migration/differences.rst b/data/xtuner/docs/zh_cn/internevo_migration/differences.rst new file mode 100644 index 0000000000000000000000000000000000000000..68c7f318fa2865d82c418988d1beb6d06ea5d4e9 --- /dev/null +++ b/data/xtuner/docs/zh_cn/internevo_migration/differences.rst @@ -0,0 +1,320 @@ +============== +主要差异 +============== + +总览 +============= + +XTuner 可以复现 InternEvo (train_internlm) 仓库训练得到的开源模型 +internlm/internlm2-chat-7b 的训练精度。 + +下面是 XTuner 和 InternEvo (train_internlm) +在相同数据集上训练相同基座模型的训练结果对比: + +.. list-table:: + :widths: 50 25 25 + :header-rows: 1 + + * - 能力类别 + - xtuner + - internevo + * - 全数据集平均(无智能体) + - 56.44 + - 55.26 + * - 全维度平均(无智能体) + - 49.58 + - 48.96 + * - 语言 Language + - 64.77 + - 62.41 + * - 知识 Knowledge + - 52.24 + - 52.52 + * - 推理 Reasoning + - 65.5 + - 63.91 + * - 数学 Mathematics + - 30.95 + - 30.26 + * - 代码 Coding + - 38.91 + - 41.06 + * - 长文本 LongEval + - 45.09 + - 43.62 + * - 智能体 Agent + - 44.85 + - 43.97 + * - 数学题智能体 + - 37 + - 37.19 + * - CIBench + - 79.07 + - 69.78 + * - PluginEval + - 65.57 + - 65.62 + +64 \* A100 的训练时间对比如下: + +=========== ========== +xtuner internevo +=========== ========== +15 h 55 min 16h 09 min +=========== ========== + +.. tip:: + 使用 XTuner 提供的序列并行算法可以进一步提升训练速度,使用方式请参考 + \ :ref:`序列并行文档 ` \ 。 + + +适配 +========== + +在从 InternEvo (train_internlm) 向 XTuner +迁移的过程中,我们需要关注模型、数据以及训练策略这三个方面的适配问题。后续内容将详细阐述如何进行适配。 + + +模型 +------- + +InternEvo 在训练时读取和保存的模型权重满足以下目录结构(以 tp2pp2 +为例): + +.. code:: + + |-- root + |-- model_config.pt + |-- model_tp0_pp0.pt + |-- model_tp0_pp1.pt + |-- model_tp1_pp0.pt + |-- model_tp1_pp1.pt + +其中,\ ``model_config.pt`` 保存模型权重的一些 meta 信息,其余 4 个 +checkpoint 则分别保存 4 组 GPUs 上的模型权重。因此,InternEvo +训练过程中要求读取预训练权重的 tp、pp 策略与训练使用的 tp、pp +策略一致才能正常读取预训练权重进行训练。 + +XTuner 支持基于 Huggingface Hub 上的模型进行训练,如下修改 config +内容即可将基座模型从 internlm2-7b 切换为 internlm2-20b: + +.. 
code:: diff + + ####################################################################### + # PART 1 Settings # + ####################################################################### + # Model + - pretrained_model_name_or_path = 'internlm/internlm2-7b' + + pretrained_model_name_or_path = 'internlm/internlm2-20b' + +数据 +--------- + +InternEvo +在训练过程中通常会把多条数据拼接为一个特定的最大长度,随后输入模型训练。其配置往往满足以下形式: + +.. code:: python + + data = dict( + seq_len=SEQ_LEN, + pack_sample_into_one=False, + min_length=MIN_LENGTH, + train_folder=TRAIN_FOLDER, + dataset_weights=DATASET_WEIGHTS, + ...) + +其中,数据配比 (``dataset_weights=DATASET_WEIGHTS``) 功能 XTuner +尚未支持。\ ``TRAIN_FOLDER`` 中的训练数据需要满足 ftdp tokenized +数据集格式: + +.. code:: + + |-- TRAIN_FOLDER + |-- cn + | |-- dataset1 + | | |-- data1.bin + | | |-- data1.bin.meta + | |-- dataset2 + | | |-- data2.bin + | | |-- data2.bin.meta + +在 XTuner 中实现在线数据集拼接策略需要参考 +``xtuner/configs/internlm/internlm2_7b/internlm2_7b_w_internevo_dataset.py`` +文件中的配置: + +.. code:: diff + + ####################################################################### + # PART 1 Settings # + ####################################################################### + # Data + - dataset_folder = '/path/to/sft/data/folder' + + dataset_folder = TRAIN_FOLDER + - max_length = 32768 + + max_length = SEQ_LEN + + ####################################################################### + # PART 3 Dataset & Dataloader # + ####################################################################### + train_dataset = dict( + type=build_packed_dataset, + dataset_cfg=dict( + type=load_intern_repo_tokenized_dataset, + data_order_path=None, + folder=dataset_folder, + - min_length=0, + + min_length=MIN_LENGTH, + file_type='.bin'), + packed_length=max_length, + seed=1024) + +.. note:: + + 需要注意,由于训练数据喂给模型的先后顺序可能对训练结果造成影响,因此建议不要轻易修改上述配置中的 ``seed`` 选项。同时,可参考 \ :ref:`获取数据顺序 ` \ 进一步固定数据顺序。 + +训练策略 +------------ + +Varlen Attention +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +InternEvo 通过设置 +`数据配置 `__ +中的 ``pack_sample_into_one`` 参数为 False +来使用“变长注意力机制”(见下图右侧)。 + +.. code:: python + + data = dict( + pack_sample_into_one=False, + ...) + +.. raw:: html + +
+   <p align="center">(图:变长注意力机制示意)</p>
+
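+上图所示的"变长注意力"可以用一个极简示意来理解:多条样本拼接成一条长序列后,注意力仍以各条样本的边界(即累计长度 cu_seqlens)分段计算,样本之间互不可见。以下代码仅为概念演示(假设性示例,并非 XTuner 的实际实现):
+
+.. code:: python
+
+   # 假设三条样本长度分别为 3、5、2,拼接为一条长度为 10 的序列
+   seq_lens = [3, 5, 2]
+   cu_seqlens = [0]
+   for n in seq_lens:
+       cu_seqlens.append(cu_seqlens[-1] + n)
+   print(cu_seqlens)  # [0, 3, 8, 10]
+   # 变长注意力按 cu_seqlens 给出的边界分段计算注意力,
+   # 既避免样本间相互可见,也无需引入 padding token
+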
+
+在 XTuner 中,只需将 config 中的 ``use_varlen_attn`` 设置为 True,即可保证训练行为与 InternEvo 一致:
+
+.. code:: diff
+
+   ...
+   #######################################################################
+   #                        PART 1  Settings                             #
+   #######################################################################
+   # Model
+   pretrained_model_name_or_path = 'internlm/internlm2-7b'
+   - use_varlen_attn = False
+   + use_varlen_attn = True
+   ...
+
+.. warning::
+   需要注意,当设置 ``use_varlen_attn = True`` 后,请确保 ``batch_size`` 被设置为 1,且 ``pack_to_max_length`` 被设置为 True。
+
+.. tip::
+   ``use_varlen_attn = True`` 时,``单卡 batch size 等于 2,拼接数据集至最大长度 2k`` 的配置与 ``单卡 batch size 等于 1,拼接数据集至最大长度 4k`` 的配置训练行为是近似的,因此 XTuner 目前只支持了 ``batch_size_per_device = 1`` 的情况。
+
+
+梯度累积
+~~~~~~~~~~~~~~
+
+在 InternEvo 的配置中,与 batch_size 和 accumulative_counts 相关的配置有如下几个:
+
+.. code:: python
+
+   data = dict(
+       # micro_num means the number of micro_batch contained in one gradient update
+       micro_num=MICRO_NUM,
+       # MICRO_BATCH_SIZE * SEQ_LEN = PACKED_LENGTH
+       micro_bsz=MICRO_BATCH_SIZE,
+       total_steps=TOTAL_STEP,
+       # 梯度累计,默认等于MICRO_NUM(BS)
+       gradient_accumulation=GRADIENT_ACCUMULATION,
+       ...)
+
+.. note::
+   InternEvo 中的 ``micro_num`` 等价于 XTuner 中的 ``accumulative_counts``
+
+.. note::
+   ``total_steps`` 在 XTuner 中可以不手动指定,可通过 ``max_epochs`` 指定。
+
+.. warning::
+   XTuner 目前只支持 ``micro_bsz = 1``\ 。
+
+.. tip::
+   为对齐以上配置,可参考 XTuner 中 ``xtuner/configs/internlm/internlm2_7b/internlm2_7b_w_internevo_dataset.py`` 文件中的配置,并进行如下修改:
+
+   .. code:: diff
+
+      #######################################################################
+      #                        PART 1  Settings                             #
+      #######################################################################
+      # Scheduler & Optimizer
+      - accumulative_counts = 1
+      + accumulative_counts = MICRO_NUM  # or GRADIENT_ACCUMULATION
+      - max_epochs = 1
+      + max_epochs = MAX_EPOCHS
+
+并行策略
+---------------
+
+ZeRO 系列显存优化
+~~~~~~~~~~~~~~~~~~~~~~~
+
+XTuner 支持使用 ZeRO 系列显存优化降低训练过程中的显存消耗:
+
+.. code:: bash
+
+   # 单卡
+   xtuner train ${CONFIG_NAME_OR_PATH} --deepspeed deepspeed_zero2
+   # 多卡
+   (DIST) NPROC_PER_NODE=${GPU_NUM} xtuner train ${CONFIG_NAME_OR_PATH} --deepspeed deepspeed_zero2
+   (SLURM) srun ${SRUN_ARGS} xtuner train ${CONFIG_NAME_OR_PATH} --launcher slurm --deepspeed deepspeed_zero2
+
+
+序列并行
+~~~~~~~~~~~~~~~~~~~
+
+InternEvo 中支持了 Data Parallel、Tensor Parallel、Pipeline Parallel 和 Sequence Parallel 四种并行策略。XTuner 目前支持了 Data Parallel 和 Sequence Parallel 两种并行策略,基本可以满足全部的训练需求(搭配 zero3 显存优化策略可支持 70B 模型 256K 上下文训练)。
+
+假定 InternEvo 训练过程中 tp_world_size = TP、pp_world_size = PP 且 sequence_parallel = True,则训练的 global_batch_size 满足以下计算公式:
+
+.. code::
+
+   # 多除的一个 TP 是因为启用了 sequence parallel
+   global_batch_size = num_gpus * batch_size_per_device * gradient_accumulate / TP / PP / TP
+
+.. tip::
+   ``use_varlen_attn = True`` 时,``batch_size_per_device`` 只能为 1。此时若想对齐 ``global_batch_size``,只需要在配置文件中综合调整 ``gradient_accumulate`` 和 ``sequence_parallel_size`` 两项的数值:
+
+.. code:: diff
+
+   + from xtuner.parallel.sequence import SequenceParallelSampler
+
+   + sequence_parallel_size = SP
+   - accumulative_counts = 1  # 1bs * 1acc * 64gpu = 64 batchsize
+   + accumulative_counts = TP * PP * TP / SP
+
+   #######################################################################
+   #                  PART 3  Dataset & Dataloader                       #
+   #######################################################################
+   train_dataloader = dict(
+   -   sampler=dict(type=DefaultSampler, shuffle=True),
+   +   sampler=dict(type=SequenceParallelSampler, shuffle=True),
+       ...)
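+作为补充,下面用一段小脚本演示上文 global_batch_size 公式的计算过程(仅为示意,其中 GPU 数、梯度累积与并行规模等数值均为假设):
+
+.. code:: python
+
+   # 按上文公式估算 InternEvo 侧的 global_batch_size(数值仅为示例)
+   num_gpus = 64
+   batch_size_per_device = 1   # use_varlen_attn = True 时只能为 1
+   gradient_accumulate = 16
+   TP, PP = 2, 2
+
+   # 多除的一个 TP 是因为启用了 sequence parallel
+   global_batch_size = num_gpus * batch_size_per_device * gradient_accumulate / TP / PP / TP
+   print(global_batch_size)  # 128.0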
diff --git a/data/xtuner/docs/zh_cn/internevo_migration/ftdp_dataset/processed_and_internlm2.rst b/data/xtuner/docs/zh_cn/internevo_migration/ftdp_dataset/processed_and_internlm2.rst new file mode 100644 index 0000000000000000000000000000000000000000..fcddad2881f00892495e1bd9075d6748925fa8d8 --- /dev/null +++ b/data/xtuner/docs/zh_cn/internevo_migration/ftdp_dataset/processed_and_internlm2.rst @@ -0,0 +1,257 @@ + +Processed 数据集 + InternLM2 +=================================== + +.. warning:: + 非 FTDP(一款闭源数据处理工具) 用户跳过此文档 + +使用尚未 token 化的 ftdp 数据训练 InternLM2 模型的场景。 + +步骤 1:离线处理数据集 +---------------------- + +ftdp 把 sft +任务的数据处理划分为三个类型,原始数据(origin)、预处理数据(processed)和 +token 过的数据(tokenized)。我们需要将预处理过的、具有统一格式的 ftdp +数据 token +化得到直接可以用于训练的格式。其中,预处理数据需要满足以下目录结构: + +.. code:: + + |-- processed-dir + |-- data1 + | |-- processed + | |-- sft_chat + | |-- data1.jsonl + |-- data2 + | |-- processed + | |-- sft_chat + | |-- data2.jsonl + +使用以下命令可离线 token 化 ftdp 格式的预处理数据(processed)数据集: + +.. code-block:: console + + $ python xtuner/tools/tokenize_ftdp_datasets.py \ + $ --processed-dir /path/to/preprocessed/data \ + $ --tokenized-dir /path/to/tokenized/data \ + $ --tokenizer-path pretrained_model_name_or_path + +.. note:: + ``--processed-dir`` 需要指定预处理后的,具有 ftdp + 标准格式的数据路径 + +.. note:: + ``--tokenized-dir`` 需要指定为 token 化后的数据存储路径 + +.. note:: + ``--tokenizer-path pretrained_model_name_or_path`` 中的 + ``pretrained_model_name_or_path`` 同 ``from_pretrained`` 接口中的 + ``pretrained_model_name_or_path``\ + +.. note:: + 上述命令执行成功后,会在 ``/path/to/tokenized/data/chatml_llamav13_32k`` + 路径下保存两个子文件夹——``train`` 和 ``valid``\ 。 + +步骤 2:导出模板 config 文件 +---------------------------- + +XTuner 中目前提供了训练 InternLM2 的模板 config,使用命令: + +.. code-block:: console + + $ xtuner copy-cfg internlm2_7b_w_tokenized_dataset . + +.. note:: + 当前目录下会有一个名为 ``internlm2_7b_w_tokenized_dataset_copy.py`` 的新文件 + +步骤 3:修改模板 config 文件 +---------------------------- + +修改模板 config 文件中的训练数据路径为真实数据路径,其中 +``/path/to/tokenized/data`` 与步骤 1 中的 ``/path/to/tokenized/data`` +为同一个路径: + +.. code:: diff + + ... + + ####################################################################### + # PART 1 Settings # + ####################################################################### + # Model + pretrained_model_name_or_path = 'internlm/internlm2-7b' + use_varlen_attn = True + + # Data + - dataset_folder = '/path/to/sft/data/folder' + + dataset_folder = '/path/to/tokenized/data/chatml_llamav13_32k/train' + prompt_template = PROMPT_TEMPLATE.internlm2_chat + max_length = 32768 + pack_to_max_length = True + ... + +.. tip:: + 在使用 DeepSpeed 训练模型时,如需在保存 checkpoint + 时只保存模型权重,而不保存优化器状态,可参考以下步骤: + + 1. 确保 mmengine 版本大于等于 0.10.3 + + .. code-block:: console + + $ pip install 'mmengine>=0.10.3' + + 2. 修改 Config 文件,CheckpointHook 增加 save_optimizer=False + + .. code:: diff + + default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 100 iterations. + logger=dict(type=LoggerHook, interval=1), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per epoch. + checkpoint=dict( + type=CheckpointHook, + + save_optimizer=False, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), + ) + +.. 
warning:: + + 设置 ``save_optimizer=False`` 后,训练过程不可 resume 。 + + +步骤 4:获取数据顺序 (可选) +----------------------------- + +训练数据的提供顺序可能会对模型的最终训练成果产生影响。鉴于不同集群中通过 +``os.walk`` +方法所得到的结果可能存在差异,为了确保训练结果的稳定性和可控性,建议首先确立所有训练数据文件的相对次序,并在后续的训练阶段中,使用这一相对次序来替代 +``os.walk`` 方法。 + +运行下面的代码可获取数据顺序,并存为 txt 文件: + +.. code-block:: console + + $ python xtuner/tools/get_data_order.py \ + $ --data-folder /path/to/tokenized/data \ + $ --save-folder /folder/to/save/data/order \ + $ --file-type ${file_type} + +.. tip:: + ``--file-type ${file_type}`` 表示需要统计所有以 ``${file_type}`` + 为文件名后缀的文件的顺序。 + + 例如,需要获取 ``/path/to/tokenized/data`` 路径下所有以 ``.bin`` + 结尾的文件的顺序,并保存在当前路径下,那么上述命令需要改为: + + .. code-block:: console + + $ python xtuner/tools/get_data_order.py \ + $ --data-folder /path/to/tokenized/data \ + $ --save-folder . \ + $ --file-type .bin + +获得数据顺序文件后,还需要在 config 中设置数据顺序文件路径: + +.. code:: diff + + ... + ####################################################################### + # PART 3 Dataset & Dataloader # + ####################################################################### + train_dataset = dict( + type=build_packed_dataset, + dataset_cfg=dict( + type=load_intern_repo_tokenized_dataset, + - data_order_path=None, + + data_order_path='/folder/to/save/data/order/'+'data_order.txt', + folder=dataset_folder, + min_length=0, + file_type='.bin' + ), + packed_length=max_length, + seed=1024) + + +步骤 5:启动训练 +---------------- + +在 slurm 集群调度系统中可以通过以下命令启动训练: + +.. code-block:: console + + $ srun ${SRUN_ARGS} xtuner train internlm2_7b_w_tokenized_dataset_copy.py --launcher slurm --deepspeed deepspeed_zero1 + +若出现 OOM 现象,可尝试使用 zero2 或 zero3。以下命令可以使用 zero 3 +显存优化策略进行训练: + +.. code-block:: console + + $ srun ${SRUN_ARGS} xtuner train internlm2_7b_w_tokenized_dataset_copy.py --launcher slurm --deepspeed deepspeed_zero3 + +在阿里云 DLC 中可通过以下命令启动训练: + +.. code:: diff + + export NCCL_IB_TC=136 + export NCCL_IB_SL=5 + export NCCL_IB_GID_INDEX=3 + export NCCL_SOCKET_IFNAME=bond0 + export NCCL_DEBUG=INFO + export NCCL_IB_HCA=mlx5 + export NCCL_IB_TIMEOUT=22 + export NCCL_IB_QPS_PER_CONNECTION=8 + export NCCL_NET_PLUGIN=none + + export NCCL_BUFFSIZE=2097152 + export PYTORCH_CUDA_ALLOC_CONF=max_split_size_mb:512 + - export EXP_NAME=debug + + export EXP_NAME=your_exp_name + export PYTHONPATH='.':$PYTHONPATH + source ~/.bashrc + + cd /path/to/xtuner + + conda activate conda_env_name + + export NPROC_PER_NODE=${KUBERNETES_CONTAINER_RESOURCE_GPU} + export PORT=${MASTER_PORT} + export NNODES=${WORLD_SIZE} + export NODE_RANK=${RANK} + export ADDR=${MASTER_ADDR} + + echo ${KUBERNETES_CONTAINER_RESOURCE_GPU} + echo ${WORLD_SIZE} + echo ${MASTER_PORT} + echo ${MASTER_ADDR} + echo ${RANK} + xtuner train internlm2_7b_w_tokenized_dataset_copy.py \ + --deepspeed deepspeed_zero1 \ + --work-dir work_dirs/${EXP_NAME} + +步骤 6:转模型 +-------------- + +deepspeed 转 hf: + +.. code-block:: console + + $ python xtuner/tools/model_converters/pth_to_hf.py internlm2_7b_w_tokenized_dataset_copy.py /src/model/path /hf/dst/model/path + +hf 转 Turbomind: + +.. 
code-block:: console + + $ lmdeploy convert internlm2-chat-7b /hf/dst/model/path --dst-path /turbomind/dst/model/path + +步骤 7:Turbomind 评测 +---------------------- + +请参考 `OpenCompass LMDeploy +评测文档 `__\ 。 diff --git a/data/xtuner/docs/zh_cn/internevo_migration/ftdp_dataset/processed_and_others.rst b/data/xtuner/docs/zh_cn/internevo_migration/ftdp_dataset/processed_and_others.rst new file mode 100644 index 0000000000000000000000000000000000000000..6a472d1e7940a98fe8fc0e001500691def53a49f --- /dev/null +++ b/data/xtuner/docs/zh_cn/internevo_migration/ftdp_dataset/processed_and_others.rst @@ -0,0 +1,292 @@ +.. _case2: + +Processed 数据集 + 其他模型 +========================================== + +.. warning:: + 非 FTDP(一款闭源数据处理工具) 用户跳过此文档 + + +使用尚未 token 化的 ftdp 数据训练其他模型(以 Mistral 为例),且需要用 +Internlm2 对话模板覆盖原有对话模板以便让模型掌握 agent 、tool 能力。 + +步骤 1:离线处理数据集 +---------------------- + +ftdp 把 sft +任务的数据处理划分为三个类型,原始数据(origin)、预处理数据(processed)和 +token 过的数据(tokenized)。我们需要将预处理过的、具有统一格式的 ftdp +数据 token +化得到直接可以用于训练的格式。其中,预处理数据需要满足以下目录结构: + +.. code:: + + |-- processed-dir + |-- data1 + | |-- processed + | |-- sft_chat + | |-- data1.jsonl + |-- data2 + | |-- processed + | |-- sft_chat + | |-- data2.jsonl + +使用以下命令可离线 token 化 ftdp 格式的预处理数据(processed)数据集: + +.. code-block:: console + + $ python xtuner/tools/tokenize_ftdp_datasets.py \ + $ --processed-dir /path/to/preprocessed/data \ + $ --tokenized-dir /path/to/tokenized/data \ + $ --tokenizer-path pretrained_model_name_or_path + +.. note:: + ``--processed-dir`` 需要指定预处理后的,具有 ftdp + 标准格式的数据路径 + +.. note:: + ``--tokenized-dir`` 需要指定为 token 化后的数据存储路径 + +.. note:: + ``--tokenizer-path pretrained_model_name_or_path`` 中的 + ``pretrained_model_name_or_path`` 同 ``from_pretrained`` 接口中的 + ``pretrained_model_name_or_path``\ + +.. note:: + 上述命令执行成功后,会在 ``/path/to/tokenized/data/chatml_llamav13_32k`` + 路径下保存两个子文件夹——``train`` 和 ``valid``\ 。 + +.. warning:: + 由于除 Internlm2 外的其他模型(如 mistral 等)没有 internlm2-chat + 模型的智能体、工具调用等功能的对话模板,因此对于非 internlm2 + 模型,需要将 internlm2-chat + 对话模板中的一些特殊字符(如:<\|im_start\|>、<\|plugin\|>等)加入到新模型的 + tokenizer 的 special tokens 中,需要通过 + ``--tokenizer-w-special-tokens-save-dir`` 指定新 tokenizer + 的存储路径。\ **同时,后续训练过程需要使用新保存的 tokenizer 而非原始 + tokenizer。** + +步骤 2:导出模板 config 文件 +---------------------------- + +XTuner 中目前提供了训练 Mistral 的模板 config,使用命令: + +.. code-block:: console + + $ xtuner copy-cfg mistral_7b_w_tokenized_dataset . + +.. note:: + 当前目录下会有一个名为 ``mistral_7b_w_tokenized_dataset_copy.py`` 的新文件 + + +步骤 3:修改模板 config 文件 +---------------------------- + +.. note:: + 修改模板 config 文件中的训练数据路径为真实数据路径,其中 `/path/to/tokenized/data` 需要基于 Step 1 中的 `/path/to/tokenized/data` 进一步指定 train folder,即 `/path/to/tokenized/data/chatml_llamav13_32k/train/` 。 + +.. note:: + 需要修改 tokenizer 路径为步骤 1 保存的路径 `/path/to/save/new/tokenizer`。 + +.. warning:: + 由于步骤 1 扩充了 tokenizer 的词表,因此需要将新 tokenizer 传入 `SupervisedFinetune` 中,以扩展语言模型的词表大小。 + +.. code:: diff + + ... 
+ + ####################################################################### + # PART 1 Settings # + ####################################################################### + # Model + pretrained_model_name_or_path = 'mistralai/Mistral-7B-v0.1' + # 已经使用 Internlm2 的对话模板覆盖了 Mistral 的原有模板,new tokenizer 中已经 + # 添加了 Internlm2 对话模板中的特殊字符。 + # 请参考 docs/zh_cn/user_guides/finetune_custom_dataset.md + - tokenizer_path = '/new/tokenizer/path' + + tokenizer_path = '/path/to/save/new/tokenizer' + use_varlen_attn = True + + # Data + - dataset_folder = '/path/to/sft/data/folder' + + dataset_folder = '/path/to/tokenized/data/chatml_llamav13_32k/train' + # 已经使用 Internlm2 的对话模板覆盖了 Mistral 的原有模板 + prompt_template = PROMPT_TEMPLATE.internlm2_chat + max_length = 32768 + pack_to_max_length = True + ... + + ####################################################################### + # PART 2 Model & Tokenizer # + ####################################################################### + model = dict( + + tokenizer=tokenizer, + ...) + +.. tip:: + 在使用 DeepSpeed 训练模型时,如需在保存 checkpoint + 时只保存模型权重,而不保存优化器状态,可参考以下步骤: + + 1. 确保 mmengine 版本大于等于 0.10.3 + + .. code-block:: console + + $ pip install 'mmengine>=0.10.3' + + 2. 修改 Config 文件,CheckpointHook 增加 save_optimizer=False + + .. code:: diff + + default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 100 iterations. + logger=dict(type=LoggerHook, interval=1), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per epoch. + checkpoint=dict( + type=CheckpointHook, + + save_optimizer=False, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), + ) + +.. warning:: + + 设置 ``save_optimizer=False`` 后,训练过程不可 resume 。 + + +步骤 4:获取数据顺序 (可选) +----------------------------- + +训练数据的提供顺序可能会对模型的最终训练成果产生影响。鉴于不同集群中通过 +``os.walk`` +方法所得到的结果可能存在差异,为了确保训练结果的稳定性和可控性,建议首先确立所有训练数据文件的相对次序,并在后续的训练阶段中,使用这一相对次序来替代 +``os.walk`` 方法。 + +运行下面的代码可获取数据顺序,并存为 txt 文件: + +.. code-block:: console + + $ python xtuner/tools/get_data_order.py \ + $ --data-folder /path/to/tokenized/data \ + $ --save-folder /folder/to/save/data/order \ + $ --file-type ${file_type} + +.. tip:: + ``--file-type ${file_type}`` 表示需要统计所有以 ``${file_type}`` + 为文件名后缀的文件的顺序。 + + 例如,需要获取 ``/path/to/tokenized/data`` 路径下所有以 ``.bin`` + 结尾的文件的顺序,并保存在当前路径下,那么上述命令需要改为: + + .. code-block:: console + + $ python xtuner/tools/get_data_order.py \ + $ --data-folder /path/to/tokenized/data \ + $ --save-folder . \ + $ --file-type .bin + +获得数据顺序文件后,还需要在 config 中设置数据顺序文件路径: + +.. code:: diff + + ... + ####################################################################### + # PART 3 Dataset & Dataloader # + ####################################################################### + train_dataset = dict( + type=build_packed_dataset, + dataset_cfg=dict( + type=load_intern_repo_tokenized_dataset, + - data_order_path=None, + + data_order_path='/folder/to/save/data/order/'+'data_order.txt', + folder=dataset_folder, + min_length=0, + file_type='.bin' + ), + packed_length=max_length, + seed=1024) + + +步骤 5:启动训练 +---------------- + +注:训练前期(几十个 iters)loss 偏高是正常现象,因为模型需要时间学习 +Internlm2 的对话模板。 + +在 slurm 集群调度系统中可以通过以下命令启动训练: + +.. code-block:: console + + $ srun ${SRUN_ARGS} xtuner train mistral_7b_w_tokenized_dataset_copy.py --launcher slurm --deepspeed deepspeed_zero1 + +若出现 OOM 现象,可尝试使用 zero2 或 zero3。以下命令可以使用 zero 3 +显存优化策略进行训练: + +.. 
code-block:: console

   $ srun ${SRUN_ARGS} xtuner train mistral_7b_w_tokenized_dataset_copy.py --launcher slurm --deepspeed deepspeed_zero3

+在阿里云 DLC 中可通过以下命令启动训练:
+
+.. code:: diff
+
+   export NCCL_IB_TC=136
+   export NCCL_IB_SL=5
+   export NCCL_IB_GID_INDEX=3
+   export NCCL_SOCKET_IFNAME=bond0
+   export NCCL_DEBUG=INFO
+   export NCCL_IB_HCA=mlx5
+   export NCCL_IB_TIMEOUT=22
+   export NCCL_IB_QPS_PER_CONNECTION=8
+   export NCCL_NET_PLUGIN=none
+
+   export NCCL_BUFFSIZE=2097152
+   export PYTORCH_CUDA_ALLOC_CONF=max_split_size_mb:512
+   - export EXP_NAME=debug
+   + export EXP_NAME=your_exp_name
+   export PYTHONPATH='.':$PYTHONPATH
+   source ~/.bashrc
+
+   cd /path/to/xtuner
+
+   conda activate conda_env_name
+
+   export NPROC_PER_NODE=${KUBERNETES_CONTAINER_RESOURCE_GPU}
+   export PORT=${MASTER_PORT}
+   export NNODES=${WORLD_SIZE}
+   export NODE_RANK=${RANK}
+   export ADDR=${MASTER_ADDR}
+
+   echo ${KUBERNETES_CONTAINER_RESOURCE_GPU}
+   echo ${WORLD_SIZE}
+   echo ${MASTER_PORT}
+   echo ${MASTER_ADDR}
+   echo ${RANK}
+   xtuner train mistral_7b_w_tokenized_dataset_copy.py \
+       --deepspeed deepspeed_zero1 \
+       --work-dir work_dirs/${EXP_NAME}
+
+步骤 6:转模型
+--------------
+
+deepspeed 转 hf:
+
+.. code-block:: console
+
+   $ python xtuner/tools/model_converters/pth_to_hf.py mistral_7b_w_tokenized_dataset_copy.py /src/model/path /hf/dst/model/path
+
+hf 转 Turbomind:
+
+.. code-block:: console
+
+   $ lmdeploy convert internlm2-chat-7b /hf/dst/model/path --dst-path /turbomind/dst/model/path
+
+步骤 7:Turbomind 评测
+----------------------
+
+请参考 `OpenCompass LMDeploy 评测文档 `__\ 。
diff --git a/data/xtuner/docs/zh_cn/internevo_migration/ftdp_dataset/processed_normal_chat.rst b/data/xtuner/docs/zh_cn/internevo_migration/ftdp_dataset/processed_normal_chat.rst
new file mode 100644
index 0000000000000000000000000000000000000000..c3882b515d18401099595bd5c5f9ff916b989a71
--- /dev/null
+++ b/data/xtuner/docs/zh_cn/internevo_migration/ftdp_dataset/processed_normal_chat.rst
@@ -0,0 +1,171 @@
+.. _case3:
+
+Processed 普通对话数据集
+=======================================
+
+.. warning::
+   非 FTDP(一款闭源数据处理工具)用户可跳过此文档
+
+使用尚未 token 化的 ftdp 数据进行训练,保持待训练模型的对话模板不变,且不需要进行离线处理的场景。
+
+步骤 1:导出模板 config 文件
+----------------------------
+
+XTuner 中目前提供了训练 Internlm2 的模板 config,使用命令:
+
+.. code-block:: console
+
+   $ xtuner copy-cfg internlm2_7b_w_untokenized_dataset .
+
+.. note::
+   当前目录下会有一个名为 ``internlm2_7b_w_untokenized_dataset_copy.py`` 的新文件
+
+
+步骤 2:修改模板 config 文件
+----------------------------
+
+修改模板 config 文件中的训练数据路径为真实数据路径,路径中的所有以 ``.json`` 为后缀的数据将会作为训练数据:
+
+.. code:: diff
+
+   ...
+
+   #######################################################################
+   #                        PART 1  Settings                             #
+   #######################################################################
+   # Model
+   pretrained_model_name_or_path = 'internlm/internlm2-7b'
+   use_varlen_attn = True
+
+   # Data
+   - dataset_folder = '/mnt/petrelfs/share_data/caoweihan/v1_sample_with_legal_cate'
+   + dataset_folder = '/path/to/untokenized/data'
+   prompt_template = PROMPT_TEMPLATE.internlm2_chat
+   max_length = 32768
+   pack_to_max_length = True
+   ...
+
+.. _step-3-获取数据顺序-可选:
+
+步骤 3:获取数据顺序 (可选)
+-----------------------------
+
+训练数据的提供顺序可能会对模型的最终训练成果产生影响。鉴于不同集群中通过 ``os.walk`` 方法所得到的结果可能存在差异,为了确保训练结果的稳定性和可控性,建议首先确立所有训练数据文件的相对次序,并在后续的训练阶段中,使用这一相对次序来替代 ``os.walk`` 方法。
+
+运行下面的代码可获取数据顺序,并存为 txt 文件:
+
+.. code-block:: console
+
+   $ python xtuner/tools/get_data_order.py \
+   $     --data-folder /path/to/untokenized/data \
+   $     --save-folder /folder/to/save/data/order \
+   $     --file-type ${file_type}
+
+.. 
tip::
+   ``--file-type ${file_type}`` 表示需要统计所有以 ``${file_type}`` 为文件名后缀的文件的顺序。
+
+   例如,需要获取 ``/path/to/untokenized/data`` 路径下所有以 ``.json`` 结尾的文件的顺序,并保存在当前路径下,那么上述命令需要改为:
+
+   .. code-block:: console
+
+      $ python xtuner/tools/get_data_order.py \
+      $     --data-folder /path/to/untokenized/data \
+      $     --save-folder . \
+      $     --file-type .json
+
+获得数据顺序文件后,还需要在 config 中设置数据顺序文件路径:
+
+.. code:: diff
+
+   ...
+   #######################################################################
+   #                  PART 3  Dataset & Dataloader                       #
+   #######################################################################
+   train_dataset = dict(
+       type=build_packed_dataset,
+       dataset_cfg=dict(
+           type=load_intern_repo_untokenized_dataset,
+   -       data_order_path=None,
+   +       data_order_path='/folder/to/save/data/order/'+'data_order.txt',
+           folder=dataset_folder,
+           min_length=0,
+           file_type='.json'
+       ),
+       packed_length=max_length,
+       seed=1024)
+
+步骤 4:启动训练
+----------------
+
+在 slurm 集群调度系统中可以通过以下命令启动训练:
+
+.. code-block:: console
+
+   $ srun ${SRUN_ARGS} xtuner train internlm2_7b_w_untokenized_dataset_copy.py --launcher slurm --deepspeed deepspeed_zero1
+
+若出现 OOM 现象,可尝试使用 zero2 或 zero3。以下命令可以使用 zero 3 显存优化策略进行训练:
+
+.. code-block:: console
+
+   $ srun ${SRUN_ARGS} xtuner train internlm2_7b_w_untokenized_dataset_copy.py --launcher slurm --deepspeed deepspeed_zero3
+
+在阿里云 DLC 中可通过以下命令启动训练:
+
+.. code:: diff
+
+   export NCCL_IB_TC=136
+   export NCCL_IB_SL=5
+   export NCCL_IB_GID_INDEX=3
+   export NCCL_SOCKET_IFNAME=bond0
+   export NCCL_DEBUG=INFO
+   export NCCL_IB_HCA=mlx5
+   export NCCL_IB_TIMEOUT=22
+   export NCCL_IB_QPS_PER_CONNECTION=8
+   export NCCL_NET_PLUGIN=none
+
+   export NCCL_BUFFSIZE=2097152
+   export PYTORCH_CUDA_ALLOC_CONF=max_split_size_mb:512
+   - export EXP_NAME=debug
+   + export EXP_NAME=your_exp_name
+   export PYTHONPATH='.':$PYTHONPATH
+   source ~/.bashrc
+
+   cd /path/to/xtuner
+
+   conda activate conda_env_name
+
+   export NPROC_PER_NODE=${KUBERNETES_CONTAINER_RESOURCE_GPU}
+   export PORT=${MASTER_PORT}
+   export NNODES=${WORLD_SIZE}
+   export NODE_RANK=${RANK}
+   export ADDR=${MASTER_ADDR}
+
+   echo ${KUBERNETES_CONTAINER_RESOURCE_GPU}
+   echo ${WORLD_SIZE}
+   echo ${MASTER_PORT}
+   echo ${MASTER_ADDR}
+   echo ${RANK}
+   xtuner train internlm2_7b_w_untokenized_dataset_copy.py \
+       --deepspeed deepspeed_zero1 \
+       --work-dir work_dirs/${EXP_NAME}
+
+步骤 5:转模型
+--------------
+
+deepspeed 转 hf:
+
+.. code::
+
+   python xtuner/tools/model_converters/pth_to_hf.py internlm2_7b_w_untokenized_dataset_copy.py /src/model/path /hf/dst/model/path
+
+hf 转 Turbomind:
+
+.. code::
+
+   lmdeploy convert internlm2-chat-7b /hf/dst/model/path --dst-path /turbomind/dst/model/path
diff --git a/data/xtuner/docs/zh_cn/internevo_migration/ftdp_dataset/tokenized_and_internlm2.rst b/data/xtuner/docs/zh_cn/internevo_migration/ftdp_dataset/tokenized_and_internlm2.rst
new file mode 100644
index 0000000000000000000000000000000000000000..d905aae57267eab0d566512c87a57eebadf1916f
--- /dev/null
+++ b/data/xtuner/docs/zh_cn/internevo_migration/ftdp_dataset/tokenized_and_internlm2.rst
@@ -0,0 +1,208 @@
+Tokenized 数据集 + InternLM2
+===================================
+
+.. tip::
+   Tokenized 数据集格式应与 `InternEVO 使用教程 `_ 中保持一致
+
+使用已经 token 化的 ftdp 数据训练 Internlm2 模型。
+
+步骤 1:导出模板 config 文件
+----------------------------
+
+XTuner 中目前提供了训练 Internlm2 的模板 config,使用命令:
+
+.. code-block:: console
+
+   $ xtuner copy-cfg internlm2_7b_w_tokenized_dataset .
+
+.. 
note:: + 当前目录下会有一个名为 ``internlm2_7b_w_tokenized_dataset_copy.py`` 的新文件 + +步骤 2:修改模板 config 文件 +---------------------------- + +修改模板 config 文件中的训练数据路径为真实数据路径: + +.. code-block:: diff + + ... + + ####################################################################### + # PART 1 Settings # + ####################################################################### + # Model + pretrained_model_name_or_path = 'internlm/internlm2-7b' + use_varlen_attn = True + + # Data + - dataset_folder = '/path/to/sft/data/folder' + + dataset_folder = '/path/to/tokenized/data/chatml_llamav13_32k/train' + prompt_template = PROMPT_TEMPLATE.internlm2_chat + max_length = 32768 + pack_to_max_length = True + ... + +.. tip:: + 在使用 DeepSpeed 训练模型时,如需在保存 checkpoint + 时只保存模型权重,而不保存优化器状态,可参考以下步骤: + + 1. 确保 mmengine 版本大于等于 0.10.3 + + .. code-block:: console + + $ pip install 'mmengine>=0.10.3' + + 2. 修改 Config 文件,CheckpointHook 增加 save_optimizer=False + + .. code:: diff + + default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 100 iterations. + logger=dict(type=LoggerHook, interval=1), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per epoch. + checkpoint=dict( + type=CheckpointHook, + + save_optimizer=False, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), + ) + +.. warning:: + + 设置 ``save_optimizer=False`` 后,训练过程不可 resume 。 + +.. _case4-step3: + +步骤 3:获取数据顺序 (可选) +----------------------------- + +训练数据的提供顺序可能会对模型的最终训练成果产生影响。鉴于不同集群中通过 +``os.walk`` +方法所得到的结果可能存在差异,为了确保训练结果的稳定性和可控性,建议首先确立所有训练数据文件的相对次序,并在后续的训练阶段中,使用这一相对次序来替代 +``os.walk`` 方法。 + +运行下面的代码可获取数据顺序,并存为 txt 文件: + +.. code-block:: console + + $ python xtuner/tools/get_data_order.py \ + $ --data-folder /path/to/tokenized/data \ + $ --save-folder /folder/to/save/data/order \ + $ --file-type ${file_type} + +.. tip:: + ``--file-type ${file_type}`` 表示需要统计所有以 ``${file_type}`` + 为文件名后缀的文件的顺序。 + + 例如,需要获取 ``/path/to/tokenized/data`` 路径下所有以 ``.bin`` + 结尾的文件的顺序,并保存在当前路径下,那么上述命令需要改为: + + .. code-block:: console + + $ python xtuner/tools/get_data_order.py \ + $ --data-folder /path/to/tokenized/data \ + $ --save-folder . \ + $ --file-type .bin + +获得数据顺序文件后,还需要在 config 中设置数据顺序文件路径: + +.. code:: diff + + ... + ####################################################################### + # PART 3 Dataset & Dataloader # + ####################################################################### + train_dataset = dict( + type=build_packed_dataset, + dataset_cfg=dict( + type=load_intern_repo_tokenized_dataset, + - data_order_path=None, + + data_order_path='/folder/to/save/data/order/'+'data_order.txt', + folder=dataset_folder, + min_length=0, + file_type='.bin' + ), + packed_length=max_length, + seed=1024) + +步骤 4:启动训练 +---------------- + +在 slurm 集群调度系统中可以通过以下命令启动训练: + +.. code-block:: console + + $ srun ${SRUN_ARGS} xtuner train internlm2_7b_w_tokenized_dataset_copy.py --launcher slurm --deepspeed deepspeed_zero1 + +若出现 OOM 现象,可尝试使用 zero2 或 zero3。以下命令可以使用 zero 3 +显存优化策略进行训练: + +.. code-block:: console + + $ srun ${SRUN_ARGS} xtuner train internlm2_7b_w_tokenized_dataset_copy.py --launcher slurm --deepspeed deepspeed_zero3 + +在阿里云 DLC 中可通过以下命令启动训练: + +.. 
code:: diff + + export NCCL_IB_TC=136 + export NCCL_IB_SL=5 + export NCCL_IB_GID_INDEX=3 + export NCCL_SOCKET_IFNAME=bond0 + export NCCL_DEBUG=INFO + export NCCL_IB_HCA=mlx5 + export NCCL_IB_TIMEOUT=22 + export NCCL_IB_QPS_PER_CONNECTION=8 + export NCCL_NET_PLUGIN=none + + export NCCL_BUFFSIZE=2097152 + export PYTORCH_CUDA_ALLOC_CONF=max_split_size_mb:512 + - export EXP_NAME=debug + + export EXP_NAME=your_exp_name + export PYTHONPATH='.':$PYTHONPATH + source ~/.bashrc + + cd /path/to/xtuner + + conda activate conda_env_name + + export NPROC_PER_NODE=${KUBERNETES_CONTAINER_RESOURCE_GPU} + export PORT=${MASTER_PORT} + export NNODES=${WORLD_SIZE} + export NODE_RANK=${RANK} + export ADDR=${MASTER_ADDR} + + echo ${KUBERNETES_CONTAINER_RESOURCE_GPU} + echo ${WORLD_SIZE} + echo ${MASTER_PORT} + echo ${MASTER_ADDR} + echo ${RANK} + xtuner train internlm2_7b_w_tokenized_dataset_copy.py \ + --deepspeed deepspeed_zero1 \ + --work-dir work_dirs/${EXP_NAME} + +步骤 5:转模型 +-------------- + +deepspeed 转 hf: + +.. code-block:: console + + $ python xtuner/tools/model_converters/pth_to_hf.py internlm2_7b_w_tokenized_dataset_copy.py /src/model/path /hf/dst/model/path + +hf 转 Turbomind: + +.. code-block:: console + + $ lmdeploy convert internlm2-chat-7b /hf/dst/model/path --dst-path /turbomind/dst/model/path + +步骤 6:Turbomind 评测 +---------------------- + +请参考 `OpenCompass LMDeploy +评测文档 `__\ 。 diff --git a/data/xtuner/docs/zh_cn/make.bat b/data/xtuner/docs/zh_cn/make.bat new file mode 100644 index 0000000000000000000000000000000000000000..954237b9b9f2b248bb1397a15c055c0af1cad03e --- /dev/null +++ b/data/xtuner/docs/zh_cn/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. +set BUILDDIR=_build + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. 
+	echo.If you don't have Sphinx installed, grab it from
+	echo.https://www.sphinx-doc.org/
+	exit /b 1
+)
+
+if "%1" == "" goto help
+
+%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+goto end
+
+:help
+%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+
+:end
+popd
diff --git a/data/xtuner/docs/zh_cn/models/supported.md b/data/xtuner/docs/zh_cn/models/supported.md
new file mode 100644
index 0000000000000000000000000000000000000000..df7ecaa58d9617d75a73c76b215c5c8a1ddab9eb
--- /dev/null
+++ b/data/xtuner/docs/zh_cn/models/supported.md
@@ -0,0 +1 @@
+# 已支持的模型
diff --git a/data/xtuner/docs/zh_cn/notes/changelog.md b/data/xtuner/docs/zh_cn/notes/changelog.md
new file mode 100644
index 0000000000000000000000000000000000000000..997c7c087f4da6bae198433e2fc3f0d1dd5eff03
--- /dev/null
+++ b/data/xtuner/docs/zh_cn/notes/changelog.md
@@ -0,0 +1,25 @@
+
+
+# 变更日志
+
+## v0.1.0 (2023.08.30)
+
+XTuner 正式发布!🔥🔥🔥
+
+### 亮点
+
+- XTuner 支持使用消费级显卡微调大语言模型。微调 7B 大语言模型的最低显存开销仅为 **8GB**。
+- XTuner 支持多种大语言模型、数据集、微调算法和训练流程。
+- 众多微调好的 adapter 也同步发布,包括调色师、插件对话等多种玩法。更多信息,请访问 [HuggingFace 仓库](https://huggingface.co/xtuner)。
diff --git a/data/xtuner/docs/zh_cn/preparation/pretrained_model.rst b/data/xtuner/docs/zh_cn/preparation/pretrained_model.rst
new file mode 100644
index 0000000000000000000000000000000000000000..727372ffd0b71e4a6ed3f3fc6ff93c7e321256c0
--- /dev/null
+++ b/data/xtuner/docs/zh_cn/preparation/pretrained_model.rst
@@ -0,0 +1,143 @@
+==================
+预训练模型权重
+==================
+
+``HuggingFace`` 和 ``ModelScope`` 提供了多种下载预训练模型权重的方法,本节将以下载 internlm2-chat-7b 为例,介绍如何快速下载预训练模型的权重。
+
+.. note::
+
+   若 HuggingFace 访问受限,请优先考虑使用 ModelScope 进行下载
+
+
+[推荐] 方法 1:``snapshot_download``
+========================================
+
+
+HuggingFace
+------------
+
+``huggingface_hub.snapshot_download`` 支持下载指定的 HuggingFace Hub 模型权重,并且支持多线程下载。您可以利用下列代码并行下载模型权重:
+
+.. code:: python
+
+   from huggingface_hub import snapshot_download
+
+   snapshot_download(repo_id='internlm/internlm2-chat-7b', local_dir='./internlm2-chat-7b', max_workers=20)
+
+.. note::
+
+   其中,\ ``repo_id`` 表示模型在 HuggingFace Hub 的名字、\ ``local_dir`` 表示期望存储到的本地路径、\ ``max_workers`` 表示下载的最大并行数。
+
+.. tip::
+
+   如果未指定 ``local_dir``\ ,则将下载至 HuggingFace 的默认 cache 路径中(\ ``~/.cache/huggingface/hub``\ )。若要修改默认 cache 路径,需要修改相关环境变量:
+
+   .. code:: console
+
+      $ # 默认为 `~/.cache/huggingface/`
+      $ export HF_HOME=XXXX
+
+.. tip::
+   如果觉得下载较慢(例如无法达到最大带宽等情况),可以尝试设置 ``export HF_HUB_ENABLE_HF_TRANSFER=1`` 以获得更高的下载速度。
+
+.. tip::
+   关于环境变量的更多用法,可阅读\ `这里 `__ 。
+
+
+ModelScope
+-----------
+
+``modelscope.snapshot_download`` 支持下载指定的模型权重,您可以利用下列命令下载模型:
+
+.. code:: python
+
+   from modelscope import snapshot_download
+
+   snapshot_download(model_id='Shanghai_AI_Laboratory/internlm2-chat-7b', cache_dir='./internlm2-chat-7b')
+
+.. note::
+   其中,\ ``model_id`` 表示模型在 ModelScope 模型库的名字、\ ``cache_dir`` 表示期望存储到的本地路径。
+
+
+.. note::
+   ``modelscope.snapshot_download`` 不支持多线程并行下载。
+
+.. tip::
+
+   如果未指定 ``cache_dir``\ ,则将下载至 ModelScope 的默认 cache 路径中(\ ``~/.cache/modelscope/hub``\ )。
+
+   若要修改默认 cache 路径,需要修改相关环境变量:
+
+   .. code:: console
+
+      $ # 默认为 ~/.cache/modelscope/hub/
+      $ export MODELSCOPE_CACHE=XXXX
+
+
+
+方法 2:Git LFS
+===================
+
+HuggingFace 和 ModelScope 的远程模型仓库就是一个由 Git LFS 管理的 Git 仓库。因此,我们可以利用 ``git clone`` 完成权重的下载:
+
+.. 
code:: console + + $ git lfs install + $ # From HuggingFace + $ git clone https://huggingface.co/internlm/internlm2-chat-7b + $ # From ModelScope + $ git clone https://www.modelscope.cn/Shanghai_AI_Laboratory/internlm2-chat-7b.git + + +方法 3:``AutoModelForCausalLM`` +===================================================== + +``AutoModelForCausalLM.from_pretrained`` +在初始化模型时,将尝试连接远程仓库并自动下载模型权重。因此,我们可以利用这一特性下载模型权重。 + +HuggingFace +------------ + +.. code:: python + + from transformers import AutoModelForCausalLM, AutoTokenizer + + model = AutoModelForCausalLM.from_pretrained('internlm/internlm2-chat-7b', trust_remote_code=True) + tokenizer = AutoTokenizer.from_pretrained('internlm/internlm2-chat-7b', trust_remote_code=True) + +.. tip:: + + 此时模型将会下载至 HuggingFace 的 cache 路径中(默认为\ ``~/.cache/huggingface/hub``\ )。 + + 若要修改默认存储路径,需要修改相关环境变量: + + .. code:: console + + $ # 默认为 `~/.cache/huggingface/` + $ export HF_HOME=XXXX + +ModelScope +----------- + +.. code:: python + + from modelscope import AutoModelForCausalLM, AutoTokenizer + + model = AutoModelForCausalLM.from_pretrained('Shanghai_AI_Laboratory/internlm2-chat-7b', trust_remote_code=True) + tokenizer = AutoTokenizer.from_pretrained('Shanghai_AI_Laboratory/internlm2-chat-7b', trust_remote_code=True) + +.. tip:: + + 此时模型将会下载至 ModelScope 的 cache 路径中(默认为\ ``~/.cache/modelscope/hub``\ )。 + + 若要修改默认存储路径,需要修改相关环境变量: + + .. code:: console + + $ # 默认为 ~/.cache/modelscope/hub/ + $ export MODELSCOPE_CACHE=XXXX diff --git a/data/xtuner/docs/zh_cn/preparation/prompt_template.rst b/data/xtuner/docs/zh_cn/preparation/prompt_template.rst new file mode 100644 index 0000000000000000000000000000000000000000..709841b7fb4469ffcebdf7091394819f1f5424cc --- /dev/null +++ b/data/xtuner/docs/zh_cn/preparation/prompt_template.rst @@ -0,0 +1,237 @@ +.. _prompt_template: + +准备对话模版 +============ + +大模型的微调、对话均需要选择一个合适的对话模版(prompt template)。 +XTuner 设计了一套对话模版封装逻辑,并提供了一系列社区广泛使用的对话模版。 + +本文将从“何处需要对话模版?”、“XTuner 内置对话模版速览”、“如何选择对话模版?”、“如何自定义对话模版?”四部分展开介绍。 + +何处需要对话模版? +------------------ + +:``xtuner train``: + 需要使用对话模版将训练数据“模版化”,在训练 ``config`` 中配置 ``prompt_template`` 参数来选择对话模版 + +:``xtuner chat``: + 需要使用对话模版将对话文本“模版化”,通过 ``xtuner chat`` 命令的 ``--prompt-template`` 参数选择对话模版 + +.. note:: + + 各种推理引擎也都会用到对话模板,每个框架定义对话模板的方式都不尽相同,但最终“模板化”后的数据都是相同的 + +.. tip:: + + 请确保在训练、对话和自定义应用场景中,始终保持对话模板的一致,否则可能会出现不符合预期的结果。 + +XTuner 内置对话模版速览 +----------------------- + +XTuner 对现有大多数大语言模型的对话模版进行了实现,并集成在 +``xtuner.utils.PROMPT_TEMPLATE`` 内,用户可以直接使用。 + +.. note:: + + XTuner 内置的对话模板清单可见文末附录 + +字段约定 +~~~~~~~~ + +以 ``internlm2_chat`` 模版为例,其代码结构如下。 + +.. code:: python + + internlm2_chat=dict( + SYSTEM='<|im_start|>system\n{system}<|im_end|>\n', + INSTRUCTION=('<|im_start|>user\n{input}<|im_end|>\n' + '<|im_start|>assistant\n'), + SUFFIX='<|im_end|>', + SUFFIX_AS_EOS=True, + SEP='\n', + STOP_WORDS=['<|im_end|>']), + +- ``SYSTEM``\ :表示问答时“系统”字段的模版,其中 ``{system}`` + 指代“系统”文本。值得注意的是,该字段在多轮对话中只会出现一次,即在第一轮。 + +- ``INSTRUCTION``\ :表示问答时“指令”字段的模版,其中 ``{input}`` + 指代用户指令文本。 + +- ``SUFFIX``\ :表示“指令”字段的后缀,将会追加在每一轮问答的“回答”后面。通常,这也是一个特殊的结束符号。默认是空串\ ``''``\ 。 + +- ``SUFFIX_AS_EOS``\ :表示上述后缀是否作为结束符号。如果为 + ``True``\ ,则会取代 ``tokenizer`` 的 ``eos_token``\ ,否则,仍会使用 + ``tokenizer`` 的 ``eos_token`` 表示结束符号。默认是 ``False``\ 。 + +- ``SEP``\ :用于间隔多轮对话,将会追加在 ``INSTRUCTION`` 和 ``SUFFIX`` + 后面。默认是空串\ ``''``\ 。 + +- ``STOP_WORDS``\ :用于指明结束词,该信息将被用在文本生成阶段。值得注意的是,\ ``tokenizer`` + 的 ``eos_token`` 会被自动添加到 ``STOP_WORDS``\ ,而无需手动配置。 + +模版化结果 +~~~~~~~~~~ + +以 ``internlm2_chat`` 模版为例,其对应的单轮、多轮模版化结果如下。 + +**单轮** + +.. 
code:: + + <|im_start|>system + 你是一个无害的 AI 助手<|im_end|> + <|im_start|>user + 你是谁?<|im_end|> + <|im_start|>assistant + 我是书生浦语。<|im_end|> + +**多轮** + +.. code:: + + <|im_start|>system + 你是一个无害的 AI 助手<|im_end|> + <|im_start|>user + 你是谁?<|im_end|> + <|im_start|>assistant + 我是书生浦语。<|im_end|> + <|im_start|>user + 你的英文名字是什么?<|im_end|> + <|im_start|>assistant + InternLM<|im_end|> + +如何选择对话模版? +------------------ + +选择准确的对话模版是训练、应用模型的关键。关于如何选择对话模版,我们建议: + +:微调 chat 模型: + 使用模型所对应的对话模版,如 ``internlm2-chat`` 使用 + ``internlm2_chat``\ 、\ ``Qwen-Chat`` 使用 ``qwen_chat``\ 。 + +:全量微调 base 模型: + 任选对话模版,优先使用 chat 版模型所对应的对话模版 。 + + +:LoRA 微调 base 模型: + | 使用默认对话模版 ``default``\ 。这是由于 LoRA / + QLoRA 微调默认会关闭 ``embed_tokens`` 和 ``lm_head`` + 的训练,此时如果引入未学习过的特殊 token(如对话模版中的 + ``<|im_start|>``\ ),则会影响模型的训练。 + +.. tip:: + 通过修改 ``LoraConfig`` 可以引入 ``embed_tokens`` 和 + ``lm_head`` 的训练(会增大显存需求),进而支持任选对话模版 + + .. code:: diff + + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + + modules_to_save=['embed_tokens', 'lm_head'] # 请确保与模型中所使用的参数名一致 + task_type='CAUSAL_LM') + +.. tip:: + + 大多数的 base 模型所使用的 tokenizer 中不包含 chat + 模型对话模板中所使用的特殊 token 编码(例如 `internlm2 + chat `__ + 和 `internlm2 + base `__\ )。因此,如果要微调 + base 模型并配合使用 chat 版对话模版,需确保在 Config + 中及后续全流程使用 chat 版模型的 tokenizer。Config 中修改 tokenizer + 的方式为: + + .. code:: diff + + tokenizer = dict( + type=AutoTokenizer.from_pretrained, + - pretrained_model_name_or_path=pretrained_model_name_or_path, + + pretrained_model_name_or_path='PATH_TO_CHAT_LLM_TOKENIZER', + trust_remote_code=True, + padding_side='right') + +如何自定义对话模版? +-------------------- + +如果 XTuner +所内置的对话模版不能满足实际需求,用户可以实现自定义的对话模版。 + +具体来说,可以在 +`template.py `__ +的 ``PROMPT_TEMPLATE`` 中新增一个对话模版,并参考 “XTuner +内置对话模版速览” 章节对每个字段的描述进行自定义修改。 + +附:XTuner 内置 configs 所选择的对话模版 +---------------------------------------- + +.. 
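tip::
+
+   下表中各内置模版的具体字段定义,也可以在 Python 中直接查看(示意):
+
+   .. code:: python
+
+      from xtuner.utils import PROMPT_TEMPLATE
+
+      # 打印某一内置对话模版的字段内容
+      print(PROMPT_TEMPLATE.internlm2_chat)
+
+..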
note:: + + \*: 官方对话模版中存在特殊 token(比如 ``<|im_start|>``\ 、\ ``<|im_end|>``\ ),这类特殊 token + 在预训练阶段并未得到训练。故,使用 ``default`` 模版。 +======================================== ============== +模型 对话模版 +======================================== ============== +baichuan-inc/Baichuan-7B default\* +baichuan-inc/Baichuan-13B-Base default\* +baichuan-inc/Baichuan-13B-Chat baichuan_chat +baichuan-inc/Baichuan2-7B-Base default\* +baichuan-inc/Baichuan2-7B-Chat baichuan2_chat +baichuan-inc/Baichuan2-13B-Base default\* +baichuan-inc/Baichuan2-13B-Chat baichuan2_chat +THUDM/chatglm2-6b chatglm2 +THUDM/chatglm3-6b chatglm3 +THUDM/chatglm3-6b-base chatglm3 +deepseek-ai/deepseek-coder-6.7b-base deepseek_coder +deepseek-ai/deepseek-coder-6.7b-instruct deepseek_coder +internlm/internlm-7b default\* +internlm/internlm-20b default\* +internlm/internlm-chat-7b internlm_chat +internlm/internlm-chat-20b internlm_chat +huggyllama/llama-7b default +meta-llama/Llama-2-7b-hf llama2_chat +meta-llama/Llama-2-7b-chat-hf llama2_chat +meta-llama/Llama-2-70b-hf llama2_chat +lmsys/vicuna-7b-v1.5 vicuna +lmsys/vicuna-13b-v1.5 vicuna +mistralai/Mistral-7B-v0.1 mistral +mistralai/Mixtral-8x7B-v0.1 mixtral +mistralai/Mixtral-8x7B-Instruct-v0.1 mixtral +Qwen/Qwen-1_8B default\* +Qwen/Qwen-1_8B-Chat qwen_chat +Qwen/Qwen-7B default\* +Qwen/Qwen-7B-Chat qwen_chat +Qwen/Qwen-72B default\* +Qwen/Qwen-72B-Chat qwen_chat +bigcode/starcoder default +01-ai/Yi-6B default +01-ai/Yi-34B default +HuggingFaceH4/zephyr-7b-beta zephyr +deepseek-ai/deepseek-moe-16b-base deepseek_moe +deepseek-ai/deepseek-moe-16b-chat deepseek_moe +internlm/internlm2-1_8b default\* +internlm/internlm2-7b default\* +internlm/internlm2-20b default\* +internlm/internlm2-chat-1_8b internlm2_chat +internlm/internlm2-chat-7b internlm2_chat +internlm/internlm2-chat-20b internlm2_chat +Qwen/Qwen1.5-0.5B default\* +Qwen/Qwen1.5-0.5B-Chat qwen_chat +Qwen/Qwen1.5-1.8B default\* +Qwen/Qwen1.5-1.8B-Chat qwen_chat +Qwen/Qwen1.5-4B default\* +Qwen/Qwen1.5-4B-Chat qwen_chat +Qwen/Qwen1.5-7B default\* +Qwen/Qwen1.5-7B-Chat qwen_chat +Qwen/Qwen1.5-14B default\* +Qwen/Qwen1.5-14B-Chat qwen_chat +Qwen/Qwen1.5-72B default\* +Qwen/Qwen1.5-72B-Chat qwen_chat +google/gemma-2b default\* +google/gemma-2b-it gemma +google/gemma-7b default\* +google/gemma-7b-it gemma +======================================== ============== diff --git a/data/xtuner/docs/zh_cn/reward_model/images/preference_data.png b/data/xtuner/docs/zh_cn/reward_model/images/preference_data.png new file mode 100644 index 0000000000000000000000000000000000000000..a18ea64497b35cc5838febf3005c3bca9ec9ce7a Binary files /dev/null and b/data/xtuner/docs/zh_cn/reward_model/images/preference_data.png differ diff --git a/data/xtuner/docs/zh_cn/reward_model/images/sequence_parallel.png b/data/xtuner/docs/zh_cn/reward_model/images/sequence_parallel.png new file mode 100644 index 0000000000000000000000000000000000000000..53f86c81aa66d94d5cb523f6fa20e2c27e2f5bc1 Binary files /dev/null and b/data/xtuner/docs/zh_cn/reward_model/images/sequence_parallel.png differ diff --git a/data/xtuner/docs/zh_cn/reward_model/images/var_len_atten.png b/data/xtuner/docs/zh_cn/reward_model/images/var_len_atten.png new file mode 100644 index 0000000000000000000000000000000000000000..3e60777d2063d925176799f49a1e821a24ff0b2f Binary files /dev/null and b/data/xtuner/docs/zh_cn/reward_model/images/var_len_atten.png differ diff --git a/data/xtuner/docs/zh_cn/reward_model/modify_settings.md b/data/xtuner/docs/zh_cn/reward_model/modify_settings.md new file mode 100644 index 
0000000000000000000000000000000000000000..c56b04115c06c75c68fe627c9f471cf70ad00062 --- /dev/null +++ b/data/xtuner/docs/zh_cn/reward_model/modify_settings.md @@ -0,0 +1,100 @@ +## 修改 Reward Model 训练配置 + +本章节仅介绍与 Reward Model 训练相关的配置参数,更多 XTuner 配置文件的细节,请参考[修改训练配置](https://xtuner.readthedocs.io/zh-cn/latest/training/modify_settings.html) + +### 损失函数 + +XTuner 使用了 [Bradley–Terry 模型](https://en.wikipedia.org/wiki/Bradley%E2%80%93Terry_model) 作为 Reward Model 的偏好建模方式,你可以指定 `loss_type="ranking"` 来使用 ranking loss。XTuner 中也实现了 InternLM2 中提出的 focal 损失函数,它通过调整难易样本的权重来避免过拟合,可以设置 `loss_type="focal"` 来使用该损失函数。对于该损失函数的详细说明,请参考 [InternLM2 技术报告](https://arxiv.org/abs/2403.17297)。 + +另外,为了使 reward model 输出的 score 数值保持稳定,我们还在 loss 中额外增加了一个约束项,你可以指定 `penalty_type='log_barrier'` 或是 `penalty_type='L2'` 以启用对数约束或是L2约束。 + +```python +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +loss_type = 'focal' # 'ranking' or 'focal' +penalty_type = 'log_barrier' # 'log_barrier' or 'L2' +``` + +### 修改模型 + +用户可以修改 `pretrained_model_name_or_path` 对预训练模型进行修改。 + +需要注意的是,由于 XTuner 通过对数据的末尾添加 `<|reward|>` 特殊 token 的方式计算 reward 得分,因此当切换模型的词表发生变化时,该特殊 token 的 id 也需要进行相应的修改,我们通常会使用词表末尾未使用的 token 作为 reward token。 + +例如,在 InternLM2 中我们使用 `[UNUSED_TOKEN_130]` 作为 reward token: + +```python +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-chat-1_8b-sft' +reward_token_id = 92527 # use [UNUSED_TOKEN_130] as reward token +``` + +如果用户将模型切换为llama3,我们则可以使用 `<|reserved_special_token_0|>` 作为 reward token: + +```python +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Meta-Llama-3-8B-Instruct' +reward_token_id = 128002 # use <|reserved_special_token_0|> as reward token +``` + +### 训练数据 + +在 Reward Model 训练中,你可以通过 `max_length` 来指定单个样本序列的最大 token 数,XTuner 会自动对数据进行截断或是填充。 + +```python +# Data +max_length = 2048 +``` + +在配置文件中,我们通过 `train_dataset` 字段来指定训练数据集,你可以通过 `dataset` 字段指定数据集的加载方式,通过 `dataset_map_fn` 字段指定数据集的映射函数。 + +```python +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataset = dict( + type=build_preference_dataset, + dataset=dict( + type=load_dataset, + path='argilla/ultrafeedback-binarized-preferences-cleaned'), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=orpo_dpo_mix_40k_map_fn, + is_dpo=False, + is_reward=True, + reward_token_id=reward_token_id, + num_proc=32, + use_varlen_attn=use_varlen_attn, + max_packed_length=max_packed_length, + shuffle_before_pack=True, +) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict( + type=preference_collate_fn, use_varlen_attn=use_varlen_attn)) +``` + +上述配置中,我们使用了 `load_dataset` 来加载 huggingface 上的 `argilla/ultrafeedback-binarized-preferences-cleaned` 数据集,使用 `orpo_dpo_mix_40k_map_fn` 作为数据集映射函数(这是因为 `orpo_dpo_mix_40k` 与 `ultrafeedback-binarized-preferences-cleaned` 
的格式相同,因此这里共用了同一个映射函数)。 + +关于如何处理数据集以及如何编写数据集映射函数,请参考[偏好数据集章节](./preference_data.md)。 + +### 加速训练 + +在使用偏好数据训练时,我们推荐您开启[变长注意力机制](https://xtuner.readthedocs.io/zh-cn/latest/acceleration/varlen_flash_attn.html), 以避免单个偏好内的 chosen 和 rejected 的样本长度差异造成的显存浪费。你可以通过 `use_varlen_attn=True` 来开启变长注意力机制。 + +XTuner 中还支持了大量的训练加速方法,关于它们的使用方法,请参考[加速策略章节](https://xtuner.readthedocs.io/zh-cn/latest/acceleration/hyper_parameters.html)。 diff --git a/data/xtuner/docs/zh_cn/reward_model/overview.md b/data/xtuner/docs/zh_cn/reward_model/overview.md new file mode 100644 index 0000000000000000000000000000000000000000..6c7c976ac375e91466198ee18c1127ba56c59dfc --- /dev/null +++ b/data/xtuner/docs/zh_cn/reward_model/overview.md @@ -0,0 +1,43 @@ +## Reward Model 介绍 + +### 简介 + +Reward Model(奖励模型)是强化学习过程中一个关键的组成部分。它的主要任务是根据给定的输入和反馈来预测奖励值,从而指导学习算法的方向。在RLHF(Reinforcement Learning from Human Feedback)中,Reward Model 通过整合人类反馈,帮助强化学习算法更有效地优化策略。 + +在大语言模型训练中,Reward Model 通常指的是偏好模型(Preference Model)。通过在训练时提供相同提示词的好与坏(chosen&rejected)的回复来拟合人类的偏好,并在推理时预测出一个奖励值,以指导 RLHF 过程中 Actor 模型的优化过程。 + +Reward Model的应用场景包括但不限于: + +- **RLHF训练**:在使用 Proximal Policy Optimization(PPO)算法进行 RLHF 训练时,Reward Model提供奖励信号,指导模型优化策略,提高生成内容的质量并使其更贴近人类偏好。 +- **BoN采样**:在 Best-of-N(BoN)采样过程中,用户可以使用 Reward Model 对同一个提示词的多条回复进行打分,并选择奖励得分最高的生成结果,从而提升模型的输出效果。 +- **数据构造**:Reward Model 可以用于评估和过滤训练数据,或者也可以使用 Reward Model 替代人工标注来构造 DPO 训练数据。 + +### XTuner 中 Reward Model 训练的优势 + +XTuner 中的 Reward Model 训练具备以下显著优势: + +1. **使用最新的训练技巧**:XTuner 中集成了 InternLM2 中的 Reward Model 训练损失函数,可以稳定奖励得分的数值范围,也可以减少在简单样本上的过拟合(具体可参考 [InternLM2 技术报告](https://arxiv.org/abs/2403.17297))。 + +2. **减少显存浪费**:由于偏好数据中的 chosen 和 rejected 数据通常存在长度上的差异,因此在训练数据的拼接时会存在填充(padding token),造成显存浪费。在 XTuner 中,基于 Flash Attention2 中的变长注意力功能,我们在训练过程中通过将偏好数据打包到同一个序列中,显著减少了由于 padding token 带来的显存浪费。这不仅提高了显存的利用效率,还使得在相同硬件条件下可以训练更大的模型或处理更多的数据。 + +![img](./images/var_len_atten.png) + +3. **高效训练**:借助 XTuner 的 QLoRA 训练功能,我们能够仅对 Reward Model 的 Value Head 进行全参数训练,而对语言模型本身使用 QLoRA 微调,大幅降低了模型训练的显存开销。 + +4. 
**长文本训练**: 借助 XTuner 的序列并行功能,能够对长文本数据进行训练。 + +![img](./images/sequence_parallel.png) + +### 开始训练 + +请参[阅快速上手](./quick_start.md)来了解最基本的概念,若希望了解更多训练参数配置相关的内容,请参考[修改Reward Model配置](./modify_settings.md)章节。 + +### 开源模型 + +我们使用 XTuner 训练了 InternLM2 技术报告中的 Reward Model,欢迎下载使用: + +| Model | Transformers(HF) | ModelScope(HF) | OpenXLab(HF) | RewardBench Score | +| ------------------------- | -------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------- | +| **InternLM2-1.8B-Reward** | [🤗internlm2-1_8b-reward](https://huggingface.co/internlm/internlm2-1_8b-reward) | [internlm2-1_8b-reward](https://modelscope.cn/models/Shanghai_AI_Laboratory/internlm2-1_8b-reward/summary) | [![Open in OpenXLab](https://cdn-static.openxlab.org.cn/header/openxlab_models.svg)](https://openxlab.org.cn/models/detail/OpenLMLab/internlm2-1_8b-reward) | 80.6 | +| **InternLM2-7B-Reward** | [🤗internlm2-7b-reward](https://huggingface.co/internlm/internlm2-7b-reward) | [internlm2-7b-reward](https://modelscope.cn/models/Shanghai_AI_Laboratory/internlm2-7b-reward/summary) | [![Open in OpenXLab](https://cdn-static.openxlab.org.cn/header/openxlab_models.svg)](https://openxlab.org.cn/models/detail/OpenLMLab/internlm2-7b-reward) | 86.6 | +| **InternLM2-20B-Reward** | [🤗internlm2-20b-reward](https://huggingface.co/internlm/internlm2-20b-reward) | [internlm2-20b-reward](https://modelscope.cn/models/Shanghai_AI_Laboratory/internlm2-20b-reward/summary) | [![Open in OpenXLab](https://cdn-static.openxlab.org.cn/header/openxlab_models.svg)](https://openxlab.org.cn/models/detail/OpenLMLab/internlm2-20b-reward) | 89.5 | diff --git a/data/xtuner/docs/zh_cn/reward_model/preference_data.md b/data/xtuner/docs/zh_cn/reward_model/preference_data.md new file mode 100644 index 0000000000000000000000000000000000000000..1dd296053a99d832467b86ffd30b04639aaeee71 --- /dev/null +++ b/data/xtuner/docs/zh_cn/reward_model/preference_data.md @@ -0,0 +1,110 @@ +## 偏好数据集 + +### 简介 + +XTuner 的 Reward Model 与 DPO、ORPO 等依赖偏好数据的算法都采用了同样的数据格式,偏好数据集中的每一条训练样本需要包含以下三个字段:`prompt`、`chosen`、`rejected`。其中每个字段的值都使用了 [OpenAI chat message](https://platform.openai.com/docs/api-reference/chat/create) 格式。一个具体的例子如下所示: + +```json +{ + "prompt": [ + { + "role": "system", + "content": "You are a helpful assistant." + }, + { + "role": "user", + "content": "Who won the world series in 2020?" + }, + { + "role": "assistant", + "content": "The Los Angeles Dodgers won the World Series in 2020." + }, + { + "role": "user", + "content": "Where was it played?" + } + ], + "chosen": [ + { + "role": "assistant", + "content": "The 2020 World Series was played at Globe Life Field in Arlington, Texas." + } + ], + "rejected": [ + { + "role": "assistant", + "content": "I don't know." 
+ } + ] +} +``` + +当进行 Reward Model 训练或是 DPO 训练时,xtuner 会根据训练任务类型的不同,将偏好数据集处理为不同的训练标签。 + +![img](./images/preference_data.png) + +如上图所示,当进行 Reward Model 训练时,我们参考 ChatGPT 的训练方式,在对话数据的最后添加一个特殊的`<|reward|>` token,只对该 token 输出的 logits 计算损失。而当进行 DPO 系列算法的训练时,我们则会屏蔽掉 prompt 部分的 token,只对 chosen 和 rejected 的回复部分计算损失。在配置文件中,我们通过 dataset 中的 `is_reward` 和 `is_dpo` 两个字段来控制数据集的类型。 + +### 使用自定义数据进行训练 + +在理解了单个样本的数据格式之后,我们只需要将转换好的数据保存为[JSON Lines](https://jsonlines.org/)格式即可。文件中的每一行都是一个样本。 + +```json +{"prompt": [{"content": "How can I learn to better regulate my emotions as a grown adult?", "role": "user"}], "chosen": [{"content": "You could do emotional regulation exercises with me, and try to notice and practice ways that you could better control your emotions in real life.", "role": "assistant"}], "rejected": [{"content": "Do you mean how can you learn to be more emotionally mature?", "role": "assistant"}]} +{"prompt": [{"content": "What are some interesting things I can add to a grilled cheese sandwich?", "role": "user"}, {"content": "A tomato, a slice of pickle, some chopped pickles, some chopped green onions. Also, some mayonnaise if you like.", "role": "assistant"}, {"content": "Sounds great. Anything else?", "role": "user"}], "chosen": [{"content": "If you’d like something a little different, I recommend grating some horseradish on top of the sandwich.", "role": "assistant"}], "rejected": [{"content": "Maybe some nice fresh basil?", "role": "assistant"}]} +{"prompt": [{"content": "How do I bob for apples?", "role": "user"}], "chosen": [{"content": "Ah! Do you mean the game of “applesauce,” or do you mean the idea of “to bob for apples”?", "role": "assistant"}], "rejected": [{"content": "Sorry, I don’t know that term.", "role": "assistant"}]} +...... +``` + +在准备完自定义数据集之后,你需要将你保存的数据路径填入配置文件中的 `data_files` 字段,你可以同时加载多个 jsonl 数据进行训练。 + +```python +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=build_preference_dataset, + dataset=dict( + type=load_jsonl_dataset, + data_files=[ + '/your/jsonl/path/here.jsonl', + '/your/another/jsonl/path/here.jsonl' + ]), +) +``` + +### 使用开源数据集进行训练 + +与 XTuner 配置 SFT 数据一样,在使用 huggingface 上的开源数据集时,我们只需要定义映射函数 map_fn,将开源数据集格式处理为 XTuner 中的数据格式即可。 + +这里我们以 Intel/orca_dpo_pairs 为例,该数据集有 `system`、`question`、`chosen`、`rejected` 四个字段,并且每个字段的值为 text 而非 [OpenAI chat message](https://platform.openai.com/docs/api-reference/chat/create) 格式。因此我们需要为该数据集定义一个 map_fn: + +```python +def intel_orca_dpo_map_fn(example): + prompt = [{ + 'role': 'system', + 'content': example['system'] + }, { + 'role': 'user', + 'content': example['question'] + }] + chosen = [{'role': 'assistant', 'content': example['chosen']}] + rejected = [{'role': 'assistant', 'content': example['rejected']}] + return {'prompt': prompt, 'chosen': chosen, 'rejected': rejected} +``` + +通过代码可以看到,`intel_orca_dpo_map_fn` 对原数据中的四个字段进行处理,将其转换为了 `prompt`、`chosen`、`rejected` 三个字段,并且每个字段都处理为了[OpenAI chat message](https://platform.openai.com/docs/api-reference/chat/create) 格式,确保了后续数据处理流程的统一。 + +完成了 map_fn 的定义之后,需要在配置文件中 import 该函数,并在 `dataset_map_fn` 字段中进行配置。 + +```python +train_dataset = dict( + type=build_preference_dataset, + dataset=dict( + type=load_dataset, + path='Intel/orca_dpo_pairs'), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=intel_orca_dpo_map_fn, +) +``` diff --git a/data/xtuner/docs/zh_cn/reward_model/quick_start.md 
b/data/xtuner/docs/zh_cn/reward_model/quick_start.md new file mode 100644 index 0000000000000000000000000000000000000000..736624cefde1f700444d20fd488b03c1dbcbbde5 --- /dev/null +++ b/data/xtuner/docs/zh_cn/reward_model/quick_start.md @@ -0,0 +1,86 @@ +## Reward Model 快速上手 + +在本章节中,我们将介绍如何使用 XTuner 训练 1.8B 的 Reward Model,以帮助您快速上手。 + +### 准备预训练模型权重 + +依据 [Training language models to follow instructions with human feedback](https://arxiv.org/abs/2203.02155) 论文中的描述,我们使用经过 SFT 的语言模型作为 Reward Model 的初始化模型。这里我们使用 [InternLM2-chat-1.8b-sft](https://huggingface.co/internlm/internlm2-chat-1_8b-sft) 作为初始化模型。 + +在训练配置文件中设置`pretrained_model_name_or_path = 'internlm/internlm2-chat-1_8b-sft'`,则会在启动训练时自动下载模型文件。若您需要手动下载模型权重,请参考[准备预训练模型权重](https://xtuner.readthedocs.io/zh-cn/latest/preparation/pretrained_model.html)章节,其中详细说明了如何从 HuggingFace 或 ModelScope 下载模型权重。这里我们附上模型的 HuggingFace 链接与 ModelScope 链接: + +- HuggingFace 链接位于:https://huggingface.co/internlm/internlm2-chat-1_8b-sft + +- ModelScope 链接位于:https://modelscope.cn/models/Shanghai_AI_Laboratory/internlm2-chat-1_8b-sft/summary + +### 准备训练数据 + +在本教程中,我们使用 [UltraFeedback](https://arxiv.org/abs/2310.01377) 数据集作为演示。为了方便起见,这里使用 huggingface 上已经预处理过的 [argilla/ultrafeedback-binarized-preferences-cleaned](https://huggingface.co/datasets/argilla/ultrafeedback-binarized-preferences-cleaned) 数据集: + +```python +train_dataset = dict( + type=build_preference_dataset, + dataset=dict( + type=load_dataset, + path='argilla/ultrafeedback-binarized-preferences-cleaned'), + dataset_map_fn=orpo_dpo_mix_40k_map_fn, + is_dpo=False, + is_reward=True, +) +``` + +在配置文件中使用以上配置,即可自动下载并处理该数据集。如果您希望使用其他 huggingface 上的开源数据集或是使用自定义的数据集,请参阅[偏好数据集](./preference_data.md)章节。 + +### 准备配置文件 + +XTuner 提供了多个开箱即用的配置文件,可以通过 `xtuner list-cfg` 查看。我们执行如下指令,以复制一个配置文件到当前目录。 + +```bash +xtuner copy-cfg internlm2_chat_1_8b_reward_full_ultrafeedback . 
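+# 提示(示意):若不确定配置名,可先检索 reward 相关的内置配置
+# xtuner list-cfg -p reward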
+``` + +打开复制后的配置文件,如果您选择自动下载模型和数据集,则无需修改配置。若您希望填入您预先下载的模型路径和数据集路径,请修改配置中的 `pretrained_model_name_or_path` 以及 `train_dataset` 中 `dataset` 的 `path` 参数。 + +更多的训练参数配置,请参阅[修改Reward训练配置](./modify_settings.md)章节。 + +### 启动训练 + +在完成上述操作后,便可以使用下面的指令启动训练任务了。 + +```bash +# 单机单卡 +xtuner train ./internlm2_chat_1_8b_reward_full_ultrafeedback_copy.py +# 单机多卡 +NPROC_PER_NODE=${GPU_NUM} xtuner train ./internlm2_chat_1_8b_reward_full_ultrafeedback_copy.py +# slurm 集群 +srun ${SRUN_ARGS} xtuner train ./internlm2_chat_1_8b_reward_full_ultrafeedback_copy.py --launcher slurm +``` + +正确的训练日志应当如下所示(在单卡 A800 上运行): + +``` +06/06 16:12:11 - mmengine - INFO - Iter(train) [ 10/15230] lr: 3.9580e-07 eta: 2:59:41 time: 0.7084 data_time: 0.0044 memory: 18021 loss: 0.6270 acc: 0.0000 chosen_score_mean: 0.0000 rejected_score_mean: 0.0000 num_samples: 4.0000 num_tokens: 969.0000 +06/06 16:12:17 - mmengine - INFO - Iter(train) [ 20/15230] lr: 8.3536e-07 eta: 2:45:25 time: 0.5968 data_time: 0.0034 memory: 42180 loss: 0.6270 acc: 0.5000 chosen_score_mean: 0.0013 rejected_score_mean: 0.0010 num_samples: 4.0000 num_tokens: 1405.0000 +06/06 16:12:22 - mmengine - INFO - Iter(train) [ 30/15230] lr: 1.2749e-06 eta: 2:37:18 time: 0.5578 data_time: 0.0024 memory: 32121 loss: 0.6270 acc: 0.7500 chosen_score_mean: 0.0016 rejected_score_mean: 0.0011 num_samples: 4.0000 num_tokens: 932.0000 +06/06 16:12:28 - mmengine - INFO - Iter(train) [ 40/15230] lr: 1.7145e-06 eta: 2:36:05 time: 0.6033 data_time: 0.0025 memory: 42186 loss: 0.6270 acc: 0.7500 chosen_score_mean: 0.0027 rejected_score_mean: 0.0016 num_samples: 4.0000 num_tokens: 994.0000 +06/06 16:12:35 - mmengine - INFO - Iter(train) [ 50/15230] lr: 2.1540e-06 eta: 2:41:03 time: 0.7166 data_time: 0.0027 memory: 42186 loss: 0.6278 acc: 0.5000 chosen_score_mean: 0.0031 rejected_score_mean: 0.0032 num_samples: 4.0000 num_tokens: 2049.0000 +06/06 16:12:40 - mmengine - INFO - Iter(train) [ 60/15230] lr: 2.5936e-06 eta: 2:33:37 time: 0.4627 data_time: 0.0023 memory: 30238 loss: 0.6262 acc: 1.0000 chosen_score_mean: 0.0057 rejected_score_mean: 0.0030 num_samples: 4.0000 num_tokens: 992.0000 +06/06 16:12:46 - mmengine - INFO - Iter(train) [ 70/15230] lr: 3.0331e-06 eta: 2:33:18 time: 0.6018 data_time: 0.0025 memory: 42186 loss: 0.6247 acc: 0.7500 chosen_score_mean: 0.0117 rejected_score_mean: 0.0055 num_samples: 4.0000 num_tokens: 815.0000 +``` + +### 模型转换 + +XTuner 已经集成好了将模型转换为 HuggingFace 格式的工具,我们只需要执行 + +```bash +# 创建存放 hf 格式参数的目录 +mkdir work_dirs/internlm2_chat_1_8b_reward_full_ultrafeedback_copy/iter_15230_hf + +# 转换格式 +xtuner convert pth_to_hf internlm2_chat_1_8b_reward_full_ultrafeedback_copy.py \ + work_dirs/internlm2_chat_1_8b_reward_full_ultrafeedback_copy.py/iter_15230.pth \ + work_dirs/internlm2_chat_1_8b_reward_full_ultrafeedback_copy.py/iter_15230_hf +``` + +便能够将 XTuner 的 ckpt 转换为 Huggingface 格式的模型。 + +需要注意的是,由于 Reward Model 的类型并未在 transformers 官方库中集成,因此目前只有InternLM2模型训练得到的 Reward Model 会被转换为 InternLM2ForRewardModel 类型,而其他模型则会默认转换为 SequenceClassification 类型(例如 LLaMa3 会被转换为 LlamaForSequenceClassification 类型),但这并不影响其在 XTuner PPO 训练中的使用。 diff --git a/data/xtuner/docs/zh_cn/switch_language.md b/data/xtuner/docs/zh_cn/switch_language.md new file mode 100644 index 0000000000000000000000000000000000000000..ff7c4c42502846c4fe3fc52f0bc2c2aec09c4f02 --- /dev/null +++ b/data/xtuner/docs/zh_cn/switch_language.md @@ -0,0 +1,3 @@ +## English + +## 简体中文 diff --git a/data/xtuner/docs/zh_cn/training/custom_pretrain_dataset.rst b/data/xtuner/docs/zh_cn/training/custom_pretrain_dataset.rst new file 
mode 100644 index 0000000000000000000000000000000000000000..ff2243587a9e6c2b00187e19a9d7b2e10d71301e --- /dev/null +++ b/data/xtuner/docs/zh_cn/training/custom_pretrain_dataset.rst @@ -0,0 +1,202 @@ +================================== +自定义预训练数据集 (LLM) +================================== + +XTuner 支持使用自定义数据集进行增量预训练,为便于介绍,本节以 +`internlm2_7b_custom_pretrain_e1.py `__ +配置文件为基础进行介绍。 + +数据准备 +================= + +用户若要在进行预训练,则需要将自定义的数据处理为以下格式: + +.. code:: json + + [ + { + "text": "xxx" + }, + { + "text": "xxx" + }, + ... + ] + +.. tip:: + 每条 ``text`` 数据不要太长(分词个数应小于 + ``max_length``\ ),以避免在数据处理阶段被截断。 + +.. tip:: + 为保证数据上下文的一致性,请确保长文本数据在被切分为多个 ``text`` + 后,json 列表的顺序与实际上下文顺序一致。 + +训练 +=============== + +步骤 1 :导出 config +------------------------------- + +``xtuner/configs/custom_dataset/pretrain/`` 目录下有所有 XTuner +支持的模型在自定义数据集下执行预训练的模板 config。可以通过 +``xtuner list-cfg -p custom_pretrain`` 命令来查看候选 config。下面以 +`internlm2_7b_custom_pretrain_e1.py `__ +为例展开介绍。 + +可以通过以下命令将 ``internlm2_7b_full_custom_pretrain_e1.py`` +导出至当前目录下: + +.. code:: console + + $ xtuner copy-cfg internlm2_7b_full_custom_pretrain_e1 . + +.. note:: + 当前目录下会存在一个新 config + ``internlm2_7b_full_custom_pretrain_e1_copy.py`` 。 + +步骤 2 :修改 config +--------------------------------- + +首先,需要修改数据集文件路径: + +.. code:: diff + + - data_files = ['/path/to/json/file.json'] + + data_files = ['/path/to/custom_dataset1.json', '/path/to/custom_dataset2.json', ...] + +若期望使用某个目录下所有的 json 文件作为训练数据集,可做如下修改: + +.. code:: diff + + ####################################################################### + # PART 1 Settings # + ####################################################################### + # Data + - data_files = ['/path/to/json/file.json'] + + data_dir = '/dir/to/custom_dataset' + + ####################################################################### + # PART 3 Dataset & Dataloader # + ####################################################################### + train_dataset = dict( + - dataset=dict(type=load_dataset, path='json', data_files=data_files), + + dataset=dict(type=load_dataset, path='json', data_dir=data_dir), + ...) + +若期望使用 LoRA 算法训练,可做如下修改: + +.. code:: diff + + ####################################################################### + # PART 2 Model & Tokenizer # + ####################################################################### + model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True), + + lora=dict( + + type=LoraConfig, + + r=64, + + lora_alpha=16, + + lora_dropout=0.1, + + bias='none', + + task_type='CAUSAL_LM')) + +若期望进行 QLoRA 算法训练,可做如下修改: + +.. 
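note::
+
+   上方 LoRA 与下方 QLoRA 配置中的 ``LoraConfig`` 均未显式指定 ``target_modules``,此时 peft 通常会按模型类型自动推断目标层;若需手动指定,可参考如下写法(示意,模块名为假设值,请以实际模型结构为准):
+
+   .. code:: python
+
+      lora=dict(
+          type=LoraConfig,
+          r=64,
+          lora_alpha=16,
+          lora_dropout=0.1,
+          bias='none',
+          # 假设:注意力投影层的参数名,不同模型命名不同
+          target_modules=['wqkv', 'wo'],
+          task_type='CAUSAL_LM')
+
+..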
code:: diff + + ####################################################################### + # PART 2 Model & Tokenizer # + ####################################################################### + model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + + quantization_config=dict( + + type=BitsAndBytesConfig, + + load_in_4bit=True, + + load_in_8bit=False, + + llm_int8_threshold=6.0, + + llm_int8_has_fp16_weight=False, + + bnb_4bit_compute_dtype=torch.float16, + + bnb_4bit_use_double_quant=True, + + bnb_4bit_quant_type='nf4') + ), + + lora=dict( + + type=LoraConfig, + + r=64, + + lora_alpha=16, + + lora_dropout=0.1, + + bias='none', + + task_type='CAUSAL_LM') + ) + +步骤 3 :开始训练 +------------------------- + +.. code:: bash + + NPROC_PER_NODE=8 xtuner train internlm2_7b_full_custom_pretrain_e1_copy.py --deepspeed deepspeed_zero1 + +训得模型将默认保存在 ``./work_dirs/``\ ,用户可以通过命令 +``xtuner train --work-dir ${SAVE_PATH}`` 指定保存路径。 + +步骤 4 :模型转换 +-------------------------- + +模型训练后会自动保存成 PTH 模型(例如 ``iter_2000.pth``\ ,如果使用了 +DeepSpeed,则将会是一个文件夹),我们需要利用 +``xtuner convert pth_to_hf`` 将其转换为 HuggingFace +模型,以便于后续使用。具体命令为: + +.. code:: bash + + xtuner convert pth_to_hf ${FINETUNE_CFG} ${PTH_PATH} ${SAVE_PATH} + # 例如:xtuner convert pth_to_hf internlm2_7b_full_custom_pretrain_e1_copy.py ./iter_2000.pth ./iter_2000_hf + +对话 +=========== + +用户可以利用 ``xtuner chat`` 实现与微调后的模型对话。 + +如果进行的是全量参数的微调: + +.. code:: bash + + xtuner chat ${PATH_TO_LLM} [optional arguments] + # 例如:xtuner chat ./iter_2000_hf --max-new-tokens 512 + +如果使用的是 LoRA 或 QLoRA 算法: + +.. code:: bash + + xtuner chat ${NAME_OR_PATH_TO_LLM} --adapter {NAME_OR_PATH_TO_ADAPTER} [optional arguments] + # 例如:xtuner chat internlm/internlm2-7b --adapter ./iter_2000_hf --max-new-tokens 512 + +.. _模型合并可选): + +模型合并(可选) +======================= + +如果您使用了 LoRA / QLoRA 微调,则模型转换后将得到 adapter +参数,而并不包含原 LLM +参数。如果您期望获得合并后的模型权重(例如用于后续评测),那么可以利用 +``xtuner convert merge`` : + +.. code:: bash + + (LLM) xtuner convert merge ${LLM} ${LLM_ADAPTER} ${SAVE_PATH} + +评测 +================== + +推荐使用一站式平台 +`OpenCompass `__ +来评测大语言模型,其目前已涵盖 50+ 数据集的约 30 万条题目。 diff --git a/data/xtuner/docs/zh_cn/training/custom_sft_dataset.rst b/data/xtuner/docs/zh_cn/training/custom_sft_dataset.rst new file mode 100644 index 0000000000000000000000000000000000000000..75b298934c1938a72dfdb6f66cf89bf2aaabab1c --- /dev/null +++ b/data/xtuner/docs/zh_cn/training/custom_sft_dataset.rst @@ -0,0 +1,246 @@ +=================================== +自定义指令微调数据集(LLM) +=================================== + +XTuner 支持使用自定义数据集进行指令微调,为便于介绍,本节以 +`internlm2_chat_7b_qlora_custom_sft_e1.py `__ +配置文件为基础进行介绍。 + +数据准备 +================= + +XTuner 采用 `OpenAI SFT +数据集格式 `__ +作为统一的自定义数据集格式,详细格式如下: + +.. code:: json + + [{ + "messages": [ + { "role": "system", "content": "xxx."}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx."} + ] + }, + { + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": False}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": True} + ] + }] + +.. note:: + 每条数据除了 OpenAI 标准格式中的 ``role`` + 字段和 ``content`` 字段外,XTuner 还额外扩充了一个 ``loss`` + 字段,用于控制某轮 ``assistant`` 的输出不计算 loss。 + +.. note:: + - ``system`` 和 ``user`` 的 ``loss`` 默认为 False + - ``assistant`` 的 ``loss`` 默认为 True + +.. 
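note::
+
+   上述示例中的 ``False``/``True`` 为 Python 风格写法,实际保存为 JSON 文件时应为小写的 ``false``/``true``。下面给出一个用 Python 构造并保存单条样本的示意片段:
+
+   .. code:: python
+
+      import json
+
+      sample = {
+          'messages': [
+              {'role': 'system', 'content': 'You are a helpful assistant.'},
+              {'role': 'user', 'content': '你好'},
+              # 该轮 assistant 输出不参与 loss 计算
+              {'role': 'assistant', 'content': '你好,请问有什么可以帮您?', 'loss': False},
+              {'role': 'user', 'content': '介绍一下 XTuner'},
+              {'role': 'assistant', 'content': 'XTuner 是一个大模型微调工具箱。', 'loss': True},
+          ]
+      }
+
+      # json.dump 会把 Python 的 False/True 序列化为 JSON 的 false/true
+      with open('custom_sft.json', 'w', encoding='utf-8') as f:
+          json.dump([sample], f, ensure_ascii=False, indent=2)
+
+..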
tip:: + + 若想令某轮对话 "assistant" + 部分的内容不参与 loss 计算,需要手动设置该数据 "loss" 字段的值为 + ``false``\ 。 + +训练 +============= + +步骤 1: 导出 config +-------------------------------- + +``xtuner/configs/custom_dataset/sft`` 目录下有所有 XTuner +支持的模型在自定义数据集下使用 QLora 算法训练的模板 config。可以通过 +``xtuner list-cfg -p custom_sft`` 命令来查看候选 config。下面以 +`internlm2_chat_7b_qlora_custom_sft_e1.py `__ +为例展开介绍。 + +可以通过以下命令将 ``internlm2_chat_7b_qlora_custom_sft_e1.py`` +导出至当前目录下: + +.. code:: console + + $ xtuner copy-cfg internlm2_chat_7b_qlora_custom_sft_e1 . + +.. note:: + + 当前目录下会存在一个新 config + ``internlm2_chat_7b_qlora_custom_sft_e1_copy.py`` 。 + +步骤 2:修改 config +---------------------------------- + +首先,需要修改数据集文件路径: + +.. code:: diff + + - data_files = ['/path/to/json/file.json'] + + data_files = ['/path/to/custom_sft1.json', '/path/to/custom_sft2.json', ...] + +若期望使用某个目录下所有的 json 文件作为训练数据集,可做如下修改: + +.. code:: diff + + ####################################################################### + # PART 1 Settings # + ####################################################################### + # Data + - data_files = ['/path/to/json/file.json'] + + data_dir = '/dir/to/custom_sft' + + ####################################################################### + # PART 3 Dataset & Dataloader # + ####################################################################### + train_dataset = dict( + - dataset=dict(type=load_dataset, path='json', data_files=data_files), + + dataset=dict(type=load_dataset, path='json', data_dir=data_dir), + ...) + +若期望使用 Lora 算法训练,可做如下修改: + +.. code:: diff + + ####################################################################### + # PART 2 Model & Tokenizer # + ####################################################################### + model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + - quantization_config=dict( + - type=BitsAndBytesConfig, + - load_in_4bit=True, + - load_in_8bit=False, + - llm_int8_threshold=6.0, + - llm_int8_has_fp16_weight=False, + - bnb_4bit_compute_dtype=torch.float16, + - bnb_4bit_use_double_quant=True, + - bnb_4bit_quant_type='nf4') + ), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +若期望进行全量参数训练,可做如下修改: + +.. code:: diff + + ####################################################################### + # PART 2 Model & Tokenizer # + ####################################################################### + model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + - quantization_config=dict( + - type=BitsAndBytesConfig, + - load_in_4bit=True, + - load_in_8bit=False, + - llm_int8_threshold=6.0, + - llm_int8_has_fp16_weight=False, + - bnb_4bit_compute_dtype=torch.float16, + - bnb_4bit_use_double_quant=True, + - bnb_4bit_quant_type='nf4') + ), + - lora=dict( + - type=LoraConfig, + - r=64, + - lora_alpha=16, + - lora_dropout=0.1, + - bias='none', + - task_type='CAUSAL_LM') + ) + +步骤 3: 开始训练 +----------------------------- + +.. code:: console + + $ NPROC_PER_NODE=8 xtuner train internlm2_chat_7b_qlora_custom_sft_e1_copy.py --deepspeed deepspeed_zero1 + +.. 
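note::
+
+   若在 slurm 集群上启动训练,可参考其他训练任务的通用方式(示意):
+
+   .. code:: console
+
+      $ srun ${SRUN_ARGS} xtuner train internlm2_chat_7b_qlora_custom_sft_e1_copy.py --launcher slurm --deepspeed deepspeed_zero1
+
+..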
tip:: + 训练日志及 checkpoint 将默认保存在 ``./work_dirs/``\ ,可以通过命令 + ``xtuner train --work-dir ${SAVE_PATH}`` 指定保存路径。 + +步骤 4: 模型转换 +------------------------------ + +模型训练后会自动保存成 PTH 模型(例如 ``iter_2000.pth``\ ,如果使用了 +DeepSpeed,则将会是一个文件夹),我们需要利用 +``xtuner convert pth_to_hf`` 将其转换为 HuggingFace +模型,以便于后续使用。具体命令为: + +.. code:: bash + + xtuner convert pth_to_hf ${FINETUNE_CFG} ${PTH_PATH} ${SAVE_PATH} + # 例如:xtuner convert pth_to_hf internlm2_chat_7b_qlora_custom_sft_e1_copy.py ./iter_2000.pth ./iter_2000_hf + +对话 +================= + +用户可以利用 ``xtuner chat`` 实现与微调后的模型对话。如果使用的是 Lora +或 QLora 算法: + +.. code:: console + + $ xtuner chat ${NAME_OR_PATH_TO_LLM} --adapter {NAME_OR_PATH_TO_ADAPTER} --prompt-template ${PROMPT_TEMPLATE} [optional arguments] + $ # 例如:xtuner chat internlm/internlm2-7b --adapter ./iter_2000_hf --prompt-template internlm2_chat + + +如果进行的是全量参数的微调: + +.. code:: console + + $ xtuner chat ${PATH_TO_LLM} --prompt-template ${PROMPT_TEMPLATE} [optional arguments] + $ # 例如:xtuner chat ./iter_2000_hf --prompt-template internlm2_chat + +.. note:: + + 其中 ${PROMPT_TEMPLATE} 表示模型的对话模板,需要与训练用的 config 中的 + ``prompt_template`` 字段保持一致,例如 + ``internlm2_chat_7b_qlora_custom_sft_e1_copy.py`` 中的设置为: + + .. code:: python + + prompt_template = PROMPT_TEMPLATE.internlm2_chat + +.. _模型合并可选): + +模型合并(可选) +====================== + +如果您使用了 LoRA / QLoRA 微调,则模型转换后将得到 adapter +参数,而并不包含原 LLM +参数。如果您期望获得合并后的模型权重(例如用于后续评测),那么可以利用 +``xtuner convert merge`` : + +.. code:: console + + $ xtuner convert merge ${LLM} ${LLM_ADAPTER} ${SAVE_PATH} + +.. tip:: + + 模型合并后,就得到了一个可以通过 ``AutoModelForCausalLM.from_pretrained`` 直接加载的模型,可以直接在各种下游工具中直接使用 + +评测 +====================== + +推荐使用一站式平台 +`OpenCompass `__ +来评测大语言模型,其目前已涵盖 50+ 数据集的约 30 万条题目。 diff --git a/data/xtuner/docs/zh_cn/training/modify_settings.rst b/data/xtuner/docs/zh_cn/training/modify_settings.rst new file mode 100644 index 0000000000000000000000000000000000000000..619dbe5532c8bc441ee939834f1f255fef6cbaa1 --- /dev/null +++ b/data/xtuner/docs/zh_cn/training/modify_settings.rst @@ -0,0 +1,473 @@ +============ +修改训练配置 +============ + +XTuner 的训练由 MMEngine +的训练器提供支持,用户可以通过修改配置文件(config)中的特定参数,来修改对应的训练配置。以 +`internlm2_chat_7b_qlora_oasst1_e3 `__ +为例,本节将首先速览配置文件中各个参数的含义,之后讲解常见配置的修改方式。 + +配置文件速览 +============ + +XTuner 使用 MMEngine 的「纯 Python 风格的配置文件」,直接利用 ``import`` +机制使用一些类或函数。 + +.. tip:: + + 如果您期望深入了解 MMEngine 「纯 Python + 风格的配置文件」的特性、优势,请参考 + `这里 `__\ 。 + +.. code:: python + + # Copyright (c) OpenMMLab. All rights reserved. 
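+   # 注(示意说明):实际等效的全局 batch size = 每卡 batch_size × GPU 数 × accumulative_counts,
+   # 调整其中任一项时,建议同步评估学习率等超参。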
+ import torch + from datasets import load_dataset + from mmengine.dataset import DefaultSampler + from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) + from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR + from peft import LoraConfig + from torch.optim import AdamW + from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + + from xtuner.dataset import process_hf_dataset + from xtuner.dataset.collate_fns import default_collate_fn + from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory + from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) + from xtuner.engine.runner import TrainLoop + from xtuner.model import SupervisedFinetune + from xtuner.utils import PROMPT_TEMPLATE + + ####################################################################### + # PART 1 Settings # + ####################################################################### + # Model + pretrained_model_name_or_path = 'internlm/internlm2-chat-7b' # 设置 LLM 路径或 HuggingFace Hub ID + use_varlen_attn = False # 是否使用 varlen_attention + + # Data + data_path = 'timdettmers/openassistant-guanaco' # 设置 dataset 路径或 HuggingFace Hub ID,以用于 datasets.load_dataset + prompt_template = PROMPT_TEMPLATE.internlm2_chat # 设置对话模版 + max_length = 2048 # 设置训练数据截断长度 + pack_to_max_length = True # 是否将多条样本打包为一条最长长度的样本 + + # Scheduler & Optimizer + batch_size = 1 # per_device # 每个设备的样本个数 + accumulative_counts = 16 # 梯度累计数 + dataloader_num_workers = 0 # dataloader worker 数 + max_epochs = 3 # 训练迭代代数 + optim_type = AdamW # 优化器 + lr = 2e-4 # 学习率 + betas = (0.9, 0.999) # AdamW 优化器 betas + weight_decay = 0 # AdamW 优化器权重衰减 + max_norm = 1 # grad clip # 梯度裁剪 + warmup_ratio = 0.03 # warmup 比率 + + # Save + save_steps = 500 # checkpoint 保存间隔(iter 数) + save_total_limit = 2 # 最大保存 checkpoint 个数,-1 表示无限制 + + # Evaluate the generation performance during the training + evaluation_freq = 500 # 验证对话效果的执行间隔(iter 数) + SYSTEM = '' # 验证对话效果的 system 字段 + evaluation_inputs = [ # 验证对话效果时的测试问题 + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' + ] + + ####################################################################### + # PART 2 Model & Tokenizer # + ####################################################################### + tokenizer = dict( # 构建 tokenizer + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + + model = dict( # 构建 model + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( # 构建 LLM + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( # 量化配置(保留则为 4 比特,删除则为正常浮点) + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( # LoRA 配置(保留则使用 LoRA 微调,删除则使用全量微调) + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + + ####################################################################### + # PART 3 Dataset & Dataloader # + ####################################################################### + train_dataset = dict( # 构建训练数据集 + type=process_hf_dataset, + dataset=dict(type=load_dataset, 
path=data_path), # 调用 datasets.load_dataset 接口 + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, # 选择匹配的数据集 map_fn + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + + train_dataloader = dict( # 构建训练数据集的 DataLoader + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + + ####################################################################### + # PART 4 Scheduler & Optimizer # + ####################################################################### + # optimizer + optim_wrapper = dict( # 构建优化器 + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + + # learning policy + # More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 + param_scheduler = [ # 设置学习率 scheduler + dict( + type=LinearLR, # warmup 阶段 + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, # Cosine 学习率衰减阶段 + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) + ] + + # train, val, test setting + train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) # 设置训练迭代代数 + + ####################################################################### + # PART 5 Runtime # + ####################################################################### + # Log the dialogue periodically during the training process, optional + custom_hooks = [ # 定义 Hooks + dict(type=DatasetInfoHook, tokenizer=tokenizer), # 在训练前打印可视化打印数据样本 + dict( + type=EvaluateChatHook, # 在训练时测试对话效果 + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) + ] + + if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] # vallen_attention 依赖的 Hook + + # 以下均为默认配置,如需调整请参考 MMEngine 文档及代码 + + # configure default hooks + default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), + ) + + # configure environment + env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), + ) + + # set visualizer + visualizer = None + + # set log level + log_level = 'INFO' + + # load from which checkpoint + load_from = None + + # whether to resume training from the loaded checkpoint + resume = False + + # Defaults to use random seed and disable `deterministic` + randomness = dict(seed=None, deterministic=False) + + # set log processor + log_processor = dict(by_epoch=False) + +常见训练配置修改 +======================= + +模型 +------------ + +使用其他 LLM 模型? +~~~~~~~~~~~~~~~~~~~~~~~~ +1. 修改 ``pretrained_model_name_or_path``\ ,其将应用至 ``model.llm`` 和 ``tokenizer`` 的初始化中。 +#. 修改 ``prompt_template`` 以适配所选择的 LLM。 + +使用 ModelScope 模型? +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +1. 参考 `文档 <../preparation/pretrained_model.md>`__ 将其下载至本地 +2. 修改\ ``pretrained_model_name_or_path``\ 。 + +使用 openMind 模型? +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +可在配置文件中新增 ``model_resource`` 参数, ``args`` 用作可变参数(如下载私有模型需传入token的情况): + +.. code:: python + from openmind_hub import snapshot_download + + # Model + pretrained_model_name_or_path = 'Tianjin_Ascend/Qwen1.5-4B' + model_resource = { + "fn": snapshot_download, + "args":{ + # "token":"xxxxxxxxxx" + } + } + +微调类型 +------------- + +.. tip:: + XTuner 内置的配置文件以 QLoRA 微调为主,但并不意味着 XTuner 仅支持 QLoRA + 微调。用户可以通过修改配置文件中的 ``model`` 来决定微调类型。 + + +QLoRA 微调 +~~~~~~~~~~~~~~~~~ + +.. code:: python + + model = dict( + ...... + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM'), + ......) + + +LoRA 微调 +~~~~~~~~~~~~~~~~ + +.. tip:: + + 在 QLoRA 设置的基础上,将 `quantization_config` 设置为 None,就切换成了 LoRA 微调 + +.. code:: python + + model = dict( + ...... + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=None), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM'), + ......) + + +全参数微调 +~~~~~~~~~~~~~~~~~~ +.. tip:: + + 将 `lora` 和 `quantization_config` 都设置为 None,就切换到了全参数训练模式 + +.. code:: python + + model = dict( + ...... + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=None), + lora=None, + ......) + + + + +数据集 +-------------- + +请参考 `训练` 章节文档。 + +优化器 +----------- + +使用其他优化器? +~~~~~~~~~~~~~~~~~~~~ + +- 方法 1:修改 ``optim_type``\ (例如 ``optim_type=torch.optim.SGD``\ ),其将应用至 ``optim_wrapper.optimzer``\ 。 +- 方法 2:忽略 ``optim_type``\ ,直接修改 ``optim_wrapper.optimzer``\ 。 + + +修改优化器参数配置? +~~~~~~~~~~~~~~~~~~~~~~~~ + +- 方法 1:修改 ``lr``\ 、\ ``weight_decay`` 等参数,其将应用至 ``optim_wrapper.optimzer``\ 。 +- 方法 2:直接修改 ``optim_wrapper.optimzer``\ 。 + +迭代次数 +--------------- + +调整迭代次数? 
+~~~~~~~~~~~~~~~~~~~~~ + +- 修改 ``max_epochs`` 参数。 + +保存 Checkpoint 间隔 +--------------------------- + +调整保存间隔? +~~~~~~~~~~~~~~~~~~~~~ + +- 修改 ``save_steps`` 参数。 + +调整最大保存 checkpoint 个数? +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- 修改 ``save_total_limit`` 参数。 + +训练间对话评测 +---------------------- + +调整对话评测间隔? +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- 修改 ``evaluation_freq`` 参数。 + +调整对话评测的 system 字段? +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- 修改 ``SYSTEM`` 参数。 + +调整对话评测的测试指令? +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- 修改 ``evaluation_inputs`` 参数。 + +GPU 数 +-------------- + +XTuner +的多卡训练由启动命令决定,而非配置文件。用户可以参考下列命令启动多卡训练: + +.. code:: bash + + # 单卡 + xtuner train ${CONFIG} + # 多卡 + (DIST) NPROC_PER_NODE=${GPU_NUM} xtuner train ${CONFIG} + (SLURM) srun ${SRUN_ARGS} xtuner train ${CONFIG} --launcher slurm + +DeepSpeed +------------------ + +XTuner 的 DeepSpeed +优化由启动命令决定,而非配置文件。用户可以参考下列命令启用 DeepSpeed +优化: + +.. code:: bash + + xtuner train ${CONFIG} --deepspeed ${DS_CONFIG} + +.. note:: + + XTuner 内置了多个 DeepSpeed 配置文件(即命令中的 + ``${DS_CONFIG}``\ ),用户可以直接使用,具体文件见 + `这里 `__\ : + + .. code:: bash + + xtuner train ${CONFIG} --deepspeed [deepspeed_zero1,deepspeed_zero2,deepspeed_zero2_offload,deepspeed_zero3,deepspeed_zero3_offload] + +.. note:: + 部分参数会在 DeepSpeed Config 和 XTuner Config 中重复定义(例如 batch + size等)。此时相关配置会以 XTuner Config 为准: + + - ``gradient_accumulation_steps`` 会被 XTuner Config 中的 + ``accumulative_counts`` 设置覆盖。 + + - ``train_micro_batch_size_per_gpu`` 会被 XTuner Config 中的 + ``train_dataloader.batch_size`` 设置覆盖。 + + - ``gradient_clipping`` 会被 XTuner Config 中的 + ``optim_wrapper.clip_grad.max_norm`` 设置覆盖。 + + - XTuner 会根据所使用的 GPU 架构自动选择 ``fp16`` 或 ``bf16`` 训练。 + +其他 +---------- + +如有遗漏或特定需求,欢迎提出 +`issue `__ 讨论。 diff --git a/data/xtuner/docs/zh_cn/training/multi_modal_dataset.rst b/data/xtuner/docs/zh_cn/training/multi_modal_dataset.rst new file mode 100644 index 0000000000000000000000000000000000000000..541dcec7a7f3055f447443d7b83932867a29fb61 --- /dev/null +++ b/data/xtuner/docs/zh_cn/training/multi_modal_dataset.rst @@ -0,0 +1,296 @@ +========================== +多模态数据集 (VLM) +========================== + +XTuner 支持 LLaVA 图文模型的微调,本文将以 +`xtuner/llava-internlm2-7b `__ +为例,讲解如何利用 XTuner 快速上手多模态数据集训练,及后续的对话、评测。 + +数据准备 +======== + +XTuner 支持 LLaVA 格式数据集的多模态图文预训练、微调。本节将从「LLaVA +开源数据集准备」和「自定义数据集准备」两部分展开介绍。 + +LLaVA 开源数据集准备 +----------------------------- + +数据文件结构 +^^^^^^^^^^^^ + +.. code:: + + ./data/llava_data + ├── LLaVA-Pretrain + │ ├── blip_laion_cc_sbu_558k.json + │ ├── blip_laion_cc_sbu_558k_meta.json + │ └── images + ├── LLaVA-Instruct-150K + │ └── llava_v1_5_mix665k.json + └── llava_images + ├── coco + │ └── train2017 + ├── gqa + │ └── images + ├── ocr_vqa + │ └── images + ├── textvqa + │ └── train_images + └── vg + ├── VG_100K + └── VG_100K_2 + +预训练数据下载 +^^^^^^^^^^^^^^ + +LLaVA-Pretrain + +.. code:: bash + + # Make sure you have git-lfs installed (https://git-lfs.com) + git lfs install + git clone https://huggingface.co/datasets/liuhaotian/LLaVA-Pretrain --depth=1 + +指令微调数据下载 +^^^^^^^^^^^^^^^^ + +**LLaVA-Instruct-150K** (文本) + +.. code:: bash + + # Make sure you have git-lfs installed (https://git-lfs.com) + git lfs install + git clone https://huggingface.co/datasets/liuhaotian/LLaVA-Instruct-150K --depth=1 + + +**COCO** (图像): `train2017 `__ + +**GQA** (图像): `images `__ + +**TextVQA** (图像): `train_val_images `__ + +**VisualGenome** (图像): `part1 `__, `part2 `__ + +**OCR-VQA** (图像): `download script `__ + +.. tip:: + ⚠️ OCR-VQA 所下载的图片命名需要利用如下脚本进行处理,以确保所有图片后缀为 + ``.jpg``\ ! + + .. 
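note::
+
+   下方脚本中的 ``ocr_vqa_path`` 与 ``$target_dir`` 应指向同一图片目录(请保持变量名一致)。亦可使用等价的 Python 写法(示意):
+
+   .. code:: python
+
+      import shutil
+      from pathlib import Path
+
+      # 假设:OCR-VQA 图片所在目录
+      target_dir = Path('/path/to/ocr_vqa/images')
+
+      for f in target_dir.rglob('*'):
+          # 将非 .jpg 后缀的图片复制一份为 .jpg(保留原文件,与 shell 脚本行为一致)
+          if f.is_file() and f.suffix.lower() != '.jpg':
+              shutil.copy(f, f.with_suffix('.jpg'))
+
+..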
code:: bash + + #!/bin/bash + ocr_vqa_path="" + + find "$target_dir" -type f | while read file; do + extension="${file##*.}" + if [ "$extension" != "jpg" ] + then + cp -- "$file" "${file%.*}.jpg" + fi + done + + +自定义数据集准备 +---------------- + +如果用户期望使用自定义数据集进行图文训练,可以参照 LLaVA +开源数据集格式进行准备,具体格式如下: + +.. code:: json + + [ + { + "image": "xxx/xxx", + "conversations": [ + { + "from": "human", + "value": "\nHello! What's this?" + }, + { + "from": "gpt", + "value": "This is a dog!" + }, + { + "from": "human", + "value": "Is it cute?" + }, + { + "from": "gpt", + "value": "Yes." + } + ] + }, + ... + ] + +.. note:: + 目前针对自定义数据有一些约束: + + 1. ``image`` 字段表示图片路径,且仅能有一张图片 + + 2. ``conversations`` 字段第 0 条的 ``value`` 需要包括 ```` + ,以确保图片被正确嵌入。 + +训练 +===== + +多模态图文训练一般分为两步:预训练(pretrain)、指令跟随微调(finetune)。\ ``xtuner/llava-internlm2-7b`` +对应的配置文件:\ `预训练 `__ +/ +`指令跟随微调 `__\ ,用户可以对其中的模型路径、数据路径进行自定义修改。 + +预训练 +------ + +.. code:: console + + $ NPROC_PER_NODE=8 xtuner train llava_internlm2_chat_7b_clip_vit_large_p14_336_e1_gpu8_pretrain --deepspeed deepspeed_zero2 + +.. tip:: + 训得模型将默认保存在 ``./work_dirs/``\ ,用户可以通过命令 + ``xtuner train --work-dir ${SAVE_PATH}`` 指定保存路径。 + +指令跟随微调 +----------------- + +指令跟随微调时,需要载入预训练阶段所得到的 ``.pth`` +模型,以提供良好的初始化,这一通过在配置文件中的 ``pretrained_pth`` +指定,用户可以自行修改。 + +.. code:: console + + $ NPROC_PER_NODE=8 xtuner train llava_internlm2_chat_7b_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune --deepspeed deepspeed_zero2 + +模型转换 +-------- + +模型训练后会自动保存成 PTH 模型(例如 +``iter_5198.pth``\ ),我们需要利用 ``xtuner convert pth_to_hf`` +将其转换为 HuggingFace 模型,以便于后续使用。具体命令为: + +.. code:: console + + $ xtuner convert pth_to_hf $FINETUNE_CFG $PTH_PATH $SAVE_PATH + $ # 例如:xtuner convert pth_to_hf llava_internlm2_chat_7b_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune ./iter_5198.pth ./iter_5198_hf + +.. note:: + 此时,我们将获得所需要的模型。如果使用默认的微调范式,文件结构应与 + `这里 `__ + 一致。 + + + +模型合并(可选) +------------------- + +如果您使用了 LoRA / QLoRA 微调,则模型转换后将得到 adapter +参数,而并不包含原 LLM +参数。如果您期望获得合并后的模型权重,那么可以利用 +``xtuner convert merge`` : + +.. code:: console + + $ xtuner convert merge $LLM $LLM_ADAPTER $SAVE_PATH + $ xtuner convert merge $CLIP $CLIP_ADAPTER $SAVE_PATH --is-clip + +对话 +===== + +用户可以利用 ``xtuner chat`` +实现与微调后的多模态图文模型对话。假设模型转换阶段获得的模型路径为 +``./iter_5198_hf``\ ,则我们可以利用下列命令实现对话: + +.. code:: console + + $ xtuner chat internlm/internlm2-chat-7b \ + $ --visual-encoder openai/clip-vit-large-patch14-336 \ + $ --llava ./iter_5198_hf \ + $ --prompt-template internlm2_chat \ + $ --image $IMAGE_PATH + +.. note:: + + ``xtuner chat`` 的第一个参数为 LLM 路径或 HuggingFace Hub + ID。如果训练阶段 LLM 使用的是 LoRA / QLoRA 微调,则此参数请传入基础 + LLM,如 + ``internlm/internlm2-chat-7b``\ ;如果使用的是全参数微调,则此参数请传入转换(\ ``xtuner convert pth_to_hf``\ )所得到的模型权重,如 + ``./iter_5198_hf``\ 。 + +评测 +==== + +XTuner 的 LLaVA 模型可以利用 +`VLMEvalKit `__ +进行评测,请参考 +`这里 `__ +快速上手。 + +同时,为了方便使用,XTuner 内也集成了 MMBench +评测,您可以通过下列命令下载 MMBench 评测数据集: + +.. code:: console + + $ wget https://opencompass.openxlab.space/utils/VLMEval/MMBench_DEV_EN.tsv + $ wget https://opencompass.openxlab.space/utils/VLMEval/MMBench_TEST_EN.tsv + $ wget https://opencompass.openxlab.space/utils/VLMEval/MMBench_DEV_CN.tsv + $ wget https://opencompass.openxlab.space/utils/VLMEval/MMBench_TEST_CN.tsv + $ wget https://opencompass.openxlab.space/utils/VLMEval/CCBench.tsv + +之后,您可以利用下列命令实现评测: + +.. 
code:: console + + $ xtuner mmbench internlm/internlm2-chat-7b \ + $ --visual-encoder openai/clip-vit-large-patch14-336 \ + $ --llava ./iter_5198_hf \ + $ --prompt-template internlm2_chat \ + $ --data-path $DATA_PATH \ + $ --work-dir $RESULT_PATH + +.. note:: + + ``xtuner mmbench`` 的第一个参数为 LLM 路径或 HuggingFace Hub + ID。如果训练阶段 LLM 使用的是 LoRA / QLoRA 微调,则此参数请传入基础 + LLM,如 + ``internlm/internlm2-chat-7b``\ ;如果使用的是全参数微调,则此参数请传入转换(\ ``xtuner convert pth_to_hf``\ )所得到的模型权重,如 + ``./iter_5198_hf``\ 。 + +.. note:: + + ``$DATA_PATH`` 指上一步骤所下载的某一个 tsv 文件,如 + ``MMBench_DEV_EN.tsv``\ 。 + +.. note:: + 评测完成后,若为开发集则会直接打印出结果;若为测试集,则需将 + ``mmbench_result.xlsx`` 提交至 `MMBench + 官方 `__ 完成评测取得精度结果。 + +FAQ +==== + +如何更换 LLM? +---------------------- + +修改 LLM 的方式与训练单模态的大语言模型类似。 + +1. 修改配置文件中的 ``llm_name_or_path`` 参数至您期望使用的 LLM,例如 + ``internlm/internlm2-chat-20b``\ 等。 + +2. 修改配置文件中的 ``prompt_template`` 参数,与您所选择的 LLM + 保持对齐,具体选择可参考 + \ :ref:`对话模版文档 ` \ 。 + + +ValueError: ``bos_token_id`` has to be defined when no ``input_ids`` are provided. +------------------------------------------------------------------------------------- + +这是由于老版本 ``transformers`` 的 LLM ``generate`` 接口在接受 +``inputs_embeds`` 输入时,必须传入有效的 ``bos_token_id``\ 。 +(`#29772 `__) + +更新 ``transformers`` 即可解决 + +.. code:: console + + $ pip install -U transformers diff --git a/data/xtuner/docs/zh_cn/training/open_source_dataset.rst b/data/xtuner/docs/zh_cn/training/open_source_dataset.rst new file mode 100644 index 0000000000000000000000000000000000000000..380ba0db313185ee8967229ac66aea88c5bf214a --- /dev/null +++ b/data/xtuner/docs/zh_cn/training/open_source_dataset.rst @@ -0,0 +1,213 @@ +================================ +开源指令微调数据集(LLM) +================================ + +HuggingFace Hub 中有众多优秀的开源数据,本节将以 +`timdettmers/openassistant-guanaco `__ +开源指令微调数据集为例,讲解如何开始训练。为便于介绍,本节以 +`internlm2_chat_7b_qlora_oasst1_e3 `__ +配置文件为基础进行讲解。 + +适配开源数据集 +===================== + +不同的开源数据集有不同的数据「载入方式」和「字段格式」,因此我们需要针对所使用的开源数据集进行一些适配。 + +载入方式 +----------- + +XTuner 使用上游库 ``datasets`` 的统一载入接口 ``load_dataset``\ 。 + +.. code:: python + + data_path = 'timdettmers/openassistant-guanaco' + train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + ...) + +.. tip:: + 一般来说,若想要使用不同的开源数据集,用户只需修改 + ``dataset=dict(type=load_dataset, path=data_path)`` 中的 ``path`` + 参数即可。 + + 若想使用 openMind 数据集,可将 ``dataset=dict(type=load_dataset, path=data_path)`` 中的 ``type`` 替换为 ``openmind.OmDataset``。 + + +字段格式 +-------- + +为适配不同的开源数据集的字段格式,XTuner 开发并设计了一套 ``map_fn`` 机制,可以把不同的开源数据集转为统一的字段格式 + +.. code:: python + + from xtuner.dataset.map_fns import oasst1_map_fn + train_dataset = dict( + type=process_hf_dataset, + ... + dataset_map_fn=oasst1_map_fn, + ...) + +XTuner 内置了众多 map_fn +(\ `这里 `__\ ),可以满足大多数开源数据集的需要。此处我们罗列一些常用 +map_fn 及其对应的原始字段和参考数据集: + ++------------------------------------------------------------------------------------------------------------------------------------+---------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+ +| map_fn | Columns | Reference Datasets | ++====================================================================================================================================+===================================================+=======================================================================================================================+ +| `alpaca_map_fn `__ | ['instruction', 'input', 'output', ...] 
| `tatsu-lab/alpaca `__ | ++------------------------------------------------------------------------------------------------------------------------------------+---------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+ +| `alpaca_zh_map_fn `__ | ['instruction_zh', 'input_zh', 'output_zh', ...] | `silk-road/alpaca-data-gpt4-chinese `__ | ++------------------------------------------------------------------------------------------------------------------------------------+---------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+ +| `oasst1_map_fn `__ | ['text', ...] | `timdettmers/openassistant-guanaco `__ | ++------------------------------------------------------------------------------------------------------------------------------------+---------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+ +| `openai_map_fn `__ | ['messages', ...] | `DavidLanz/fine_tuning_datraset_4_openai `__ | ++------------------------------------------------------------------------------------------------------------------------------------+---------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+ +| `code_alpaca_map_fn `__ | ['prompt', 'completion', ...] | `HuggingFaceH4/CodeAlpaca_20K `__ | ++------------------------------------------------------------------------------------------------------------------------------------+---------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+ +| `medical_map_fn `__ | ['instruction', 'input', 'output', ...] | `shibing624/medical `__ | ++------------------------------------------------------------------------------------------------------------------------------------+---------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+ +| `tiny_codes_map_fn `__ | ['prompt', 'response', ...] | `nampdn-ai/tiny-codes `__ | ++------------------------------------------------------------------------------------------------------------------------------------+---------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+ +| `default_map_fn `__ | ['input', 'output', ...] | / | ++------------------------------------------------------------------------------------------------------------------------------------+---------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+ + +例如,针对 ``timdettmers/openassistant-guanaco`` 数据集,XTuner 内置了 +``oasst1_map_fn``\ ,以对其进行字段格式统一。具体实现如下: + +.. code:: python + + def oasst1_map_fn(example): + r"""Example before preprocessing: + example['text'] = ('### Human: Can you explain xxx' + '### Assistant: Sure! 
xxx' + '### Human: I didn't understand how xxx' + '### Assistant: It has to do with a process xxx.') + + Example after preprocessing: + example['conversation'] = [ + { + 'input': 'Can you explain xxx', + 'output': 'Sure! xxx' + }, + { + 'input': 'I didn't understand how xxx', + 'output': 'It has to do with a process xxx.' + } + ] + """ + data = [] + for sentence in example['text'].strip().split('###'): + sentence = sentence.strip() + if sentence[:6] == 'Human:': + data.append(sentence[6:].strip()) + elif sentence[:10] == 'Assistant:': + data.append(sentence[10:].strip()) + if len(data) % 2: + # The last round of conversation solely consists of input + # without any output. + # Discard the input part of the last round, as this part is ignored in + # the loss calculation. + data.pop() + conversation = [] + for i in range(0, len(data), 2): + single_turn_conversation = {'input': data[i], 'output': data[i + 1]} + conversation.append(single_turn_conversation) + return {'conversation': conversation} + +通过代码可以看到,\ ``oasst1_map_fn`` 对原数据中的 ``text`` +字段进行处理,进而构造了一个 ``conversation`` +字段,以此确保了后续数据处理流程的统一。 + +值得注意的是,如果部分开源数据集依赖特殊的 +map_fn,则需要用户自行参照以提供的 map_fn +进行自定义开发,实现字段格式的对齐。 + +训练 +===== + +用户可以使用 ``xtuner train`` 启动训练。假设所使用的配置文件路径为 +``./config.py``\ ,并使用 DeepSpeed ZeRO-2 优化。 + +单机单卡 +-------- + +.. code:: console + + $ xtuner train ./config.py --deepspeed deepspeed_zero2 + +单机多卡 +-------- + +.. code:: console + + $ NPROC_PER_NODE=${GPU_NUM} xtuner train ./config.py --deepspeed deepspeed_zero2 + +多机多卡(以 2 \* 8 GPUs 为例) +-------------------------------------- + +**方法 1:torchrun** + +.. code:: console + + $ # excuete on node 0 + $ NPROC_PER_NODE=8 NNODES=2 PORT=$PORT ADDR=$NODE_0_ADDR NODE_RANK=0 xtuner train mixtral_8x7b_instruct_full_oasst1_e3 --deepspeed deepspeed_zero2 + + $ # excuete on node 1 + $ NPROC_PER_NODE=8 NNODES=2 PORT=$PORT ADDR=$NODE_0_ADDR NODE_RANK=1 xtuner train mixtral_8x7b_instruct_full_oasst1_e3 --deepspeed deepspeed_zero2 + +.. note:: + + \ ``$PORT`` 表示通信端口、\ ``$NODE_0_ADDR`` 表示 node 0 的 IP 地址。 + 二者并不是系统自带的环境变量,需要根据实际情况,替换为实际使用的值 + +**方法 2:slurm** + +.. code:: console + + $ srun -p $PARTITION --nodes=2 --gres=gpu:8 --ntasks-per-node=8 xtuner train internlm2_chat_7b_qlora_oasst1_e3 --launcher slurm --deepspeed deepspeed_zero2 + +模型转换 +========= + +模型训练后会自动保存成 PTH 模型(例如 ``iter_500.pth``\ ),我们需要利用 +``xtuner convert pth_to_hf`` 将其转换为 HuggingFace +模型,以便于后续使用。具体命令为: + +.. code:: console + + $ xtuner convert pth_to_hf ${CONFIG_NAME_OR_PATH} ${PTH} ${SAVE_PATH} + $ # 例如:xtuner convert pth_to_hf ./config.py ./iter_500.pth ./iter_500_hf + +.. _模型合并可选): + +模型合并(可选) +================ + +如果您使用了 LoRA / QLoRA 微调,则模型转换后将得到 adapter +参数,而并不包含原 LLM +参数。如果您期望获得合并后的模型权重,那么可以利用 +``xtuner convert merge`` : + +.. code:: console + + $ xtuner convert merge ${LLM} ${ADAPTER_PATH} ${SAVE_PATH} + $ # 例如:xtuner convert merge internlm/internlm2-chat-7b ./iter_500_hf ./iter_500_merged_llm + +对话 +===== + +用户可以利用 ``xtuner chat`` 实现与微调后的模型对话: + +.. code:: console + + $ xtuner chat ${NAME_OR_PATH_TO_LLM} --adapter ${NAME_OR_PATH_TO_ADAPTER} --prompt-template ${PROMPT_TEMPLATE} [optional arguments] + +.. tip:: + + 例如: + + .. 
code:: console
+
+      $ xtuner chat internlm/internlm2-chat-7b --adapter ./iter_500_hf --prompt-template internlm2_chat
+      $ xtuner chat ./iter_500_merged_llm --prompt-template internlm2_chat
diff --git a/data/xtuner/docs/zh_cn/training/visualization.rst b/data/xtuner/docs/zh_cn/training/visualization.rst
new file mode 100644
index 0000000000000000000000000000000000000000..64c1f8afe7bdda6fe176e3205380d34a80cf71b9
--- /dev/null
+++ b/data/xtuner/docs/zh_cn/training/visualization.rst
@@ -0,0 +1,73 @@
+==============
+可视化训练过程
+==============
+
+XTuner 支持通过 `MMEngine `__
+使用 `TensorBoard `__
+和 `Weights & Biases (WandB) `__
+实验管理工具,只需在 config 中添加一行代码,就可以跟踪和可视化损失、显存占用等指标。
+
+TensorBoard
+============
+
+1. 设置 config 中的 ``visualizer`` 字段,并将 ``vis_backends`` 设置为 `TensorboardVisBackend `__\ :
+
+.. code:: diff
+
+   # set visualizer
+   - visualizer = None
+   + from mmengine.visualization import Visualizer, TensorboardVisBackend
+   + visualizer = dict(type=Visualizer, vis_backends=[dict(type=TensorboardVisBackend)])
+
+2. 启动实验后,TensorBoard 产生的相关文件会保存在 ``vis_data`` 中,通过 tensorboard 命令即可启动实时可视化:
+
+|image1|
+
+.. code::
+
+   tensorboard --logdir=$PATH_TO_VIS_DATA
+
+WandB
+======
+
+1. 使用 WandB 前需安装依赖库 ``wandb`` 并登录至 wandb。
+
+.. code:: console
+
+   $ pip install wandb
+   $ wandb login
+
+2. 设置 config 中的 ``visualizer`` 字段,并将 ``vis_backends`` 设置为 `WandbVisBackend `__\ :
+
+.. code:: diff
+
+   # set visualizer
+   + from mmengine.visualization import Visualizer, WandbVisBackend
+   - visualizer = None
+   + visualizer = dict(type=Visualizer, vis_backends=[dict(type=WandbVisBackend)])
+
+.. tip::
+   可以点击 `WandbVisBackend
+   API `__
+   查看 ``WandbVisBackend`` 可配置的参数。例如
+   ``init_kwargs``\ ,该参数会传给
+   `wandb.init `__ 方法。
+
+   .. code:: diff
+
+      # set visualizer
+      - visualizer = None
+      + from mmengine.visualization import Visualizer, WandbVisBackend
+      + visualizer = dict(
+      +     type=Visualizer,
+      +     vis_backends=[
+      +         dict(type=WandbVisBackend, init_kwargs=dict(project='toy-example'))])
+
+
+3. 启动实验后,可在 wandb 网页端 ``https://wandb.ai`` 上查看可视化结果:
+
+|image2|
+
+
+.. |image1| image:: https://github.com/InternLM/xtuner/assets/67539920/abacb28f-5afd-46d0-91b2-acdd20887969
+.. |image2| image:: https://github.com/InternLM/xtuner/assets/41630003/fc16387a-3c83-4015-9235-8ec811077953
diff --git a/data/xtuner/docs/zh_cn/user_guides/ceph.md b/data/xtuner/docs/zh_cn/user_guides/ceph.md
new file mode 100644
index 0000000000000000000000000000000000000000..b03bd1e3b574dfbf2485e1794a5ab4ff562877b7
--- /dev/null
+++ b/data/xtuner/docs/zh_cn/user_guides/ceph.md
@@ -0,0 +1,64 @@
+## 功能说明
+
+### 已支持的功能
+
+- 保存 DeepSpeed Checkpoint 至 Ceph
+- 从 Ceph 上的 DeepSpeed Checkpoint 续训
+- `pth_to_hf` 支持 Ceph 上的 DeepSpeed Checkpoint
+
+### 暂不支持的功能
+
+- 训练时从 Ceph 加载 HuggingFace 模型,与 `zero3` 加载权重冲突
+- HuggingFace `save_pretrained` 保存至 Ceph,逻辑过于复杂,难以 patch
+
+## 使用说明
+
+#### 1. 验证 Ceph 环境
+
+使用前需确保 `petrel sdk` 可用,并且要使用的 Ceph bucket 存在且可用。
+
+验证 `aws` 命令行工具:
+
+```bash
+# 验证 aws 命令行工具
+aws s3 ls $YOUR_BUCKET
+```
+
+验证 `petrel sdk`:
+
+```python
+bucket = 's3://xxx'
+
+from mmengine import get_file_backend
+backend = get_file_backend(bucket)
+
+for f in backend.list_dir_or_file(bucket):
+    print(f)
+```
+
+#### 2. 训练时保存 Checkpoint 至 Ceph
+
+`XTuner` 根据环境变量 `DS_CEPH_DIR` 来判断是否将 checkpoint 保存至 Ceph:
+
+```bash
+DS_CEPH_DIR=s3://xxxx srun ${SRUN_ARGS} xtuner train $CONFIG --launcher slurm
+```
+
+#### 3. 
从 Ceph 上的 Checkpoint 续训 + +Resume 时,要填写 checkpoint 在 ceph 上的完整路径 + +```bash +DS_CEPH_DIR=s3://xxxx srun ${SRUN_ARGS} xtuner train $CONFIG --launcher slurm --resume s3://xxx/yyy/epoch_x.pth +``` + +#### 4. 将 Ceph 上的 Checkpoint 转换为 HF 模型 + +不支持 `$HF_DIR` 为 ceph 路径 + +由于 Checkpoint 中存储了优化器状态,加载比较耗时,对于 ZeRO 1&2 可以直接加载 checkpoint 中的 `model_states.pt` 文件加速转换过程;ZeRO 3 必须先加载整个 checkpoint + +```bash +srun ${SRUN_ARGS} xtuner convert pth_to_hf $CONFIG s3://xxx/yyy/epoch_x.pth $HF_DIR + +``` diff --git a/data/xtuner/docs/zh_cn/user_guides/chat.md b/data/xtuner/docs/zh_cn/user_guides/chat.md new file mode 100644 index 0000000000000000000000000000000000000000..02c27d3e70a7f39c2a3e73e5903e198cd156bf2c --- /dev/null +++ b/data/xtuner/docs/zh_cn/user_guides/chat.md @@ -0,0 +1,128 @@ +# 与微调后的大语言模型 LLMs 对话 + +## 与微调后的 [InternLM](https://github.com/InternLM/InternLM) 对话 + +### InternLM-7B + +- InternLM-7B, oasst1 + + ```shell + xtuner chat internlm/internlm-7b --adapter xtuner/internlm-7b-qlora-oasst1 --prompt-template internlm_chat + ``` + +- InternLM-7B, Arxiv Gentitle + + ```shell + xtuner chat internlm/internlm-7b --adapter xtuner/internlm-7b-qlora-arxiv-gentitle --prompt-template internlm_chat --system-template arxiv_gentile + ``` + +- InternLM-7B, Colorist + + ```shell + xtuner chat internlm/internlm-7b --adapter xtuner/internlm-7b-qlora-colorist --prompt-template internlm_chat --system-template colorist + ``` + +- InternLM-7B, Alpaca-enzh + + ```shell + xtuner chat internlm/internlm-7b --adapter xtuner/internlm-7b-qlora-alpaca-enzh --prompt-template internlm_chat --system-template alpaca + ``` + +- InternLM-7B, MSAgent **(支持 Lagent ReAct)** + + ```shell + export SERPER_API_KEY="xxx" # 请从 https://serper.dev 获得 API_KEY,以此支持谷歌搜索! + xtuner chat internlm/internlm-7b --adapter xtuner/internlm-7b-qlora-msagent-react --lagent + ``` + +### InternLM-Chat-7B + +- InternLM-Chat-7B, oasst1 + + ```shell + xtuner chat internlm/internlm-chat-7b --adapter xtuner/internlm-chat-7b-qlora-oasst1 --prompt-template internlm_chat + ``` + +- InternLM-Chat-7B, Alpaca-enzh + + ```shell + xtuner chat internlm/internlm-chat-7b --adapter xtuner/internlm-chat-7b-qlora-alpaca-enzh --prompt-template internlm_chat --system-template alpaca + ``` + +### InternLM-20B + +- InternLM-20B, oasst1 + + ```shell + xtuner chat internlm/internlm-20b --adapter xtuner/internlm-20b-qlora-oasst1 --prompt-template internlm_chat + ``` + +- InternLM-20B, Arxiv Gentitle + + ```shell + xtuner chat internlm/internlm-20b --adapter xtuner/internlm-20b-qlora-arxiv-gentitle --prompt-template internlm_chat --system-template arxiv_gentile + ``` + +- InternLM-20B, Colorist + + ```shell + xtuner chat internlm/internlm-20b --adapter xtuner/internlm-20b-qlora-colorist --prompt-template internlm_chat --system-template colorist + ``` + +- InternLM-20B, Alpaca-enzh + + ```shell + xtuner chat internlm/internlm-20b --adapter xtuner/internlm-20b-qlora-alpaca-enzh --prompt-template internlm_chat --system-template alpaca + ``` + +- InternLM-20B, MSAgent **(支持 Lagent ReAct)** + + ```shell + export SERPER_API_KEY="xxx" # 请从 https://serper.dev 获得 API_KEY,以此支持谷歌搜索! 
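+  # 注:--lagent 模式依赖 lagent 库,若未安装可先执行 pip install lagent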
+ xtuner chat internlm/internlm-20b --adapter xtuner/internlm-20b-qlora-msagent-react --lagent + ``` + +### InternLM-Chat-20B + +- InternLM-Chat-20B, oasst1 + + ```shell + xtuner chat internlm/internlm-chat-20b --adapter xtuner/internlm-chat-20b-qlora-oasst1 --prompt-template internlm_chat + ``` + +- InternLM-Chat-20B, Alpaca-enzh + + ```shell + xtuner chat internlm/internlm-chat-20b --adapter xtuner/internlm-chat-20b-qlora-alpaca-enzh --prompt-template internlm_chat --system-template alpaca + ``` + +## 与微调后的 [Llama-2](https://github.com/facebookresearch/llama) 对话 + +> 在使用 Llama-2 之前,请先使用 `huggingface-cli login` 输入你的访问令牌(access token)!点击[这里](https://huggingface.co/docs/hub/security-tokens#user-access-tokens)了解如何获取访问令牌。 + +### Llama-2-7B + +- Llama-2-7B, MOSS-003-SFT **(支持调用插件)** + + ```shell + export SERPER_API_KEY="xxx" # 请从 https://serper.dev 获得 API_KEY,以此支持谷歌搜索! + xtuner chat meta-llama/Llama-2-7b-hf --adapter xtuner/Llama-2-7b-qlora-moss-003-sft --bot-name Llama2 --prompt-template moss_sft --system-template moss_sft --with-plugins calculate solve search --no-streamer + ``` + +- Llama-2-7B, MSAgent **(支持 Lagent ReAct)** + + ```shell + export SERPER_API_KEY="xxx" # 请从 https://serper.dev 获得 API_KEY,以此支持谷歌搜索! + xtuner chat meta-llama/Llama-2-7b-hf --adapter xtuner/Llama-2-7b-qlora-msagent-react --lagent + ``` + +## 与微调后的 [Qwen](https://github.com/QwenLM) 对话 + +### Qwen-7B + +- Qwen-7B, MOSS-003-SFT **(支持调用插件)** + + ```shell + export SERPER_API_KEY="xxx" # 请从 https://serper.dev 获得API_KEY,以此支持谷歌搜索! + xtuner chat Qwen/Qwen-7B --adapter xtuner/Qwen-7B-qlora-moss-003-sft --bot-name Qwen --prompt-template moss_sft --system-template moss_sft --with-plugins calculate solve search + ``` diff --git a/data/xtuner/docs/zh_cn/user_guides/config.md b/data/xtuner/docs/zh_cn/user_guides/config.md new file mode 100644 index 0000000000000000000000000000000000000000..6ff1bb698c8a4ffc3c361eae5b84018dfa5bd52b --- /dev/null +++ b/data/xtuner/docs/zh_cn/user_guides/config.md @@ -0,0 +1,214 @@ +# Config 介绍 + +以 [internlm_7b_qlora_oasst1_e3](https://github.com/InternLM/xtuner/blob/main/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_oasst1_e3.py) 为例。 + +```python +# Copyright (c) OpenMMLab. All rights reserved. 
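+# 注:本配置文件按五个部分组织:Settings、Model & Tokenizer、
+# Dataset & Dataloader、Scheduler & Optimizer、Runtime,
+# 与下文各 PART 注释一一对应。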
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-7b' # 设置 LLM 路径或 HuggingFace Hub ID + +# Data +data_path = 'timdettmers/openassistant-guanaco' # 设置 dataset 路径或 HuggingFace Hub ID,以用于 datasets.load_dataset +prompt_template = PROMPT_TEMPLATE.internlm_chat # 设置 prompt_template 以确定对话模板 +max_length = 2048 # 设置训练数据最大长度 +pack_to_max_length = True # 是否将多条样本打包为一条最长长度的样本 + +# Scheduler & Optimizer +batch_size = 1 # per_device # 每个设备的样本个数 +accumulative_counts = 16 # 梯度累计数 +dataloader_num_workers = 0 # dataloader worker 数 +max_epochs = 3 # 训练迭代代数 +optim_type = AdamW # 优化器 +lr = 2e-4 # 学习率 +betas = (0.9, 0.999) # AdamW 优化器 betas +weight_decay = 0 # 权重衰减 +max_norm = 1 # grad clip # 梯度裁剪 +warmup_ratio = 0.03 # warmup + +# Save +save_steps = 500 # 保存间隔 +save_total_limit = 2 # 最大保存 checkpoint 个数,-1 表示无限制 + +# Evaluate the generation performance during the training +evaluation_freq = 500 # 验证对话效果频率 +SYSTEM = '' # 验证对话效果时对话字段 +evaluation_inputs = [ # 验证对话效果时测试问题 + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( # 构建 tokenizer + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( # 构建 model + type=SupervisedFinetune, # 指令跟随微调 + llm=dict( # LLM + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( # 量化配置 + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( # LoRA 配置 + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( # 构建训练数据集 + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), # 调用 datasets.load_dataset 接口 + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + 
shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + +train_dataloader = dict( # 构建 dataloader + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn)) # 使用默认的 collate_fn + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, # 自动混合精度优化器 + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, # warmup 阶段 + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, # Cosine 学习率策略 + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) # 设置 train loop + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), # 在训练、测试前打印数据集样本 + dict( + type=EvaluateChatHook, # 在训练时测试对话效果 + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +# 以下均为默认配置,如需调整请参考 MMEngine 文档及代码 + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) + +``` diff --git a/data/xtuner/docs/zh_cn/user_guides/custom_dataset/Offline.md b/data/xtuner/docs/zh_cn/user_guides/custom_dataset/Offline.md new file mode 100644 index 0000000000000000000000000000000000000000..c41eff24ae6673d9ea1155486f2e17a9485cd065 --- /dev/null +++ b/data/xtuner/docs/zh_cn/user_guides/custom_dataset/Offline.md @@ -0,0 +1,156 @@ +# 离线处理数据集 + +当训练数据量非常大时,每次训练的时候都先在线处理数据可能会极为耗时。我们可以先对原始数据进行离线处理并保存至本地,随后的多次训练可以读入本地离线处理好的数据后直接开始训练。 + +## Step 1, 导出模板 config 文件 + +XTuner 中提供了用于自定义数据集微调的模板 config ,与其他基于 huggingface hub 上的数据集微调的 config 相比,只有数据部分进行了微小的修改: + +```diff ++ data_files = ['/path/to/json/file.json'] +train_dataset = dict( + ..., +- dataset=dict(type=load_dataset, path='tatsu-lab/alpaca'), ++ dataset=dict(type=load_dataset, path='json', data_files=data_files), + ... +) +``` + +可使用以下命令查看 XTuner 中提供的用于自定义数据集微调的模板 config: + +``` +xtuner list-cfg -p custom_dataset +``` + +若想基于 Internlm2 进行全量微调,可从上述命令输出结果中选择 `internlm2_7b_full_finetune_custom_dataset_e1` 并导出至当前目录下: + +``` +xtuner copy-cfg internlm2_7b_full_finetune_custom_dataset_e1 . +``` + +## Step 2, 修改模板 config 文件 + +首先,需要修改 Step 1 中导出的模板 config 中的训练数据路径部分: + +```diff +- data_files = ['/path/to/json/file.json'] ++ data_files = ['/path/to/your/json/file1.json', ++ '/path/to/your/json/file2.json', ...] +``` + +其次,需要修改 config 模板中的数据格式对应部分。若数据集满足以下格式,则不需修改: + +``` +[ + { + "conversation": [ + { + "system": "", + "input": "xxx", + "output": "xxx" + }, + { + "input": "xxx", + "output": "xxx" + } + ] + }, +... +] +``` + +若不满足,则可以通过 `xtuner list-dataset-format` 命令查看 XTuner 中支持的数据集格式,并修改 config 模板中的数据格式对应部分。例如自定义数据集满足 Alpaca 格式,则可以修改: + +```diff ++ from xtuner.dataset.map_fns import alpaca_map_fn +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + ..., +- dataset_map_fn=None, ++ dataset_map_fn=alpaca_map_fn, + ... 
+) +``` + +## Step 3, 离线处理数据集 + +使用以下命令可离线预处理原始数据: + +``` +python xtuner/tools/process_untokenized_datasets.py \ + internlm2_7b_full_finetune_custom_dataset_e1_copy.py \ + --save-folder /folder/to/save/processed/dataset +``` + +这里的第一个参数为 Step 2 中修改过的 config 文件,第二个参数为预处理过的数据集的保存路径。**注意,上述命令会在 internlm2_7b_full_finetune_custom_dataset_e1_copy.py 同级目录下新建一个 internlm2_7b_full_finetune_custom_dataset_e1_copy_modified.py 文件,后续训练中需要使用该配置文件,而非 internlm2_7b_full_finetune_custom_dataset_e1_copy.py。** + +## Step 4, 启动训练 + +**注意,训练中需要使用 Step 3 新生成的 internlm2_7b_full_finetune_custom_dataset_e1_copy_modified.py 文件,而非 internlm2_7b_full_finetune_custom_dataset_e1_copy.py 文件。** + +在 slurm 集群调度系统中可以通过以下命令启动训练: + +``` +srun ${SRUN_ARGS} xtuner train internlm2_7b_full_finetune_custom_dataset_e1_copy_modified.py --launcher slurm --deepspeed deepspeed_zero1 +``` + +若出现 OOM 现象,可尝试使用 zero2 或 zero3。以下命令可以使用 zero 3 显存优化策略进行训练: + +``` +srun ${SRUN_ARGS} xtuner train internlm2_7b_w_tokenized_dataset_copy.py --launcher slurm --deepspeed deepspeed_zero3 +``` + +在阿里云 DLC 中可通过以下命令启动训练: + +```diff +export NCCL_IB_TC=136 +export NCCL_IB_SL=5 +export NCCL_IB_GID_INDEX=3 +export NCCL_SOCKET_IFNAME=bond0 +export NCCL_DEBUG=INFO +export NCCL_IB_HCA=mlx5 +export NCCL_IB_TIMEOUT=22 +export NCCL_IB_QPS_PER_CONNECTION=8 +export NCCL_NET_PLUGIN=none + +export NCCL_BUFFSIZE=2097152 +export PYTORCH_CUDA_ALLOC_CONF=max_split_size_mb:512 +- export EXP_NAME=debug ++ export EXP_NAME=your_exp_name +export PYTHONPATH='.':$PYTHONPATH +source ~/.bashrc ++ cd /path/to/xtuner ++ conda activate conda_env_name + +export NPROC_PER_NODE=${KUBERNETES_CONTAINER_RESOURCE_GPU} +export PORT=${MASTER_PORT} +export NNODES=${WORLD_SIZE} +export NODE_RANK=${RANK} +export ADDR=${MASTER_ADDR} + +echo ${KUBERNETES_CONTAINER_RESOURCE_GPU} +echo ${WORLD_SIZE} +echo ${MASTER_PORT} +echo ${MASTER_ADDR} +echo ${RANK} +xtuner train internlm2_7b_full_finetune_custom_dataset_e1_copy_modified.py \ + --deepspeed deepspeed_zero1 \ + --work-dir work_dirs/${EXP_NAME} +``` + +## Step 5, 转模型 + +deepspeed 转 hf: + +``` +python xtuner/tools/model_converters/pth_to_hf.py internlm2_7b_full_finetune_custom_dataset_e1_copy_modified.py /src/model/path /hf/dst/model/path +``` + +hf 转 Turbomind: + +``` +lmdeploy convert internlm2-chat-7b /hf/dst/model/path --dst-path /turbomind/dst/model/path +``` diff --git a/data/xtuner/docs/zh_cn/user_guides/custom_dataset/Online.md b/data/xtuner/docs/zh_cn/user_guides/custom_dataset/Online.md new file mode 100644 index 0000000000000000000000000000000000000000..aef9835c62d714d2688bbf628d6eee63b4c002bd --- /dev/null +++ b/data/xtuner/docs/zh_cn/user_guides/custom_dataset/Online.md @@ -0,0 +1,150 @@ +# 在线处理数据集 + +与离线处理数据集相比,在线处理数据集操作较为简单,适用于数据量不是特别大的训练情况。 + +## Step 1, 导出模板 config 文件 + +XTuner 中提供了用于自定义数据集微调的模板 config ,与其他基于 huggingface hub 上的数据集微调的 config 相比,只有数据部分进行了微小的修改,例如: + +```diff ++ data_files = ['/path/to/json/file.json'] +train_dataset = dict( + ..., +- dataset=dict(type=load_dataset, path='tatsu-lab/alpaca'), ++ dataset=dict(type=load_dataset, path='json', data_files=data_files), + ... +) +``` + +可使用以下命令查看 XTuner 中提供的用于自定义数据集微调的模板 config: + +``` +xtuner list-cfg -p custom_dataset +``` + +若想基于 Internlm2 进行全量微调,可从上述命令输出结果中选择 `internlm2_7b_full_finetune_custom_dataset_e1` 并导出至当前目录下: + +``` +xtuner copy-cfg internlm2_7b_full_finetune_custom_dataset_e1 . 
+``` + +## Step 2, 修改模板 config 文件 + +首先,需要修改 Step 1 中导出的模板 config 中的训练数据路径部分: + +```diff +- data_files = ['/path/to/json/file.json'] ++ data_files = ['/path/to/your/json/file1.json', ++ '/path/to/your/json/file2.json', ...] +``` + +其次,需要修改 config 模板中的数据格式对应部分。若数据集满足以下格式,则不需修改: + +``` +[ + { + "conversation": [ + { + "system": "", + "input": "xxx", + "output": "xxx" + }, + { + "input": "xxx", + "output": "xxx" + } + ] + }, +... +] +``` + +若不满足,则可以通过 `xtuner list-dataset-format` 命令查看 XTuner 中支持的数据集格式,并修改 config 模板中的数据格式对应部分。例如自定义数据集满足 Alpaca 格式,则可以修改: + +```diff ++ from xtuner.dataset.map_fns import alpaca_map_fn +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + ..., +- dataset_map_fn=None, ++ dataset_map_fn=alpaca_map_fn, + ... +) +``` + +## Step 3, 启动训练 + +在 slurm 集群调度系统中可以通过以下命令启动训练: + +``` +srun ${SRUN_ARGS} xtuner train internlm2_7b_full_finetune_custom_dataset_e1_copy.py --launcher slurm --deepspeed deepspeed_zero1 +``` + +若出现 OOM 现象,可尝试使用 zero2 或 zero3。以下命令可以使用 zero 3 显存优化策略进行训练: + +``` +srun ${SRUN_ARGS} xtuner train internlm2_7b_w_tokenized_dataset_copy.py --launcher slurm --deepspeed deepspeed_zero3 +``` + +若训练数据集较大,可能需要在训练前设置环境变量 `XTUNER_DATASET_TIMEOUT` 为一个更大的数(默认为 60 分钟超时,可以酌情将其调大,如:120): + +``` +XTUNER_DATASET_TIMEOUT=120 srun ${SRUN_ARGS} xtuner train internlm2_7b_full_finetune_custom_dataset_e1_copy.py --launcher slurm --deepspeed deepspeed_zero1 +``` + +在阿里云 DLC 中可通过以下命令启动训练: + +```diff +export NCCL_IB_TC=136 +export NCCL_IB_SL=5 +export NCCL_IB_GID_INDEX=3 +export NCCL_SOCKET_IFNAME=bond0 +export NCCL_DEBUG=INFO +export NCCL_IB_HCA=mlx5 +export NCCL_IB_TIMEOUT=22 +export NCCL_IB_QPS_PER_CONNECTION=8 +export NCCL_NET_PLUGIN=none + +export NCCL_BUFFSIZE=2097152 +export PYTORCH_CUDA_ALLOC_CONF=max_split_size_mb:512 +- export EXP_NAME=debug ++ export EXP_NAME=your_exp_name +export PYTHONPATH='.':$PYTHONPATH +source ~/.bashrc ++ cd /path/to/xtuner ++ conda activate conda_env_name + +export NPROC_PER_NODE=${KUBERNETES_CONTAINER_RESOURCE_GPU} +export PORT=${MASTER_PORT} +export NNODES=${WORLD_SIZE} +export NODE_RANK=${RANK} +export ADDR=${MASTER_ADDR} + +echo ${KUBERNETES_CONTAINER_RESOURCE_GPU} +echo ${WORLD_SIZE} +echo ${MASTER_PORT} +echo ${MASTER_ADDR} +echo ${RANK} +xtuner train internlm2_7b_full_finetune_custom_dataset_e1_copy.py \ + --deepspeed deepspeed_zero1 \ + --work-dir work_dirs/${EXP_NAME} +``` + +同理,对于大数据集,需要酌情调整环境变量 `XTUNER_DATASET_TIMEOUT`。 + +## Step 4, 转模型 + +deepspeed 转 hf: + +``` +python xtuner/tools/model_converters/pth_to_hf.py internlm2_7b_full_finetune_custom_dataset_e1_copy.py /src/model/path /hf/dst/model/path +``` + +hf 转 Turbomind: + +``` +lmdeploy convert internlm2-chat-7b /hf/dst/model/path --dst-path /turbomind/dst/model/path +``` diff --git a/data/xtuner/docs/zh_cn/user_guides/dataset_format.md b/data/xtuner/docs/zh_cn/user_guides/dataset_format.md new file mode 100644 index 0000000000000000000000000000000000000000..356824196cae44c9098db27be39b8b40b6bef5df --- /dev/null +++ b/data/xtuner/docs/zh_cn/user_guides/dataset_format.md @@ -0,0 +1,195 @@ +# 数据集格式 + +- [增量预训练数据集格式](#增量预训练数据集格式) +- [单轮对话数据集格式](#单轮对话数据集格式) +- [多轮对话数据集格式](#多轮对话数据集格式) + - [方法 1](#方法-1) + - [方法 2](#方法-2) + - [XTuner 方法介绍](#xtuner-方法介绍) + +大语言模型 Supervised Finetune(SFT)旨在通过有监督的微调来提高预训练模型在特定任务上的性能。为支持尽可能多的下游任务,XTuner 支持了增量预训练、单轮对话、多轮对话三种数据集格式。 + +- 增量预训练数据集用于提升模型在特定领域或任务的能力。 +- 
单轮对话和多轮对话数据集则经常用于指令微调(instruction tuning)阶段,以提升模型回复特定指令的能力。 + +在指令微调阶段,我们的目标是训练语言模型根据人类指令给出回答。 **因此,一般只有回答部分(Output)的 loss 会用于梯度回传,而指令部分(System、Input)部分的 loss 则不会用于权重更新。** 基于此,我们在对数据集进行预处理的时候引入了 "system"、"input" 和 "output" 三个字段,"system"、"input" 字段用于保存不需要计算 loss 的文本,例如系统或用户指令,而 "output" 字段则用于保存需要计算 loss 的文本,例如输入指令对应的 GroundTruth 回答。 + +为了统一增量预训练、单轮对话和多轮对话三种数据集格式,我们将数据集格式设置为以下形式: + +```json +[{ + "conversation":[ + { + "system": "xxx", + "input": "xxx", + "output": "xxx" + } + ] +}, +{ + "conversation":[ + { + "system": "xxx", + "input": "xxx", + "output": "xxx" + }, + { + "input": "xxx", + "output": "xxx" + } + ] +}] +``` + +在训练过程中,我们会将一条数据中的多组 "system"、"input" 和 "output" 进行拼接,之后输入模型,并行计算每个位置的 loss ,但只有 "output" 部分对应的 loss 参与梯度回传,如下图所示。 + +
+*(图:拼接后仅 output 部分的 loss 参与梯度回传的示意图)*
+
+其中 `<bos>` token 和 `<eos>` token 用于表示句子或文本的开始和结束。
+
+## 增量预训练数据集格式
+
+由于增量预训练旨在帮助模型学习针对特定下游任务的语言知识和表达能力,因此数据集的全部内容对应的 loss 都应该用于梯度回传。因此,数据集的 "system"、"input" 为空,而 "output" 为一整条语料数据。增量预训练任务对应的数据集格式如下所示:
+
+```json
+[{
+    "conversation":[
+        {
+            "system": "",
+            "input": "",
+            "output": "I am an artificial intelligence (AI) assistant named Puyu. I was created by the Shanghai AI Laboratory and my purpose is to assist users with various tasks through natural language processing technology."
+        }
+    ]
+},
+{
+    "conversation":[
+        {
+            "system": "",
+            "input": "",
+            "output": "I am an artificial intelligence programmed to assist with various types of tasks, including answering questions, providing information, and performing automated processes."
+        }
+    ]
+}]
+```
+
+*(图:增量预训练数据集 loss 计算示意图)*
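+
+在实际准备数据时,往往需要先把原始语料批量转换为上述 JSON 格式。下面是一个示意性的转换脚本(仅为示例:假设原始语料存放在 `./corpus` 目录下的若干 `.txt` 文件中、每行一条语料,目录与文件名均为假设):
+
+```python
+import json
+from pathlib import Path
+
+def build_pretrain_dataset(txt_dir, save_path):
+    """把纯文本语料转换为增量预训练数据集格式。"""
+    data = []
+    for txt_file in sorted(Path(txt_dir).glob('*.txt')):
+        for line in txt_file.read_text(encoding='utf-8').splitlines():
+            line = line.strip()
+            if not line:
+                continue
+            # 增量预训练:system 与 input 留空,整条语料作为 output,
+            # 其全部 token 的 loss 都参与梯度回传
+            data.append(
+                {'conversation': [{'system': '', 'input': '', 'output': line}]})
+    with open(save_path, 'w', encoding='utf-8') as f:
+        json.dump(data, f, ensure_ascii=False, indent=2)
+
+build_pretrain_dataset('./corpus', './data/pretrain_data.json')
+```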
+
+## 单轮对话数据集格式
+
+单轮对话数据集往往由一条指令(或问题)及其对应 GroundTruth 回答组成。由于只有回答部分需要对 loss 进行回传,因此数据集的 "system"、"input" 字段为输入指令,"output" 字段为对应回答。单轮对话数据集格式如下所示:
+
+```json
+[{
+    "conversation":[
+        {
+            "system": "You are an AI assistant.",
+            "input": "Give three tips for staying healthy.",
+            "output": "1. Eat a balanced diet. 2. Exercise regularly. 3. Get enough sleep."
+        }
+    ]
+},
+{
+    "conversation":[
+        {
+            "system": "You are an AI assistant.",
+            "input": "How to study English?",
+            "output": "1. Set clear goals. 2. Create a study plan. 3. Build vocabulary. 4. Practice speaking."
+        }
+    ]
+}]
+```
+
+*(图:单轮对话数据集 loss 计算示意图)*
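+
+为了更直观地理解只有 output 部分参与 loss 计算这一设定,下面用一小段示意代码演示拼接与 label 构造的基本原理(仅为原理演示,并非 XTuner 的实际实现;`tokenizer` 可以是任意 HuggingFace tokenizer):
+
+```python
+def build_labels(tokenizer, system, user_input, output):
+    """拼接 system/input/output,并把 prompt 部分的 label 置为 -100。"""
+    prompt_ids = tokenizer.encode(system + user_input, add_special_tokens=False)
+    output_ids = tokenizer.encode(output, add_special_tokens=False)
+    input_ids = prompt_ids + output_ids
+    # -100 是 PyTorch 交叉熵损失默认的 ignore_index,
+    # 因此 system、input 部分不产生梯度,只有 output 部分参与 loss 计算
+    labels = [-100] * len(prompt_ids) + output_ids
+    return input_ids, labels
+```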
+
+## 多轮对话数据集格式
+
+多轮对话数据集往往由多轮指令(或问题)+ 对应 GroundTruth 回答组成。假设我们现在有一条多轮对话数据,内容如下。为方便介绍,对于第 n 轮对话,我们将 User 和 Assistant 对应的输出设为 UserN 和 AssistantN。
+
+```text
+System: You are an AI assistant.
+User1: Hello?
+Assistant1: Hello! How can I help you?
+User2: What's the date today?
+Assistant2: Today is Monday, August 14, 2023.
+User3: Thank you!
+Assistant3: You are welcome.
+```
+
+如何使用上述这条多轮对话数据训练大模型?目前有以下两个主流方法。
+
+### 方法 1
+
+将 System、User1、Assistant1、User2、Assistant2、User3 的文本都视为模型的输入部分,将 Assistant3 的文本视为模型的预测部分,只有 Assistant3 部分的 loss 参与权重更新。
+
+*(图:方法 1 的 loss 计算示意图)*
+ +这种方法的弊端在于没有充分利用多轮对话的训练数据,因为 Assistant1 和 Assistant2 的内容没有参与模型训练,导致训练数据利用率较低。 + +### 方法 2 + +将一条多轮对话数据,拆分成多条数据。例如将以上示例拆分成如下三条数据。 + +
+*(图:方法 2 将多轮对话拆分为多条数据的示意图)*
+
+相比于方法 1,方法 2 可以充分利用每一轮对话的数据,但需要将一条包含 n 轮对话的数据拆分为 n 条数据,训练效率约降低至原来的 1/n。
+
+### XTuner 方法介绍
+
+XTuner 训练多轮对话模型时,采取了一种更加充分高效的方法,如下图所示。
+
+*(图:XTuner 多轮对话拼接与 loss 计算示意图)*
+ +我们将多轮对话进行拼接,之后输入模型,并行计算每个位置的 loss,而只有 Output 部分的 loss 参与回传。因此 XTuner 中多轮对话数据集格式如下所示: + +```json +[{ + "conversation":[ + { + "system": "You are an AI asssistant." + "input": "Hello?", + "output": "Hello! How can I help you?" + }, + { + "input": "What's the date today?", + "output": "Today is Monday, August 14, 2023." + }, + { + "input": "Thank you!", + "output": "You are welcome." + } + ] +}, +{ + "conversation":[ + { + "system": "You are an AI asssistant." + "input": "Hello?", + "output": "Hello! How can I help you?" + }, + { + "input": "How's the weather today in Rosso?", + "output": "The weather in Rosso on Wednesday, August 16th, is going to be cloudy for most of the day, together with moderate rain around noon." + }, + { + "input": "Thank you!", + "output": "You are welcome." + } + ] +}] +``` + +数据集中的 "conversation" 键对应的值是一个列表,用于保存每一轮对话的指令和实际回答(GroundTruth)。为了保持格式统一,增量预训练数据集和单轮对话数据集中的 "conversation" 键也对应一个列表,只不过该列表的长度为 1。而在多轮对话数据集中,"conversation" 列表的长度为 n,以容纳 n 轮的对话内容。 diff --git a/data/xtuner/docs/zh_cn/user_guides/dataset_prepare.md b/data/xtuner/docs/zh_cn/user_guides/dataset_prepare.md new file mode 100644 index 0000000000000000000000000000000000000000..cb602226e30bc06bd9ec7aa236c329917c4eb457 --- /dev/null +++ b/data/xtuner/docs/zh_cn/user_guides/dataset_prepare.md @@ -0,0 +1,180 @@ +# 数据集准备 + +- [数据集准备](#数据集准备) + - [HuggingFace 数据集](#huggingface-数据集) + - [其他](#其他) + - [Arxiv Gentitle 生成题目](#arxiv-gentitle-生成题目) + - [MOSS-003-SFT](#moss-003-sft) + - [Chinese Lawyer](#chinese-lawyer) + - [LLaVA dataset](#llava-dataset) + - [文件结构](#文件结构) + - [预训练 Pretrain](#预训练-pretrain) + - [微调 Finetune](#微调-finetune) + - [RefCOCO dataset](#refcoco-dataset) + - [文件结构](#文件结构-1) + +## HuggingFace 数据集 + +针对 HuggingFace Hub 中的数据集,比如 [alpaca](https://huggingface.co/datasets/tatsu-lab/alpaca),用户可以快速使用它们。更多使用指南请参照[单轮对话文档](./single_turn_conversation.md)和[多轮对话文档](./multi_turn_conversation.md)。 + +## 其他 + +### Arxiv Gentitle 生成题目 + +Arxiv 数据集并未在 HuggingFace Hub上发布,但是可以在 Kaggle 上下载。 + +**步骤 0**,从 https://kaggle.com/datasets/Cornell-University/arxiv 下载原始数据。 + +**步骤 1**,使用 `xtuner preprocess arxiv ${DOWNLOADED_DATA} ${SAVE_DATA_PATH} [optional arguments]` 命令处理数据。 + +例如,提取从 `2020-01-01` 起的所有 `cs.AI`、`cs.CL`、`cs.CV` 论文: + +```shell +xtuner preprocess arxiv ${DOWNLOADED_DATA} ${SAVE_DATA_PATH} --categories cs.AI cs.CL cs.CV --start-date 2020-01-01 +``` + +**步骤 2**,所有的 Arixv Gentitle 配置文件都假设数据集路径为 `./data/arxiv_data.json`。用户可以移动并重命名数据,或者在配置文件中重新设置数据路径。 + +### MOSS-003-SFT + +MOSS-003-SFT 数据集可以在 https://huggingface.co/datasets/fnlp/moss-003-sft-data 下载。 + +**步骤 0**,下载数据。 + +```shell +# 确保已经安装 git-lfs (https://git-lfs.com) +git lfs install +git clone https://huggingface.co/datasets/fnlp/moss-003-sft-data +``` + +**步骤 1**,解压缩。 + +```shell +cd moss-003-sft-data +unzip moss-003-sft-no-tools.jsonl.zip +unzip moss-003-sft-with-tools-no-text2image.zip +``` + +**步骤 2**, 所有的 moss-003-sft 配置文件都假设数据集路径为 `./data/moss-003-sft-no-tools.jsonl` 和 `./data/conversations_with_tools_with_inner_instruction_no_text2image_train_all_random_meta0.5_0.1_0.01_moss_0709.jsonl`。用户可以移动并重命名数据,或者在配置文件中重新设置数据路径。 + +### Chinese Lawyer + +Chinese Lawyer 数据集有两个子数据集,它们可以在 https://github.com/LiuHC0428/LAW-GPT 下载。 + +所有的 Chinese Lawyer 配置文件都假设数据集路径为 `./data/CrimeKgAssitant清洗后_52k.json` 和 `./data/训练数据_带法律依据_92k.json`。用户可以移动并重命名数据,或者在配置文件中重新设置数据路径。 + +### LLaVA dataset + +#### 文件结构 + +``` +./data/llava_data +├── LLaVA-Pretrain +│   ├── blip_laion_cc_sbu_558k.json +│   ├── blip_laion_cc_sbu_558k_meta.json +│   └── images +├── LLaVA-Instruct-150K +│   
└── llava_v1_5_mix665k.json
+└── llava_images
+    ├── coco
+    │   └── train2017
+    ├── gqa
+    │   └── images
+    ├── ocr_vqa
+    │   └── images
+    ├── textvqa
+    │   └── train_images
+    └── vg
+        ├── VG_100K
+        └── VG_100K_2
+```
+
+#### 预训练 Pretrain
+
+LLaVA-Pretrain
+
+```shell
+# Make sure you have git-lfs installed (https://git-lfs.com)
+git lfs install
+git clone https://huggingface.co/datasets/liuhaotian/LLaVA-Pretrain --depth=1
+```
+
+#### 微调 Finetune
+
+1. 文本数据
+
+   1. LLaVA-Instruct-150K
+
+      ```shell
+      # Make sure you have git-lfs installed (https://git-lfs.com)
+      git lfs install
+      git clone https://huggingface.co/datasets/liuhaotian/LLaVA-Instruct-150K --depth=1
+      ```
+
+2. 图片数据
+
+   1. COCO (coco): [train2017](http://images.cocodataset.org/zips/train2017.zip)
+
+   2. GQA (gqa): [images](https://downloads.cs.stanford.edu/nlp/data/gqa/images.zip)
+
+   3. OCR-VQA (ocr_vqa): [download script](https://drive.google.com/drive/folders/1_GYPY5UkUy7HIcR0zq3ZCFgeZN7BAfm_?usp=sharing)
+
+      1. ⚠️ OCR-VQA 所下载的图片命名需要进行修改,以确保所有图片后缀为 `.jpg`!
+
+         ```shell
+         #!/bin/bash
+         # 将 ocr_vqa_path 设置为 OCR-VQA 图片所在目录
+         ocr_vqa_path=""
+
+         find "$ocr_vqa_path" -type f | while read file; do
+             extension="${file##*.}"
+             if [ "$extension" != "jpg" ]
+             then
+                 cp -- "$file" "${file%.*}.jpg"
+             fi
+         done
+         ```
+
+   4. TextVQA (textvqa): [train_val_images](https://dl.fbaipublicfiles.com/textvqa/images/train_val_images.zip)
+
+   5. VisualGenome (VG): [part1](https://cs.stanford.edu/people/rak248/VG_100K_2/images.zip), [part2](https://cs.stanford.edu/people/rak248/VG_100K_2/images2.zip)
+
+### RefCOCO dataset
+
+#### 文件结构
+
+```
+./data
+├── refcoco_annotations
+│   ├── refcoco
+│   │   ├── instances.json
+│   │   ├── refs(google).p
+│   │   └── refs(unc).p
+│   ├── refcoco+
+│   │   ├── instances.json
+│   │   └── refs(unc).p
+│   └── refcocog
+│       ├── instances.json
+│       ├── refs(google).p
+│       └── refs(umd).p
+├── coco_images
+│   ├── *.jpg
+...
+``` + +下载以下链接中的 RefCOCO、RefCOCO+、RefCOCOg文件。 +Coco 2017 与 Coco 2014 都可以作为coco的图片数据。 + +| Image source | Download path | +| ------------ | :------------------------------------------------------------------------------------------: | +| RefCOCO | annotations | +| RefCOCO+ | annotations | +| RefCOCOg | annotations | + +在下载完refcoco相关数据文件后,解压文件并将它们放在 `./data/refcoco_annotations` 目录中。 +然后,我们使用以下命令将注释转换为json格式。此命令将转换后的json文件保存在 `./data/llava_data/RefCOCOJson/` 目录中。 + +```shell +xtuner preprocess refcoco --ann-path $RefCOCO_ANN_PATH --image-path $COCO_IMAGE_PATH \ +--save-path $SAVE_PATH # ./data/llava_data/RefCOCOJson/ +``` diff --git a/data/xtuner/docs/zh_cn/user_guides/finetune.md b/data/xtuner/docs/zh_cn/user_guides/finetune.md new file mode 100644 index 0000000000000000000000000000000000000000..bd6a7f17c41eceeeb415cf79710fff6335515490 --- /dev/null +++ b/data/xtuner/docs/zh_cn/user_guides/finetune.md @@ -0,0 +1,121 @@ +# 微调大语言模型 LLMs + +## QLoRA 微调 [InternLM](https://github.com/InternLM/InternLM) + +- InternLM-7B, oasst1 + + ```shell + xtuner train internlm_7b_qlora_oasst1_e3 + ``` + +- InternLM-7B, Arxiv Gentitle + + ```shell + xtuner train internlm_7b_qlora_arxiv_gentitle_e3 + ``` + +- InternLM-7B, Colorist + + ```shell + xtuner train internlm_7b_qlora_colorist_e5 + ``` + +- InternLM-7B, Coder + + ```shell + xtuner train internlm_7b_qlora_code_alpaca_e3 + ``` + +- InternLM-7B, SQL + + ```shell + xtuner train internlm_7b_qlora_sql_e3 + ``` + +- InternLM-7B, Lawyer + + ```shell + xtuner train internlm_7b_qlora_lawyer_e3 + ``` + +- InternLM-7B, Open-Platypus + + ```shell + xtuner train internlm_7b_qlora_open_platypus_e3 + ``` + +- InternLM-7B, Alpaca-enzh + + ```shell + xtuner train internlm_7b_qlora_alpaca_enzh_e3 + ``` + +## QLoRA 微调 [Llama2](https://github.com/facebookresearch/llama) + +> 在使用 Llama2 之前,请先使用 \`huggingface-cli login\`\` 输入你的访问令牌(access token)!查看[这里](https://huggingface.co/docs/hub/security-tokens#user-access-tokens)了解如何获取访问令牌(access token)。 + +- Llama2-7B, MOSS-003-SFT **(插件!)** + + ```shell + NPROC_PER_NODE=8 xtuner train llama2_7b_qlora_moss_sft_all_e2_gpu8 # Recommended! + xtuner train llama2_7b_qlora_moss_sft_all_e1 + ``` + +- Llama2-7B, Arxiv Gentitle + + ```shell + xtuner train llama2_7b_qlora_arxiv_gentitle_e3 + ``` + +- Llama2-7B, Colorist + + ```shell + xtuner train llama2_7b_qlora_colorist_e5 + ``` + +## QLoRA 微调 [Qwen](https://github.com/QwenLM) + +- Qwen-7B, MOSS-003-SFT **(插件!)** + + ```shell + NPROC_PER_NODE=8 xtuner train qwen_7b_qlora_moss_sft_all_e2_gpu8 # Recommended! 
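+  # 备选:未设置 NPROC_PER_NODE 时,下面的命令默认以单卡方式启动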
+ xtuner train qwen_7b_qlora_moss_sft_all_e1 + ``` + +- Qwen-7B, oasst1 + + ```shell + xtuner train qwen_7b_qlora_oasst1_e3 + ``` + +- Qwen-7B, Arxiv Gentitle + + ```shell + xtuner train qwen_7b_qlora_arxiv_gentitle_e3 + ``` + +- Qwen-7B, Alpaca-enzh + + ```shell + xtuner train qwen_7b_qlora_alpaca_enzh_e3 + ``` + +## QLoRA 微调 [Baichuan](https://github.com/baichuan-inc) + +- Baichuan-7B, oasst1 + + ```shell + xtuner train baichuan_7b_qlora_oasst1_e3 + ``` + +- Baichuan-7B, Arxiv Gentitle + + ```shell + xtuner train baichuan_7b_qlora_arxiv_gentitle_e3 + ``` + +- Baichuan-7B, Alpaca-enzh + + ```shell + xtuner train baichuan_7b_qlora_alpaca_enzh_e3 + ``` diff --git a/data/xtuner/docs/zh_cn/user_guides/ftdp_dataset/Case1.md b/data/xtuner/docs/zh_cn/user_guides/ftdp_dataset/Case1.md new file mode 100644 index 0000000000000000000000000000000000000000..bb9c000670c9ef461196e143418b1a4b7ea60f13 --- /dev/null +++ b/data/xtuner/docs/zh_cn/user_guides/ftdp_dataset/Case1.md @@ -0,0 +1,344 @@ +# 使用 Processed 数据集训练 InternLM2 + +使用尚未 token 化的 ftdp 数据训练 Internlm2 模型的场景。 + +## Step 1, 离线处理数据集 + +ftdp 把 sft 任务的数据处理划分为三个类型,原始数据(origin)、预处理数据(processed)和 token 过的数据(tokenized)。我们需要将预处理过的、具有统一格式的 ftdp 数据 token 化得到直接可以用于训练的格式。其中,预处理数据需要满足以下目录结构: + +``` +|-- processed-dir + |-- data1 + | |-- processed + | |-- sft_chat + | |-- data1.jsonl + |-- data2 + | |-- processed + | |-- sft_chat + | |-- data2.jsonl +``` + +使用以下命令可离线 token 化 ftdp 格式的预处理数据(processed)数据集: + +``` +python xtuner/tools/tokenize_ftdp_datasets.py \ + --processed-dir /path/to/preprocessed/data \ + --tokenized-dir /path/to/tokenized/data \ + --tokenizer-path pretrained_model_name_or_path +``` + +上述命令中: + +1. `--processed-dir` 需要指定预处理后的,具有 ftdp 标准格式的数据路径; +2. `--tokenized-dir` 需要指定为 token 化后的数据存储路径; +3. `--tokenizer-path pretrained_model_name_or_path` 中的 `pretrained_model_name_or_path` 同 `from_pretrained` 接口中的 `pretrained_model_name_or_path`。 + +上述命令执行成功后,会在 `/path/to/tokenized/data/chatml_llamav13_32k` 路径下保存两个子文件夹——`train` 和 `valid`。 + +## Step 2, 导出模板 config 文件 + +XTuner 中目前提供了训练 Internlm2 的模板 config,使用命令: + +``` +xtuner copy-cfg internlm2_7b_w_tokenized_dataset . +``` + +可将训练 Internlm2 的模板 config 导出至当前目录下。 + +## Step 3, 修改模板 config 文件 + +修改模板 config 文件中的训练数据路径为真实数据路径,其中 `/path/to/tokenized/data` 与 Step 1 中的 `/path/to/tokenized/data` 为同一个路径: + +```diff +... + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-7b' +use_varlen_attn = True + +# Data +- dataset_folder = '/path/to/sft/data/folder' ++ dataset_folder = '/path/to/tokenized/data/chatml_llamav13_32k/train' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 32768 +pack_to_max_length = True +... +``` + +在使用 DeepSpeed 训练模型时,如需在保存 checkpoint 时只保存模型权重,而不保存优化器状态,可参考以下步骤: + +1. 确保 mmengine 版本大于等于 0.10.3 + +``` +pip install 'mmengine>=0.10.3' +``` + +2. 修改 Config 文件,CheckpointHook 增加 save_optimizer=False + +```diff +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 100 iterations. + logger=dict(type=LoggerHook, interval=1), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per epoch. + checkpoint=dict( + type=CheckpointHook, ++ save_optimizer=False, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) +``` + +需要注意,经过以上设置后,训练过程不可 resume 。 + +## Step 4, 获取数据顺序 (可选) + +运行下面的代码可获取数据顺序,并存为 txt 文件: + +``` +python xtuner/tools/get_data_order.py \ + --data-folder /path/to/tokenized/data \ + --save-folder /folder/to/save/data/order \ + --file-type ${file_type} +``` + +其中,`--file-type ${file_type}` 表示需要统计所有以 `${file_type}` 为文件名后缀的文件的顺序。 + +例如,需要获取 `/path/to/tokenized/data` 路径下所有以 `.bin` 结尾的文件的顺序,并保存在当前路径下,那么上述命令需要改为: + +``` +python xtuner/tools/get_data_order.py \ + --data-folder /path/to/tokenized/data \ + --save-folder . \ + --file-type .bin +``` + +同时,需要进一步修改 Step 2 中的 Config 文件,并设置数据顺序文件路径: + +```diff +... +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=build_packed_dataset, + dataset_cfg=dict( + type=load_intern_repo_tokenized_dataset, +- data_order_path=None, ++ data_order_path='/folder/to/save/data/order/'+'data_order.txt', + folder=dataset_folder, + min_length=0, + file_type='.bin' + ), + packed_length=max_length, + seed=1024) +``` + +## Step 5, 启动训练 + +在 slurm 集群调度系统中可以通过以下命令启动训练: + +``` +srun ${SRUN_ARGS} xtuner train internlm2_7b_w_tokenized_dataset_copy.py --launcher slurm --deepspeed deepspeed_zero1 +``` + +若出现 OOM 现象,可尝试使用 zero2 或 zero3。以下命令可以使用 zero 3 显存优化策略进行训练: + +``` +srun ${SRUN_ARGS} xtuner train internlm2_7b_w_tokenized_dataset_copy.py --launcher slurm --deepspeed deepspeed_zero3 +``` + +在阿里云 DLC 中可通过以下命令启动训练: + +```diff +export NCCL_IB_TC=136 +export NCCL_IB_SL=5 +export NCCL_IB_GID_INDEX=3 +export NCCL_SOCKET_IFNAME=bond0 +export NCCL_DEBUG=INFO +export NCCL_IB_HCA=mlx5 +export NCCL_IB_TIMEOUT=22 +export NCCL_IB_QPS_PER_CONNECTION=8 +export NCCL_NET_PLUGIN=none + +export NCCL_BUFFSIZE=2097152 +export PYTORCH_CUDA_ALLOC_CONF=max_split_size_mb:512 +- export EXP_NAME=debug ++ export EXP_NAME=your_exp_name +export PYTHONPATH='.':$PYTHONPATH +source ~/.bashrc ++ cd /path/to/xtuner ++ conda activate conda_env_name + +export NPROC_PER_NODE=${KUBERNETES_CONTAINER_RESOURCE_GPU} +export PORT=${MASTER_PORT} +export NNODES=${WORLD_SIZE} +export NODE_RANK=${RANK} +export ADDR=${MASTER_ADDR} + +echo ${KUBERNETES_CONTAINER_RESOURCE_GPU} +echo ${WORLD_SIZE} +echo ${MASTER_PORT} +echo ${MASTER_ADDR} +echo ${RANK} +xtuner train internlm2_7b_w_tokenized_dataset_copy.py \ + --deepspeed deepspeed_zero1 \ + --work-dir work_dirs/${EXP_NAME} + +``` + +## Step 6, 转模型 + +deepspeed 转 hf: + +``` +python xtuner/tools/model_converters/pth_to_hf.py internlm2_7b_w_tokenized_dataset_copy.py /src/model/path /hf/dst/model/path +``` + +hf 转 Turbomind: + +``` +lmdeploy convert internlm2-chat-7b /hf/dst/model/path --dst-path /turbomind/dst/model/path +``` + +## Step 7,Turbomind 评测 + +评测前需要按照[Opencompass 使用文档](https://aicarrier.feishu.cn/wiki/PR28wWg3tiY2xCkuysccRBNenIf#RNcbdEVZ9oulPQxFz9gcOxwjnff)准备环境。 + +使用内部版 Opencompass 的 ca949db74502a68c8a900afdf751c584fb7c7655 这个 commit id 进行评测。在 `configs/sft_cfg/7B/Ampere_chatml_v053/` 目录下添加如下 config : + +```diff +import os.path as osp +from copy import deepcopy + +from mmengine.config import read_base + +with read_base(): + # datasets + from ...dataset_collections.medium_chat_sft_v053 import \ + base_datasets, longtext_datasets, math_agent_datasets, cibench_datasets, plugin_eval_datasets + # summarizer + from ...summarizers.medium_chat_sft_v053 import summarizer + # clusters + from ...clusters.slurm_llmit2 import infer, eval + # lark robot + from ...lark import 
lark_bot_url + # base models cfg + from .base_model.base_model_turbomind import base_model_cfg, base_longtext_model_cfg, base_agent_llm_cfg, base_math_agent_cfg, \ + base_cibench_agent_cfg, base_plugin_eval_model_cfg + +# ------------------ change here ↓ ------------------ +models_path = [ ++ '/path/to/turbomind_model' +] + +# users can set `auto`, `spot`, or `reserved`. Defaults to `auto`. +infer['runner']['quotatype'] = 'auto' +infer['runner']['max_num_workers'] = 32 +infer['runner']['partition'] = 'llmit2' + +eval['runner']['quotatype'] = 'auto' +eval['runner']['max_num_workers'] = 64 +eval['runner']['partition'] = 'llmit2' +# ------------------ change end ------------------ + +# ------------------ default settings ↓ ------------------ +# careful to change the following settings + +# add different eval models +base_models = [] +longtext_models = [] +math_agent_models = [] +cibench_agent_models = [] +plugin_eval_models = [] +for model_path in models_path: + if model_path.endswith('/'): + model_path = model_path[:-1] + abbr = osp.split(osp.split(model_path)[0])[-1] + ckpt_iter = osp.split(model_path)[-1] + + summarizer_abbr = f"{abbr}@{ckpt_iter}" + + tmp_base_model_cfg = deepcopy(base_model_cfg) + tmp_base_model_cfg['abbr'] = f"{abbr}@{ckpt_iter}" + tmp_base_model_cfg['summarizer_abbr'] = summarizer_abbr + tmp_base_model_cfg['path'] = model_path + + # process base model + base_models.append(tmp_base_model_cfg) + + # process longtext model + tmp_longtext_model_cfg = deepcopy(base_longtext_model_cfg) + tmp_longtext_model_cfg['abbr'] = f"{abbr}@{ckpt_iter}-longtext" + tmp_longtext_model_cfg['summarizer_abbr'] = summarizer_abbr + tmp_longtext_model_cfg['path'] = model_path + longtext_models.append(tmp_longtext_model_cfg) + + # set agent model cfg + tmp_agent_llm_cfg = deepcopy(base_agent_llm_cfg) + tmp_agent_llm_cfg['path'] = model_path + + # process math agent model + tmp_math_agent_cfg = deepcopy(base_math_agent_cfg) + tmp_math_agent_cfg['abbr'] = f"{abbr}@{ckpt_iter}-math-react" + tmp_math_agent_cfg['summarizer_abbr'] = summarizer_abbr + tmp_math_agent_cfg['llm'] = tmp_agent_llm_cfg + math_agent_models.append(tmp_math_agent_cfg) + + # process cibench agent model + tmp_cibench_agent_cfg = deepcopy(base_cibench_agent_cfg) + tmp_cibench_agent_cfg['abbr'] = f"{abbr}@{ckpt_iter}-cibench-react" + tmp_cibench_agent_cfg['summarizer_abbr'] = summarizer_abbr + tmp_cibench_agent_cfg['llm'] = tmp_agent_llm_cfg + cibench_agent_models.append(tmp_cibench_agent_cfg) + + # process plugin eval model + tmp_plugin_eval_model_cfg = deepcopy(base_plugin_eval_model_cfg) + tmp_plugin_eval_model_cfg['abbr'] = f"{abbr}@{ckpt_iter}-plugin-eval" + tmp_plugin_eval_model_cfg['summarizer_abbr'] = summarizer_abbr + tmp_plugin_eval_model_cfg['path'] = model_path + plugin_eval_models.append(tmp_plugin_eval_model_cfg) + +del tmp_base_model_cfg, tmp_longtext_model_cfg, tmp_agent_llm_cfg, \ + tmp_math_agent_cfg, tmp_cibench_agent_cfg, tmp_plugin_eval_model_cfg + +# set all models +model_dataset_combinations = [] +models = [] +datasets = [] + +# The agent test is relatively slow, so they placed first. 
+# process longtext datasets +model_dataset_combinations.append(dict(models=longtext_models, datasets=longtext_datasets)) +models.extend(longtext_models) +datasets.extend(longtext_datasets) +# process math agent datasets +model_dataset_combinations.append(dict(models=math_agent_models, datasets=math_agent_datasets)) +models.extend(math_agent_models) +datasets.extend(math_agent_datasets) +# process cibench agent datasets +model_dataset_combinations.append(dict(models=cibench_agent_models, datasets=cibench_datasets)) +models.extend(cibench_agent_models) +datasets.extend(cibench_datasets) +# process plugin eval datasets +model_dataset_combinations.append(dict(models=plugin_eval_models, datasets=plugin_eval_datasets)) +models.extend(plugin_eval_models) +datasets.extend(plugin_eval_datasets) + +# process base datasets +model_dataset_combinations.append(dict(models=base_models, datasets=base_datasets)) +models.extend(base_models) +datasets.extend(base_datasets) + +# ------------------ default settings end ------------------ + +``` diff --git a/data/xtuner/docs/zh_cn/user_guides/ftdp_dataset/Case2.md b/data/xtuner/docs/zh_cn/user_guides/ftdp_dataset/Case2.md new file mode 100644 index 0000000000000000000000000000000000000000..5096e896acfad7849030fbfbc533c71d7eb1a427 --- /dev/null +++ b/data/xtuner/docs/zh_cn/user_guides/ftdp_dataset/Case2.md @@ -0,0 +1,361 @@ +# 使用 Processed 数据集训练非 InternLM2 模型 + +使用尚未 token 化的 ftdp 数据训练其他模型(以 Mistral 为例),且需要用 Internlm2 对话模板覆盖原有对话模板以便让模型掌握 agent 、tool 能力。 + +## Step 1, 离线处理数据集 + +ftdp 把 sft 任务的数据处理划分为三个类型,原始数据(origin)、预处理数据(processed)和 token 过的数据(tokenized)。我们需要将预处理过的、具有统一格式的 ftdp 数据 token 化得到直接可以用于训练的格式。其中,预处理数据需要满足以下目录结构: + +``` +|-- processed-dir + |-- data1 + | |-- processed + | |-- sft_chat + | |-- data1.jsonl + |-- data2 + | |-- processed + | |-- sft_chat + | |-- data2.jsonl +``` + +使用以下命令可离线 token 化 ftdp 格式的预处理数据(processed)数据集: + +``` +python xtuner/tools/tokenize_ftdp_datasets.py \ + --processed-dir /path/to/preprocessed/data \ + --tokenized-dir /path/to/tokenized/data \ + --tokenizer-path pretrained_model_name_or_path \ + --tokenizer-w-special-tokens-save-dir /path/to/save/new/tokenizer +``` + +上述命令中: + +1. `--processed-dir` 需要指定预处理后的,具有 ftdp 标准格式的数据路径(同 Case 1); +2. `--tokenized-dir` 需要指定为 token 化后的数据存储路径(同 Case 1); +3. `--tokenizer-path pretrained_model_name_or_path` 中的 `pretrained_model_name_or_path` 同 `from_pretrained` 接口中的 `pretrained_model_name_or_path`(同 Case 1); +4. 由于除 Internlm2 外的其他模型(如 mistral 等)没有 internlm2-chat 模型的智能体、工具调用等功能的对话模板,因此对于非 internlm2 模型,需要将 internlm2-chat 对话模板中的一些特殊字符(如:\<|im_start|>、\<|plugin|>等)加入到新模型的 tokenizer 的 special tokens 中,需要通过 `--tokenizer-w-special-tokens-save-dir` 指定新 tokenizer 的存储路径。**同时,后续训练过程需要使用新保存的 tokenizer 而非原始 tokenizer。** + +## Step 2, 导出模板 config 文件 + +XTuner 中目前提供了训练 Mistral 的模板 config,使用命令: + +``` +xtuner copy-cfg mistral_7b_w_tokenized_dataset . +``` + +可将训练 Mistral 的模板 config 导出至当前目录下。 + +## Step 3, 修改模板 config 文件 + +1. 修改模板 config 文件中的训练数据路径为真实数据路径,其中 `/path/to/tokenized/data` 需要基于 Step 1 中的 `/path/to/tokenized/data` 进一步指定 train folder,即 `/path/to/tokenized/data/chatml_llamav13_32k/train/` 。 +2. 需要修改 tokenizer 路径为 Step 1 保存的路径 `/path/to/save/new/tokenizer`。 +3. 由于 Step 1 扩充了 tokenizer 的词表,因此需要将新 tokenizer 传入 `SupervisedFinetune` 中,以扩展 llm model 的词表大小。 + +```diff +... 
+ +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'mistralai/Mistral-7B-v0.1' +# 已经使用 Internlm2 的对话模板覆盖了 Mistral 的原有模板,new tokenizer 中已经 +# 添加了 Internlm2 对话模板中的特殊字符。 +# 请参考 docs/zh_cn/user_guides/finetune_custom_dataset.md +- tokenizer_path = '/new/tokenizer/path' ++ tokenizer_path = '/path/to/save/new/tokenizer' +use_varlen_attn = True + +# Data +- dataset_folder = '/path/to/sft/data/folder' ++ dataset_folder = '/path/to/tokenized/data/chatml_llamav13_32k/train' +# 已经使用 Internlm2 的对话模板覆盖了 Mistral 的原有模板 +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 32768 +pack_to_max_length = True +... + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +model = dict( ++ tokenizer=tokenizer, + ...) +``` + +在使用 DeepSpeed 训练模型时,如需在保存 checkpoint 时只保存模型权重,而不保存优化器状态,可参考以下步骤: + +1. 确保 mmengine 版本大于等于 0.10.3 + +``` +pip install 'mmengine>=0.10.3' +``` + +2. 修改 Config 文件,CheckpointHook 增加 save_optimizer=False + +```diff +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 100 iterations. + logger=dict(type=LoggerHook, interval=1), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per epoch. + checkpoint=dict( + type=CheckpointHook, ++ save_optimizer=False, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) +``` + +需要注意,经过以上设置后,训练过程不可 resume 。 + +## Step 4, 获取数据顺序 (可选) + +运行下面的代码可获取数据顺序,并存为 txt 文件: + +``` +python xtuner/tools/get_data_order.py \ + --data-folder /path/to/tokenized/data \ + --save-folder /folder/to/save/data/order \ + --file-type ${file_type} +``` + +其中,`--file-type ${file_type}` 表示需要统计所有以 `${file_type}` 为文件名后缀的文件的顺序。 + +例如,需要获取 `/path/to/tokenized/data` 路径下所有以 `.bin` 结尾的文件的顺序,并保存在当前路径下,那么上述命令需要改为: + +``` +python xtuner/tools/get_data_order.py \ + --data-folder /path/to/tokenized/data \ + --save-folder . \ + --file-type .bin +``` + +同时,需要进一步修改 Step 2 中的 Config 文件,并设置数据顺序文件路径: + +```diff +... 
+####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=build_packed_dataset, + dataset_cfg=dict( + type=load_intern_repo_tokenized_dataset, +- data_order_path=None, ++ data_order_path='/folder/to/save/data/order/'+'data_order.txt', + folder=dataset_folder, + min_length=0, + file_type='.bin' + ), + packed_length=max_length, + seed=1024) +``` + +## Step 5, 启动训练 + +注:训练前期(几十个 iters)loss 偏高是正常现象,因为模型需要时间学习 Internlm2 的对话模板。 + +在 slurm 集群调度系统中可以通过以下命令启动训练: + +``` +srun ${SRUN_ARGS} xtuner train mistral_7b_w_tokenized_dataset_copy.py --launcher slurm --deepspeed deepspeed_zero1 +``` + +若出现 OOM 现象,可尝试使用 zero2 或 zero3。以下命令可以使用 zero 3 显存优化策略进行训练: + +``` +srun ${SRUN_ARGS} xtuner train internlm2_7b_w_tokenized_dataset_copy.py --launcher slurm --deepspeed deepspeed_zero3 +``` + +在阿里云 DLC 中可通过以下命令启动训练: + +```diff +export NCCL_IB_TC=136 +export NCCL_IB_SL=5 +export NCCL_IB_GID_INDEX=3 +export NCCL_SOCKET_IFNAME=bond0 +export NCCL_DEBUG=INFO +export NCCL_IB_HCA=mlx5 +export NCCL_IB_TIMEOUT=22 +export NCCL_IB_QPS_PER_CONNECTION=8 +export NCCL_NET_PLUGIN=none + +export NCCL_BUFFSIZE=2097152 +export PYTORCH_CUDA_ALLOC_CONF=max_split_size_mb:512 +- export EXP_NAME=debug ++ export EXP_NAME=your_exp_name +export PYTHONPATH='.':$PYTHONPATH +source ~/.bashrc ++ cd /path/to/xtuner ++ conda activate conda_env_name + +export NPROC_PER_NODE=${KUBERNETES_CONTAINER_RESOURCE_GPU} +export PORT=${MASTER_PORT} +export NNODES=${WORLD_SIZE} +export NODE_RANK=${RANK} +export ADDR=${MASTER_ADDR} + +echo ${KUBERNETES_CONTAINER_RESOURCE_GPU} +echo ${WORLD_SIZE} +echo ${MASTER_PORT} +echo ${MASTER_ADDR} +echo ${RANK} +xtuner train mistral_7b_w_tokenized_dataset_copy.py \ + --deepspeed deepspeed_zero1 \ + --work-dir work_dirs/${EXP_NAME} + +``` + +## Step 6, 转模型 + +deepspeed 转 hf: + +``` +python xtuner/tools/model_converters/pth_to_hf.py mistral_7b_w_tokenized_dataset_copy.py /src/model/path /hf/dst/model/path +``` + +hf 转 Turbomind: + +``` +lmdeploy convert internlm2-chat-7b /hf/dst/model/path --dst-path /turbomind/dst/model/path +``` + +## Step 7,Turbomind 评测 + +评测前需要按照[Opencompass 使用文档](https://aicarrier.feishu.cn/wiki/PR28wWg3tiY2xCkuysccRBNenIf#RNcbdEVZ9oulPQxFz9gcOxwjnff)准备环境。 + +使用内部版 Opencompass 的 ca949db74502a68c8a900afdf751c584fb7c7655 这个 commit id 进行评测。在 `configs/sft_cfg/7B/Ampere_chatml_v053/` 目录下添加如下 config : + +```diff +import os.path as osp +from copy import deepcopy + +from mmengine.config import read_base + +with read_base(): + # datasets + from ...dataset_collections.medium_chat_sft_v053 import \ + base_datasets, longtext_datasets, math_agent_datasets, cibench_datasets, plugin_eval_datasets + # summarizer + from ...summarizers.medium_chat_sft_v053 import summarizer + # clusters + from ...clusters.slurm_llmit2 import infer, eval + # lark robot + from ...lark import lark_bot_url + # base models cfg + from .base_model.base_model_turbomind import base_model_cfg, base_longtext_model_cfg, base_agent_llm_cfg, base_math_agent_cfg, \ + base_cibench_agent_cfg, base_plugin_eval_model_cfg + +# ------------------ change here ↓ ------------------ +models_path = [ ++ '/path/to/turbomind_model' +] + +# users can set `auto`, `spot`, or `reserved`. Defaults to `auto`. 
+infer['runner']['quotatype'] = 'auto' +infer['runner']['max_num_workers'] = 32 +infer['runner']['partition'] = 'llmit2' + +eval['runner']['quotatype'] = 'auto' +eval['runner']['max_num_workers'] = 64 +eval['runner']['partition'] = 'llmit2' +# ------------------ change end ------------------ + +# ------------------ default settings ↓ ------------------ +# careful to change the following settings + +# add different eval models +base_models = [] +longtext_models = [] +math_agent_models = [] +cibench_agent_models = [] +plugin_eval_models = [] +for model_path in models_path: + if model_path.endswith('/'): + model_path = model_path[:-1] + abbr = osp.split(osp.split(model_path)[0])[-1] + ckpt_iter = osp.split(model_path)[-1] + + summarizer_abbr = f"{abbr}@{ckpt_iter}" + + tmp_base_model_cfg = deepcopy(base_model_cfg) + tmp_base_model_cfg['abbr'] = f"{abbr}@{ckpt_iter}" + tmp_base_model_cfg['summarizer_abbr'] = summarizer_abbr + tmp_base_model_cfg['path'] = model_path + + # process base model + base_models.append(tmp_base_model_cfg) + + # process longtext model + tmp_longtext_model_cfg = deepcopy(base_longtext_model_cfg) + tmp_longtext_model_cfg['abbr'] = f"{abbr}@{ckpt_iter}-longtext" + tmp_longtext_model_cfg['summarizer_abbr'] = summarizer_abbr + tmp_longtext_model_cfg['path'] = model_path + longtext_models.append(tmp_longtext_model_cfg) + + # set agent model cfg + tmp_agent_llm_cfg = deepcopy(base_agent_llm_cfg) + tmp_agent_llm_cfg['path'] = model_path + + # process math agent model + tmp_math_agent_cfg = deepcopy(base_math_agent_cfg) + tmp_math_agent_cfg['abbr'] = f"{abbr}@{ckpt_iter}-math-react" + tmp_math_agent_cfg['summarizer_abbr'] = summarizer_abbr + tmp_math_agent_cfg['llm'] = tmp_agent_llm_cfg + math_agent_models.append(tmp_math_agent_cfg) + + # process cibench agent model + tmp_cibench_agent_cfg = deepcopy(base_cibench_agent_cfg) + tmp_cibench_agent_cfg['abbr'] = f"{abbr}@{ckpt_iter}-cibench-react" + tmp_cibench_agent_cfg['summarizer_abbr'] = summarizer_abbr + tmp_cibench_agent_cfg['llm'] = tmp_agent_llm_cfg + cibench_agent_models.append(tmp_cibench_agent_cfg) + + # process plugin eval model + tmp_plugin_eval_model_cfg = deepcopy(base_plugin_eval_model_cfg) + tmp_plugin_eval_model_cfg['abbr'] = f"{abbr}@{ckpt_iter}-plugin-eval" + tmp_plugin_eval_model_cfg['summarizer_abbr'] = summarizer_abbr + tmp_plugin_eval_model_cfg['path'] = model_path + plugin_eval_models.append(tmp_plugin_eval_model_cfg) + +del tmp_base_model_cfg, tmp_longtext_model_cfg, tmp_agent_llm_cfg, \ + tmp_math_agent_cfg, tmp_cibench_agent_cfg, tmp_plugin_eval_model_cfg + +# set all models +model_dataset_combinations = [] +models = [] +datasets = [] + +# The agent test is relatively slow, so they placed first. 
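+# base 数据集的组合放在最后处理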
+# process longtext datasets +model_dataset_combinations.append(dict(models=longtext_models, datasets=longtext_datasets)) +models.extend(longtext_models) +datasets.extend(longtext_datasets) +# process math agent datasets +model_dataset_combinations.append(dict(models=math_agent_models, datasets=math_agent_datasets)) +models.extend(math_agent_models) +datasets.extend(math_agent_datasets) +# process cibench agent datasets +model_dataset_combinations.append(dict(models=cibench_agent_models, datasets=cibench_datasets)) +models.extend(cibench_agent_models) +datasets.extend(cibench_datasets) +# process plugin eval datasets +model_dataset_combinations.append(dict(models=plugin_eval_models, datasets=plugin_eval_datasets)) +models.extend(plugin_eval_models) +datasets.extend(plugin_eval_datasets) + +# process base datasets +model_dataset_combinations.append(dict(models=base_models, datasets=base_datasets)) +models.extend(base_models) +datasets.extend(base_datasets) + +# ------------------ default settings end ------------------ + +``` diff --git a/data/xtuner/docs/zh_cn/user_guides/ftdp_dataset/Case3.md b/data/xtuner/docs/zh_cn/user_guides/ftdp_dataset/Case3.md new file mode 100644 index 0000000000000000000000000000000000000000..e34fe5295c7d29b021b2c79702dc04b004a57fa3 --- /dev/null +++ b/data/xtuner/docs/zh_cn/user_guides/ftdp_dataset/Case3.md @@ -0,0 +1,146 @@ +# 使用 Processed 普通对话数据集训任意模型 + +使用尚未 token 化的 ftdp 数据进行训练,保持待训练模型的对话模板不变,且不需要进行离线处理的场景。 + +## Step 1, 导出模板 config 文件 + +XTuner 中目前提供了训练 Internlm2 的模板 config,使用命令: + +``` +xtuner copy-cfg internlm2_7b_w_untokenized_dataset . +``` + +可将训练 Internlm2 的模板 config 导出至当前目录下。 + +## Step 2, 修改模板 config 文件 + +修改模板 config 文件中的训练数据路径为真实数据路径,路径中的所有以 `.json` 为后缀的数据将会作为训练数据: + +```diff +... + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-7b' +use_varlen_attn = True + +# Data +- dataset_folder = '/mnt/petrelfs/share_data/caoweihan/v1_sample_with_legal_cate' ++ dataset_folder = '/path/to/untokenized/data' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 32768 +pack_to_max_length = True +... +``` + +## Step 3, 获取数据顺序 (可选) + +运行下面的代码可获取数据顺序,并存为 txt 文件: + +``` +python xtuner/tools/get_data_order.py \ + --data-folder /path/to/untokenized/data \ + --save-folder /folder/to/save/data/order \ + --file-type ${file_type} +``` + +其中,`--file-type ${file_type}` 表示需要统计所有以 `${file_type}` 为文件名后缀的文件的顺序。 + +例如,需要获取 `/path/to/untokenized/data` 路径下所有以 `.json` 结尾的文件的顺序,并保存在当前路径下,那么上述命令需要改为: + +``` +python xtuner/tools/get_data_order.py \ + --data-folder /path/to/untokenized/data \ + --save-folder . \ + --file-type .json +``` + +同时,需要进一步修改 Step 2 中的 Config 文件,并设置数据顺序文件路径: + +```diff +... 
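+# 下方 data_order_path 需指向 Step 3 中保存的 data_order.txt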
+####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=build_packed_dataset, + dataset_cfg=dict( + type=load_intern_repo_tokenized_dataset, +- data_order_path=None, ++ data_order_path='/folder/to/save/data/order/'+'data_order.txt', + folder=dataset_folder, + min_length=0, +- file_type='.bin' # 指定 data_order_path 后,file_type 参数就不需要设置了 + ), + packed_length=max_length, + seed=1024) +``` + +## Step 4, 启动训练 + +在 slurm 集群调度系统中可以通过以下命令启动训练: + +``` +srun ${SRUN_ARGS} xtuner train internlm2_7b_w_untokenized_dataset_copy.py --launcher slurm --deepspeed deepspeed_zero1 +``` + +若出现 OOM 现象,可尝试使用 zero2 或 zero3。以下命令可以使用 zero 3 显存优化策略进行训练: + +``` +srun ${SRUN_ARGS} xtuner train internlm2_7b_w_tokenized_dataset_copy.py --launcher slurm --deepspeed deepspeed_zero3 +``` + +在阿里云 DLC 中可通过以下命令启动训练: + +```diff +export NCCL_IB_TC=136 +export NCCL_IB_SL=5 +export NCCL_IB_GID_INDEX=3 +export NCCL_SOCKET_IFNAME=bond0 +export NCCL_DEBUG=INFO +export NCCL_IB_HCA=mlx5 +export NCCL_IB_TIMEOUT=22 +export NCCL_IB_QPS_PER_CONNECTION=8 +export NCCL_NET_PLUGIN=none + +export NCCL_BUFFSIZE=2097152 +export PYTORCH_CUDA_ALLOC_CONF=max_split_size_mb:512 +- export EXP_NAME=debug ++ export EXP_NAME=your_exp_name +export PYTHONPATH='.':$PYTHONPATH +source ~/.bashrc ++ cd /path/to/xtuner ++ conda activate conda_env_name + +export NPROC_PER_NODE=${KUBERNETES_CONTAINER_RESOURCE_GPU} +export PORT=${MASTER_PORT} +export NNODES=${WORLD_SIZE} +export NODE_RANK=${RANK} +export ADDR=${MASTER_ADDR} + +echo ${KUBERNETES_CONTAINER_RESOURCE_GPU} +echo ${WORLD_SIZE} +echo ${MASTER_PORT} +echo ${MASTER_ADDR} +echo ${RANK} +xtuner train internlm2_7b_w_untokenized_dataset_copy.py \ + --deepspeed deepspeed_zero1 \ + --work-dir work_dirs/${EXP_NAME} + +``` + +## Step 5, 转模型 + +deepspeed 转 hf: + +``` +python xtuner/tools/model_converters/pth_to_hf.py internlm2_7b_w_untokenized_dataset_copy.py /src/model/path /hf/dst/model/path +``` + +hf 转 Turbomind: + +``` +lmdeploy convert internlm2-chat-7b /hf/dst/model/path --dst-path /turbomind/dst/model/path +``` diff --git a/data/xtuner/docs/zh_cn/user_guides/ftdp_dataset/Case4.md b/data/xtuner/docs/zh_cn/user_guides/ftdp_dataset/Case4.md new file mode 100644 index 0000000000000000000000000000000000000000..a159a11236d056e8581452d9fafedcdf685c50d5 --- /dev/null +++ b/data/xtuner/docs/zh_cn/user_guides/ftdp_dataset/Case4.md @@ -0,0 +1,311 @@ +# 使用 Tokenized 数据集训练 InternLM2 + +使用已经 token 化的 ftdp 数据训练 Internlm2 模型。 + +## Step 1, 导出模板 config 文件 + +XTuner 中目前提供了训练 Internlm2 的模板 config,使用命令: + +``` +xtuner copy-cfg internlm2_7b_w_tokenized_dataset . +``` + +可将训练 Internlm2 的模板 config 导出至当前目录下。 + +## Step 2, 修改模板 config 文件 + +修改模板 config 文件中的训练数据路径为真实数据路径: + +```diff +... + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-7b' +use_varlen_attn = True + +# Data +- dataset_folder = '/path/to/sft/data/folder' ++ dataset_folder = '/path/to/tokenized/data/chatml_llamav13_32k/train' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 32768 +pack_to_max_length = True +... +``` + +在使用 DeepSpeed 训练模型时,如需在保存 checkpoint 时只保存模型权重,而不保存优化器状态,可参考以下步骤: + +1. 确保 mmengine 版本大于等于 0.10.3 + +``` +pip install 'mmengine>=0.10.3' +``` + +2. 
修改 Config 文件,CheckpointHook 增加 save_optimizer=False + +```diff +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 100 iterations. + logger=dict(type=LoggerHook, interval=1), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per epoch. + checkpoint=dict( + type=CheckpointHook, ++ save_optimizer=False, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) +``` + +需要注意,经过以上设置后,训练过程不可 resume 。 + +## Step 3, 获取数据顺序 (可选) + +运行下面的代码可获取数据顺序,并存为 txt 文件: + +``` +python xtuner/tools/get_data_order.py \ + --data-folder /path/to/tokenized/data \ + --save-folder /folder/to/save/data/order \ + --file-type ${file_type} +``` + +其中,`--file-type ${file_type}` 表示需要统计所有以 `${file_type}` 为文件名后缀的文件的顺序。 + +例如,需要获取 `/path/to/tokenized/data` 路径下所有以 `.bin` 结尾的文件的顺序,并保存在当前路径下,那么上述命令需要改为: + +``` +python xtuner/tools/get_data_order.py \ + --data-folder /path/to/tokenized/data \ + --save-folder . \ + --file-type .bin +``` + +同时,需要进一步修改 Step 2 中的 Config 文件,并设置数据顺序文件路径: + +```diff +... +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=build_packed_dataset, + dataset_cfg=dict( + type=load_intern_repo_tokenized_dataset, +- data_order_path=None, ++ data_order_path='/folder/to/save/data/order/'+'data_order.txt', + folder=dataset_folder, + min_length=0, + file_type='.bin' + ), + packed_length=max_length, + seed=1024) +``` + +## Step 4, 启动训练 + +在 slurm 集群调度系统中可以通过以下命令启动训练: + +``` +srun ${SRUN_ARGS} xtuner train internlm2_7b_w_tokenized_dataset_copy.py --launcher slurm --deepspeed deepspeed_zero1 +``` + +若出现 OOM 现象,可尝试使用 zero2 或 zero3。以下命令可以使用 zero 3 显存优化策略进行训练: + +``` +srun ${SRUN_ARGS} xtuner train internlm2_7b_w_tokenized_dataset_copy.py --launcher slurm --deepspeed deepspeed_zero3 +``` + +在阿里云 DLC 中可通过以下命令启动训练: + +```diff +export NCCL_IB_TC=136 +export NCCL_IB_SL=5 +export NCCL_IB_GID_INDEX=3 +export NCCL_SOCKET_IFNAME=bond0 +export NCCL_DEBUG=INFO +export NCCL_IB_HCA=mlx5 +export NCCL_IB_TIMEOUT=22 +export NCCL_IB_QPS_PER_CONNECTION=8 +export NCCL_NET_PLUGIN=none + +export NCCL_BUFFSIZE=2097152 +export PYTORCH_CUDA_ALLOC_CONF=max_split_size_mb:512 +- export EXP_NAME=debug ++ export EXP_NAME=your_exp_name +export PYTHONPATH='.':$PYTHONPATH +source ~/.bashrc ++ cd /path/to/xtuner ++ conda activate conda_env_name + +export NPROC_PER_NODE=${KUBERNETES_CONTAINER_RESOURCE_GPU} +export PORT=${MASTER_PORT} +export NNODES=${WORLD_SIZE} +export NODE_RANK=${RANK} +export ADDR=${MASTER_ADDR} + +echo ${KUBERNETES_CONTAINER_RESOURCE_GPU} +echo ${WORLD_SIZE} +echo ${MASTER_PORT} +echo ${MASTER_ADDR} +echo ${RANK} +xtuner train internlm2_7b_w_tokenized_dataset_copy.py \ + --deepspeed deepspeed_zero1 \ + --work-dir work_dirs/${EXP_NAME} + +``` + +## Step 5, 转模型 + +deepspeed 转 hf: + +``` +python xtuner/tools/model_converters/pth_to_hf.py internlm2_7b_w_tokenized_dataset_copy.py /src/model/path /hf/dst/model/path +``` + +hf 转 Turbomind: + +``` +lmdeploy convert internlm2-chat-7b /hf/dst/model/path --dst-path /turbomind/dst/model/path +``` + +## Step 6,Turbomind 评测 + +评测前需要按照[Opencompass 使用文档](https://aicarrier.feishu.cn/wiki/PR28wWg3tiY2xCkuysccRBNenIf#RNcbdEVZ9oulPQxFz9gcOxwjnff)准备环境。 + +使用内部版 Opencompass 的 ca949db74502a68c8a900afdf751c584fb7c7655 
这个 commit id 进行评测。在 `configs/sft_cfg/7B/Ampere_chatml_v053/` 目录下添加如下 config : + +```diff +import os.path as osp +from copy import deepcopy + +from mmengine.config import read_base + +with read_base(): + # datasets + from ...dataset_collections.medium_chat_sft_v053 import \ + base_datasets, longtext_datasets, math_agent_datasets, cibench_datasets, plugin_eval_datasets + # summarizer + from ...summarizers.medium_chat_sft_v053 import summarizer + # clusters + from ...clusters.slurm_llmit2 import infer, eval + # lark robot + from ...lark import lark_bot_url + # base models cfg + from .base_model.base_model_turbomind import base_model_cfg, base_longtext_model_cfg, base_agent_llm_cfg, base_math_agent_cfg, \ + base_cibench_agent_cfg, base_plugin_eval_model_cfg + +# ------------------ change here ↓ ------------------ +models_path = [ ++ '/path/to/turbomind_model' +] + +# users can set `auto`, `spot`, or `reserved`. Defaults to `auto`. +infer['runner']['quotatype'] = 'auto' +infer['runner']['max_num_workers'] = 32 +infer['runner']['partition'] = 'llmit2' + +eval['runner']['quotatype'] = 'auto' +eval['runner']['max_num_workers'] = 64 +eval['runner']['partition'] = 'llmit2' +# ------------------ change end ------------------ + +# ------------------ default settings ↓ ------------------ +# careful to change the following settings + +# add different eval models +base_models = [] +longtext_models = [] +math_agent_models = [] +cibench_agent_models = [] +plugin_eval_models = [] +for model_path in models_path: + if model_path.endswith('/'): + model_path = model_path[:-1] + abbr = osp.split(osp.split(model_path)[0])[-1] + ckpt_iter = osp.split(model_path)[-1] + + summarizer_abbr = f"{abbr}@{ckpt_iter}" + + tmp_base_model_cfg = deepcopy(base_model_cfg) + tmp_base_model_cfg['abbr'] = f"{abbr}@{ckpt_iter}" + tmp_base_model_cfg['summarizer_abbr'] = summarizer_abbr + tmp_base_model_cfg['path'] = model_path + + # process base model + base_models.append(tmp_base_model_cfg) + + # process longtext model + tmp_longtext_model_cfg = deepcopy(base_longtext_model_cfg) + tmp_longtext_model_cfg['abbr'] = f"{abbr}@{ckpt_iter}-longtext" + tmp_longtext_model_cfg['summarizer_abbr'] = summarizer_abbr + tmp_longtext_model_cfg['path'] = model_path + longtext_models.append(tmp_longtext_model_cfg) + + # set agent model cfg + tmp_agent_llm_cfg = deepcopy(base_agent_llm_cfg) + tmp_agent_llm_cfg['path'] = model_path + + # process math agent model + tmp_math_agent_cfg = deepcopy(base_math_agent_cfg) + tmp_math_agent_cfg['abbr'] = f"{abbr}@{ckpt_iter}-math-react" + tmp_math_agent_cfg['summarizer_abbr'] = summarizer_abbr + tmp_math_agent_cfg['llm'] = tmp_agent_llm_cfg + math_agent_models.append(tmp_math_agent_cfg) + + # process cibench agent model + tmp_cibench_agent_cfg = deepcopy(base_cibench_agent_cfg) + tmp_cibench_agent_cfg['abbr'] = f"{abbr}@{ckpt_iter}-cibench-react" + tmp_cibench_agent_cfg['summarizer_abbr'] = summarizer_abbr + tmp_cibench_agent_cfg['llm'] = tmp_agent_llm_cfg + cibench_agent_models.append(tmp_cibench_agent_cfg) + + # process plugin eval model + tmp_plugin_eval_model_cfg = deepcopy(base_plugin_eval_model_cfg) + tmp_plugin_eval_model_cfg['abbr'] = f"{abbr}@{ckpt_iter}-plugin-eval" + tmp_plugin_eval_model_cfg['summarizer_abbr'] = summarizer_abbr + tmp_plugin_eval_model_cfg['path'] = model_path + plugin_eval_models.append(tmp_plugin_eval_model_cfg) + +del tmp_base_model_cfg, tmp_longtext_model_cfg, tmp_agent_llm_cfg, \ + tmp_math_agent_cfg, tmp_cibench_agent_cfg, tmp_plugin_eval_model_cfg + +# set all models 
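+# model_dataset_combinations 记录“模型组 × 数据集组”的评测组合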
+model_dataset_combinations = [] +models = [] +datasets = [] + +# The agent test is relatively slow, so they placed first. +# process longtext datasets +model_dataset_combinations.append(dict(models=longtext_models, datasets=longtext_datasets)) +models.extend(longtext_models) +datasets.extend(longtext_datasets) +# process math agent datasets +model_dataset_combinations.append(dict(models=math_agent_models, datasets=math_agent_datasets)) +models.extend(math_agent_models) +datasets.extend(math_agent_datasets) +# process cibench agent datasets +model_dataset_combinations.append(dict(models=cibench_agent_models, datasets=cibench_datasets)) +models.extend(cibench_agent_models) +datasets.extend(cibench_datasets) +# process plugin eval datasets +model_dataset_combinations.append(dict(models=plugin_eval_models, datasets=plugin_eval_datasets)) +models.extend(plugin_eval_models) +datasets.extend(plugin_eval_datasets) + +# process base datasets +model_dataset_combinations.append(dict(models=base_models, datasets=base_datasets)) +models.extend(base_models) +datasets.extend(base_datasets) + +# ------------------ default settings end ------------------ + +``` diff --git a/data/xtuner/docs/zh_cn/user_guides/ftdp_dataset/README.md b/data/xtuner/docs/zh_cn/user_guides/ftdp_dataset/README.md new file mode 100644 index 0000000000000000000000000000000000000000..900e30e4365eff5c152571e7a78ba77d745ac305 --- /dev/null +++ b/data/xtuner/docs/zh_cn/user_guides/ftdp_dataset/README.md @@ -0,0 +1,24 @@ +ftdp 是一个闭源的处理数据工具,开源社区用户可以忽略此文档。 + +本节介绍了常见的 4 种使用 ftdp 数据集训练的使用场景: + +- [Case 1: 使用 Processed 数据集训练 InternLM2](Case1.md) +- [Case 2: 使用 Processed 数据集训练非 InternLM2 模型](Case2.md) +- [Case 3: 使用 Processed 普通对话数据集训任意模型](Case3.md) +- [Case 4: 使用 Tokenized 数据集训练 InternLM2](Case4.md) + +请先参考下方流程图,选择自己的使用场景。 + +```mermaid +graph TD; + A{ftdp 数据} + A -->|是| B{数据 tokenized} + B -->|否| C{使用 Internlm2 对话模板} + C -->|是| D{训练 Internlm2 } + D -->|是| E[Case 1] + D -->|否| F[Case 2] + C -->|否| G{离线处理数据集} + G -->|是| H[尚不支持] + G -->|否| I[Case 3] + B -->|是| J[Case 4] +``` diff --git a/data/xtuner/docs/zh_cn/user_guides/incremental_pretraining.md b/data/xtuner/docs/zh_cn/user_guides/incremental_pretraining.md new file mode 100644 index 0000000000000000000000000000000000000000..9d019a53206f80d3f898750e5d2c4050aa212ada --- /dev/null +++ b/data/xtuner/docs/zh_cn/user_guides/incremental_pretraining.md @@ -0,0 +1,261 @@ +# 增量预训练 data pipeline + +- [使用 HuggingFace Hub 数据集](#使用-huggingface-hub-数据集) +- [使用自定义数据集](#使用自定义数据集) + +增量预训练旨在提升模型在特定领域或任务的能力。 + +XTuner 支持使用 HuggingFace Hub 数据集或自定义数据集进行 SFT(Supervised FineTune)。二者的主要区别在于,使用 HuggingFace Hub 数据集时需要将原始数据映射为 XTuner 定义的[增量预训练数据格式](./dataset_format.md#增量预训练数据集格式)。而对于自定义数据集则推荐用户按照[增量预训练数据格式](./dataset_format.md#增量预训练数据集格式)构造数据集。 + +## 使用 HuggingFace Hub 数据集 + +### Step 1, 映射原始数据集为标准格式 + +由于不同数据集的格式各有不同,因此需要将原始数据映射为 XTuner 定义的[增量预训练数据格式](./dataset_format.md#增量预训练数据集格式)。XTuner 支持通过 map function 来实现格式的映射。下面以 [oasst1](https://huggingface.co/datasets/OpenAssistant/oasst1) 数据集为例介绍如何实现数据映射。 + +oasst1 数据集格式如下所示: + +```python +>>> from datasets import load_dataset + +>>> ds = load_dataset(path='timdettmers/openassistant-guanaco') +>>> ds['train'] +Dataset({ + features: ['text'], + num_rows: 9846 +}) +``` + +由此可见,oasst1 train dataset 有 9846 行,1 列,列名为 'text','text' 这一列正是增量预训练需要用到的文本数据。[增量预训练数据格式](./dataset_format.md#增量预训练数据集格式)中介绍了增量预训练过程中,数据格式应该为: + +```json +[{ + "conversation":[ + { + "input": "", + "output": "xxx" + }, + ] +}] +``` + +因此,可以通过下面的 map function 将原始数据映射为标准格式: + +```python +# 
假设将该函数存放在./map_fn.py文件中 +def custom_map_fn(example): + """ + >>> train_ds = ds['train'].map(oasst1_map_fn) + >>> train_ds + Dataset({ + features: ['text', 'conversation'], + num_rows: 9846 + }) + >>> train_ds[0]['conversation'] + [{'input': '', 'output': 'xxx'}] + """ + return {'conversation': [{'input': '', 'output': example['text']}]} + +``` + +### Step 2, 列出候选模型名字 + +XTuner 提供多个开箱即用的配置文件,用户可以通过下列命令查看: + +```bash +xtuner list-cfg -p internlm +``` + +`-p`为模糊查找,若想训练其他模型,可以修改`internlm`为 XTuner 支持的其他模型名称。 + +### Step 3, 导出 config 文件 + +如果所提供的配置文件不能满足使用需求,请导出所提供的配置文件并进行相应更改: + +```bash +xtuner copy-cfg ${CONFIG_NAME} ${SAVE_DIR} +``` + +例如通过下列命令将名为 `internlm_7b_qlora_oasst1_e3` 的 config 导出至当前目录下: + +```bash +xtuner copy-cfg internlm_7b_qlora_oasst1_e3 . +``` + +### Step 4, 修改 config 文件 + +对 Step 3 复制得到的 config 文件需要进行如下修改: + +1. 导入 Step 1 中实现的映射函数 `custom_map_fn` +2. 使用 `custom_map_fn` 替换 `train_dataset` 中的 `dataset_map_fn` +3. 将 `train_dataset` 中的 `template_map_fn` 置为None(因为无需将对话模板加入至增量预训练数据集中) +4. 调整原始数据集的路径,关于 `load_dataset` 的相关操作可以参考[用户文档](https://huggingface.co/docs/datasets/loading) +5. 关闭 `EvaluateChatHook`。由于增量预训练时的模型只具备续写功能,不具备对话功能,如果开启 `EvaluateChatHook`打印对话结果的话,模型会无法正常停止生成。 + +```diff +from xtuner.dataset import process_hf_dataset +from datasets import load_dataset +- from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory ++ from mmengine.config import read_base ++ with read_base(): ++ from .map_fn import custom_map_fn +... +####################################################################### +# PART 1 Settings # +####################################################################### +- data_path = 'timdettmers/openassistant-guanaco' +- prompt_template = PROMPT_TEMPLATE.internlm_chat ++ data_path = 'path/to/your/data' +####################################################################### +# STEP 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, +- dataset_map_fn=oasst1_map_fn, ++ dataset_map_fn=custom_map_fn, +- template_map_fn=dict( +- type=template_map_fn_factory, template=prompt_template), ++ template_map_fn=None, + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) +... +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), +- dict( +- type=EvaluateChatHook, +- tokenizer=tokenizer, +- every_n_iters=evaluation_freq, +- evaluation_inputs=evaluation_inputs, +- system=SYSTEM, +- instruction=prompt_template.INSTRUCTION) +] +... 
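+# 以上改动:dataset_map_fn 替换为 custom_map_fn,template_map_fn 置为 None,并移除 EvaluateChatHook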
+``` + +### Step 5, 检查数据集(可选) + +在修改配置文件后,可以运行`xtuner/tools/check_custom_dataset.py`脚本验证数据集是否正确构建。 + +```bash +xtuner check-custom-dataset $CONFIG +``` + +其中 `$CONFIG` 是 Step 4 修改过的 config 的文件路径。 + +## 使用自定义数据集 + +在使用自定义数据集进行增量预训练时,我们推荐将数据集构造为 XTuner 定义的[增量预训练数据格式](./dataset_format.md#增量预训练数据集格式)。若自定义数据集格式为 `oasst1` 等其他格式,可参考[使用HuggingFace Hub数据集](#使用huggingface-hub数据集)一节。 + +### Step 1, 数据准备 + +按照 XTuner 定义的[增量预训练数据格式](./dataset_format.md#增量预训练数据集格式)准备自定义数据: + +```json +[ + { + "conversation":[ + { + "input": "", + "output": "xxx" + }, + ] + }, + { + "conversation":[ + { + "input": "", + "output": "xxx" + }, + ] + } +] +``` + +### Step 2, 列出候选模型名字 + +```bash +xtuner list-cfg -p internlm +``` + +`-p` 为模糊查找,若想训练其他模型,可以修改 `internlm` 为 XTuner 支持的其他模型名称。 + +### Step 3, 复制 config 文件 + +```bash +xtuner copy-cfg internlm_7b_qlora_oasst1_e3 . +``` + +### Step 4, 修改 config 文件 + +对 Step 3 复制得到的 config 文件需要进行如下修改: + +1. 调整原始数据集的路径 +2. 由于数据集格式已经是标准格式了,需要将 `train_dataset` 中的 `dataset_map_fn` 置为 `None` +3. 将 `train_dataset` 中的 `template_map_fn` 置为 `None`,因为不需要将对话模板加入至增量预训练数据集中 +4. 关闭 `EvaluateChatHook`。由于增量预训练时的模型只具备续写功能,不具备对话功能,如果开启 `EvaluateChatHook`打印对话结果的话,模型会无法正常停止生成。 + +```diff +from xtuner.dataset import process_hf_dataset +from datasets import load_dataset +- from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +... +####################################################################### +# PART 1 Settings # +####################################################################### +- data_path = 'timdettmers/openassistant-guanaco' +- prompt_template = PROMPT_TEMPLATE.internlm_chat ++ data_path = 'path/to/your/json/data' +... +####################################################################### +# STEP 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, +- dataset=dict(type=load_dataset, path=data_path), ++ dataset=dict( ++ type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, +- dataset_map_fn=oasst1_map_fn, ++ dataset_map_fn=None, +- template_map_fn=dict( +- type=template_map_fn_factory, template=prompt_template), ++ template_map_fn=None, + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) +... +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), +- dict( +- type=EvaluateChatHook, +- tokenizer=tokenizer, +- every_n_iters=evaluation_freq, +- evaluation_inputs=evaluation_inputs, +- system=SYSTEM, +- instruction=prompt_template.INSTRUCTION) +] +... 
+``` + +### Step 5, 检查数据集(可选) + +在修改配置文件后,可以运行`xtuner/tools/check_custom_dataset.py`脚本验证数据集是否正确构建。 + +```bash +xtuner check-custom-dataset $CONFIG +``` + +其中 `$CONFIG` 是 Step 4 修改过的 config 的文件路径。 diff --git a/data/xtuner/docs/zh_cn/user_guides/intern_repo_dataset.md b/data/xtuner/docs/zh_cn/user_guides/intern_repo_dataset.md new file mode 100644 index 0000000000000000000000000000000000000000..c7ae5ba42f6eb374c607e332c68bfcabc990ab10 --- /dev/null +++ b/data/xtuner/docs/zh_cn/user_guides/intern_repo_dataset.md @@ -0,0 +1,477 @@ +**注意:本文档的主要目标是详细说明如何根据 InternLM 仓库所提供的数据格式进行模型训练,而非如何训练 InternLM 模型。** + +# 使用 tokenized 数据集进行训练 + +## 使用教程 + +### Step 1, 导出模板 config 文件 + +可以通过下列命令将名为 internlm2_7b_w_tokenized_dataset 的 config 导出至当前目录下: + +``` +xtuner copy-cfg internlm2_7b_w_tokenized_dataset . +``` + +### Step 2, 修改模板 config 文件 + +修改 Config 文件中上述接口对应部分。 + +```diff +... + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-7b' +use_varlen_attn = True + +# Data +- dataset_folder = '/path/to/sft/data/folder' # noqa: E501 ++ dataset_folder = '/real/dataset/path' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 32768 +pack_to_max_length = True +... +``` + +在使用 DeepSpeed 训练模型时,如需在保存 checkpoint 时只保存模型权重,而不保存优化器状态,可参考以下步骤: + +1. 确保 mmengine 版本大于等于 0.10.3 + +``` +pip install 'mmengine>=0.10.3' +``` + +2. 修改 Config 文件,CheckpointHook 增加 save_optimizer=False + +```diff +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 100 iterations. + logger=dict(type=LoggerHook, interval=1), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per epoch. + checkpoint=dict( + type=CheckpointHook, ++ save_optimizer=False, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) +``` + +需要注意,经过以上设置后,训练过程不可 resume 。 + +### Step 3,获取数据顺序 (可选) + +运行下面的代码可获取数据顺序,并存为 txt 文件: + +``` +python xtuner/tools/get_data_order.py \ + --data-folder /path/to/your/data \ + --save-folder /folder/to/save/data/order \ + --file-type ${file_type} +``` + +其中,`--file-type ${file_type}` 表示需要统计所有以 `${file_type}` 为文件名后缀的文件的顺序。 + +例如,需要获取 `/path/to/your/data` 路径下所有以 `.bin` 结尾的文件的顺序,并保存在当前路径下,那么上述命令需要改为: + +``` +python xtuner/tools/get_data_order.py \ + --data-folder /path/to/your/data \ + --save-folder . \ + --file-type .bin +``` + +同时,需要进一步修改 Step 2 中的 Config 文件,并设置数据顺序文件路径: + +```diff +... 
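+# data_order_path 应为 Step 3 中 --save-folder 与文件名 data_order.txt 拼接后的完整路径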
+####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=build_packed_dataset, + dataset_cfg=dict( + type=load_intern_repo_tokenized_dataset, +- data_order_path=None, ++ data_order_path='/folder/to/save/data/order/'+'data_order.txt', + folder=dataset_folder, + min_length=0, + file_type='.bin' + ), + packed_length=max_length, + seed=1024) +``` + +### Step 4, 启动训练 + +在 slurm 集群调度系统中可以通过以下命令启动训练: + +``` +srun ${SRUN_ARGS} xtuner train internlm2_7b_w_tokenized_dataset_copy.py --launcher slurm --deepspeed deepspeed_zero1 +``` + +在阿里云 DLC 中可通过以下命令启动训练: + +```diff +export NCCL_IB_TC=136 +export NCCL_IB_SL=5 +export NCCL_IB_GID_INDEX=3 +export NCCL_SOCKET_IFNAME=bond0 +export NCCL_DEBUG=INFO +export NCCL_IB_HCA=mlx5 +export NCCL_IB_TIMEOUT=22 +export NCCL_IB_QPS_PER_CONNECTION=8 +export NCCL_NET_PLUGIN=none + +export NCCL_BUFFSIZE=2097152 +export PYTORCH_CUDA_ALLOC_CONF=max_split_size_mb:512 +- export EXP_NAME=debug ++ export EXP_NAME=your_exp_name +export PYTHONPATH='.':$PYTHONPATH +source ~/.bashrc ++ cd /path/to/xtuner ++ conda activate conda_env_name + +export NPROC_PER_NODE=${KUBERNETES_CONTAINER_RESOURCE_GPU} +export PORT=${MASTER_PORT} +export NNODES=${WORLD_SIZE} +export NODE_RANK=${RANK} +export ADDR=${MASTER_ADDR} + +echo ${KUBERNETES_CONTAINER_RESOURCE_GPU} +echo ${WORLD_SIZE} +echo ${MASTER_PORT} +echo ${MASTER_ADDR} +echo ${RANK} +xtuner train internlm2_7b_w_tokenized_dataset_copy.py \ + --deepspeed deepspeed_zero1 \ + --work-dir work_dirs/${EXP_NAME} + +``` + +### Step 5,转模型 + +deepspeed 转 hf: + +``` +python xtuner/tools/model_converters/pth_to_hf.py internlm2_7b_w_tokenized_dataset_copy.py /src/model/path /hf/dst/model/path +``` + +hf 转 Turbomind: + +``` +lmdeploy convert internlm2-chat-7b /hf/dst/model/path --dst-path /turbomind/dst/model/path +``` + +### Step 6,Turbomind 评测 + +评测前需要按照[Opencompass 使用文档](https://aicarrier.feishu.cn/wiki/PR28wWg3tiY2xCkuysccRBNenIf#RNcbdEVZ9oulPQxFz9gcOxwjnff)准备环境。 + +使用内部版 Opencompass 的 ca949db74502a68c8a900afdf751c584fb7c7655 这个 commit id 进行评测。在 `configs/sft_cfg/7B/Ampere_chatml_v053/` 目录下添加如下 config : + +```diff +import os.path as osp +from copy import deepcopy + +from mmengine.config import read_base + +with read_base(): + # datasets + from ...dataset_collections.medium_chat_sft_v053 import \ + base_datasets, longtext_datasets, math_agent_datasets, cibench_datasets, plugin_eval_datasets + # summarizer + from ...summarizers.medium_chat_sft_v053 import summarizer + # clusters + from ...clusters.slurm_llmit2 import infer, eval + # lark robot + from ...lark import lark_bot_url + # base models cfg + from .base_model.base_model_turbomind import base_model_cfg, base_longtext_model_cfg, base_agent_llm_cfg, base_math_agent_cfg, \ + base_cibench_agent_cfg, base_plugin_eval_model_cfg + +# ------------------ change here ↓ ------------------ +models_path = [ ++ '/path/to/turbomind_model' +] + +# users can set `auto`, `spot`, or `reserved`. Defaults to `auto`. 
+infer['runner']['quotatype'] = 'auto' +infer['runner']['max_num_workers'] = 32 +infer['runner']['partition'] = 'llmit2' + +eval['runner']['quotatype'] = 'auto' +eval['runner']['max_num_workers'] = 64 +eval['runner']['partition'] = 'llmit2' +# ------------------ change end ------------------ + +# ------------------ default settings ↓ ------------------ +# careful to change the following settings + +# add different eval models +base_models = [] +longtext_models = [] +math_agent_models = [] +cibench_agent_models = [] +plugin_eval_models = [] +for model_path in models_path: + if model_path.endswith('/'): + model_path = model_path[:-1] + abbr = osp.split(osp.split(model_path)[0])[-1] + ckpt_iter = osp.split(model_path)[-1] + + summarizer_abbr = f"{abbr}@{ckpt_iter}" + + tmp_base_model_cfg = deepcopy(base_model_cfg) + tmp_base_model_cfg['abbr'] = f"{abbr}@{ckpt_iter}" + tmp_base_model_cfg['summarizer_abbr'] = summarizer_abbr + tmp_base_model_cfg['path'] = model_path + + # process base model + base_models.append(tmp_base_model_cfg) + + # process longtext model + tmp_longtext_model_cfg = deepcopy(base_longtext_model_cfg) + tmp_longtext_model_cfg['abbr'] = f"{abbr}@{ckpt_iter}-longtext" + tmp_longtext_model_cfg['summarizer_abbr'] = summarizer_abbr + tmp_longtext_model_cfg['path'] = model_path + longtext_models.append(tmp_longtext_model_cfg) + + # set agent model cfg + tmp_agent_llm_cfg = deepcopy(base_agent_llm_cfg) + tmp_agent_llm_cfg['path'] = model_path + + # process math agent model + tmp_math_agent_cfg = deepcopy(base_math_agent_cfg) + tmp_math_agent_cfg['abbr'] = f"{abbr}@{ckpt_iter}-math-react" + tmp_math_agent_cfg['summarizer_abbr'] = summarizer_abbr + tmp_math_agent_cfg['llm'] = tmp_agent_llm_cfg + math_agent_models.append(tmp_math_agent_cfg) + + # process cibench agent model + tmp_cibench_agent_cfg = deepcopy(base_cibench_agent_cfg) + tmp_cibench_agent_cfg['abbr'] = f"{abbr}@{ckpt_iter}-cibench-react" + tmp_cibench_agent_cfg['summarizer_abbr'] = summarizer_abbr + tmp_cibench_agent_cfg['llm'] = tmp_agent_llm_cfg + cibench_agent_models.append(tmp_cibench_agent_cfg) + + # process plugin eval model + tmp_plugin_eval_model_cfg = deepcopy(base_plugin_eval_model_cfg) + tmp_plugin_eval_model_cfg['abbr'] = f"{abbr}@{ckpt_iter}-plugin-eval" + tmp_plugin_eval_model_cfg['summarizer_abbr'] = summarizer_abbr + tmp_plugin_eval_model_cfg['path'] = model_path + plugin_eval_models.append(tmp_plugin_eval_model_cfg) + +del tmp_base_model_cfg, tmp_longtext_model_cfg, tmp_agent_llm_cfg, \ + tmp_math_agent_cfg, tmp_cibench_agent_cfg, tmp_plugin_eval_model_cfg + +# set all models +model_dataset_combinations = [] +models = [] +datasets = [] + +# The agent test is relatively slow, so they placed first. 
+# process longtext datasets +model_dataset_combinations.append(dict(models=longtext_models, datasets=longtext_datasets)) +models.extend(longtext_models) +datasets.extend(longtext_datasets) +# process math agent datasets +model_dataset_combinations.append(dict(models=math_agent_models, datasets=math_agent_datasets)) +models.extend(math_agent_models) +datasets.extend(math_agent_datasets) +# process cibench agent datasets +model_dataset_combinations.append(dict(models=cibench_agent_models, datasets=cibench_datasets)) +models.extend(cibench_agent_models) +datasets.extend(cibench_datasets) +# process plugin eval datasets +model_dataset_combinations.append(dict(models=plugin_eval_models, datasets=plugin_eval_datasets)) +models.extend(plugin_eval_models) +datasets.extend(plugin_eval_datasets) + +# process base datasets +model_dataset_combinations.append(dict(models=base_models, datasets=base_datasets)) +models.extend(base_models) +datasets.extend(base_datasets) + +# ------------------ default settings end ------------------ + +``` + +## 数据集格式 + +[InternLM](https://github.com/InternLM/InternLM) 仓库所使用的训练数据集会被预先 token 化,格式如下所示: + +``` +{"tokens": [1, -333, -352, -1621, ..., 103028, 13, 2]} +{"tokens": [1, -333, -352, -1621, ..., 103028, 13, 2]} +``` + +其中,数值为负数的 tokens 在训练过程中不参与 loss 计算。 + +# 使用 untokenized 数据集进行训练 + +## 使用教程 + +### Step 1, 导出模板 config 文件 + +可以通过下列命令将名为 internlm2_7b_w_untokenized_dataset 的 config 导出至当前目录下: + +``` +xtuner copy-cfg internlm2_7b_w_untokenized_dataset . +``` + +### Step 2, 修改模板 config 文件 + +修改 Config 文件中上述接口对应部分。 + +```diff +... + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-7b' +use_varlen_attn = True + +# Data +- dataset_folder = '/mnt/petrelfs/share_data/caoweihan/v1_sample_with_legal_cate' # noqa: E501 ++ dataset_folder = '/real/dataset/path' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 32768 +pack_to_max_length = True +... +``` + +### Step 3,获取数据顺序 (可选) + +运行下面的代码可获取数据顺序,并存为 txt 文件: + +``` +python xtuner/tools/get_data_order.py \ + --data-folder /path/to/your/data \ + --save-folder /folder/to/save/data/order \ + --file-type .json +``` + +其中,`--file-type .json` 表示需要获取所有以 `.json` 为结尾的文件的顺序。 + +同时,需要修改 Step 2 中的 Config 文件,并设置数据顺序文件路径: + +```diff +... 
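+# untokenized 数据在训练时在线 token 化,并由 template_map_fn 拼接对话模板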
+####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=build_packed_dataset, + dataset_cfg=dict( + type=load_intern_repo_untokenized_dataset, +- data_order_path=None, ++ data_order_path='/folder/to/save/data/order/'+'data_order.txt', + folder=dataset_folder, + tokenizer=tokenizer, + max_length=max_length, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + file_type='.json'), + packed_length=max_length, + seed=1024) +``` + +### Step 4,离线 token 化并处理原数据集 (可选) + +对于大数据集,将原始数据集 token 化,并添加对话模板的过程可能较为耗时,因此可以先离线处理好,每次使用时直接读取处理好的数据集。 + +运行以下代码对原始数据集进行离线处理: + +``` +python xtuner/tools/process_untokenized_datasets.py \ + --data-folder /path/to/data/folder \ + --save-folder ./processed \ + --tokenizer-path pretrained_model_name_or_path \ + --prompt-template internlm2_chat \ + --dataset-format ftdp +``` + +其中 `pretrained_model_name_or_path` 同 `from_pretrained` 接口中的 `pretrained_model_name_or_path`,`--prompt-template` 表示对话模板的种类,其他可选对话模板可参考 [templates](https://github.com/InternLM/xtuner/blob/main/docs/zh_cn/user_guides/prompt_template.md)。untokenized internlm repo 格式的数据集(别名 ftdp 格式)满足以下格式: + +``` +[ + { + 'role': 'user', + 'content': 'xxx' + }, + { + 'role': 'assistant', + 'content': 'xxx' + }, + ... +] +``` + +`--dataset-format` 一项需要设为 `ftdp`。 + +使用离线处理好的数据集进行训练,需要额外修改 Step 2 中的 Config 文件,并设置存放离线处理后的数据集路径: + +```diff +... +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=build_packed_dataset, + dataset_cfg=dict( + type=load_intern_repo_untokenized_dataset, ++ processed_dataset_dict_path=/folder/to/save/processed/data, +- data_order_path=None, +- folder=dataset_folder, +- tokenizer=tokenizer, +- max_length=max_length, +- template_map_fn=dict( +- type=template_map_fn_factory, template=prompt_template), +- file_type='.json'), + packed_length=max_length, + seed=1024) +... +``` + +### Step 4, 5, 6, 7,同上 + +## 数据集格式 + +untokenized internlm repo 格式的数据集(别名 ftdp 格式)满足以下格式: + +``` +[ + { + 'role': 'user', + 'content': 'xxx' + }, + { + 'role': 'assistant', + 'content': 'xxx' + }, + ... +] +[ + { + 'role': 'user', + 'content': 'xxx' + }, + { + 'role': 'assistant', + 'content': 'xxx' + }, + ... +] +``` + +其中 user 对应的内容在训练过程中不参与 loss 的计算。 diff --git a/data/xtuner/docs/zh_cn/user_guides/llava_offline.md b/data/xtuner/docs/zh_cn/user_guides/llava_offline.md new file mode 100644 index 0000000000000000000000000000000000000000..dabe16d0b208f926d18a7944dcfd0d122c14753e --- /dev/null +++ b/data/xtuner/docs/zh_cn/user_guides/llava_offline.md @@ -0,0 +1,50 @@ +# 离线处理 Llava 训练数据集 + +当训练数据量非常大时,每次训练的时候都先在线处理数据可能会极为耗时。我们可以先对原始数据进行离线处理并保存至本地,随后的多次训练可以读入本地离线处理好的数据后直接开始训练。 + +## Step 1, 导出模板 config 文件 + +可使用以下命令查看 XTuner 中提供的 Llava 训练相关的 config: + +``` +xtuner list-cfg -p llava +``` + +找到需要使用的 config 文件并导出至当前目录下: + +``` +xtuner copy-cfg ${CONFIG_NAME} . 
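+# 将 ${CONFIG_NAME} 替换为上一步列出的某个 llava config 名称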
+``` + +## Step 2, 离线处理数据集 + +使用以下命令可离线处理训练数据集中的文本数据: + +``` +python xtuner/tools/process_untokenized_llava_data.py \ + ${CONFIG_PATH} \ + --save-folder /folder/to/save/processed/dataset +``` + +其中,${CONFIG_PATH} 为第一步中导出的 config 文件路径,`/folder/to/save/processed/dataset` 则需要指定为离线处理数据的保存路径。 + +## Step 3, 修改 config 文件 + +对 Step 1 中导出的 config 文件做如下修改: + +```diff +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( +- data_path=data_path, +- tokenizer=tokenizer, ++ offline_processed_text_folder=/folder/to/save/processed/dataset + ...) +``` + +其中,`/folder/to/save/processed/dataset` 为 Step 2 保存的离线处理数据路径。 + +## Step 4,开始训练 + +使用 Step 3 修改得到的 config 训练即可。 diff --git a/data/xtuner/docs/zh_cn/user_guides/multi_turn_conversation.md b/data/xtuner/docs/zh_cn/user_guides/multi_turn_conversation.md new file mode 100644 index 0000000000000000000000000000000000000000..cec40aa93a3230e770703cca0c1d862075d4b8d1 --- /dev/null +++ b/data/xtuner/docs/zh_cn/user_guides/multi_turn_conversation.md @@ -0,0 +1,284 @@ +# 多轮对话 data pipeline + +- [使用 HuggingFace Hub 数据集](#使用-huggingface-hub-数据集) +- [使用自定义数据集](#使用自定义数据集) + +多轮对话指令微调旨在提升模型的多轮对话能力,在数据处理阶段需要将原始数据转换为XTuner支持的数据集格式。 + +XTuner 支持使用 HuggingFace Hub 数据集或自定义数据集进行 SFT(Supervised FineTune)。二者的主要区别在于,使用 HuggingFace Hub 数据集时需要将原始数据映射为 XTuner 定义的[多轮对话数据格式](./dataset_format.md#多轮对话数据集格式),而对于自定义数据集则推荐用户按照[多轮对话数据格式](./dataset_format.md#多轮对话数据集格式)构造数据集。 + +## 使用 HuggingFace Hub 数据集 + +### Step 1, 映射原始数据集为标准格式 + +由于不同数据集的格式各有不同,因此需要将原始数据映射为 XTuner 定义的[多轮对话数据格式](./dataset_format.md#多轮对话数据集格式)。XTuner 支持通过 map function 来实现格式的映射。下面以 [oasst1](https://huggingface.co/datasets/OpenAssistant/oasst1) 数据集为例介绍如何实现数据映射。 + +oasst1 数据集格式如下所示: + +```python +>>> from datasets import load_dataset + +>>> ds = load_dataset(path='timdettmers/openassistant-guanaco') +>>> ds['train'] +Dataset({ + features: ['text'], + num_rows: 9846 +}) +>>> ds['train'][0]['text'] +'### Human: xxx ### Assistant: xxx ###Human: xxx ###Assistant: xxx' +``` + +由此可见,oasst1 数据集既可以当做增量预训练数据集让模型学会一些基本的语言知识,又可以在经过一些处理后作为多轮对话数据集培养模型的多轮对话能力。[多轮对话数据格式](./dataset_format.md#多轮对话数据集格式)中介绍了多轮对话指令微调过程中,数据格式应该为: + +```json +[{ + "conversation":[ + { + "system": "xxx", + "input": "xxx", + "output": "xxx" + }, + { + "input": "xxx", + "output": "xxx" + } + ] +}, +{ + "conversation":[ + { + "system": "xxx", + "input": "xxx", + "output": "xxx" + }, + { + "input": "xxx", + "output": "xxx" + } + ] +}] +``` + +因此,可以通过下面的 map function 将原始数据映射为标准格式: + +```python +# 假设将该函数存放在./map_fn.py文件中 +SYSTEM_OASST1 = '' # oasst1 并未使用 system 字段 +def custom_map_fn(example): + r""" + Example before preprocessing: + example['text'] = '### Human: Can you explain xxx' + '### Assistant: Sure! xxx' + '### Human: I didn't understand how xxx' + '### Assistant: It has to do with a process xxx.' + + Example after preprocessing: + example['conversation'] = [ + { + 'input': 'Can you explain xxx', + 'output': 'Sure! xxx' + }, + { + 'input': 'I didn't understand how xxx', + 'output': 'It has to do with a process xxx.' + } + ] + """ + data = [] + for sentence in example['text'].strip().split('###'): + sentence = sentence.strip() + if sentence[:6] == 'Human:': + data.append(sentence[6:].strip()) + elif sentence[:10] == 'Assistant:': + data.append(sentence[10:].strip()) + if len(data) % 2: + # The last round of conversation solely consists of input + # without any output. 
+ # Discard the input part of the last round, as this part is ignored in + # the loss calculation. + data.pop() + conversation = [] + for i in range(0, len(data), 2): + system = SYSTEM_OASST1 if i == 0 else '' + single_turn_conversation = { + 'system': system, + 'input': data[i], + 'output': data[i + 1]} + conversation.append(single_turn_conversation) + return {'conversation': conversation} +``` + +### Step 2, 列出候选模型名字 + +XTuner 提供多个开箱即用的配置文件,用户可以通过下列命令查看: + +```bash +xtuner list-cfg -p internlm +``` + +`-p` 为模糊查找,若想训练其他模型,可以修改 `internlm` 为 XTuner 支持的其他模型名称。 + +### Step 3, 复制 config 文件 + +如果所提供的配置文件不能满足使用需求,请导出所提供的配置文件并进行相应更改: + +```bash +xtuner copy-cfg ${CONFIG_NAME} ${SAVE_DIR} +``` + +例如通过下列命令将名为 `internlm_7b_qlora_oasst1_e3` 的 config 导出至当前目录下: + +```bash +xtuner copy-cfg internlm_7b_qlora_oasst1_e3 . +``` + +### Step 4, 修改 config 文件 + +对 Step 3 复制得到的 config 文件需要进行如下修改: + +1. 导入 Step 1 中实现的映射函数 `custom_map_fn` +2. 用 `custom_map_fn` 替换 `train_dataset` 中的 `dataset_map_fn` +3. 调整原始数据集的路径,关于 `load_dataset` 的相关操作可以参考[用户文档](https://huggingface.co/docs/datasets/loading) + +```diff +from xtuner.dataset import process_hf_dataset +from datasets import load_dataset +- from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory ++ from xtuner.dataset.map_fns import template_map_fn_factory ++ from mmengine.config import read_base ++ with read_base(): ++ from .map_fn import custom_map_fn +... +####################################################################### +# PART 1 Settings # +####################################################################### +- data_path = 'timdettmers/openassistant-guanaco' ++ data_path = 'path/to/your/data' +... +####################################################################### +# STEP 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, +- dataset_map_fn=oasst1_map_fn, ++ dataset_map_fn=custom_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) +... +``` + +### Step 5, 检查数据集(可选) + +在修改配置文件后,可以运行`xtuner/tools/check_custom_dataset.py`脚本验证数据集是否正确构建。 + +```bash +xtuner check-custom-dataset $CONFIG +``` + +其中 `$CONFIG` 是 Step 4 修改过的 config 的文件路径。 + +## 使用自定义数据集 + +在使用自定义多轮对话数据集进行指令微调时,我们推荐将数据集构造为 XTuner 定义的[多轮对话数据格式](./dataset_format.md#多轮对话数据集格式)。若自定义数据集格式为 `oasst1` 等其他格式,可参考[使用 HuggingFace Hub 数据集](#使用-huggingface-hub-数据集)一节。 + +### Step 1, 数据集准备 + +按照 XTuner 定义的[多轮对话数据格式](./dataset_format.md#多轮对话数据集格式)准备自定义数据: + +```json +[{ + "conversation":[ + { + "system": "xxx", + "input": "xxx", + "output": "xxx" + }, + { + "input": "xxx", + "output": "xxx" + } + ] +}, +{ + "conversation":[ + { + "system": "xxx", + "input": "xxx", + "output": "xxx" + }, + { + "input": "xxx", + "output": "xxx" + } + ] +}] +``` + +### Step 2, 列出候选模型名字 + +```bash +xtuner list-cfg -p internlm +``` + +`-p` 为模糊查找,若想训练其他模型,可以修改 `internlm` 为 XTuner 支持的其他模型名称。 + +### Step 3, 复制 config 文件 + +```bash +xtuner copy-cfg internlm_7b_qlora_oasst1_e3 . +``` + +### Step 4, 修改 config 文件 + +对 Step 3 复制得到的 config 文件需要进行如下修改: + +1. 调整原始数据集的路径 +2. 
由于数据集格式已经是标准格式了,需要将 `train_dataset` 中的 `dataset_map_fn` 置为 `None` + +```diff +from xtuner.dataset import process_hf_dataset +from datasets import load_dataset +- from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory ++ from xtuner.dataset.map_fns import template_map_fn_factory +... +####################################################################### +# PART 1 Settings # +####################################################################### +- data_path = 'timdettmers/openassistant-guanaco' ++ data_path = 'path/to/your/json/data' +... +####################################################################### +# STEP 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, +- dataset=dict(type=load_dataset, path=data_path), ++ dataset=dict( ++ type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, +- dataset_map_fn=oasst1_map_fn, ++ dataset_map_fn=None, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) +... +``` + +### Step 5, 检查数据集(可选) + +在修改配置文件后,可以运行`xtuner/tools/check_custom_dataset.py`脚本验证数据集是否正确构建。 + +```bash +xtuner check-custom-dataset $CONFIG +``` + +其中 `$CONFIG` 是 Step 4 修改过的 config 的文件路径。 diff --git a/data/xtuner/docs/zh_cn/user_guides/prompt_template.md b/data/xtuner/docs/zh_cn/user_guides/prompt_template.md new file mode 100644 index 0000000000000000000000000000000000000000..2800450468768e2ad78b017931f5e2edd53eee1d --- /dev/null +++ b/data/xtuner/docs/zh_cn/user_guides/prompt_template.md @@ -0,0 +1,110 @@ +# 对话模版(prompt template) + +XTuner 提供一系列对话模版(prompt template),其与众多 LLM 的官方模版完全对齐。本文档将以 InternLM-Chat 的模版 `internlm_chat` 为例,详细介绍对话模版的代码结构及执行逻辑。 + +## 代码结构 + +```python +internlm_chat=dict( + SYSTEM='<|System|>:{system}\n', + INSTRUCTION='<|User|>:{input}\n<|Bot|>:', + SUFFIX='', + SUFFIX_AS_EOS=True, + SEP='\n', + STOP_WORDS=['']) +``` + +- `SYSTEM`:表示问答时“系统”字段的模版,其中 `{system}` 指代“系统”文本。值得注意的是,该字段在多轮对话中只会出现一次,即在第一轮。 +- `INSTRUCTION`:表示问答时“指令”字段的模版,其中 `{input}` 指代用户指令文本。 +- `SUFFIX`:表示“指令”字段的后缀,将会追加在每一轮问答的“回答”后面。通常,这也是一个特殊的结束符号。默认是空串`''`。 +- `SUFFIX_AS_EOS`:表示上述后缀是否作为结束符号。如果为 `True`,则会取代 `tokenizer` 的 `eos_token`,否则,仍会使用 `tokenizer` 的 `eos_token` 表示结束符号。默认是 `False`。 +- `SEP`:用于间隔多轮对话,将会追加在 `INSTRUCTION` 和 `SUFFIX` 后面。默认是空串`''`。 +- `STOP_WORDS`:用于指明结束词,该信息将被用在文本生成阶段。值得注意的是,`tokenizer` 的 `eos_token` 会被自动添加到 `STOP_WORDS`,而无需手动配置。 + +## 结果 + +**单轮对话** + +``` +<|System|>:{system} +<|User|>:{input} +<|Bot|>:{output} +``` + +**多轮对话** + +``` +<|System|>:{system} +<|User|>:{input} +<|Bot|>:{output} +<|User|>:{input} +<|Bot|>:{output} +<|User|>:{input} +<|Bot|>:{output} +``` + +## 模版的选择 + +| 模型 | 对话模版 | +| ---------------------------------------- | -------------- | +| baichuan-inc/Baichuan-7B | default\* | +| baichuan-inc/Baichuan-13B-Base | default\* | +| baichuan-inc/Baichuan-13B-Chat | baichuan_chat | +| baichuan-inc/Baichuan2-7B-Base | default\* | +| baichuan-inc/Baichuan2-7B-Chat | baichuan2_chat | +| baichuan-inc/Baichuan2-13B-Base | default\* | +| baichuan-inc/Baichuan2-13B-Chat | baichuan2_chat | +| THUDM/chatglm2-6b | chatglm2 | +| THUDM/chatglm3-6b | chatglm3 | +| THUDM/chatglm3-6b-base | chatglm3 | +| deepseek-ai/deepseek-coder-6.7b-base | deepseek_coder | +| deepseek-ai/deepseek-coder-6.7b-instruct | deepseek_coder | +| internlm/internlm-7b | default\* | +| 
internlm/internlm-20b | default\* | +| internlm/internlm-chat-7b | internlm_chat | +| internlm/internlm-chat-20b | internlm_chat | +| huggyllama/llama-7b | default | +| meta-llama/Llama-2-7b-hf | llama2_chat | +| meta-llama/Llama-2-7b-chat-hf | llama2_chat | +| meta-llama/Llama-2-70b-hf | llama2_chat | +| lmsys/vicuna-7b-v1.5 | vicuna | +| lmsys/vicuna-13b-v1.5 | vicuna | +| mistralai/Mistral-7B-v0.1 | mistral | +| mistralai/Mixtral-8x7B-v0.1 | mixtral | +| mistralai/Mixtral-8x7B-Instruct-v0.1 | mixtral | +| Qwen/Qwen-1_8B | default\* | +| Qwen/Qwen-1_8B-Chat | qwen_chat | +| Qwen/Qwen-7B | default\* | +| Qwen/Qwen-7B-Chat | qwen_chat | +| Qwen/Qwen-72B | default\* | +| Qwen/Qwen-72B-Chat | qwen_chat | +| bigcode/starcoder | default | +| 01-ai/Yi-6B | default | +| 01-ai/Yi-34B | default | +| HuggingFaceH4/zephyr-7b-beta | zephyr | +| deepseek-ai/deepseek-moe-16b-base | deepseek_moe | +| deepseek-ai/deepseek-moe-16b-chat | deepseek_moe | +| internlm/internlm2-1_8b | default\* | +| internlm/internlm2-7b | default\* | +| internlm/internlm2-20b | default\* | +| internlm/internlm2-chat-1_8b | internlm2_chat | +| internlm/internlm2-chat-7b | internlm2_chat | +| internlm/internlm2-chat-20b | internlm2_chat | +| Qwen/Qwen1.5-0.5B | default\* | +| Qwen/Qwen1.5-0.5B-Chat | qwen_chat | +| Qwen/Qwen1.5-1.8B | default\* | +| Qwen/Qwen1.5-1.8B-Chat | qwen_chat | +| Qwen/Qwen1.5-4B | default\* | +| Qwen/Qwen1.5-4B-Chat | qwen_chat | +| Qwen/Qwen1.5-7B | default\* | +| Qwen/Qwen1.5-7B-Chat | qwen_chat | +| Qwen/Qwen1.5-14B | default\* | +| Qwen/Qwen1.5-14B-Chat | qwen_chat | +| Qwen/Qwen1.5-72B | default\* | +| Qwen/Qwen1.5-72B-Chat | qwen_chat | +| google/gemma-2b | default\* | +| google/gemma-2b-it | gemma\* | +| google/gemma-7b | default\* | +| google/gemma-7b-it | gemma\* | + +\*: 官方对话模版中存在特殊 token(比如 `<|im_start|>`、`<|im_end|>`),这类特殊 token 在预训练阶段并未得到训练。故,使用 `default` 模版。 diff --git a/data/xtuner/docs/zh_cn/user_guides/sequence_parallel.md b/data/xtuner/docs/zh_cn/user_guides/sequence_parallel.md new file mode 100644 index 0000000000000000000000000000000000000000..ce4beed64759dee8edb254fedf4dc6093eee502d --- /dev/null +++ b/data/xtuner/docs/zh_cn/user_guides/sequence_parallel.md @@ -0,0 +1,191 @@ +
+ +# 序列并行:训练极长序列大模型的系统优化 + +
+ +XTuner 中的序列并行设计思路参考了 DeepSpeed 的工作 [DeepSpeed Ulysses](https://arxiv.org/abs/2309.14509),并加以优化,以达到直接基于 transformers 算法库或 Huggingface Hub 上的开源模型训练 1M 以上超长序列的目标。 + +## 简介 + +从生成性AI到科研模型,长序列训练正在变得非常重要。 + +在生成性AI领域,会话式AI、长文档摘要、代码库理解和例如 Sora 这种视频生成任务都需要在空间和时间层面对长上下文进行推理。 + +对于科学AI来说,长序列同样至关重要,它为更好地理解结构生物学、医疗保健、气候和天气预测以及大分子模拟打开了大门。 + +然而,尽管序列长度的重要性不断增长,XTuner 现有的显存优化策略(如 zero 系列),却不足以解决大模型、长序列训练问题。 + +同时,受限于通信效率,现有的许多序列并行方法也不够高效。 + +另外,现有的序列并行方法普遍存在较多的代码侵入式修改,易用性和维护性都要大打折扣。同时也不满足 XTuner 基于 transformers 算法库或 Huggingface Hub 上的开源模型直接进行训练的要求。 + +
+
+*(此处原文为序列并行核心设计示意图)*
+
+ +为了解决上述长序列训练带来的问题,XTuner 采用了一种简单、易用且高效的序列并行算法。由于 Transformer 结构较为规整,除 attention 计算外,其他计算过程中 token 之间不会互相影响(即每个 token 的计算是独立的),这一条件为序列并行提供了有利条件。上图展示了序列并行的核心设计。设由 P 个 GPUs 共同计算一个长度为 N 的长序列,在 Attention 计算的第一阶段,长度为 N / P 的子序列会通过线性层投影为 Query、Key、Value。接下来, QKV Tensor 会在参与序列并行计算的多个 GPUs 之间通过高度优化的 all-to-all 通信算子汇聚,得到序列长度为 N ,但更少注意力头的子序列。注意力计算后,通过另一个 all-to-all 通信算子将其转换为长度为 N / P 的子序列,进行后续计算。 + +总体而言,XTuner 的序列并行算法具有以下关键特性: + +* 支持全量训练**超过百万个token**的序列 +* 支持百 B 级模型训练:XTuner 的序列并行不仅支持长序列训练,还可结合 zero3 显存优化策略训练大尺寸模型 +* 完全通用的序列并行 **API 抽象** + +## 使用 XTuner 进行序列并行训练 + +### Step 1 修改 config 文件 + +1. 在 config 中修改 `sequence_parallel_size` 字段即可调整 $sequence\\_parallel\\_world\\_size$ 。 +2. 同时若想保证与不使用序列并行的训练效果类似,需要同步增大梯度累积的数值为原来的 $sequence\\_parallel\\_world\\_size$ 倍,因为在使用序列并行训练时, $data\\_parallel\\_world\\_size$ 降为了原来的 $\frac{1}{sequence\\_parallel\\_world\\_size}$。 +3. 替换 DefaultSampler 为支持序列并行的 SequenceParallelSampler。 + +**注:需要保证所使用的 GPU 总数可以被 `sequence_parallel_size` 整除。** + +```diff ++ from xtuner.parallel.sequence import SequenceParallelSampler + +- sequence_parallel_size = 1 ++ sequence_parallel_size = 4 # take `sequence_parallel_size = 4`` as an example + +- accumulative_counts = 1 ++ accumulative_counts = 4 # accumulative_counts = accumulative_counts * sequence_parallel_size + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataloader = dict( +- sampler=dict(type=DefaultSampler, shuffle=True), ++ sampler=dict(type=SequenceParallelSampler, seed=1024, shuffle=True), + ...) +``` + +另外,若需要进一步拓展模型的长文本处理能力,需要进一步修改 config 中的 `max_position_embeddings` 字段。例如需要将模型的上下文长度拓展为 64K 时,可进行如下修改: + +```diff ++ max_position_embeddings = 65536 + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +model = dict( + type=SupervisedFinetune, ++ max_position_embeddings = max_position_embeddings, + ...) +``` + +### Step 2 开始训练 + +需要使用 DeepSpeed 进行训练: + +```bash +(DIST) NPROC_PER_NODE=${GPU_NUM} xtuner train ${CONFIG_PATH} --deepspeed deepspeed_zero2 +(SLURM) srun ${SRUN_ARGS} xtuner train ${CONFIG_PATH} --launcher slurm --deepspeed deepspeed_zero2 +``` + +- ${CONFIG_PATH} 为 Step 1 中修改得到的 config 文件路径 +- 可根据实际情况选择使用不同的 zero 策略 + +## 序列并行 API 抽象 + +为了提升算法的可迁移性,XTuner 中抽象出了序列并行所必须的五个 API 接口: +- 序列并行分布式环境初始化 (init_sequence_parallel) +- 适配序列并行的 Data Sampler (SequenceParallelSampler) +- 数据 Pad (pad_for_sequence_parallel) +- 数据切分 (split_for_sequence_parallel) +- 适配序列并行的 Attention (dispatch_modules) +- reduce loss 以正确打印训练损失 (reduce_sequence_parallel_loss) + +### 序列并行分布式环境初始化 + +由于序列并行算法会将长序列切分为 $sequence\\_parallel\\_world\\_size$ 块,并将每个子序列分发给对应的 GPU 独立进行计算。因此需要在训练开始前初始化序列并行分布式环境,以指定哪几块 GPU 共同负责一个长序列输入的计算。 + +一个 $sequence\\_parallel\\_world\\_size = 4$ 的示例如下: + +```python +# We have to initialize the distributed training environment first. 
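+# 注:GPU 总数需能被 sequence_parallel_world_size 整除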
+# Here is an example when training on slurm scheduler +# from xtuner.parallel.sequence import init_dist +# init_dist('slurm', 'nccl', init_backend='deepspeed') +from xtuner.parallel.sequence import init_sequence_parallel +sequence_parallel_world_size = 4 +init_sequence_parallel(sequence_parallel_world_size) +``` + +上述过程在 xtuner/engine/_strategy/deepspeed.py 中实现。 + +### Data Sampler 适配序列并行 + +在使用序列并行后,Dataloader 的采样策略需要进一步调整。例如当 $sequence\\_parallel\\_world\\_size = 4$ 时,4 块 GPU 从 Dataloader 拿到的数据需要是完全一样的。 + +在构建 Dataloader 时搭配 XTuner 中提供的 SequenceParallelSampler 使用即可: + +```python +from xtuner.parallel.sequence import SequenceParallelSampler +dataloader = DataLoader( + train_dataset, sampler=SequenceParallelSampler(train_dataset), + **other_dataloader_params) +``` + +### 数据 Pad + +由于每条训练数据的长度可能不尽相同,我们需要将数据进行 Pad 以使得序列长度可以被 $sequence\\_parallel\\_world\\_size$ 整除,这样一条长数据才能被均等地分发给不同的 GPU 上。 + +训练过程中需要被 Pad 的 Tensor 往往有 input_ids, labels, position_ids, attention_mask 四个,pad 的过程可以通过以下方式实现: + +```python +from xtuner.parallel.sequence import pad_for_sequence_parallel + +input_ids = pad_for_sequence_parallel(input_ids, padding_value=0) +labels = pad_for_sequence_parallel(labels, padding_value=-100) +position_ids = pad_for_sequence_parallel(position_ids, padding_value=0) +attention_mask = pad_for_sequence_parallel(attention_mask, padding_value=0) +``` + +以上过程在 `xtuner/dataset/collate_fns/default_collate_fn.py` 中实现。 + +### 数据切分 + +在传入给 Transformer 模型前,我们需要对长序列均等切分: + +```python +from xtuner.parallel.sequence import split_for_sequence_parallel +# attention mask should not be split +# `dim` is 1 as the shape of tensor is (bs, seq_len, ...) +input_ids = split_for_sequence_parallel(input_ids, dim=1) +labels = split_for_sequence_parallel(labels, dim=1) +position_ids = split_for_sequence_parallel(position_ids, dim=1) +``` + +以上过程在 `xtuner/model/sft.py` 中实现。 + +### Attention 适配序列并行 + +在 Attention 的计算过程中,序列中的不同 token 是不能独立运算的,但不同的 attention head 之间的计算却是独立的。因此,如[第一节](#简介)所述,需要在计算 Attention 前后(即 qkv_proj 后和 o_proj 前)分别插入一个 *all-to-all* 操作。 + +XTuner 提供了 dispatch_modules 接口以支持修改模型 Attention 的计算方式: + +```python +from xtuner.model.modules import dispatch_modules +model: AutoModelForCausalLM +dispatch_modules(model) +``` + +上述过程在 xtuner/model/sft.py 中实现。 + +### Reduce Loss 以正确打印训练损失 + +这个 API 对于保证训练的正确性不是必须的,但对于观测模型训练状态,打印训练 loss 是非常有用的。 + +```python +from xtuner.parallel.sequence import reduce_sequence_parallel_loss +outputs = llm(input_ids=input_ids, labels=labels, **kwargs) +num_tokens_per_rank = (labels != -100).sum() +# Suppose sequence parallel world size equals to 4, +# losses on rank0, rank1, rank2, rank3 are different. +loss = reduce_sequence_parallel_loss(outputs.loss, num_tokens_per_rank) +# After loss reduction, losses on rank0, rank1, rank2, rank3 are the same. +``` + +上述过程在 xtuner/model/sft.py 中实现。 diff --git a/data/xtuner/docs/zh_cn/user_guides/single_turn_conversation.md b/data/xtuner/docs/zh_cn/user_guides/single_turn_conversation.md new file mode 100644 index 0000000000000000000000000000000000000000..dd0e529051701d624107c941b9801be31989753a --- /dev/null +++ b/data/xtuner/docs/zh_cn/user_guides/single_turn_conversation.md @@ -0,0 +1,303 @@ +# 单轮对话 data pipeline + +- [使用 HuggingFace Hub 数据集](#使用-huggingface-hub-数据集) +- [使用自定义数据集](#使用自定义数据集) + - [使用 Alpaca 格式的自定义数据集](#使用-alpaca-格式的自定义数据集) + - [使用其他格式自定义数据集](#使用其他格式自定义数据集) + +单轮对话指令微调旨在提升模型回复特定指令的能力,在数据处理阶段需要将原始数据转换为XTuner支持的数据集格式。 + +XTuner 支持使用 HuggingFace Hub 数据集、Alpaca 格式的自定义数据集以及其他格式的自定义数据集进行 SFT(Supervised FineTune)。三者的主要区别在于: + +1. 
+
+### Data splitting
+
+Before being passed to the Transformer model, each long sequence is split evenly:
+
+```python
+from xtuner.parallel.sequence import split_for_sequence_parallel
+# attention mask should not be split
+# `dim` is 1 as the shape of tensor is (bs, seq_len, ...)
+input_ids = split_for_sequence_parallel(input_ids, dim=1)
+labels = split_for_sequence_parallel(labels, dim=1)
+position_ids = split_for_sequence_parallel(position_ids, dim=1)
+```
+
+This step is implemented in `xtuner/model/sft.py`.
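+
+The split itself is conceptually simple. A single-process sketch (`split_by_rank` is a hypothetical helper; a real run would query the rank and world size from the sequence parallel process group):
+
+```python
+import torch
+
+def split_by_rank(tensor, rank, world_size, dim=1):
+    # Keep the rank-th of `world_size` equal chunks along `dim`;
+    # padding has already made the size divisible.
+    assert tensor.size(dim) % world_size == 0
+    return tensor.chunk(world_size, dim=dim)[rank]
+
+ids = torch.arange(12).reshape(1, 12)
+print(split_by_rank(ids, rank=0, world_size=4))  # tensor([[0, 1, 2]])
+print(split_by_rank(ids, rank=3, world_size=4))  # tensor([[ 9, 10, 11]])
+```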
+
+### Adapting attention to sequence parallelism
+
+During the attention computation, the tokens of a sequence cannot be processed independently, but the computation of different attention heads is independent. Therefore, as described in [the first section](#简介), an *all-to-all* operation has to be inserted before and after the attention computation (i.e., after qkv_proj and before o_proj).
+
+XTuner provides the dispatch_modules interface to rewrite how a model computes attention:
+
+```python
+from xtuner.model.modules import dispatch_modules
+model: AutoModelForCausalLM
+dispatch_modules(model)
+```
+
+This step is implemented in xtuner/model/sft.py.
+
+### Reducing the loss to log it correctly
+
+This API is not required for training correctness, but logging the training loss is very useful for monitoring the training state.
+
+```python
+from xtuner.parallel.sequence import reduce_sequence_parallel_loss
+outputs = llm(input_ids=input_ids, labels=labels, **kwargs)
+num_tokens_per_rank = (labels != -100).sum()
+# Suppose sequence parallel world size equals to 4,
+# losses on rank0, rank1, rank2, rank3 are different.
+loss = reduce_sequence_parallel_loss(outputs.loss, num_tokens_per_rank)
+# After loss reduction, losses on rank0, rank1, rank2, rank3 are the same.
+```
+
+This step is implemented in xtuner/model/sft.py.
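+
+Why a dedicated reduction is needed: each rank averages its loss over a different number of non-ignored label tokens, so a plain mean of the per-rank losses would be biased. A small arithmetic illustration with made-up values (a real implementation would aggregate across ranks with torch.distributed.all_reduce):
+
+```python
+import torch
+
+# Per-rank mean losses and the number of label tokens behind each mean.
+rank_loss = torch.tensor([2.0, 1.0, 0.5, 0.5])
+rank_tokens = torch.tensor([100.0, 100.0, 400.0, 400.0])
+
+# Token-weighted global mean: the value a correct reduction reports.
+global_loss = (rank_loss * rank_tokens).sum() / rank_tokens.sum()
+print(global_loss)       # tensor(0.7000)
+print(rank_loss.mean())  # tensor(1.): the naive mean over-weights short ranks
+```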
diff --git a/data/xtuner/docs/zh_cn/user_guides/single_turn_conversation.md b/data/xtuner/docs/zh_cn/user_guides/single_turn_conversation.md
new file mode 100644
index 0000000000000000000000000000000000000000..dd0e529051701d624107c941b9801be31989753a
--- /dev/null
+++ b/data/xtuner/docs/zh_cn/user_guides/single_turn_conversation.md
@@ -0,0 +1,303 @@
+# Single-turn conversation data pipeline
+
+- [Using HuggingFace Hub datasets](#using-huggingface-hub-datasets)
+- [Using custom datasets](#using-custom-datasets)
+  - [Using a custom dataset in Alpaca format](#using-a-custom-dataset-in-alpaca-format)
+  - [Using a custom dataset in another format](#using-a-custom-dataset-in-another-format)
+
+Single-turn instruction fine-tuning aims to improve the model's ability to respond to specific instructions; during data processing, the raw data has to be converted into the dataset format supported by XTuner.
+
+XTuner supports SFT (Supervised Fine-Tuning) with HuggingFace Hub datasets, custom datasets in Alpaca format, and custom datasets in other formats. The main differences between the three are:
+
+1. With a HuggingFace Hub dataset, the raw data has to be mapped to XTuner's [single-turn conversation data format](./dataset_format.md#单轮对话数据集格式);
+2. A custom dataset in Alpaca format must contain at least the three columns 'instruction', 'input', and 'output';
+3. For other custom datasets, it is recommended to build the dataset directly in the [single-turn conversation data format](./dataset_format.md#单轮对话数据集格式), **which greatly reduces the time spent on data preprocessing**.
+
+## Using HuggingFace Hub datasets
+
+### Step 1: Map the raw dataset to the standard format
+
+Since dataset formats vary, the raw data has to be mapped to XTuner's [single-turn conversation data format](./dataset_format.md#单轮对话数据集格式). XTuner supports such mappings through a map function. The [alpaca](https://huggingface.co/datasets/tatsu-lab/alpaca) dataset is used below to show how to write one.
+
+The alpaca dataset is formatted as follows:
+
+```python
+>>> from datasets import load_dataset
+
+>>> ds = load_dataset(path='tatsu-lab/alpaca')
+>>> ds['train']
+Dataset({
+    features: ['instruction', 'input', 'output', 'text'],
+    num_rows: 52002
+})
+```
+
+The alpaca train dataset has 52002 rows and 4 columns named 'instruction', 'input', 'output', and 'text'. 'instruction' and 'input' describe the question, and 'output' is the corresponding ground-truth answer. The [single-turn conversation data format](./dataset_format.md#单轮对话数据集格式) specifies that, for single-turn instruction fine-tuning, the data should look like:
+
+```json
+[{
+    "conversation":[
+        {
+            "system": "xxx",
+            "input": "xxx",
+            "output": "xxx"
+        }
+    ]
+},
+{
+    "conversation":[
+        {
+            "system": "xxx",
+            "input": "xxx",
+            "output": "xxx"
+        }
+    ]
+}]
+```
+
+The raw data can therefore be mapped to the standard format with the following map function:
+
+```python
+# Suppose this function is stored in ./map_fn.py
+SYSTEM_ALPACA = ('Below is an instruction that describes a task. '
+                 'Write a response that appropriately completes the request.\n')
+def custom_map_fn(example):
+    if example.get('output') == '':
+        return {'conversation': []}
+    else:
+        return {
+            'conversation': [{
+                'system': SYSTEM_ALPACA,
+                'input': f"{example['instruction']}\n{example['input']}",
+                'output': example['output']
+            }]
+        }
+```
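+
+To sanity-check the mapping before training, `custom_map_fn` (as defined above) can be applied to one record directly; the record below is made up in the alpaca schema:
+
+```python
+example = {
+    'instruction': 'Give three tips for staying healthy.',
+    'input': '',
+    'output': '1. Eat a balanced diet. 2. Exercise regularly. 3. Get enough sleep.',
+    'text': ''  # present in the raw data but unused by the map function
+}
+print(custom_map_fn(example))
+# {'conversation': [{'system': 'Below is an instruction that describes ...',
+#                    'input': 'Give three tips for staying healthy.\n',
+#                    'output': '1. Eat a balanced diet. ...'}]}
+```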
+
+### Step 2: List candidate model names
+
+XTuner provides several ready-to-use configs, which can be listed with:
+
+```bash
+xtuner list-cfg -p internlm
+```
+
+`-p` performs a fuzzy search; to train other models, replace `internlm` with the name of any other model supported by XTuner.
+
+### Step 3: Copy the config file
+
+If the provided configs do not fully meet the requirements, export one and adapt it:
+
+```bash
+xtuner copy-cfg ${CONFIG_NAME} ${SAVE_DIR}
+```
+
+For example, export the config named `internlm_7b_qlora_alpaca_e3` to the current directory:
+
+```bash
+xtuner copy-cfg internlm_7b_qlora_alpaca_e3 .
+```
+
+### Step 4: Modify the config file
+
+The config copied in Step 3 needs the following changes:
+
+1. Import the map function `custom_map_fn` implemented in Step 1
+2. Replace `dataset_map_fn` in `train_dataset` with `custom_map_fn`
+3. Adjust the path of the raw dataset; see the [user documentation](https://huggingface.co/docs/datasets/loading) for the details of `load_dataset`
+
+```diff
+from xtuner.dataset import process_hf_dataset
+from datasets import load_dataset
+- from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory
++ from xtuner.dataset.map_fns import template_map_fn_factory
++ from mmengine.config import read_base
++ with read_base():
++     from .map_fn import custom_map_fn
+...
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+- data_path = 'tatsu-lab/alpaca'
++ data_path = 'path/to/your/data'
+...
+#######################################################################
+#                      STEP 3  Dataset & Dataloader                   #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=data_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+- dataset_map_fn=alpaca_map_fn,
++ dataset_map_fn=custom_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length)
+...
+```
+
+### Step 5: Check the dataset (optional)
+
+After modifying the config, run the `xtuner/tools/check_custom_dataset.py` script to verify that the dataset is built correctly:
+
+```bash
+xtuner check-custom-dataset $CONFIG
+```
+
+`$CONFIG` is the path of the config file modified in Step 4.
+
+## Using custom datasets
+
+### Using a custom dataset in Alpaca format
+
+If the custom dataset follows the `alpaca` format, SFT training can be set up with the following steps.
+
+#### Step 1: List candidate model names
+
+XTuner provides several ready-to-use configs, which can be listed with:
+
+```bash
+xtuner list-cfg -p internlm
+```
+
+`-p` performs a fuzzy search; to train other models, replace `internlm` with the name of any other model supported by XTuner (such as `baichuan` or `llama`).
+
+#### Step 2: Copy the config file
+
+```bash
+xtuner copy-cfg ${CONFIG_NAME} ${SAVE_DIR}
+```
+
+Since the custom dataset follows the Alpaca format, `CONFIG_NAME` should be chosen from the Alpaca-related candidates listed in Step 1. For example, export the config named `internlm_7b_qlora_alpaca_e3` to the current directory:
+
+```bash
+xtuner copy-cfg internlm_7b_qlora_alpaca_e3 .
+```
+
+#### Step 3: Modify the config file
+
+The config copied in Step 2 needs the following changes:
+
+```diff
+from xtuner.dataset import process_hf_dataset
+from datasets import load_dataset
+from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory
+from xtuner.utils import PROMPT_TEMPLATE
+...
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+- data_path = 'tatsu-lab/alpaca'
++ data_path = 'path/to/your/json/data'
+...
+#######################################################################
+#                      STEP 3  Dataset & Dataloader                   #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+- dataset=dict(type=load_dataset, path=data_path),
++ dataset=dict(
++     type=load_dataset, path='json', data_files=dict(train=data_path)),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=alpaca_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length)
+...
+```
+
+### Using a custom dataset in another format
+
+#### Step 1: Prepare the dataset
+
+Prepare the custom data according to XTuner's [single-turn conversation data format](./dataset_format.md#单轮对话数据集格式):
+
+```json
+[{
+    "conversation":[
+        {
+            "system": "xxx",
+            "input": "xxx",
+            "output": "xxx"
+        }
+    ]
+},
+{
+    "conversation":[
+        {
+            "system": "xxx",
+            "input": "xxx",
+            "output": "xxx"
+        }
+    ]
+}]
+```
+
+#### Step 2: List candidate model names
+
+```bash
+xtuner list-cfg -p internlm
+```
+
+`-p` performs a fuzzy search; to train other models, replace `internlm` with the name of any other model supported by XTuner.
+
+#### Step 3: Copy the config file
+
+```bash
+xtuner copy-cfg internlm_7b_qlora_alpaca_e3 .
+```
+
+#### Step 4: Modify the config file
+
+The config copied in Step 3 needs the following changes:
+
+1. Adjust the path of the raw dataset
+2. Since the dataset is already in the standard format, set `dataset_map_fn` in `train_dataset` to `None`
+
+```diff
+from xtuner.dataset import process_hf_dataset
+from datasets import load_dataset
+- from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory
++ from xtuner.dataset.map_fns import template_map_fn_factory
+...
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+- data_path = 'tatsu-lab/alpaca'
++ data_path = 'path/to/your/json/data'
+...
+#######################################################################
+#                      STEP 3  Dataset & Dataloader                   #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+- dataset=dict(type=load_dataset, path=data_path),
++ dataset=dict(
++     type=load_dataset, path='json', data_files=dict(train=data_path)),
+    tokenizer=tokenizer,
+    max_length=max_length,
+- dataset_map_fn=alpaca_map_fn,
++ dataset_map_fn=None,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length)
+...
+```
+
+#### Step 5: Check the dataset (optional)
+
+After modifying the config, run the `xtuner/tools/check_custom_dataset.py` script to verify that the dataset is built correctly:
+
+```bash
+xtuner check-custom-dataset $CONFIG
+```
+
+`$CONFIG` is the path of the config file modified in Step 4.
diff --git a/data/xtuner/docs/zh_cn/user_guides/varlen_attention.md b/data/xtuner/docs/zh_cn/user_guides/varlen_attention.md
new file mode 100644
index 0000000000000000000000000000000000000000..5b4ea24d1898588ebe71db379d0bae90a95a1cd5
--- /dev/null
+++ b/data/xtuner/docs/zh_cn/user_guides/varlen_attention.md
@@ -0,0 +1,96 @@
+# Using Variable Length Attention when training large language models
+
+## Tutorial
+
+### Step 1: Install flash_attn
+
+The variable-length attention implemented in XTuner depends on Flash Attention 2, which can be installed with:
+
+```bash
+MAX_JOBS=4 pip install flash-attn --no-build-isolation
+```
+
+### Step 2: List candidate model names
+
+XTuner provides several ready-to-use configs, which can be listed with:
+
+```bash
+xtuner list-cfg -p internlm
+```
+
+`-p` performs a fuzzy search; to train other models, replace `internlm` with the name of any other model supported by XTuner.
+
+### Step 3: Copy the config file
+
+Export the config to be used:
+
+```bash
+xtuner copy-cfg ${CONFIG_NAME} ${SAVE_DIR}
+```
+
+For example, export the config named `internlm_7b_full_oasst1_e3` to the current directory:
+
+```bash
+xtuner copy-cfg internlm_7b_full_oasst1_e3 .
+```
+
+### Step 4: Modify the config file
+
+Change the `use_varlen_attn` attribute in the config copied in Step 3 from False to True to enable variable-length attention:
+
+```diff
+...
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'internlm/internlm-7b'
+- use_varlen_attn = False
++ use_varlen_attn = True
+...
+```
+
+**Note: with `use_varlen_attn = True`, make sure that `batch_size` is set to 1 and `pack_to_max_length` is set to True.**
+
+### Step 5: Start training
+
+```
+xtuner train ${CONFIG_NAME_OR_PATH}
+```
+
+For example, training can be launched with the `internlm_7b_full_oasst1_e3_copy.py` obtained in Step 4:
+
+```bash
+# On a single GPU
+xtuner train internlm_7b_full_oasst1_e3_copy.py --deepspeed deepspeed_zero1
+# On multiple GPUs
+(DIST) NPROC_PER_NODE=${GPU_NUM} xtuner train internlm_7b_full_oasst1_e3_copy.py --deepspeed deepspeed_zero1
+(SLURM) srun ${SRUN_ARGS} xtuner train internlm_7b_full_oasst1_e3_copy.py --launcher slurm --deepspeed deepspeed_zero1
+```
+
+- `--deepspeed` enables [DeepSpeed](https://github.com/microsoft/DeepSpeed) 🚀 to optimize the training process. If DeepSpeed is not installed yet, it can be installed with `pip install 'deepspeed>=0.12.3'`. XTuner ships several strategies, including ZeRO-1, ZeRO-2, and ZeRO-3. To disable this feature, simply remove the argument.
+
+### Step 6: Convert the model
+
+Convert the saved PTH model (a directory, if DeepSpeed was used) into a HuggingFace model:
+
+```
+xtuner convert pth_to_hf ${CONFIG_NAME_OR_PATH} ${PTH} ${SAVE_PATH}
+```
+
+For the example above, the conversion command is:
+
+```
+xtuner convert pth_to_hf internlm_7b_full_oasst1_e3_copy.py ${PTH} ${SAVE_PATH}
+```
+
+`${PTH}` is the path where the training weights were saved; if not specified, they are saved under `./work_dirs/internlm_7b_full_oasst1_e3_copy` by default.
+
+## How variable-length attention works
+
+*(figure: attention scope over a packed sequence, without (left) and with (right) variable-length attention)*
+
+Suppose a sample packed from several *short* pieces of data has a length of 4096. Without variable-length attention, every token attends to all 4096 tokens during the attention computation, as shown on the left of the figure above. With variable-length attention enabled, each token only attends to the tokens of the *short* piece it belongs to, as shown on the right.
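+
+Conceptually this corresponds to a block-diagonal attention mask. Below is an illustrative sketch only; the actual implementation passes cumulative sequence lengths (e.g. cu_seqlens = [0, 4, 7, 9] for the example below) to Flash Attention 2 instead of materializing a dense mask:
+
+```python
+import torch
+
+def varlen_attention_mask(seq_lens):
+    # Block-diagonal boolean mask implied by packing the given short
+    # samples into one sequence: True = may attend.
+    total = sum(seq_lens)
+    mask = torch.zeros(total, total, dtype=torch.bool)
+    start = 0
+    for n in seq_lens:
+        mask[start:start + n, start:start + n] = True
+        start += n
+    return mask
+
+# Three short samples packed into one length-9 sequence.
+print(varlen_attention_mask([4, 3, 2]).int())
+```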
diff --git a/data/xtuner/examples/demo_data/multi_turn_1/README.md b/data/xtuner/examples/demo_data/multi_turn_1/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..0b612cda6cd078c93f8b9f6a2011f8c9235419a8
--- /dev/null
+++ b/data/xtuner/examples/demo_data/multi_turn_1/README.md
@@ -0,0 +1,268 @@
+# Multi-turn Conversation Example 1
+
+> \[!IMPORTANT\]
+> Data must be used in conjunction with the corresponding map_fn.
+
+## Data
+
+`./data.json`
+
+```json
+[{
+    "messages":[
+        {
+            "toy_system": "You are a helpful AI assistant.",
+            "toy_input": "Give three tips for staying healthy.",
+            "toy_output": "1.Eat a balanced diet. 2. Exercise regularly. 3. Get enough sleep."
+        },
+        {
+            "toy_input": "How to study English?",
+            "toy_output": "1. Set clear goals. 2. Create a study plan. 3. Build vocabulary. 4. Practice speaking."
+        }
+    ]
+},
+{
+    "messages":[
+        {
+            "toy_system": "You are a helpful AI assistant.",
+            "toy_input": "How to study English?",
+            "toy_output": "1. Set clear goals. 2. Create a study plan. 3. Build vocabulary. 4. Practice speaking."
+        },
+        {
+            "toy_input": "Give three tips for staying healthy.",
+            "toy_output": "1.Eat a balanced diet. 2. Exercise regularly. 3. Get enough sleep."
+        }
+    ]
+}]
+```
+
+## Map Function
+
+`./map_fn.py`
+
+```python
+def multi_turn_1_map_fn(example):
+    messages = example['messages']
+    conversation = []
+    for msg in messages:
+        conversation.append({
+            'system': msg['toy_system'],
+            'input': msg['toy_input'],
+            'output': msg['toy_output']
+        })
+    return {'conversation': conversation}
+```
+
+## Config
+
+Based on [internlm_7b_qlora_json_e3](../../../xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_json_e3.py).
+
+```diff
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch +from datasets import load_dataset ++ from mmengine.config import read_base +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import template_map_fn_factory +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + ++with read_base(): ++ from .map_fn import multi_turn_1_map_fn as dataset_map_fn ++ +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-7b' + +# Data +-data_path = 'path/to/your/json_data' ++data_path = './data.json' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, ++ dataset_map_fn=dataset_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + 
collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + end=max_epochs, + convert_to_iter_based=True) + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) +``` + +## Quick Start + +```bash +cd ./examples/demo_data/multi_turn_1 +xtuner train config.py +``` diff --git a/data/xtuner/examples/demo_data/multi_turn_1/config.py b/data/xtuner/examples/demo_data/multi_turn_1/config.py new file mode 100644 index 0000000000000000000000000000000000000000..c2405ab9ab14f368fc752843d3173268e224be3d --- /dev/null +++ b/data/xtuner/examples/demo_data/multi_turn_1/config.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+import torch +from datasets import load_dataset +from mmengine.config import read_base +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import template_map_fn_factory +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +with read_base(): + from .map_fn import multi_turn_1_map_fn as dataset_map_fn + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-7b' + +# Data +data_path = './data.json' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=dataset_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn)) + 
+####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + end=max_epochs, + convert_to_iter_based=True) + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/examples/demo_data/multi_turn_1/data.json b/data/xtuner/examples/demo_data/multi_turn_1/data.json new file mode 100644 index 0000000000000000000000000000000000000000..10a87e1d6609b83a8fc669403ff3145f77072c6d --- /dev/null +++ b/data/xtuner/examples/demo_data/multi_turn_1/data.json @@ -0,0 +1,26 @@ +[{ + "messages":[ + { + "toy_system": "You are a helpful AI assistant.", + "toy_input": "Give three tips for staying healthy.", + "toy_output": "1.Eat a balanced diet. 2. Exercise regularly. 3. Get enough sleep." + }, + { + "toy_input": "How to study English?", + "toy_output": "1. Set clear goals. 2. Create a study plan. 3. Build vocabulary. 4. Practice speaking." + } + ] +}, +{ + "messages":[ + { + "toy_system": "You are a helpful AI assistant.", + "toy_input": "How to study English?", + "toy_output": "1. Set clear goals. 2. Create a study plan. 3. Build vocabulary. 4. Practice speaking." 
+ }, + { + "toy_input": "Give three tips for staying healthy.", + "toy_output": "1.Eat a balanced diet. 2. Exercise regularly. 3. Get enough sleep." + } + ] +}] diff --git a/data/xtuner/examples/demo_data/multi_turn_1/map_fn.py b/data/xtuner/examples/demo_data/multi_turn_1/map_fn.py new file mode 100644 index 0000000000000000000000000000000000000000..df8ba0d208e38b9cdf3180b3a4b7f4b242e848a1 --- /dev/null +++ b/data/xtuner/examples/demo_data/multi_turn_1/map_fn.py @@ -0,0 +1,11 @@ +# Copyright (c) OpenMMLab. All rights reserved. +def multi_turn_1_map_fn(example): + messages = example['messages'] + conversation = [] + for msg in messages: + conversation.append({ + 'system': msg['toy_system'], + 'input': msg['toy_input'], + 'output': msg['toy_output'] + }) + return {'conversation': conversation} diff --git a/data/xtuner/examples/demo_data/multi_turn_2/README.md b/data/xtuner/examples/demo_data/multi_turn_2/README.md new file mode 100644 index 0000000000000000000000000000000000000000..9c5edd3323ccba245899416dc47c3f6e2cac0b96 --- /dev/null +++ b/data/xtuner/examples/demo_data/multi_turn_2/README.md @@ -0,0 +1,606 @@ +# Multi-turn Conversation Example 2 + +> \[!IMPORTANT\] +> Data must be used in conjunction with the corresponding map_fn. + +## Data + +`./data.json` + +```json +[{ + "messages":[ + { + "role": "system", + "content": "You are a helpful AI assistant." + }, + { + "role": "user", + "content": "Give three tips for staying healthy." + }, + { + "role": "assistant", + "content": "1.Eat a balanced diet. 2. Exercise regularly. 3. Get enough sleep." + }, + { + "role": "user", + "content": "How to study English?" + }, + { + "role": "assistant", + "content": "1. Set clear goals. 2. Create a study plan. 3. Build vocabulary. 4. Practice speaking." + } + ] +}, +{ + "messages":[ + { + "role": "system", + "content": "You are a helpful AI assistant." + }, + { + "role": "user", + "content": "How to study English?" + }, + { + "role": "assistant", + "content": "1. Set clear goals. 2. Create a study plan. 3. Build vocabulary. 4. Practice speaking." + }, + { + "role": "user", + "content": "Give three tips for staying healthy." + }, + { + "role": "assistant", + "content": "1.Eat a balanced diet. 2. Exercise regularly. 3. Get enough sleep." + } + ] +}] +``` + +## Map Function + +`./map_fn.py` + +```python +def multi_turn_2_map_fn(example): + messages = example['messages'] + system = '' + input = '' + conversation = [] + while messages and messages[0]['role'] == 'assistant': + # Skip the first one if it is from assistant + messages = messages[1:] + for msg in messages: + if msg['role'] == 'system': + system = msg['content'] + elif msg['role'] == 'user': + input += msg['content'] + elif msg['role'] == 'assistant': + conversation.append({ + 'system': system, + 'input': input, + 'output': msg['content'] + }) + system = '' + input = '' + else: + raise NotImplementedError + return {'conversation': conversation} +``` + +## Config + +Based on [internlm_7b_qlora_json_e3](../../../xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_json_e3.py). + +```diff +# Copyright (c) OpenMMLab. All rights reserved. 
+import torch +from datasets import load_dataset ++ from mmengine.config import read_base +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import template_map_fn_factory +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + ++with read_base(): ++ from .map_fn import multi_turn_2_map_fn as dataset_map_fn ++ +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-7b' + +# Data +-data_path = 'path/to/your/json_data' ++data_path = './data.json' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, ++ dataset_map_fn=dataset_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + 
collate_fn=dict(type=default_collate_fn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = dict(
+    type=CosineAnnealingLR,
+    eta_min=0.0,
+    by_epoch=True,
+    end=max_epochs,
+    convert_to_iter_based=True)
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed evrionment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
+```
+
+## Quick Start
+
+```bash
+cd ./examples/demo_data/multi_turn_2
+xtuner train config.py
+```
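+
+For reference, this is how the map function above folds one `./data.json` record into XTuner's conversation format (an illustration that assumes it is run from this example directory, so that `map_fn.py` and `data.json` can be found):
+
+```python
+import json
+
+from map_fn import multi_turn_2_map_fn
+
+with open('data.json') as f:
+    record = json.load(f)[0]
+print(json.dumps(multi_turn_2_map_fn(record), indent=2))
+# {
+#   "conversation": [
+#     {
+#       "system": "You are a helpful AI assistant.",
+#       "input": "Give three tips for staying healthy.",
+#       "output": "1.Eat a balanced diet. 2. Exercise regularly. 3. Get enough sleep."
+#     },
+#     {
+#       "system": "",
+#       "input": "How to study English?",
+#       "output": "1. Set clear goals. 2. Create a study plan. 3. Build vocabulary. 4. Practice speaking."
+#     }
+#   ]
+# }
+```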
+ }, + { + "role": "user", + "content": "Give three tips for staying healthy." + }, + { + "role": "assistant", + "content": "1.Eat a balanced diet. 2. Exercise regularly. 3. Get enough sleep." + } + ] +}] +``` + +## Map Function + +`./map_fn.py` + +```python +def multi_turn_2_map_fn(example): + messages = example['messages'] + system = '' + input = '' + conversation = [] + while messages and messages[0]['role'] == 'assistant': + # Skip the first one if it is from assistant + messages = messages[1:] + for msg in messages: + if msg['role'] == 'system': + system = msg['content'] + elif msg['role'] == 'user': + input += msg['content'] + elif msg['role'] == 'assistant': + conversation.append({ + 'system': system, + 'input': input, + 'output': msg['content'] + }) + system = '' + input = '' + else: + raise NotImplementedError + return {'conversation': conversation} +``` + +## Config + +Based on [internlm_7b_qlora_json_e3](../../../xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_json_e3.py). + +```diff +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset ++ from mmengine.config import read_base +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import template_map_fn_factory +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + ++with read_base(): ++ from .map_fn import multi_turn_2_map_fn as dataset_map_fn ++ +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-7b' + +# Data +-data_path = 'path/to/your/json_data' ++data_path = './data.json' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + 
llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, ++ dataset_map_fn=dataset_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + end=max_epochs, + convert_to_iter_based=True) + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
diff --git a/data/xtuner/examples/demo_data/multi_turn_2/config.py b/data/xtuner/examples/demo_data/multi_turn_2/config.py
new file mode 100644
index 0000000000000000000000000000000000000000..df063a399221e6c488c073df3351007ef9bc398b
--- /dev/null
+++ b/data/xtuner/examples/demo_data/multi_turn_2/config.py
@@ -0,0 +1,199 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.config import read_base
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import template_map_fn_factory
+from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE
+
+with read_base():
+    from .map_fn import multi_turn_2_map_fn as dataset_map_fn
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'internlm/internlm-7b'
+
+# Data
+data_path = './data.json'
+prompt_template = PROMPT_TEMPLATE.default
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = ''
+evaluation_inputs = [
+    '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai'
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+
type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=dataset_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + end=max_epochs, + convert_to_iter_based=True) + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/examples/demo_data/multi_turn_2/data.json b/data/xtuner/examples/demo_data/multi_turn_2/data.json new file mode 100644 index 0000000000000000000000000000000000000000..79604a5f2943c4dce6f2301d2b7f131419613fa2 --- /dev/null +++ b/data/xtuner/examples/demo_data/multi_turn_2/data.json @@ -0,0 +1,48 @@ +[{ + "messages":[ + { + "role": "system", + "content": "You are a helpful AI assistant." + }, + { + "role": "user", + "content": "Give three tips for staying healthy." + }, + { + "role": "assistant", + "content": "1.Eat a balanced diet. 2. Exercise regularly. 3. Get enough sleep." + }, + { + "role": "user", + "content": "How to study English?" + }, + { + "role": "assistant", + "content": "1. Set clear goals. 2. Create a study plan. 3. Build vocabulary. 4. Practice speaking." + } + ] +}, +{ + "messages":[ + { + "role": "system", + "content": "You are a helpful AI assistant." + }, + { + "role": "user", + "content": "How to study English?" + }, + { + "role": "assistant", + "content": "1. Set clear goals. 2. Create a study plan. 3. Build vocabulary. 4. Practice speaking." + }, + { + "role": "user", + "content": "Give three tips for staying healthy." + }, + { + "role": "assistant", + "content": "1.Eat a balanced diet. 2. Exercise regularly. 3. Get enough sleep." + } + ] +}] diff --git a/data/xtuner/examples/demo_data/multi_turn_2/map_fn.py b/data/xtuner/examples/demo_data/multi_turn_2/map_fn.py new file mode 100644 index 0000000000000000000000000000000000000000..72459fad751704750f0e824fd38b5b33a82e931e --- /dev/null +++ b/data/xtuner/examples/demo_data/multi_turn_2/map_fn.py @@ -0,0 +1,25 @@ +# Copyright (c) OpenMMLab. All rights reserved. +def multi_turn_2_map_fn(example): + messages = example['messages'] + system = '' + input = '' + conversation = [] + while messages and messages[0]['role'] == 'assistant': + # Skip the first one if it is from assistant + messages = messages[1:] + for msg in messages: + if msg['role'] == 'system': + system = msg['content'] + elif msg['role'] == 'user': + input += msg['content'] + elif msg['role'] == 'assistant': + conversation.append({ + 'system': system, + 'input': input, + 'output': msg['content'] + }) + system = '' + input = '' + else: + raise NotImplementedError + return {'conversation': conversation} diff --git a/data/xtuner/examples/demo_data/pretrain/README.md b/data/xtuner/examples/demo_data/pretrain/README.md new file mode 100644 index 0000000000000000000000000000000000000000..3db0e7024272126efe09e600680ec798e7d9d0c1 --- /dev/null +++ b/data/xtuner/examples/demo_data/pretrain/README.md @@ -0,0 +1,248 @@ +# Pretrain Example + +> \[!IMPORTANT\] +> Data must be used in conjunction with the corresponding map_fn. + +## Data + +`./data.json` + +```json +[{ + "toy_text": "I am an artificial intelligence (AI) assistant named InternLM. 
I was created by the Shanghai AI Laboratory and my purpose is to assist users with various tasks through natural language processing technology."
+},
+{
+    "toy_text": "I am an artificial intelligence programmed to assist with various types of tasks, including answering questions, providing information, and performing automated processes."
+}]
+```
+
+## Map Function
+
+`./map_fn.py`
+
+```python
+def pretrain_map_fn(example):
+    return {
+        'conversation': [{
+            'input': '',
+            'output': example['toy_text'].strip()
+        }]
+    }
+```
+
+## Config
+
+Based on [internlm_7b_qlora_json_e3](../../../xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_json_e3.py).
+
+```diff
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
++ from mmengine.config import read_base
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+-from xtuner.dataset.map_fns import template_map_fn_factory
+-from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook
++from xtuner.engine.hooks import DatasetInfoHook
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+-from xtuner.utils import PROMPT_TEMPLATE
+
++with read_base():
++    from .map_fn import pretrain_map_fn as dataset_map_fn
++
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'internlm/internlm-7b'
+
+# Data
+-data_path = 'path/to/your/json_data'
++data_path = './data.json'
+-prompt_template = PROMPT_TEMPLATE.default
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = ''
+evaluation_inputs = [
+    '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai'
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, ++ dataset_map_fn=dataset_map_fn, +- template_map_fn=dict( +- type=template_map_fn_factory, template=prompt_template), ++ template_map_fn=None, + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + end=max_epochs, + convert_to_iter_based=True) + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +-custom_hooks = [ +- dict(type=DatasetInfoHook, tokenizer=tokenizer), +- dict( +- type=EvaluateChatHook, +- tokenizer=tokenizer, +- every_n_iters=evaluation_freq, +- evaluation_inputs=evaluation_inputs, +- system=SYSTEM, +- prompt_template=prompt_template) +-] ++custom_hooks = [dict(type=DatasetInfoHook, tokenizer=tokenizer)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) +``` + +## Quick Start + +```bash +cd ./examples/demo_data/pretrain +xtuner train config.py +``` diff --git a/data/xtuner/examples/demo_data/pretrain/config.py b/data/xtuner/examples/demo_data/pretrain/config.py new file mode 100644 index 0000000000000000000000000000000000000000..c1b8b14416e7c19c786844952f6d633f6f7efc6e --- /dev/null +++ b/data/xtuner/examples/demo_data/pretrain/config.py @@ -0,0 +1,186 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.config import read_base +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.engine.hooks import DatasetInfoHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune + +with read_base(): + from .map_fn import pretrain_map_fn as dataset_map_fn + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-7b' + +# Data +data_path = './data.json' +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + 
bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                  #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(
+        type=load_dataset, path='json', data_files=dict(train=data_path)),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=dataset_map_fn,
+    template_map_fn=None,
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                   #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = dict(
+    type=CosineAnnealingLR,
+    eta_min=0.0,
+    by_epoch=True,
+    end=max_epochs,
+    convert_to_iter_based=True)
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                          #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [dict(type=DatasetInfoHook, tokenizer=tokenizer)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/examples/demo_data/pretrain/data.json b/data/xtuner/examples/demo_data/pretrain/data.json
new file mode 100644
index 0000000000000000000000000000000000000000..0f6621838d940a384eb4b388a2c22f651a05f637
--- /dev/null
+++ b/data/xtuner/examples/demo_data/pretrain/data.json
@@ -0,0 +1,6 @@
+[{
+    "toy_text": "I am an artificial intelligence (AI) assistant named InternLM. I was created by the Shanghai AI Laboratory and my purpose is to assist users with various tasks through natural language processing technology."
+},
+{
+    "toy_text": "I am an artificial intelligence programmed to assist with various types of tasks, including answering questions, providing information, and performing automated processes."
+}]
diff --git a/data/xtuner/examples/demo_data/pretrain/map_fn.py b/data/xtuner/examples/demo_data/pretrain/map_fn.py
new file mode 100644
index 0000000000000000000000000000000000000000..707d1d021cbb0a69de306152baf714f312746279
--- /dev/null
+++ b/data/xtuner/examples/demo_data/pretrain/map_fn.py
@@ -0,0 +1,8 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+def pretrain_map_fn(example):
+    return {
+        'conversation': [{
+            'input': '',
+            'output': example['toy_text'].strip()
+        }]
+    }
diff --git a/data/xtuner/examples/demo_data/single_turn/README.md b/data/xtuner/examples/demo_data/single_turn/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..7826ea3c2004a77f5f9833bd545670f266a9ac22
--- /dev/null
+++ b/data/xtuner/examples/demo_data/single_turn/README.md
@@ -0,0 +1,498 @@
+# Single-turn Conversation Example
+
+> \[!IMPORTANT\]
+> Data must be used in conjunction with the corresponding map_fn.
+
+## Data
+
+`./data.json`
+
+```json
+[{
+    "toy_system": "You are a helpful AI assistant.",
+    "toy_input": "Give three tips for staying healthy.",
+    "toy_output": "1.Eat a balanced diet. 2. Exercise regularly. 3. Get enough sleep."
+},
+{
+    "toy_system": "You are a helpful AI assistant.",
+    "toy_input": "How to study English?",
+    "toy_output": "1. Set clear goals. 2. Create a study plan. 3. Build vocabulary. 4. Practice speaking."
+}]
+```
+
+## Map Function
+
+`./map_fn.py`
+
+```python
+def single_turn_map_fn(example):
+    return {
+        'conversation': [{
+            'system': example['toy_system'],
+            'input': example['toy_input'],
+            'output': example['toy_output']
+        }]
+    }
+```
+
+## Config
+
+Based on [internlm_7b_qlora_json_e3](../../../xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_json_e3.py).
+
+```diff
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch +from datasets import load_dataset ++ from mmengine.config import read_base +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import template_map_fn_factory +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + ++with read_base(): ++ from .map_fn import single_turn_map_fn as dataset_map_fn ++ +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-7b' + +# Data +-data_path = 'path/to/your/json_data' ++data_path = './data.json' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, ++ dataset_map_fn=dataset_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + 
collate_fn=dict(type=default_collate_fn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                   #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = dict(
+    type=CosineAnnealingLR,
+    eta_min=0.0,
+    by_epoch=True,
+    end=max_epochs,
+    convert_to_iter_based=True)
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                          #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
+```
+
+## Quick Start
+
+```bash
+cd ./examples/demo_data/single_turn
+xtuner train config.py
+```
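+
+## Sanity Check (Optional)
+
+A quick way to confirm that the data and map function pair up as intended is
+to apply the map function to the raw records directly. The sketch below is a
+minimal standalone example; it only assumes it is run from this directory,
+next to `data.json` and `map_fn.py`:
+
+```python
+import json
+
+from map_fn import single_turn_map_fn
+
+with open('data.json') as f:
+    records = json.load(f)
+
+# Each raw record becomes one single-turn `conversation` entry: the
+# 'system', 'input' and 'output' fields come from toy_system, toy_input
+# and toy_output respectively.
+for example in records:
+    print(single_turn_map_fn(example))
+```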
diff --git a/data/xtuner/examples/demo_data/single_turn/config.py b/data/xtuner/examples/demo_data/single_turn/config.py
new file mode 100644
index 0000000000000000000000000000000000000000..d6d5210583cde1ae1343caab7c46191cb8cc2702
--- /dev/null
+++ b/data/xtuner/examples/demo_data/single_turn/config.py
@@ -0,0 +1,199 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch +from datasets import load_dataset +from mmengine.config import read_base +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import template_map_fn_factory +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +with read_base(): + from .map_fn import single_turn_map_fn as dataset_map_fn + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-7b' + +# Data +data_path = './data.json' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=dataset_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn)) + 
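+# NOTE: `dataset_map_fn` (single_turn_map_fn) first converts the raw
+# toy_system / toy_input / toy_output fields into the `conversation` format;
+# `template_map_fn` then wraps each turn with `prompt_template` before
+# tokenization and packing.
+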
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                   #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = dict(
+    type=CosineAnnealingLR,
+    eta_min=0.0,
+    by_epoch=True,
+    end=max_epochs,
+    convert_to_iter_based=True)
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                          #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/examples/demo_data/single_turn/data.json b/data/xtuner/examples/demo_data/single_turn/data.json
new file mode 100644
index 0000000000000000000000000000000000000000..a8399830c02746dea763a7091da8dc7d792873f0
--- /dev/null
+++ b/data/xtuner/examples/demo_data/single_turn/data.json
@@ -0,0 +1,10 @@
+[{
+    "toy_system": "You are a helpful AI assistant.",
+    "toy_input": "Give three tips for staying healthy.",
+    "toy_output": "1.Eat a balanced diet. 2. Exercise regularly. 3. Get enough sleep."
+},
+{
+    "toy_system": "You are a helpful AI assistant.",
+    "toy_input": "How to study English?",
+    "toy_output": "1. Set clear goals. 2. Create a study plan. 3. Build vocabulary. 4. Practice speaking."
+}]
diff --git a/data/xtuner/examples/demo_data/single_turn/map_fn.py b/data/xtuner/examples/demo_data/single_turn/map_fn.py
new file mode 100644
index 0000000000000000000000000000000000000000..b17c910949d1c3b1cbdc94d55f1c420feaba5e4a
--- /dev/null
+++ b/data/xtuner/examples/demo_data/single_turn/map_fn.py
@@ -0,0 +1,9 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+def single_turn_map_fn(example):
+    return {
+        'conversation': [{
+            'system': example['toy_system'],
+            'input': example['toy_input'],
+            'output': example['toy_output']
+        }]
+    }
diff --git a/data/xtuner/examples/huggingface_trainer/README.md b/data/xtuner/examples/huggingface_trainer/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..c827c99c3584d23c6d3c2f61991789c3e22bb467
--- /dev/null
+++ b/data/xtuner/examples/huggingface_trainer/README.md
@@ -0,0 +1,40 @@
+# How to use XTuner in the HuggingFace training pipeline
+
+## Quick run
+
+1. Step into `examples`
+
+   ```shell
+   cd ./examples
+   ```
+
+2. Run a training script
+
+   ```shell
+   # QLoRA-train internlm-7b on the alpaca dataset
+   python train_qlora_hf.py --model_name_or_path internlm/internlm-7b --dataset_name_or_path tatsu-lab/alpaca
+   ```
+
+   `--model_name_or_path`: the model name or path to train.
+
+   `--dataset_name_or_path`: the dataset name or path to use.
+
+## How to customize your experiment
+
+XTuner APIs are compatible with HuggingFace's transformers, so to customize an
+experiment you simply pass your hyperparameters as you would to a HuggingFace
+`Trainer`:
+
+```shell
+# training example: the first two flags are custom XTuner args, the rest are
+# HuggingFace's default training args
+python train_qlora_hf.py \
+    --model_name_or_path internlm/internlm-7b \
+    --dataset_name_or_path tatsu-lab/alpaca \
+    --do_train True \
+    --per_device_train_batch_size 1 \
+    --learning_rate 2e-5 \
+    --save_strategy epoch \
+    --lr_scheduler_type cosine \
+    --logging_steps 1
+```
diff --git a/data/xtuner/examples/huggingface_trainer/train_hf.py b/data/xtuner/examples/huggingface_trainer/train_hf.py
new file mode 100644
index 0000000000000000000000000000000000000000..7a7af2eb736916bd350d4e905e0837d7be648988
--- /dev/null
+++ b/data/xtuner/examples/huggingface_trainer/train_hf.py
@@ -0,0 +1,37 @@
+# Copyright (c) OpenMMLab. All rights reserved.
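+# Full-parameter fine-tuning via the HuggingFace `Trainer`. Invocation mirrors
+# the QLoRA example in README.md (the model/dataset below are just examples):
+#   python train_hf.py \
+#       --model_name_or_path internlm/internlm-7b \
+#       --dataset_name_or_path tatsu-lab/alpaca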
+import transformers
+from transformers import Trainer
+
+from xtuner.apis import DefaultTrainingArguments, build_model
+from xtuner.apis.datasets import alpaca_data_collator, alpaca_dataset
+
+
+def train():
+    # get DefaultTrainingArguments, updated with any passed args
+    parser = transformers.HfArgumentParser(DefaultTrainingArguments)
+    training_args = parser.parse_args_into_dataclasses()[0]
+
+    # init model and dataset
+    model, tokenizer = build_model(
+        model_name_or_path=training_args.model_name_or_path,
+        return_tokenizer=True)
+    train_dataset = alpaca_dataset(
+        tokenizer=tokenizer, path=training_args.dataset_name_or_path)
+    data_collator = alpaca_data_collator(return_hf_format=True)
+
+    # build trainer
+    trainer = Trainer(
+        model=model,
+        args=training_args,
+        train_dataset=train_dataset,
+        data_collator=data_collator)
+
+    # training
+    trainer.train()
+
+    trainer.save_state()
+    trainer.save_model(output_dir=training_args.output_dir)
+
+
+if __name__ == '__main__':
+    train()
diff --git a/data/xtuner/examples/huggingface_trainer/train_lora_hf.py b/data/xtuner/examples/huggingface_trainer/train_lora_hf.py
new file mode 100644
index 0000000000000000000000000000000000000000..60dd949c5862827aead014ecc60cfec75b582aff
--- /dev/null
+++ b/data/xtuner/examples/huggingface_trainer/train_lora_hf.py
@@ -0,0 +1,37 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import transformers
+from transformers import Trainer
+
+from xtuner.apis import DefaultTrainingArguments, build_lora_model
+from xtuner.apis.datasets import alpaca_data_collator, alpaca_dataset
+
+
+def train():
+    # get DefaultTrainingArguments, updated with any passed args
+    parser = transformers.HfArgumentParser(DefaultTrainingArguments)
+    training_args = parser.parse_args_into_dataclasses()[0]
+
+    # init model and dataset
+    model, tokenizer = build_lora_model(
+        model_name_or_path=training_args.model_name_or_path,
+        return_tokenizer=True)
+    train_dataset = alpaca_dataset(
+        tokenizer=tokenizer, path=training_args.dataset_name_or_path)
+    data_collator = alpaca_data_collator(return_hf_format=True)
+
+    # build trainer
+    trainer = Trainer(
+        model=model,
+        args=training_args,
+        train_dataset=train_dataset,
+        data_collator=data_collator)
+
+    # training
+    trainer.train()
+
+    trainer.save_state()
+    trainer.save_model(output_dir=training_args.output_dir)
+
+
+if __name__ == '__main__':
+    train()
diff --git a/data/xtuner/examples/huggingface_trainer/train_qlora_hf.py b/data/xtuner/examples/huggingface_trainer/train_qlora_hf.py
new file mode 100644
index 0000000000000000000000000000000000000000..2e8dbab71d59e7f6925fc7fde12814ed0b64c24a
--- /dev/null
+++ b/data/xtuner/examples/huggingface_trainer/train_qlora_hf.py
@@ -0,0 +1,37 @@
+# Copyright (c) OpenMMLab. All rights reserved.
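+# QLoRA fine-tuning via the HuggingFace `Trainer`; this is the script shown in
+# the Quick run section of README.md:
+#   python train_qlora_hf.py \
+#       --model_name_or_path internlm/internlm-7b \
+#       --dataset_name_or_path tatsu-lab/alpaca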
+import transformers
+from transformers import Trainer
+
+from xtuner.apis import DefaultTrainingArguments, build_qlora_model
+from xtuner.apis.datasets import alpaca_data_collator, alpaca_dataset
+
+
+def train():
+    # get DefaultTrainingArguments, updated with any passed args
+    parser = transformers.HfArgumentParser(DefaultTrainingArguments)
+    training_args = parser.parse_args_into_dataclasses()[0]
+
+    # init model and dataset
+    model, tokenizer = build_qlora_model(
+        model_name_or_path=training_args.model_name_or_path,
+        return_tokenizer=True)
+    train_dataset = alpaca_dataset(
+        tokenizer=tokenizer, path=training_args.dataset_name_or_path)
+    data_collator = alpaca_data_collator(return_hf_format=True)
+
+    # build trainer
+    trainer = Trainer(
+        model=model,
+        args=training_args,
+        train_dataset=train_dataset,
+        data_collator=data_collator)
+
+    # training
+    trainer.train()
+
+    trainer.save_state()
+    trainer.save_model(output_dir=training_args.output_dir)
+
+
+if __name__ == '__main__':
+    train()
diff --git a/data/xtuner/requirements.txt b/data/xtuner/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..d119b17d21924dc840db3a0de9d790e0d1bac522
--- /dev/null
+++ b/data/xtuner/requirements.txt
@@ -0,0 +1,3 @@
+-r requirements/runtime.txt
+-r requirements/deepspeed.txt
+-r requirements/modelscope.txt
diff --git a/data/xtuner/requirements/deepspeed.txt b/data/xtuner/requirements/deepspeed.txt
new file mode 100644
index 0000000000000000000000000000000000000000..d7f9c3c0d80190930c15233343c9c00869472eb7
--- /dev/null
+++ b/data/xtuner/requirements/deepspeed.txt
@@ -0,0 +1,3 @@
+# Minimum 0.12.3, see https://github.com/microsoft/DeepSpeed/pull/4587
+deepspeed>=0.12.3
+mpi4py-mpich
diff --git a/data/xtuner/requirements/docs.txt b/data/xtuner/requirements/docs.txt
new file mode 100644
index 0000000000000000000000000000000000000000..95b3a01909a1214deb2922a83381f0e8e1d5be64
--- /dev/null
+++ b/data/xtuner/requirements/docs.txt
@@ -0,0 +1,7 @@
+docutils
+myst-parser==2.0.0
+sphinx==6.2.1
+sphinx-argparse
+sphinx-book-theme==1.0.1
+sphinx-copybutton==0.5.2
+sphinx_markdown_tables
diff --git a/data/xtuner/requirements/modelscope.txt b/data/xtuner/requirements/modelscope.txt
new file mode 100644
index 0000000000000000000000000000000000000000..7f3264fc848c43f198543e7ff18b84e8451e393f
--- /dev/null
+++ b/data/xtuner/requirements/modelscope.txt
@@ -0,0 +1 @@
+modelscope
diff --git a/data/xtuner/requirements/runtime.txt b/data/xtuner/requirements/runtime.txt
new file mode 100644
index 0000000000000000000000000000000000000000..3a4d2f84e4a20644f0d8982365247d08ea6de08a
--- /dev/null
+++ b/data/xtuner/requirements/runtime.txt
@@ -0,0 +1,27 @@
+# Minimum 0.40.0.post4 to fix some 4-bit precision bugs
+bitsandbytes>=0.40.0.post4
+# Minimum 2.16.0 to fix some bugs, see https://github.com/huggingface/datasets/pull/6444
+datasets>=2.16.0
+einops
+# Minimum 0.1.2 to fix some bugs, see https://github.com/InternLM/lagent/pull/44
+lagent>=0.1.2
+# Minimum 0.10.3 to support distributed evaluation for MMBench
+# see https://github.com/open-mmlab/mmengine/pull/1469
+mmengine>=0.10.3
+openpyxl
+# Minimum 0.4.0 to support QLoRA, see https://github.com/huggingface/peft/pull/476
+peft>=0.4.0
+scikit-image
+scipy
+SentencePiece
+tiktoken
+torch
+torchvision
+# Minimum 4.36.0 to support `Cache` data structure used by KV Cache
+# Registering a causal mask in `LlamaModel` is not friendly for very large
+# `max_position_embeddings`. Refer to
+# https://github.com/huggingface/transformers/blob/v4.38.0/src/transformers/models/llama/modeling_llama.py#L921-L923
+# transformers >= 4.43.0 uses _flash_attention_forward rather than
+# self._flash_attention_forward to calculate the attn output, which breaks
+# backward compatibility
+transformers>=4.36.0,!=4.38.0,!=4.38.1,!=4.38.2,<=4.42.4
+transformers_stream_generator
diff --git a/data/xtuner/setup.cfg b/data/xtuner/setup.cfg
new file mode 100644
index 0000000000000000000000000000000000000000..44422e1efe10675c5a38be47e66a93bac113c291
--- /dev/null
+++ b/data/xtuner/setup.cfg
@@ -0,0 +1,16 @@
+[isort]
+line_length = 79
+multi_line_output = 0
+extra_standard_library = setuptools
+known_first_party = xtuner
+known_third_party = pytest,yaml
+no_lines_before = STDLIB,LOCALFOLDER
+default_section = THIRDPARTY
+
+[yapf]
+BASED_ON_STYLE = pep8
+BLANK_LINE_BEFORE_NESTED_CLASS_OR_DEF = true
+SPLIT_BEFORE_EXPRESSION_AFTER_OPENING_PAREN = true
+
+[codespell]
+ignore-words-list = nd, ba, warmup
diff --git a/data/xtuner/setup.py b/data/xtuner/setup.py
new file mode 100644
index 0000000000000000000000000000000000000000..7a95dfab4a46abae268df22c3e2af435b2bde4d1
--- /dev/null
+++ b/data/xtuner/setup.py
@@ -0,0 +1,137 @@
+#!/usr/bin/env python
+# Copyright (c) OpenMMLab. All rights reserved.
+from setuptools import find_packages, setup
+
+
+def readme():
+    with open('README.md', encoding='utf-8') as f:
+        content = f.read()
+    return content
+
+
+version_file = 'xtuner/version.py'
+
+
+def get_version():
+    with open(version_file) as f:
+        exec(compile(f.read(), version_file, 'exec'))
+    return locals()['__version__']
+
+
+def parse_requirements(fname='requirements.txt', with_version=True):
+    """Parse the package dependencies listed in a requirements file but strip
+    specific versioning information.
+
+    Args:
+        fname (str): path to requirements file
+        with_version (bool, default=True): if True, include version specs
+
+    Returns:
+        List[str]: list of requirements items
+
+    CommandLine:
+        python -c "import setup; print(setup.parse_requirements())"
+    """
+    import re
+    import sys
+    from os.path import exists
+    require_fpath = fname
+
+    def parse_line(line):
+        """Parse information from a line in a requirements text file."""
+        if line.startswith('-r '):
+            # Allow specifying requirements in other files
+            target = line.split(' ')[1]
+            for info in parse_require_file(target):
+                yield info
+        else:
+            info = {'line': line}
+            if line.startswith('-e '):
+                info['package'] = line.split('#egg=')[1]
+            elif '@git+' in line:
+                info['package'] = line
+            else:
+                # Remove versioning from the package
+                pat = '(' + '|'.join(['>=', '==', '>']) + ')'
+                parts = re.split(pat, line, maxsplit=1)
+                parts = [p.strip() for p in parts]
+
+                info['package'] = parts[0]
+                if len(parts) > 1:
+                    op, rest = parts[1:]
+                    if ';' in rest:
+                        # Handle platform specific dependencies
+                        # http://setuptools.readthedocs.io/en/latest/setuptools.html#declaring-platform-specific-dependencies
+                        version, platform_deps = map(str.strip,
+                                                     rest.split(';'))
+                        info['platform_deps'] = platform_deps
+                    else:
+                        version = rest
+                    info['version'] = (op, version)
+            yield info
+
+    def parse_require_file(fpath):
+        with open(fpath) as f:
+            for line in f.readlines():
+                line = line.strip()
+                if line and not line.startswith('#'):
+                    yield from parse_line(line)
+
+    def gen_packages_items():
+        if exists(require_fpath):
+            for info in parse_require_file(require_fpath):
+                parts = [info['package']]
+                if with_version and 'version' in info:
+                    parts.extend(info['version'])
+                if not sys.version.startswith('3.4'):
+                    # apparently package_deps are broken in 3.4
+                    platform_deps = info.get('platform_deps')
+                    if platform_deps is not None:
+                        parts.append(';' + platform_deps)
+                item = ''.join(parts)
+                yield item
+
+    packages = list(gen_packages_items())
+    return packages
+
+
+if __name__ == '__main__':
+    setup(
+        name='xtuner',
+        version=get_version(),
+        description=('An efficient, flexible and full-featured toolkit for '
+                     'fine-tuning large models'),
+        long_description=readme(),
+        long_description_content_type='text/markdown',
+        author='XTuner Contributors',
+        author_email='openmmlab@gmail.com',
+        keywords='large language model, parameter-efficient fine-tuning',
+        url='https://github.com/InternLM/xtuner',
+        packages=find_packages(),
+        include_package_data=True,
+        classifiers=[
+            'Development Status :: 4 - Beta',
+            'License :: OSI Approved :: Apache Software License',
+            'Operating System :: OS Independent',
+            'Programming Language :: Python :: 3',
+            'Programming Language :: Python :: 3.8',
+            'Programming Language :: Python :: 3.9',
+            'Programming Language :: Python :: 3.10',
+            'Topic :: Utilities',
+        ],
+        # Python maximum version <3.11, to support mpi4py-mpich
+        python_requires='>=3.8, <3.11',
+        license='Apache License 2.0',
+        install_requires=parse_requirements('requirements/runtime.txt'),
+        extras_require={
+            'all':
+            parse_requirements('requirements.txt'),
+            'deepspeed':
+            parse_requirements('requirements/runtime.txt') +
+            parse_requirements('requirements/deepspeed.txt'),
+            'modelscope':
+            parse_requirements('requirements/runtime.txt') +
+            parse_requirements('requirements/modelscope.txt'),
+        },
+        zip_safe=False,
+        entry_points={'console_scripts': ['xtuner = xtuner:cli']})
diff --git a/data/xtuner/xtuner/__init__.py b/data/xtuner/xtuner/__init__.py
new file mode 100644
index
0000000000000000000000000000000000000000..cb1d94302bdd08088746432918edccd3a306d874 --- /dev/null +++ b/data/xtuner/xtuner/__init__.py @@ -0,0 +1,25 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import os + +from mmengine.utils import digit_version + +from .entry_point import cli +from .version import __version__, version_info + +HF_CEPH_HUB = os.getenv('HF_CEPH_HUB', '') +HF_USE_CEPH = os.getenv('HF_USE_CEPH', 0) or HF_CEPH_HUB != '' +DS_CEPH_DIR = os.getenv('DS_CEPH_DIR', None) +if HF_USE_CEPH: + from .utils.fileio import (patch_hf_auto_from_pretrained, + patch_hf_save_pretrained) + patch_hf_auto_from_pretrained(HF_CEPH_HUB) + patch_hf_save_pretrained() + +if DS_CEPH_DIR: + from .utils.fileio import patch_deepspeed_engine + patch_deepspeed_engine() + +__all__ = [ + '__version__', 'version_info', 'digit_version', 'cli', 'HF_USE_CEPH', + 'DS_CEPH_DIR' +] diff --git a/data/xtuner/xtuner/apis/__init__.py b/data/xtuner/xtuner/apis/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f49d493789960175c39a59a0b62e0fae44513766 --- /dev/null +++ b/data/xtuner/xtuner/apis/__init__.py @@ -0,0 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .datasets import * # noqa: F401, F403 +from .model import * # noqa: F401, F403 +from .training_args import * # noqa: F401, F403 diff --git a/data/xtuner/xtuner/apis/datasets/__init__.py b/data/xtuner/xtuner/apis/datasets/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..4ff4fe4789522dd117c77fe74e1c381ead461e91 --- /dev/null +++ b/data/xtuner/xtuner/apis/datasets/__init__.py @@ -0,0 +1,37 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .alpaca import (alpaca_data_collator, alpaca_dataset, + alpaca_enzh_data_collator, alpaca_enzh_dataset, + alpaca_zh_data_collator, alpaca_zh_dataset) +from .arxiv import arxiv_data_collator, arxiv_dataset +from .code_alpaca import code_alpaca_data_collator, code_alpaca_dataset +from .colorist import colorist_data_collator, colorist_dataset +from .lawyer import (lawyer_crime_data_collator, lawyer_crime_dataset, + lawyer_data_collator, lawyer_dataset, + lawyer_reference_data_collator, lawyer_reference_dataset) +from .medical import medical_data_collator, medical_dataset +from .moss_003_sft import (moss_003_sft_data_collator, moss_003_sft_dataset, + moss_003_sft_no_plugins_data_collator, + moss_003_sft_no_plugins_dataset, + moss_003_sft_plugins_data_collator, + moss_003_sft_plugins_dataset) +from .oasst1 import oasst1_data_collator, oasst1_dataset +from .open_orca import openorca_data_collator, openorca_dataset +from .sql import sql_data_collator, sql_dataset +from .tiny_codes import tiny_codes_data_collator, tiny_codes_dataset +from .wizardlm import wizardlm_data_collator, wizardlm_dataset + +__all__ = [ + 'alpaca_data_collator', 'alpaca_dataset', 'alpaca_enzh_data_collator', + 'alpaca_enzh_dataset', 'alpaca_zh_data_collator', 'alpaca_zh_dataset', + 'arxiv_data_collator', 'arxiv_dataset', 'medical_data_collator', + 'medical_dataset', 'moss_003_sft_data_collator', 'moss_003_sft_dataset', + 'moss_003_sft_no_plugins_data_collator', 'moss_003_sft_no_plugins_dataset', + 'moss_003_sft_plugins_data_collator', 'moss_003_sft_plugins_dataset', + 'oasst1_data_collator', 'oasst1_dataset', 'openorca_data_collator', + 'openorca_dataset', 'lawyer_crime_dataset', 'lawyer_crime_data_collator', + 'lawyer_reference_dataset', 'lawyer_reference_data_collator', + 'lawyer_dataset', 'lawyer_data_collator', 'colorist_dataset', + 'colorist_data_collator', 'sql_dataset', 
'sql_data_collator', + 'code_alpaca_dataset', 'code_alpaca_data_collator', 'tiny_codes_dataset', + 'tiny_codes_data_collator', 'wizardlm_data_collator', 'wizardlm_dataset' +] diff --git a/data/xtuner/xtuner/apis/datasets/alpaca.py b/data/xtuner/xtuner/apis/datasets/alpaca.py new file mode 100644 index 0000000000000000000000000000000000000000..8e284a9375f8ae286083e29c1ba92549414caff5 --- /dev/null +++ b/data/xtuner/xtuner/apis/datasets/alpaca.py @@ -0,0 +1,92 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from functools import partial + +from datasets import load_dataset +from torch.utils.data import ConcatDataset + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + template_map_fn_factory) +from xtuner.utils import PROMPT_TEMPLATE + + +def alpaca_enzh_dataset(tokenizer, + path_en='tatsu-lab/alpaca', + path_zh='silk-road/alpaca-data-gpt4-chinese', + max_length=2048, + prompt_template=PROMPT_TEMPLATE.default, + remove_unused_columns=True, + pack_to_max_length=True): + alpaca = alpaca_dataset( + tokenizer, + path=path_en, + max_length=max_length, + prompt_template=prompt_template, + shuffle_before_pack=True, + remove_unused_columns=remove_unused_columns, + pack_to_max_length=pack_to_max_length) + alpaca_zh = alpaca_zh_dataset( + tokenizer, + path=path_zh, + max_length=max_length, + prompt_template=prompt_template, + shuffle_before_pack=True, + remove_unused_columns=remove_unused_columns, + pack_to_max_length=pack_to_max_length) + dataset = ConcatDataset([alpaca, alpaca_zh]) + return dataset + + +def alpaca_enzh_data_collator(return_hf_format=False): + return partial(default_collate_fn, return_hf_format=return_hf_format) + + +def alpaca_zh_dataset(tokenizer, + path='silk-road/alpaca-data-gpt4-chinese', + max_length=2048, + prompt_template=PROMPT_TEMPLATE.default, + remove_unused_columns=True, + pack_to_max_length=True): + template_map_fn = template_map_fn_factory(template=prompt_template) + dataset_org = load_dataset(path) + dataset = process_hf_dataset( + dataset=dataset_org, + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=template_map_fn, + remove_unused_columns=remove_unused_columns, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + + return dataset + + +def alpaca_zh_data_collator(return_hf_format=False): + return partial(default_collate_fn, return_hf_format=return_hf_format) + + +def alpaca_dataset(tokenizer, + path='tatsu-lab/alpaca', + max_length=2048, + prompt_template=PROMPT_TEMPLATE.default, + remove_unused_columns=True, + pack_to_max_length=True): + template_map_fn = template_map_fn_factory(template=prompt_template) + dataset_org = load_dataset(path) + dataset = process_hf_dataset( + dataset=dataset_org, + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=template_map_fn, + remove_unused_columns=remove_unused_columns, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + + return dataset + + +def alpaca_data_collator(return_hf_format=False): + return partial(default_collate_fn, return_hf_format=return_hf_format) diff --git a/data/xtuner/xtuner/apis/datasets/arxiv.py b/data/xtuner/xtuner/apis/datasets/arxiv.py new file mode 100644 index 0000000000000000000000000000000000000000..35521f3ea80b67fec779576a48de4779d59a8bb4 --- /dev/null +++ b/data/xtuner/xtuner/apis/datasets/arxiv.py @@ -0,0 +1,38 @@ +# Copyright (c) 
OpenMMLab. All rights reserved. +from functools import partial + +from datasets import load_dataset + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import arxiv_map_fn, template_map_fn_factory +from xtuner.utils import PROMPT_TEMPLATE + + +def arxiv_dataset(tokenizer, + data_file=None, + max_length=2048, + prompt_template=PROMPT_TEMPLATE.default, + remove_unused_columns=True, + pack_to_max_length=True): + template_map_fn = template_map_fn_factory(template=prompt_template) + # 1. Download data from https://kaggle.com/datasets/Cornell-University/arxiv # noqa: E501 + # 2. Process data with `./tools/data_preprocess/arxiv.py` + if data_file is None: + data_file = './data/arxiv_postprocess_csAIcsCLcsCV_20200101.json' + dataset_org = load_dataset(path='json', data_files=dict(train=data_file)) + dataset = process_hf_dataset( + dataset=dataset_org, + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=arxiv_map_fn, + template_map_fn=template_map_fn, + remove_unused_columns=remove_unused_columns, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + + return dataset + + +def arxiv_data_collator(return_hf_format=False): + return partial(default_collate_fn, return_hf_format=return_hf_format) diff --git a/data/xtuner/xtuner/apis/datasets/code_alpaca.py b/data/xtuner/xtuner/apis/datasets/code_alpaca.py new file mode 100644 index 0000000000000000000000000000000000000000..de3f94e24fb529932894143ee1a477ec1d06221e --- /dev/null +++ b/data/xtuner/xtuner/apis/datasets/code_alpaca.py @@ -0,0 +1,34 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from functools import partial + +from datasets import load_dataset + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import code_alpaca_map_fn, template_map_fn_factory +from xtuner.utils import PROMPT_TEMPLATE + + +def code_alpaca_dataset(tokenizer, + path='HuggingFaceH4/CodeAlpaca_20K', + max_length=2048, + prompt_template=PROMPT_TEMPLATE.default, + remove_unused_columns=True, + pack_to_max_length=True): + template_map_fn = template_map_fn_factory(template=prompt_template) + dataset_org = load_dataset(path) + dataset = process_hf_dataset( + dataset=dataset_org, + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=code_alpaca_map_fn, + template_map_fn=template_map_fn, + remove_unused_columns=remove_unused_columns, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + + return dataset + + +def code_alpaca_data_collator(return_hf_format=False): + return partial(default_collate_fn, return_hf_format=return_hf_format) diff --git a/data/xtuner/xtuner/apis/datasets/colorist.py b/data/xtuner/xtuner/apis/datasets/colorist.py new file mode 100644 index 0000000000000000000000000000000000000000..00400d09e62be767b026a170ee7c2aaad26e6f97 --- /dev/null +++ b/data/xtuner/xtuner/apis/datasets/colorist.py @@ -0,0 +1,34 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from functools import partial + +from datasets import load_dataset + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import colors_map_fn, template_map_fn_factory +from xtuner.utils import PROMPT_TEMPLATE + + +def colorist_dataset(tokenizer, + path='burkelibbey/colors', + max_length=2048, + prompt_template=PROMPT_TEMPLATE.default, + remove_unused_columns=True, + pack_to_max_length=True): + template_map_fn = template_map_fn_factory(template=prompt_template) + dataset_org = load_dataset(path) + dataset = process_hf_dataset( + dataset=dataset_org, + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=colors_map_fn, + template_map_fn=template_map_fn, + remove_unused_columns=remove_unused_columns, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + + return dataset + + +def colorist_data_collator(return_hf_format=False): + return partial(default_collate_fn, return_hf_format=return_hf_format) diff --git a/data/xtuner/xtuner/apis/datasets/lawyer.py b/data/xtuner/xtuner/apis/datasets/lawyer.py new file mode 100644 index 0000000000000000000000000000000000000000..287dc2827d7cdf6ab54649af2434b9e270b8f155 --- /dev/null +++ b/data/xtuner/xtuner/apis/datasets/lawyer.py @@ -0,0 +1,97 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from functools import partial + +from datasets import load_dataset +from torch.utils.data import ConcatDataset + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (crime_kg_assitant_map_fn, + law_reference_map_fn, + template_map_fn_factory) +from xtuner.utils import PROMPT_TEMPLATE + + +def lawyer_dataset(tokenizer, + crime_data_file=None, + reference_data_file=None, + max_length=2048, + prompt_template=PROMPT_TEMPLATE.default, + remove_unused_columns=True, + pack_to_max_length=True): + crime_dataset = lawyer_crime_dataset( + tokenizer, + data_file=crime_data_file, + max_length=max_length, + prompt_template=prompt_template, + remove_unused_columns=remove_unused_columns, + pack_to_max_length=pack_to_max_length) + reference_dataset = lawyer_reference_dataset( + tokenizer, + data_file=reference_data_file, + max_length=max_length, + prompt_template=prompt_template, + remove_unused_columns=remove_unused_columns, + pack_to_max_length=pack_to_max_length) + dataset = ConcatDataset([crime_dataset, reference_dataset]) + return dataset + + +def lawyer_data_collator(return_hf_format=False): + return partial(default_collate_fn, return_hf_format=return_hf_format) + + +def lawyer_crime_dataset(tokenizer, + data_file=None, + max_length=2048, + prompt_template=PROMPT_TEMPLATE.default, + remove_unused_columns=True, + pack_to_max_length=True): + template_map_fn = template_map_fn_factory(template=prompt_template) + # Download data from https://github.com/LiuHC0428/LAW-GPT # noqa: E501 + if data_file is None: + data_file = './data/law/CrimeKgAssitant清洗后_52k.json' + dataset_org = load_dataset(path='json', data_files=dict(train=data_file)) + dataset = process_hf_dataset( + dataset=dataset_org, + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=crime_kg_assitant_map_fn, + template_map_fn=template_map_fn, + remove_unused_columns=remove_unused_columns, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + + return dataset + + +def lawyer_crime_data_collator(return_hf_format=False): + return partial(default_collate_fn, return_hf_format=return_hf_format) + + +def 
lawyer_reference_dataset(tokenizer, + data_file=None, + max_length=2048, + prompt_template=PROMPT_TEMPLATE.default, + remove_unused_columns=True, + pack_to_max_length=True): + template_map_fn = template_map_fn_factory(template=prompt_template) + # Download data from https://github.com/LiuHC0428/LAW-GPT # noqa: E501 + if data_file is None: + data_file = './data/law/训练数据_带法律依据_92k.json' + dataset_org = load_dataset(path='json', data_files=dict(train=data_file)) + dataset = process_hf_dataset( + dataset=dataset_org, + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=law_reference_map_fn, + template_map_fn=template_map_fn, + remove_unused_columns=remove_unused_columns, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + + return dataset + + +def lawyer_reference_data_collator(return_hf_format=False): + return partial(default_collate_fn, return_hf_format=return_hf_format) diff --git a/data/xtuner/xtuner/apis/datasets/medical.py b/data/xtuner/xtuner/apis/datasets/medical.py new file mode 100644 index 0000000000000000000000000000000000000000..cd430b8d24a7cc007be5d1677273a9172071d72b --- /dev/null +++ b/data/xtuner/xtuner/apis/datasets/medical.py @@ -0,0 +1,34 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from functools import partial + +from datasets import load_dataset + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import medical_map_fn, template_map_fn_factory +from xtuner.utils import PROMPT_TEMPLATE + + +def medical_dataset(tokenizer, + path='shibing624/medical', + max_length=2048, + prompt_template=PROMPT_TEMPLATE.default, + remove_unused_columns=False, + pack_to_max_length=True): + template_map_fn = template_map_fn_factory(template=prompt_template) + dataset_org = load_dataset(path) + dataset = process_hf_dataset( + dataset=dataset_org, + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=medical_map_fn, + template_map_fn=template_map_fn, + remove_unused_columns=remove_unused_columns, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + + return dataset + + +def medical_data_collator(return_hf_format=False): + return partial(default_collate_fn, return_hf_format=return_hf_format) diff --git a/data/xtuner/xtuner/apis/datasets/moss_003_sft.py b/data/xtuner/xtuner/apis/datasets/moss_003_sft.py new file mode 100644 index 0000000000000000000000000000000000000000..7952238cf00132b142a8a0877d0e104424a49bcc --- /dev/null +++ b/data/xtuner/xtuner/apis/datasets/moss_003_sft.py @@ -0,0 +1,72 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
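+# Usage sketch (not part of the upstream module; the tokenizer name, bot_name
+# and batch settings are illustrative assumptions). The builder below
+# concatenates the plugins and no-plugins MOSS splits into a single dataset:
+#
+#     from torch.utils.data import DataLoader
+#     from transformers import AutoTokenizer
+#
+#     tok = AutoTokenizer.from_pretrained(
+#         'internlm/internlm-7b', trust_remote_code=True)
+#     ds = moss_003_sft_dataset(tok, bot_name='MOSS', max_length=2048)
+#     loader = DataLoader(ds, batch_size=1,
+#                         collate_fn=moss_003_sft_data_collator())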
+from functools import partial + +from torch.utils.data import ConcatDataset + +from xtuner.dataset import MOSSSFTDataset +from xtuner.dataset.collate_fns import default_collate_fn + + +def moss_003_sft_dataset(tokenizer, + plugins_data_file=None, + no_plugins_data_file=None, + bot_name=None, + max_length=2048): + plugins = moss_003_sft_plugins_dataset( + tokenizer, + data_file=plugins_data_file, + bot_name=bot_name, + max_length=max_length) + no_plugins = moss_003_sft_no_plugins_dataset( + tokenizer, + data_file=no_plugins_data_file, + bot_name=bot_name, + max_length=max_length) + dataset = ConcatDataset([plugins, no_plugins]) + return dataset + + +def moss_003_sft_data_collator(return_hf_format=False): + return partial(default_collate_fn, return_hf_format=return_hf_format) + + +def moss_003_sft_no_plugins_dataset(tokenizer, + data_file=None, + bot_name=None, + max_length=2048): + + # Download data from https://huggingface.co/datasets/fnlp/moss-003-sft-data + if data_file is None: + data_file = './data/moss-003-sft-no-tools.jsonl' + dataset = MOSSSFTDataset( + data_file=data_file, + bot_name=bot_name, + tokenizer=tokenizer, + max_length=max_length) + + return dataset + + +def moss_003_sft_no_plugins_data_collator(return_hf_format=False): + return partial(default_collate_fn, return_hf_format=return_hf_format) + + +def moss_003_sft_plugins_dataset(tokenizer, + data_file=None, + bot_name=None, + max_length=2048): + + # Download data from https://huggingface.co/datasets/fnlp/moss-003-sft-data + if data_file is None: + data_file = './data/conversations_with_tools_with_inner_instruction_no_text2image_train_all_random_meta0.5_0.1_0.01_moss_0709.jsonl' # noqa: E501 + dataset = MOSSSFTDataset( + data_file=data_file, + bot_name=bot_name, + tokenizer=tokenizer, + max_length=max_length) + + return dataset + + +def moss_003_sft_plugins_data_collator(return_hf_format=False): + return partial(default_collate_fn, return_hf_format=return_hf_format) diff --git a/data/xtuner/xtuner/apis/datasets/oasst1.py b/data/xtuner/xtuner/apis/datasets/oasst1.py new file mode 100644 index 0000000000000000000000000000000000000000..0b877239622ed68bc886efcf13a2936772005118 --- /dev/null +++ b/data/xtuner/xtuner/apis/datasets/oasst1.py @@ -0,0 +1,34 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
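+# Usage sketch (illustrative; the tokenizer name is an assumption). Note that
+# this builder keeps unused dataset columns (remove_unused_columns=False),
+# unlike e.g. the SQL and OpenOrca builders in this package:
+#
+#     from transformers import AutoTokenizer
+#
+#     tok = AutoTokenizer.from_pretrained(
+#         'internlm/internlm-7b', trust_remote_code=True)
+#     ds = oasst1_dataset(tok, max_length=2048, pack_to_max_length=True)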
+from functools import partial + +from datasets import load_dataset + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.utils import PROMPT_TEMPLATE + + +def oasst1_dataset(tokenizer, + path='timdettmers/openassistant-guanaco', + max_length=2048, + prompt_template=PROMPT_TEMPLATE.default, + remove_unused_columns=False, + pack_to_max_length=True): + template_map_fn = template_map_fn_factory(template=prompt_template) + dataset_org = load_dataset(path) + dataset = process_hf_dataset( + dataset=dataset_org, + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=template_map_fn, + remove_unused_columns=remove_unused_columns, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + + return dataset + + +def oasst1_data_collator(return_hf_format=False): + return partial(default_collate_fn, return_hf_format=return_hf_format) diff --git a/data/xtuner/xtuner/apis/datasets/open_orca.py b/data/xtuner/xtuner/apis/datasets/open_orca.py new file mode 100644 index 0000000000000000000000000000000000000000..9e52d50e2271005ef87ac1952a13fe391b77a207 --- /dev/null +++ b/data/xtuner/xtuner/apis/datasets/open_orca.py @@ -0,0 +1,34 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from functools import partial + +from datasets import load_dataset + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openorca_map_fn, template_map_fn_factory +from xtuner.utils import PROMPT_TEMPLATE + + +def openorca_dataset(tokenizer, + path='Open-Orca/OpenOrca', + max_length=2048, + prompt_template=PROMPT_TEMPLATE.default, + remove_unused_columns=True, + pack_to_max_length=True): + template_map_fn = template_map_fn_factory(template=prompt_template) + dataset_org = load_dataset(path) + dataset = process_hf_dataset( + dataset=dataset_org, + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openorca_map_fn, + template_map_fn=template_map_fn, + remove_unused_columns=remove_unused_columns, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + + return dataset + + +def openorca_data_collator(return_hf_format=False): + return partial(default_collate_fn, return_hf_format=return_hf_format) diff --git a/data/xtuner/xtuner/apis/datasets/sql.py b/data/xtuner/xtuner/apis/datasets/sql.py new file mode 100644 index 0000000000000000000000000000000000000000..fed725ee05707fe455b7cfcf4cc8bf1621f32696 --- /dev/null +++ b/data/xtuner/xtuner/apis/datasets/sql.py @@ -0,0 +1,34 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
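+# Usage sketch (illustrative): each *_data_collator in this package is just a
+# functools.partial over default_collate_fn, so it can be handed directly to
+# a DataLoader; return_hf_format toggles between xtuner's wrapped batch format
+# and a plain HuggingFace-style batch:
+#
+#     from torch.utils.data import DataLoader
+#
+#     ds = sql_dataset(tok)  # `tok` as in the sketches above
+#     loader = DataLoader(ds, batch_size=1,
+#                         collate_fn=sql_data_collator(return_hf_format=False))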
+from functools import partial + +from datasets import load_dataset + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import sql_map_fn, template_map_fn_factory +from xtuner.utils import PROMPT_TEMPLATE + + +def sql_dataset(tokenizer, + path='b-mc2/sql-create-context', + max_length=2048, + prompt_template=PROMPT_TEMPLATE.default, + remove_unused_columns=True, + pack_to_max_length=True): + template_map_fn = template_map_fn_factory(template=prompt_template) + dataset_org = load_dataset(path) + dataset = process_hf_dataset( + dataset=dataset_org, + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=sql_map_fn, + template_map_fn=template_map_fn, + remove_unused_columns=remove_unused_columns, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + + return dataset + + +def sql_data_collator(return_hf_format=False): + return partial(default_collate_fn, return_hf_format=return_hf_format) diff --git a/data/xtuner/xtuner/apis/datasets/tiny_codes.py b/data/xtuner/xtuner/apis/datasets/tiny_codes.py new file mode 100644 index 0000000000000000000000000000000000000000..286d65e4f6e1e13b831e52f15ad98fc072a72719 --- /dev/null +++ b/data/xtuner/xtuner/apis/datasets/tiny_codes.py @@ -0,0 +1,34 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from functools import partial + +from datasets import load_dataset + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import template_map_fn_factory, tiny_codes_map_fn +from xtuner.utils import PROMPT_TEMPLATE + + +def tiny_codes_dataset(tokenizer, + path='nampdn-ai/tiny-codes', + max_length=2048, + prompt_template=PROMPT_TEMPLATE.default, + remove_unused_columns=True, + pack_to_max_length=True): + template_map_fn = template_map_fn_factory(template=prompt_template) + dataset_org = load_dataset(path) + dataset = process_hf_dataset( + dataset=dataset_org, + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=tiny_codes_map_fn, + template_map_fn=template_map_fn, + remove_unused_columns=remove_unused_columns, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + + return dataset + + +def tiny_codes_data_collator(return_hf_format=False): + return partial(default_collate_fn, return_hf_format=return_hf_format) diff --git a/data/xtuner/xtuner/apis/datasets/wizardlm.py b/data/xtuner/xtuner/apis/datasets/wizardlm.py new file mode 100644 index 0000000000000000000000000000000000000000..b5a084271075da12577fd0560b8572e9cd0eeb20 --- /dev/null +++ b/data/xtuner/xtuner/apis/datasets/wizardlm.py @@ -0,0 +1,34 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
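+# Usage sketch (illustrative): sample packing can be disabled when one sample
+# per sequence is preferred, at the cost of more padding per batch:
+#
+#     ds = wizardlm_dataset(tok,  # `tok` as in the sketches above
+#                           max_length=2048,
+#                           pack_to_max_length=False)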
+from functools import partial + +from datasets import load_dataset + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import template_map_fn_factory, wizardlm_map_fn +from xtuner.utils import PROMPT_TEMPLATE + + +def wizardlm_dataset(tokenizer, + path='WizardLM/WizardLM_evol_instruct_V2_196k', + max_length=2048, + prompt_template=PROMPT_TEMPLATE.default, + remove_unused_columns=False, + pack_to_max_length=True): + template_map_fn = template_map_fn_factory(template=prompt_template) + dataset_org = load_dataset(path) + dataset = process_hf_dataset( + dataset=dataset_org, + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=wizardlm_map_fn, + template_map_fn=template_map_fn, + remove_unused_columns=remove_unused_columns, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + + return dataset + + +def wizardlm_data_collator(return_hf_format=False): + return partial(default_collate_fn, return_hf_format=return_hf_format) diff --git a/data/xtuner/xtuner/apis/model.py b/data/xtuner/xtuner/apis/model.py new file mode 100644 index 0000000000000000000000000000000000000000..efd9370df19a1b258fa7c93ef31284fff42dd589 --- /dev/null +++ b/data/xtuner/xtuner/apis/model.py @@ -0,0 +1,89 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from peft import LoraConfig +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.model import SupervisedFinetune + +__all__ = ['build_model', 'build_lora_model', 'build_qlora_model'] + + +def build_qlora_model(model_name_or_path, + quantization_config=None, + lora_config=None, + return_tokenizer=True): + + if quantization_config is None: + quantization_config = BitsAndBytesConfig( + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4') + if lora_config is None: + lora_config = LoraConfig( + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM') + + llm = AutoModelForCausalLM.from_pretrained( + model_name_or_path, + torch_dtype=torch.float16, + trust_remote_code=True, + quantization_config=quantization_config) + + model = SupervisedFinetune(llm, lora=lora_config) + + if return_tokenizer: + tokenizer = AutoTokenizer.from_pretrained( + model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True) + return model.llm, tokenizer + else: + return model.llm + + +def build_lora_model(model_name_or_path, + lora_config=None, + return_tokenizer=True): + if lora_config is None: + lora_config = LoraConfig( + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM') + + llm = AutoModelForCausalLM.from_pretrained( + model_name_or_path, torch_dtype=torch.float16, trust_remote_code=True) + + model = SupervisedFinetune(llm, lora=lora_config) + + if return_tokenizer: + tokenizer = AutoTokenizer.from_pretrained( + model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True) + return model.llm, tokenizer + else: + return model.llm + + +def build_model(model_name_or_path, return_tokenizer=True): + model = AutoModelForCausalLM.from_pretrained( + model_name_or_path, torch_dtype=torch.float16, trust_remote_code=True) + + if return_tokenizer: + tokenizer = AutoTokenizer.from_pretrained( + model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True) + return model, tokenizer + else: + 
return model diff --git a/data/xtuner/xtuner/apis/training_args.py b/data/xtuner/xtuner/apis/training_args.py new file mode 100644 index 0000000000000000000000000000000000000000..b0f65445c2e273e43244682e035e8e0a729bdd31 --- /dev/null +++ b/data/xtuner/xtuner/apis/training_args.py @@ -0,0 +1,61 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from dataclasses import dataclass, field +from typing import Union + +from transformers import TrainingArguments +from transformers.trainer_utils import IntervalStrategy, SchedulerType + +__all__ = ['DefaultTrainingArguments'] + + +@dataclass +class DefaultTrainingArguments(TrainingArguments): + # custom + model_name_or_path: str = field( + default=None, + metadata={'help': 'model name or path.'}, + ) + dataset_name_or_path: str = field( + default=None, + metadata={'help': 'dataset name or path.'}, + ) + + # huggingface + default_output_dir = './work_dirs' + default_do_train = True + default_per_device_train_batch_size = 1 + default_learning_rate = 2e-5 + default_save_strategy = 'epoch' + default_lr_scheduler_type = 'cosine' + default_logging_steps = 5 + + output_dir: str = field( + default=default_output_dir, + metadata={ + 'help': ('The output directory where the model predictions and ' + 'checkpoints will be written.') + }) + do_train: bool = field( + default=default_do_train, + metadata={'help': 'Whether to run training.'}) + per_device_train_batch_size: int = field( + default=default_per_device_train_batch_size, + metadata={'help': 'Batch size per GPU/TPU core/CPU for training.'}) + learning_rate: float = field( + default=default_learning_rate, + metadata={'help': 'The initial learning rate for AdamW.'}) + save_strategy: Union[IntervalStrategy, str] = field( + default=default_save_strategy, + metadata={'help': 'The checkpoint save strategy to use.'}, + ) + lr_scheduler_type: Union[SchedulerType, str] = field( + default=default_lr_scheduler_type, + metadata={'help': 'The scheduler type to use.'}, + ) + logging_steps: float = field( + default=default_logging_steps, + metadata={ + 'help': ('Log every X updates steps. Should be an integer or a ' + 'float in range `[0,1)`. If smaller than 1, will be ' + 'interpreted as ratio of total training steps.') + }) diff --git a/data/xtuner/xtuner/configs/__init__.py b/data/xtuner/xtuner/configs/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..98ed1600e47d97b3cb902cd7e45d6ff0318da100 --- /dev/null +++ b/data/xtuner/xtuner/configs/__init__.py @@ -0,0 +1,19 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import os + + +def get_cfgs_name_path(): + path = os.path.dirname(__file__) + mapping = {} + for root, dirs, files in os.walk(path): + for file_ in files: + if file_.endswith( + ('.py', '.json') + ) and not file_.startswith('.') and not file_.startswith('_'): + mapping[os.path.splitext(file_)[0]] = os.path.join(root, file_) + return mapping + + +cfgs_name_path = get_cfgs_name_path() + +__all__ = ['cfgs_name_path'] diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..a2f74e66271886a9dc9152983c252e6fd4124481 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_alpaca_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
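+# This file follows the mmengine python-config convention used by xtuner:
+# every dict(type=..., ...) below is a lazily-built object that the runner
+# instantiates at launch, so editing a value here is all that is needed to
+# change the run. A typical launch looks like this (sketch; the deepspeed
+# flag is optional):
+#
+#     xtuner train baichuan2_13b_base_qlora_alpaca_e3 --deepspeed deepspeed_zero2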
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-13B-Base' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=DefaultSampler, shuffle=True), + 
collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_alpaca_enzh_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_alpaca_enzh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..3bc341cb326e7fd70e850e4205d2b62878a0891b --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_alpaca_enzh_e3.py @@ -0,0 +1,229 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
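+# Same recipe as the English-only Alpaca config, except PART 3 merges the
+# English and Chinese Alpaca sets via ConcatDataset. With the defaults below,
+# the effective global batch size works out to
+#     batch_size * accumulative_counts * num_gpus = 1 * 16 * 1 = 16
+# on a single GPU (hedged arithmetic; scale by the GPU count otherwise).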
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-13B-Base' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + 
dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_alpaca_enzh_oasst1_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_alpaca_enzh_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..d97e8dce150cbe027950d3164e4c80306216b2f0 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_alpaca_enzh_oasst1_e3.py @@ -0,0 +1,244 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + oasst1_map_fn, template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-13B-Base' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +oasst1_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + 
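+# The llm sub-config below loads the 13B base weights in 4-bit NF4 with double
+# quantization and fp16 compute, i.e. QLoRA-style loading. As rough, hedged
+# arithmetic: 13e9 parameters at ~0.5 byte each is on the order of 6.5 GB of
+# weight memory before activations, LoRA weights and optimizer state, which is
+# what makes single-GPU fine-tuning of a 13B model practical here.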
+model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +oasst1 = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=oasst1_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[alpaca_en, alpaca_zh, oasst1]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue 
periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_alpaca_zh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..018f1a73f065d0e8c2f4db5e12bfb6d5daa52a6d --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_alpaca_zh_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
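+# Chinese-only variant of the Alpaca recipe. After training, the saved .pth
+# checkpoint can be converted into a HuggingFace-format LoRA adapter folder
+# (sketch; both paths are placeholders):
+#
+#     xtuner convert pth_to_hf baichuan2_13b_base_qlora_alpaca_zh_e3 \
+#         ${PTH_FILE} ${SAVE_DIR}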
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_zh_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-13B-Base' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_zh, + 
sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_arxiv_gentitle_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_arxiv_gentitle_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..6102ea24299369b225fd54364f0d0be15cd9ede5 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_arxiv_gentitle_e3.py @@ -0,0 +1,247 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
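+# Unlike the configs above, this recipe expects a local JSON file produced
+# from the Kaggle arXiv dump; the authoritative steps are restated in PART 1
+# below (sketch):
+#
+#     # 1. download https://kaggle.com/datasets/Cornell-University/arxiv
+#     # 2. xtuner preprocess arxiv ${DOWNLOADED_DATA} ./data/arxiv_data.json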
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import arxiv_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-13B-Base' +use_varlen_attn = False + +# Data +# 1. Download data from https://kaggle.com/datasets/Cornell-University/arxiv +# 2. Process data by `xtuner preprocess arxiv ${DOWNLOADED_DATA} ./data/arxiv_data.json [optional arguments]` # noqa: E501 +data_path = './data/arxiv_data.json' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.arxiv_gentile +evaluation_inputs = [ + ('We present InternLM, a multilingual foundational language ' + 'model with 104B parameters. InternLM is pre-trained on a large ' + 'corpora with 1.6T tokens with a multi-phase progressive ' + 'process, and then fine-tuned to align with human preferences. ' + 'We also developed a training system called Uniscale-LLM for ' + 'efficient large language model training. The evaluation on a ' + 'number of benchmarks shows that InternLM achieves ' + 'state-of-the-art performance in multiple aspects, including ' + 'knowledge understanding, reading comprehension, mathematics, ' + 'and coding. With such well-rounded capabilities, InternLM ' + 'achieves outstanding performances on comprehensive exams, ' + 'including MMLU, AGIEval, C-Eval and GAOKAO-Bench, without ' + 'resorting to external tools. On these benchmarks, InternLM ' + 'not only significantly outperforms open-source models, but ' + 'also obtains superior performance compared to ChatGPT. Also, ' + 'InternLM demonstrates excellent capability of understanding ' + 'Chinese language and Chinese culture, which makes it a ' + 'suitable foundation model to support Chinese-oriented language ' + 'applications. 
This manuscript gives a detailed study of ' + 'our results, with benchmarks and examples across a diverse ' + 'set of knowledge domains and tasks.'), + ('In this work, we develop and release Llama 2, a collection of ' + 'pretrained and fine-tuned large language models (LLMs) ranging ' + 'in scale from 7 billion to 70 billion parameters.\nOur ' + 'fine-tuned LLMs, called LLAMA 2-CHAT, are optimized for ' + 'dialogue use cases. Our models outperform open-source chat ' + 'models on most benchmarks we tested, and based on our human ' + 'evaluations for helpfulness and safety, may be a suitable ' + 'substitute for closedsource models. We provide a detailed ' + 'description of our approach to fine-tuning and safety ' + 'improvements of LLAMA 2-CHAT in order to enable the community ' + 'to build on our work and contribute to the responsible ' + 'development of LLMs.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=arxiv_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + 
by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_code_alpaca_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_code_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..438aee88537aa555012466f0ce38feac5af4fc34 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_code_alpaca_e3.py @@ -0,0 +1,216 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
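+# Code-generation variant: it swaps in the coder system template and runs the
+# chat evaluation every 100 iterations instead of 500. Chatting with the
+# trained adapter might look like this (sketch; flags and the adapter path
+# are illustrative assumptions):
+#
+#     xtuner chat baichuan-inc/Baichuan2-13B-Base \
+#         --adapter ${ADAPTER_DIR} --prompt-template default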
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import code_alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-13B-Base' +use_varlen_attn = False + +# Data +data_path = 'HuggingFaceH4/CodeAlpaca_20K' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 100 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=code_alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + 
    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_colorist_e5.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_colorist_e5.py new file mode 100644 index 0000000000000000000000000000000000000000..74452c47054afecd3b95aa4ec2f01d66abf8f9a6 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_colorist_e5.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import colors_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-13B-Base' +use_varlen_attn = False + +# Data +data_path = 'burkelibbey/colors' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 5 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 200 +SYSTEM = SYSTEM_TEMPLATE.colorist +evaluation_inputs = [ + '请给我一个像天空一样清澈透明的蓝色。', 'Please give me a clear blue like the sky.' 
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=colors_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. 
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_lawyer_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_lawyer_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9adfc6a2f52bb88d14f3a74727ed9a03ad7203a
--- /dev/null
+++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_lawyer_e3.py
@@ -0,0 +1,236 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import ConcatDataset, process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import (crime_kg_assitant_map_fn,
+                                    law_reference_map_fn,
+                                    template_map_fn_factory)
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-13B-Base'
+use_varlen_attn = False
+
+# Data
+# download data from https://github.com/LiuHC0428/LAW-GPT
+crime_kg_assitant_path = './data/CrimeKgAssitant清洗后_52k.json'
+law_reference_data_path = './data/训练数据_带法律依据_92k.json'
+prompt_template = PROMPT_TEMPLATE.default
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = SYSTEM_TEMPLATE.lawyer
+evaluation_inputs = ['请问离婚需要准备什么材料?', '销售鳄鱼皮包违法吗?'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +crime_kg_assitant = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=crime_kg_assitant_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=crime_kg_assitant_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +law_reference_data = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=law_reference_data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=law_reference_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[crime_kg_assitant, law_reference_data]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + 
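A quick sanity check on the two-phase schedule above: with warmup_ratio = 0.03 and max_epochs = 3, the LinearLR phase covers the first 0.09 epochs and CosineAnnealingLR the remainder, while convert_to_iter_based=True has mmengine translate those epoch fractions into iteration counts at runtime. A minimal sketch of the arithmetic, assuming a purely illustrative 1000 optimizer steps per epoch:

warmup_ratio, max_epochs = 0.03, 3
steps_per_epoch = 1000  # assumed for illustration; the real value depends on
                        # dataset size, batch_size and accumulative_counts
warmup_iters = warmup_ratio * max_epochs * steps_per_epoch  # 90.0 warmup iterations
cosine_iters = max_epochs * steps_per_epoch - warmup_iters  # 2910.0 cosine-decay iterations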
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_oasst1_512_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_oasst1_512_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..407eda99ca163f1d3fedb87228e44bb09c8af78b
--- /dev/null
+++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_oasst1_512_e3.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-13B-Base' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.default +max_length = 512 +pack_to_max_length = False + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + 
    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_oasst1_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_oasst1_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..5ae05ffc0b2dad0feed6f735a4d2161e84831f0d
--- /dev/null
+++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_oasst1_e3.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-13B-Base' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + 
    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_open_platypus_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_open_platypus_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..549abb03d220e1bbc161e0b3927c5546e8088cc5
--- /dev/null
+++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_open_platypus_e3.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-13B-Base' +use_varlen_attn = False + +# Data +data_path = 'garage-bAInd/Open-Platypus' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), 
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_sql_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_sql_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..316bd1cff408963d43d0968ec4bb95ef1a8929e9
--- /dev/null
+++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_base/baichuan2_13b_base_qlora_sql_e3.py
@@ -0,0 +1,216 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import sql_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-13B-Base' +use_varlen_attn = False + +# Data +data_path = 'b-mc2/sql-create-context' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.sql +evaluation_inputs = [ + ('CREATE TABLE station (name VARCHAR, lat VARCHAR, city VARCHAR)\n' + 'Find the name, latitude, and city of stations with latitude ' + 'above 50.'), + ('CREATE TABLE weather (zip_code VARCHAR, mean_visibility_miles ' + 'INTEGER)\n找到mean_visibility_miles最大的zip_code。') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=sql_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + 
    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_chat/baichuan2_13b_chat_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_chat/baichuan2_13b_chat_qlora_alpaca_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..e3ba1b1b289e5ea56f374e239b4dd26a4e495a2b
--- /dev/null
+++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_chat/baichuan2_13b_chat_qlora_alpaca_e3.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-13B-Chat'
+use_varlen_attn = False
+
+# Data
+alpaca_en_path = 'tatsu-lab/alpaca'
+prompt_template = PROMPT_TEMPLATE.baichuan2_chat
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = SYSTEM_TEMPLATE.alpaca
+evaluation_inputs = [
+    '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai'
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                    PART 3  Dataset & Dataloader                     #
+#######################################################################
+alpaca_en = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=alpaca_en_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=alpaca_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=alpaca_en,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_chat/baichuan2_13b_chat_qlora_alpaca_enzh_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_chat/baichuan2_13b_chat_qlora_alpaca_enzh_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..8d81f97f59e880ce10d0e67825128a64bff72bcc
--- /dev/null
+++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_chat/baichuan2_13b_chat_qlora_alpaca_enzh_e3.py
@@ -0,0 +1,229 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-13B-Chat' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.baichuan2_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + 
    dataset=dict(type=load_dataset, path=alpaca_zh_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=alpaca_zh_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh])
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_chat/baichuan2_13b_chat_qlora_alpaca_enzh_oasst1_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_chat/baichuan2_13b_chat_qlora_alpaca_enzh_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..8e64edb7524350ab7af39f5cccc368ca7adbfa01 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_chat/baichuan2_13b_chat_qlora_alpaca_enzh_oasst1_e3.py @@ -0,0 +1,244 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + oasst1_map_fn, template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-13B-Chat' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +oasst1_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.baichuan2_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') 
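+
+# The `model` dict below wires the QLoRA pieces together: the base LLM is
+# loaded through BitsAndBytesConfig in 4-bit NF4 (double quantization,
+# fp16 compute dtype), and a LoRA adapter (r=64, lora_alpha=16,
+# lora_dropout=0.1) is attached on top, so only the adapter weights
+# receive gradients while the quantized base model stays frozen.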
+ +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +oasst1 = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=oasst1_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[alpaca_en, alpaca_zh, oasst1]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue 
periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_chat/baichuan2_13b_chat_qlora_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_chat/baichuan2_13b_chat_qlora_alpaca_zh_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..532eb062c17f4f5f03f295599be66eb12cf510dd
--- /dev/null
+++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_chat/baichuan2_13b_chat_qlora_alpaca_zh_e3.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
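+# NOTE: configs like this one are typically launched through the xtuner
+# CLI, e.g. `xtuner train baichuan2_13b_chat_qlora_alpaca_zh_e3.py`; the
+# extra `--deepspeed deepspeed_zero2` flag is a common memory-saving
+# option in xtuner, mentioned here as general usage rather than anything
+# this file itself prescribes.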
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_zh_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-13B-Chat' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +prompt_template = PROMPT_TEMPLATE.baichuan2_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_zh, + 
    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_chat/baichuan2_13b_chat_qlora_code_alpaca_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_chat/baichuan2_13b_chat_qlora_code_alpaca_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..2937a31b8d157f821d162b30d43ace1f491ebacc
--- /dev/null
+++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_chat/baichuan2_13b_chat_qlora_code_alpaca_e3.py
@@ -0,0 +1,216 @@
+# Copyright (c) OpenMMLab. All rights reserved.
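+# NOTE: with batch_size = 1 and accumulative_counts = 16 in this config,
+# AmpOptimWrapper accumulates gradients over 16 forward passes, so each
+# optimizer step effectively sees 1 * 16 = 16 packed 2048-token sequences
+# per device.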
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import code_alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-13B-Chat' +use_varlen_attn = False + +# Data +data_path = 'HuggingFaceH4/CodeAlpaca_20K' +prompt_template = PROMPT_TEMPLATE.baichuan2_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 100 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=code_alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + 
    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_chat/baichuan2_13b_chat_qlora_lawyer_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_chat/baichuan2_13b_chat_qlora_lawyer_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..44e6c91ecd64107068a91a8e5dca02be27feda93 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_chat/baichuan2_13b_chat_qlora_lawyer_e3.py @@ -0,0 +1,236 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (crime_kg_assitant_map_fn, + law_reference_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-13B-Chat' +use_varlen_attn = False + +# Data +# download data from https://github.com/LiuHC0428/LAW-GPT +crime_kg_assitant_path = './data/CrimeKgAssitant清洗后_52k.json' +law_reference_data_path = './data/训练数据_带法律依据_92k.json' +prompt_template = PROMPT_TEMPLATE.baichuan2_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.lawyer +evaluation_inputs = ['请问离婚需要准备什么材料?', '销售鳄鱼皮包违法吗?'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + 
type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +crime_kg_assitant = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=crime_kg_assitant_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=crime_kg_assitant_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +law_reference_data = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=law_reference_data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=law_reference_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[crime_kg_assitant, law_reference_data]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + 
        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_chat/baichuan2_13b_chat_qlora_oasst1_512_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_chat/baichuan2_13b_chat_qlora_oasst1_512_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..80b9c46161e75c6d015efe861569db21717b1e36
--- /dev/null
+++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_chat/baichuan2_13b_chat_qlora_oasst1_512_e3.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
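+# NOTE: unlike the 2048-token configs above, this variant sets
+# max_length = 512 and pack_to_max_length = False, so each conversation
+# is tokenized and truncated individually instead of being packed into
+# fixed-length training blocks.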
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-13B-Chat' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.baichuan2_chat +max_length = 512 +pack_to_max_length = False + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + 
    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_chat/baichuan2_13b_chat_qlora_oasst1_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_chat/baichuan2_13b_chat_qlora_oasst1_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..15f39c0696eaff80f57dc15ce14dd9343905f04a
--- /dev/null
+++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_chat/baichuan2_13b_chat_qlora_oasst1_e3.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
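+# NOTE: the param_scheduler in this config warms the learning rate up
+# linearly for warmup_ratio * max_epochs = 0.03 * 3 = 0.09 epochs
+# (converted to iterations by convert_to_iter_based=True), then decays it
+# with cosine annealing to eta_min = 0.0 over the remaining epochs.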
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-13B-Chat' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.baichuan2_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + 
    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_chat/baichuan2_13b_chat_qlora_open_platypus_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_chat/baichuan2_13b_chat_qlora_open_platypus_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..dc775240472dee8900e1295431916a80b80fa8f9
--- /dev/null
+++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_13b_chat/baichuan2_13b_chat_qlora_open_platypus_e3.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
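+# NOTE: CheckpointHook runs with by_epoch=False in this config, so a
+# checkpoint is written every save_steps = 500 iterations and only the
+# most recent save_total_limit = 2 checkpoints are kept (max_keep_ckpts).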
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-13B-Chat' +use_varlen_attn = False + +# Data +data_path = 'garage-bAInd/Open-Platypus' +prompt_template = PROMPT_TEMPLATE.baichuan2_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, 
shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_alpaca_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..fede68a645f9c82d09ebf12ec43bf0da50cb707e
--- /dev/null
+++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_alpaca_e3.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
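+# NOTE: this config targets the Baichuan2-7B *base* model rather than a
+# chat model, which is why it uses PROMPT_TEMPLATE.default instead of the
+# baichuan2_chat template used by the 13B chat configs above.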
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-7B-Base' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=DefaultSampler, shuffle=True), + 
    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_alpaca_enzh_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_alpaca_enzh_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..ed6bab796a87a404494c4de0500bd8776b297ff7
--- /dev/null
+++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_alpaca_enzh_e3.py
@@ -0,0 +1,229 @@
+# Copyright (c) OpenMMLab. All rights reserved.
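+# NOTE: this config mixes the English and Chinese Alpaca datasets through
+# ConcatDataset, and EvaluateChatHook generates sample replies to the two
+# `evaluation_inputs` prompts every evaluation_freq = 500 iterations so
+# generation quality can be checked in the training logs.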
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-7B-Base' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + 
dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_alpaca_enzh_oasst1_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_alpaca_enzh_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..dc0106fae754c54be948aee1d420a20a32b96def --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_alpaca_enzh_oasst1_e3.py @@ -0,0 +1,244 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + oasst1_map_fn, template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-7B-Base' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +oasst1_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = 
dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +oasst1 = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=oasst1_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[alpaca_en, alpaca_zh, oasst1]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically 
during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_alpaca_zh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..372700ca7e0c98b170f69620938c993b7ec61030 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_alpaca_zh_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
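+# NOTE (illustrative; assumes single-GPU training): with batch_size=1
+# and accumulative_counts=16, gradients are accumulated over 16
+# forward/backward passes before each optimizer step, so
+#
+#   effective_batch = batch_size * accumulative_counts * num_gpus
+#                   = 1 * 16 * 1
+#                   = 16
+#
+# and, because samples are packed to max_length=2048 tokens, each
+# optimizer step sees roughly 16 * 2048 = 32768 tokens. Raising
+# accumulative_counts is the cheap way to grow the effective batch
+# without increasing per-device memory use.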
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_zh_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-7B-Base' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_zh, + sampler=dict(type=DefaultSampler, 
shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_arxiv_gentitle_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_arxiv_gentitle_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..847b4b2d2b1bc71a0eac73ec9db26e5149c36b95 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_arxiv_gentitle_e3.py @@ -0,0 +1,247 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
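+# NOTE (illustrative; iters_per_epoch below is a made-up figure): the
+# LinearLR + CosineAnnealingLR pair in PART 4 of these configs gives
+# linear warmup followed by cosine decay. Warmup covers
+# warmup_ratio * max_epochs epochs:
+#
+#   warmup_epochs = 0.03 * 3 = 0.09
+#   iters_per_epoch = 10000              # hypothetical value
+#   warmup_iters = int(0.09 * 10000)     # 900, via convert_to_iter_based
+#
+# i.e. the LR climbs from lr * start_factor (2e-4 * 1e-5) up to lr over
+# the first ~3% of training, then anneals to eta_min=0.0 by the final
+# epoch.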
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import arxiv_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-7B-Base' +use_varlen_attn = False + +# Data +# 1. Download data from https://kaggle.com/datasets/Cornell-University/arxiv +# 2. Process data by `xtuner preprocess arxiv ${DOWNLOADED_DATA} ./data/arxiv_data.json [optional arguments]` # noqa: E501 +data_path = './data/arxiv_data.json' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.arxiv_gentile +evaluation_inputs = [ + ('We present InternLM, a multilingual foundational language ' + 'model with 104B parameters. InternLM is pre-trained on a large ' + 'corpora with 1.6T tokens with a multi-phase progressive ' + 'process, and then fine-tuned to align with human preferences. ' + 'We also developed a training system called Uniscale-LLM for ' + 'efficient large language model training. The evaluation on a ' + 'number of benchmarks shows that InternLM achieves ' + 'state-of-the-art performance in multiple aspects, including ' + 'knowledge understanding, reading comprehension, mathematics, ' + 'and coding. With such well-rounded capabilities, InternLM ' + 'achieves outstanding performances on comprehensive exams, ' + 'including MMLU, AGIEval, C-Eval and GAOKAO-Bench, without ' + 'resorting to external tools. On these benchmarks, InternLM ' + 'not only significantly outperforms open-source models, but ' + 'also obtains superior performance compared to ChatGPT. Also, ' + 'InternLM demonstrates excellent capability of understanding ' + 'Chinese language and Chinese culture, which makes it a ' + 'suitable foundation model to support Chinese-oriented language ' + 'applications. 
This manuscript gives a detailed study of ' + 'our results, with benchmarks and examples across a diverse ' + 'set of knowledge domains and tasks.'), + ('In this work, we develop and release Llama 2, a collection of ' + 'pretrained and fine-tuned large language models (LLMs) ranging ' + 'in scale from 7 billion to 70 billion parameters.\nOur ' + 'fine-tuned LLMs, called LLAMA 2-CHAT, are optimized for ' + 'dialogue use cases. Our models outperform open-source chat ' + 'models on most benchmarks we tested, and based on our human ' + 'evaluations for helpfulness and safety, may be a suitable ' + 'substitute for closedsource models. We provide a detailed ' + 'description of our approach to fine-tuning and safety ' + 'improvements of LLAMA 2-CHAT in order to enable the community ' + 'to build on our work and contribute to the responsible ' + 'development of LLMs.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=arxiv_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + 
by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_code_alpaca_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_code_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..78744124f12616786c1eb2d5e9b648ca2d9389ea --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_code_alpaca_e3.py @@ -0,0 +1,216 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
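+# NOTE (rough eager-mode sketch, assuming bitsandbytes and peft are
+# installed; not how xtuner itself assembles the model): the model spec
+# below is standard QLoRA -- a frozen base loaded in 4-bit NF4 with
+# double quantization, plus trainable LoRA adapters whose update is
+# scaled by lora_alpha / r = 16 / 64 = 0.25. Built by hand it would
+# look roughly like:
+#
+#   import torch
+#   from transformers import AutoModelForCausalLM, BitsAndBytesConfig
+#   from peft import LoraConfig, get_peft_model
+#
+#   bnb = BitsAndBytesConfig(
+#       load_in_4bit=True,
+#       bnb_4bit_quant_type='nf4',
+#       bnb_4bit_use_double_quant=True,
+#       bnb_4bit_compute_dtype=torch.float16)
+#   base = AutoModelForCausalLM.from_pretrained(
+#       'baichuan-inc/Baichuan2-7B-Base',
+#       quantization_config=bnb,
+#       trust_remote_code=True)
+#   peft_model = get_peft_model(base, LoraConfig(
+#       r=64, lora_alpha=16, lora_dropout=0.1,
+#       bias='none', task_type='CAUSAL_LM'))
+#   peft_model.print_trainable_parameters()  # only the adapters train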
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import code_alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-7B-Base' +use_varlen_attn = False + +# Data +data_path = 'HuggingFaceH4/CodeAlpaca_20K' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 100 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=code_alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + 
pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_colorist_e5.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_colorist_e5.py new file mode 100644 index 0000000000000000000000000000000000000000..c62922c0933578e6b9dab8f7c5e2f64f37e9db0d --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_colorist_e5.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import colors_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-7B-Base' +use_varlen_attn = False + +# Data +data_path = 'burkelibbey/colors' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 5 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 200 +SYSTEM = SYSTEM_TEMPLATE.colorist +evaluation_inputs = [ + '请给我一个像天空一样清澈透明的蓝色。', 'Please give me a clear blue like the sky.' 
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=colors_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. 
+ timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_lawyer_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_lawyer_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..26e8f29e38053e6db13d4f6147fca43915379d42 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_lawyer_e3.py @@ -0,0 +1,236 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (crime_kg_assitant_map_fn, + law_reference_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-7B-Base' +use_varlen_attn = False + +# Data +# download data from https://github.com/LiuHC0428/LAW-GPT +crime_kg_assitant_path = './data/CrimeKgAssitant清洗后_52k.json' +law_reference_data_path = './data/训练数据_带法律依据_92k.json' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.lawyer +evaluation_inputs = 
['请问离婚需要准备什么材料?', '销售鳄鱼皮包违法吗?'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +crime_kg_assitant = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=crime_kg_assitant_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=crime_kg_assitant_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +law_reference_data = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=law_reference_data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=law_reference_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[crime_kg_assitant, law_reference_data]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### 
+# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_oasst1_512_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_oasst1_512_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..dc4be6f0d9f3e83b9b83a3b8a518849d8f6257e5 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_oasst1_512_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
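+# NOTE (illustrative): this variant differs from the 2048-token configs
+# above in two linked settings -- max_length=512 and
+# pack_to_max_length=False -- so each training row is one right-padded
+# conversation truncated to 512 tokens, rather than several samples
+# packed end-to-end into a full-length block:
+#
+#   pack_to_max_length=True   # rows are nearly all real tokens; best throughput
+#   pack_to_max_length=False  # exact sample boundaries; padding overhead
+#
+# A shorter, unpacked setup like this trades throughput for lower
+# memory use and cleaner per-sample boundaries.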
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-7B-Base' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.default +max_length = 512 +pack_to_max_length = False + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + 
collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_oasst1_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..6db25f82d389c76b9497a9442c15fed8d24dfdec --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_oasst1_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved.
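+# QLoRA fine-tuning config: Baichuan2-7B-Base on oasst1 (timdettmers/openassistant-guanaco), 2048-token context with sample packing enabled, 3 epochs.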
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-7B-Base' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + 
collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_open_platypus_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_open_platypus_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..1323aae0a5efb08d48399ea8a0ac4729f46e6256 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_open_platypus_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved.
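+# QLoRA fine-tuning config: Baichuan2-7B-Base on garage-bAInd/Open-Platypus, using the Alpaca system template and map function, 3 epochs.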
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-7B-Base' +use_varlen_attn = False + +# Data +data_path = 'garage-bAInd/Open-Platypus' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), 
+ collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_sql_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_sql_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..06b7bc84259f38f44058911601e991aee287f141 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_base/baichuan2_7b_base_qlora_sql_e3.py @@ -0,0 +1,216 @@ +# Copyright (c) OpenMMLab. All rights reserved.
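+# QLoRA fine-tuning config: Baichuan2-7B-Base on b-mc2/sql-create-context (text-to-SQL); the evaluation prompts below exercise both English and Chinese queries.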
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import sql_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-7B-Base' +use_varlen_attn = False + +# Data +data_path = 'b-mc2/sql-create-context' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.sql +evaluation_inputs = [ + ('CREATE TABLE station (name VARCHAR, lat VARCHAR, city VARCHAR)\n' + 'Find the name, latitude, and city of stations with latitude ' + 'above 50.'), + ('CREATE TABLE weather (zip_code VARCHAR, mean_visibility_miles ' + 'INTEGER)\n找到mean_visibility_miles最大的zip_code。') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=sql_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + 
use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_chat/baichuan2_7b_chat_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_chat/baichuan2_7b_chat_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..b2b9a8a1d6f655363d200b78c3fe2354ed30b4d8 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_chat/baichuan2_7b_chat_qlora_alpaca_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab.
All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-7B-Chat' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.baichuan2_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + 
sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_chat/baichuan2_7b_chat_qlora_alpaca_enzh_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_chat/baichuan2_7b_chat_qlora_alpaca_enzh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..7c4f63c4abb261d22dda06d1cbb1c4765f25560d --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_chat/baichuan2_7b_chat_qlora_alpaca_enzh_e3.py @@ -0,0 +1,229 @@ +# Copyright (c) OpenMMLab. All rights reserved.
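+# QLoRA fine-tuning config: Baichuan2-7B-Chat on the English (tatsu-lab/alpaca) and Chinese (silk-road/alpaca-data-gpt4-chinese) Alpaca datasets, concatenated via ConcatDataset.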
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-7B-Chat' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.baichuan2_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + 
dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_chat/baichuan2_7b_chat_qlora_alpaca_enzh_oasst1_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_chat/baichuan2_7b_chat_qlora_alpaca_enzh_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..26effe1fd5ec8854031b7d5e9197a83075954826 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_chat/baichuan2_7b_chat_qlora_alpaca_enzh_oasst1_e3.py @@ -0,0 +1,244 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + oasst1_map_fn, template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-7B-Chat' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +oasst1_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.baichuan2_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + 
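+# 4-bit NF4-quantized base model wrapped with rank-64 LoRA adapters (QLoRA).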
+model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +oasst1 = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=oasst1_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[alpaca_en, alpaca_zh, oasst1]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue 
periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_chat/baichuan2_7b_chat_qlora_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_chat/baichuan2_7b_chat_qlora_alpaca_zh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..a350033a134438996372a978297be13f53e61f45 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_chat/baichuan2_7b_chat_qlora_alpaca_zh_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved.
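+# QLoRA fine-tuning config: Baichuan2-7B-Chat on the Chinese Alpaca dataset (silk-road/alpaca-data-gpt4-chinese) only, 3 epochs.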
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_zh_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-7B-Chat' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +prompt_template = PROMPT_TEMPLATE.baichuan2_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_zh, + 
sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_chat/baichuan2_7b_chat_qlora_code_alpaca_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_chat/baichuan2_7b_chat_qlora_code_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..68d0569cefe343ce2a7214ebf2510ebbfa150b37 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_chat/baichuan2_7b_chat_qlora_code_alpaca_e3.py @@ -0,0 +1,216 @@ +# Copyright (c) OpenMMLab. All rights reserved.
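+# QLoRA fine-tuning config: Baichuan2-7B-Chat on HuggingFaceH4/CodeAlpaca_20K with the coder system template; chat evaluation runs every 100 iterations.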
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import code_alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-7B-Chat' +use_varlen_attn = False + +# Data +data_path = 'HuggingFaceH4/CodeAlpaca_20K' +prompt_template = PROMPT_TEMPLATE.baichuan2_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 100 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=code_alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + 
pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_chat/baichuan2_7b_chat_qlora_lawyer_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_chat/baichuan2_7b_chat_qlora_lawyer_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..43cdca592a0b514c7466a676188115867bb2db8d --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_chat/baichuan2_7b_chat_qlora_lawyer_e3.py @@ -0,0 +1,236 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (crime_kg_assitant_map_fn, + law_reference_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-7B-Chat' +use_varlen_attn = False + +# Data +# download data from https://github.com/LiuHC0428/LAW-GPT +crime_kg_assitant_path = './data/CrimeKgAssitant清洗后_52k.json' +law_reference_data_path = './data/训练数据_带法律依据_92k.json' +prompt_template = PROMPT_TEMPLATE.baichuan2_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.lawyer +evaluation_inputs = ['请问离婚需要准备什么材料?', '销售鳄鱼皮包违法吗?'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, 
+ use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +crime_kg_assitant = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=crime_kg_assitant_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=crime_kg_assitant_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +law_reference_data = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=law_reference_data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=law_reference_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[crime_kg_assitant, law_reference_data]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if 
use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_chat/baichuan2_7b_chat_qlora_oasst1_512_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_chat/baichuan2_7b_chat_qlora_oasst1_512_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..488ef97eafb855e1edb4c9cdaa0708a6bd49f487 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_chat/baichuan2_7b_chat_qlora_oasst1_512_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
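+# Editor's note: this "512" variant differs from its 2048-token siblings in
+# two linked settings: max_length = 512 and pack_to_max_length = False. Each
+# conversation is truncated or padded to at most 512 tokens on its own,
+# rather than concatenated with neighbours into full-length blocks, trading
+# some padding waste for a much smaller per-step memory footprint.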
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-7B-Chat' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.baichuan2_chat +max_length = 512 +pack_to_max_length = False + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + 
collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_chat/baichuan2_7b_chat_qlora_oasst1_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_chat/baichuan2_7b_chat_qlora_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..23c15359647031e79d6410cd971d8eef8f07aef3 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_chat/baichuan2_7b_chat_qlora_oasst1_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
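+# Editor's note: with pack_to_max_length = True, tokenised samples are
+# shuffled (shuffle_before_pack) and then concatenated into fixed
+# 2048-token blocks, so batches carry no padding. A rough sketch of the
+# idea, not xtuner's actual implementation:
+#
+#     def pack(samples, max_len=2048):
+#         buf, blocks = [], []
+#         for ids in samples:              # ids: list of token ids
+#             buf.extend(ids)
+#             while len(buf) >= max_len:   # emit full blocks, keep remainder
+#                 blocks.append(buf[:max_len])
+#                 buf = buf[max_len:]
+#         return blocks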
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-7B-Chat' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.baichuan2_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + 
collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_chat/baichuan2_7b_chat_qlora_open_platypus_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_chat/baichuan2_7b_chat_qlora_open_platypus_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..df3e8b40a404e3e869b963765aa0fc9f6069e082 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan2_7b_chat/baichuan2_7b_chat_qlora_open_platypus_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
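+# Editor's note: the two-phase learning policy shared by these configs
+# resolves, with warmup_ratio = 0.03 and max_epochs = 3, to a linear warm-up
+# over the first 0.03 * 3 = 0.09 epochs (rescaled to iterations by
+# convert_to_iter_based=True), then cosine decay from lr = 2e-4 down to
+# eta_min = 0.0 across the remaining ~2.91 epochs.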
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-7B-Chat' +use_varlen_attn = False + +# Data +data_path = 'garage-bAInd/Open-Platypus' +prompt_template = PROMPT_TEMPLATE.baichuan2_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, 
shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..0f171e6bad043075d609bf23273c7a2f694c616a --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_alpaca_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
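+# Editor's note: the same QLoRA recipe scaled up to the 13B base model. The
+# frozen weights load as 4-bit NF4 with double quantisation, i.e. roughly
+# 0.5 bytes per parameter, while only the fp16 LoRA adapters receive
+# gradients. A back-of-the-envelope check (assuming ~13e9 parameters):
+#
+#     params = 13e9
+#     print(f"4-bit base weights ≈ {params * 0.5 / 2**30:.1f} GiB")  # ~6.1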
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Base' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=DefaultSampler, shuffle=True), + 
collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_alpaca_enzh_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_alpaca_enzh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..30ba57c1acb46d82892e2a993006d9eada309de8 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_alpaca_enzh_e3.py @@ -0,0 +1,229 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
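+# Editor's note: this bilingual variant mixes English and Chinese Alpaca by
+# wrapping both processed datasets in xtuner's ConcatDataset, which behaves
+# like torch.utils.data.ConcatDataset: one shared index space, so the en/zh
+# ratio simply follows the two dataset sizes. Plain-PyTorch equivalent of
+# the composition step:
+#
+#     from torch.utils.data import ConcatDataset
+#     mixed = ConcatDataset([alpaca_en, alpaca_zh])  # len(en) + len(zh) items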
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Base' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + 
dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_alpaca_enzh_oasst1_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_alpaca_enzh_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..55a0dff0176a237ea5111998fd7d54d9ea8fc741 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_alpaca_enzh_oasst1_e3.py @@ -0,0 +1,244 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + oasst1_map_fn, template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Base' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +oasst1_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = 
dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +oasst1 = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=oasst1_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[alpaca_en, alpaca_zh, oasst1]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically 
during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_alpaca_zh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..b92847c5c9ca8ac689be19c7b5245de5bb8fae1e --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_alpaca_zh_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
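+# Editor's note: configs like this are consumed by the xtuner CLI rather than
+# executed directly. A typical workflow (assuming xtuner is installed and
+# this file is saved locally):
+#
+#     xtuner train baichuan_13b_base_qlora_alpaca_zh_e3.py
+#     xtuner convert pth_to_hf <config>.py <checkpoint>.pth <output_dir>
+#
+# where the second step exports the trained LoRA weights in HuggingFace
+# adapter format.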
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_zh_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Base' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_zh, + sampler=dict(type=DefaultSampler, 
shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_arxiv_gentitle_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_arxiv_gentitle_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..06fefb3854c5c09352752695a9246dfea24d02fe --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_arxiv_gentitle_e3.py @@ -0,0 +1,247 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
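+# Background on the config style: every object below is declared lazily as
+# ``dict(type=<class or factory>, **kwargs)``; nothing is instantiated until
+# ``xtuner train <config>`` hands the file to the mmengine runner, which
+# builds these dicts recursively. ``_build`` is a minimal sketch of that
+# convention -- a simplified stand-in for mmengine's real builder, which
+# additionally resolves registry strings and nested scopes.
+def _build(cfg):
+    """Recursively turn ``dict(type=f, **kw)`` into ``f(**kw)``."""
+    if isinstance(cfg, dict) and 'type' in cfg:
+        cfg = {k: _build(v) for k, v in cfg.items()}
+        factory = cfg.pop('type')
+        return factory(**cfg)
+    if isinstance(cfg, (list, tuple)):
+        return type(cfg)(_build(v) for v in cfg)
+    return cfg
+
+# Toy usage, same shape as the ``tokenizer``/``model`` dicts below:
+assert _build(dict(type=complex, real=3.0, imag=4.0)) == 3 + 4j
+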
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import arxiv_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Base' +use_varlen_attn = False + +# Data +# 1. Download data from https://kaggle.com/datasets/Cornell-University/arxiv +# 2. Process data by `xtuner preprocess arxiv ${DOWNLOADED_DATA} ./data/arxiv_data.json [optional arguments]` # noqa: E501 +data_path = './data/arxiv_data.json' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.arxiv_gentile +evaluation_inputs = [ + ('We present InternLM, a multilingual foundational language ' + 'model with 104B parameters. InternLM is pre-trained on a large ' + 'corpora with 1.6T tokens with a multi-phase progressive ' + 'process, and then fine-tuned to align with human preferences. ' + 'We also developed a training system called Uniscale-LLM for ' + 'efficient large language model training. The evaluation on a ' + 'number of benchmarks shows that InternLM achieves ' + 'state-of-the-art performance in multiple aspects, including ' + 'knowledge understanding, reading comprehension, mathematics, ' + 'and coding. With such well-rounded capabilities, InternLM ' + 'achieves outstanding performances on comprehensive exams, ' + 'including MMLU, AGIEval, C-Eval and GAOKAO-Bench, without ' + 'resorting to external tools. On these benchmarks, InternLM ' + 'not only significantly outperforms open-source models, but ' + 'also obtains superior performance compared to ChatGPT. Also, ' + 'InternLM demonstrates excellent capability of understanding ' + 'Chinese language and Chinese culture, which makes it a ' + 'suitable foundation model to support Chinese-oriented language ' + 'applications. 
This manuscript gives a detailed study of ' + 'our results, with benchmarks and examples across a diverse ' + 'set of knowledge domains and tasks.'), + ('In this work, we develop and release Llama 2, a collection of ' + 'pretrained and fine-tuned large language models (LLMs) ranging ' + 'in scale from 7 billion to 70 billion parameters.\nOur ' + 'fine-tuned LLMs, called LLAMA 2-CHAT, are optimized for ' + 'dialogue use cases. Our models outperform open-source chat ' + 'models on most benchmarks we tested, and based on our human ' + 'evaluations for helpfulness and safety, may be a suitable ' + 'substitute for closedsource models. We provide a detailed ' + 'description of our approach to fine-tuning and safety ' + 'improvements of LLAMA 2-CHAT in order to enable the community ' + 'to build on our work and contribute to the responsible ' + 'development of LLMs.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=arxiv_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + 
by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_code_alpaca_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_code_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..9fc071ccbdc346cf3c85e73d0c05f2e4e65c601f --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_code_alpaca_e3.py @@ -0,0 +1,216 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
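+# PART 2 of this config describes the same QLoRA recipe as its siblings: a
+# 4-bit NF4-quantized base model with a rank-64 LoRA adapter. The commented
+# sketch below shows roughly what that resolves to with plain transformers +
+# peft. It assumes a CUDA GPU and downloadable Baichuan-13B weights, does not
+# reproduce xtuner's SupervisedFinetune wrapper (prompt/loss masking, packing,
+# varlen attention), and should be run separately, never as part of the config.
+#
+#     import torch
+#     from peft import (LoraConfig, get_peft_model,
+#                       prepare_model_for_kbit_training)
+#     from transformers import AutoModelForCausalLM, BitsAndBytesConfig
+#
+#     quant_cfg = BitsAndBytesConfig(
+#         load_in_4bit=True,
+#         bnb_4bit_quant_type='nf4',             # NormalFloat4 weights
+#         bnb_4bit_use_double_quant=True,        # quantize the quant constants
+#         bnb_4bit_compute_dtype=torch.float16)  # matmuls run in fp16
+#     base = AutoModelForCausalLM.from_pretrained(
+#         'baichuan-inc/Baichuan-13B-Base',
+#         quantization_config=quant_cfg,
+#         torch_dtype=torch.float16,
+#         trust_remote_code=True)
+#     base = prepare_model_for_kbit_training(base)
+#     model = get_peft_model(base, LoraConfig(
+#         r=64, lora_alpha=16, lora_dropout=0.1,
+#         bias='none', task_type='CAUSAL_LM'))
+#     model.print_trainable_parameters()  # only LoRA matrices require grad
+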
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import code_alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Base' +use_varlen_attn = False + +# Data +data_path = 'HuggingFaceH4/CodeAlpaca_20K' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 100 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=code_alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + 
pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_colorist_e5.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_colorist_e5.py new file mode 100644 index 0000000000000000000000000000000000000000..6b9e7c8be54132dca73c99a5259d038618200cf9 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_colorist_e5.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import colors_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Base' +use_varlen_attn = False + +# Data +data_path = 'burkelibbey/colors' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 5 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 200 +SYSTEM = SYSTEM_TEMPLATE.colorist +evaluation_inputs = [ + '请给我一个像天空一样清澈透明的蓝色。', 'Please give me a clear blue like the sky.' 
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=colors_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. 
+ timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_lawyer_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_lawyer_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..447f9a4c13843608444492a635a8a6062a87c252 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_lawyer_e3.py @@ -0,0 +1,236 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (crime_kg_assitant_map_fn, + law_reference_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Base' +use_varlen_attn = False + +# Data +# download data from https://github.com/LiuHC0428/LAW-GPT +crime_kg_assitant_path = './data/CrimeKgAssitant清洗后_52k.json' +law_reference_data_path = './data/训练数据_带法律依据_92k.json' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.lawyer +evaluation_inputs = 
['请问离婚需要准备什么材料?', '销售鳄鱼皮包违法吗?'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +crime_kg_assitant = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=crime_kg_assitant_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=crime_kg_assitant_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +law_reference_data = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=law_reference_data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=law_reference_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[crime_kg_assitant, law_reference_data]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### 
+# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_medical_e1.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_medical_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..75523e268140cac5da2bb5816c92fc031dd0c6ed --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_medical_e1.py @@ -0,0 +1,214 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
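+# PART 4 of every config in this series declares the same two-phase schedule:
+# linear warmup from ``lr * 1e-5`` over the first ``warmup_ratio`` (3%) of
+# training, then cosine decay to 0. The helper below is a simplified
+# pure-Python rendering of that shape, for intuition only; mmengine's
+# LinearLR/CosineAnnealingLR implement the same curve with per-iteration
+# bookkeeping once ``convert_to_iter_based=True``.
+import math
+
+
+def _lr_at(step, total_steps, lr=2e-4, warmup_ratio=0.03, start_factor=1e-5):
+    warmup_steps = max(1, int(total_steps * warmup_ratio))
+    if step < warmup_steps:
+        # LinearLR phase: scale factor ramps from start_factor up to 1.0
+        factor = start_factor + (1 - start_factor) * step / warmup_steps
+        return lr * factor
+    # CosineAnnealingLR phase: anneal from lr down to eta_min = 0.0
+    progress = (step - warmup_steps) / max(1, total_steps - warmup_steps)
+    return 0.5 * lr * (1 + math.cos(math.pi * progress))
+
+
+# e.g. for a hypothetical 10k-step run: ~2e-9 at step 0, 2e-4 at the end of
+# warmup (step 300), and back near 0 by the final step.
+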
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import medical_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Base' +use_varlen_attn = False + +# Data +data_path = 'shibing624/medical' +data_config_name = 'finetune' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.medical +evaluation_inputs = [ + '我有家族遗传性的过敏,请问可以可以献血吗?', '我爷爷有高血压,请问他可以喝咖啡吗?', + '我女儿今年3岁了,从昨天晚上九点开始腹泻,到现在已经八个小时了,请问应该怎么办?' 
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path, name=data_config_name), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=medical_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. 
+ timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_moss_sft_all_e1.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_moss_sft_all_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..fcbce6ae82d97c5cf2a20969e4c4d1cf3b1762c3 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_moss_sft_all_e1.py @@ -0,0 +1,217 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, MOSSSFTDataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Base' +bot_name = 'Baichuan' +use_varlen_attn = False + +# Data +# Download data from https://huggingface.co/datasets/fnlp/moss-003-sft-data +moss_sft_no_plugins_path = './data/moss-003-sft-no-tools.jsonl' +moss_sft_plugins_path = './data/conversations_with_tools_with_inner_instruction_no_text2image_train_all_random_meta0.5_0.1_0.01_moss_0709.jsonl' # noqa: E501 +max_length = 2048 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +SYSTEM = SYSTEM_TEMPLATE.moss_sft +prompt_template = PROMPT_TEMPLATE.moss_sft +evaluation_freq = 500 +evaluation_inputs = [ + 
'一个球体的表面积是384平方厘米,求它的体积。', '今有鸡兔同笼,上有二十头,下有六十二足, 问鸡兔各几何?', '介绍一下比尔盖茨' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +moss_sft_no_plugins = dict( + type=MOSSSFTDataset, + data_file=moss_sft_no_plugins_path, + bot_name=bot_name, + tokenizer=tokenizer, + max_length=max_length) + +moss_sft_plugins = dict( + type=MOSSSFTDataset, + data_file=moss_sft_plugins_path, + bot_name=bot_name, + tokenizer=tokenizer, + max_length=max_length) + +train_dataset = dict( + type=ConcatDataset, datasets=[moss_sft_no_plugins, moss_sft_plugins]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + stop_words=[''], + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( 
+ # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_moss_sft_all_e2_gpu8.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_moss_sft_all_e2_gpu8.py new file mode 100644 index 0000000000000000000000000000000000000000..0c11b997c80a8d1533901f6f3284116e895491d2 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_moss_sft_all_e2_gpu8.py @@ -0,0 +1,217 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, MOSSSFTDataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Base' +bot_name = 'Baichuan' +use_varlen_attn = False + +# Data +# Download data from https://huggingface.co/datasets/fnlp/moss-003-sft-data +moss_sft_no_plugins_path = './data/moss-003-sft-no-tools.jsonl' +moss_sft_plugins_path = './data/conversations_with_tools_with_inner_instruction_no_text2image_train_all_random_meta0.5_0.1_0.01_moss_0709.jsonl' # noqa: E501 +max_length = 2048 + +# Scheduler & Optimizer +batch_size = 8 # per_device +accumulative_counts = 1 +dataloader_num_workers = 2 +max_epochs = 2 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +SYSTEM = SYSTEM_TEMPLATE.moss_sft +prompt_template = PROMPT_TEMPLATE.moss_sft 
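+# NOTE (assuming the ``gpu8`` suffix denotes 8 data-parallel GPUs on one
+# node): relative to the single-GPU ``_e1`` config above, this variant trades
+# gradient accumulation for data parallelism --
+#   global batch = batch_size * accumulative_counts * num_gpus
+#                = 8 * 1 * 8 = 64 sequences per optimizer step
+# versus 1 * 16 * 1 = 16 in ``_e1`` -- and trains for 2 epochs instead of 1.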
+evaluation_freq = 500 +evaluation_inputs = [ + '一个球体的表面积是384平方厘米,求它的体积。', '今有鸡兔同笼,上有二十头,下有六十二足, 问鸡兔各几何?', '介绍一下比尔盖茨' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +moss_sft_no_plugins = dict( + type=MOSSSFTDataset, + data_file=moss_sft_no_plugins_path, + bot_name=bot_name, + tokenizer=tokenizer, + max_length=max_length) + +moss_sft_plugins = dict( + type=MOSSSFTDataset, + data_file=moss_sft_plugins_path, + bot_name=bot_name, + tokenizer=tokenizer, + max_length=max_length) + +train_dataset = dict( + type=ConcatDataset, datasets=[moss_sft_no_plugins, moss_sft_plugins]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + stop_words=[''], + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# 
configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_moss_sft_plugins_e1.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_moss_sft_plugins_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..1c54c244f95a0295180bc4cef31b270d7bf9a033 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_moss_sft_plugins_e1.py @@ -0,0 +1,206 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import MOSSSFTDataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Base' +bot_name = 'Baichuan' +use_varlen_attn = False + +# Data +# Download data from https://huggingface.co/datasets/fnlp/moss-003-sft-data +moss_sft_plugins_path = './data/conversations_with_tools_with_inner_instruction_no_text2image_train_all_random_meta0.5_0.1_0.01_moss_0709.jsonl' # noqa: E501 +max_length = 2048 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +SYSTEM = SYSTEM_TEMPLATE.moss_sft +prompt_template = PROMPT_TEMPLATE.moss_sft +evaluation_freq = 500 
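+# NOTE: ``evaluation_freq`` feeds ``every_n_iters`` of the EvaluateChatHook
+# configured in PART 5 below, so the prompts listed next are answered and
+# logged every 500 training iterations -- the same cadence as checkpointing
+# in this config, since ``save_steps`` is also 500.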
+evaluation_inputs = [ + '一个球体的表面积是384平方厘米,求它的体积。', '今有鸡兔同笼,上有二十头,下有六十二足, 问鸡兔各几何?', '介绍一下比尔盖茨' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=MOSSSFTDataset, + data_file=moss_sft_plugins_path, + bot_name=bot_name, + tokenizer=tokenizer, + max_length=max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + stop_words=[''], + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. 
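+    # ParamSchedulerHook is what steps the LinearLR warmup and
+    # CosineAnnealingLR decay defined in PART 4; since those schedules set
+    # convert_to_iter_based=True, they advance once per iteration rather
+    # than once per epoch.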
+ param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_oasst1_512_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_oasst1_512_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..2f8e5b82e96c151c1f3063b22468ce8d01cb9ef8 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_oasst1_512_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Base' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.default +max_length = 512 +pack_to_max_length = False + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + 
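+    # Baichuan publishes custom tokenizer/model code on the Hugging Face
+    # Hub, so trust_remote_code is required; enable it only for checkpoints
+    # you trust, since it executes repository code locally.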
trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
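+    # With save_steps = 500 and save_total_limit = 2, a checkpoint is
+    # written every 500 iterations and only the two newest are kept
+    # (max_keep_ckpts=-1 would keep all of them).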
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_oasst1_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..fc07626ae472daee5bbca0b37a0e1f9055607733 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_oasst1_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Base' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + 
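+    # SupervisedFinetune wraps the quantised base llm and, because a lora
+    # section is provided below, injects PEFT LoRA adapters so that only
+    # the adapter weights receive gradients.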
use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
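+    # DistSamplerSeedHook calls set_epoch on the sampler at the start of
+    # each epoch, so the shuffle order stays synchronised across
+    # distributed workers.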
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_open_platypus_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_open_platypus_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..20b00ce15f58063302799d8b1d4f0bb04e58feaa --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_open_platypus_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Base' +use_varlen_attn = False + +# Data +data_path = 'garage-bAInd/Open-Platypus' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + 
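+        # from_pretrained is invoked lazily by the runner with the kwargs
+        # below; the BitsAndBytesConfig with load_in_4bit=True is what
+        # makes this a QLoRA run: NF4 4-bit base weights with fp16 LoRA
+        # adapters trained on top.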
pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_openorca_e1.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_openorca_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..0e7828e545dc11414102d49393660e872e37e886 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_openorca_e1.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openorca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Base' +use_varlen_attn = False + +# Data +data_path = 'Open-Orca/OpenOrca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 5000 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + 
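+        # torch_dtype here should agree with bnb_4bit_compute_dtype below:
+        # the NF4-quantised weights are dequantised to float16 for each
+        # matmul.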
torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openorca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_sql_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_sql_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..e9d16fd87c8a3e587369383210359f0241e8d609 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_sql_e3.py @@ -0,0 +1,216 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import sql_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Base' +use_varlen_attn = False + +# Data +data_path = 'b-mc2/sql-create-context' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.sql +evaluation_inputs = [ + ('CREATE TABLE station (name VARCHAR, lat VARCHAR, city VARCHAR)\n' + 'Find the name, latitude, and city of stations with latitude ' + 'above 50.'), + ('CREATE TABLE weather (zip_code VARCHAR, mean_visibility_miles ' + 'INTEGER)\n找到mean_visibility_miles最大的zip_code。') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = 
dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=sql_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_tiny_codes_e1.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_tiny_codes_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..4f1384892b85fefd4a84a573e1a8055359165556 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_base/baichuan_13b_base_qlora_tiny_codes_e1.py @@ -0,0 +1,216 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import template_map_fn_factory, tiny_codes_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Base' +use_varlen_attn = False + +# Data +data_path = 'nampdn-ai/tiny-codes' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # 
+####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=tiny_codes_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. 
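+    # log_metric_by_epoch=False switches the LoggerHook to iteration-based
+    # reporting, matching the iteration-based checkpointing further down.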
+ logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..cf4c6e8d4daffe627dde2803575deaa24c2ff941 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_alpaca_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Chat' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.baichuan_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### 
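+# The dict(type=...) entries below are lazy mmengine-style configs: nothing
+# is instantiated until the runner builds them. As a rough eager-equivalent
+# sketch (kept in comments so it is not executed here):
+#
+#     tokenizer = AutoTokenizer.from_pretrained(
+#         pretrained_model_name_or_path,
+#         trust_remote_code=True,
+#         padding_side='right')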
+tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. 
+ param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_alpaca_enzh_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_alpaca_enzh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..2ceef3f79b12853f69955bc0924fd44a4ca2d893 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_alpaca_enzh_e3.py @@ -0,0 +1,229 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Chat' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.baichuan_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer 
= dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + 
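+        # EvaluateChatHook samples from the model every evaluation_freq
+        # iterations, formatting each evaluation_inputs prompt with the
+        # system string and prompt_template given below.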
system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_alpaca_enzh_oasst1_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_alpaca_enzh_oasst1_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..409650951deb2385d8a4bc28f77f0ae37c203548
--- /dev/null
+++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_alpaca_enzh_oasst1_e3.py
@@ -0,0 +1,244 @@
+# Copyright (c) OpenMMLab. All rights reserved.
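+# [Editor's note, not part of the upstream config] This file mirrors
+# baichuan_13b_chat_qlora_alpaca_enzh_e3.py but concatenates a third data
+# source, timdettmers/openassistant-guanaco, with the two Alpaca sets via
+# ConcatDataset (PART 3). Like its siblings, it implements QLoRA: the base
+# model is loaded in 4-bit NF4 through BitsAndBytesConfig and only the
+# LoRA adapter (r=64, lora_alpha=16) is trained.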
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + oasst1_map_fn, template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Chat' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +oasst1_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.baichuan_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + 
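+# [Editor's note] As the flag names suggest, pack_to_max_length=True packs
+# several tokenized samples into a single max_length-token sequence
+# (shuffled first via shuffle_before_pack) so little compute is wasted on
+# padding. The same recipe is repeated for each dataset below.
+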
+alpaca_zh = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=alpaca_zh_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=alpaca_zh_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+oasst1 = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=oasst1_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=oasst1_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataset = dict(
+    type=ConcatDataset, datasets=[alpaca_en, alpaca_zh, oasst1])
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_alpaca_zh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..4782566fbf025fa087d5b1131d2be3b3232b2ba3 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_alpaca_zh_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_zh_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Chat' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +prompt_template = PROMPT_TEMPLATE.baichuan_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + 
pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                    PART 3  Dataset & Dataloader                     #
+#######################################################################
+alpaca_zh = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=alpaca_zh_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=alpaca_zh_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=alpaca_zh,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_arxiv_gentitle_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_arxiv_gentitle_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..24ed86b8c4378c8510282930e3527c26d89357d7 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_arxiv_gentitle_e3.py @@ -0,0 +1,247 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import arxiv_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Chat' +use_varlen_attn = False + +# Data +# 1. Download data from https://kaggle.com/datasets/Cornell-University/arxiv +# 2. Process data by `xtuner preprocess arxiv ${DOWNLOADED_DATA} ./data/arxiv_data.json [optional arguments]` # noqa: E501 +data_path = './data/arxiv_data.json' +prompt_template = PROMPT_TEMPLATE.baichuan_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.arxiv_gentile +evaluation_inputs = [ + ('We present InternLM, a multilingual foundational language ' + 'model with 104B parameters. InternLM is pre-trained on a large ' + 'corpora with 1.6T tokens with a multi-phase progressive ' + 'process, and then fine-tuned to align with human preferences. ' + 'We also developed a training system called Uniscale-LLM for ' + 'efficient large language model training. 
The evaluation on a ' + 'number of benchmarks shows that InternLM achieves ' + 'state-of-the-art performance in multiple aspects, including ' + 'knowledge understanding, reading comprehension, mathematics, ' + 'and coding. With such well-rounded capabilities, InternLM ' + 'achieves outstanding performances on comprehensive exams, ' + 'including MMLU, AGIEval, C-Eval and GAOKAO-Bench, without ' + 'resorting to external tools. On these benchmarks, InternLM ' + 'not only significantly outperforms open-source models, but ' + 'also obtains superior performance compared to ChatGPT. Also, ' + 'InternLM demonstrates excellent capability of understanding ' + 'Chinese language and Chinese culture, which makes it a ' + 'suitable foundation model to support Chinese-oriented language ' + 'applications. This manuscript gives a detailed study of ' + 'our results, with benchmarks and examples across a diverse ' + 'set of knowledge domains and tasks.'), + ('In this work, we develop and release Llama 2, a collection of ' + 'pretrained and fine-tuned large language models (LLMs) ranging ' + 'in scale from 7 billion to 70 billion parameters.\nOur ' + 'fine-tuned LLMs, called LLAMA 2-CHAT, are optimized for ' + 'dialogue use cases. Our models outperform open-source chat ' + 'models on most benchmarks we tested, and based on our human ' + 'evaluations for helpfulness and safety, may be a suitable ' + 'substitute for closedsource models. We provide a detailed ' + 'description of our approach to fine-tuning and safety ' + 'improvements of LLAMA 2-CHAT in order to enable the community ' + 'to build on our work and contribute to the responsible ' + 'development of LLMs.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=arxiv_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + 
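+# [Editor's note] The effective batch size per optimizer step is
+# batch_size * accumulative_counts * world_size; with the PART 1 settings
+# that is 1 * 16 = 16 samples per GPU, e.g. 128 in total on an 8-GPU node.
+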
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_code_alpaca_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_code_alpaca_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..2847642209508d302cc5a9cfa614266f28b99048
--- /dev/null
+++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_code_alpaca_e3.py
@@ -0,0 +1,216 @@
+# Copyright (c) OpenMMLab. All rights reserved.
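+# [Editor's note, not part of the upstream config] This variant fine-tunes
+# on HuggingFaceH4/CodeAlpaca_20K with the `coder` system template and runs
+# the chat evaluation every 100 iterations rather than the 500 used by the
+# general-chat configs in this directory.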
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import code_alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Chat' +use_varlen_attn = False + +# Data +data_path = 'HuggingFaceH4/CodeAlpaca_20K' +prompt_template = PROMPT_TEMPLATE.baichuan_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 100 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=code_alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + 
pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_colorist_e5.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_colorist_e5.py new file mode 100644 index 0000000000000000000000000000000000000000..a5efef730910d8eacfbdec5aed466485caebdbf5 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_colorist_e5.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import colors_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Chat' +use_varlen_attn = False + +# Data +data_path = 'burkelibbey/colors' +prompt_template = PROMPT_TEMPLATE.baichuan_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 5 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 200 +SYSTEM = SYSTEM_TEMPLATE.colorist +evaluation_inputs = [ + '请给我一个像天空一样清澈透明的蓝色。', 'Please give me a clear blue like the sky.' 
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=colors_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. 
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_lawyer_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_lawyer_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..0aef5aa4275e1536ae53b34fd38a98cbb3578803
--- /dev/null
+++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_lawyer_e3.py
@@ -0,0 +1,236 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import ConcatDataset, process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import (crime_kg_assitant_map_fn,
+                                    law_reference_map_fn,
+                                    template_map_fn_factory)
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Chat'
+use_varlen_attn = False
+
+# Data
+# download data from https://github.com/LiuHC0428/LAW-GPT
+crime_kg_assitant_path = './data/CrimeKgAssitant清洗后_52k.json'
+law_reference_data_path = './data/训练数据_带法律依据_92k.json'
+prompt_template = PROMPT_TEMPLATE.baichuan_chat
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = SYSTEM_TEMPLATE.lawyer
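+# [Editor's note] Unlike the Hub datasets in the sibling configs, the two
+# JSON files above must be fetched manually from the LAW-GPT repository
+# linked in the comment; PART 3 then loads them with
+# load_dataset(path='json', data_files=...).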
+evaluation_inputs = ['请问离婚需要准备什么材料?', '销售鳄鱼皮包违法吗?'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +crime_kg_assitant = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=crime_kg_assitant_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=crime_kg_assitant_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +law_reference_data = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=law_reference_data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=law_reference_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[crime_kg_assitant, law_reference_data]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + 
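+# [Editor's note] The schedule above is linear warmup followed by cosine
+# decay: warmup spans warmup_ratio * max_epochs = 0.03 * 3 = 0.09 epochs
+# (converted to iterations), ramping the learning rate from lr * 1e-5 up
+# to lr = 2e-4, after which CosineAnnealingLR anneals it to eta_min = 0.
+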
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_medical_e1.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_medical_e1.py
new file mode 100644
index 0000000000000000000000000000000000000000..8bba8361372ca808e7f16a976b9da9e4089afe44
--- /dev/null
+++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_medical_e1.py
@@ -0,0 +1,214 @@
+# Copyright (c) OpenMMLab. All rights reserved.
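+# [Editor's note, not part of the upstream config] This variant trains for
+# a single epoch (max_epochs = 1) on the `finetune` subset of
+# shibing624/medical, selected via load_dataset(path=data_path,
+# name=data_config_name) in PART 3.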
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import medical_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Chat'
+use_varlen_attn = False
+
+# Data
+data_path = 'shibing624/medical'
+data_config_name = 'finetune'
+prompt_template = PROMPT_TEMPLATE.baichuan_chat
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 1
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = SYSTEM_TEMPLATE.medical
+evaluation_inputs = [
+    '我有家族遗传性的过敏,请问可以献血吗?', '我爷爷有高血压,请问他可以喝咖啡吗?',
+    '我女儿今年3岁了,从昨天晚上九点开始腹泻,到现在已经八个小时了,请问应该怎么办?'
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path, name=data_config_name), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=medical_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. 
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_oasst1_512_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_oasst1_512_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..6d32a6df8ad1469a2b003b307625f8fff3de7b79
--- /dev/null
+++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_oasst1_512_e3.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Chat'
+use_varlen_attn = False
+
+# Data
+data_path = 'timdettmers/openassistant-guanaco'
+prompt_template = PROMPT_TEMPLATE.baichuan_chat
+max_length = 512
+pack_to_max_length = False
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = ''
+evaluation_inputs = [
+    '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai'
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. 
+ logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_oasst1_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..7d7de0d52b217acab6859f11155bb33cdbb6fdf6 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_oasst1_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Chat' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.baichuan_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + 
type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_open_platypus_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_open_platypus_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..16146cb251d54eb138e0beedc7508b879bfee7bb --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_open_platypus_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Chat' +use_varlen_attn = False + +# Data +data_path = 'garage-bAInd/Open-Platypus' +prompt_template = PROMPT_TEMPLATE.baichuan_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model 
= dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_openorca_e1.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_openorca_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..b39f1f47ae6edcb861146376bf15540f5c66c140 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_openorca_e1.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openorca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Chat' +use_varlen_attn = False + +# Data +data_path = 'Open-Orca/OpenOrca' +prompt_template = PROMPT_TEMPLATE.baichuan_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 5000 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + 
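+    # SupervisedFinetune wraps the 4-bit-quantized base LLM with the LoRA
+    # adapters defined below, so only the adapter weights are trained (QLoRA).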
use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openorca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_sql_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_sql_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..addcb97370fddd28ad24e997b33bd9507843b8fe --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_sql_e3.py @@ -0,0 +1,216 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import sql_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Chat' +use_varlen_attn = False + +# Data +data_path = 'b-mc2/sql-create-context' +prompt_template = PROMPT_TEMPLATE.baichuan_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.sql +evaluation_inputs = [ + ('CREATE TABLE station (name VARCHAR, lat VARCHAR, city VARCHAR)\n' + 'Find the name, latitude, and city of stations with latitude ' + 'above 50.'), + ('CREATE TABLE weather (zip_code VARCHAR, mean_visibility_miles ' + 'INTEGER)\n找到mean_visibility_miles最大的zip_code。') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model 
= dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=sql_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_tiny_codes_e1.py b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_tiny_codes_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..7b8cb8bf04747ab3e4a36942bbe4e93e3e19e3c1 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_13b_chat/baichuan_13b_chat_qlora_tiny_codes_e1.py @@ -0,0 +1,216 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import template_map_fn_factory, tiny_codes_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Chat' +use_varlen_attn = False + +# Data +data_path = 'nampdn-ai/tiny-codes' +prompt_template = PROMPT_TEMPLATE.baichuan_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # 
+####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=tiny_codes_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. 
+ logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..600ba93a85c8b6dc80d0d47a0b5cdd2394e66ab6 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_alpaca_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-7B' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, 
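+    # Note: dict(type=...) is mmengine's lazy-config style; the runner builds
+    # the actual object (here via AutoTokenizer.from_pretrained) at runtime.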
+ pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_alpaca_enzh_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_alpaca_enzh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..eb0d7118ee90810b89d2d30c88f9e5f2f256fb2f --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_alpaca_enzh_e3.py @@ -0,0 +1,229 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-7B' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, 
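+    # trust_remote_code=True is required because Baichuan ships custom
+    # modeling and tokenization code on the Hugging Face Hub.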
+ padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] 
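+# This hook is only needed when use_varlen_attn=True: it forwards the
+# variable-length attention arguments (e.g. cumulative sequence lengths)
+# to the model through mmengine's MessageHub.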
+
+# configure default hooks
+default_hooks = dict(
+ # record the time of every iteration.
+ timer=dict(type=IterTimerHook),
+ # print log every 10 iterations.
+ logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+ # enable the parameter scheduler.
+ param_scheduler=dict(type=ParamSchedulerHook),
+ # save checkpoint per `save_steps`.
+ checkpoint=dict(
+ type=CheckpointHook,
+ by_epoch=False,
+ interval=save_steps,
+ max_keep_ckpts=save_total_limit),
+ # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+ # whether to enable cudnn benchmark
+ cudnn_benchmark=False,
+ # set multi-process parameters
+ mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+ # set distributed parameters
+ dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Default to a random seed with `deterministic` disabled
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_alpaca_enzh_oasst1_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_alpaca_enzh_oasst1_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..7593b3ad5fc9b748f9c4f7c0477161314af5ef00
--- /dev/null
+++ b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_alpaca_enzh_oasst1_e3.py
@@ -0,0 +1,244 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+ LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+ BitsAndBytesConfig)
+
+from xtuner.dataset import ConcatDataset, process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn,
+ oasst1_map_fn, template_map_fn_factory)
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+ VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+# PART 1 Settings #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'baichuan-inc/Baichuan-7B'
+use_varlen_attn = False
+
+# Data
+alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese'
+alpaca_en_path = 'tatsu-lab/alpaca'
+oasst1_path = 'timdettmers/openassistant-guanaco'
+prompt_template = PROMPT_TEMPLATE.default
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1 # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1 # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate generation performance during training
+evaluation_freq = 500
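+# evaluation_freq is measured in training iterations: EvaluateChatHook
+# (configured in PART 5) generates sample replies every 500 iterations.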
+SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +oasst1 = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=oasst1_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[alpaca_en, alpaca_zh, oasst1]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * 
max_epochs,
+ convert_to_iter_based=True),
+ dict(
+ type=CosineAnnealingLR,
+ eta_min=0.0,
+ by_epoch=True,
+ begin=warmup_ratio * max_epochs,
+ end=max_epochs,
+ convert_to_iter_based=True)
+]
+
+# train, val, test settings
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+# PART 5 Runtime #
+#######################################################################
+# Periodically log sample dialogues during training (optional)
+custom_hooks = [
+ dict(type=DatasetInfoHook, tokenizer=tokenizer),
+ dict(
+ type=EvaluateChatHook,
+ tokenizer=tokenizer,
+ every_n_iters=evaluation_freq,
+ evaluation_inputs=evaluation_inputs,
+ system=SYSTEM,
+ prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+ custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+ # record the time of every iteration.
+ timer=dict(type=IterTimerHook),
+ # print log every 10 iterations.
+ logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+ # enable the parameter scheduler.
+ param_scheduler=dict(type=ParamSchedulerHook),
+ # save checkpoint per `save_steps`.
+ checkpoint=dict(
+ type=CheckpointHook,
+ by_epoch=False,
+ interval=save_steps,
+ max_keep_ckpts=save_total_limit),
+ # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+ # whether to enable cudnn benchmark
+ cudnn_benchmark=False,
+ # set multi-process parameters
+ mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+ # set distributed parameters
+ dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Default to a random seed with `deterministic` disabled
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_alpaca_zh_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..029a454e303d40ffd36dd9dee47b75572d236623
--- /dev/null
+++ b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_alpaca_zh_e3.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
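+# QLoRA fine-tuning config: Baichuan-7B on silk-road/alpaca-data-gpt4-chinese
+# for 3 epochs; it differs from its sibling configs only in dataset path and
+# dataset_map_fn.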
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_zh_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-7B' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_zh, + sampler=dict(type=DefaultSampler, 
shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_arxiv_gentitle_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_arxiv_gentitle_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..1d04baf2139d8d360d77814d4ee17044964304bc --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_arxiv_gentitle_e3.py @@ -0,0 +1,247 @@ +# Copyright (c) OpenMMLab. All rights reserved.
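+# Note: this config needs the Kaggle arXiv metadata dump downloaded and converted via `xtuner preprocess arxiv ...` before it can run (see the data comments below). `SYSTEM_TEMPLATE.arxiv_gentile` used in PART 1 is the key as spelled in the vendored xtuner.utils, even though the config file itself is named `arxiv_gentitle`.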
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import arxiv_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-7B' +use_varlen_attn = False + +# Data +# 1. Download data from https://kaggle.com/datasets/Cornell-University/arxiv +# 2. Process data by `xtuner preprocess arxiv ${DOWNLOADED_DATA} ./data/arxiv_data.json [optional arguments]` # noqa: E501 +data_path = './data/arxiv_data.json' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.arxiv_gentile +evaluation_inputs = [ + ('We present InternLM, a multilingual foundational language ' + 'model with 104B parameters. InternLM is pre-trained on a large ' + 'corpora with 1.6T tokens with a multi-phase progressive ' + 'process, and then fine-tuned to align with human preferences. ' + 'We also developed a training system called Uniscale-LLM for ' + 'efficient large language model training. The evaluation on a ' + 'number of benchmarks shows that InternLM achieves ' + 'state-of-the-art performance in multiple aspects, including ' + 'knowledge understanding, reading comprehension, mathematics, ' + 'and coding. With such well-rounded capabilities, InternLM ' + 'achieves outstanding performances on comprehensive exams, ' + 'including MMLU, AGIEval, C-Eval and GAOKAO-Bench, without ' + 'resorting to external tools. On these benchmarks, InternLM ' + 'not only significantly outperforms open-source models, but ' + 'also obtains superior performance compared to ChatGPT. Also, ' + 'InternLM demonstrates excellent capability of understanding ' + 'Chinese language and Chinese culture, which makes it a ' + 'suitable foundation model to support Chinese-oriented language ' + 'applications. This manuscript gives a detailed study of ' + 'our results, with benchmarks and examples across a diverse ' + 'set of knowledge domains and tasks.'), + ('In this work, we develop and release Llama 2, a collection of ' + 'pretrained and fine-tuned large language models (LLMs) ranging ' + 'in scale from 7 billion to 70 billion parameters.\nOur ' + 'fine-tuned LLMs, called LLAMA 2-CHAT, are optimized for ' + 'dialogue use cases. 
Our models outperform open-source chat ' + 'models on most benchmarks we tested, and based on our human ' + 'evaluations for helpfulness and safety, may be a suitable ' + 'substitute for closedsource models. We provide a detailed ' + 'description of our approach to fine-tuning and safety ' + 'improvements of LLAMA 2-CHAT in order to enable the community ' + 'to build on our work and contribute to the responsible ' + 'development of LLMs.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=arxiv_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks 
= [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_code_alpaca_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_code_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..d4bcdcad4616a355beabbfd7240ece14c8f4aa78 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_code_alpaca_e3.py @@ -0,0 +1,216 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
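+# Unlike the arxiv config above, this one pulls `HuggingFaceH4/CodeAlpaca_20K` straight from the Hugging Face Hub via `load_dataset`, so no local preprocessing step is required; it also logs sample generations every 100 iterations (`evaluation_freq = 100`) rather than the 500 used by the other configs in this directory.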
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import code_alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-7B' +use_varlen_attn = False + +# Data +data_path = 'HuggingFaceH4/CodeAlpaca_20K' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 100 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=code_alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + 
pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_colorist_e5.py b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_colorist_e5.py new file mode 100644 index 0000000000000000000000000000000000000000..07720a2617d1e64474c700510e0e38da94004a64 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_colorist_e5.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import colors_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-7B' +use_varlen_attn = False + +# Data +data_path = 'burkelibbey/colors' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 5 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 200 +SYSTEM = SYSTEM_TEMPLATE.colorist +evaluation_inputs = [ + '请给我一个像天空一样清澈透明的蓝色。', 'Please give me a clear blue like the sky.' 
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=colors_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. 
+ timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_lawyer_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_lawyer_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..8e9c295aefe2cc021a859a0a17153556c6ed948e --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_lawyer_e3.py @@ -0,0 +1,236 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (crime_kg_assitant_map_fn, + law_reference_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-7B' +use_varlen_attn = False + +# Data +# download data from https://github.com/LiuHC0428/LAW-GPT +crime_kg_assitant_path = './data/CrimeKgAssitant清洗后_52k.json' +law_reference_data_path = './data/训练数据_带法律依据_92k.json' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.lawyer +evaluation_inputs = ['请问离婚需要准备什么材料?', '销售鳄鱼皮包违法吗?'] + 
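+# (English gloss of the evaluation prompts above: 'What documents do I need to prepare for a divorce?' and 'Is it illegal to sell crocodile-leather bags?') +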
+####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +crime_kg_assitant = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=crime_kg_assitant_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=crime_kg_assitant_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +law_reference_data = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=law_reference_data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=law_reference_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[crime_kg_assitant, law_reference_data]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # 
+####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_medical_e1.py b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_medical_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..17bfa1fc3390e6a22340f7b17874751a0c103170 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_medical_e1.py @@ -0,0 +1,214 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
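+# This config selects the 'finetune' subset of `shibing624/medical` by passing `name=data_config_name` to `load_dataset` in PART 3, i.e. effectively `load_dataset('shibing624/medical', name='finetune')`; the Chinese medical-QA prompts in PART 1 (e.g. whether someone with hereditary allergies may donate blood) drive the periodic generation checks.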
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import medical_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-7B' +use_varlen_attn = False + +# Data +data_path = 'shibing624/medical' +data_config_name = 'finetune' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.medical +evaluation_inputs = [ + '我有家族遗传性的过敏,请问可以可以献血吗?', '我爷爷有高血压,请问他可以喝咖啡吗?', + '我女儿今年3岁了,从昨天晚上九点开始腹泻,到现在已经八个小时了,请问应该怎么办?' 
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path, name=data_config_name), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=medical_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. 
+ timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_moss_sft_all_e1.py b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_moss_sft_all_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..11240e0c1081680fd16fd9273934c790a9eafd2d --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_moss_sft_all_e1.py @@ -0,0 +1,217 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, MOSSSFTDataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-7B' +bot_name = 'Baichuan' +use_varlen_attn = False + +# Data +# Download data from https://huggingface.co/datasets/fnlp/moss-003-sft-data +moss_sft_no_plugins_path = './data/moss-003-sft-no-tools.jsonl' +moss_sft_plugins_path = './data/conversations_with_tools_with_inner_instruction_no_text2image_train_all_random_meta0.5_0.1_0.01_moss_0709.jsonl' # noqa: E501 +max_length = 2048 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +SYSTEM = SYSTEM_TEMPLATE.moss_sft +prompt_template = PROMPT_TEMPLATE.moss_sft +evaluation_freq = 500 +evaluation_inputs = [ + '一个球体的表面积是384平方厘米,求它的体积。', '今有鸡兔同笼,上有二十头,下有六十二足, 
问鸡兔各几何?', '介绍一下比尔盖茨' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +moss_sft_no_plugins = dict( + type=MOSSSFTDataset, + data_file=moss_sft_no_plugins_path, + bot_name=bot_name, + tokenizer=tokenizer, + max_length=max_length) + +moss_sft_plugins = dict( + type=MOSSSFTDataset, + data_file=moss_sft_plugins_path, + bot_name=bot_name, + tokenizer=tokenizer, + max_length=max_length) + +train_dataset = dict( + type=ConcatDataset, datasets=[moss_sft_no_plugins, moss_sft_plugins]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + stop_words=[''], + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. 
+ timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_moss_sft_all_e2_gpu8.py b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_moss_sft_all_e2_gpu8.py new file mode 100644 index 0000000000000000000000000000000000000000..b38b1f8efc7c5cbcab0172340aa4b92fbddd57c8 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_moss_sft_all_e2_gpu8.py @@ -0,0 +1,217 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, MOSSSFTDataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-7B' +bot_name = 'Baichuan' +use_varlen_attn = False + +# Data +# Download data from https://huggingface.co/datasets/fnlp/moss-003-sft-data +moss_sft_no_plugins_path = './data/moss-003-sft-no-tools.jsonl' +moss_sft_plugins_path = './data/conversations_with_tools_with_inner_instruction_no_text2image_train_all_random_meta0.5_0.1_0.01_moss_0709.jsonl' # noqa: E501 +max_length = 2048 + +# Scheduler & Optimizer +batch_size = 8 # per_device +accumulative_counts = 1 +dataloader_num_workers = 2 +max_epochs = 2 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +SYSTEM = SYSTEM_TEMPLATE.moss_sft +prompt_template = PROMPT_TEMPLATE.moss_sft +evaluation_freq = 500 +evaluation_inputs = [ + '一个球体的表面积是384平方厘米,求它的体积。', 
'今有鸡兔同笼,上有二十头,下有六十二足, 问鸡兔各几何?', '介绍一下比尔盖茨' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +moss_sft_no_plugins = dict( + type=MOSSSFTDataset, + data_file=moss_sft_no_plugins_path, + bot_name=bot_name, + tokenizer=tokenizer, + max_length=max_length) + +moss_sft_plugins = dict( + type=MOSSSFTDataset, + data_file=moss_sft_plugins_path, + bot_name=bot_name, + tokenizer=tokenizer, + max_length=max_length) + +train_dataset = dict( + type=ConcatDataset, datasets=[moss_sft_no_plugins, moss_sft_plugins]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + stop_words=[''], + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of 
every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_moss_sft_plugins_e1.py b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_moss_sft_plugins_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..8f63ca088a3cdada10af028db8c0ec51bad3cc61 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_moss_sft_plugins_e1.py @@ -0,0 +1,206 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import MOSSSFTDataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-7B' +bot_name = 'Baichuan' +use_varlen_attn = False + +# Data +# Download data from https://huggingface.co/datasets/fnlp/moss-003-sft-data +moss_sft_plugins_path = './data/conversations_with_tools_with_inner_instruction_no_text2image_train_all_random_meta0.5_0.1_0.01_moss_0709.jsonl' # noqa: E501 +max_length = 2048 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +SYSTEM = SYSTEM_TEMPLATE.moss_sft +prompt_template = PROMPT_TEMPLATE.moss_sft +evaluation_freq = 500 +evaluation_inputs = [ + '一个球体的表面积是384平方厘米,求它的体积。', '今有鸡兔同笼,上有二十头,下有六十二足, 问鸡兔各几何?', '介绍一下比尔盖茨' +] + 
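+# Note on the scheduler settings above: with batch_size = 1 and
+# accumulative_counts = 16, gradients are accumulated over 16 forward/backward
+# passes, so each optimizer step sees an effective batch of 1 x 16 = 16
+# samples per GPU.
+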
+####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=MOSSSFTDataset, + data_file=moss_sft_plugins_path, + bot_name=bot_name, + tokenizer=tokenizer, + max_length=max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + stop_words=[''], + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
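+    # With by_epoch=False, `interval` is counted in iterations, so a
+    # checkpoint is written every `save_steps` (500) iterations and only
+    # the `save_total_limit` (2) most recent ones are kept on disk.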
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_oasst1_512_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_oasst1_512_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..35ca978b664501cf73ff0231323a7525de8e2092 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_oasst1_512_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-7B' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.default +max_length = 512 +pack_to_max_length = False + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + 
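+        # The frozen base model is loaded through the standard
+        # `transformers` API; the 4-bit `quantization_config` below is what
+        # makes this a QLoRA setup, with trainable LoRA adapters on top.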
type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
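+    # DistSamplerSeedHook sets the sampler epoch before each training epoch,
+    # so shuffling differs across epochs but stays consistent across ranks.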
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_oasst1_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..4a46dc866750f9879befd0ba826f64a9dcc45cde --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_oasst1_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-7B' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + 
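+        # bitsandbytes NF4 recipe from the QLoRA paper: 4-bit NormalFloat
+        # storage, double quantization of the quantization constants, and
+        # fp16 compute for the matrix multiplications.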
quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_open_platypus_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_open_platypus_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..ef125a0632551cd71a627070fdac5f6572a63c03 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_open_platypus_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-7B' +use_varlen_attn = False + +# Data +data_path = 'garage-bAInd/Open-Platypus' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + 
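+        # Baichuan-7B ships custom modeling code on the Hugging Face Hub,
+        # hence trust_remote_code=True for both the tokenizer and the model.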
trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_openorca_e1.py b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_openorca_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..f26937326c63ae9c6b369db7ba49f31e533bb345 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_openorca_e1.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openorca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-7B' +use_varlen_attn = False + +# Data +data_path = 'Open-Orca/OpenOrca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 5000 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + 
quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openorca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_sql_e3.py b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_sql_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..84e79893d300098e06d91fdb77c20454d510750d --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_sql_e3.py @@ -0,0 +1,216 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import sql_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-7B' +use_varlen_attn = False + +# Data +data_path = 'b-mc2/sql-create-context' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.sql +evaluation_inputs = [ + ('CREATE TABLE station (name VARCHAR, lat VARCHAR, city VARCHAR)\n' + 'Find the name, latitude, and city of stations with latitude ' + 'above 50.'), + ('CREATE TABLE weather (zip_code VARCHAR, mean_visibility_miles ' + 'INTEGER)\n找到mean_visibility_miles最大的zip_code。') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + 
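+    # SupervisedFinetune wraps the quantized base LLM and injects the LoRA
+    # adapter configured in `lora`, so only the adapter weights are updated
+    # during training.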
use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=sql_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_tiny_codes_e1.py b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_tiny_codes_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..b9909df6d6f315d9db5ceb10c9ad60869cd10d73 --- /dev/null +++ b/data/xtuner/xtuner/configs/baichuan/baichuan_7b/baichuan_7b_qlora_tiny_codes_e1.py @@ -0,0 +1,216 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import template_map_fn_factory, tiny_codes_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-7B' +use_varlen_attn = False + +# Data +data_path = 'nampdn-ai/tiny-codes' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + 
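+    # xtuner configs are lazy mmengine dicts: `type` holds the class or
+    # factory itself and the runner instantiates it with the remaining keys
+    # as keyword arguments.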
type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=tiny_codes_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..93c5c8d822feecaff55deae3c9ad866e4b8f0749 --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_alpaca_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm2-6b' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.chatglm2 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + 
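+        # Same QLoRA recipe as the Baichuan configs above; across model
+        # families only the base checkpoint, the prompt template
+        # (PROMPT_TEMPLATE.chatglm2 here) and the tokenizer padding side
+        # ('left' for ChatGLM2) change.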
type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_alpaca_enzh_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_alpaca_enzh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..b9e634ba972d91f6182e799cf7699df9d25c903b --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_alpaca_enzh_e3.py @@ -0,0 +1,229 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm2-6b' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.chatglm2 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + 
pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. 
+ logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_alpaca_enzh_oasst1_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_alpaca_enzh_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..e434c1c9c08758f9a3207531905897f97a7c1464 --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_alpaca_enzh_oasst1_e3.py @@ -0,0 +1,244 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + oasst1_map_fn, template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm2-6b' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +oasst1_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.chatglm2 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] +
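+# With batch_size=1 (per device) and accumulative_counts=16, one optimizer +# step aggregates gradients from 16 packed sequences per GPU; the global +# batch size scales with the GPU count (e.g. 1 x 16 x 8 = 128 on 8 GPUs).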
+####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +oasst1 = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=oasst1_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[alpaca_en, alpaca_zh, oasst1]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio 
* max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_alpaca_zh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..e9018a9d689a1d218a0f309aacc46b8fad02e6a5 --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_alpaca_zh_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved.
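+# This config QLoRA-fine-tunes ChatGLM2-6B on Chinese Alpaca-GPT4 data only. +# A typical launch (assuming an xtuner installation) is +# `xtuner train chatglm2_6b_qlora_alpaca_zh_e3`; the saved .pth adapter can +# later be converted with `xtuner convert pth_to_hf ${CONFIG} ${PTH} ${SAVE_PATH}`.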
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_zh_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm2-6b' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +prompt_template = PROMPT_TEMPLATE.chatglm2 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_zh, + sampler=dict(type=DefaultSampler, 
shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_arxiv_gentitle_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_arxiv_gentitle_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..2b85a9adc429cec2d78182000f2f914791bb2714 --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_arxiv_gentitle_e3.py @@ -0,0 +1,247 @@ +# Copyright (c) OpenMMLab. All rights reserved.
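+# Title-generation task: arxiv_map_fn turns each arXiv abstract into an +# instruction sample whose target is the paper title, which is why the +# evaluation inputs below are full abstracts rather than questions.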
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import arxiv_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm2-6b' +use_varlen_attn = False + +# Data +# 1. Download data from https://kaggle.com/datasets/Cornell-University/arxiv +# 2. Process data by `xtuner preprocess arxiv ${DOWNLOADED_DATA} ./data/arxiv_data.json [optional arguments]` # noqa: E501 +data_path = './data/arxiv_data.json' +prompt_template = PROMPT_TEMPLATE.chatglm2 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.arxiv_gentile +evaluation_inputs = [ + ('We present InternLM, a multilingual foundational language ' + 'model with 104B parameters. InternLM is pre-trained on a large ' + 'corpora with 1.6T tokens with a multi-phase progressive ' + 'process, and then fine-tuned to align with human preferences. ' + 'We also developed a training system called Uniscale-LLM for ' + 'efficient large language model training. The evaluation on a ' + 'number of benchmarks shows that InternLM achieves ' + 'state-of-the-art performance in multiple aspects, including ' + 'knowledge understanding, reading comprehension, mathematics, ' + 'and coding. With such well-rounded capabilities, InternLM ' + 'achieves outstanding performances on comprehensive exams, ' + 'including MMLU, AGIEval, C-Eval and GAOKAO-Bench, without ' + 'resorting to external tools. On these benchmarks, InternLM ' + 'not only significantly outperforms open-source models, but ' + 'also obtains superior performance compared to ChatGPT. Also, ' + 'InternLM demonstrates excellent capability of understanding ' + 'Chinese language and Chinese culture, which makes it a ' + 'suitable foundation model to support Chinese-oriented language ' + 'applications. This manuscript gives a detailed study of ' + 'our results, with benchmarks and examples across a diverse ' + 'set of knowledge domains and tasks.'), + ('In this work, we develop and release Llama 2, a collection of ' + 'pretrained and fine-tuned large language models (LLMs) ranging ' + 'in scale from 7 billion to 70 billion parameters.\nOur ' + 'fine-tuned LLMs, called LLAMA 2-CHAT, are optimized for ' + 'dialogue use cases. 
Our models outperform open-source chat ' + 'models on most benchmarks we tested, and based on our human ' + 'evaluations for helpfulness and safety, may be a suitable ' + 'substitute for closed-source models. We provide a detailed ' + 'description of our approach to fine-tuning and safety ' + 'improvements of LLAMA 2-CHAT in order to enable the community ' + 'to build on our work and contribute to the responsible ' + 'development of LLMs.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=arxiv_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks =
[ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_code_alpaca_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_code_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..4fddb95553e37fa18b7a2475349805e93f774cb6 --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_code_alpaca_e3.py @@ -0,0 +1,216 @@ +# Copyright (c) OpenMMLab. All rights reserved.
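+# Same QLoRA recipe as the sibling ChatGLM2-6B configs; only the data side +# changes: CodeAlpaca-20K with code_alpaca_map_fn, a coder system prompt, +# and code-generation prompts evaluated every 100 iterations.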
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import code_alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm2-6b' +use_varlen_attn = False + +# Data +data_path = 'HuggingFaceH4/CodeAlpaca_20K' +prompt_template = PROMPT_TEMPLATE.chatglm2 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 100 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=code_alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, 
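+    # pack_to_max_length=True concatenates several short samples into one +    # max_length-token sequence, trading sample boundaries for throughput.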
+ use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_colorist_e5.py b/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_colorist_e5.py new file mode 100644 index 0000000000000000000000000000000000000000..fc28979e7b359112e83de3c9a28b496b6250ed7e --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_colorist_e5.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved.
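+# Colorist task: burkelibbey/colors pairs natural-language color +# descriptions with hex codes; note that this recipe trains for five +# epochs (max_epochs=5) rather than the usual three.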
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import colors_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm2-6b' +use_varlen_attn = False + +# Data +data_path = 'burkelibbey/colors' +prompt_template = PROMPT_TEMPLATE.chatglm2 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 5 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 200 +SYSTEM = SYSTEM_TEMPLATE.colorist +evaluation_inputs = [ + '请给我一个像天空一样清澈透明的蓝色。', 'Please give me a clear blue like the sky.' 
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=colors_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. 
+ timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_lawyer_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_lawyer_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..ffa3cba3b9782c6554a59ac09bf4a6585a5427e6 --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_lawyer_e3.py @@ -0,0 +1,236 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (crime_kg_assitant_map_fn, + law_reference_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm2-6b' +use_varlen_attn = False + +# Data +# download data from https://github.com/LiuHC0428/LAW-GPT +crime_kg_assitant_path = './data/CrimeKgAssitant清洗后_52k.json' +law_reference_data_path = './data/训练数据_带法律依据_92k.json' +prompt_template = PROMPT_TEMPLATE.chatglm2 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.lawyer +evaluation_inputs = ['请问离婚需要准备什么材料?', '销售鳄鱼皮包违法吗?'] +
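+# (The evaluation prompts above roughly translate to "What documents do I +# need to prepare for a divorce?" and "Is it illegal to sell crocodile +# leather bags?") As in the sibling configs, the base model below is loaded +# in 4-bit NF4 with double quantization via BitsAndBytesConfig, and only +# the LoRA adapter (r=64, lora_alpha=16) is trained.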
+####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +crime_kg_assitant = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=crime_kg_assitant_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=crime_kg_assitant_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +law_reference_data = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=law_reference_data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=law_reference_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[crime_kg_assitant, law_reference_data]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # 
+####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_medical_e1.py b/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_medical_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..229688b3e28420c569d96c173e0f7d8d43d87b97 --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_medical_e1.py @@ -0,0 +1,214 @@ +# Copyright (c) OpenMMLab. All rights reserved.
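+# Medical QA fine-tune: the 'finetune' subset of shibing624/medical with +# medical_map_fn, trained for a single epoch (max_epochs=1).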
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import medical_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm2-6b' +use_varlen_attn = False + +# Data +data_path = 'shibing624/medical' +data_config_name = 'finetune' +prompt_template = PROMPT_TEMPLATE.chatglm2 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.medical +evaluation_inputs = [ + '我有家族遗传性的过敏,请问可以可以献血吗?', '我爷爷有高血压,请问他可以喝咖啡吗?', + '我女儿今年3岁了,从昨天晚上九点开始腹泻,到现在已经八个小时了,请问应该怎么办?' 
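+    # Rough English glosses: "I have hereditary allergies; may I donate +    # blood?", "My grandfather has high blood pressure; can he drink +    # coffee?", "My 3-year-old daughter has had diarrhea since 9 p.m. last +    # night, eight hours now; what should I do?"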
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path, name=data_config_name), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=medical_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. 
+ timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_oasst1_512_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_oasst1_512_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..71fc0948e31a4b9f42dc9ba3a5ea485031d5e3e9 --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_oasst1_512_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm2-6b' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.chatglm2 +max_length = 512 +pack_to_max_length = False + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer
= dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. 
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi-process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Use a random seed by default and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_oasst1_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_oasst1_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..ff784f9c6016edd45eba3a557c43852aa00d71e7
--- /dev/null
+++ b/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_oasst1_e3.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'THUDM/chatglm2-6b'
+use_varlen_attn = False
+
+# Data
+data_path = 'timdettmers/openassistant-guanaco'
+prompt_template = PROMPT_TEMPLATE.chatglm2
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during training
+evaluation_freq = 500
+SYSTEM = ''
+evaluation_inputs = [
+    '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai'
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='left')
+
+model = dict(
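+    # NOTE: this dict wires up the standard QLoRA recipe. The frozen base
+    # model is loaded in 4-bit NF4 through the BitsAndBytesConfig below, and
+    # the LoRA adapters declared under `lora` are the only trainable weights;
+    # with r=64 and lora_alpha=16, the adapter update is scaled by
+    # alpha/r = 0.25 (the usual PEFT LoRA convention, not something set in
+    # this file).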
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=data_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=oasst1_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_open_platypus_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_open_platypus_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..bad62ea40d9d4bfc0c8bf7366393b2dce5a308bd --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_open_platypus_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm2-6b' +use_varlen_attn = False + +# Data +data_path = 'garage-bAInd/Open-Platypus' +prompt_template = PROMPT_TEMPLATE.chatglm2 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + 
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=data_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=alpaca_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_openorca_e1.py b/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_openorca_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..7d3e4d169e43e766d26066ed3b8bf1a0b217dacd --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_openorca_e1.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openorca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm2-6b' +use_varlen_attn = False + +# Data +data_path = 'Open-Orca/OpenOrca' +prompt_template = PROMPT_TEMPLATE.chatglm2 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 5000 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + 
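+            # NOTE: 4-bit NF4 storage puts the ~6B frozen base weights at
+            # roughly 3 GB (0.5 bytes per parameter), and double quantization
+            # saves a further ~0.4 bits per parameter by also quantizing the
+            # quantization constants; matmuls still run in fp16 via
+            # `bnb_4bit_compute_dtype`. (Rough figures from the QLoRA paper,
+            # not measurements from this repo.)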
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=data_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=openorca_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_sql_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_sql_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..50c3b81b7c681c5c3c841f5af61820691ab395ab --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_sql_e3.py @@ -0,0 +1,216 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import sql_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm2-6b' +use_varlen_attn = False + +# Data +data_path = 'b-mc2/sql-create-context' +prompt_template = PROMPT_TEMPLATE.chatglm2 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.sql +evaluation_inputs = [ + ('CREATE TABLE station (name VARCHAR, lat VARCHAR, city VARCHAR)\n' + 'Find the name, latitude, and city of stations with latitude ' + 'above 50.'), + ('CREATE TABLE weather (zip_code VARCHAR, mean_visibility_miles ' + 'INTEGER)\n找到mean_visibility_miles最大的zip_code。') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + 
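+    # NOTE: `use_varlen_attn` is False here; when enabled it is intended to
+    # be combined with `pack_to_max_length=True`, and the
+    # `if use_varlen_attn:` block in PART 5 then appends
+    # VarlenAttnArgsToMessageHubHook so attention stays confined to each
+    # original sample inside a packed sequence.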
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=data_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=sql_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_tiny_codes_e1.py b/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_tiny_codes_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..3ee1cd3709fe66a2eddd93bd9e1b1bbd46e414b3 --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm2_6b/chatglm2_6b_qlora_tiny_codes_e1.py @@ -0,0 +1,216 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import template_map_fn_factory, tiny_codes_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm2-6b' +use_varlen_attn = False + +# Data +data_path = 'nampdn-ai/tiny-codes' +prompt_template = PROMPT_TEMPLATE.chatglm2 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='left') + +model = dict( + 
type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=tiny_codes_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
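+    # (with `by_epoch=False` the interval below is counted in iterations, so
+    # a checkpoint is written every `save_steps` = 500 steps and only the
+    # `save_total_limit` = 2 most recent ones are kept)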
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi-process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Use a random seed by default and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_alpaca_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..d51de1eee0efb964ed7a58d46552fbf6c8468b17
--- /dev/null
+++ b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_alpaca_e3.py
@@ -0,0 +1,213 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'THUDM/chatglm3-6b'
+use_varlen_attn = False
+
+# Data
+alpaca_en_path = 'tatsu-lab/alpaca'
+prompt_template = PROMPT_TEMPLATE.chatglm3
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during training
+evaluation_freq = 500
+SYSTEM = SYSTEM_TEMPLATE.alpaca
+evaluation_inputs = [
+    '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai'
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    encode_special_tokens=True,
+    padding_side='left')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+alpaca_en = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=alpaca_en_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=alpaca_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=alpaca_en,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi-process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Use a random seed by default and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_alpaca_enzh_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_alpaca_enzh_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..c11f16911e9ebb46042a11e6532a4ff1f88732af
--- /dev/null
+++ b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_alpaca_enzh_e3.py
@@ -0,0 +1,230 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import ConcatDataset, process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn,
+                                    template_map_fn_factory)
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'THUDM/chatglm3-6b'
+use_varlen_attn = False
+
+# Data
+alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese'
+alpaca_en_path = 'tatsu-lab/alpaca'
+prompt_template = PROMPT_TEMPLATE.chatglm3
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during training
+evaluation_freq = 500
+SYSTEM = SYSTEM_TEMPLATE.alpaca
+evaluation_inputs = [
+    '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai'
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    encode_special_tokens=True,
+    padding_side='left')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
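+        # NOTE: like every `dict(type=...)` in this file, this is a lazy,
+        # mmengine-style config; `AutoModelForCausalLM.from_pretrained` is
+        # not called at import time but when the runner builds the model at
+        # the start of training (a general xtuner/mmengine convention, not
+        # logic specific to this file).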
type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. 
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi-process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Use a random seed by default and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_alpaca_enzh_oasst1_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_alpaca_enzh_oasst1_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..9a2aec872df7e0ae3d681cef094df2f4dc686134
--- /dev/null
+++ b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_alpaca_enzh_oasst1_e3.py
@@ -0,0 +1,245 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import ConcatDataset, process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn,
+                                    oasst1_map_fn, template_map_fn_factory)
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'THUDM/chatglm3-6b'
+use_varlen_attn = False
+
+# Data
+alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese'
+alpaca_en_path = 'tatsu-lab/alpaca'
+oasst1_path = 'timdettmers/openassistant-guanaco'
+prompt_template = PROMPT_TEMPLATE.chatglm3
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during training
+evaluation_freq = 500
+SYSTEM = SYSTEM_TEMPLATE.alpaca
+evaluation_inputs = [
+    '请给我介绍五个上海的景点', 'Please tell me five scenic
spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +oasst1 = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=oasst1_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[alpaca_en, alpaca_zh, oasst1]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, 
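+        # NOTE: with warmup_ratio = 0.03 and max_epochs = 3, the LinearLR
+        # stage above warms the LR up from lr * 1e-5 over the first
+        # 0.03 * 3 = 0.09 epochs, after which this cosine stage decays it to
+        # eta_min = 0 by the end of training; `convert_to_iter_based=True`
+        # turns both epoch ranges into per-iteration steps.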
+ eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_alpaca_zh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..a7dbeb120c9a1bf71e87b7f0245974f040cd3b38 --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_alpaca_zh_e3.py @@ -0,0 +1,213 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
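+# QLoRA fine-tune of ChatGLM3-6B on the Chinese GPT-4 Alpaca data
+# (silk-road/alpaca-data-gpt4-chinese) for 3 epochs. Throughout these
+# configs, a nested dict with a `type` key is an mmengine-style lazy config:
+# `type` holds a class or factory and the remaining keys are its kwargs, so
+# the runner can build the object on demand. Roughly (a sketch of the idea,
+# not xtuner's actual builder, which also recurses into nested configs):
+#
+#     def build(cfg):
+#         kwargs = dict(cfg)
+#         factory = kwargs.pop('type')
+#         return factory(**kwargs)
+#
+# A config like this is normally launched via the xtuner CLI, e.g.
+# `xtuner train chatglm3_6b_qlora_alpaca_zh_e3.py`; exact flags may vary
+# across xtuner versions.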
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_zh_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm3-6b' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +prompt_template = PROMPT_TEMPLATE.chatglm3 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_zh, + 
sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_arxiv_gentitle_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_arxiv_gentitle_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..225834597be74e9ff70c66eadff1fcec20d1d6cf --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_arxiv_gentitle_e3.py @@ -0,0 +1,248 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
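+# QLoRA fine-tune of ChatGLM3-6B for arXiv title generation: given a paper
+# abstract, the model learns to propose a title (see the two abstracts used
+# as evaluation prompts below). Unlike the sibling configs, which pull
+# datasets from the Hugging Face Hub, this one reads a local JSON file
+# (./data/arxiv_data.json) produced by the `xtuner preprocess arxiv ...`
+# command quoted in the Data section, and loads it through
+# load_dataset('json', data_files=...).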
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import arxiv_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm3-6b' +use_varlen_attn = False + +# Data +# 1. Download data from https://kaggle.com/datasets/Cornell-University/arxiv +# 2. Process data by `xtuner preprocess arxiv ${DOWNLOADED_DATA} ./data/arxiv_data.json [optional arguments]` # noqa: E501 +data_path = './data/arxiv_data.json' +prompt_template = PROMPT_TEMPLATE.chatglm3 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.arxiv_gentile +evaluation_inputs = [ + ('We present InternLM, a multilingual foundational language ' + 'model with 104B parameters. InternLM is pre-trained on a large ' + 'corpora with 1.6T tokens with a multi-phase progressive ' + 'process, and then fine-tuned to align with human preferences. ' + 'We also developed a training system called Uniscale-LLM for ' + 'efficient large language model training. The evaluation on a ' + 'number of benchmarks shows that InternLM achieves ' + 'state-of-the-art performance in multiple aspects, including ' + 'knowledge understanding, reading comprehension, mathematics, ' + 'and coding. With such well-rounded capabilities, InternLM ' + 'achieves outstanding performances on comprehensive exams, ' + 'including MMLU, AGIEval, C-Eval and GAOKAO-Bench, without ' + 'resorting to external tools. On these benchmarks, InternLM ' + 'not only significantly outperforms open-source models, but ' + 'also obtains superior performance compared to ChatGPT. Also, ' + 'InternLM demonstrates excellent capability of understanding ' + 'Chinese language and Chinese culture, which makes it a ' + 'suitable foundation model to support Chinese-oriented language ' + 'applications. This manuscript gives a detailed study of ' + 'our results, with benchmarks and examples across a diverse ' + 'set of knowledge domains and tasks.'), + ('In this work, we develop and release Llama 2, a collection of ' + 'pretrained and fine-tuned large language models (LLMs) ranging ' + 'in scale from 7 billion to 70 billion parameters.\nOur ' + 'fine-tuned LLMs, called LLAMA 2-CHAT, are optimized for ' + 'dialogue use cases. 
Our models outperform open-source chat ' + 'models on most benchmarks we tested, and based on our human ' + 'evaluations for helpfulness and safety, may be a suitable ' + 'substitute for closedsource models. We provide a detailed ' + 'description of our approach to fine-tuning and safety ' + 'improvements of LLAMA 2-CHAT in order to enable the community ' + 'to build on our work and contribute to the responsible ' + 'development of LLMs.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=arxiv_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training 
process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_code_alpaca_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_code_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..a27934c25f88e0fdaf19c9f42f6737756794ae26 --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_code_alpaca_e3.py @@ -0,0 +1,217 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
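+# QLoRA fine-tune of ChatGLM3-6B on CodeAlpaca_20K for code generation.
+# With batch_size = 1 and accumulative_counts = 16, every optimizer step
+# consumes 1 x 16 = 16 packed sequences per device; since
+# pack_to_max_length=True packs samples up to max_length, that is roughly
+# 16 x 2048 = 32768 tokens per device per step. The accumulation itself is
+# handled by the AmpOptimWrapper configured in PART 4.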
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import code_alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm3-6b' +use_varlen_attn = False + +# Data +data_path = 'HuggingFaceH4/CodeAlpaca_20K' +prompt_template = PROMPT_TEMPLATE.chatglm3 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 100 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=code_alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + 
pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_colorist_e5.py b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_colorist_e5.py new file mode 100644 index 0000000000000000000000000000000000000000..c33039955e7f336cec19f2a4a411d186369d246e --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_colorist_e5.py @@ -0,0 +1,213 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import colors_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm3-6b' +use_varlen_attn = False + +# Data +data_path = 'burkelibbey/colors' +prompt_template = PROMPT_TEMPLATE.chatglm3 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 5 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 200 +SYSTEM = SYSTEM_TEMPLATE.colorist +evaluation_inputs = [ + '请给我一个像天空一样清澈透明的蓝色。', 'Please give me a clear blue like the sky.' 
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=colors_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. 
+ timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_lawyer_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_lawyer_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..ff14468d7ea3941e2a23abbb46e63fe9f9940db6 --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_lawyer_e3.py @@ -0,0 +1,237 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (crime_kg_assitant_map_fn, + law_reference_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm3-6b' +use_varlen_attn = False + +# Data +# download data from https://github.com/LiuHC0428/LAW-GPT +crime_kg_assitant_path = './data/CrimeKgAssitant清洗后_52k.json' +law_reference_data_path = './data/训练数据_带法律依据_92k.json' +prompt_template = PROMPT_TEMPLATE.chatglm3 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.lawyer +evaluation_inputs = ['请问离婚需要准备什么材料?', '销售鳄鱼皮包违法吗?'] + 
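+# The two law corpora configured above (~52k and ~92k samples, going by the
+# file names) are loaded separately in PART 3 below and merged with
+# ConcatDataset, so one epoch covers the combined data. The schedule in
+# PART 4 warms the learning rate up linearly over the first
+# warmup_ratio * max_epochs = 0.03 * 3 = 0.09 epochs (converted to
+# iterations), then cosine-anneals it down to eta_min = 0.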
+####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +crime_kg_assitant = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=crime_kg_assitant_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=crime_kg_assitant_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +law_reference_data = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=law_reference_data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=law_reference_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[crime_kg_assitant, law_reference_data]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# 
PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_medical_e1.py b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_medical_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..9cc4dd06c2480fd722733d300606fcb861a89d85 --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_medical_e1.py @@ -0,0 +1,215 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
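+# QLoRA fine-tune of ChatGLM3-6B on shibing624/medical for a single epoch.
+# load_dataset receives name=data_config_name ('finetune') to select that
+# subset of the dataset. The three Chinese evaluation prompts below are
+# everyday medical questions (donating blood with a hereditary allergy,
+# coffee with hypertension, a toddler's overnight diarrhea) that
+# EvaluateChatHook replays every evaluation_freq iterations.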
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import medical_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm3-6b' +use_varlen_attn = False + +# Data +data_path = 'shibing624/medical' +data_config_name = 'finetune' +prompt_template = PROMPT_TEMPLATE.chatglm3 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.medical +evaluation_inputs = [ + '我有家族遗传性的过敏,请问可以可以献血吗?', '我爷爷有高血压,请问他可以喝咖啡吗?', + '我女儿今年3岁了,从昨天晚上九点开始腹泻,到现在已经八个小时了,请问应该怎么办?' 
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path, name=data_config_name), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=medical_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. 
+ timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_oasst1_512_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_oasst1_512_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..93950f87782b2ad8da74e9a1fcbaff9c3adfa557 --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_oasst1_512_e3.py @@ -0,0 +1,213 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm3-6b' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.chatglm3 +max_length = 512 +pack_to_max_length = False + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer 
= dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. 
+ param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_oasst1_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..f00b83a6d55b235e7b7ced25c8cc3656c5df37ce --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_oasst1_e3.py @@ -0,0 +1,213 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm3-6b' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.chatglm3 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True, + padding_side='left') 
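+# The model dict below is the QLoRA recipe shared by every config in this
+# family: base weights are loaded 4-bit NF4-quantized with double
+# quantization and float16 compute (BitsAndBytesConfig), and trainable LoRA
+# adapters with r = 64, lora_alpha = 16, dropout 0.1 are attached to the
+# frozen model; in standard LoRA the adapter update is scaled by
+# lora_alpha / r = 16 / 64 = 0.25.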
+ +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_open_platypus_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_open_platypus_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..00f4e2146877f1df78d78614b9879c34d9d7e7f2 --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_open_platypus_e3.py @@ -0,0 +1,213 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm3-6b' +use_varlen_attn = False + +# Data +data_path = 'garage-bAInd/Open-Platypus' +prompt_template = PROMPT_TEMPLATE.chatglm3 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True, + padding_side='left') + +model = dict( + 
type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_openorca_e1.py b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_openorca_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..f2d68ea64362140caaf8cd54c6386740c42611ad --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_openorca_e1.py @@ -0,0 +1,213 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openorca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm3-6b' +use_varlen_attn = False + +# Data +data_path = 'Open-Orca/OpenOrca' +prompt_template = PROMPT_TEMPLATE.chatglm3 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 5000 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + 
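+ # (torch_dtype sets the dtype of the layers that stay unquantized; it is
+ # usually kept equal to bnb_4bit_compute_dtype below.)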
quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openorca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_sql_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_sql_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..e50c9676afbd47bfdcb8d4b52da379e16daa7112 --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_sql_e3.py @@ -0,0 +1,217 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import sql_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm3-6b' +use_varlen_attn = False + +# Data +data_path = 'b-mc2/sql-create-context' +prompt_template = PROMPT_TEMPLATE.chatglm3 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.sql +evaluation_inputs = [ + ('CREATE TABLE station (name VARCHAR, lat VARCHAR, city VARCHAR)\n' + 'Find the name, latitude, and city of stations with latitude ' + 'above 50.'), + ('CREATE TABLE weather (zip_code VARCHAR, mean_visibility_miles ' + 'INTEGER)\n找到mean_visibility_miles最大的zip_code。') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True, + padding_side='left') + +model = dict( + 
type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=sql_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
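+ # (DistSamplerSeedHook re-seeds the distributed sampler every epoch so all
+ # ranks shuffle consistently; it is effectively a no-op on a single GPU.)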
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_tiny_codes_e1.py b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_tiny_codes_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..7acece9c700377fd5c675891998ff856bf83c8b9 --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b/chatglm3_6b_qlora_tiny_codes_e1.py @@ -0,0 +1,217 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import template_map_fn_factory, tiny_codes_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm3-6b' +use_varlen_attn = False + +# Data +data_path = 'nampdn-ai/tiny-codes' +prompt_template = PROMPT_TEMPLATE.chatglm3 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True, + padding_side='left') + +model = 
dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=tiny_codes_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
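+ # (checkpoints store full training state; `xtuner convert pth_to_hf` can
+ # later export just the LoRA adapter in Hugging Face format.)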
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..c6700732e1f809b00a499ef62ce3a21a39c6196a --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_alpaca_e3.py @@ -0,0 +1,213 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm3-6b-base' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.chatglm3 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True, + padding_side='left') + +model = dict( + 
type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_alpaca_enzh_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_alpaca_enzh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..665c8a1f9694b19bf1bd1cf60fadbc294ee3c691 --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_alpaca_enzh_e3.py @@ -0,0 +1,230 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm3-6b-base' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.chatglm3 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + 
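+ # (when use_varlen_attn is True, variable-length attention keeps packed
+ # samples from attending across sequence boundaries; it requires
+ # flash-attn and pack_to_max_length=True.)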
llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. 
+ timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_alpaca_enzh_oasst1_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_alpaca_enzh_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..5835d2d49936920cae89b8d185d94414ed08bb25 --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_alpaca_enzh_oasst1_e3.py @@ -0,0 +1,245 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + oasst1_map_fn, template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm3-6b-base' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +oasst1_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.chatglm3 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + 
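+ # (these prompts feed EvaluateChatHook's periodic sample generations only;
+ # they do not contribute to the training loss.)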
'请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +oasst1 = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=oasst1_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[alpaca_en, alpaca_zh, oasst1]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + 
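+ # (0.03 * 3 = 0.09 epochs of warmup; the fractional boundary is turned
+ # into an iteration index because convert_to_iter_based=True.)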
convert_to_iter_based=True),
+ dict(
+ type=CosineAnnealingLR,
+ eta_min=0.0,
+ by_epoch=True,
+ begin=warmup_ratio * max_epochs,
+ end=max_epochs,
+ convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+# PART 5 Runtime #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+ dict(type=DatasetInfoHook, tokenizer=tokenizer),
+ dict(
+ type=EvaluateChatHook,
+ tokenizer=tokenizer,
+ every_n_iters=evaluation_freq,
+ evaluation_inputs=evaluation_inputs,
+ system=SYSTEM,
+ prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+ custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+ # record the time of every iteration.
+ timer=dict(type=IterTimerHook),
+ # print log every 10 iterations.
+ logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+ # enable the parameter scheduler.
+ param_scheduler=dict(type=ParamSchedulerHook),
+ # save checkpoint per `save_steps`.
+ checkpoint=dict(
+ type=CheckpointHook,
+ by_epoch=False,
+ interval=save_steps,
+ max_keep_ckpts=save_total_limit),
+ # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+ # whether to enable cudnn benchmark
+ cudnn_benchmark=False,
+ # set multi process parameters
+ mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+ # set distributed parameters
+ dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_alpaca_zh_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..66b912a5fa2b619d84271ee46a5d4129a14f26bd
--- /dev/null
+++ b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_alpaca_zh_e3.py
@@ -0,0 +1,213 @@
+# Copyright (c) OpenMMLab. All rights reserved.
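The alpaca_zh config below repeats the same QLoRA recipe as the files above. For orientation, the model section of all of these configs corresponds roughly to the direct transformers + peft calls sketched here (an illustration, not how xtuner itself constructs the model; SupervisedFinetune adds its own preparation for k-bit training on top):

```python
import torch
from peft import LoraConfig, get_peft_model
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

# 4-bit NF4 quantization with double quantization, as in the configs
bnb_cfg = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type='nf4',
    bnb_4bit_use_double_quant=True,
    bnb_4bit_compute_dtype=torch.float16,
)
base = AutoModelForCausalLM.from_pretrained(
    'THUDM/chatglm3-6b-base',
    trust_remote_code=True,
    torch_dtype=torch.float16,
    quantization_config=bnb_cfg,
)
# LoRA adapter with the same hyperparameters as the configs
lora_cfg = LoraConfig(r=64, lora_alpha=16, lora_dropout=0.1,
                      bias='none', task_type='CAUSAL_LM')
model = get_peft_model(base, lora_cfg)  # only the adapter weights train
```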
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_zh_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm3-6b-base' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +prompt_template = PROMPT_TEMPLATE.chatglm3 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_zh, + 
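+ # (alpaca_zh is itself a lazy config dict; the runner materializes it
+ # into a Dataset before building this DataLoader.)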
sampler=dict(type=DefaultSampler, shuffle=True),
+ collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+# PART 4 Scheduler & Optimizer #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+ type=AmpOptimWrapper,
+ optimizer=dict(
+ type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+ clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+ accumulative_counts=accumulative_counts,
+ loss_scale='dynamic',
+ dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501
+param_scheduler = [
+ dict(
+ type=LinearLR,
+ start_factor=1e-5,
+ by_epoch=True,
+ begin=0,
+ end=warmup_ratio * max_epochs,
+ convert_to_iter_based=True),
+ dict(
+ type=CosineAnnealingLR,
+ eta_min=0.0,
+ by_epoch=True,
+ begin=warmup_ratio * max_epochs,
+ end=max_epochs,
+ convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+# PART 5 Runtime #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+ dict(type=DatasetInfoHook, tokenizer=tokenizer),
+ dict(
+ type=EvaluateChatHook,
+ tokenizer=tokenizer,
+ every_n_iters=evaluation_freq,
+ evaluation_inputs=evaluation_inputs,
+ system=SYSTEM,
+ prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+ custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+ # record the time of every iteration.
+ timer=dict(type=IterTimerHook),
+ # print log every 10 iterations.
+ logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+ # enable the parameter scheduler.
+ param_scheduler=dict(type=ParamSchedulerHook),
+ # save checkpoint per `save_steps`.
+ checkpoint=dict(
+ type=CheckpointHook,
+ by_epoch=False,
+ interval=save_steps,
+ max_keep_ckpts=save_total_limit),
+ # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+ # whether to enable cudnn benchmark
+ cudnn_benchmark=False,
+ # set multi process parameters
+ mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+ # set distributed parameters
+ dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_arxiv_gentitle_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_arxiv_gentitle_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..2eb9869ca0eba826eb03a04eeddd78bf255d57f6
--- /dev/null
+++ b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_arxiv_gentitle_e3.py
@@ -0,0 +1,248 @@
+# Copyright (c) OpenMMLab. All rights reserved.
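One shared scheduler detail is worth spelling out before the arXiv title-generation config: how `warmup_ratio` becomes an iteration count. A worked sketch, assuming a hypothetical 10,000 packed iterations per epoch (the real figure depends on the dataset and on pack_to_max_length):

```python
max_epochs = 3
warmup_ratio = 0.03
lr = 2e-4
iters_per_epoch = 10_000  # assumed for illustration; dataset-dependent

total_iters = max_epochs * iters_per_epoch                        # 30000
warmup_iters = int(warmup_ratio * max_epochs * iters_per_epoch)   # 900

# LinearLR ramps the lr from lr * 1e-5 up to lr over the first 900
# iterations; CosineAnnealingLR then decays it to eta_min=0.0 over the
# remaining 29100 (epoch boundaries are converted to iterations because
# convert_to_iter_based=True).
print(total_iters, warmup_iters, lr * 1e-5)
```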
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import arxiv_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm3-6b-base' +use_varlen_attn = False + +# Data +# 1. Download data from https://kaggle.com/datasets/Cornell-University/arxiv +# 2. Process data by `xtuner preprocess arxiv ${DOWNLOADED_DATA} ./data/arxiv_data.json [optional arguments]` # noqa: E501 +data_path = './data/arxiv_data.json' +prompt_template = PROMPT_TEMPLATE.chatglm3 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.arxiv_gentile +evaluation_inputs = [ + ('We present InternLM, a multilingual foundational language ' + 'model with 104B parameters. InternLM is pre-trained on a large ' + 'corpora with 1.6T tokens with a multi-phase progressive ' + 'process, and then fine-tuned to align with human preferences. ' + 'We also developed a training system called Uniscale-LLM for ' + 'efficient large language model training. The evaluation on a ' + 'number of benchmarks shows that InternLM achieves ' + 'state-of-the-art performance in multiple aspects, including ' + 'knowledge understanding, reading comprehension, mathematics, ' + 'and coding. With such well-rounded capabilities, InternLM ' + 'achieves outstanding performances on comprehensive exams, ' + 'including MMLU, AGIEval, C-Eval and GAOKAO-Bench, without ' + 'resorting to external tools. On these benchmarks, InternLM ' + 'not only significantly outperforms open-source models, but ' + 'also obtains superior performance compared to ChatGPT. Also, ' + 'InternLM demonstrates excellent capability of understanding ' + 'Chinese language and Chinese culture, which makes it a ' + 'suitable foundation model to support Chinese-oriented language ' + 'applications. This manuscript gives a detailed study of ' + 'our results, with benchmarks and examples across a diverse ' + 'set of knowledge domains and tasks.'), + ('In this work, we develop and release Llama 2, a collection of ' + 'pretrained and fine-tuned large language models (LLMs) ranging ' + 'in scale from 7 billion to 70 billion parameters.\nOur ' + 'fine-tuned LLMs, called LLAMA 2-CHAT, are optimized for ' + 'dialogue use cases. 
Our models outperform open-source chat ' + 'models on most benchmarks we tested, and based on our human ' + 'evaluations for helpfulness and safety, may be a suitable ' + 'substitute for closedsource models. We provide a detailed ' + 'description of our approach to fine-tuning and safety ' + 'improvements of LLAMA 2-CHAT in order to enable the community ' + 'to build on our work and contribute to the responsible ' + 'development of LLMs.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=arxiv_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training 
process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_code_alpaca_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_code_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..593e56ff97765d6ae08d47fc66ed89d94830a94e --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_code_alpaca_e3.py @@ -0,0 +1,217 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
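+# Editor's note (illustrative, not upstream): configs in this family are
+# typically launched and exported with the xtuner CLI, e.g.
+#   xtuner train chatglm3_6b_base_qlora_code_alpaca_e3 --deepspeed deepspeed_zero2
+#   xtuner convert pth_to_hf <config.py> <checkpoint.pth> <output_hf_dir>
+# where the second step only converts the saved LoRA weights into a
+# HuggingFace-format adapter; it does not merge them into the base model.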
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import code_alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm3-6b-base' +use_varlen_attn = False + +# Data +data_path = 'HuggingFaceH4/CodeAlpaca_20K' +prompt_template = PROMPT_TEMPLATE.chatglm3 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 100 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=code_alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + 
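+    # Editor's note: with pack_to_max_length=True (set just below), multiple
+    # short samples are concatenated into single max_length=2048 sequences to
+    # raise GPU utilization; shuffle_before_pack shuffles the dataset before
+    # that concatenation so each packed sequence mixes unrelated samples.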
pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
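+    # (i.e., keep every rank's shuffling in sync in a distributed environment)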
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_colorist_e5.py b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_colorist_e5.py new file mode 100644 index 0000000000000000000000000000000000000000..eb462ed4d1f2295f5f67f14bcfeaf08529939d31 --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_colorist_e5.py @@ -0,0 +1,213 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import colors_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm3-6b-base' +use_varlen_attn = False + +# Data +data_path = 'burkelibbey/colors' +prompt_template = PROMPT_TEMPLATE.chatglm3 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 5 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 200 +SYSTEM = SYSTEM_TEMPLATE.colorist +evaluation_inputs = [ + '请给我一个像天空一样清澈透明的蓝色。', 'Please give me a clear blue like the sky.' 
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=colors_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. 
+ timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_lawyer_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_lawyer_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..d9dd94d6869e1cafb961da2d2885d0ac8c764316 --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_lawyer_e3.py @@ -0,0 +1,237 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (crime_kg_assitant_map_fn, + law_reference_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm3-6b-base' +use_varlen_attn = False + +# Data +# download data from https://github.com/LiuHC0428/LAW-GPT +crime_kg_assitant_path = './data/CrimeKgAssitant清洗后_52k.json' +law_reference_data_path = './data/训练数据_带法律依据_92k.json' +prompt_template = PROMPT_TEMPLATE.chatglm3 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.lawyer +evaluation_inputs = 
['请问离婚需要准备什么材料?', '销售鳄鱼皮包违法吗?'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +crime_kg_assitant = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=crime_kg_assitant_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=crime_kg_assitant_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +law_reference_data = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=law_reference_data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=law_reference_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[crime_kg_assitant, law_reference_data]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + 
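+# Editor's note (illustrative, not upstream): a worked example of the schedule
+# above: warmup_ratio * max_epochs = 0.03 * 3 = 0.09 epochs of linear warmup,
+# from start_factor * lr = 2e-9 up to lr = 2e-4; convert_to_iter_based=True
+# makes mmengine translate those epoch fractions into iteration counts, after
+# which CosineAnnealingLR decays the rate to eta_min = 0.0 by the end of
+# epoch 3. Note also that train_dataloader consumes a ConcatDataset that
+# simply chains the crime_kg_assitant and law_reference datasets end to end.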
+####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_medical_e1.py b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_medical_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..8d4ca6677642b665a5a1d62b49c58f03fb0b5c42 --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_medical_e1.py @@ -0,0 +1,215 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
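+# Editor's note (not part of the upstream file): the _e1/_e3/_e5 filename
+# suffixes in this directory encode max_epochs (here 1, matching the single
+# pass this config makes over the shibing624/medical 'finetune' subset).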
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import medical_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm3-6b-base' +use_varlen_attn = False + +# Data +data_path = 'shibing624/medical' +data_config_name = 'finetune' +prompt_template = PROMPT_TEMPLATE.chatglm3 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.medical +evaluation_inputs = [ + '我有家族遗传性的过敏,请问可以可以献血吗?', '我爷爷有高血压,请问他可以喝咖啡吗?', + '我女儿今年3岁了,从昨天晚上九点开始腹泻,到现在已经八个小时了,请问应该怎么办?' 
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path, name=data_config_name), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=medical_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. 
+ timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_oasst1_512_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_oasst1_512_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..539efccf076fd69d3919d8e9f095eb79b7142576 --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_oasst1_512_e3.py @@ -0,0 +1,213 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm3-6b-base' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.chatglm3 +max_length = 512 +pack_to_max_length = False + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # 
+####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. 
+ logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_oasst1_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..fbffaebda022a659b0e57877b9b9e19e63972b68 --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_oasst1_e3.py @@ -0,0 +1,213 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm3-6b-base' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.chatglm3 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, 
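+    # Editor's note: THUDM/chatglm3-6b-base ships custom tokenizer code, so
+    # trust_remote_code=True is required; encode_special_tokens=True keeps the
+    # chatglm3 template's special tokens intact, and padding_side='left' suits
+    # generation during EvaluateChatHook evaluations.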
+ pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
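+    # (every 500 iterations here, keeping only the 2 most recent checkpoints)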
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_open_platypus_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_open_platypus_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..d467fa14a949c52cdc378dd0bf4a004063f2307e --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_open_platypus_e3.py @@ -0,0 +1,213 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm3-6b-base' +use_varlen_attn = False + +# Data +data_path = 'garage-bAInd/Open-Platypus' +prompt_template = PROMPT_TEMPLATE.chatglm3 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True, + padding_side='left') + 
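+# Editor's sketch (not upstream code): SupervisedFinetune assembles the QLoRA
+# model roughly as
+#   llm = AutoModelForCausalLM.from_pretrained(..., quantization_config=bnb)
+#   llm = prepare_model_for_kbit_training(llm)
+#   llm = get_peft_model(llm, lora_config)
+# so the NF4-quantized base stays frozen and only the r=64 LoRA adapters
+# (scaling lora_alpha / r = 16 / 64 = 0.25) receive gradients.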
+model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_openorca_e1.py b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_openorca_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..abfe4b722e83aae32d2594fb6d309885ecc15861 --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_openorca_e1.py @@ -0,0 +1,213 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openorca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm3-6b-base' +use_varlen_attn = False + +# Data +data_path = 'Open-Orca/OpenOrca' +prompt_template = PROMPT_TEMPLATE.chatglm3 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 5000 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + 
use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openorca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_sql_e3.py b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_sql_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..c41cb53f5e1ee9f7772d08ca432db3ea8ec42b2c --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_sql_e3.py @@ -0,0 +1,217 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import sql_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm3-6b-base' +use_varlen_attn = False + +# Data +data_path = 'b-mc2/sql-create-context' +prompt_template = PROMPT_TEMPLATE.chatglm3 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.sql +evaluation_inputs = [ + ('CREATE TABLE station (name VARCHAR, lat VARCHAR, city VARCHAR)\n' + 'Find the name, latitude, and city of stations with latitude ' + 'above 50.'), + ('CREATE TABLE weather (zip_code VARCHAR, mean_visibility_miles ' + 'INTEGER)\n找到mean_visibility_miles最大的zip_code。') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True, + padding_side='left') + 
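+# A short gloss on the tokenizer above: trust_remote_code loads THUDM's custom
+# tokenizer implementation, encode_special_tokens=True keeps chatglm3's special
+# role tokens intact, and padding_side='left' is the usual choice when the same
+# model is also used for batched generation, as in the EvaluateChatHook of
+# PART 5 (a descriptive reading of the flags, not upstream documentation).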
+model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=sql_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_tiny_codes_e1.py b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_tiny_codes_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..4cc9c175c80dc4d844a99aa12ee3d10cab620a16 --- /dev/null +++ b/data/xtuner/xtuner/configs/chatglm/chatglm3_6b_base/chatglm3_6b_base_qlora_tiny_codes_e1.py @@ -0,0 +1,217 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import template_map_fn_factory, tiny_codes_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm3-6b-base' +use_varlen_attn = False + +# Data +data_path = 'nampdn-ai/tiny-codes' +prompt_template = PROMPT_TEMPLATE.chatglm3 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### 
+tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=tiny_codes_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. 
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/cohere/README.md b/data/xtuner/xtuner/configs/cohere/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..5d306cb33797e496114bc0c46afdb64eac147f28
--- /dev/null
+++ b/data/xtuner/xtuner/configs/cohere/README.md
@@ -0,0 +1,48 @@
+# Cohere 104B
+
+## Install
+
+```bash
+# Install the latest xtuner
+pip install -U 'xtuner[deepspeed]'
+
+# Cohere requires the latest version of transformers.
+pip install git+https://github.com/huggingface/transformers.git
+
+# Sequence parallel requires flash-attn
+pip install flash-attn
+```
+
+## Full Parameter Fine-tune
+
+Full-parameter fine-tuning requires 64 A100 80G GPUs (8 nodes x 8 GPUs).
+
+### slurm
+
+Note: `$PARTITION` is the Slurm partition to submit the job to.
+
+```bash
+srun -p $PARTITION --job-name=Cohere --nodes=8 --gres=gpu:8 --ntasks-per-node=8 xtuner train cohere_100b_128k_sp32 --deepspeed deepspeed_zero3 --launcher slurm
+```
+
+### torchrun
+
+Note: `$NODE_0_ADDR` is the IP address of node 0.
+
+```bash
+# execute on node 0
+NPROC_PER_NODE=8 NNODES=8 PORT=29600 ADDR=$NODE_0_ADDR NODE_RANK=0 xtuner train cohere_100b_128k_sp32 --deepspeed deepspeed_zero3
+
+# execute on node 1
+NPROC_PER_NODE=8 NNODES=8 PORT=29600 ADDR=$NODE_0_ADDR NODE_RANK=1 xtuner train cohere_100b_128k_sp32 --deepspeed deepspeed_zero3
+```
+
+### Speed
+
+Throughput measured on A100 80G GPUs:
+
+| Model       | Sequence Length | GPUs | Sequence Parallel World Size | Tokens per Second | TFLOPs |
+| :---------: | :-------------: | :--: | :--------------------------: | :---------------: | :----: |
+| Cohere_100b | 128k            | 64   | 32                           | 97.3              | 173.4  |
+| Cohere_100b | 128k            | 128  | 16                           | 102.1             | 182.7  |
+| Cohere_100b | 128k            | 256  | 16                           | 101.3             | 181.3  |
diff --git a/data/xtuner/xtuner/configs/cohere/cohere_104b/cohere_100b_128k_sp32.py b/data/xtuner/xtuner/configs/cohere/cohere_104b/cohere_100b_128k_sp32.py
new file mode 100644
index 0000000000000000000000000000000000000000..0882be1aedbc645e8a05bb533d4795ca52f72a7a
--- /dev/null
+++ b/data/xtuner/xtuner/configs/cohere/cohere_104b/cohere_100b_128k_sp32.py
@@ -0,0 +1,211 @@
+# Copyright (c) OpenMMLab. All rights reserved.
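+# Back-of-the-envelope arithmetic for this config (simple division, assuming
+# even token sharding): with max_length = 131072 and sequence_parallel_size =
+# 32, each rank holds 131072 / 32 = 4096 tokens of every packed sequence,
+# which is what lets 128k-token samples of a ~104B model fit on A100 80G GPUs.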
+import torch +from datasets import load_dataset +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + ThroughputHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'CohereForAI/c4ai-command-r-plus' +use_varlen_attn = False +sequence_parallel_size = 32 + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.cohere_chat +max_length = 131072 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 32 +dataloader_num_workers = 4 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.05 + +# Save +save_steps = 500 +save_total_limit = 1 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 10 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.bfloat16, + attn_implementation='flash_attention_2')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + 
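+# The English and Chinese alpaca datasets above are chained into one training
+# set below; ConcatDataset simply concatenates them, so an epoch should cover
+# the sum of both packed datasets (see xtuner.dataset for the exact semantics).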
+train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh])
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=SequenceParallelSampler, seed=1024),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=lr * 0.15,
+        by_epoch=True,
+        begin=0,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_iters=16)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(type=ThroughputHook),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        max_new_tokens=100,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every iteration.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=1),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict( + by_epoch=False, + window_size=1, + mean_pattern=r'.*(loss|time|data_time|grad_norm|tflops).*') diff --git a/data/xtuner/xtuner/configs/custom_dataset/pretrain/baichuan/baichuan2_13b_base_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/pretrain/baichuan/baichuan2_13b_base_full_custom_pretrain_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..d246946ece0baaefcaaa61887d7c039da99abd08 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/pretrain/baichuan/baichuan2_13b_base_full_custom_pretrain_e1.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[ + { + "text": "xxx" + }, + { + "text": "xxx" + }, + ... +] +""" # noqa: E501 + +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import pretrain_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-13B-Base' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = ['上海是', 'Shanghai is'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + 
pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=pretrain_map_fn, + template_map_fn=None, + remove_unused_columns=True, + shuffle_before_pack=False, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/pretrain/baichuan/baichuan2_7b_base_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/pretrain/baichuan/baichuan2_7b_base_full_custom_pretrain_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..87cbbbb6224540c1944433ab74fde551b613c66f --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/pretrain/baichuan/baichuan2_7b_base_full_custom_pretrain_e1.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[ + { + "text": "xxx" + }, + { + "text": "xxx" + }, + ... +] +""" # noqa: E501 + +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import pretrain_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-7B-Base' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = ['上海是', 'Shanghai is'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + 
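+# Pretraining note (a summary of PART 3 below, assuming the data format shown
+# in the module docstring): each record only needs a "text" field;
+# pretrain_map_fn turns it into plain next-token-prediction data, and
+# template_map_fn=None means no chat template is applied.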
+####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=pretrain_map_fn, + template_map_fn=None, + remove_unused_columns=True, + shuffle_before_pack=False, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/pretrain/chatglm/chatglm2_6b_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/pretrain/chatglm/chatglm2_6b_full_custom_pretrain_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..086985fefa82ecd8494ed39bcb580a229b936255 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/pretrain/chatglm/chatglm2_6b_full_custom_pretrain_e1.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[ + { + "text": "xxx" + }, + { + "text": "xxx" + }, + ... +] +""" # noqa: E501 + +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import pretrain_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm2-6b' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = ['上海是', 'Shanghai is'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# 
PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=pretrain_map_fn, + template_map_fn=None, + remove_unused_columns=True, + shuffle_before_pack=False, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/pretrain/chatglm/chatglm3_6b_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/pretrain/chatglm/chatglm3_6b_full_custom_pretrain_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..174eb700b0914c8363d00db2fb77a2e53a2400d2 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/pretrain/chatglm/chatglm3_6b_full_custom_pretrain_e1.py @@ -0,0 +1,200 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[ + { + "text": "xxx" + }, + { + "text": "xxx" + }, + ... +] +""" # noqa: E501 + +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import pretrain_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm3-6b' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = ['上海是', 'Shanghai is'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + 
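+# In PART 3 below, shuffle_before_pack=False keeps the corpus in its original
+# order while chunks are packed to max_length, so adjacent text can stay
+# adjacent inside a packed sample; the DefaultSampler still shuffles the packed
+# samples (a plain reading of the flag names, not upstream documentation).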
+####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=pretrain_map_fn, + template_map_fn=None, + remove_unused_columns=True, + shuffle_before_pack=False, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/pretrain/deepseek/deepseek_moe_16b_base_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/pretrain/deepseek/deepseek_moe_16b_base_full_custom_pretrain_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..4fbe2419d028a33c9628f407bfd8208b9697822f --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/pretrain/deepseek/deepseek_moe_16b_base_full_custom_pretrain_e1.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[ + { + "text": "xxx" + }, + { + "text": "xxx" + }, + ... +] +""" # noqa: E501 + +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import pretrain_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'deepseek-ai/deepseek-moe-16b-base' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = ['上海是', 'Shanghai is'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + 
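+# Effective batch arithmetic for the settings above (plain multiplication,
+# nothing model-specific): each optimizer step consumes
+# batch_size * accumulative_counts * num_gpus packed sequences, e.g.
+# 1 * 16 * 8 = 128 sequences of max_length tokens on a single 8-GPU node.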
+#######################################################################
+#                      PART 3  Dataset & Dataloader                  #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path='json', data_files=data_files),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=pretrain_map_fn,
+    template_map_fn=None,
+    remove_unused_columns=True,
+    shuffle_before_pack=False,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                   #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                          #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/pretrain/gemma/gemma_2b_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/pretrain/gemma/gemma_2b_full_custom_pretrain_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..f2e38b481ed85ac5ccb93056475046287f0f387e --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/pretrain/gemma/gemma_2b_full_custom_pretrain_e1.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[ + { + "text": "xxx" + }, + { + "text": "xxx" + }, + ... +] +""" # noqa: E501 + +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import pretrain_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'google/gemma-2b' # Gemma requires transformers>=4.38.1 # noqa: E501 +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = ['上海是', 'Shanghai is'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + 
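A reading note for these files: every `dict(type=..., ...)` entry is a lazy component spec, not an instance. An mmengine-style runner pops `type` and calls it with the remaining keys once the component is actually needed, which is why a bound method like `AutoTokenizer.from_pretrained` can sit inside a plain dict. A toy version of that build step (simplified; the real builder also resolves registry strings and nested specs):

```python
def build_from_cfg(cfg: dict):
    """Minimal stand-in for the runner's builder: pop `type`, call it with the rest."""
    cfg = dict(cfg)  # copy so the original spec stays reusable
    factory = cfg.pop("type")
    return factory(**cfg)

# Same shape as the tokenizer/model specs above, demonstrated with a harmless
# factory (`dict`) instead of triggering a real model download.
spec = dict(type=dict, pretrained_model_name_or_path="google/gemma-2b")
assert build_from_cfg(spec) == {"pretrained_model_name_or_path": "google/gemma-2b"}
```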
+#######################################################################
+#                      PART 3  Dataset & Dataloader                  #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path='json', data_files=data_files),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=pretrain_map_fn,
+    template_map_fn=None,
+    remove_unused_columns=True,
+    shuffle_before_pack=False,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                   #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                          #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/pretrain/gemma/gemma_7b_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/pretrain/gemma/gemma_7b_full_custom_pretrain_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..a7f9c3bd9f2847a1c70cfc6c3ca3e849e2879320 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/pretrain/gemma/gemma_7b_full_custom_pretrain_e1.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[ + { + "text": "xxx" + }, + { + "text": "xxx" + }, + ... +] +""" # noqa: E501 + +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import pretrain_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'google/gemma-7b' # Gemma requires transformers>=4.38.1 # noqa: E501 +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = ['上海是', 'Shanghai is'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + 
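The `# bs = 1 GPU * 1 batch_size_per_device * 16 acc` comment in PART 1 above is the effective-batch formula: with gradient accumulation the optimizer steps once per `accumulative_counts` micro-batches. Checking this config's numbers (single-GPU launch assumed):

```python
gpus = 1                    # the comment assumes a single-GPU launch
batch_size_per_device = 1   # `batch_size` in PART 1
accumulative_counts = 16
max_length = 2048

effective_batch = gpus * batch_size_per_device * accumulative_counts
# pack_to_max_length=True keeps every sequence a full `max_length` block,
# so each optimizer step sees roughly effective_batch * max_length tokens.
tokens_per_step = effective_batch * max_length

print(effective_batch, tokens_per_step)  # 16 sequences, 32768 tokens
```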
+#######################################################################
+#                      PART 3  Dataset & Dataloader                  #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path='json', data_files=data_files),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=pretrain_map_fn,
+    template_map_fn=None,
+    remove_unused_columns=True,
+    shuffle_before_pack=False,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                   #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                          #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/pretrain/internlm/internlm2_1_8b_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/pretrain/internlm/internlm2_1_8b_full_custom_pretrain_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..ea900f0e9b806121ec89068201028cfa864adb92 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/pretrain/internlm/internlm2_1_8b_full_custom_pretrain_e1.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[ + { + "text": "xxx" + }, + { + "text": "xxx" + }, + ... +] +""" # noqa: E501 + +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import pretrain_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-1_8b' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = ['上海是', 'Shanghai is'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + 
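In the PART 4 hunk that follows, both schedulers are declared in epoch units (`by_epoch=True`) and converted to iterations via `convert_to_iter_based=True`, so the `LinearLR` warmup ends at `warmup_ratio * max_epochs = 0.03` epochs, i.e. the first 3% of training. The conversion in numbers (`iters_per_epoch` is hypothetical; it depends on the packed dataset size):

```python
max_epochs = 1
warmup_ratio = 0.03
iters_per_epoch = 10_000  # hypothetical; set by len(train_dataloader) at runtime

total_iters = max_epochs * iters_per_epoch
warmup_iters = int(warmup_ratio * max_epochs * iters_per_epoch)

# LinearLR ramps the lr from lr * 1e-5 up to lr over the first 300 iters,
# then CosineAnnealingLR decays it to eta_min=0.0 by iter 10000.
print(warmup_iters, total_iters)  # 300 10000
```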
+#######################################################################
+#                      PART 3  Dataset & Dataloader                  #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path='json', data_files=data_files),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=pretrain_map_fn,
+    template_map_fn=None,
+    remove_unused_columns=True,
+    shuffle_before_pack=False,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                   #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                          #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/pretrain/internlm/internlm2_20b_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/pretrain/internlm/internlm2_20b_full_custom_pretrain_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..35592294a04f83379940b50deb725492e9a575c9 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/pretrain/internlm/internlm2_20b_full_custom_pretrain_e1.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[ + { + "text": "xxx" + }, + { + "text": "xxx" + }, + ... +] +""" # noqa: E501 + +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import pretrain_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-20b' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = ['上海是', 'Shanghai is'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + 
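`pack_to_max_length=True` in PART 1 above concatenates the tokenized corpus and slices it into fixed `max_length` blocks, so no training step wastes compute on pad tokens. A toy illustration of the idea, not xtuner's actual implementation:

```python
def pack(token_streams, max_length=2048):
    """Concatenate token-id lists and cut fixed-size blocks from the stream."""
    buffer = []
    for ids in token_streams:
        buffer.extend(ids)
        while len(buffer) >= max_length:
            yield buffer[:max_length]
            buffer = buffer[max_length:]
    # The trailing remainder (< max_length) is simply dropped in this toy version.

blocks = list(pack([[1] * 1500, [2] * 1500, [3] * 1500]))
assert len(blocks) == 2 and all(len(b) == 2048 for b in blocks)
```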
+#######################################################################
+#                      PART 3  Dataset & Dataloader                  #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path='json', data_files=data_files),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=pretrain_map_fn,
+    template_map_fn=None,
+    remove_unused_columns=True,
+    shuffle_before_pack=False,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                   #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                          #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/pretrain/internlm/internlm2_7b_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/pretrain/internlm/internlm2_7b_full_custom_pretrain_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..ff212d7e32ecd885ec8578cd51e2252a17e7e412 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/pretrain/internlm/internlm2_7b_full_custom_pretrain_e1.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[ + { + "text": "xxx" + }, + { + "text": "xxx" + }, + ... +] +""" # noqa: E501 + +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import pretrain_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-7b' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = ['上海是', 'Shanghai is'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + 
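In the PART 3 hunk below, `dataset_map_fn=pretrain_map_fn` with `template_map_fn=None` means raw `{"text": ...}` records are trained on as-is instead of being wrapped in a chat template. Conceptually the map step turns each record into a single target-only turn; a sketch of that shape (field names are an assumption, not the verbatim xtuner code):

```python
def pretrain_map_fn_sketch(example):
    # One pseudo-turn: empty prompt, whole document as the target, so every
    # token of `text` contributes to the language-modeling loss.
    return {'conversation': [{'input': '', 'output': example['text']}]}

sample = {'text': 'Shanghai is the economic center of China.'}
assert pretrain_map_fn_sketch(sample)['conversation'][0]['input'] == ''
```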
+#######################################################################
+#                      PART 3  Dataset & Dataloader                  #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path='json', data_files=data_files),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=pretrain_map_fn,
+    template_map_fn=None,
+    remove_unused_columns=True,
+    shuffle_before_pack=False,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                   #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                          #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/pretrain/llama/llama2_70b_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/pretrain/llama/llama2_70b_full_custom_pretrain_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..66ee04e64c5cda7355152bed3435baa9b74d86a3 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/pretrain/llama/llama2_70b_full_custom_pretrain_e1.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[ + { + "text": "xxx" + }, + { + "text": "xxx" + }, + ... +] +""" # noqa: E501 + +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import pretrain_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-70b-hf' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = ['上海是', 'Shanghai is'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# 
PART 3  Dataset & Dataloader                  #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path='json', data_files=data_files),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=pretrain_map_fn,
+    template_map_fn=None,
+    remove_unused_columns=True,
+    shuffle_before_pack=False,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                   #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                          #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/pretrain/llama/llama2_7b_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/pretrain/llama/llama2_7b_full_custom_pretrain_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..b752fc8c50996727960a82e12734294d2dafa316 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/pretrain/llama/llama2_7b_full_custom_pretrain_e1.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[ + { + "text": "xxx" + }, + { + "text": "xxx" + }, + ... +] +""" # noqa: E501 + +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import pretrain_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-hf' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = ['上海是', 'Shanghai is'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 
3  Dataset & Dataloader                  #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path='json', data_files=data_files),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=pretrain_map_fn,
+    template_map_fn=None,
+    remove_unused_columns=True,
+    shuffle_before_pack=False,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                   #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                          #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/custom_dataset/pretrain/minicpm/minicpm3_4b_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/pretrain/minicpm/minicpm3_4b_full_custom_pretrain_e1.py
new file mode 100644
index 0000000000000000000000000000000000000000..936b48f4acc7d768f3a22059b8613009d8820eeb
--- /dev/null
+++ b/data/xtuner/xtuner/configs/custom_dataset/pretrain/minicpm/minicpm3_4b_full_custom_pretrain_e1.py
@@ -0,0 +1,216 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+"""Data format:
+
+[
+    {
+        "text": "xxx"
+    },
+    {
+        "text": "xxx"
+    },
+    ...
+]
+"""  # noqa: E501
+
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (
+    CheckpointHook,
+    DistSamplerSeedHook,
+    IterTimerHook,
+    LoggerHook,
+    ParamSchedulerHook,
+)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from torch.optim import AdamW
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import pretrain_map_fn
+from xtuner.engine.hooks import (
+    DatasetInfoHook,
+    EvaluateChatHook,
+    VarlenAttnArgsToMessageHubHook,
+)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+
+#######################################################################
+#                          PART 1  Settings                          #
+#######################################################################
+# Model
+pretrained_model_name_or_path = "openbmb/MiniCPM3-4B"
+use_varlen_attn = False
+
+# Data
+data_files = ["/path/to/your.json"]
+max_length = 1024
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 1  # bs = 1 GPU * 1 batch_size_per_device * 1 acc
+dataloader_num_workers = 0
+max_steps = 10000
+optim_type = AdamW
+lr = 2e-5
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = ""
+evaluation_inputs = ["上海是", "Shanghai is"]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                     #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side="right",
+    eos_token="<|im_end|>",
+)
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+    ),
+)
+
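Unlike the epoch-based configs earlier in this diff, the MiniCPM3 one is step-based: `max_steps = 10000` in PART 1 above feeds `TrainLoop(max_iters=...)` in the hunk below, and the warmup boundary becomes `max_steps * warmup_ratio`. The resulting numbers:

```python
max_steps = 10000
warmup_ratio = 0.03
save_steps = 500
save_total_limit = 2

warmup_end = max_steps * warmup_ratio  # LinearLR hands over to cosine at step 300
total_saves = max_steps // save_steps  # 20 checkpoint events over the run
# CheckpointHook's max_keep_ckpts then prunes all but the newest 2 checkpoints.
print(warmup_end, total_saves)
```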
+#######################################################################
+#                      PART 3  Dataset & Dataloader                  #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path="json", data_files=data_files),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=pretrain_map_fn,
+    template_map_fn=None,
+    remove_unused_columns=True,
+    shuffle_before_pack=False,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn,
+)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn),
+)
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                   #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale="dynamic",
+    dtype="float16",
+)
+
+# learning policy
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=max_steps * warmup_ratio,
+        convert_to_iter_based=True,
+    ),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=max_steps * warmup_ratio,
+        end=max_steps,
+        convert_to_iter_based=True,
+    ),
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_iters=max_steps)
+
+#######################################################################
+#                           PART 5  Runtime                          #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+    ),
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit,
+    ),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method="fork", opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend="nccl"),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = "INFO"
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/custom_dataset/pretrain/minicpm/minicpm_1b_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/pretrain/minicpm/minicpm_1b_full_custom_pretrain_e1.py
new file mode 100644
index 0000000000000000000000000000000000000000..fc0da5ed3560495cc2bd8c9ccc923984938a175b
--- /dev/null
+++ b/data/xtuner/xtuner/configs/custom_dataset/pretrain/minicpm/minicpm_1b_full_custom_pretrain_e1.py
@@ -0,0 +1,200 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+"""Data format:
+
+[
+    {
+        "text": "xxx"
+    },
+    {
+        "text": "xxx"
+    },
+    ...
+]
+"""  # noqa: E501
+
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from torch.optim import AdamW
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import pretrain_map_fn
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+
+#######################################################################
+#                          PART 1  Settings                          #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'openbmb/MiniCPM-1B-sft-bf16'
+use_varlen_attn = False
+
+# Data
+data_files = ['/path/to/json/file.json']
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 1  # bs = 1 GPU * 1 batch_size_per_device * 1 acc
+dataloader_num_workers = 0
+max_epochs = 1
+optim_type = AdamW
+lr = 2e-5
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = ''
+evaluation_inputs = ['上海是', 'Shanghai is']
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                     #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right',
+    eos_token='</s>')
') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=pretrain_map_fn, + template_map_fn=None, + remove_unused_columns=True, + shuffle_before_pack=False, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/custom_dataset/pretrain/minicpm/minicpm_2b_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/pretrain/minicpm/minicpm_2b_full_custom_pretrain_e1.py
new file mode 100644
index 0000000000000000000000000000000000000000..160495a860ba94b79702b3ff447435117fcf46b2
--- /dev/null
+++ b/data/xtuner/xtuner/configs/custom_dataset/pretrain/minicpm/minicpm_2b_full_custom_pretrain_e1.py
@@ -0,0 +1,200 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+"""Data format:
+
+[
+    {
+        "text": "xxx"
+    },
+    {
+        "text": "xxx"
+    },
+    ...
+]
+"""  # noqa: E501
+
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from torch.optim import AdamW
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import pretrain_map_fn
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+
+#######################################################################
+#                          PART 1  Settings                          #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'openbmb/MiniCPM-2B-sft-bf16'
+use_varlen_attn = False
+
+# Data
+data_files = ['/path/to/json/file.json']
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16  # bs = 1 GPU * 1 batch_size_per_device * 16 acc
+dataloader_num_workers = 0
+max_epochs = 1
+optim_type = AdamW
+lr = 2e-5
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = ''
+evaluation_inputs = ['上海是', 'Shanghai is']
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                     #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right',
+    eos_token='</s>')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                  #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path='json', data_files=data_files),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=pretrain_map_fn,
+    template_map_fn=None,
+    remove_unused_columns=True,
+    shuffle_before_pack=False,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                   #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                          #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/pretrain/mistral/mistral_7b_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/pretrain/mistral/mistral_7b_full_custom_pretrain_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..197841816e744b7c6122a9d239dd2de036d10d01 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/pretrain/mistral/mistral_7b_full_custom_pretrain_e1.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[ + { + "text": "xxx" + }, + { + "text": "xxx" + }, + ... +] +""" # noqa: E501 + +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import pretrain_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'mistralai/Mistral-7B-v0.1' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = ['上海是', 'Shanghai is'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + 
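Every pretrain config in this directory derives its effective batch size the same way, as the recurring comment `# bs = 1 GPU * 1 batch_size_per_device * 16 acc` spells out: world size × `batch_size` × `accumulative_counts`. A minimal sanity-check sketch of that arithmetic (not part of xtuner; `num_gpus` is an assumed stand-in for the launcher's world size):

num_gpus = 1                 # assumed world size set by the launcher
batch_size_per_device = 1    # `batch_size` in the configs above
accumulative_counts = 16     # gradient-accumulation steps
max_length = 2048            # tokens per packed sample (pack_to_max_length=True)

effective_batch = num_gpus * batch_size_per_device * accumulative_counts
print(effective_batch)               # 16 samples per optimizer step
print(effective_batch * max_length)  # 32768 tokens per optimizer step

Scaling any one factor (more GPUs, a larger per-device batch, or more accumulation) scales the effective batch linearly, which is why these configs keep the product at 16 while holding per-device memory to a single packed 2048-token sample.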
+#######################################################################
+#                      PART 3  Dataset & Dataloader                  #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path='json', data_files=data_files),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=pretrain_map_fn,
+    template_map_fn=None,
+    remove_unused_columns=True,
+    shuffle_before_pack=False,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                   #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                          #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/pretrain/mixtral/mixtral_8x7b_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/pretrain/mixtral/mixtral_8x7b_full_custom_pretrain_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..b2f5a6888d8580b478eecec2ff165f350079520e --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/pretrain/mixtral/mixtral_8x7b_full_custom_pretrain_e1.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[ + { + "text": "xxx" + }, + { + "text": "xxx" + }, + ... +] +""" # noqa: E501 + +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import pretrain_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'mistralai/Mixtral-8x7B-v0.1' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = ['上海是', 'Shanghai is'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + 
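All of these configs share the same two-phase learning-rate policy: a LinearLR warmup over the first `warmup_ratio` of training, then CosineAnnealingLR decay to `eta_min`; the epoch-based boundaries are rescaled to iterations by mmengine because `convert_to_iter_based=True`. A rough standalone sketch of the resulting curve (an approximation of mmengine's internal behavior, with `iters_per_epoch` an assumed dataloader length):

import math

lr, start_factor, eta_min = 2e-5, 1e-5, 0.0
max_epochs, warmup_ratio = 1, 0.03
iters_per_epoch = 1000  # assumption for illustration
total_iters = max_epochs * iters_per_epoch
warmup_iters = int(warmup_ratio * max_epochs * iters_per_epoch)  # 30 here

def lr_at(step: int) -> float:
    """Approximate learning rate after `step` iterations."""
    if step < warmup_iters:  # LinearLR: factor ramps from start_factor to 1
        frac = step / max(warmup_iters, 1)
        return lr * (start_factor + (1 - start_factor) * frac)
    # CosineAnnealingLR: cosine decay from lr down to eta_min
    t = (step - warmup_iters) / max(total_iters - warmup_iters, 1)
    return eta_min + (lr - eta_min) * 0.5 * (1 + math.cos(math.pi * t))

print(lr_at(0), lr_at(warmup_iters), lr_at(total_iters))  # ~0 -> 2e-5 -> 0.0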
+#######################################################################
+#                      PART 3  Dataset & Dataloader                  #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path='json', data_files=data_files),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=pretrain_map_fn,
+    template_map_fn=None,
+    remove_unused_columns=True,
+    shuffle_before_pack=False,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                   #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                          #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/pretrain/qwen/qwen1_5_0_5b_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/pretrain/qwen/qwen1_5_0_5b_full_custom_pretrain_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..0e0e6cabd00e75e7d0b51c81b6d4d51ff6de29dd --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/pretrain/qwen/qwen1_5_0_5b_full_custom_pretrain_e1.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[ + { + "text": "xxx" + }, + { + "text": "xxx" + }, + ... +] +""" # noqa: E501 + +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import pretrain_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-0.5B' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = ['上海是', 'Shanghai is'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 
Dataset & Dataloader                  #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path='json', data_files=data_files),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=pretrain_map_fn,
+    template_map_fn=None,
+    remove_unused_columns=True,
+    shuffle_before_pack=False,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                   #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                          #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/pretrain/qwen/qwen1_5_14b_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/pretrain/qwen/qwen1_5_14b_full_custom_pretrain_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..3d6b4cbbae2a8e70ee8cc720ea208b93c7c0a3c8 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/pretrain/qwen/qwen1_5_14b_full_custom_pretrain_e1.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[ + { + "text": "xxx" + }, + { + "text": "xxx" + }, + ... +] +""" # noqa: E501 + +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import pretrain_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-14B' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = ['上海是', 'Shanghai is'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 
Dataset & Dataloader                  #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path='json', data_files=data_files),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=pretrain_map_fn,
+    template_map_fn=None,
+    remove_unused_columns=True,
+    shuffle_before_pack=False,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                   #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                          #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/pretrain/qwen/qwen1_5_1_8b_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/pretrain/qwen/qwen1_5_1_8b_full_custom_pretrain_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..1e4724e2e37787aee804a1079218573d25ad9be3 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/pretrain/qwen/qwen1_5_1_8b_full_custom_pretrain_e1.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[ + { + "text": "xxx" + }, + { + "text": "xxx" + }, + ... +] +""" # noqa: E501 + +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import pretrain_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-1.8B' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = ['上海是', 'Shanghai is'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 
Dataset & Dataloader                  #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path='json', data_files=data_files),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=pretrain_map_fn,
+    template_map_fn=None,
+    remove_unused_columns=True,
+    shuffle_before_pack=False,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                   #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                          #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/pretrain/qwen/qwen1_5_4b_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/pretrain/qwen/qwen1_5_4b_full_custom_pretrain_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..1ad11ff3b3f5bcfc082e3ee2491c7cc51df25283 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/pretrain/qwen/qwen1_5_4b_full_custom_pretrain_e1.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[ + { + "text": "xxx" + }, + { + "text": "xxx" + }, + ... +] +""" # noqa: E501 + +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import pretrain_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-4B' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = ['上海是', 'Shanghai is'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset 
& Dataloader                  #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path='json', data_files=data_files),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=pretrain_map_fn,
+    template_map_fn=None,
+    remove_unused_columns=True,
+    shuffle_before_pack=False,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                   #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                          #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/pretrain/qwen/qwen1_5_72b_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/pretrain/qwen/qwen1_5_72b_full_custom_pretrain_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..2f7cf2117e7a75318b9e27a6f156969b528de04a --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/pretrain/qwen/qwen1_5_72b_full_custom_pretrain_e1.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[ + { + "text": "xxx" + }, + { + "text": "xxx" + }, + ... +] +""" # noqa: E501 + +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import pretrain_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-72B' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = ['上海是', 'Shanghai is'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 
Dataset & Dataloader                  #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path='json', data_files=data_files),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=pretrain_map_fn,
+    template_map_fn=None,
+    remove_unused_columns=True,
+    shuffle_before_pack=False,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                   #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                          #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/pretrain/qwen/qwen1_5_7b_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/pretrain/qwen/qwen1_5_7b_full_custom_pretrain_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..911c22344f96a9b8e787a04f5d4edb955cd9ae4b --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/pretrain/qwen/qwen1_5_7b_full_custom_pretrain_e1.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[ + { + "text": "xxx" + }, + { + "text": "xxx" + }, + ... +] +""" # noqa: E501 + +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import pretrain_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-7B' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = ['上海是', 'Shanghai is'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset 
& Dataloader                             #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path='json', data_files=data_files),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=pretrain_map_fn,
+    template_map_fn=None,
+    remove_unused_columns=True,
+    shuffle_before_pack=False,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                   #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                          #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
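+    # Editorial note (not upstream text): DistSamplerSeedHook calls
+    # `set_epoch` on the sampler at the start of every epoch, so the shuffle
+    # order changes between epochs yet stays identical across all ranks in a
+    # distributed run.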
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/pretrain/qwen/qwen_1_8b_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/pretrain/qwen/qwen_1_8b_full_custom_pretrain_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..a1cbd63ddc6f892a0c423742408bbd3c2a93e153 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/pretrain/qwen/qwen_1_8b_full_custom_pretrain_e1.py @@ -0,0 +1,200 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[ + { + "text": "xxx" + }, + { + "text": "xxx" + }, + ... +] +""" # noqa: E501 + +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import pretrain_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-1_8B' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = ['上海是', 'Shanghai is'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + 
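+# Editorial aside (assumption, not part of the upstream config): with
+# `pack_to_max_length=True`, tokenized samples are concatenated into fixed
+# `max_length`-token blocks so little padding is wasted. Conceptually:
+#
+#     def pack(token_lists, max_length=2048):
+#         buf, packed = [], []
+#         for toks in token_lists:
+#             buf.extend(toks)
+#             while len(buf) >= max_length:
+#                 packed.append(buf[:max_length])
+#                 buf = buf[max_length:]
+#         return packed  # any trailing partial block is dropped for brevity
+#
+# `shuffle_before_pack=False` keeps documents in corpus order before packing,
+# the usual choice for pretraining data.
+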
+#######################################################################
+#                      PART 3  Dataset & Dataloader                  #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path='json', data_files=data_files),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=pretrain_map_fn,
+    template_map_fn=None,
+    remove_unused_columns=True,
+    shuffle_before_pack=False,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                   #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                          #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/pretrain/qwen/qwen_72b_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/pretrain/qwen/qwen_72b_full_custom_pretrain_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..07812fb59158a4b768041b67d1ca26938896fae5 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/pretrain/qwen/qwen_72b_full_custom_pretrain_e1.py @@ -0,0 +1,200 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[ + { + "text": "xxx" + }, + { + "text": "xxx" + }, + ... +] +""" # noqa: E501 + +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import pretrain_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-72B' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = ['上海是', 'Shanghai is'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + 
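+# Editorial aside (assumption about typical usage, not upstream text): the
+# effective batch size is n_gpus * batch_size * accumulative_counts, i.e.
+# 1 * 1 * 16 = 16 packed sequences per optimizer step with the defaults
+# above. A full fine-tune of a 72B model cannot fit on a single GPU, so in
+# practice this config is launched on many GPUs with DeepSpeed sharding,
+# e.g. (flag names should be checked against your xtuner version):
+#
+#     NPROC_PER_NODE=8 xtuner train qwen_72b_full_custom_pretrain_e1 \
+#         --deepspeed deepspeed_zero3
+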
+#######################################################################
+#                      PART 3  Dataset & Dataloader                  #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path='json', data_files=data_files),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=pretrain_map_fn,
+    template_map_fn=None,
+    remove_unused_columns=True,
+    shuffle_before_pack=False,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                   #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                          #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/pretrain/qwen/qwen_7b_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/pretrain/qwen/qwen_7b_full_custom_pretrain_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..16da30039d0565fca863210f6ef6f551392eec8c --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/pretrain/qwen/qwen_7b_full_custom_pretrain_e1.py @@ -0,0 +1,200 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[ + { + "text": "xxx" + }, + { + "text": "xxx" + }, + ... +] +""" # noqa: E501 + +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import pretrain_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = ['上海是', 'Shanghai is'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### 
+#                      PART 3  Dataset & Dataloader                  #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path='json', data_files=data_files),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=pretrain_map_fn,
+    template_map_fn=None,
+    remove_unused_columns=True,
+    shuffle_before_pack=False,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                   #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                          #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/pretrain/starcoder/starcoder_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/pretrain/starcoder/starcoder_full_custom_pretrain_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..40f10f73c97609a72a980d13515553b07cdf9d52 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/pretrain/starcoder/starcoder_full_custom_pretrain_e1.py @@ -0,0 +1,201 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[ + { + "text": "xxx" + }, + { + "text": "xxx" + }, + ... +] +""" # noqa: E501 + +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import pretrain_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'bigcode/starcoder' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + 'from typing import List def has_close_elements(numbers: List[float], threshold: float) -> bool: """ Check if in given list of numbers, are any two numbers closer to each other than given threshold. 
>>> has_close_elements([1.0, 2.0, 3.0], 0.5) False >>> has_close_elements([1.0, 2.8, 3.0, 4.0, 5.0, 2.0], 0.3) True """' # noqa: E501 +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=pretrain_map_fn, + template_map_fn=None, + remove_unused_columns=True, + shuffle_before_pack=False, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
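+    # Editorial note (not upstream text): with `by_epoch=False`, `interval`
+    # below counts iterations, so a checkpoint is written every 500 steps and
+    # `max_keep_ckpts=2` deletes all but the two most recent ones.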
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/custom_dataset/pretrain/yi/yi_34b_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/pretrain/yi/yi_34b_full_custom_pretrain_e1.py
new file mode 100644
index 0000000000000000000000000000000000000000..38d86efe7848f5435d4090349bd8aded49d0707c
--- /dev/null
+++ b/data/xtuner/xtuner/configs/custom_dataset/pretrain/yi/yi_34b_full_custom_pretrain_e1.py
@@ -0,0 +1,199 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+"""Data format:
+
+[
+    {
+        "text": "xxx"
+    },
+    {
+        "text": "xxx"
+    },
+    ...
+]
+"""  # noqa: E501
+
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from torch.optim import AdamW
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import pretrain_map_fn
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+
+#######################################################################
+#                          PART 1  Settings                          #
+#######################################################################
+# Model
+pretrained_model_name_or_path = '01-ai/Yi-34B'
+use_varlen_attn = False
+
+# Data
+data_files = ['/path/to/json/file.json']
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16  # bs = 1 GPU * 1 batch_size_per_device * 16 acc
+dataloader_num_workers = 0
+max_epochs = 1
+optim_type = AdamW
+lr = 2e-5
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = ''
+evaluation_inputs = ['上海是', 'Shanghai is']
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                     #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                  #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path='json', data_files=data_files),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=pretrain_map_fn,
+    template_map_fn=None,
+    remove_unused_columns=True,
+    shuffle_before_pack=False,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                   #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                          #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/pretrain/yi/yi_6b_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/pretrain/yi/yi_6b_full_custom_pretrain_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..d1524d23c11b542914b80a7dbf7f0b927432bbf6 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/pretrain/yi/yi_6b_full_custom_pretrain_e1.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[ + { + "text": "xxx" + }, + { + "text": "xxx" + }, + ... +] +""" # noqa: E501 + +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import pretrain_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = '01-ai/Yi-6B' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = ['上海是', 'Shanghai is'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # 
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path='json', data_files=data_files),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=pretrain_map_fn,
+    template_map_fn=None,
+    remove_unused_columns=True,
+    shuffle_before_pack=False,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                   #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                          #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/pretrain/zephyr/zephyr_7b_beta_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/pretrain/zephyr/zephyr_7b_beta_full_custom_pretrain_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..0065eff953a1fcd92f93fd5fc81e300a6525f13d --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/pretrain/zephyr/zephyr_7b_beta_full_custom_pretrain_e1.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[ + { + "text": "xxx" + }, + { + "text": "xxx" + }, + ... +] +""" # noqa: E501 + +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import pretrain_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'HuggingFaceH4/zephyr-7b-beta' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = ['上海是', 'Shanghai is'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + 
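+# Editorial sketch (hypothetical file names, not upstream text): `data_files`
+# expects the format shown in the module docstring, a JSON list of
+# {"text": ...} records, which can be produced with something like:
+#
+#     import json
+#     docs = [open(p, encoding='utf-8').read() for p in ('a.txt', 'b.txt')]
+#     with open('pretrain.json', 'w', encoding='utf-8') as f:
+#         json.dump([{'text': d} for d in docs], f, ensure_ascii=False)
+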
+#######################################################################
+#                      PART 3  Dataset & Dataloader                  #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path='json', data_files=data_files),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=pretrain_map_fn,
+    template_map_fn=None,
+    remove_unused_columns=True,
+    shuffle_before_pack=False,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                   #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                          #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/baichuan/baichuan2_13b_chat_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/baichuan/baichuan2_13b_chat_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..558887c04ad01ee96387c9545669661cc11822e5 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/baichuan/baichuan2_13b_chat_qlora_custom_sft_e1.py @@ -0,0 +1,226 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +... +] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-13B-Chat' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.baichuan2_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( 
+ type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. 
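+    # Editorial note (not upstream text): this hook applies the PART 4
+    # schedule (linear warmup over the first `warmup_ratio` of training,
+    # then cosine decay to zero); it steps once per iteration because
+    # `convert_to_iter_based=True`.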
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/baichuan/baichuan2_7b_chat_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/baichuan/baichuan2_7b_chat_qlora_custom_sft_e1.py
new file mode 100644
index 0000000000000000000000000000000000000000..8df388a67f58c6e062162681645ca27af1c1fe87
--- /dev/null
+++ b/data/xtuner/xtuner/configs/custom_dataset/sft/baichuan/baichuan2_7b_chat_qlora_custom_sft_e1.py
@@ -0,0 +1,226 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+"""Data format:
+
+[{
+    "messages": [
+        { "role": "system", "content": "xxx." },
+        { "role": "user", "content": "xxx." },
+        { "role": "assistant", "content": "xxx.", "loss": false},
+        { "role": "user", "content": "xxx." },
+        { "role": "assistant", "content": "xxx.", "loss": true}
+    ]
+},
+...
+]
+"""  # noqa: E501
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                          #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'baichuan-inc/Baichuan2-7B-Chat'
+use_varlen_attn = False
+
+# Data
+data_files = ['/path/to/json/file.json']
+prompt_template = PROMPT_TEMPLATE.baichuan2_chat
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16  # bs = 1 GPU * 1 batch_size_per_device * 16 acc
+dataloader_num_workers = 0
+max_epochs = 1
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = ''
+evaluation_inputs = [
'请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time 
of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/baichuan/baichuan_13b_chat_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/baichuan/baichuan_13b_chat_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..3dc38eb4f73456cade5652c248568e7f3cffef40 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/baichuan/baichuan_13b_chat_qlora_custom_sft_e1.py @@ -0,0 +1,226 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +... 
+] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-13B-Chat' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.baichuan_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + 
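    # Back-of-envelope numbers for the settings above (single GPU, as the
    # `accumulative_counts` comment assumes):
    #   effective_batch = 1 * batch_size * accumulative_counts  # 1 * 1 * 16 = 16
    #   tokens_per_step = effective_batch * max_length          # 16 * 2048 = 32768
    # The token figure holds because `pack_to_max_length=True` fills every
    # sample to exactly `max_length` tokens.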
dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/baichuan/baichuan_7b_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/baichuan/baichuan_7b_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..dc15b62891cc34b19c2c56f1c3081ca97af99302 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/baichuan/baichuan_7b_qlora_custom_sft_e1.py @@ -0,0 +1,226 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." 
}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +... +] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'baichuan-inc/Baichuan-7B' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + 
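    # `process_hf_dataset` maps each raw "messages" record through
    # `openai_map_fn`, renders it with the prompt template, tokenizes, and,
    # with `pack_to_max_length=True`, concatenates samples into fixed
    # `max_length`-token blocks. An illustrative sketch of the packing step
    # only (the real implementation lives in `xtuner.dataset`):
    #   buf = []
    #   for ids in tokenized_samples:
    #       buf.extend(ids)
    #       while len(buf) >= max_length:
    #           yield buf[:max_length]
    #           buf = buf[max_length:]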
shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
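    # (Concretely, the hook calls `sampler.set_epoch(epoch)` on every rank, so
    # the shuffled order is identical across processes; without it each rank
    # could draw a different permutation of the packed dataset.)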
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/chatglm/chatglm2_6b_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/chatglm/chatglm2_6b_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..09b3549291fa82c72bbfe92b7655a7d7d0f07081 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/chatglm/chatglm2_6b_qlora_custom_sft_e1.py @@ -0,0 +1,226 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +... +] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm2-6b' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.chatglm2 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + 
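    # Unlike the Llama-style configs above, this ChatGLM tokenizer pads on the
    # left, and the chatglm3 variant below additionally passes
    # `encode_special_tokens=True`, which that remote tokenizer appears to
    # need so template control tokens are encoded as special ids. A quick
    # hedged check (assumes hub access; illustrative only):
    #   from transformers import AutoTokenizer
    #   tok = AutoTokenizer.from_pretrained('THUDM/chatglm2-6b',
    #                                       trust_remote_code=True)
    #   print(tok.padding_side)  # this config overrides it to 'left'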
pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/chatglm/chatglm3_6b_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/chatglm/chatglm3_6b_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..7e3abba71902a0d4acc34ee36645dfea0bde91af --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/chatglm/chatglm3_6b_qlora_custom_sft_e1.py @@ -0,0 +1,227 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +... +] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'THUDM/chatglm3-6b' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.chatglm3 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### 
+# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True, + padding_side='left') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. 
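    # `log_metric_by_epoch=False` reports metrics per iteration, consistent
    # with `log_processor = dict(by_epoch=False)` at the bottom of the file
    # and with the iteration-based CheckpointHook below.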
+ logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/deepseek/deepseek_moe_16b_chat_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/deepseek/deepseek_moe_16b_chat_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..f7621bc6c7bfb774868e307c62344fd07dbeba93 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/deepseek/deepseek_moe_16b_chat_qlora_custom_sft_e1.py @@ -0,0 +1,226 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +... 
+] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'deepseek-ai/deepseek-moe-16b-chat' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.deepseek_moe +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=16, + lora_alpha=16, + lora_dropout=0.05, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + 
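    # Note that this MoE config shrinks the adapter relative to the dense
    # configs above (LoRA r=16, lora_dropout=0.05 instead of r=64 and 0.1)
    # while keeping the data pipeline identical. `dataloader_num_workers = 0`
    # runs tokenization and packing in the main process; raising it is a
    # reasonable first knob if data loading becomes the bottleneck.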
dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/deepseek/deepseekcoder_6_7b_instruct_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/deepseek/deepseekcoder_6_7b_instruct_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..629012f5bba417cae04bcc32c690bc2eb21d44f3 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/deepseek/deepseekcoder_6_7b_instruct_qlora_custom_sft_e1.py @@ -0,0 +1,230 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
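# In the "Data format" sample below, the per-message "loss" flag marks which
# assistant turns are supervised: false keeps a turn as context only, true
# includes it in the training loss. A minimal, hypothetical sketch of
# producing such a file and running this config with the xtuner CLI
# (paths are placeholders):
#   import json
#   sample = {'messages': [
#       {'role': 'user', 'content': 'hi'},
#       {'role': 'assistant', 'content': 'hello', 'loss': True}]}
#   with open('/path/to/json/file.json', 'w') as f:
#       json.dump([sample], f, ensure_ascii=False)
# then, from a shell:
#   xtuner train <this_config>.py --deepspeed deepspeed_zero2
#   xtuner convert pth_to_hf <this_config>.py <work_dir>/iter_500.pth <adapter_out>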
+"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +... +] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'deepseek-ai/deepseek-coder-6.7b-instruct' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.deepseek_coder +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 100 +SYSTEM = '' +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = 
dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/gemma/gemma_2b_it_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/gemma/gemma_2b_it_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..122ddf0230da193db200d7d00799cd99898e4a37 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/gemma/gemma_2b_it_qlora_custom_sft_e1.py @@ -0,0 +1,226 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +... +] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'google/gemma-2b-it' # Gemma requires transformers>=4.38.1 # noqa: E501 +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.gemma +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + 
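    # Reminder: nothing here is instantiated at import time. These nested
    # dicts are mmengine-style lazy configs; the runner's builder later pops
    # `type` and calls it with the remaining keys, roughly:
    #   kwargs = dict(cfg)
    #   build = kwargs.pop('type')
    #   obj = build(**kwargs)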
type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. 
+ param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/gemma/gemma_2b_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/gemma/gemma_2b_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..9a3d36b309ae0eca7ccf2f312a48a20e096acd84 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/gemma/gemma_2b_qlora_custom_sft_e1.py @@ -0,0 +1,226 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +... +] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'google/gemma-2b' # Gemma requires transformers>=4.38.1 # noqa: E501 +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 
'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every 
iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/gemma/gemma_7b_it_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/gemma/gemma_7b_it_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..c677c9d095711950e39a27f7bdeac466573f206f --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/gemma/gemma_7b_it_qlora_custom_sft_e1.py @@ -0,0 +1,226 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +...
+] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'google/gemma-7b-it' # Gemma requires transformers>=4.38.1 # noqa: E501 +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.gemma +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + 
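# Note: with pack_to_max_length=True each training sample is already a full max_length-token pack, so batch_size=1 per device is the intended default rather than an oversight. +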
num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/gemma/gemma_7b_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/gemma/gemma_7b_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..443a1e66345fab7ef262cee750d9a747be9a643b --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/gemma/gemma_7b_qlora_custom_sft_e1.py @@ -0,0 +1,226 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx."
}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +... +] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'google/gemma-7b' # Gemma requires transformers>=4.38.1 # noqa: E501 +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, 
template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/internlm/internlm2_chat_1_8b_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/internlm/internlm2_chat_1_8b_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..2aaa6f24dd8ca406725a84aa70b744c337a195e1 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/internlm/internlm2_chat_1_8b_qlora_custom_sft_e1.py @@ -0,0 +1,226 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +... +] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-chat-1_8b' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( 
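+ # mmengine-style lazy config: `type` holds the callable and the remaining keys are passed to it as kwargs when the runner builds the object.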
+ type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. 
+ param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/internlm/internlm2_chat_20b_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/internlm/internlm2_chat_20b_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..dfb4238395e7f2b868a4681e83dbf9f0420c314a --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/internlm/internlm2_chat_20b_qlora_custom_sft_e1.py @@ -0,0 +1,226 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +... +] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-chat-20b' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ +
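# Prompts fed to EvaluateChatHook every `evaluation_freq` iterations as a quick generation sanity check during training. +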
'请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time 
of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/internlm/internlm2_chat_7b_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/internlm/internlm2_chat_7b_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..3131039926c734e8167d0300ee6ef2876327ddbd --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/internlm/internlm2_chat_7b_qlora_custom_sft_e1.py @@ -0,0 +1,226 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +...
+] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-chat-7b' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + 
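# num_workers=0 keeps data loading in the main process; packing is done once up front, so loading is rarely the bottleneck here. +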
dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/llama/llama2_70b_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/llama/llama2_70b_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..2b0f889b4b947f1deeb9bbe6b1843b9bb3995a0c --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/llama/llama2_70b_qlora_custom_sft_e1.py @@ -0,0 +1,227 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx."
}, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +... +] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-70b-hf' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 3e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + target_modules=['gate_proj', 'down_proj', 'up_proj'], + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + 
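# openai_map_fn converts the "messages" records shown in the module docstring into xtuner's internal chat format before the llama2 prompt template above is applied. +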
remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/llama/llama2_7b_chat_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/llama/llama2_7b_chat_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..9aa9b63626c5dc2c226ad105d7ff7c891296b423 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/llama/llama2_7b_chat_qlora_custom_sft_e1.py @@ -0,0 +1,226 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +... +] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-chat-hf' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + 
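# padding_side='right' is the usual choice for SFT training; batched generation at inference time typically pads on the left instead. +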
type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. 
+ param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/minicpm/minicpm3_4b_chat_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/minicpm/minicpm3_4b_chat_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..499d475fec4afaa59bff3d1e0c13d6edfe5b5b83 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/minicpm/minicpm3_4b_chat_qlora_custom_sft_e1.py @@ -0,0 +1,227 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +... +] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'openbmb/MiniCPM3-4B' +use_varlen_attn = False + +# Data +data_files = ['/path/to/your.json'] +prompt_template = PROMPT_TEMPLATE.minicpm3 +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_steps = 10000 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five
scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|im_end|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_steps, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_steps, + end=max_steps, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_iters=max_steps) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. 
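+    # (note: `interval` counts training iterations, i.e. batches, not
+    # optimizer updates, so a log line appears every 10 batches even though
+    # gradients are accumulated over 16 batches per update here.)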
+ logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/minicpm/minicpm_1b_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/minicpm/minicpm_1b_full_custom_pretrain_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..fc0da5ed3560495cc2bd8c9ccc923984938a175b --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/minicpm/minicpm_1b_full_custom_pretrain_e1.py @@ -0,0 +1,200 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[ + { + "text": "xxx" + }, + { + "text": "xxx" + }, + ... +] +""" # noqa: E501 + +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import pretrain_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'openbmb/MiniCPM-1B-sft-bf16' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 1 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = ['上海是', 'Shanghai is'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + 
trust_remote_code=True,
+    padding_side='right',
+    eos_token='</s>
') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=pretrain_map_fn, + template_map_fn=None, + remove_unused_columns=True, + shuffle_before_pack=False, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
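+    # (roughly: the hook calls `set_epoch` on the sampler each epoch, giving
+    # a fresh but rank-consistent shuffle in multi-GPU runs.)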
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/minicpm/minicpm_2b_full_custom_pretrain_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/minicpm/minicpm_2b_full_custom_pretrain_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..160495a860ba94b79702b3ff447435117fcf46b2 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/minicpm/minicpm_2b_full_custom_pretrain_e1.py @@ -0,0 +1,200 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[ + { + "text": "xxx" + }, + { + "text": "xxx" + }, + ... +] +""" # noqa: E501 + +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import pretrain_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'openbmb/MiniCPM-2B-sft-bf16' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = ['上海是', 'Shanghai is'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='
') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=pretrain_map_fn, + template_map_fn=None, + remove_unused_columns=True, + shuffle_before_pack=False, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
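+    # (roughly: without this hook, distributed runs would replay the same
+    # shuffle order every epoch.)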
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/mistral/mistral_7b_full_finetune_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/mistral/mistral_7b_full_finetune_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..0af78f79f78b38fa91a7dca6951da58f3e3d25a9 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/mistral/mistral_7b_full_finetune_custom_sft_e1.py @@ -0,0 +1,234 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +... +] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from torch.utils.data import BatchSampler +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.dataset.samplers import InternRepoSampler +from xtuner.engine import (DatasetInfoHook, EvaluateChatHook, ThroughputHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'mistralai/Mistral-7B-v0.1' +use_varlen_attn = True + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.mistral +max_length = 32768 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # 
+####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.bfloat16, + attn_implementation='flash_attention_2', + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + use_varlen_attn=use_varlen_attn, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=InternRepoSampler, shuffle=True, seed=1024), + batch_sampler=dict(type=BatchSampler, drop_last=True, batch_size=1), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', +) + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict( + type=DatasetInfoHook, tokenizer=tokenizer, + is_intern_repo_dataset=True), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template), + dict(type=ThroughputHook) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the 
time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 100 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=1), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +log_processor = dict( + by_epoch=False, + window_size=1, + mean_pattern=r'.*(loss|time|data_time|grad_norm|tflops).*') diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/mixtral/mixtral_8x7b_instruct_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/mixtral/mixtral_8x7b_instruct_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..91cda57ecce031fcf814c49062dbbf0501826fb9 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/mixtral/mixtral_8x7b_instruct_qlora_custom_sft_e1.py @@ -0,0 +1,229 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +... 
+] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'mistralai/Mixtral-8x7B-Instruct-v0.1' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.mixtral +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + target_modules=[ + 'q_proj', 'k_proj', 'v_proj', 'o_proj', 'w1', 'w2', 'w3' + ], + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = 
dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/qwen/qwen1_5_0_5b_chat_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/qwen/qwen1_5_0_5b_chat_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..3066f0be9f30fbf4b3a211dda9461d9e112afd98 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/qwen/qwen1_5_0_5b_chat_qlora_custom_sft_e1.py @@ -0,0 +1,226 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
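+# NOTE: this recipe freezes the base model in 4-bit NF4 (double quantization)
+# and trains rank-64 LoRA adapters on top; assuming a single GPU, the
+# effective batch size is batch_size * accumulative_counts = 1 * 16 = 16.
+# A hedged usage sketch with the standard xtuner CLI, after pointing
+# `data_files` below at your own OpenAI-format JSON:
+#   xtuner train qwen1_5_0_5b_chat_qlora_custom_sft_e1.py --deepspeed deepspeed_zero2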
+"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +... +] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-0.5B-Chat' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + 
type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
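+    # (harmless on a single GPU; it only keeps the DefaultSampler's per-epoch
+    # shuffling seed consistent across workers.)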
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/qwen/qwen1_5_14b_chat_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/qwen/qwen1_5_14b_chat_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..642592f0ca04acb7e20b71db3993abed48c56e9c --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/qwen/qwen1_5_14b_chat_qlora_custom_sft_e1.py @@ -0,0 +1,226 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +... +] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-14B-Chat' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + 
type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. 
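+    # (this hook steps the LinearLR warmup + CosineAnnealingLR schedule from
+    # PART 4; with warmup_ratio=0.03, roughly the first 3% of iterations ramp
+    # the LR up to `lr` before cosine decay begins.)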
+ param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/qwen/qwen1_5_1_8b_chat_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/qwen/qwen1_5_1_8b_chat_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..3790006d76629d51923504bfec0272c7dda6eafb --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/qwen/qwen1_5_1_8b_chat_qlora_custom_sft_e1.py @@ -0,0 +1,226 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +... +] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-1.8B-Chat' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five 
scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. 
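+    # (IterTimerHook is what populates the `time` and `data_time` fields in
+    # the training log.)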
+ timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/qwen/qwen1_5_4b_chat_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/qwen/qwen1_5_4b_chat_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..36d3e6cd0762a440ecdff43852728a88b30db4a0 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/qwen/qwen1_5_4b_chat_qlora_custom_sft_e1.py @@ -0,0 +1,226 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +... 
+] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-4B-Chat' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + 
sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/qwen/qwen1_5_72b_chat_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/qwen/qwen1_5_72b_chat_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..d152c207d45801817a36131240273a2a0b96e63c --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/qwen/qwen1_5_72b_chat_qlora_custom_sft_e1.py @@ -0,0 +1,226 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx." 
}, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +... +] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-72B-Chat' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + 
pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/qwen/qwen1_5_7b_chat_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/qwen/qwen1_5_7b_chat_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..1098c5ca84a604ee90f5a3d8c5ff58263b70dad6 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/qwen/qwen1_5_7b_chat_qlora_custom_sft_e1.py @@ -0,0 +1,226 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +... +] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-7B-Chat' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, 
+ pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/qwen/qwen_1_8b_chat_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/qwen/qwen_1_8b_chat_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..2d517e897f8ec0055e479e754ce99408e30b3dc0 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/qwen/qwen_1_8b_chat_qlora_custom_sft_e1.py @@ -0,0 +1,227 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +... +] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-1_8B-Chat' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + 
+####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|im_end|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. 
+ timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/qwen/qwen_72b_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/qwen/qwen_72b_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..e1156a1aa9b6bb7071d16285feda8e05c4a48137 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/qwen/qwen_72b_qlora_custom_sft_e1.py @@ -0,0 +1,227 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +... 
+] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-72B' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + 
dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/qwen/qwen_7b_chat_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/qwen/qwen_7b_chat_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..b6fcaacba7e095b2f5ef9d348fcfc276eb111e23 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/qwen/qwen_7b_chat_qlora_custom_sft_e1.py @@ -0,0 +1,227 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx." 
}, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +... +] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B-Chat' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|im_end|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + 
pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/starcoder/starcoder_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/starcoder/starcoder_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..d79484dcf29de1e4fe70ef5b607bcb778d1b2049 --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/starcoder/starcoder_qlora_custom_sft_e1.py @@ -0,0 +1,230 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +... +] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'bigcode/starcoder' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +# randomly select 20000 samples from the original dataset +max_dataset_length = 20000 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 1e-4 +betas = (0.9, 0.999) +weight_decay = 0.05 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 200 +SYSTEM = '' +evaluation_inputs = [ + 'from typing import List def has_close_elements(numbers: List[float], threshold: float) -> bool: """ Check if in given list of numbers, are any two numbers closer to each other than given threshold. 
>>> has_close_elements([1.0, 2.0, 3.0], 0.5) False >>> has_close_elements([1.0, 2.8, 3.0, 4.0, 5.0, 2.0], 0.3) True """' # noqa: E501 +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=16, + lora_alpha=32, + lora_dropout=0.05, + bias='none', + target_modules=['c_proj', 'c_attn', 'q_attn'], + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_dataset_length=max_dataset_length, + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + 
prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/yi/yi_34b_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/yi/yi_34b_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..4906ab5f7dd688eeae2adfb3943008ec253bdf2d --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/yi/yi_34b_qlora_custom_sft_e1.py @@ -0,0 +1,226 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +... 
+] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = '01-ai/Yi-34B' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + 
sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/yi/yi_6b_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/yi/yi_6b_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..96a684a22a7126c361961ecea1bf4e74752f11ac --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/yi/yi_6b_qlora_custom_sft_e1.py @@ -0,0 +1,226 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx." 
}, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +... +] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = '01-ai/Yi-6B' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + 
pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
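+    # (a brief note: DistSamplerSeedHook calls `set_epoch` on the distributed
+    # sampler each epoch, so every rank draws the same shuffle order.)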
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/custom_dataset/sft/zephyr/zephyr_7b_beta_qlora_custom_sft_e1.py b/data/xtuner/xtuner/configs/custom_dataset/sft/zephyr/zephyr_7b_beta_qlora_custom_sft_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..b2349c2dadb93a5a1f3d0540bda761808a226eec --- /dev/null +++ b/data/xtuner/xtuner/configs/custom_dataset/sft/zephyr/zephyr_7b_beta_qlora_custom_sft_e1.py @@ -0,0 +1,226 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: + +[{ + "messages": [ + { "role": "system", "content": "xxx." }, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": false}, + { "role": "user", "content": "xxx." }, + { "role": "assistant", "content": "xxx.", "loss": true} + ] +}, +... +] +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openai_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'HuggingFaceH4/zephyr-7b-beta' +use_varlen_attn = False + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.zephyr +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # bs = 1 GPU * 1 batch_size_per_device * 16 acc +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + 
type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. 
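+    # (ParamSchedulerHook steps the LinearLR warmup and CosineAnnealingLR
+    # schedules defined in PART 4 as training progresses.)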
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/deepseek/README.md b/data/xtuner/xtuner/configs/deepseek/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..dd16619c0f806817c3babc21d7102f02e5b5a465
--- /dev/null
+++ b/data/xtuner/xtuner/configs/deepseek/README.md
@@ -0,0 +1,59 @@
+# DeepSeek V2
+
+## Install
+
+```bash
+# Git clone the latest xtuner
+git clone https://github.com/InternLM/xtuner.git
+
+# Install the latest xtuner
+cd xtuner
+pip install -e '.[all]'
+
+# DeepSeek V2 requires flash-attn
+pip install flash-attn
+
+# Install the latest transformers
+pip install -U transformers
+```
+
+## Full Parameter Fine-tune
+
+Full-parameter fine-tuning of DeepSeek V2 (236B) requires at least 64 A100-80G GPUs. The fine-tuned model is saved to `${WORK_DIRS}/hf_model` by `HFCheckpointHook`.
+
+### slurm
+
+Note: `$PARTITION` is the slurm partition to submit the job to.
+
+```bash
+srun -p $PARTITION --job-name=deepseek_v2 --nodes=8 --gres=gpu:8 --ntasks-per-node=8 xtuner train deepseek_v2_chat_full_alpaca_e3 --deepspeed deepspeed_zero3 --launcher slurm
+```
+
+### torchrun
+
+Note: `$NODE_0_ADDR` is the IP address of node 0.
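+Each environment variable in the commands below corresponds to a `torchrun` launch parameter: `NPROC_PER_NODE` is the number of GPUs per node, `NNODES` the number of nodes, `NODE_RANK` the index of the current node, and `ADDR`/`PORT` the rendezvous endpoint on node 0. Every node runs the same command except for its own `NODE_RANK`, and `NNODES * NPROC_PER_NODE` must match the total GPU count (8 * 8 = 64 here).
+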
+
+```bash
+# execute on node 0
+NPROC_PER_NODE=8 NNODES=8 PORT=29600 ADDR=$NODE_0_ADDR NODE_RANK=0 xtuner train deepseek_v2_chat_full_alpaca_e3 --deepspeed deepspeed_zero3 --launcher pytorch
+
+# execute on node 1
+NPROC_PER_NODE=8 NNODES=8 PORT=29600 ADDR=$NODE_0_ADDR NODE_RANK=1 xtuner train deepseek_v2_chat_full_alpaca_e3 --deepspeed deepspeed_zero3 --launcher pytorch
+
+# execute on node 2, 3, ..., 7
+```
+
+### Speed
+
+128 * A100 80G:
+
+| Model                  | Sequence Length | Use Varlen Attn | Sequence Parallel World Size | Tokens per Second |
+| :--------------------: | :-------------: | :-------------: | :--------------------------: | :---------------: |
+| deepseek v2 hf         | 8k              | False           | 1                            | 60                |
+| **deepseek v2 XTuner** | **8k**          | **False**       | **1**                        | **120 (2x)**      |
+| deepseek v2 hf         | 8k              | True            | 1                            | 60                |
+| **deepseek v2 XTuner** | **8k**          | **True**        | **1**                        | **130 (2.2x)**    |
+| deepseek v2 hf         | 16k             | False           | 1                            | OOM               |
+| **deepseek v2 XTuner** | **16k**         | **False**       | **1**                        | **148**           |
+| deepseek v2 hf         | 16k             | True            | 1                            | 95                |
+| **deepseek v2 XTuner** | **16k**         | **True**        | **1**                        | **180 (1.9x)**    |
diff --git a/data/xtuner/xtuner/configs/deepseek/deepseek_coder_6_7b_base/deepseek_coder_6_7b_base_qlora_code_alpaca_e3.py b/data/xtuner/xtuner/configs/deepseek/deepseek_coder_6_7b_base/deepseek_coder_6_7b_base_qlora_code_alpaca_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..38975d8df61af3a573ee6ae56244bfe351325087
--- /dev/null
+++ b/data/xtuner/xtuner/configs/deepseek/deepseek_coder_6_7b_base/deepseek_coder_6_7b_base_qlora_code_alpaca_e3.py
@@ -0,0 +1,223 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import code_alpaca_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.parallel.sequence import SequenceParallelSampler
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'deepseek-ai/deepseek-coder-6.7b-base'
+use_varlen_attn = False
+
+# Data
+data_path = 'HuggingFaceH4/CodeAlpaca_20K'
+prompt_template = PROMPT_TEMPLATE.deepseek_coder
+max_length = 2048
+pack_to_max_length = True
+
+# parallel
+sequence_parallel_size = 1
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+accumulative_counts *= sequence_parallel_size
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 100
+SYSTEM = 
SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=code_alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + 
type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/deepseek/deepseek_coder_6_7b_instruct/deepseekcoder_6_7b_instruct_qlora_code_alpaca_e3.py b/data/xtuner/xtuner/configs/deepseek/deepseek_coder_6_7b_instruct/deepseekcoder_6_7b_instruct_qlora_code_alpaca_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..74601407f37935b21506be376282875185f4ea8a
--- /dev/null
+++ b/data/xtuner/xtuner/configs/deepseek/deepseek_coder_6_7b_instruct/deepseekcoder_6_7b_instruct_qlora_code_alpaca_e3.py
@@ -0,0 +1,223 @@
+# Copyright (c) OpenMMLab. All rights reserved.
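+# QLoRA fine-tune of deepseek-coder-6.7b-instruct on HuggingFaceH4/CodeAlpaca_20K
+# for 3 epochs; launch with
+# `xtuner train deepseekcoder_6_7b_instruct_qlora_code_alpaca_e3`.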
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import code_alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'deepseek-ai/deepseek-coder-6.7b-instruct' +use_varlen_attn = False + +# Data +data_path = 'HuggingFaceH4/CodeAlpaca_20K' +prompt_template = PROMPT_TEMPLATE.deepseek_coder +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 100 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=code_alpaca_map_fn, + 
template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+sampler = SequenceParallelSampler \
+    if sequence_parallel_size > 1 else DefaultSampler
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=sampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/deepseek/deepseek_moe_16b_base/deepseek_moe_16b_base_full_oasst1_e3.py b/data/xtuner/xtuner/configs/deepseek/deepseek_moe_16b_base/deepseek_moe_16b_base_full_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..ba915888e89cbdcaa8151d4e8f51297d213aeb91 --- /dev/null +++ b/data/xtuner/xtuner/configs/deepseek/deepseek_moe_16b_base/deepseek_moe_16b_base_full_oasst1_e3.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'deepseek-ai/deepseek-moe-16b-base' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.deepseek_moe +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + 
type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=data_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=oasst1_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+sampler = SequenceParallelSampler \
+    if sequence_parallel_size > 1 else DefaultSampler
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=sampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/deepseek/deepseek_moe_16b_base/deepseek_moe_16b_base_qlora_oasst1_e3.py b/data/xtuner/xtuner/configs/deepseek/deepseek_moe_16b_base/deepseek_moe_16b_base_qlora_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..69dec51412bc42b3031869ba18d7cab3853bf246 --- /dev/null +++ b/data/xtuner/xtuner/configs/deepseek/deepseek_moe_16b_base/deepseek_moe_16b_base_qlora_oasst1_e3.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'deepseek-ai/deepseek-moe-16b-base' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.deepseek_moe +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + 
use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=16, + lora_alpha=16, + lora_dropout=0.05, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
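+    # (`by_epoch=False` below makes `interval` count iterations, so a
+    # checkpoint is written every `save_steps` = 500 steps.)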
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/deepseek/deepseek_moe_16b_chat/deepseek_moe_16b_chat_full_oasst1_e3.py b/data/xtuner/xtuner/configs/deepseek/deepseek_moe_16b_chat/deepseek_moe_16b_chat_full_oasst1_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..e224e3f29b671781bcf7be4c18c8c93a40851e55
--- /dev/null
+++ b/data/xtuner/xtuner/configs/deepseek/deepseek_moe_16b_chat/deepseek_moe_16b_chat_full_oasst1_e3.py
@@ -0,0 +1,199 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from torch.optim import AdamW
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.parallel.sequence import SequenceParallelSampler
+from xtuner.utils import PROMPT_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'deepseek-ai/deepseek-moe-16b-chat'
+use_varlen_attn = False
+
+# Data
+data_path = 'timdettmers/openassistant-guanaco'
+prompt_template = PROMPT_TEMPLATE.deepseek_moe
+max_length = 2048
+pack_to_max_length = True
+
+# parallel
+sequence_parallel_size = 1
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+accumulative_counts *= sequence_parallel_size
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-5
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = ''
+evaluation_inputs = [
+    '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai'
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=data_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=oasst1_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+sampler = SequenceParallelSampler \
+    if sequence_parallel_size > 1 else DefaultSampler
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=sampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/deepseek/deepseek_moe_16b_chat/deepseek_moe_16b_chat_qlora_oasst1_e3.py b/data/xtuner/xtuner/configs/deepseek/deepseek_moe_16b_chat/deepseek_moe_16b_chat_qlora_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..ba6c3a80512a00d251e06034d4ca5877560d0483 --- /dev/null +++ b/data/xtuner/xtuner/configs/deepseek/deepseek_moe_16b_chat/deepseek_moe_16b_chat_qlora_oasst1_e3.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'deepseek-ai/deepseek-moe-16b-chat' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.deepseek_moe +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + 
use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=16, + lora_alpha=16, + lora_dropout=0.05, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/deepseek/deepseek_v2_chat/deepseek_v2_chat_full_alpaca_e3.py b/data/xtuner/xtuner/configs/deepseek/deepseek_v2_chat/deepseek_v2_chat_full_alpaca_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..016e7aed03105229f1bb0a35875db548311fa3bd
--- /dev/null
+++ b/data/xtuner/xtuner/configs/deepseek/deepseek_v2_chat/deepseek_v2_chat_full_alpaca_e3.py
@@ -0,0 +1,198 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR
+from torch.optim import AdamW
+from transformers import AutoTokenizer
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, HFCheckpointHook,
+                                 ThroughputHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.model.transformers_models.deepseek_v2 import DeepseekV2ForCausalLM
+from xtuner.parallel.sequence import SequenceParallelSampler
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'deepseek-ai/DeepSeek-V2-Chat'
+use_varlen_attn = False
+
+# Data
+data_path = 'tatsu-lab/alpaca'
+prompt_template = PROMPT_TEMPLATE.deepseek_v2
+max_length = 2048
+pack_to_max_length = True
+
+# parallel
+sequence_parallel_size = 1
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 1  # bs per device 1 * acc 1 * 128 gpus = 128 total bs
+accumulative_counts *= sequence_parallel_size
+dataloader_num_workers = 4
+max_epochs = 3
+optim_type = AdamW
+lr = 1e-5
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 50
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+# Saving the optimizer states of DeepSeek V2 236B requires a lot of
+# storage space, so it is recommended to set `save_optimizer` to False
+# (training can then not be resumed from the checkpoint).
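+# e.g. `save_optimizer = False` would keep only model weights in each checkpoint.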
+save_optimizer = True + +# Evaluate the generation performance during the training +evaluation_freq = 25 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + # Only full-finetune is supported in `DeepseekV2ForCausalLM``, XTuner. + # Please use `AutoModelForCausalLM` for lora or qlora finetune. + type=DeepseekV2ForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + moe_implementation='shard', + expert_in_one_shard=10, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=0, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict(type=ThroughputHook), + dict(type=HFCheckpointHook) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=1), + # enable the parameter scheduler. 
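+ # (this hook steps the CosineAnnealingLR defined in PART 4; without it
+ # the learning rate would stay fixed at its initial value.)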
+ param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False, window_size=1) diff --git a/data/xtuner/xtuner/configs/deepseek/deepseek_v2_lite_chat/deepseek_v2_lite_chat_full_alpaca_e3.py b/data/xtuner/xtuner/configs/deepseek/deepseek_v2_lite_chat/deepseek_v2_lite_chat_full_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..0d59ed45d8ef1344eb9dd1a964c35e0d71cb1ac1 --- /dev/null +++ b/data/xtuner/xtuner/configs/deepseek/deepseek_v2_lite_chat/deepseek_v2_lite_chat_full_alpaca_e3.py @@ -0,0 +1,195 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR +from torch.optim import AdamW +from transformers import AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, HFCheckpointHook, + ThroughputHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.model.transformers_models.deepseek_v2 import DeepseekV2ForCausalLM +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'deepseek-ai/DeepSeek-V2-Lite-Chat' +use_varlen_attn = False + +# Data +data_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.deepseek_v2 +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 1 # bs per device 1 * acc 1 * 128 gpus = 128 total bs +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 4 +max_epochs = 3 +optim_type = AdamW +lr = 1e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 50 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) +save_optimizer = True + +# Evaluate the generation performance during the training +evaluation_freq = 50 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + 
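+# DeepSeek-V2-Lite is a roughly 16B-total / 2.4B-active MoE model, so unlike
+# the 236B recipe above it can realistically be full-finetuned on a single
+# node; apart from the model name and expert sharding, the config is identical.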
+####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + # Only full-finetune is supported in `DeepseekV2ForCausalLM``, XTuner. + # Please use `AutoModelForCausalLM` for lora or qlora finetune. + type=DeepseekV2ForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + moe_implementation='shard', + expert_in_one_shard=8, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=0, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict(type=ThroughputHook), + dict(type=HFCheckpointHook) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=1), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
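+ # (DistSamplerSeedHook calls `set_epoch` on the sampler so every rank
+ # sees the same shuffled order, while the order still changes per epoch.)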
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False, window_size=1) diff --git a/data/xtuner/xtuner/configs/deepseek/deepseek_v2_lite_chat/deepseek_v2_lite_chat_full_alpaca_e3_32k_varlen.py b/data/xtuner/xtuner/configs/deepseek/deepseek_v2_lite_chat/deepseek_v2_lite_chat_full_alpaca_e3_32k_varlen.py new file mode 100644 index 0000000000000000000000000000000000000000..03b042daf4a629ce94d767b9a9bfdfb081a330d5 --- /dev/null +++ b/data/xtuner/xtuner/configs/deepseek/deepseek_v2_lite_chat/deepseek_v2_lite_chat_full_alpaca_e3_32k_varlen.py @@ -0,0 +1,195 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR +from torch.optim import AdamW +from transformers import AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, HFCheckpointHook, + ThroughputHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.model.transformers_models.deepseek_v2 import DeepseekV2ForCausalLM +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'deepseek-ai/DeepSeek-V2-Lite-Chat' +use_varlen_attn = True + +# Data +data_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.deepseek_v2 +max_length = 32768 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 1 # bs per device 1 * acc 1 * 128 gpus = 128 total bs +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 4 +max_epochs = 3 +optim_type = AdamW +lr = 1e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 50 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) +save_optimizer = True + +# Evaluate the generation performance during the training +evaluation_freq = 50 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + 
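+ # every remaining key is forwarded verbatim to `from_pretrained`, so a
+ # local checkpoint directory works here just as well as a hub id.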
pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + # Only full-finetune is supported in `DeepseekV2ForCausalLM``, XTuner. + # Please use `AutoModelForCausalLM` for lora or qlora finetune. + type=DeepseekV2ForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + moe_implementation='shard', + expert_in_one_shard=8, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=0, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict(type=ThroughputHook), + dict(type=HFCheckpointHook) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=1), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
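+ # (in this 32k recipe VarlenAttnArgsToMessageHubHook, registered above, is
+ # active: it publishes each packed batch's cumulative sequence lengths so
+ # attention never crosses the boundary between packed samples.)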
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False, window_size=1) diff --git a/data/xtuner/xtuner/configs/deepspeed/deepspeed_zero1.json b/data/xtuner/xtuner/configs/deepspeed/deepspeed_zero1.json new file mode 100644 index 0000000000000000000000000000000000000000..a5df9d1365a90ac6929cdeed196423a1d31f5d5f --- /dev/null +++ b/data/xtuner/xtuner/configs/deepspeed/deepspeed_zero1.json @@ -0,0 +1,18 @@ +{ + "gradient_accumulation_steps": "auto", + "train_micro_batch_size_per_gpu": "auto", + "gradient_clipping": "auto", + "zero_allow_untested_optimizer": true, + "zero_force_ds_cpu_optimizer": false, + "zero_optimization": { + "stage": 1, + "overlap_comm": true + }, + "fp16": { + "enabled": "auto", + "initial_scale_power": 16 + }, + "bf16": { + "enabled": "auto" + } +} diff --git a/data/xtuner/xtuner/configs/deepspeed/deepspeed_zero2.json b/data/xtuner/xtuner/configs/deepspeed/deepspeed_zero2.json new file mode 100644 index 0000000000000000000000000000000000000000..cf1fa0addb92c492987f188288bcc4ae04ca79c0 --- /dev/null +++ b/data/xtuner/xtuner/configs/deepspeed/deepspeed_zero2.json @@ -0,0 +1,18 @@ +{ + "gradient_accumulation_steps": "auto", + "train_micro_batch_size_per_gpu": "auto", + "gradient_clipping": "auto", + "zero_allow_untested_optimizer": true, + "zero_force_ds_cpu_optimizer": false, + "zero_optimization": { + "stage": 2, + "overlap_comm": true + }, + "fp16": { + "enabled": "auto", + "initial_scale_power": 16 + }, + "bf16": { + "enabled": "auto" + } +} diff --git a/data/xtuner/xtuner/configs/deepspeed/deepspeed_zero2_offload.json b/data/xtuner/xtuner/configs/deepspeed/deepspeed_zero2_offload.json new file mode 100644 index 0000000000000000000000000000000000000000..7f3c0671c7bd64b159fb0a129e2262e2586b188b --- /dev/null +++ b/data/xtuner/xtuner/configs/deepspeed/deepspeed_zero2_offload.json @@ -0,0 +1,22 @@ +{ + "gradient_accumulation_steps": "auto", + "train_micro_batch_size_per_gpu": "auto", + "gradient_clipping": "auto", + "zero_allow_untested_optimizer": true, + "zero_force_ds_cpu_optimizer": false, + "zero_optimization": { + "stage": 2, + "overlap_comm": true, + "offload_optimizer": { + "device": "cpu", + "pin_memory": true + } + }, + "fp16": { + "enabled": "auto", + "initial_scale_power": 16 + }, + "bf16": { + "enabled": "auto" + } +} diff --git a/data/xtuner/xtuner/configs/deepspeed/deepspeed_zero3.json b/data/xtuner/xtuner/configs/deepspeed/deepspeed_zero3.json new file mode 100644 index 0000000000000000000000000000000000000000..1a2c666df2f10d4c58c13ea8a57ebb51a1ae5575 --- /dev/null +++ b/data/xtuner/xtuner/configs/deepspeed/deepspeed_zero3.json @@ -0,0 +1,19 @@ +{ + "gradient_accumulation_steps": "auto", + "train_micro_batch_size_per_gpu": "auto", + "gradient_clipping": "auto", + "zero_allow_untested_optimizer": true, + "zero_force_ds_cpu_optimizer": false, + "zero_optimization": { + "stage": 3, + "overlap_comm": true, + 
"stage3_gather_16bit_weights_on_model_save": true + }, + "fp16": { + "enabled": "auto", + "initial_scale_power": 16 + }, + "bf16": { + "enabled": "auto" + } +} diff --git a/data/xtuner/xtuner/configs/deepspeed/deepspeed_zero3_offload.json b/data/xtuner/xtuner/configs/deepspeed/deepspeed_zero3_offload.json new file mode 100644 index 0000000000000000000000000000000000000000..3f3b9506bbc2e9ba4c98bec683eb592a71e1accc --- /dev/null +++ b/data/xtuner/xtuner/configs/deepspeed/deepspeed_zero3_offload.json @@ -0,0 +1,27 @@ +{ + "gradient_accumulation_steps": "auto", + "train_micro_batch_size_per_gpu": "auto", + "gradient_clipping": "auto", + "zero_allow_untested_optimizer": true, + "zero_force_ds_cpu_optimizer": false, + "zero_optimization": { + "stage": 3, + "overlap_comm": true, + "offload_optimizer": { + "device": "cpu", + "pin_memory": true + }, + "offload_param": { + "device": "cpu", + "pin_memory": true + }, + "stage3_gather_16bit_weights_on_model_save": true + }, + "fp16": { + "enabled": "auto", + "initial_scale_power": 16 + }, + "bf16": { + "enabled": "auto" + } +} diff --git a/data/xtuner/xtuner/configs/dpo/internlm/internlm2_chat_1_8b_dpo_full.py b/data/xtuner/xtuner/configs/dpo/internlm/internlm2_chat_1_8b_dpo_full.py new file mode 100644 index 0000000000000000000000000000000000000000..908683fe6220767f469aa0fed3b7d9f11d6052cb --- /dev/null +++ b/data/xtuner/xtuner/configs/dpo/internlm/internlm2_chat_1_8b_dpo_full.py @@ -0,0 +1,201 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset.collate_fns.preference_collate_fn import \ + preference_collate_fn +from xtuner.dataset.preference_dataset import (build_preference_dataset, + orpo_dpo_mix_40k_map_fn) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model.dpo import DPO +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-chat-1_8b-sft' +use_varlen_attn = False +dpo_loss_type = 'sigmoid' # One of ['sigmoid', 'hinge', 'ipo', 'kto_pair', 'sppo_hard', 'nca_pair', 'robust'] # noqa: E501 +loss_beta = 0.1 +label_smoothing = 0.0 + +# Data +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 2048 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 5e-7 # refer to alignment handbook +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + 'What famous British author, known for his tales of mystery and the macabre, shares his initials with a common abbreviation for "rest in peace"?', # noqa: E501 + 'Please tell me five scenic spots in Shanghai', + '890729 - 
425663? Only respond with math and no words.' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=DPO, + use_varlen_attn=use_varlen_attn, + loss_type=dpo_loss_type, + beta=loss_beta, + label_smoothing=label_smoothing, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=build_preference_dataset, + dataset=dict(type=load_dataset, path='mlabonne/orpo-dpo-mix-40k'), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=orpo_dpo_mix_40k_map_fn, + is_dpo=True, + is_reward=False, + reward_token_id=-1, + num_proc=32, + use_varlen_attn=use_varlen_attn, + shuffle_before_pack=True, +) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict( + type=preference_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
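+ # note that `save_steps` counts dataloader iterations, not optimizer
+ # updates; with accumulative_counts=16 a checkpoint lands roughly every
+ # 31 parameter updates (500 / 16).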
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/dpo/internlm/internlm2_chat_1_8b_dpo_full_varlenattn.py b/data/xtuner/xtuner/configs/dpo/internlm/internlm2_chat_1_8b_dpo_full_varlenattn.py new file mode 100644 index 0000000000000000000000000000000000000000..787ad68bb5301b594a0a8d6d8df07625e8addfe5 --- /dev/null +++ b/data/xtuner/xtuner/configs/dpo/internlm/internlm2_chat_1_8b_dpo_full_varlenattn.py @@ -0,0 +1,211 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset.collate_fns.preference_collate_fn import \ + preference_collate_fn +from xtuner.dataset.preference_dataset import (build_preference_dataset, + orpo_dpo_mix_40k_map_fn) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model.dpo import DPO +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-chat-1_8b-sft' +use_varlen_attn = True +dpo_loss_type = 'sigmoid' # One of ['sigmoid', 'hinge', 'ipo', 'kto_pair', 'sppo_hard', 'nca_pair', 'robust'] # noqa: E501 +loss_beta = 0.1 +label_smoothing = 0.0 + +# Data +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 2048 +max_packed_length = max_length * 2 + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 5e-7 # refer to alignment handbook +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + 'What famous British author, known for his tales of mystery and the macabre, shares his initials with a common abbreviation for "rest in peace"?', # noqa: E501 + 'Please tell me five scenic spots in Shanghai', + '890729 - 
425663? Only respond with math and no words.' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=DPO, + use_varlen_attn=use_varlen_attn, + loss_type=dpo_loss_type, + beta=loss_beta, + label_smoothing=label_smoothing, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataset = dict( + type=build_preference_dataset, + dataset=dict(type=load_dataset, path='mlabonne/orpo-dpo-mix-40k'), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=orpo_dpo_mix_40k_map_fn, + is_dpo=True, + is_reward=False, + reward_token_id=-1, + num_proc=32, + use_varlen_attn=use_varlen_attn, + max_packed_length=max_packed_length, + shuffle_before_pack=True, +) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict( + type=preference_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/dpo/internlm/internlm2_chat_1_8b_dpo_full_varlenattn_jsonl_dataset.py b/data/xtuner/xtuner/configs/dpo/internlm/internlm2_chat_1_8b_dpo_full_varlenattn_jsonl_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..ae1a3cdca9990864867f28e52b039c13aa0ea9b2 --- /dev/null +++ b/data/xtuner/xtuner/configs/dpo/internlm/internlm2_chat_1_8b_dpo_full_varlenattn_jsonl_dataset.py @@ -0,0 +1,215 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset.collate_fns.preference_collate_fn import \ + preference_collate_fn +from xtuner.dataset.preference_dataset import (build_preference_dataset, + load_jsonl_dataset) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model.dpo import DPO +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-chat-1_8b-sft' +use_varlen_attn = True +dpo_loss_type = 'sigmoid' # One of ['sigmoid', 'hinge', 'ipo', 'kto_pair', 'sppo_hard', 'nca_pair', 'robust'] # noqa: E501 +loss_beta = 0.1 +label_smoothing = 0.0 + +# Data +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 2048 +max_packed_length = max_length * 2 + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 5e-7 # refer to alignment handbook +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + 'What famous British author, known for his tales of mystery and the macabre, shares his initials with a common abbreviation for "rest in peace"?', # noqa: E501 + 'Please tell me five scenic spots in Shanghai', + '890729 - 
425663? Only respond with math and no words.' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=DPO, + use_varlen_attn=use_varlen_attn, + loss_type=dpo_loss_type, + beta=loss_beta, + label_smoothing=label_smoothing, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataset = dict( + type=build_preference_dataset, + dataset=dict( + type=load_jsonl_dataset, + data_files=[ + '/your/jsonl/path/here.jsonl', + '/your/another/jsonl/path/here.jsonl' + ]), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=None, + is_dpo=True, + is_reward=False, + reward_token_id=-1, + num_proc=32, + use_varlen_attn=use_varlen_attn, + max_packed_length=max_packed_length, + shuffle_before_pack=True, +) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict( + type=preference_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. 
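+ # (this hook drives both phases of the schedule above: the LinearLR
+ # warmup over the first 3% of training, then the cosine decay.)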
+ param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/dpo/internlm/internlm2_chat_7b_dpo_qlora_varlenattn.py b/data/xtuner/xtuner/configs/dpo/internlm/internlm2_chat_7b_dpo_qlora_varlenattn.py new file mode 100644 index 0000000000000000000000000000000000000000..659d029b3af0314531def69df63c683ac0752ba1 --- /dev/null +++ b/data/xtuner/xtuner/configs/dpo/internlm/internlm2_chat_7b_dpo_qlora_varlenattn.py @@ -0,0 +1,230 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset.collate_fns.preference_collate_fn import \ + preference_collate_fn +from xtuner.dataset.preference_dataset import (build_preference_dataset, + orpo_dpo_mix_40k_map_fn) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model.dpo import DPO +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-chat-7b-sft' +use_varlen_attn = True +dpo_loss_type = 'sigmoid' # One of ['sigmoid', 'hinge', 'ipo', 'kto_pair', 'sppo_hard', 'nca_pair', 'robust'] # noqa: E501 +loss_beta = 0.1 +label_smoothing = 0.0 + +# Data +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 2048 +max_packed_length = max_length * 2 + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 5e-7 # refer to alignment handbook +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + 'What famous British author, known for his tales of mystery and the 
macabre, shares his initials with a common abbreviation for "rest in peace"?', # noqa: E501 + 'Please tell me five scenic spots in Shanghai', + '890729 - 425663? Only respond with math and no words.' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=DPO, + use_varlen_attn=use_varlen_attn, + loss_type=dpo_loss_type, + beta=loss_beta, + label_smoothing=label_smoothing, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataset = dict( + type=build_preference_dataset, + dataset=dict(type=load_dataset, path='mlabonne/orpo-dpo-mix-40k'), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=orpo_dpo_mix_40k_map_fn, + is_dpo=True, + is_reward=False, + reward_token_id=-1, + num_proc=32, + use_varlen_attn=use_varlen_attn, + max_packed_length=max_packed_length, + shuffle_before_pack=True, +) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict( + type=preference_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + 
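+ # generation runs every `evaluation_freq` iterations with the current
+ # adapter weights; a cheap smoke test that DPO is not degrading fluency.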
every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/dpo/llama/llama3_8b_instruct_dpo_qlora_varlenattn.py b/data/xtuner/xtuner/configs/dpo/llama/llama3_8b_instruct_dpo_qlora_varlenattn.py new file mode 100644 index 0000000000000000000000000000000000000000..e94b88fd05327df31e12280784736ddcac1e873b --- /dev/null +++ b/data/xtuner/xtuner/configs/dpo/llama/llama3_8b_instruct_dpo_qlora_varlenattn.py @@ -0,0 +1,230 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
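+# Typical launch for this recipe (adjust the GPU count to your hardware), e.g.:
+#   NPROC_PER_NODE=8 xtuner train llama3_8b_instruct_dpo_qlora_varlenattn --deepspeed deepspeed_zero2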
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset.collate_fns.preference_collate_fn import \ + preference_collate_fn +from xtuner.dataset.preference_dataset import (build_preference_dataset, + orpo_dpo_mix_40k_map_fn) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model.dpo import DPO +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Meta-Llama-3-8B-Instruct' +use_varlen_attn = True +dpo_loss_type = 'sigmoid' # One of ['sigmoid', 'hinge', 'ipo', 'kto_pair', 'sppo_hard', 'nca_pair', 'robust'] # noqa: E501 +loss_beta = 0.1 +label_smoothing = 0.0 + +# Data +prompt_template = PROMPT_TEMPLATE.llama3_chat +max_length = 2048 +max_packed_length = max_length * 2 + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 5e-7 # refer to alignment handbook +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + 'What famous British author, known for his tales of mystery and the macabre, shares his initials with a common abbreviation for "rest in peace"?', # noqa: E501 + 'Please tell me five scenic spots in Shanghai', + '890729 - 425663? Only respond with math and no words.' 
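+ # each prompt above is wrapped in the llama3_chat template by
+ # EvaluateChatHook before generation; add your own probes here.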
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=DPO, + loss_type=dpo_loss_type, + use_varlen_attn=use_varlen_attn, + beta=loss_beta, + label_smoothing=label_smoothing, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataset = dict( + type=build_preference_dataset, + dataset=dict(type=load_dataset, path='mlabonne/orpo-dpo-mix-40k'), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=orpo_dpo_mix_40k_map_fn, + is_dpo=True, + is_reward=False, + reward_token_id=-1, + num_proc=32, + use_varlen_attn=use_varlen_attn, + max_packed_length=max_packed_length, + shuffle_before_pack=True, +) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict( + type=preference_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# 
configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/gemma/gemma_2b/gemma_2b_full_alpaca_e3.py b/data/xtuner/xtuner/configs/gemma/gemma_2b/gemma_2b_full_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..560b6fd2797f2886a9505adc485d7785362b3af8 --- /dev/null +++ b/data/xtuner/xtuner/configs/gemma/gemma_2b/gemma_2b_full_alpaca_e3.py @@ -0,0 +1,192 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'google/gemma-2b' # Gemma requires transformers>=4.38.1 # noqa: E501 +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # 
+####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
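Every object in these files is declared as a plain dict whose `type` key holds a callable, following the mmengine config convention; nothing is instantiated until the runner builds it. A minimal sketch of that convention (not mmengine's actual Registry machinery):

```python
# Simplified view of how mmengine-style configs are materialized:
# nested dicts are built first, then `type` is popped and called with
# the remaining keys as keyword arguments.
def build(cfg):
    if isinstance(cfg, dict) and 'type' in cfg:
        cfg = {k: build(v) for k, v in cfg.items()}
        factory = cfg.pop('type')
        return factory(**cfg)
    return cfg

# build(tokenizer) would therefore call AutoTokenizer.from_pretrained(
#     pretrained_model_name_or_path=..., trust_remote_code=True,
#     padding_side='right')
```

In practice the file is never executed directly; it is handed to the CLI (e.g. `xtuner train <config>`), which parses it and drives the mmengine runner.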
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/gemma/gemma_2b/gemma_2b_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/gemma/gemma_2b/gemma_2b_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..068064557fae817584c923699184f712067d0466 --- /dev/null +++ b/data/xtuner/xtuner/configs/gemma/gemma_2b/gemma_2b_qlora_alpaca_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'google/gemma-2b' # Gemma requires transformers>=4.38.1 # noqa: E501 +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + 
torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
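For readers who want to see what the `model` dict in this QLoRA config resolves to, here is a rough equivalent written directly against transformers and peft; this is a sketch of what xtuner assembles internally, not its exact code path.

```python
import torch
from peft import LoraConfig, get_peft_model
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

bnb = BitsAndBytesConfig(
    load_in_4bit=True,                     # NF4 4-bit base weights
    bnb_4bit_compute_dtype=torch.float16,  # matmuls run in fp16
    bnb_4bit_use_double_quant=True,
    bnb_4bit_quant_type='nf4')
base = AutoModelForCausalLM.from_pretrained(
    'google/gemma-2b', torch_dtype=torch.float16, quantization_config=bnb)
lora = LoraConfig(r=64, lora_alpha=16, lora_dropout=0.1,
                  bias='none', task_type='CAUSAL_LM')
model = get_peft_model(base, lora)
model.print_trainable_parameters()  # only the LoRA matrices train
```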
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/gemma/gemma_2b_it/gemma_2b_it_full_alpaca_e3.py b/data/xtuner/xtuner/configs/gemma/gemma_2b_it/gemma_2b_it_full_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..d6e2476472460752cf51c73e1077af6c115f6432 --- /dev/null +++ b/data/xtuner/xtuner/configs/gemma/gemma_2b_it/gemma_2b_it_full_alpaca_e3.py @@ -0,0 +1,192 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'google/gemma-2b-it' # Gemma requires transformers>=4.38.1 # noqa: E501 +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.gemma +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + 
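The `pack_to_max_length=True` setting above is what makes `max_length=2048` efficient in the dataset block that follows: short tokenized samples are concatenated (after an optional shuffle, per `shuffle_before_pack`) so each training sequence carries close to 2048 real tokens instead of padding. A greedy sketch of the idea, not xtuner's exact algorithm:

```python
def pack(tokenized_samples, max_length=2048):
    """Greedily concatenate token-id lists into near-full sequences."""
    packs, current = [], []
    for ids in tokenized_samples:
        ids = ids[:max_length]  # clip over-long samples
        if current and len(current) + len(ids) > max_length:
            packs.append(current)
            current = []
        current = current + ids
    if current:
        packs.append(current)
    return packs
```

Note that with plain packing, samples sharing one sequence can attend to each other; `use_varlen_attn=True` is the setting that prevents that.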
+####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
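The two-phase schedule defined above (LinearLR for the first `warmup_ratio` of training, then CosineAnnealingLR to zero) is declared per epoch but converted to iterations. As a sanity check, the resulting learning rate fits in a dozen lines; the defaults mirror this file's settings.

```python
import math

def lr_at(step, total_steps, lr=2e-5, warmup_ratio=0.03, start_factor=1e-5):
    warmup_steps = max(1, int(warmup_ratio * total_steps))
    if step < warmup_steps:
        # LinearLR: ramp from lr * start_factor up to lr
        frac = step / warmup_steps
        return lr * (start_factor + (1.0 - start_factor) * frac)
    # CosineAnnealingLR: decay from lr down to eta_min = 0
    progress = (step - warmup_steps) / max(1, total_steps - warmup_steps)
    return lr * 0.5 * (1.0 + math.cos(math.pi * progress))
```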
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/gemma/gemma_2b_it/gemma_2b_it_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/gemma/gemma_2b_it/gemma_2b_it_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..3936cde9347ecbae987d889761140a8cb78f4fd7 --- /dev/null +++ b/data/xtuner/xtuner/configs/gemma/gemma_2b_it/gemma_2b_it_qlora_alpaca_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'google/gemma-2b-it' # Gemma requires transformers>=4.38.1 # noqa: E501 +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.gemma +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + 
trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
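The `LoraConfig` in this file (r=64, lora_alpha=16) means each adapted linear layer learns a rank-64 update scaled by alpha/r = 0.25. A self-contained sketch of what peft injects, for intuition:

```python
import torch

class LoRALinear(torch.nn.Module):
    """Frozen base linear plus a trainable low-rank update B(A(x))."""

    def __init__(self, base: torch.nn.Linear, r=64, alpha=16, dropout=0.1):
        super().__init__()
        self.base = base                     # stays frozen (and 4-bit here)
        self.A = torch.nn.Linear(base.in_features, r, bias=False)
        self.B = torch.nn.Linear(r, base.out_features, bias=False)
        torch.nn.init.zeros_(self.B.weight)  # update starts as a no-op
        self.drop = torch.nn.Dropout(dropout)
        self.scale = alpha / r               # 16 / 64 = 0.25

    def forward(self, x):
        return self.base(x) + self.scale * self.B(self.A(self.drop(x)))
```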
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/gemma/gemma_7b/gemma_7b_full_alpaca_e3.py b/data/xtuner/xtuner/configs/gemma/gemma_7b/gemma_7b_full_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..e51afc7e7874b1ac119aa1c955e8e6de863186cd --- /dev/null +++ b/data/xtuner/xtuner/configs/gemma/gemma_7b/gemma_7b_full_alpaca_e3.py @@ -0,0 +1,192 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'google/gemma-7b' # Gemma requires transformers>=4.38.1 # noqa: E501 +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# 
PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
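With `batch_size=1` and `accumulative_counts=16`, the effective batch is 16 sequences per optimizer step on each device (times the world size under DDP). The sketch below shows the loop `AmpOptimWrapper` effectively runs, minus the dynamic loss scaling; `model`, `loader`, and `opt` are assumed to exist.

```python
import torch

ACCUM = 16  # accumulative_counts

for i, batch in enumerate(loader):      # loader / model / opt assumed
    loss = model(**batch).loss / ACCUM  # average over the window
    loss.backward()
    if (i + 1) % ACCUM == 0:
        torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm=1)
        opt.step()
        opt.zero_grad()
```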
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/gemma/gemma_7b/gemma_7b_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/gemma/gemma_7b/gemma_7b_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..deb92ad7554f9681892a3613497f785e4f692fed --- /dev/null +++ b/data/xtuner/xtuner/configs/gemma/gemma_7b/gemma_7b_qlora_alpaca_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'google/gemma-7b' # Gemma requires transformers>=4.38.1 # noqa: E501 +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + 
torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
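The CheckpointHook settings above (`by_epoch=False`, `interval=save_steps`, `max_keep_ckpts=save_total_limit`) save every 500 iterations and keep only the two newest files. The retention logic amounts to something like this (a sketch; mmengine names iteration checkpoints `iter_<n>.pth`):

```python
from pathlib import Path

def prune_checkpoints(work_dir, max_keep=2):
    ckpts = sorted(Path(work_dir).glob('iter_*.pth'),
                   key=lambda p: int(p.stem.split('_')[1]))
    for stale in ckpts[:-max_keep]:  # drop all but the newest max_keep
        stale.unlink()
```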
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/gemma/gemma_7b_it/gemma_7b_it_full_alpaca_e3.py b/data/xtuner/xtuner/configs/gemma/gemma_7b_it/gemma_7b_it_full_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..4dc34e0dd6d5e79817221207a20ceb30fdd3577d --- /dev/null +++ b/data/xtuner/xtuner/configs/gemma/gemma_7b_it/gemma_7b_it_full_alpaca_e3.py @@ -0,0 +1,192 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'google/gemma-7b-it' # Gemma requires transformers>=4.38.1 # noqa: E501 +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.gemma +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + 
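This instruction-tuned variant swaps `PROMPT_TEMPLATE.default` for `PROMPT_TEMPLATE.gemma`, so training text matches the chat markup the -it models were aligned on. Roughly, an Alpaca record is rendered as in the sketch below; the turn markers follow Gemma's published chat format, and the authoritative strings live in `xtuner.utils.PROMPT_TEMPLATE`.

```python
record = {'instruction': 'Give three tips for staying healthy.',
          'input': '',
          'output': '1. Eat a balanced diet...'}

# alpaca_map_fn folds instruction + input into a single user turn
user = record['instruction']
if record['input']:
    user += '\n' + record['input']

prompt = (f'<start_of_turn>user\n{user}<end_of_turn>\n'
          f'<start_of_turn>model\n')
text = prompt + record['output']  # loss is computed on the output tokens
```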
+####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
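`EvaluateChatHook` provides a qualitative signal during training: every `evaluation_freq` iterations it renders the `evaluation_inputs` with the system prompt and template, generates, and logs the completions. In spirit it does the following (a sketch; `render` is a hypothetical helper standing in for the template logic):

```python
import torch

@torch.no_grad()
def evaluate_chat(model, tokenizer, questions, system, render,
                  max_new_tokens=128):
    model.eval()
    for q in questions:
        ids = tokenizer(render(system, q), return_tensors='pt').input_ids
        out = model.generate(ids.to(model.device),
                             max_new_tokens=max_new_tokens)
        print(tokenizer.decode(out[0][ids.shape[1]:],
                               skip_special_tokens=True))
    model.train()
```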
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/gemma/gemma_7b_it/gemma_7b_it_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/gemma/gemma_7b_it/gemma_7b_it_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..28f1e3a993b902800ec0d2e73bf0e64a6b2b4483 --- /dev/null +++ b/data/xtuner/xtuner/configs/gemma/gemma_7b_it/gemma_7b_it_qlora_alpaca_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'google/gemma-7b-it' # Gemma requires transformers>=4.38.1 # noqa: E501 +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.gemma +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + 
trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
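`use_varlen_attn` matters once samples are packed: it passes per-sample boundaries to the attention kernel so tokens never attend across the samples sharing one 2048-token sequence. The bookkeeping is just cumulative lengths, flash-attn style:

```python
import torch

def cu_seqlens(sample_lengths):
    """e.g. [5, 3, 4] -> tensor([0, 5, 8, 12], dtype=torch.int32)."""
    return torch.cumsum(
        torch.tensor([0] + list(sample_lengths)), dim=0).to(torch.int32)
```

Shipping these offsets to the model at every step is what the `VarlenAttnArgsToMessageHubHook` registered above exists for.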
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_1_8b/internlm2_1_8b_full_alpaca_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_1_8b/internlm2_1_8b_full_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..914091a31395b772088df68fd4396ec11aa86b5c --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_1_8b/internlm2_1_8b_full_alpaca_e3.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-1_8b' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + 
pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
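Unlike the Gemma configs, this InternLM2 file threads `sequence_parallel_size` through both the sampler choice and `accumulative_counts`. The multiplication keeps the global batch constant: each group of `sequence_parallel_size` GPUs works on one sequence, so data parallelism shrinks by the same factor. A quick check of the invariant:

```python
def global_batch(batch_per_device, accum, world_size, sp_size):
    data_parallel = world_size // sp_size
    return batch_per_device * accum * data_parallel

# accumulative_counts *= sequence_parallel_size preserves the global batch:
assert global_batch(1, 16 * 2, world_size=8, sp_size=2) == \
       global_batch(1, 16, world_size=8, sp_size=1)
```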
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_1_8b/internlm2_1_8b_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_1_8b/internlm2_1_8b_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..f681ea1f65822ef6cdde0fc0db22c35bb6207bc6 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_1_8b/internlm2_1_8b_qlora_alpaca_e3.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-1_8b' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + 
llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
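    # (With `by_epoch=False`, `interval` below is counted in training
    # iterations, so a checkpoint lands every `save_steps` steps and
    # `max_keep_ckpts` rotates out all but the newest `save_total_limit`.)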
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_20b/internlm2_20b_full_finetune_custom_dataset_e1.py b/data/xtuner/xtuner/configs/internlm/internlm2_20b/internlm2_20b_full_finetune_custom_dataset_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..56381dfa543d12553370fa63e4187c0f8f3debf5 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_20b/internlm2_20b_full_finetune_custom_dataset_e1.py @@ -0,0 +1,226 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: +[ + { + "conversation": [ + { + "system": "", + "input": "xxx", + "output": "xxx" + }, + { + "input": "xxx", + "output": "xxx" + } + ] + }, +... +] +Please refer to https://github.com/InternLM/xtuner/blob/main/docs/en/user_guides/dataset_format.md for details. +""" # noqa: E501 +from datasets import load_dataset +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR +from torch.optim import AdamW +from torch.utils.data import BatchSampler +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import template_map_fn_factory +from xtuner.dataset.samplers import InternRepoSampler +from xtuner.engine import (DatasetInfoHook, EvaluateChatHook, ThroughputHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +#                            PART 1 Settings                          # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-20b' +use_varlen_attn = True + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 32768 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +# batch size per device, set to 1 if `use_varlen_attn` = True +# To clarify, enlarging the batch size essentially enlarges the `max_length`.
+# For example, doubling the max length is tantamount to doubling the batch size +batch_size = 1 +accumulative_counts = 1 # 1bs * 1acc * 64gpu = 64 batchsize +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 4e-5 +betas = (0.9, 0.95) +weight_decay = 0.01 +max_norm = 1 # grad clip +warm_up_ratio = 0.025 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + use_varlen_attn=use_varlen_attn, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=None, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=InternRepoSampler, shuffle=True, seed=1024), + batch_sampler=dict( + type=BatchSampler, drop_last=True, batch_size=batch_size), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', +) + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1 / 40, + by_epoch=True, + begin=0, + end=warm_up_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=lr * 0.15, + by_epoch=True, + begin=warm_up_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict( + type=DatasetInfoHook, tokenizer=tokenizer, + is_intern_repo_dataset=True), + dict( + type=EvaluateChatHook, + 
tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template), + dict(type=ThroughputHook) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every iteration. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=1), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +log_processor = dict( + by_epoch=False, + window_size=1, + mean_pattern=r'.*(loss|time|data_time|grad_norm|tflops).*') diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_20b/internlm2_20b_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_20b/internlm2_20b_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..61b2001a1a8ed4458de3c74346525f6c6c3920f5 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_20b/internlm2_20b_qlora_alpaca_e3.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved.
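The QLoRA configs that follow declare their model as nested dicts, which xtuner materializes lazily through mmengine's registry. For orientation, here is a minimal hand-rolled sketch of the same recipe built directly with transformers and peft. This is an illustration only, not xtuner's actual loading path (`SupervisedFinetune` additionally prepares the quantized model for k-bit training), and the `target_modules` list is our guess at InternLM2's projection-layer names, not something the config specifies:

```python
import torch
from peft import LoraConfig, get_peft_model
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

# Frozen base model: weights stored as 4-bit NF4, matmuls computed in fp16.
base = AutoModelForCausalLM.from_pretrained(
    'internlm/internlm2-20b',
    trust_remote_code=True,
    torch_dtype=torch.float16,
    quantization_config=BitsAndBytesConfig(
        load_in_4bit=True,
        bnb_4bit_compute_dtype=torch.float16,
        bnb_4bit_use_double_quant=True,
        bnb_4bit_quant_type='nf4'))

# Trainable part: rank-64 LoRA adapters injected into the (guessed) InternLM2
# projection layers; only these adapters receive gradients.
model = get_peft_model(
    base,
    LoraConfig(r=64, lora_alpha=16, lora_dropout=0.1, bias='none',
               task_type='CAUSAL_LM',
               target_modules=['wqkv', 'wo', 'w1', 'w2', 'w3']))
model.print_trainable_parameters()  # adapters are a tiny fraction of 20B
```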
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-20b' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = 
SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +#                    PART 4 Scheduler & Optimizer                     # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +#                            PART 5 Runtime                           # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_20b/internlm2_20b_qlora_arxiv_gentitle_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_20b/internlm2_20b_qlora_arxiv_gentitle_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..db87798317657e40809efb11b0ba288c5382b3ec --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_20b/internlm2_20b_qlora_arxiv_gentitle_e3.py @@ -0,0 +1,254 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import arxiv_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-20b' +use_varlen_attn = False + +# Data +# 1. Download data from https://kaggle.com/datasets/Cornell-University/arxiv +# 2. Process data by `xtuner preprocess arxiv ${DOWNLOADED_DATA} ./data/arxiv_data.json [optional arguments]` # noqa: E501 +data_path = './data/arxiv_data.json' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.arxiv_gentile +evaluation_inputs = [ + ('We present InternLM, a multilingual foundational language ' + 'model with 104B parameters. InternLM is pre-trained on a large ' + 'corpora with 1.6T tokens with a multi-phase progressive ' + 'process, and then fine-tuned to align with human preferences. 
' + 'We also developed a training system called Uniscale-LLM for ' + 'efficient large language model training. The evaluation on a ' + 'number of benchmarks shows that InternLM achieves ' + 'state-of-the-art performance in multiple aspects, including ' + 'knowledge understanding, reading comprehension, mathematics, ' + 'and coding. With such well-rounded capabilities, InternLM ' + 'achieves outstanding performances on comprehensive exams, ' + 'including MMLU, AGIEval, C-Eval and GAOKAO-Bench, without ' + 'resorting to external tools. On these benchmarks, InternLM ' + 'not only significantly outperforms open-source models, but ' + 'also obtains superior performance compared to ChatGPT. Also, ' + 'InternLM demonstrates excellent capability of understanding ' + 'Chinese language and Chinese culture, which makes it a ' + 'suitable foundation model to support Chinese-oriented language ' + 'applications. This manuscript gives a detailed study of ' + 'our results, with benchmarks and examples across a diverse ' + 'set of knowledge domains and tasks.'), + ('In this work, we develop and release Llama 2, a collection of ' + 'pretrained and fine-tuned large language models (LLMs) ranging ' + 'in scale from 7 billion to 70 billion parameters.\nOur ' + 'fine-tuned LLMs, called LLAMA 2-CHAT, are optimized for ' + 'dialogue use cases. Our models outperform open-source chat ' + 'models on most benchmarks we tested, and based on our human ' + 'evaluations for helpfulness and safety, may be a suitable ' + 'substitute for closedsource models. We provide a detailed ' + 'description of our approach to fine-tuning and safety ' + 'improvements of LLAMA 2-CHAT in order to enable the community ' + 'to build on our work and contribute to the responsible ' + 'development of LLMs.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=arxiv_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + 
num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +#                    PART 4 Scheduler & Optimizer                     # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +#                            PART 5 Runtime                           # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_20b/internlm2_20b_qlora_code_alpaca_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_20b/internlm2_20b_qlora_code_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..cbd7df93a6052fdd7554bef3cfd25a6430e7b691 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_20b/internlm2_20b_qlora_code_alpaca_e3.py @@ -0,0 +1,223 @@ +# Copyright (c) OpenMMLab. All rights reserved.
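One recurring knob worth decoding before the next configs: they all pair a per-device batch of 1 with `accumulative_counts = 16`, so the batch size the optimizer effectively sees depends on the data-parallel world size. A small illustrative helper follows (the function name is ours, not xtuner's); note that with `pack_to_max_length = True` each sample is already a packed block of `max_length` tokens:

```python
def effective_batch_size(per_device: int, accumulative_counts: int,
                         num_gpus: int) -> int:
    # Each optimizer step consumes `accumulative_counts` micro-batches on
    # every GPU, replicated across the data-parallel group.
    return per_device * accumulative_counts * num_gpus

# The settings used throughout these configs, on one 8-GPU node:
print(effective_batch_size(1, 16, 8))  # -> 128 packed 2048-token sequences
```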
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import code_alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-20b' +use_varlen_attn = False + +# Data +data_path = 'HuggingFaceH4/CodeAlpaca_20K' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 100 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=code_alpaca_map_fn, + template_map_fn=dict( + 
type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +#                    PART 4 Scheduler & Optimizer                     # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +#                            PART 5 Runtime                           # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_20b/internlm2_20b_qlora_colorist_e5.py b/data/xtuner/xtuner/configs/internlm/internlm2_20b/internlm2_20b_qlora_colorist_e5.py new file mode 100644 index 0000000000000000000000000000000000000000..d3d830ff5d200dae20a09ec0697a026836dc5e1a --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_20b/internlm2_20b_qlora_colorist_e5.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import colors_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-20b' +use_varlen_attn = False + +# Data +data_path = 'burkelibbey/colors' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 5 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 200 +SYSTEM = SYSTEM_TEMPLATE.colorist +evaluation_inputs = [ + '请给我一个像天空一样清澈透明的蓝色。', 'Please give me a clear blue like the sky.' 
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=colors_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time 
of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_20b/internlm2_20b_qlora_lawyer_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_20b/internlm2_20b_qlora_lawyer_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..a704b826534d4404e58340149b7dc1f8ff4522ae --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_20b/internlm2_20b_qlora_lawyer_e3.py @@ -0,0 +1,243 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (crime_kg_assitant_map_fn, + law_reference_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +#                            PART 1 Settings                          # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-20b' +use_varlen_attn = False + +# Data +# download data from https://github.com/LiuHC0428/LAW-GPT +crime_kg_assitant_path = './data/CrimeKgAssitant清洗后_52k.json' +law_reference_data_path = './data/训练数据_带法律依据_92k.json' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means
unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.lawyer +evaluation_inputs = ['请问离婚需要准备什么材料?', '销售鳄鱼皮包违法吗?'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +crime_kg_assitant = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=crime_kg_assitant_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=crime_kg_assitant_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +law_reference_data = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=law_reference_data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=law_reference_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[crime_kg_assitant, law_reference_data]) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + 
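    # (The cosine decay below begins exactly where the linear warmup phase
    # above ends; `convert_to_iter_based=True` translates these epoch-fraction
    # boundaries into iteration counts at runtime.)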
begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +#                            PART 5 Runtime                           # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_20b/internlm2_20b_qlora_msagent_react_e3_gpu8.py b/data/xtuner/xtuner/configs/internlm/internlm2_20b/internlm2_20b_qlora_msagent_react_e3_gpu8.py new file mode 100644 index 0000000000000000000000000000000000000000..9ad8743fede7644c95eedf132dad5fbe568e62d5 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_20b/internlm2_20b_qlora_msagent_react_e3_gpu8.py @@ -0,0 +1,237 @@ +# Copyright (c) OpenMMLab. All rights reserved.
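All configs in this diff share the same two-phase learning-rate policy: `LinearLR` warmup over the first `warmup_ratio` of training, then `CosineAnnealingLR` down to `eta_min`, with `convert_to_iter_based=True` mapping the epoch fractions onto iterations. The standalone sketch below reimplements the resulting curve for illustration (our code, not mmengine's):

```python
import math

def lr_at(step: int, total_steps: int, base_lr: float = 2e-4,
          warmup_ratio: float = 0.03, start_factor: float = 1e-5,
          eta_min: float = 0.0) -> float:
    warmup_steps = int(total_steps * warmup_ratio)
    if step < warmup_steps:
        # LinearLR: multiplier ramps linearly from `start_factor` to 1.0.
        frac = step / max(1, warmup_steps)
        return base_lr * (start_factor + (1.0 - start_factor) * frac)
    # CosineAnnealingLR: half-cosine from `base_lr` down to `eta_min`.
    t = (step - warmup_steps) / max(1, total_steps - warmup_steps)
    return eta_min + (base_lr - eta_min) * 0.5 * (1.0 + math.cos(math.pi * t))

# Peak LR is reached right at the warmup/decay boundary:
print(lr_at(300, 10_000))  # -> 2e-4 for the 2e-4-peak QLoRA configs
```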
+import torch +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from modelscope.msdatasets import MsDataset +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_ms_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (msagent_react_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-20b' +use_varlen_attn = False + +# Data +data_path = 'damo/MSAgent-Bench' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = False + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 8 # per_device +accumulative_counts = 1 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 2 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = ( + '你是一个可以调用外部工具的助手,可以使用的工具包括:\n' + "{{\'GoogleSearch\': \'一个可以从谷歌搜索结果的API。\\n" + '当你需要对于一个特定问题找到简短明了的回答时,可以使用它。\\n' + "输入应该是一个搜索查询。\\n\\n\'," + "\'PythonInterpreter\': \"用来执行Python代码。代码必须是一个函数,\\n" + "函数名必须得是 \'solution\',代码对应你的思考过程。代码实例格式如下:\\n" + '```python\\n# import 依赖包\\nimport xxx\\ndef solution():' + '\\n # 初始化一些变量\\n variable_names_with_real_meaning = xxx' + '\\n # 步骤一\\n mid_variable = func(variable_names_with_real_meaning)' + '\\n # 步骤 x\\n mid_variable = func(mid_variable)\\n # 最后结果' + '\\n final_answer = func(mid_variable)\\n return final_answer' + "\\n```\\n\"}}\n" + '如果使用工具请遵循以下格式回复:\n```\n' + 'Thought:思考你当前步骤需要解决什么问题,是否需要使用工具\n' + "Action:工具名称,你的工具必须从 [[\'GoogleSearch\', \'PythonInterpreter\']] 选择" + '\nAction Input:工具输入参数\n```\n工具返回按照以下格式回复:\n' + '```\nResponse:调用工具后的结果\n```' + '\n如果你已经知道了答案,或者你不需要工具,请遵循以下格式回复\n```' + '\nThought:给出最终答案的思考过程\nFinal Answer:最终答案\n```\n开始!\n') +evaluation_inputs = ['上海明天天气怎么样?'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + 
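    # (nf4 stores the frozen weights as 4-bit NormalFloat; double quantization
    # additionally compresses the quantization constants, and the compute
    # dtype below means matmuls are carried out in fp16.)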
bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_ms_dataset, + dataset=dict(type=MsDataset.load, dataset_name=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=msagent_react_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_20b/internlm2_20b_qlora_oasst1_512_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_20b/internlm2_20b_qlora_oasst1_512_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..c6fb710dae1ad07bdf4fcc5cf3b337c77a369cbc --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_20b/internlm2_20b_qlora_oasst1_512_e3.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-20b' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.default +max_length = 512 +pack_to_max_length = False + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + 
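+ # (Annotation) mmengine configs are lazy: a dict carrying a `type` key is only materialized at runtime by calling `type` with the remaining keys as kwargs, so this block effectively becomes AutoModelForCausalLM.from_pretrained(pretrained_model_name_or_path, ...).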
type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
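+ # (Annotation) With save_steps=500 and save_total_limit=2, a checkpoint is written every 500 iterations and mmengine's CheckpointHook prunes all but the two most recent ones.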
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_20b/internlm2_20b_qlora_oasst1_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_20b/internlm2_20b_qlora_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..2b1deb35db73c06917a82419b99ba3e864c5eaf4 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_20b/internlm2_20b_qlora_oasst1_e3.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-20b' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, +
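+ # (Annotation) `trust_remote_code=True` is required because InternLM2 ships its tokenizer and modeling code on the Hugging Face Hub instead of using a built-in transformers class.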
pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. 
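+ # (Annotation) ParamSchedulerHook steps the LinearLR -> CosineAnnealingLR schedule from PART 4; with warmup_ratio=0.03 and max_epochs=3 the warmup covers 0.09 "epochs", i.e. roughly the first 3% of all iterations once convert_to_iter_based=True takes effect.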
+ param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_20b/internlm2_20b_qlora_sql_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_20b/internlm2_20b_qlora_sql_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..c3b8629072a7f34cd90039f014c71ed69edb4d02 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_20b/internlm2_20b_qlora_sql_e3.py @@ -0,0 +1,223 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import sql_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-20b' +use_varlen_attn = False + +# Data +data_path = 'b-mc2/sql-create-context' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.sql +evaluation_inputs = [ + ('CREATE TABLE station (name VARCHAR, lat VARCHAR, city VARCHAR)\n' + 'Find the name, latitude, and city of stations with latitude ' + 'above 50.'), + ('CREATE TABLE weather (zip_code VARCHAR, mean_visibility_miles ' +
'INTEGER)\n找到mean_visibility_miles最大的zip_code。') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=sql_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default 
hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_5_chat_20b/internlm2_5_chat_20b_alpaca_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_5_chat_20b/internlm2_5_chat_20b_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..f67fc1a221827c43b48ec274acf1ac3bfd01268c --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_5_chat_20b/internlm2_5_chat_20b_alpaca_e3.py @@ -0,0 +1,202 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2_5-20b-chat' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 1 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = 
SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_5_chat_20b/internlm2_5_chat_20b_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_5_chat_20b/internlm2_5_chat_20b_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..f695e79222e929eb77c5705846de5c1e634582c5 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_5_chat_20b/internlm2_5_chat_20b_qlora_alpaca_e3.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2_5-20b-chat' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 1 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( +
type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. 
+ param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_5_chat_7b/internlm2_5_chat_7b_full_finetune_custom_dataset_e1.py b/data/xtuner/xtuner/configs/internlm/internlm2_5_chat_7b/internlm2_5_chat_7b_full_finetune_custom_dataset_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..bc8a2816ac053f4729e8eed4b6a880a3315d5f51 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_5_chat_7b/internlm2_5_chat_7b_full_finetune_custom_dataset_e1.py @@ -0,0 +1,226 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: +[ + { + "conversation": [ + { + "system": "", + "input": "xxx", + "output": "xxx" + }, + { + "input": "xxx", + "output": "xxx" + } + ] + }, +... +] +Please refer to https://github.com/InternLM/xtuner/blob/main/docs/en/user_guides/dataset_format.md for details. +""" # noqa: E501 +from datasets import load_dataset +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR +from torch.optim import AdamW +from torch.utils.data import BatchSampler +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import template_map_fn_factory +from xtuner.dataset.samplers import InternRepoSampler +from xtuner.engine import (DatasetInfoHook, EvaluateChatHook, ThroughputHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2_5-7b-chat' +use_varlen_attn = True + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 32768 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +# batch size per device, set to 1 if `use_varlen_attn` = True +# To clarify, enlarging the batch size essentially enlarges the `max_length`.
+# For example, doubling the max length is tantamount to doubling the batch size +batch_size = 1 +accumulative_counts = 1 # 1bs * 1acc * 64gpu = 64 batchsize +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 4e-5 +betas = (0.9, 0.95) +weight_decay = 0.01 +max_norm = 1 # grad clip +warm_up_ratio = 0.025 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + use_varlen_attn=use_varlen_attn, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=None, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=InternRepoSampler, shuffle=True, seed=1024), + batch_sampler=dict( + type=BatchSampler, drop_last=True, batch_size=batch_size), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', +) + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1 / 40, + by_epoch=True, + begin=0, + end=warm_up_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=lr * 0.15, + by_epoch=True, + begin=warm_up_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict( + type=DatasetInfoHook, tokenizer=tokenizer, + is_intern_repo_dataset=True), + dict( + type=EvaluateChatHook, + 
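+ # (Annotation) Every `evaluation_freq` iterations this hook generates answers to `evaluation_inputs` with the current weights, a quick qualitative check to read alongside the loss curve.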
tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template), + dict(type=ThroughputHook) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every iteration. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=1), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +log_processor = dict( + by_epoch=False, + window_size=1, + mean_pattern=r'.*(loss|time|data_time|grad_norm|tflops).*') diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_5_chat_7b/internlm2_5_chat_7b_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_5_chat_7b/internlm2_5_chat_7b_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..7dfc9261797c2a9ce08a6197021c173bc111dbce --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_5_chat_7b/internlm2_5_chat_7b_qlora_alpaca_e3.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved.
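+# (Annotation, not in the upstream file) Compared with the full-finetune recipe above (lr=4e-5, max_length=32768, varlen attention), this QLoRA variant uses a higher LR (2e-4), 2048-token packed samples, and 4-bit weights plus rank-64 LoRA adapters, shrinking the GPU-memory budget substantially.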
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2_5-7b-chat' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 1 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = 
SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment.
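+ # (Annotation) DistSamplerSeedHook calls set_epoch on the sampler at each epoch so that shuffling stays synchronized across ranks in distributed runs while still varying between epochs.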
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_5_chat_7b/internlm2_5_chat_7b_qlora_oasst1_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_5_chat_7b/internlm2_5_chat_7b_qlora_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..98b097efb5db3735c68e58c727b727e3784f82c1 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_5_chat_7b/internlm2_5_chat_7b_qlora_oasst1_e3.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2_5-7b-chat' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + 
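+ # (Annotation) SupervisedFinetune is xtuner's wrapper that builds the HF model from the `llm` cfg, attaches LoRA adapters when a `lora` cfg is present, and masks labels so the loss is computed only on assistant outputs.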
use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_full_finetune_custom_dataset_e1.py b/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_full_finetune_custom_dataset_e1.py
new file mode 100644
index 0000000000000000000000000000000000000000..ea4787f8de482c4da6bf8b0aa4435d455449198c
--- /dev/null
+++ b/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_full_finetune_custom_dataset_e1.py
@@ -0,0 +1,226 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+"""Data format:
+[
+    {
+        "conversation": [
+            {
+                "system": "",
+                "input": "xxx",
+                "output": "xxx"
+            },
+            {
+                "input": "xxx",
+                "output": "xxx"
+            }
+        ]
+    },
+...
+]
+Please refer to https://github.com/InternLM/xtuner/blob/main/docs/en/user_guides/dataset_format.md for details.
+"""  # noqa: E501
+from datasets import load_dataset
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR
+from torch.optim import AdamW
+from torch.utils.data import BatchSampler
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import template_map_fn_factory
+from xtuner.dataset.samplers import InternRepoSampler
+from xtuner.engine import (DatasetInfoHook, EvaluateChatHook, ThroughputHook,
+                           VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'internlm/internlm2-7b'
+use_varlen_attn = True
+
+# Data
+data_files = ['/path/to/json/file.json']
+prompt_template = PROMPT_TEMPLATE.internlm2_chat
+max_length = 32768
+pack_to_max_length = True
+
+# parallel
+sequence_parallel_size = 1
+
+# Scheduler & Optimizer
+# batch size per device; set it to 1 when `use_varlen_attn` is True.
+# To clarify, enlarging the batch size essentially enlarges the `max_length`.
+# For example, doubling the max length is tantamount to doubling the batch size +batch_size = 1 +accumulative_counts = 1 # 1bs * 1acc * 64gpu = 64 batchsize +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 4e-5 +betas = (0.9, 0.95) +weight_decay = 0.01 +max_norm = 1 # grad clip +warm_up_ratio = 0.025 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + use_varlen_attn=use_varlen_attn, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=None, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=InternRepoSampler, shuffle=True, seed=1024), + batch_sampler=dict( + type=BatchSampler, drop_last=True, batch_size=batch_size), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', +) + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1 / 40, + by_epoch=True, + begin=0, + end=warm_up_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=lr * 0.15, + by_epoch=True, + begin=warm_up_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict( + type=DatasetInfoHook, tokenizer=tokenizer, + is_intern_repo_dataset=True), + dict( + type=EvaluateChatHook, + 
tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template),
+    dict(type=ThroughputHook)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every iteration.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=1),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+log_processor = dict(
+    by_epoch=False,
+    window_size=1,
+    mean_pattern=r'.*(loss|time|data_time|grad_norm|tflops).*')
diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_full_finetune_custom_dataset_e1_sequence_parallel_4.py b/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_full_finetune_custom_dataset_e1_sequence_parallel_4.py
new file mode 100644
index 0000000000000000000000000000000000000000..f7127c28171f54ee3f711a833da901db39c47958
--- /dev/null
+++ b/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_full_finetune_custom_dataset_e1_sequence_parallel_4.py
@@ -0,0 +1,223 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+"""Data format:
+[
+    {
+        "conversation": [
+            {
+                "system": "",
+                "input": "xxx",
+                "output": "xxx"
+            },
+            {
+                "input": "xxx",
+                "output": "xxx"
+            }
+        ]
+    },
+...
+]
+Please refer to https://github.com/InternLM/xtuner/blob/main/docs/en/user_guides/dataset_format.md for details.
+""" # noqa: E501 +from datasets import load_dataset +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR +from torch.optim import AdamW +from torch.utils.data import BatchSampler +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import template_map_fn_factory +from xtuner.dataset.samplers import InternRepoSampler +from xtuner.engine import (DatasetInfoHook, EvaluateChatHook, ThroughputHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-7b' +use_varlen_attn = True + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 32768 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 4 + +# Scheduler & Optimizer +batch_size = 1 # per_device +# accumulative_counts = accumulative_counts * sequence_parallel_size +accumulative_counts = 1 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 4e-5 +betas = (0.9, 0.95) +weight_decay = 0.01 +max_norm = 1 # grad clip +warm_up_ratio = 0.025 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + use_varlen_attn=use_varlen_attn, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=None, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=InternRepoSampler, shuffle=True, seed=1024), + batch_sampler=dict(type=BatchSampler, drop_last=True, batch_size=1), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # 
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+)
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type='LinearLR',
+        start_factor=1 / 40,
+        by_epoch=True,
+        begin=0,
+        end=warm_up_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=lr * 0.15,
+        by_epoch=True,
+        begin=warm_up_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(
+        type=DatasetInfoHook, tokenizer=tokenizer,
+        is_intern_repo_dataset=True),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template),
+    dict(type=ThroughputHook)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every iteration.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=1),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+log_processor = dict(
+    by_epoch=False,
+    window_size=1,
+    mean_pattern=r'.*(loss|time|data_time|grad_norm|tflops).*')
diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_qlora_alpaca_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..10d31809b45662fb2da918672ba719f0e18d1f78
--- /dev/null
+++ b/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_qlora_alpaca_e3.py
@@ -0,0 +1,219 @@
+# Copyright (c) OpenMMLab. All rights reserved.
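+# Usage sketch (hedged): the launch command below assumes a standard xtuner
+# install and single-node training; the exact flags may differ across
+# xtuner versions.
+#
+#   xtuner train internlm2_7b_qlora_alpaca_e3 --deepspeed deepspeed_zero2
+#
+# With the defaults in this file, the effective global batch size per
+# optimizer step is batch_size * accumulative_counts * n_gpus = 16 * n_gpus.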
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-7b' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = 
SequenceParallelSampler \
+    if sequence_parallel_size > 1 else DefaultSampler
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=alpaca_en,
+    sampler=dict(type=sampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_qlora_arxiv_gentitle_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_qlora_arxiv_gentitle_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..e8bbaf7df3ef8d2b5bb94a0d035d58cb340222cd --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_qlora_arxiv_gentitle_e3.py @@ -0,0 +1,254 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import arxiv_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-7b' +use_varlen_attn = False + +# Data +# 1. Download data from https://kaggle.com/datasets/Cornell-University/arxiv +# 2. Process data by `xtuner preprocess arxiv ${DOWNLOADED_DATA} ./data/arxiv_data.json [optional arguments]` # noqa: E501 +data_path = './data/arxiv_data.json' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.arxiv_gentile +evaluation_inputs = [ + ('We present InternLM, a multilingual foundational language ' + 'model with 104B parameters. InternLM is pre-trained on a large ' + 'corpora with 1.6T tokens with a multi-phase progressive ' + 'process, and then fine-tuned to align with human preferences. 
' + 'We also developed a training system called Uniscale-LLM for ' + 'efficient large language model training. The evaluation on a ' + 'number of benchmarks shows that InternLM achieves ' + 'state-of-the-art performance in multiple aspects, including ' + 'knowledge understanding, reading comprehension, mathematics, ' + 'and coding. With such well-rounded capabilities, InternLM ' + 'achieves outstanding performances on comprehensive exams, ' + 'including MMLU, AGIEval, C-Eval and GAOKAO-Bench, without ' + 'resorting to external tools. On these benchmarks, InternLM ' + 'not only significantly outperforms open-source models, but ' + 'also obtains superior performance compared to ChatGPT. Also, ' + 'InternLM demonstrates excellent capability of understanding ' + 'Chinese language and Chinese culture, which makes it a ' + 'suitable foundation model to support Chinese-oriented language ' + 'applications. This manuscript gives a detailed study of ' + 'our results, with benchmarks and examples across a diverse ' + 'set of knowledge domains and tasks.'), + ('In this work, we develop and release Llama 2, a collection of ' + 'pretrained and fine-tuned large language models (LLMs) ranging ' + 'in scale from 7 billion to 70 billion parameters.\nOur ' + 'fine-tuned LLMs, called LLAMA 2-CHAT, are optimized for ' + 'dialogue use cases. Our models outperform open-source chat ' + 'models on most benchmarks we tested, and based on our human ' + 'evaluations for helpfulness and safety, may be a suitable ' + 'substitute for closedsource models. We provide a detailed ' + 'description of our approach to fine-tuning and safety ' + 'improvements of LLAMA 2-CHAT in order to enable the community ' + 'to build on our work and contribute to the responsible ' + 'development of LLMs.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=arxiv_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + 
num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=sampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_qlora_code_alpaca_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_qlora_code_alpaca_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..348233d69adbf693b35e2032d2c3fbb75a58e99b
--- /dev/null
+++ b/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_qlora_code_alpaca_e3.py
@@ -0,0 +1,223 @@
+# Copyright (c) OpenMMLab. All rights reserved.
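+# For orientation (an illustrative sketch, not part of this config): the
+# `quantization_config` dict below is the lazily-built equivalent of calling
+# transformers directly, roughly:
+#
+#   from transformers import AutoModelForCausalLM, BitsAndBytesConfig
+#   llm = AutoModelForCausalLM.from_pretrained(
+#       'internlm/internlm2-7b',
+#       trust_remote_code=True,
+#       torch_dtype=torch.float16,
+#       quantization_config=BitsAndBytesConfig(
+#           load_in_4bit=True,
+#           bnb_4bit_quant_type='nf4',
+#           bnb_4bit_compute_dtype=torch.float16,
+#           bnb_4bit_use_double_quant=True))
+#
+# mmengine resolves each `type=...` entry into the corresponding call at
+# runtime, which keeps the config declarative and easy to override.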
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import code_alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-7b' +use_varlen_attn = False + +# Data +data_path = 'HuggingFaceH4/CodeAlpaca_20K' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 100 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=code_alpaca_map_fn, + template_map_fn=dict( + 
type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+sampler = SequenceParallelSampler \
+    if sequence_parallel_size > 1 else DefaultSampler
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=sampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_qlora_colorist_e5.py b/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_qlora_colorist_e5.py new file mode 100644 index 0000000000000000000000000000000000000000..e6d65ce97d2fee4e768484d9ee7e9bcd55337b3d --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_qlora_colorist_e5.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import colors_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-7b' +use_varlen_attn = False + +# Data +data_path = 'burkelibbey/colors' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 5 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 200 +SYSTEM = SYSTEM_TEMPLATE.colorist +evaluation_inputs = [ + '请给我一个像天空一样清澈透明的蓝色。', 'Please give me a clear blue like the sky.' 
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=colors_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time 
of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_qlora_json_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_qlora_json_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..99b315b74d7ae3a5da510b97bd01790229aa60c3
--- /dev/null
+++ b/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_qlora_json_e3.py
@@ -0,0 +1,219 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.parallel.sequence import SequenceParallelSampler
+from xtuner.utils import PROMPT_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'internlm/internlm2-7b'
+use_varlen_attn = False
+
+# Data
+data_path = 'path/to/your/json_data'
+prompt_template = PROMPT_TEMPLATE.default
+max_length = 2048
+pack_to_max_length = True
+
+# parallel
+sequence_parallel_size = 1
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+accumulative_counts *= sequence_parallel_size
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = ''
+evaluation_inputs = [
+    '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai'
+]
+
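+# `data_path` above is a placeholder; point it at a json file that already
+# follows xtuner's conversation format (a minimal hypothetical example):
+#
+#   [{"conversation": [{"system": "", "input": "xxx", "output": "xxx"}]}]
+#
+# Since no `dataset_map_fn` is set in the dataset below, the records are
+# consumed in this format as-is; see docs/en/user_guides/dataset_format.md
+# in the xtuner repo for the full specification.
+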
+####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of 
every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_qlora_lawyer_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_qlora_lawyer_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..e7dd0a2c24d870bb5ee0b9b211347e65d245167d
--- /dev/null
+++ b/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_qlora_lawyer_e3.py
@@ -0,0 +1,243 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import ConcatDataset, process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import (crime_kg_assitant_map_fn,
+                                    law_reference_map_fn,
+                                    template_map_fn_factory)
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.parallel.sequence import SequenceParallelSampler
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'internlm/internlm2-7b'
+use_varlen_attn = False
+
+# Data
+# download data from https://github.com/LiuHC0428/LAW-GPT
+crime_kg_assitant_path = './data/CrimeKgAssitant清洗后_52k.json'
+law_reference_data_path = './data/训练数据_带法律依据_92k.json'
+prompt_template = PROMPT_TEMPLATE.default
+max_length = 2048
+pack_to_max_length = True
+
+# parallel
+sequence_parallel_size = 1
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+accumulative_counts *= sequence_parallel_size
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# 
Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.lawyer +evaluation_inputs = ['请问离婚需要准备什么材料?', '销售鳄鱼皮包违法吗?'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +crime_kg_assitant = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=crime_kg_assitant_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=crime_kg_assitant_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +law_reference_data = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=law_reference_data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=law_reference_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[crime_kg_assitant, law_reference_data]) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * 
max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_qlora_msagent_react_e3_gpu8.py b/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_qlora_msagent_react_e3_gpu8.py new file mode 100644 index 0000000000000000000000000000000000000000..4cb46b27159846a29617728c3e9df522fc623dcc --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_qlora_msagent_react_e3_gpu8.py @@ -0,0 +1,237 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
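+# QLoRA fine-tune of internlm2-7b for ReAct-style tool calling on the ModelScope dataset damo/MSAgent-Bench, 3 epochs, sized for 8 GPUs (per-device batch size 8, no gradient accumulation). +# Minimal launch sketch, assuming the standard xtuner CLI is installed and this config name resolves on its search path: +#   NPROC_PER_NODE=8 xtuner train internlm2_7b_qlora_msagent_react_e3_gpu8 --deepspeed deepspeed_zero2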
+import torch +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from modelscope.msdatasets import MsDataset +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_ms_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (msagent_react_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-7b' +use_varlen_attn = False + +# Data +data_path = 'damo/MSAgent-Bench' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = False + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 8 # per_device +accumulative_counts = 1 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 2 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = ( + '你是一个可以调用外部工具的助手,可以使用的工具包括:\n' + "{{\'GoogleSearch\': \'一个可以从谷歌搜索结果的API。\\n" + '当你需要对于一个特定问题找到简短明了的回答时,可以使用它。\\n' + "输入应该是一个搜索查询。\\n\\n\'," + "\'PythonInterpreter\': \"用来执行Python代码。代码必须是一个函数,\\n" + "函数名必须得是 \'solution\',代码对应你的思考过程。代码实例格式如下:\\n" + '```python\\n# import 依赖包\\nimport xxx\\ndef solution():' + '\\n # 初始化一些变量\\n variable_names_with_real_meaning = xxx' + '\\n # 步骤一\\n mid_variable = func(variable_names_with_real_meaning)' + '\\n # 步骤 x\\n mid_variable = func(mid_variable)\\n # 最后结果' + '\\n final_answer = func(mid_variable)\\n return final_answer' + "\\n```\\n\"}}\n" + '如果使用工具请遵循以下格式回复:\n```\n' + 'Thought:思考你当前步骤需要解决什么问题,是否需要使用工具\n' + "Action:工具名称,你的工具必须从 [[\'GoogleSearch\', \'PythonInterpreter\']] 选择" + '\nAction Input:工具输入参数\n```\n工具返回按照以下格式回复:\n' + '```\nResponse:调用工具后的结果\n```' + '\n如果你已经知道了答案,或者你不需要工具,请遵循以下格式回复\n```' + '\nThought:给出最终答案的思考过程\nFinal Answer:最终答案\n```\n开始!\n') +evaluation_inputs = ['上海明天天气怎么样?'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + 
bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_ms_dataset, + dataset=dict(type=MsDataset.load, dataset_name=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=msagent_react_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
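+    # (DistSamplerSeedHook calls `set_epoch` on the sampler so shuffling varies per epoch yet stays aligned across ranks; it is harmless in single-GPU runs.)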
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_qlora_oasst1_512_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_qlora_oasst1_512_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..a7d9a22cd4e01c67d5497d70849befb0f06ded9e --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_qlora_oasst1_512_e3.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-7b' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.default +max_length = 512 +pack_to_max_length = False + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + 
type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
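+    # (with by_epoch=False the interval below counts training iterations, so a checkpoint is written every 500 iterations and only the 2 newest are kept)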
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_qlora_oasst1_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_qlora_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..3c61cb9540cca4f11fa8da69a89c8783232bb6e4 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_qlora_oasst1_e3.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-7b' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + 
trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
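+    # (interval is measured in iterations since by_epoch=False; set save_total_limit above to -1 to retain every checkpoint instead of the last 2)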
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_qlora_sql_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_qlora_sql_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..170f43c4c0ed522e0335b0293a3799533705f6ad --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_qlora_sql_e3.py @@ -0,0 +1,223 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import sql_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-7b' +use_varlen_attn = False + +# Data +data_path = 'b-mc2/sql-create-context' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.sql +evaluation_inputs = [ + ('CREATE TABLE station (name VARCHAR, lat VARCHAR, city VARCHAR)\n' + 'Find the name, latitude, and city of stations with latitude ' + 'above 50.'), + ('CREATE TABLE weather (zip_code VARCHAR, mean_visibility_miles ' + 'INTEGER)\n找到mean_visibility_miles最大的zip_code。') +] + +####################################################################### +# PART 
2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=sql_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. 
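+    # (log_metric_by_epoch=False makes the LoggerHook report loss/lr/time per iteration, consistent with the iteration-based checkpointing configured below)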
+ logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_w_internevo_dataset.py b/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_w_internevo_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..de45284b3bb5de14cde2fc34cc54c6499c41e088 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_w_internevo_dataset.py @@ -0,0 +1,196 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR +from torch.optim import AdamW +from torch.utils.data import BatchSampler +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.intern_repo import (build_packed_dataset, + load_intern_repo_tokenized_dataset) +from xtuner.dataset.samplers import InternRepoSampler +from xtuner.engine import (DatasetInfoHook, EvaluateChatHook, ThroughputHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-7b' +use_varlen_attn = True + +# Data +dataset_folder = '/path/to/sft/data/folder' # noqa: E501 +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 32768 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 1 # 1bs * 1acc * 64gpu = 64 batchsize +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 4e-5 +betas = (0.9, 0.95) +weight_decay = 0.01 +max_norm = 1 # grad clip +warm_up_ratio = 0.025 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + 
pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +#                      PART 3  Dataset & Dataloader                   # +####################################################################### +train_dataset = dict( + type=build_packed_dataset, + dataset_cfg=dict( + type=load_intern_repo_tokenized_dataset, + data_order_path=None, + folder=dataset_folder, + min_length=0, + file_type='.bin'), + packed_length=max_length, + seed=1024) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=InternRepoSampler, shuffle=True, seed=1024), + batch_sampler=dict(type=BatchSampler, drop_last=True, batch_size=1), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +#                    PART 4  Scheduler & Optimizer                    # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', +) + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1 / 40, + by_epoch=True, + begin=0, + end=warm_up_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=lr * 0.15, + by_epoch=True, + begin=warm_up_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +#                           PART 5  Runtime                           # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict( + type=DatasetInfoHook, tokenizer=tokenizer, + is_intern_repo_dataset=True), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template), + dict(type=ThroughputHook) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every iteration. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=1), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. 
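+    # (re-seeding keeps the shuffle order aligned across ranks and reproducible between runs)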
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +log_processor = dict( + by_epoch=False, + window_size=1, + mean_pattern=r'.*(loss|time|data_time|grad_norm|tflops).*') diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_w_tokenized_dataset.py b/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_w_tokenized_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..f16a7a442ddecb584d8a3d2c701fb9176fa3ee06 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_w_tokenized_dataset.py @@ -0,0 +1,204 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR +from torch.optim import AdamW +from torch.utils.data import BatchSampler +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.intern_repo import (build_packed_dataset, + load_intern_repo_tokenized_dataset) +from xtuner.dataset.samplers import InternRepoSampler +from xtuner.engine import (DatasetInfoHook, EvaluateChatHook, ThroughputHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-7b' +use_varlen_attn = True + +# Data +dataset_folder = '/path/to/sft/data/folder' # noqa: E501 +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 32768 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +# batch size per device, set to 1 if `use_varlen_attn` = True +# To clarify, enlarging the batch size essentially enlarges the `max_length`. 
+# For example, doubling the max length is tantamount to doubling the batch size +batch_size = 1 +accumulative_counts = 1 # 1bs * 1acc * 64gpu = 64 batchsize +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 4e-5 +betas = (0.9, 0.95) +weight_decay = 0.01 +max_norm = 1 # grad clip +warm_up_ratio = 0.025 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=build_packed_dataset, + dataset_cfg=dict( + type=load_intern_repo_tokenized_dataset, + data_order_path=None, + folder=dataset_folder, + min_length=0, + file_type='.bin'), + packed_length=max_length, + seed=1024) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=InternRepoSampler, shuffle=True, seed=1024), + batch_sampler=dict( + type=BatchSampler, drop_last=True, batch_size=batch_size), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', +) + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1 / 40, + by_epoch=True, + begin=0, + end=warm_up_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=lr * 0.15, + by_epoch=True, + begin=warm_up_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict( + type=DatasetInfoHook, tokenizer=tokenizer, + is_intern_repo_dataset=True), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template), + 
dict(type=ThroughputHook) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 100 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=1), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +log_processor = dict( + by_epoch=False, + window_size=1, + mean_pattern=r'.*(loss|time|data_time|grad_norm|tflops).*') diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_w_untokenized_dataset.py b/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_w_untokenized_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..4d24d5d83be29afb75920541aea1233353f29254 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_7b/internlm2_7b_w_untokenized_dataset.py @@ -0,0 +1,208 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR +from torch.optim import AdamW +from torch.utils.data import BatchSampler +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.intern_repo import (build_packed_dataset, + load_intern_repo_untokenized_dataset) +from xtuner.dataset.map_fns import template_map_fn_factory +from xtuner.dataset.samplers import InternRepoSampler +from xtuner.engine import (DatasetInfoHook, EvaluateChatHook, ThroughputHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-7b' +use_varlen_attn = True + +# Data +dataset_folder = '/mnt/petrelfs/share_data/caoweihan/v1_sample_with_legal_cate' # noqa: E501 +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 32768 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +# batch size per device, set to 1 if `use_varlen_attn` = True +# To clarify, enlarging the batch size essentially enlarges the `max_length`. 
+# For example, doubling the max length is tantamount to doubling the batch size +batch_size = 1 +accumulative_counts = 1 # 1bs * 1acc * 64gpu = 64 batchsize +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 4e-5 +betas = (0.9, 0.95) +weight_decay = 0.01 +max_norm = 1 # grad clip +warm_up_ratio = 0.025 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=build_packed_dataset, + dataset_cfg=dict( + type=load_intern_repo_untokenized_dataset, + data_order_path=None, + folder=dataset_folder, + tokenizer=tokenizer, + max_length=max_length, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + file_type='.json'), + packed_length=max_length, + seed=1024) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=InternRepoSampler, shuffle=True, seed=1024), + batch_sampler=dict( + type=BatchSampler, drop_last=True, batch_size=batch_size), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', +) + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1 / 40, + by_epoch=True, + begin=0, + end=warm_up_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=lr * 0.15, + by_epoch=True, + begin=warm_up_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict( + type=DatasetInfoHook, tokenizer=tokenizer, + is_intern_repo_dataset=True), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + 
every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template), + dict(type=ThroughputHook) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every iteration. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=1), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +log_processor = dict( + by_epoch=False, + window_size=1, + mean_pattern=r'.*(loss|time|data_time|grad_norm|tflops).*') diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_chat_1_8b/internlm2_chat_1_8b_full_alpaca_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_chat_1_8b/internlm2_chat_1_8b_full_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..0a47baa8a616ba45fe6750ab3979011e6fa81237 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_chat_1_8b/internlm2_chat_1_8b_full_alpaca_e3.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
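+# Full-parameter fine-tune (no LoRA or quantization) of internlm2-chat-1_8b on tatsu-lab/alpaca for 3 epochs. +# Rough effective-batch arithmetic, as a sketch (num_gpus is whatever the job is launched with): +#   tokens_per_optim_step = batch_size * accumulative_counts * max_length * num_gpus +#                         = 1 * 16 * 2048 * num_gpus = 32768 * num_gpus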
+from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-chat-1_8b' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # 
+####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_chat_1_8b/internlm2_chat_1_8b_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_chat_1_8b/internlm2_chat_1_8b_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..f193c61c98e8cc973515032434e86a1c2ddafeb6 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_chat_1_8b/internlm2_chat_1_8b_qlora_alpaca_e3.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
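+# QLoRA counterpart of the full fine-tune above: the base model is loaded in 4-bit NF4 (double quantization) via bitsandbytes and only the LoRA adapters (r=64, alpha=16, dropout 0.1) are trained, which is why lr is 2e-4 here versus 2e-5 for the full-parameter run. +# After training, the saved .pth adapter can be exported with the xtuner CLI; a sketch with hypothetical paths: +#   xtuner convert pth_to_hf internlm2_chat_1_8b_qlora_alpaca_e3 ./work_dirs/iter_500.pth ./hf_adapter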
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-chat-1_8b' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = 
SequenceParallelSampler \
+    if sequence_parallel_size > 1 else DefaultSampler
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=alpaca_en,
+    sampler=dict(type=sampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_chat_20b/internlm2_chat_20b_full_finetune_custom_dataset_e1.py b/data/xtuner/xtuner/configs/internlm/internlm2_chat_20b/internlm2_chat_20b_full_finetune_custom_dataset_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..c88ee5ebd8d0cb2ef908665c80e600f0b392407e --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_chat_20b/internlm2_chat_20b_full_finetune_custom_dataset_e1.py @@ -0,0 +1,226 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: +[ + { + "conversation": [ + { + "system": "", + "input": "xxx", + "output": "xxx" + }, + { + "input": "xxx", + "output": "xxx" + } + ] + }, +... +] +Please refer to https://github.com/InternLM/xtuner/blob/main/docs/en/user_guides/dataset_format.md for details. +""" # noqa: E501 +from datasets import load_dataset +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR +from torch.optim import AdamW +from torch.utils.data import BatchSampler +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import template_map_fn_factory +from xtuner.dataset.samplers import InternRepoSampler +from xtuner.engine import (DatasetInfoHook, EvaluateChatHook, ThroughputHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-chat-20b' +use_varlen_attn = True + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 32768 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +# batch size per device, set to 1 if `use_varlen_attn` = True +# To clarify, enlarging the batch size essentially enlarges the `max_length`. 
+# For example, doubling the max length is tantamount to doubling the batch size +batch_size = 1 +accumulative_counts = 1 # 1bs * 1acc * 64gpu = 64 batchsize +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 4e-5 +betas = (0.9, 0.95) +weight_decay = 0.01 +max_norm = 1 # grad clip +warm_up_ratio = 0.025 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + use_varlen_attn=use_varlen_attn, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=None, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=InternRepoSampler, shuffle=True, seed=1024), + batch_sampler=dict( + type=BatchSampler, drop_last=True, batch_size=batch_size), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', +) + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1 / 40, + by_epoch=True, + begin=0, + end=warm_up_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=lr * 0.15, + by_epoch=True, + begin=warm_up_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict( + type=DatasetInfoHook, tokenizer=tokenizer, + is_intern_repo_dataset=True), + dict( + type=EvaluateChatHook, + 
tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template),
+    dict(type=ThroughputHook)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every iteration.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=1),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+log_processor = dict(
+    by_epoch=False,
+    window_size=1,
+    mean_pattern=r'.*(loss|time|data_time|grad_norm|tflops).*')
diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_chat_20b/internlm2_chat_20b_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_chat_20b/internlm2_chat_20b_qlora_alpaca_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..6ef9398413dd7fbf0c662ef8b5373bd9321fa1f6
--- /dev/null
+++ b/data/xtuner/xtuner/configs/internlm/internlm2_chat_20b/internlm2_chat_20b_qlora_alpaca_e3.py
@@ -0,0 +1,219 @@
+# Copyright (c) OpenMMLab. All rights reserved.
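+# Usage sketch (assuming the `xtuner` CLI documented in the xtuner README;
+# exact flags may differ across versions): configs like this one are
+# typically launched with
+#
+#     xtuner train internlm2_chat_20b_qlora_alpaca_e3 --deepspeed deepspeed_zero2
+#
+# where `--deepspeed deepspeed_zero2` is optional and enables ZeRO-2
+# memory optimizations.
+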
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-chat-20b' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = 
SequenceParallelSampler \
+    if sequence_parallel_size > 1 else DefaultSampler
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=alpaca_en,
+    sampler=dict(type=sampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_chat_20b/internlm2_chat_20b_qlora_code_alpaca_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_chat_20b/internlm2_chat_20b_qlora_code_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..72deb887dc73f92938cd5cb1e6e11fe456bd96a0 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_chat_20b/internlm2_chat_20b_qlora_code_alpaca_e3.py @@ -0,0 +1,223 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import code_alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-chat-20b' +use_varlen_attn = False + +# Data +data_path = 'HuggingFaceH4/CodeAlpaca_20K' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 100 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # 
+####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=code_alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. 
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_chat_20b/internlm2_chat_20b_qlora_lawyer_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_chat_20b/internlm2_chat_20b_qlora_lawyer_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..3a767cd002e994e1642e23fbdad200a0f9e9f90e
--- /dev/null
+++ b/data/xtuner/xtuner/configs/internlm/internlm2_chat_20b/internlm2_chat_20b_qlora_lawyer_e3.py
@@ -0,0 +1,243 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import ConcatDataset, process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import (crime_kg_assitant_map_fn,
+                                    law_reference_map_fn,
+                                    template_map_fn_factory)
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.parallel.sequence import SequenceParallelSampler
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'internlm/internlm2-chat-20b'
+use_varlen_attn = False
+
+# Data
+# download data from https://github.com/LiuHC0428/LAW-GPT
+crime_kg_assitant_path = './data/CrimeKgAssitant清洗后_52k.json'
+law_reference_data_path = './data/训练数据_带法律依据_92k.json'
+prompt_template = PROMPT_TEMPLATE.internlm2_chat
+max_length = 2048
+pack_to_max_length = True
+
+# parallel
+sequence_parallel_size = 1
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+accumulative_counts *= sequence_parallel_size
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = SYSTEM_TEMPLATE.lawyer
+evaluation_inputs = ['请问离婚需要准备什么材料?', '销售鳄鱼皮包违法吗?']
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                    PART 3  Dataset & Dataloader                     #
+#######################################################################
+crime_kg_assitant = dict(
+    type=process_hf_dataset,
+    dataset=dict(
+        type=load_dataset,
+        path='json',
+        data_files=dict(train=crime_kg_assitant_path)),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=crime_kg_assitant_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+law_reference_data = dict(
+    type=process_hf_dataset,
+    dataset=dict(
+        type=load_dataset,
+        path='json',
+        data_files=dict(train=law_reference_data_path)),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=law_reference_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataset = dict(
+    type=ConcatDataset, datasets=[crime_kg_assitant, law_reference_data])
+
+sampler = SequenceParallelSampler \
+    if sequence_parallel_size > 1 else DefaultSampler
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=sampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_chat_20b/internlm2_chat_20b_qlora_oasst1_512_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_chat_20b/internlm2_chat_20b_qlora_oasst1_512_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..ef966f2b7ee9542d96e9aa4c961173d2f0cf474e
--- /dev/null
+++ b/data/xtuner/xtuner/configs/internlm/internlm2_chat_20b/internlm2_chat_20b_qlora_oasst1_512_e3.py
@@ -0,0 +1,219 @@
+# Copyright (c) OpenMMLab. All rights reserved.
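+# "_512" variant of the oasst1 config: max_length is 512 and
+# pack_to_max_length is False, so samples are batched individually (padded)
+# rather than packed into the 2048-token blocks used by the other configs.
+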
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-chat-20b' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 512 +pack_to_max_length = False + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + 
if sequence_parallel_size > 1 else DefaultSampler
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=sampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_chat_20b/internlm2_chat_20b_qlora_oasst1_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_chat_20b/internlm2_chat_20b_qlora_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..56af60036f510db012a2bb7b6485ea081d945010 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_chat_20b/internlm2_chat_20b_qlora_oasst1_e3.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-chat-20b' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + 
use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_chat_7b/internlm2_chat_7b_full_finetune_custom_dataset_e1.py b/data/xtuner/xtuner/configs/internlm/internlm2_chat_7b/internlm2_chat_7b_full_finetune_custom_dataset_e1.py
new file mode 100644
index 0000000000000000000000000000000000000000..936c5938f34f5948c1016917ca7317d054d9ea0f
--- /dev/null
+++ b/data/xtuner/xtuner/configs/internlm/internlm2_chat_7b/internlm2_chat_7b_full_finetune_custom_dataset_e1.py
@@ -0,0 +1,226 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+"""Data format:
+[
+    {
+        "conversation": [
+            {
+                "system": "",
+                "input": "xxx",
+                "output": "xxx"
+            },
+            {
+                "input": "xxx",
+                "output": "xxx"
+            }
+        ]
+    },
+...
+]
+Please refer to https://github.com/InternLM/xtuner/blob/main/docs/en/user_guides/dataset_format.md for details.
+"""  # noqa: E501
+from datasets import load_dataset
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR
+from torch.optim import AdamW
+from torch.utils.data import BatchSampler
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import template_map_fn_factory
+from xtuner.dataset.samplers import InternRepoSampler
+from xtuner.engine import (DatasetInfoHook, EvaluateChatHook, ThroughputHook,
+                           VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'internlm/internlm2-chat-7b'
+use_varlen_attn = True
+
+# Data
+data_files = ['/path/to/json/file.json']
+prompt_template = PROMPT_TEMPLATE.internlm2_chat
+max_length = 32768
+pack_to_max_length = True
+
+# parallel
+sequence_parallel_size = 1
+
+# Scheduler & Optimizer
+# batch size per device, set to 1 if `use_varlen_attn` = True
+# To clarify, enlarging the batch size essentially enlarges the `max_length`.
+# For example, doubling the max length is tantamount to doubling the batch size +batch_size = 1 +accumulative_counts = 1 # 1bs * 1acc * 64gpu = 64 batchsize +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 4e-5 +betas = (0.9, 0.95) +weight_decay = 0.01 +max_norm = 1 # grad clip +warm_up_ratio = 0.025 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + use_varlen_attn=use_varlen_attn, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=None, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=InternRepoSampler, shuffle=True, seed=1024), + batch_sampler=dict( + type=BatchSampler, drop_last=True, batch_size=batch_size), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', +) + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1 / 40, + by_epoch=True, + begin=0, + end=warm_up_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=lr * 0.15, + by_epoch=True, + begin=warm_up_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict( + type=DatasetInfoHook, tokenizer=tokenizer, + is_intern_repo_dataset=True), + dict( + type=EvaluateChatHook, + 
tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template),
+    dict(type=ThroughputHook)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every iteration.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=1),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+log_processor = dict(
+    by_epoch=False,
+    window_size=1,
+    mean_pattern=r'.*(loss|time|data_time|grad_norm|tflops).*')
diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_chat_7b/internlm2_chat_7b_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_chat_7b/internlm2_chat_7b_qlora_alpaca_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..3d0746d3e65ede92cee9e5ef727f23b9bc09138f
--- /dev/null
+++ b/data/xtuner/xtuner/configs/internlm/internlm2_chat_7b/internlm2_chat_7b_qlora_alpaca_e3.py
@@ -0,0 +1,219 @@
+# Copyright (c) OpenMMLab. All rights reserved.
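+# Post-training sketch (assuming the `xtuner` CLI; the ${...} placeholders
+# are illustrative): the saved .pth weights are typically converted to a
+# HuggingFace-format adapter before deployment, e.g.
+#
+#     xtuner convert pth_to_hf ${CONFIG} ${PTH} ${SAVE_PATH}
+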
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-chat-7b' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = 
SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_chat_7b/internlm2_chat_7b_qlora_code_alpaca_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_chat_7b/internlm2_chat_7b_qlora_code_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..95904d2f879337e8781a7716d8d40f23d90e0670 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_chat_7b/internlm2_chat_7b_qlora_code_alpaca_e3.py @@ -0,0 +1,223 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import code_alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-chat-7b' +use_varlen_attn = False + +# Data +data_path = 'HuggingFaceH4/CodeAlpaca_20K' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 100 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # 
+####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=code_alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. 
+ logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_chat_7b/internlm2_chat_7b_qlora_lawyer_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_chat_7b/internlm2_chat_7b_qlora_lawyer_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..7a2da9993ca772973774a5aca93e6ff6f8c59f32 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_chat_7b/internlm2_chat_7b_qlora_lawyer_e3.py @@ -0,0 +1,243 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (crime_kg_assitant_map_fn, + law_reference_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-chat-7b' +use_varlen_attn = False + +# Data +# download data from https://github.com/LiuHC0428/LAW-GPT +crime_kg_assitant_path = './data/CrimeKgAssitant清洗后_52k.json' +law_reference_data_path = './data/训练数据_带法律依据_92k.json' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the 
training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.lawyer +evaluation_inputs = ['请问离婚需要准备什么材料?', '销售鳄鱼皮包违法吗?'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +crime_kg_assitant = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=crime_kg_assitant_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=crime_kg_assitant_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +law_reference_data = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=law_reference_data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=law_reference_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[crime_kg_assitant, law_reference_data]) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + 
convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_chat_7b/internlm2_chat_7b_qlora_oasst1_512_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_chat_7b/internlm2_chat_7b_qlora_oasst1_512_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..9a4c054867d516de9d804b074ec7156950785700 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_chat_7b/internlm2_chat_7b_qlora_oasst1_512_e3.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
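+#
+# Short-context variant of the oasst1 QLoRA recipe: max_length is 512 and
+# pack_to_max_length is False, so samples are padded individually instead of
+# being packed into fixed-length blocks; this trades throughput for a smaller
+# memory footprint. Launch sketch (same assumptions as the sibling configs):
+#
+#   xtuner train internlm2_chat_7b_qlora_oasst1_512_e3.py --deepspeed deepspeed_zero2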
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-chat-7b' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 512 +pack_to_max_length = False + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if 
sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm2_chat_7b/internlm2_chat_7b_qlora_oasst1_e3.py b/data/xtuner/xtuner/configs/internlm/internlm2_chat_7b/internlm2_chat_7b_qlora_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..cf1c038ca16e996df5ff292e7d034de172fde575 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm2_chat_7b/internlm2_chat_7b_qlora_oasst1_e3.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. 
All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-chat-7b' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = 
SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..0a08c6be44f7c0ea5e7e5f126192f4a4137deeda --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_alpaca_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. 
All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-20b' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=DefaultSampler, 
shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_alpaca_enzh_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_alpaca_enzh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..048e8e6af7c180199c626723fd6b70eeb9d18b63 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_alpaca_enzh_e3.py @@ -0,0 +1,229 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
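+#
+# Bilingual QLoRA recipe for internlm-20b: the English (tatsu-lab/alpaca) and
+# Chinese (silk-road/alpaca-data-gpt4-chinese) sets are merged with
+# ConcatDataset, so each epoch traverses both. Launch sketch (assumptions as
+# in the sibling configs):
+#
+#   xtuner train internlm_20b_qlora_alpaca_enzh_e3.py --deepspeed deepspeed_zero2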
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-20b' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, 
path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_alpaca_enzh_oasst1_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_alpaca_enzh_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..2515d51f3766735768b721e5bd5e3fed1799853e --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_alpaca_enzh_oasst1_e3.py @@ -0,0 +1,244 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + oasst1_map_fn, template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-20b' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +oasst1_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + 
use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +oasst1 = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=oasst1_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[alpaca_en, alpaca_zh, oasst1]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional 
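+# (As configured here, DatasetInfoHook logs a decoded peek at the processed
+# training samples at startup, and EvaluateChatHook re-runs `evaluation_inputs`
+# through the model every `evaluation_freq` iterations so sample generations
+# show up in the training log.)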
+custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_alpaca_zh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..3fd2bb282e783d628487584b3fc0f249fc3e622e --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_alpaca_zh_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
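The `llm` and `lora` dicts in these configs are declarative specs that xtuner instantiates at runtime. As a rough, hand-written sketch of the imperative equivalent (not xtuner's actual internals; the call order here is illustrative only):

```python
import torch
from peft import LoraConfig, get_peft_model, prepare_model_for_kbit_training
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

# 4-bit NF4 quantization with fp16 compute, mirroring the config above.
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_compute_dtype=torch.float16,
    bnb_4bit_use_double_quant=True,
    bnb_4bit_quant_type='nf4')

base = AutoModelForCausalLM.from_pretrained(
    'internlm/internlm-20b',
    trust_remote_code=True,
    torch_dtype=torch.float16,
    quantization_config=bnb_config)
base = prepare_model_for_kbit_training(base)

# Attach rank-64 LoRA adapters; only these remain trainable.
lora_config = LoraConfig(
    r=64, lora_alpha=16, lora_dropout=0.1, bias='none',
    task_type='CAUSAL_LM')
model = get_peft_model(base, lora_config)
model.print_trainable_parameters()
```

The frozen base weights stay in 4-bit NF4; gradients flow only through the fp16 LoRA adapters, which is what makes a 20B model trainable on a single GPU.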
diff --git a/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_alpaca_zh_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..3fd2bb282e783d628487584b3fc0f249fc3e622e
--- /dev/null
+++ b/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_alpaca_zh_e3.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import alpaca_zh_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'internlm/internlm-20b'
+use_varlen_attn = False
+
+# Data
+alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese'
+prompt_template = PROMPT_TEMPLATE.default
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = SYSTEM_TEMPLATE.alpaca
+evaluation_inputs = [
+    '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai'
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+alpaca_zh = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=alpaca_zh_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=alpaca_zh_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=alpaca_zh,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
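Every config in this batch uses the same two-phase schedule: linear warmup over the first `warmup_ratio` of training, then cosine decay to zero. The per-iteration learning rate can be checked numerically with a standalone sketch; the total iteration count below is illustrative, not taken from any of these runs:

```python
import math

lr, start_factor = 2e-4, 1e-5
warmup_ratio, total_iters = 0.03, 10_000  # total_iters is made up for illustration

warmup_iters = int(warmup_ratio * total_iters)

def lr_at(it: int) -> float:
    if it < warmup_iters:
        # LinearLR: ramp from lr * start_factor up to lr.
        frac = it / max(warmup_iters, 1)
        return lr * (start_factor + (1 - start_factor) * frac)
    # CosineAnnealingLR: decay from lr down to eta_min = 0.
    frac = (it - warmup_iters) / max(total_iters - warmup_iters, 1)
    return 0.5 * lr * (1 + math.cos(math.pi * frac))

# ~2e-9 at step 0, 2e-4 at the end of warmup, ~0 at the last step.
print(lr_at(0), lr_at(warmup_iters), lr_at(total_iters))
```

`convert_to_iter_based=True` is what turns the epoch-based `begin`/`end` boundaries in the config into the per-iteration curve computed above.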
diff --git a/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_arxiv_gentitle_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_arxiv_gentitle_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..afc6eafd64f056e03d5c3a8e25031f57dc6faf28
--- /dev/null
+++ b/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_arxiv_gentitle_e3.py
@@ -0,0 +1,247 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import arxiv_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'internlm/internlm-20b'
+use_varlen_attn = False
+
+# Data
+# 1. Download data from https://kaggle.com/datasets/Cornell-University/arxiv
+# 2. Process data by `xtuner preprocess arxiv ${DOWNLOADED_DATA} ./data/arxiv_data.json [optional arguments]`  # noqa: E501
+data_path = './data/arxiv_data.json'
+prompt_template = PROMPT_TEMPLATE.default
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = SYSTEM_TEMPLATE.arxiv_gentile
+evaluation_inputs = [
+    ('We present InternLM, a multilingual foundational language '
+     'model with 104B parameters. InternLM is pre-trained on a large '
+     'corpora with 1.6T tokens with a multi-phase progressive '
+     'process, and then fine-tuned to align with human preferences. '
+     'We also developed a training system called Uniscale-LLM for '
+     'efficient large language model training. The evaluation on a '
+     'number of benchmarks shows that InternLM achieves '
+     'state-of-the-art performance in multiple aspects, including '
+     'knowledge understanding, reading comprehension, mathematics, '
+     'and coding. With such well-rounded capabilities, InternLM '
+     'achieves outstanding performances on comprehensive exams, '
+     'including MMLU, AGIEval, C-Eval and GAOKAO-Bench, without '
+     'resorting to external tools. On these benchmarks, InternLM '
+     'not only significantly outperforms open-source models, but '
+     'also obtains superior performance compared to ChatGPT. Also, '
+     'InternLM demonstrates excellent capability of understanding '
+     'Chinese language and Chinese culture, which makes it a '
+     'suitable foundation model to support Chinese-oriented language '
+     'applications. This manuscript gives a detailed study of '
+     'our results, with benchmarks and examples across a diverse '
+     'set of knowledge domains and tasks.'),
+    ('In this work, we develop and release Llama 2, a collection of '
+     'pretrained and fine-tuned large language models (LLMs) ranging '
+     'in scale from 7 billion to 70 billion parameters.\nOur '
+     'fine-tuned LLMs, called LLAMA 2-CHAT, are optimized for '
+     'dialogue use cases. Our models outperform open-source chat '
+     'models on most benchmarks we tested, and based on our human '
+     'evaluations for helpfulness and safety, may be a suitable '
+     'substitute for closedsource models. We provide a detailed '
+     'description of our approach to fine-tuning and safety '
+     'improvements of LLAMA 2-CHAT in order to enable the community '
+     'to build on our work and contribute to the responsible '
+     'development of LLMs.')
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(
+        type=load_dataset, path='json', data_files=dict(train=data_path)),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=arxiv_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
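With `batch_size = 1` and `accumulative_counts = 16`, each optimizer step sees an effective batch of 16 samples per GPU (times the world size). A simplified sketch of what `AmpOptimWrapper` with `loss_scale='dynamic'` does under the hood; the real wrapper lives in mmengine, so this is only the shape of the logic:

```python
import torch

accumulative_counts = 16
scaler = torch.cuda.amp.GradScaler()  # dynamic loss scaling, as in the config

def run_accumulated_step(model, optimizer, batches):
    """Accumulate fp16 gradients over several micro-batches, then step once."""
    optimizer.zero_grad()
    for i, batch in enumerate(batches):
        with torch.autocast('cuda', dtype=torch.float16):
            loss = model(**batch).loss / accumulative_counts
        scaler.scale(loss).backward()
        if (i + 1) % accumulative_counts == 0:
            scaler.unscale_(optimizer)  # so clipping sees true gradient norms
            torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm=1)
            scaler.step(optimizer)
            scaler.update()
            optimizer.zero_grad()
```

Dividing the loss by `accumulative_counts` keeps the accumulated gradient equal in expectation to one large-batch backward pass.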
diff --git a/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_code_alpaca_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_code_alpaca_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..78cf154f7ddaf2005f3d619620a79ffd642c1363
--- /dev/null
+++ b/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_code_alpaca_e3.py
@@ -0,0 +1,216 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import code_alpaca_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'internlm/internlm-20b'
+use_varlen_attn = False
+
+# Data
+data_path = 'HuggingFaceH4/CodeAlpaca_20K'
+prompt_template = PROMPT_TEMPLATE.default
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 100
+SYSTEM = SYSTEM_TEMPLATE.coder
+evaluation_inputs = [
+    ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的'
+     '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'),
+    ('Write a Python function that takes a hexadecimal color code '
+     '(e.g., #0066ee) as input and converts it into the corresponding '
+     'red, green, and blue (RGB) color component values.')
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=data_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=code_alpaca_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
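`pack_to_max_length = True` concatenates tokenized samples into fixed 2048-token blocks so no compute is wasted on padding. A naive version of the idea; xtuner's implementation additionally shuffles first (`shuffle_before_pack`) and tracks where each sample starts and ends:

```python
def pack(token_lists, max_length=2048):
    """Greedily concatenate tokenized samples into fixed-size blocks."""
    buf, packed = [], []
    for ids in token_lists:
        buf.extend(ids)
        while len(buf) >= max_length:
            packed.append(buf[:max_length])
            buf = buf[max_length:]
    return packed  # the tail shorter than max_length is dropped in this sketch

blocks = pack([[1, 2, 3]] * 2000, max_length=2048)
assert all(len(b) == 2048 for b in blocks)
```

Because every block has the same length, the dataloader above can run with `batch_size = 1` and still keep the GPU saturated.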
diff --git a/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_colorist_e5.py b/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_colorist_e5.py
new file mode 100644
index 0000000000000000000000000000000000000000..989d26206631c7940db9ddfdefbdd134e8bb1080
--- /dev/null
+++ b/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_colorist_e5.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import colors_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'internlm/internlm-20b'
+use_varlen_attn = False
+
+# Data
+data_path = 'burkelibbey/colors'
+prompt_template = PROMPT_TEMPLATE.default
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 5
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 200
+SYSTEM = SYSTEM_TEMPLATE.colorist
+evaluation_inputs = [
+    '请给我一个像天空一样清澈透明的蓝色。', 'Please give me a clear blue like the sky.'
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=data_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=colors_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
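The lawyer config that follows trains on two JSON corpora merged through `ConcatDataset`. xtuner's class follows the same indexing scheme as `torch.utils.data.ConcatDataset`: indices run end to end across the constituent datasets. A minimal sketch (sizes are illustrative, taken from the file names only):

```python
from torch.utils.data import ConcatDataset, Dataset

class ListDataset(Dataset):
    """Tiny stand-in dataset wrapping a plain Python list."""
    def __init__(self, items):
        self.items = items
    def __len__(self):
        return len(self.items)
    def __getitem__(self, i):
        return self.items[i]

crime = ListDataset(['crime sample'] * 52_000)  # ~52k records
law = ListDataset(['law sample'] * 92_000)      # ~92k records
merged = ConcatDataset([crime, law])

assert len(merged) == 144_000
print(merged[0], merged[52_000])  # first item of each constituent dataset
```

Because both constituents go through the same tokenizer, template, and packing pipeline first, the merged dataset can be sampled uniformly by the `DefaultSampler`.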
diff --git a/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_lawyer_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_lawyer_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..28596484389fb3db6bd2114c2e7b480c22f2b162
--- /dev/null
+++ b/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_lawyer_e3.py
@@ -0,0 +1,236 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import ConcatDataset, process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import (crime_kg_assitant_map_fn,
+                                    law_reference_map_fn,
+                                    template_map_fn_factory)
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'internlm/internlm-20b'
+use_varlen_attn = False
+
+# Data
+# download data from https://github.com/LiuHC0428/LAW-GPT
+crime_kg_assitant_path = './data/CrimeKgAssitant清洗后_52k.json'
+law_reference_data_path = './data/训练数据_带法律依据_92k.json'
+prompt_template = PROMPT_TEMPLATE.default
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = SYSTEM_TEMPLATE.lawyer
+evaluation_inputs = ['请问离婚需要准备什么材料?', '销售鳄鱼皮包违法吗?']
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+crime_kg_assitant = dict(
+    type=process_hf_dataset,
+    dataset=dict(
+        type=load_dataset,
+        path='json',
+        data_files=dict(train=crime_kg_assitant_path)),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=crime_kg_assitant_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+law_reference_data = dict(
+    type=process_hf_dataset,
+    dataset=dict(
+        type=load_dataset,
+        path='json',
+        data_files=dict(train=law_reference_data_path)),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=law_reference_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataset = dict(
+    type=ConcatDataset, datasets=[crime_kg_assitant, law_reference_data])
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
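`EvaluateChatHook` generates answers to `evaluation_inputs` every `evaluation_freq` iterations so training progress can be eyeballed in the logs. Stripped of mmengine's hook machinery, the behaviour amounts to something like the sketch below; `build_prompt` is an illustrative stand-in for the real `PROMPT_TEMPLATE` formatting, not xtuner's actual code:

```python
def build_prompt(system: str, text: str) -> str:
    # Illustrative template only; the real format comes from PROMPT_TEMPLATE.
    return f'{system}\n<|User|>:{text}\n<|Bot|>:'

def maybe_evaluate(model, tokenizer, it, every_n_iters=500,
                   inputs=('上海明天天气怎么样?',), system=''):
    """Periodically sample the model on fixed prompts during training."""
    if it == 0 or it % every_n_iters != 0:
        return
    model.eval()
    for text in inputs:
        prompt = build_prompt(system, text)
        ids = tokenizer(prompt, return_tensors='pt').input_ids.to(model.device)
        out = model.generate(ids, max_new_tokens=256)
        print(tokenizer.decode(out[0][ids.shape[1]:], skip_special_tokens=True))
    model.train()
```

Fixed prompts make successive samples comparable, which is more informative than loss alone for chat-style fine-tuning.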
diff --git a/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_msagent_react_e3_gpu8.py b/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_msagent_react_e3_gpu8.py
new file mode 100644
index 0000000000000000000000000000000000000000..bc8cea11e08915d2a15c9d940d2d3fa4af7fc613
--- /dev/null
+++ b/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_msagent_react_e3_gpu8.py
@@ -0,0 +1,230 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from modelscope.msdatasets import MsDataset
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import process_ms_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import (msagent_react_map_fn,
+                                    template_map_fn_factory)
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'internlm/internlm-20b'
+use_varlen_attn = False
+
+# Data
+data_path = 'damo/MSAgent-Bench'
+prompt_template = PROMPT_TEMPLATE.default
+max_length = 2048
+pack_to_max_length = False
+
+# Scheduler & Optimizer
+batch_size = 8  # per_device
+accumulative_counts = 1
+dataloader_num_workers = 2
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = (
+    '你是一个可以调用外部工具的助手,可以使用的工具包括:\n'
+    "{{\'GoogleSearch\': \'一个可以从谷歌搜索结果的API。\\n"
+    '当你需要对于一个特定问题找到简短明了的回答时,可以使用它。\\n'
+    "输入应该是一个搜索查询。\\n\\n\',"
+    "\'PythonInterpreter\': \"用来执行Python代码。代码必须是一个函数,\\n"
+    "函数名必须得是 \'solution\',代码对应你的思考过程。代码实例格式如下:\\n"
+    '```python\\n# import 依赖包\\nimport xxx\\ndef solution():'
+    '\\n # 初始化一些变量\\n variable_names_with_real_meaning = xxx'
+    '\\n # 步骤一\\n mid_variable = func(variable_names_with_real_meaning)'
+    '\\n # 步骤 x\\n mid_variable = func(mid_variable)\\n # 最后结果'
+    '\\n final_answer = func(mid_variable)\\n return final_answer'
+    "\\n```\\n\"}}\n"
+    '如果使用工具请遵循以下格式回复:\n```\n'
+    'Thought:思考你当前步骤需要解决什么问题,是否需要使用工具\n'
+    "Action:工具名称,你的工具必须从 [[\'GoogleSearch\', \'PythonInterpreter\']] 选择"
+    '\nAction Input:工具输入参数\n```\n工具返回按照以下格式回复:\n'
+    '```\nResponse:调用工具后的结果\n```'
+    '\n如果你已经知道了答案,或者你不需要工具,请遵循以下格式回复\n```'
+    '\nThought:给出最终答案的思考过程\nFinal Answer:最终答案\n```\n开始!\n')
+evaluation_inputs = ['上海明天天气怎么样?']
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+train_dataset = dict(
+    type=process_ms_dataset,
+    dataset=dict(type=MsDataset.load, dataset_name=data_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=msagent_react_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
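The oasst1 config that follows caps sequences at 512 tokens and disables packing, so variable-length samples must be padded at batch time; this is where `padding_side='right'` on the tokenizer matters. A sketch of the corresponding collate step using Hugging Face's padding API, assuming the tokenizer may need a pad token assigned:

```python
import torch
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(
    'internlm/internlm-20b', trust_remote_code=True, padding_side='right')
if tokenizer.pad_token is None:
    tokenizer.pad_token = tokenizer.eos_token  # some tokenizers define no pad token

def collate(texts, max_length=512):
    """Truncate to max_length, then right-pad the batch to equal length."""
    batch = tokenizer(
        texts, truncation=True, max_length=max_length,
        padding='longest', return_tensors='pt')
    labels = batch['input_ids'].clone()
    labels[batch['attention_mask'] == 0] = -100  # ignore pad positions in the loss
    batch['labels'] = labels
    return batch

print(collate(['hello world', 'a longer example sentence'])['input_ids'].shape)
```

Padding wastes compute relative to packing, which is why the 512-token variant also raises nothing else: the shorter context alone keeps batches cheap.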
diff --git a/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_oasst1_512_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_oasst1_512_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..ee141a41abb7f2c6810d865c315bb838615971b1
--- /dev/null
+++ b/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_oasst1_512_e3.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'internlm/internlm-20b'
+use_varlen_attn = False
+
+# Data
+data_path = 'timdettmers/openassistant-guanaco'
+prompt_template = PROMPT_TEMPLATE.default
+max_length = 512
+pack_to_max_length = False
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = ''
+evaluation_inputs = [
+    '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai'
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=data_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=oasst1_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
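Every config here keeps `use_varlen_attn = False`, but when it is enabled together with packing, attention must not cross the boundary between two packed samples. That is what `VarlenAttnArgsToMessageHubHook` forwards: cumulative sequence-length offsets, in the style of flash-attention's varlen kernels. Computing the offsets is simple; a sketch:

```python
import torch

def cumulative_seqlens(sample_lengths):
    """[3, 5, 4] -> tensor([0, 3, 8, 12]): start offsets of each packed sample."""
    cu = torch.zeros(len(sample_lengths) + 1, dtype=torch.int32)
    cu[1:] = torch.cumsum(
        torch.tensor(sample_lengths, dtype=torch.int32), dim=0)
    return cu

# Varlen attention kernels consume these offsets instead of a square mask,
# so each packed sample attends only within its own [cu[i], cu[i+1]) slice.
print(cumulative_seqlens([3, 5, 4]))
```

Without this, tokens from one packed sample could attend to an unrelated neighbour in the same 2048-token block.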
quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_open_platypus_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_open_platypus_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..f9c2b82175f65154fb364df90e6536e69a3ac562
--- /dev/null
+++ b/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_open_platypus_e3.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'internlm/internlm-20b'
+use_varlen_attn = False
+
+# Data
+data_path = 'garage-bAInd/Open-Platypus'
+prompt_template = PROMPT_TEMPLATE.default
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = SYSTEM_TEMPLATE.alpaca
+evaluation_inputs = [
+    '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai'
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
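+        # `trust_remote_code=True` allows transformers to execute the custom
+        # modeling code shipped in the InternLM checkpoint repo.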
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                    PART 3  Dataset & Dataloader                     #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=data_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=alpaca_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_sql_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_sql_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..1595b38ef370649412afa61d842cb64c5b8bd6bf
--- /dev/null
+++ b/data/xtuner/xtuner/configs/internlm/internlm_20b/internlm_20b_qlora_sql_e3.py
@@ -0,0 +1,216 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import sql_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'internlm/internlm-20b'
+use_varlen_attn = False
+
+# Data
+data_path = 'b-mc2/sql-create-context'
+prompt_template = PROMPT_TEMPLATE.default
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = SYSTEM_TEMPLATE.sql
+evaluation_inputs = [
+    ('CREATE TABLE station (name VARCHAR, lat VARCHAR, city VARCHAR)\n'
+     'Find the name, latitude, and city of stations with latitude '
+     'above 50.'),
+    ('CREATE TABLE weather (zip_code VARCHAR, mean_visibility_miles '
+     'INTEGER)\n找到mean_visibility_miles最大的zip_code。')
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
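+    # SupervisedFinetune wraps the base llm; with the `lora` config below,
+    # the base weights stay frozen and only the adapter parameters update.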
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                    PART 3  Dataset & Dataloader                     #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=data_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=sql_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_full_alpaca_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_full_alpaca_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..a3ec576e3d4e9d81c16f900e24e32f32c27e4589
--- /dev/null
+++ b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_full_alpaca_e3.py
@@ -0,0 +1,193 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from torch.optim import AdamW
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 ThroughputHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'internlm/internlm-7b'
+use_varlen_attn = False
+
+# Data
+alpaca_en_path = 'tatsu-lab/alpaca'
+prompt_template = PROMPT_TEMPLATE.internlm_chat
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-5
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = SYSTEM_TEMPLATE.alpaca
+evaluation_inputs = [
+    '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai'
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True))
+#######################################################################
+#                    PART 3  Dataset & Dataloader                     #
+#######################################################################
+alpaca_en = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=alpaca_en_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=alpaca_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=alpaca_en,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template),
+    dict(type=ThroughputHook)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_full_alpaca_enzh_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_full_alpaca_enzh_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..69fee7b3aefa980127ec247361b2ca3a7d2d55ec
--- /dev/null
+++ b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_full_alpaca_enzh_e3.py
@@ -0,0 +1,210 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from torch.optim import AdamW
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+from xtuner.dataset import ConcatDataset, process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn,
+                                    template_map_fn_factory)
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 ThroughputHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'internlm/internlm-7b'
+use_varlen_attn = False
+
+# Data
+alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese'
+alpaca_en_path = 'tatsu-lab/alpaca'
+prompt_template = PROMPT_TEMPLATE.internlm_chat
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-5
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = SYSTEM_TEMPLATE.alpaca
+evaluation_inputs = [
+    '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai'
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
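+        # Full fine-tune: no quantization_config or lora here, so all 7B
+        # parameters are updated (mixed precision comes from the
+        # AmpOptimWrapper in PART 4).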
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True))
+#######################################################################
+#                    PART 3  Dataset & Dataloader                     #
+#######################################################################
+alpaca_en = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=alpaca_en_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=alpaca_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+alpaca_zh = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=alpaca_zh_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=alpaca_zh_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh])
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template),
+    dict(type=ThroughputHook)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_full_alpaca_enzh_oasst1_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_full_alpaca_enzh_oasst1_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..23f30895903a768503db0f1d801bd9861e03b62e
--- /dev/null
+++ b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_full_alpaca_enzh_oasst1_e3.py
@@ -0,0 +1,225 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from torch.optim import AdamW
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+from xtuner.dataset import ConcatDataset, process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn,
+                                    oasst1_map_fn, template_map_fn_factory)
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 ThroughputHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'internlm/internlm-7b'
+use_varlen_attn = False
+
+# Data
+alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese'
+alpaca_en_path = 'tatsu-lab/alpaca'
+oasst1_path = 'timdettmers/openassistant-guanaco'
+prompt_template = PROMPT_TEMPLATE.internlm_chat
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-5
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = SYSTEM_TEMPLATE.alpaca
+evaluation_inputs = [
+    '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai'
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
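+        # The kwargs below go straight to AutoModelForCausalLM.from_pretrained;
+        # this variant differs from the plain enzh config only in PART 3,
+        # where oasst1 joins the ConcatDataset.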
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True))
+#######################################################################
+#                    PART 3  Dataset & Dataloader                     #
+#######################################################################
+alpaca_en = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=alpaca_en_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=alpaca_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+alpaca_zh = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=alpaca_zh_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=alpaca_zh_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+oasst1 = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=oasst1_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=oasst1_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataset = dict(
+    type=ConcatDataset, datasets=[alpaca_en, alpaca_zh, oasst1])
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template),
+    dict(type=ThroughputHook)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_full_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_full_alpaca_zh_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..a1f854f182d7d08ea9ea9e137fa75a819607a992
--- /dev/null
+++ b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_full_alpaca_zh_e3.py
@@ -0,0 +1,193 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from torch.optim import AdamW
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import alpaca_zh_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 ThroughputHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'internlm/internlm-7b'
+use_varlen_attn = False
+
+# Data
+alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese'
+prompt_template = PROMPT_TEMPLATE.internlm_chat
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-5
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = SYSTEM_TEMPLATE.alpaca
+evaluation_inputs = [
+    '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai'
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
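+# These dicts are lazy: mmengine builds the tokenizer and model from `type`
+# plus the remaining kwargs only when training starts, so editing this file
+# never triggers a model download by itself.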
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True))
+#######################################################################
+#                    PART 3  Dataset & Dataloader                     #
+#######################################################################
+alpaca_zh = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=alpaca_zh_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=alpaca_zh_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=alpaca_zh,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template),
+    dict(type=ThroughputHook)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_full_intern_repo_dataset_template.py b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_full_intern_repo_dataset_template.py
new file mode 100644
index 0000000000000000000000000000000000000000..a389caa9567a9bc485fdd34c26c9d592a2019d98
--- /dev/null
+++ b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_full_intern_repo_dataset_template.py
@@ -0,0 +1,200 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR
+from torch.optim import AdamW
+from torch.utils.data import BatchSampler
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.intern_repo import (build_packed_dataset,
+                                        load_intern_repo_tokenized_dataset)
+from xtuner.dataset.samplers import InternRepoSampler
+from xtuner.engine import (DatasetInfoHook, EvaluateChatHook, ThroughputHook,
+                           VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = '/path/to/your/base/model'
+use_varlen_attn = True
+
+# Data
+dataset_folder = '/path/to/your/train/dataset'
+prompt_template = PROMPT_TEMPLATE.internlm_chat
+max_length = 8192
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+# batch size per device, set to 1 if `use_varlen_attn` = True
+# To clarify, enlarging the batch size essentially enlarges the `max_length`.
+# For example, doubling the max length is tantamount to doubling the batch size
+batch_size = 1
+accumulative_counts = 4  # 1bs * 4acc * 32gpu = 128 batchsize
+dataloader_num_workers = 4
+max_epochs = 1
+optim_type = AdamW
+lr = 4e-5
+betas = (0.9, 0.95)
+weight_decay = 0.01
+max_norm = 1  # grad clip
+warm_up_ratio = 0.025
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = ''
+evaluation_inputs = [
+    '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai'
+]
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True))
+
+#######################################################################
+#                    PART 3  Dataset & Dataloader                     #
+#######################################################################
+train_dataset = dict(
+    type=build_packed_dataset,
+    dataset_cfg=dict(
+        type=load_intern_repo_tokenized_dataset,
+        folder=dataset_folder,
+        min_length=0,
+        file_type='.bin'),
+    packed_length=max_length,
+    seed=1024)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=InternRepoSampler, shuffle=True, seed=1024),
+    batch_sampler=dict(
+        type=BatchSampler, drop_last=True, batch_size=batch_size),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+)
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type='LinearLR',
+        start_factor=1 / 40,
+        by_epoch=True,
+        begin=0,
+        end=warm_up_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=lr * 0.15,
+        by_epoch=True,
+        begin=warm_up_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(
+        type=DatasetInfoHook, tokenizer=tokenizer,
+        is_intern_repo_dataset=True),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template),
+    dict(type=ThroughputHook)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(
+    by_epoch=False,
+    window_size=1,
+    mean_pattern=r'.*(loss|time|data_time|grad_norm|tflops).*')
diff --git a/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_full_oasst1_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_full_oasst1_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..eb0961068b1fae3bf69d600669d47e0efe1827b4
--- /dev/null
+++ b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_full_oasst1_e3.py
@@ -0,0 +1,194 @@
+# Copyright (c) OpenMMLab. All rights reserved.
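+# Full-parameter fine-tune of internlm-7b on timdettmers/openassistant-guanaco
+# for 3 epochs; note lr=2e-5 here, vs 2e-4 in the QLoRA configs above.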
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from torch.optim import AdamW
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 ThroughputHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'internlm/internlm-7b'
+use_varlen_attn = False
+
+# Data
+data_path = 'timdettmers/openassistant-guanaco'
+prompt_template = PROMPT_TEMPLATE.internlm_chat
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-5
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = ''
+evaluation_inputs = [
+    '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai'
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True))
+
+#######################################################################
+#                    PART 3  Dataset & Dataloader                     #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=data_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=oasst1_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
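+    # With batch_size=1 and accumulative_counts=16, the optimizer steps once
+    # every 16 packed 2048-token sequences (an effective batch of 16 per GPU).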
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template),
+    dict(type=ThroughputHook)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_alpaca_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..e7d3e41843106f604cbd78c5ac852d35805b5ca9
--- /dev/null
+++ b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_alpaca_e3.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
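+# QLoRA fine-tune of internlm-7b on tatsu-lab/alpaca: the base model is
+# loaded in 4-bit NF4 and only LoRA adapters (r=64) are trained; note the
+# higher lr=2e-4 used for adapter-only training.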
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'internlm/internlm-7b'
+use_varlen_attn = False
+
+# Data
+alpaca_en_path = 'tatsu-lab/alpaca'
+prompt_template = PROMPT_TEMPLATE.default
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = SYSTEM_TEMPLATE.alpaca
+evaluation_inputs = [
+    '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai'
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                    PART 3  Dataset & Dataloader                     #
+#######################################################################
+alpaca_en = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=alpaca_en_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=alpaca_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=alpaca_en,
+    sampler=dict(type=DefaultSampler, shuffle=True),
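+    # Since pack_to_max_length=True, each dataset element is already one
+    # packed max_length-token block; the collate_fn below just stacks them.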
collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_alpaca_enzh_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_alpaca_enzh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..a7e4b90cea555b439de25203b0e8fec77ddcc2c5 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_alpaca_enzh_e3.py @@ -0,0 +1,229 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
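+# The `dict(type=...)` blocks in PART 2 below are mmengine lazy-constructor
+# specs. As a rough eager-Python sketch (an illustration, not code from this
+# file), the model they describe is built approximately like this:
+#
+#   from peft import get_peft_model
+#   llm = AutoModelForCausalLM.from_pretrained(
+#       'internlm/internlm-7b',
+#       torch_dtype=torch.float16,
+#       quantization_config=BitsAndBytesConfig(
+#           load_in_4bit=True, bnb_4bit_quant_type='nf4',
+#           bnb_4bit_compute_dtype=torch.float16,
+#           bnb_4bit_use_double_quant=True))
+#   peft_model = get_peft_model(llm, LoraConfig(
+#       r=64, lora_alpha=16, lora_dropout=0.1, task_type='CAUSAL_LM'))
+#
+# i.e. QLoRA: frozen base weights in 4-bit NF4, trainable rank-64 LoRA
+# adapters (effective scaling lora_alpha / r = 16 / 64 = 0.25) in fp16.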
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-7b' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, 
path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_alpaca_enzh_oasst1_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_alpaca_enzh_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..3badde9db99a3c92c8ea973fdbe82fcca7c5ff35 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_alpaca_enzh_oasst1_e3.py @@ -0,0 +1,244 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + oasst1_map_fn, template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-7b' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +oasst1_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + 
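+    # NOTE: with use_varlen_attn=False (as set in PART 1) while
+    # pack_to_max_length=True, samples packed into the same 2048-token block
+    # can attend across their boundaries; enabling variable-length attention
+    # is what restores per-sample attention masks.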
use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +oasst1 = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=oasst1_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[alpaca_en, alpaca_zh, oasst1]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional 
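+# (EvaluateChatHook below generates completions for `evaluation_inputs`
+# every `evaluation_freq` iterations, i.e. every 500 iterations here, giving
+# a qualitative probe of the adapter at the same cadence as checkpointing.)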
+custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_alpaca_zh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..7f9fe982bf253e01e9fed6985c2662a31e7c3473 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_alpaca_zh_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
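+# A quick worked check on the PART 1 optimizer settings below: the effective
+# batch size is
+#
+#   batch_size * accumulative_counts = 1 * 16 = 16 samples per device,
+#
+# multiplied by the number of GPUs for the global batch; AdamW steps once
+# per 16 forward/backward passes on each device.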
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_zh_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-7b' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_zh, + sampler=dict(type=DefaultSampler, 
shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_arxiv_gentitle_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_arxiv_gentitle_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..271af523eb35c21f5ee425471856244b5fd66943 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_arxiv_gentitle_e3.py @@ -0,0 +1,247 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
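+# Schedule arithmetic for the `param_scheduler` list in PART 4 below (a
+# worked check): with warmup_ratio = 0.03 and max_epochs = 3, LinearLR warms
+# the learning rate up over 0.03 * 3 = 0.09 epochs, after which
+# CosineAnnealingLR decays it from epoch 0.09 to epoch 3;
+# `convert_to_iter_based=True` translates these fractional epoch boundaries
+# into iteration counts at runtime.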
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import arxiv_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-7b' +use_varlen_attn = False + +# Data +# 1. Download data from https://kaggle.com/datasets/Cornell-University/arxiv +# 2. Process data by `xtuner preprocess arxiv ${DOWNLOADED_DATA} ./data/arxiv_data.json [optional arguments]` # noqa: E501 +data_path = './data/arxiv_data.json' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.arxiv_gentile +evaluation_inputs = [ + ('We present InternLM, a multilingual foundational language ' + 'model with 104B parameters. InternLM is pre-trained on a large ' + 'corpora with 1.6T tokens with a multi-phase progressive ' + 'process, and then fine-tuned to align with human preferences. ' + 'We also developed a training system called Uniscale-LLM for ' + 'efficient large language model training. The evaluation on a ' + 'number of benchmarks shows that InternLM achieves ' + 'state-of-the-art performance in multiple aspects, including ' + 'knowledge understanding, reading comprehension, mathematics, ' + 'and coding. With such well-rounded capabilities, InternLM ' + 'achieves outstanding performances on comprehensive exams, ' + 'including MMLU, AGIEval, C-Eval and GAOKAO-Bench, without ' + 'resorting to external tools. On these benchmarks, InternLM ' + 'not only significantly outperforms open-source models, but ' + 'also obtains superior performance compared to ChatGPT. Also, ' + 'InternLM demonstrates excellent capability of understanding ' + 'Chinese language and Chinese culture, which makes it a ' + 'suitable foundation model to support Chinese-oriented language ' + 'applications. This manuscript gives a detailed study of ' + 'our results, with benchmarks and examples across a diverse ' + 'set of knowledge domains and tasks.'), + ('In this work, we develop and release Llama 2, a collection of ' + 'pretrained and fine-tuned large language models (LLMs) ranging ' + 'in scale from 7 billion to 70 billion parameters.\nOur ' + 'fine-tuned LLMs, called LLAMA 2-CHAT, are optimized for ' + 'dialogue use cases. 
Our models outperform open-source chat ' + 'models on most benchmarks we tested, and based on our human ' + 'evaluations for helpfulness and safety, may be a suitable ' + 'substitute for closedsource models. We provide a detailed ' + 'description of our approach to fine-tuning and safety ' + 'improvements of LLAMA 2-CHAT in order to enable the community ' + 'to build on our work and contribute to the responsible ' + 'development of LLMs.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=arxiv_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks 
= [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_code_alpaca_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_code_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..a83ea8c411189c8ece78e264fa2bd5e7c105f2dc --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_code_alpaca_e3.py @@ -0,0 +1,216 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
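+# Checkpointing note (a reading of the CheckpointHook settings in PART 5):
+# with by_epoch=False the hook's `interval` is counted in iterations, so a
+# checkpoint is written every save_steps = 500 iterations, and
+# max_keep_ckpts = save_total_limit = 2 retains only the two most recent
+# checkpoints on disk.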
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import code_alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-7b' +use_varlen_attn = False + +# Data +data_path = 'HuggingFaceH4/CodeAlpaca_20K' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 100 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=code_alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + 
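+    # pack_to_max_length concatenates tokenized samples (shuffled first, per
+    # the flag above) into fixed blocks of max_length = 2048 tokens, so each
+    # forward pass is full length and training throughput stays high.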
pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_colorist_e5.py b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_colorist_e5.py new file mode 100644 index 0000000000000000000000000000000000000000..a3bea987fe529ea9df0512c6be609d4c82b3e568 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_colorist_e5.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import colors_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-7b' +use_varlen_attn = False + +# Data +data_path = 'burkelibbey/colors' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 5 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 200 +SYSTEM = SYSTEM_TEMPLATE.colorist +evaluation_inputs = [ + '请给我一个像天空一样清澈透明的蓝色。', 'Please give me a clear blue like the sky.' 
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=colors_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. 
+ timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_json_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_json_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..84811971a9392098e8a7fd112413a9a804e35bf1 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_json_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-7b' +use_varlen_attn = False + +# Data +data_path = 'path/to/your/json_data' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + 
type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. 
+ param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_lawyer_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_lawyer_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..dc1c82dd3128aca83f623565a482626123f11cb5 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_lawyer_e3.py @@ -0,0 +1,236 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (crime_kg_assitant_map_fn, + law_reference_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-7b' +use_varlen_attn = False + +# Data +# download data from https://github.com/LiuHC0428/LAW-GPT +crime_kg_assitant_path = './data/CrimeKgAssitant清洗后_52k.json' +law_reference_data_path = './data/训练数据_带法律依据_92k.json' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.lawyer +evaluation_inputs = ['请问离婚需要准备什么材料?', '销售鳄鱼皮包违法吗?'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = 
dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +crime_kg_assitant = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=crime_kg_assitant_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=crime_kg_assitant_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +law_reference_data = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=law_reference_data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=law_reference_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[crime_kg_assitant, law_reference_data]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, 
tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_medical_e1.py b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_medical_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..e12c5ea291f3ebf10c3fc45811f4b7a453459712 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_medical_e1.py @@ -0,0 +1,214 @@ +# Copyright (c) OpenMMLab. All rights reserved.
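# --- Editor's note (not part of the original file) ---------------------
# Every config in this series applies the same QLoRA recipe: load the
# base model 4-bit (NF4, double quantization, fp16 compute) and train
# LoRA adapters (r=64, alpha=16) on top. A minimal hedged sketch of the
# equivalent direct Hugging Face call, for orientation only:
#
#     import torch
#     from transformers import AutoModelForCausalLM, BitsAndBytesConfig
#
#     bnb = BitsAndBytesConfig(
#         load_in_4bit=True,
#         bnb_4bit_quant_type='nf4',
#         bnb_4bit_use_double_quant=True,
#         bnb_4bit_compute_dtype=torch.float16)
#     model = AutoModelForCausalLM.from_pretrained(
#         'internlm/internlm-7b',
#         quantization_config=bnb,
#         trust_remote_code=True)
# -----------------------------------------------------------------------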
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import medical_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-7b' +use_varlen_attn = False + +# Data +data_path = 'shibing624/medical' +data_config_name = 'finetune' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.medical +evaluation_inputs = [ + '我有家族遗传性的过敏,请问可以可以献血吗?', '我爷爷有高血压,请问他可以喝咖啡吗?', + '我女儿今年3岁了,从昨天晚上九点开始腹泻,到现在已经八个小时了,请问应该怎么办?' 
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path, name=data_config_name), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=medical_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. 
+ timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_moss_sft_all_e1.py b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_moss_sft_all_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..b4292e1ab8bc0a7b37e26e8ddffb9dc719ee9dba --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_moss_sft_all_e1.py @@ -0,0 +1,217 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, MOSSSFTDataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-7b' +bot_name = 'InternLM' +use_varlen_attn = False + +# Data +# Download data from https://huggingface.co/datasets/fnlp/moss-003-sft-data +moss_sft_no_plugins_path = './data/moss-003-sft-no-tools.jsonl' +moss_sft_plugins_path = './data/conversations_with_tools_with_inner_instruction_no_text2image_train_all_random_meta0.5_0.1_0.01_moss_0709.jsonl' # noqa: E501 +max_length = 2048 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +SYSTEM = SYSTEM_TEMPLATE.moss_sft +prompt_template = PROMPT_TEMPLATE.moss_sft +evaluation_freq = 500 +evaluation_inputs = [ + '一个球体的表面积是384平方厘米,求它的体积。', '今有鸡兔同笼,上有二十头,下有六十二足,
问鸡兔各几何?', '介绍一下比尔盖茨' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +moss_sft_no_plugins = dict( + type=MOSSSFTDataset, + data_file=moss_sft_no_plugins_path, + bot_name=bot_name, + tokenizer=tokenizer, + max_length=max_length) + +moss_sft_plugins = dict( + type=MOSSSFTDataset, + data_file=moss_sft_plugins_path, + bot_name=bot_name, + tokenizer=tokenizer, + max_length=max_length) + +train_dataset = dict( + type=ConcatDataset, datasets=[moss_sft_no_plugins, moss_sft_plugins]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + stop_words=[''], + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. 
+ timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_moss_sft_all_e2_gpu8.py b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_moss_sft_all_e2_gpu8.py new file mode 100644 index 0000000000000000000000000000000000000000..46d1db42ee75f7da24ab3058360c181c98256cc9 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_moss_sft_all_e2_gpu8.py @@ -0,0 +1,217 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, MOSSSFTDataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-7b' +bot_name = 'InternLM' +use_varlen_attn = False + +# Data +# Download data from https://huggingface.co/datasets/fnlp/moss-003-sft-data +moss_sft_no_plugins_path = './data/moss-003-sft-no-tools.jsonl' +moss_sft_plugins_path = './data/conversations_with_tools_with_inner_instruction_no_text2image_train_all_random_meta0.5_0.1_0.01_moss_0709.jsonl' # noqa: E501 +max_length = 2048 + +# Scheduler & Optimizer +batch_size = 8 # per_device +accumulative_counts = 1 +dataloader_num_workers = 2 +max_epochs = 2 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +SYSTEM = SYSTEM_TEMPLATE.moss_sft +prompt_template = PROMPT_TEMPLATE.moss_sft +evaluation_freq = 500 +evaluation_inputs = [ + '一个球体的表面积是384平方厘米,求它的体积。',
'今有鸡兔同笼,上有二十头,下有六十二足, 问鸡兔各几何?', '介绍一下比尔盖茨' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +moss_sft_no_plugins = dict( + type=MOSSSFTDataset, + data_file=moss_sft_no_plugins_path, + bot_name=bot_name, + tokenizer=tokenizer, + max_length=max_length) + +moss_sft_plugins = dict( + type=MOSSSFTDataset, + data_file=moss_sft_plugins_path, + bot_name=bot_name, + tokenizer=tokenizer, + max_length=max_length) + +train_dataset = dict( + type=ConcatDataset, datasets=[moss_sft_no_plugins, moss_sft_plugins]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + stop_words=[''], + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of 
every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_moss_sft_plugins_e1.py b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_moss_sft_plugins_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..dc89be25756efe85e2492bc73a209c77cfebabdf --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_moss_sft_plugins_e1.py @@ -0,0 +1,206 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import MOSSSFTDataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-7b' +bot_name = 'InternLM' +use_varlen_attn = False + +# Data +# Download data from https://huggingface.co/datasets/fnlp/moss-003-sft-data +moss_sft_plugins_path = './data/conversations_with_tools_with_inner_instruction_no_text2image_train_all_random_meta0.5_0.1_0.01_moss_0709.jsonl' # noqa: E501 +max_length = 2048 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +SYSTEM = SYSTEM_TEMPLATE.moss_sft +prompt_template = PROMPT_TEMPLATE.moss_sft +evaluation_freq = 500 +evaluation_inputs = [ + '一个球体的表面积是384平方厘米,求它的体积。', '今有鸡兔同笼,上有二十头,下有六十二足, 问鸡兔各几何?', '介绍一下比尔盖茨' +] +
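# Editor's sketch (added for illustration, not in the original config):
# the optimizer steps once per `accumulative_counts` forward passes, so
# the effective batch per update is batch_size * accumulative_counts on
# each GPU, times the number of GPUs under the launcher.
effective_batch_per_gpu = batch_size * accumulative_counts  # 1 * 16 = 16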
+####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=MOSSSFTDataset, + data_file=moss_sft_plugins_path, + bot_name=bot_name, + tokenizer=tokenizer, + max_length=max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + stop_words=[''], + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_msagent_react_e3_gpu8.py b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_msagent_react_e3_gpu8.py new file mode 100644 index 0000000000000000000000000000000000000000..c8402f140fc18df6cf9120254e457bccdb9a441f --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_msagent_react_e3_gpu8.py @@ -0,0 +1,230 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from modelscope.msdatasets import MsDataset +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_ms_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (msagent_react_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-7b' +use_varlen_attn = False + +# Data +data_path = 'damo/MSAgent-Bench' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = False + +# Scheduler & Optimizer +batch_size = 8 # per_device +accumulative_counts = 1 +dataloader_num_workers = 2 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = ( + '你是一个可以调用外部工具的助手,可以使用的工具包括:\n' + "{{\'GoogleSearch\': \'一个可以从谷歌搜索结果的API。\\n" + '当你需要对于一个特定问题找到简短明了的回答时,可以使用它。\\n' + "输入应该是一个搜索查询。\\n\\n\'," + "\'PythonInterpreter\': \"用来执行Python代码。代码必须是一个函数,\\n" + "函数名必须得是 \'solution\',代码对应你的思考过程。代码实例格式如下:\\n" + '```python\\n# import 依赖包\\nimport xxx\\ndef solution():' + '\\n # 初始化一些变量\\n variable_names_with_real_meaning = xxx' + '\\n # 步骤一\\n mid_variable = func(variable_names_with_real_meaning)' + '\\n # 步骤 x\\n mid_variable = func(mid_variable)\\n # 最后结果' + '\\n
final_answer = func(mid_variable)\\n return final_answer' + "\\n```\\n\"}}\n" + '如果使用工具请遵循以下格式回复:\n```\n' + 'Thought:思考你当前步骤需要解决什么问题,是否需要使用工具\n' + "Action:工具名称,你的工具必须从 [[\'GoogleSearch\', \'PythonInterpreter\']] 选择" + '\nAction Input:工具输入参数\n```\n工具返回按照以下格式回复:\n' + '```\nResponse:调用工具后的结果\n```' + '\n如果你已经知道了答案,或者你不需要工具,请遵循以下格式回复\n```' + '\nThought:给出最终答案的思考过程\nFinal Answer:最终答案\n```\n开始!\n') +evaluation_inputs = ['上海明天天气怎么样?'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_ms_dataset, + dataset=dict(type=MsDataset.load, dataset_name=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=msagent_react_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, 
tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_oasst1_512_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_oasst1_512_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..faa76b97c0f05d793dedbb0ce59ac86575c2b789 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_oasst1_512_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved.
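# --- Editor's note (not part of the original file) ---------------------
# This 512-token variant sets pack_to_max_length = False, so each sample
# is tokenized and truncated on its own instead of being concatenated
# into full-length blocks as in the 2048-token configs. A toy sketch of
# the two behaviours (hypothetical helpers, illustration only):
#
#     def truncate(ids, max_length=512):
#         return ids[:max_length]            # one sample per sequence
#
#     def pack(samples, max_length=2048):
#         buf = []
#         for ids in samples:                # many samples per sequence
#             buf.extend(ids)
#             while len(buf) >= max_length:
#                 yield buf[:max_length]
#                 buf = buf[max_length:]
# -----------------------------------------------------------------------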
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-7b' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.default +max_length = 512 +pack_to_max_length = False + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + 
collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_oasst1_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..f9f7e3754af4bf9f3dd72b0380d3bb7c185d9348 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_oasst1_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved.
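# --- Editor's note (not part of the original file) ---------------------
# Entries such as dict(type=AutoTokenizer.from_pretrained, ...) follow
# mmengine's lazy-build convention: nothing is instantiated at import
# time; the runner later pops 'type' and calls it with the remaining
# keys. A minimal sketch of that convention (not mmengine's actual
# builder):
#
#     def build(cfg):
#         cfg = dict(cfg)                # copy; leave the config intact
#         factory = cfg.pop('type')      # class or callable
#         kwargs = {k: build(v) if isinstance(v, dict) and 'type' in v
#                   else v
#                   for k, v in cfg.items()}
#         return factory(**kwargs)
# -----------------------------------------------------------------------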
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-7b' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + 
collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_oasst1_e3_hf.py b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_oasst1_e3_hf.py new file mode 100644 index 0000000000000000000000000000000000000000..3f6544867ae69c3d6e1e0b1102e6dc6325b211bf --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_oasst1_e3_hf.py @@ -0,0 +1,78 @@ +# Copyright (c) OpenMMLab. All rights reserved.
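# --- Editor's note (not part of the original file) ---------------------
# The *_hf variant below declares the same QLoRA pieces but targets the
# plain Hugging Face Trainer instead of mmengine's runner. A hedged
# sketch of how the pieces compose by hand (real peft/transformers APIs;
# base_model, lora_config, training_args and dataset stand in for the
# dicts declared in the file):
#
#     from peft import get_peft_model
#
#     model = get_peft_model(base_model, lora_config)   # attach LoRA
#     trainer = Trainer(model=model, args=training_args,
#                       train_dataset=dataset)
#     trainer.train()
# -----------------------------------------------------------------------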
+import torch +from datasets import load_dataset +from peft import LoraConfig +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig, Trainer, TrainingArguments) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.utils import PROMPT_TEMPLATE + +framework = 'huggingface' +pretrained_model_name_or_path = 'internlm/internlm-7b' +dataset_name_or_path = 'timdettmers/openassistant-guanaco' +max_length = 2048 +pack_to_max_length = True +prompt_template = PROMPT_TEMPLATE.default + +trainer = Trainer + +training_args = dict( + type=TrainingArguments, + do_train=True, + learning_rate=2e-4, + weight_decay=0, + lr_scheduler_type='cosine', + warmup_steps=100, + per_device_train_batch_size=1, + gradient_accumulation_steps=16, + num_train_epochs=3, + fp16=True, + logging_steps=1, + optim='paged_adamw_32bit', + save_strategy='steps', + save_steps=1000, + save_total_limit=2, + ddp_find_unused_parameters=False) + +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')) + +lora = dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM') + +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=dataset_name_or_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_oasst1_mmlu_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_oasst1_mmlu_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..9b04a8b64a7d1c3f1cdb6a080b3aeb0246a4be77 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_oasst1_mmlu_e3.py @@ -0,0 +1,269 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
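# --- Editor's note (not part of the original file) ---------------------
# The config below extends the oasst1 recipe with five-shot MMLU
# evaluation: train_cfg gains val_interval=evaluate_steps, and the
# ValLoop/TestLoop run MMLUMetric over the val/test splits. MMLU scoring
# is multiple-choice accuracy over A/B/C/D answers; a toy illustration
# (hypothetical helper, not MMLUMetric's implementation):
#
#     def choice_accuracy(predicted, gold):
#         assert len(predicted) == len(gold)
#         return sum(p == g for p, g in zip(predicted, gold)) / len(gold)
# -----------------------------------------------------------------------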
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn, mmlu_collate_fn +from xtuner.dataset.map_fns import (default_map_fn, oasst1_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.evaluation import MMLUMetric +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-7b' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Val/Test data +# Download from https://github.com/artidoro/qlora/tree/main/data/mmlu +mmlu_data_root = './data/mmlu/' +evaluate_steps = 500 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + 
use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +mmlu_fs_dataset = dict( + type=load_dataset, + path='json', + data_files=dict( + val=mmlu_data_root + 'five_shot_mmlu_val.json', + test=mmlu_data_root + 'five_shot_mmlu_test.json')) + +val_mmlu_fs = dict( + type=process_hf_dataset, + dataset=mmlu_fs_dataset, + tokenizer=tokenizer, + dataset_map_fn=default_map_fn, + max_length=max_length, + input_ids_with_output=False, + pack_to_max_length=False, + split='val') + +val_dataloader = dict( + batch_size=1, + num_workers=0, + dataset=val_mmlu_fs, + sampler=dict(type=DefaultSampler, shuffle=False), + collate_fn=dict(type=mmlu_collate_fn)) + +val_evaluator = dict( + type=MMLUMetric, tokenizer=tokenizer, prefix='mmlu_fs_val') + +test_mmlu_fs = dict( + type=process_hf_dataset, + dataset=mmlu_fs_dataset, + tokenizer=tokenizer, + dataset_map_fn=default_map_fn, + max_length=max_length, + input_ids_with_output=False, + pack_to_max_length=False, + split='test') + +test_dataloader = dict( + batch_size=1, + num_workers=0, + dataset=test_mmlu_fs, + sampler=dict(type=DefaultSampler, shuffle=False), + collate_fn=dict(type=mmlu_collate_fn)) + +test_evaluator = dict( + type=MMLUMetric, tokenizer=tokenizer, prefix='mmlu_fs_test') + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict( + type=TrainLoop, max_epochs=max_epochs, val_interval=evaluate_steps) +val_cfg = dict(type='ValLoop') +test_cfg = dict(type='TestLoop') + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_open_platypus_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_open_platypus_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..d82350c28006d27123f49928bb6919e5c3ef8a92 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_open_platypus_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-7b' +use_varlen_attn = False + +# Data +data_path = 'garage-bAInd/Open-Platypus' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, +
use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_openorca_e1.py b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_openorca_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..7013a04ee1fed4252de3865f4f645007c3c10f64 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_openorca_e1.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openorca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-7b' +use_varlen_attn = False + +# Data +data_path = 'Open-Orca/OpenOrca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 5000 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + 
type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openorca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_sql_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_sql_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..5862203f15b7f69568033de95f09c8af5de50ec3 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_sql_e3.py @@ -0,0 +1,216 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import sql_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-7b' +use_varlen_attn = False + +# Data +data_path = 'b-mc2/sql-create-context' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.sql +evaluation_inputs = [ + ('CREATE TABLE station (name VARCHAR, lat VARCHAR, city VARCHAR)\n' + 'Find the name, latitude, and city of stations with latitude ' + 'above 50.'), + ('CREATE TABLE weather (zip_code VARCHAR, mean_visibility_miles ' + 'INTEGER)\n找到mean_visibility_miles最大的zip_code。') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + 
use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=sql_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_tiny_codes_e1.py b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_tiny_codes_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..9dbd7d1019ba058b1f286beb5fa7c3df9f7358d2 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_7b/internlm_7b_qlora_tiny_codes_e1.py @@ -0,0 +1,216 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import template_map_fn_factory, tiny_codes_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-7b' +use_varlen_attn = False + +# Data +data_path = 'nampdn-ai/tiny-codes' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + 
type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=tiny_codes_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_chat_20b/internlm_chat_20b_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_chat_20b/internlm_chat_20b_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..d1f9f117bf1564c9944e86e7c9ee367095c9c7c6 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_chat_20b/internlm_chat_20b_qlora_alpaca_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-chat-20b' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.internlm_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( +
type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_chat_20b/internlm_chat_20b_qlora_alpaca_enzh_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_chat_20b/internlm_chat_20b_qlora_alpaca_enzh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..341455a4816f0eafb23f42b4a203208a9220ae72 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_chat_20b/internlm_chat_20b_qlora_alpaca_enzh_e3.py @@ -0,0 +1,229 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-chat-20b' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.internlm_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + 
type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. 
+ timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_chat_20b/internlm_chat_20b_qlora_alpaca_enzh_oasst1_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_chat_20b/internlm_chat_20b_qlora_alpaca_enzh_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..aae0b1d1aeb0fe42ae58e3117dba16eea7980ab4 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_chat_20b/internlm_chat_20b_qlora_alpaca_enzh_oasst1_e3.py @@ -0,0 +1,244 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + oasst1_map_fn, template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-chat-20b' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +oasst1_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.internlm_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs
= [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +oasst1 = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=oasst1_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[alpaca_en, alpaca_zh, oasst1]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + 
type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_chat_20b/internlm_chat_20b_qlora_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_chat_20b/internlm_chat_20b_qlora_alpaca_zh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..4ea4a142d03a5742390bdbc8d31069b7e32eb889 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_chat_20b/internlm_chat_20b_qlora_alpaca_zh_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved.
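# NOTE: an eager (non-lazy) equivalent of the QLoRA setup that all of the
# configs above describe: a 4-bit NF4-quantized base model wrapped with a
# rank-64 LoRA adapter. A minimal sketch using transformers + peft directly;
# it assumes a CUDA GPU and the bitsandbytes package, and is not part of the
# diff:
import torch
from peft import LoraConfig, get_peft_model
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

quant_cfg = BitsAndBytesConfig(
    load_in_4bit=True,                     # store weights as 4-bit NF4
    bnb_4bit_compute_dtype=torch.float16,  # dequantize to fp16 for matmuls
    bnb_4bit_use_double_quant=True,        # also quantize the quant constants
    bnb_4bit_quant_type='nf4')
base = AutoModelForCausalLM.from_pretrained(
    'internlm/internlm-chat-20b',
    quantization_config=quant_cfg,
    trust_remote_code=True)
lora_cfg = LoraConfig(
    r=64, lora_alpha=16, lora_dropout=0.1, bias='none', task_type='CAUSAL_LM')
model = get_peft_model(base, lora_cfg)  # only the LoRA weights are trainable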
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_zh_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-chat-20b' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +prompt_template = PROMPT_TEMPLATE.internlm_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_zh, + 
sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +#                    PART 4  Scheduler & Optimizer                    # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +#                           PART 5  Runtime                           # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_chat_20b/internlm_chat_20b_qlora_code_alpaca_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_chat_20b/internlm_chat_20b_qlora_code_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..3540894a44069bc69bf9a50dc7ad0d8286427b3b --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_chat_20b/internlm_chat_20b_qlora_code_alpaca_e3.py @@ -0,0 +1,216 @@ +# Copyright (c) OpenMMLab. All rights reserved.
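The two-stage `param_scheduler` used throughout these configs expresses its boundaries in epochs (`warmup_ratio * max_epochs`) and relies on `convert_to_iter_based=True` to translate them into iterations. A small sketch of that arithmetic, with the steps-per-epoch figure as a purely hypothetical example (the real value depends on the packed dataset size):

# Hypothetical iteration count; max_epochs and warmup_ratio match the config.
max_epochs = 3
warmup_ratio = 0.03
iters_per_epoch = 10_000  # assumed for illustration

warmup_iters = int(warmup_ratio * max_epochs * iters_per_epoch)  # 900
total_iters = max_epochs * iters_per_epoch                       # 30000
print(f'LinearLR warms up for {warmup_iters} of {total_iters} iterations, '
      'then CosineAnnealingLR decays the rate to eta_min=0.0')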
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import code_alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-chat-20b' +use_varlen_attn = False + +# Data +data_path = 'HuggingFaceH4/CodeAlpaca_20K' +prompt_template = PROMPT_TEMPLATE.internlm_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 100 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=code_alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + 
pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +#                    PART 4  Scheduler & Optimizer                    # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +#                           PART 5  Runtime                           # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_chat_20b/internlm_chat_20b_qlora_lawyer_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_chat_20b/internlm_chat_20b_qlora_lawyer_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..9bad451d44127399d52a21e6f04ae26d2ee15790 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_chat_20b/internlm_chat_20b_qlora_lawyer_e3.py @@ -0,0 +1,236 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (crime_kg_assitant_map_fn, + law_reference_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-chat-20b' +use_varlen_attn = False + +# Data +# download data from https://github.com/LiuHC0428/LAW-GPT +crime_kg_assitant_path = './data/CrimeKgAssitant清洗后_52k.json' +law_reference_data_path = './data/训练数据_带法律依据_92k.json' +prompt_template = PROMPT_TEMPLATE.internlm_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.lawyer +evaluation_inputs = ['请问离婚需要准备什么材料?', '销售鳄鱼皮包违法吗?'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + 
use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +crime_kg_assitant = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=crime_kg_assitant_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=crime_kg_assitant_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +law_reference_data = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=law_reference_data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=law_reference_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[crime_kg_assitant, law_reference_data]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if 
use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_chat_20b/internlm_chat_20b_qlora_oasst1_512_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_chat_20b/internlm_chat_20b_qlora_oasst1_512_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..b34e479980d087ccfb73c406e1ea0a2af3516565 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_chat_20b/internlm_chat_20b_qlora_oasst1_512_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved.
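All of these configs keep the per-device `batch_size` at 1 and let `accumulative_counts = 16` provide the effective batch. A quick sketch of the resulting global batch size; the world size here is an assumed example and depends entirely on how training is launched:

batch_size = 1            # per device, as in the configs
accumulative_counts = 16  # gradient-accumulation steps
world_size = 8            # assumed GPU count, for illustration only

effective_batch = batch_size * accumulative_counts * world_size
print(f'each optimizer step averages gradients over {effective_batch} sequences')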
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-chat-20b' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.internlm_chat +max_length = 512 +pack_to_max_length = False + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + 
collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +#                    PART 4  Scheduler & Optimizer                    # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +#                           PART 5  Runtime                           # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_chat_20b/internlm_chat_20b_qlora_oasst1_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_chat_20b/internlm_chat_20b_qlora_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..7a1e3e3d574c88b2e14771dd56b14d30acecc59a --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_chat_20b/internlm_chat_20b_qlora_oasst1_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved.
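The `_512` variant above is the one config in this group that sets `pack_to_max_length = False`, so each conversation is padded on its own rather than concatenated with neighbors. As a toy sketch of what packing does when it is enabled (greedy concatenation up to `max_length`; xtuner's real packing additionally tracks per-sample boundaries so attention does not cross them):

from typing import List

def pack_sequences(samples: List[List[int]], max_length: int) -> List[List[int]]:
    """Greedily concatenate token-id lists into chunks of at most max_length."""
    packed, current = [], []
    for ids in samples:
        if current and len(current) + len(ids) > max_length:
            packed.append(current)
            current = []
        current.extend(ids[:max_length])  # clip a single over-long sample
    if current:
        packed.append(current)
    return packed

# Three samples of 300, 150 and 100 tokens pack into two 512-capped chunks.
chunks = pack_sequences([[1] * 300, [2] * 150, [3] * 100], max_length=512)
print([len(c) for c in chunks])  # [450, 100]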
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-chat-20b' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.internlm_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + 
collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +#                    PART 4  Scheduler & Optimizer                    # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +#                           PART 5  Runtime                           # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_chat_20b/internlm_chat_20b_qlora_open_platypus_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_chat_20b/internlm_chat_20b_qlora_open_platypus_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..512f27b772d3efc298a8bbc829d1528c729f1f0e --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_chat_20b/internlm_chat_20b_qlora_open_platypus_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved.
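Every config in this series uses the same adapter shape, `r=64` with `lora_alpha=16`, which attaches a pair of low-rank matrices to each adapted projection and scales their product by `lora_alpha / r`. A back-of-the-envelope sketch for a single hypothetical 4096x4096 projection:

r = 64
lora_alpha = 16
d_in = d_out = 4096  # assumed hidden size of one adapted projection

lora_params = d_in * r + r * d_out  # A is (d_in, r), B is (r, d_out)
full_params = d_in * d_out
scaling = lora_alpha / r            # multiplies B @ A in the forward pass

print(f'{lora_params:,} trainable vs. {full_params:,} frozen '
      f'({lora_params / full_params:.1%} of the layer), scaling={scaling}')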
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-chat-20b' +use_varlen_attn = False + +# Data +data_path = 'garage-bAInd/Open-Platypus' +prompt_template = PROMPT_TEMPLATE.internlm_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, 
shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +#                    PART 4  Scheduler & Optimizer                    # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +#                           PART 5  Runtime                           # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..efdea7af55245bc64852793729115776cee0a06f --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_alpaca_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved.
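The shared `CheckpointHook` settings (`interval=save_steps`, `max_keep_ckpts=save_total_limit`) amount to a rolling window over the most recent checkpoints. A minimal sketch of that retention policy, independent of mmengine:

from collections import deque

save_steps = 500
save_total_limit = 2  # -1 would keep every checkpoint

kept = deque()
for step in range(1, 2001):
    if step % save_steps == 0:
        kept.append(f'iter_{step}.pth')
        if save_total_limit > 0 and len(kept) > save_total_limit:
            print('would delete', kept.popleft())
print('remaining:', list(kept))  # ['iter_1500.pth', 'iter_2000.pth']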
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-chat-7b' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.internlm_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=DefaultSampler, shuffle=True), + 
collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +#                    PART 4  Scheduler & Optimizer                    # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +#                           PART 5  Runtime                           # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_alpaca_enzh_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_alpaca_enzh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..9192aacb6ce87d240f2d7b0e70288c4f2b7232ff --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_alpaca_enzh_e3.py @@ -0,0 +1,229 @@ +# Copyright (c) OpenMMLab. All rights reserved.
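A pattern worth calling out in all of these files is the mmengine-style lazy config: `dict(type=SomeClassOrFunction, ...)` is stored as plain data and only instantiated by the runner, which is why a config can reference `AutoTokenizer.from_pretrained` without loading anything at import time. A simplified sketch of how such a dict can be built recursively (the real mmengine builder also resolves string types through registries and handles more edge cases):

def build(cfg):
    """Recursively instantiate a dict(type=..., **kwargs) style config."""
    if isinstance(cfg, dict) and 'type' in cfg:
        kwargs = {k: build(v) for k, v in cfg.items() if k != 'type'}
        return cfg['type'](**kwargs)  # 'type' is a class or callable
    if isinstance(cfg, (list, tuple)):
        return type(cfg)(build(v) for v in cfg)
    return cfg

# e.g. build(dict(type=dict, a=1)) == {'a': 1}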
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-chat-7b' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.internlm_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + 
dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +#                    PART 4  Scheduler & Optimizer                    # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +#                           PART 5  Runtime                           # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_alpaca_enzh_oasst1_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_alpaca_enzh_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..4c6921e9b56e54a5944a359895a306bffe543e64 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_alpaca_enzh_oasst1_e3.py @@ -0,0 +1,244 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + oasst1_map_fn, template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-chat-7b' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +oasst1_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.internlm_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + 
type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +oasst1 = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=oasst1_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[alpaca_en, alpaca_zh, oasst1]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the 
training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_alpaca_zh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..140e95b6f3ff355639ef003e184d279a095775a3 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_alpaca_zh_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved.
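+# NOTE: every `dict(type=..., ...)` in this file is an mmengine-style lazy
+# config: nothing is instantiated at import time, and the runner builds the
+# real objects when training starts. A minimal sketch of that resolution
+# (variable names here are illustrative, not part of the config):
+#
+#     from mmengine.config import Config
+#     cfg = Config.fromfile('internlm_chat_7b_qlora_alpaca_zh_e3.py')
+#     tok_cfg = dict(cfg.tokenizer)
+#     build_fn = tok_cfg.pop('type')   # AutoTokenizer.from_pretrained
+#     tokenizer = build_fn(**tok_cfg)  # the actual tokenizer is created here
+#
+# With batch_size=1 and accumulative_counts=16 below, the effective batch size
+# per optimizer step is 1 * 16 * world_size (i.e. 16 on a single GPU).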
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_zh_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-chat-7b' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +prompt_template = PROMPT_TEMPLATE.internlm_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_zh, + 
sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_arxiv_gentitle_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_arxiv_gentitle_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..14f05321263b387b9dcce3a35cf00e9f8693a6c9 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_arxiv_gentitle_e3.py @@ -0,0 +1,247 @@ +# Copyright (c) OpenMMLab. All rights reserved.
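+# NOTE: a typical end-to-end run with this config (the data-preparation steps
+# repeat the comments in PART 1 below; the snapshot filename is an example,
+# and `xtuner train` is the standard launcher for built-in configs):
+#
+#     # 1. download the metadata dump from
+#     #    https://kaggle.com/datasets/Cornell-University/arxiv
+#     # 2. convert it into the JSON file this config reads
+#     xtuner preprocess arxiv ./arxiv-metadata-oai-snapshot.json \
+#         ./data/arxiv_data.json
+#     # 3. launch single-GPU QLoRA fine-tuning
+#     xtuner train internlm_chat_7b_qlora_arxiv_gentitle_e3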
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import arxiv_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-chat-7b' +use_varlen_attn = False + +# Data +# 1. Download data from https://kaggle.com/datasets/Cornell-University/arxiv +# 2. Process data by `xtuner preprocess arxiv ${DOWNLOADED_DATA} ./data/arxiv_data.json [optional arguments]` # noqa: E501 +data_path = './data/arxiv_data.json' +prompt_template = PROMPT_TEMPLATE.internlm_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.arxiv_gentile +evaluation_inputs = [ + ('We present InternLM, a multilingual foundational language ' + 'model with 104B parameters. InternLM is pre-trained on a large ' + 'corpora with 1.6T tokens with a multi-phase progressive ' + 'process, and then fine-tuned to align with human preferences. ' + 'We also developed a training system called Uniscale-LLM for ' + 'efficient large language model training. The evaluation on a ' + 'number of benchmarks shows that InternLM achieves ' + 'state-of-the-art performance in multiple aspects, including ' + 'knowledge understanding, reading comprehension, mathematics, ' + 'and coding. With such well-rounded capabilities, InternLM ' + 'achieves outstanding performances on comprehensive exams, ' + 'including MMLU, AGIEval, C-Eval and GAOKAO-Bench, without ' + 'resorting to external tools. On these benchmarks, InternLM ' + 'not only significantly outperforms open-source models, but ' + 'also obtains superior performance compared to ChatGPT. Also, ' + 'InternLM demonstrates excellent capability of understanding ' + 'Chinese language and Chinese culture, which makes it a ' + 'suitable foundation model to support Chinese-oriented language ' + 'applications. 
This manuscript gives a detailed study of ' + 'our results, with benchmarks and examples across a diverse ' + 'set of knowledge domains and tasks.'), + ('In this work, we develop and release Llama 2, a collection of ' + 'pretrained and fine-tuned large language models (LLMs) ranging ' + 'in scale from 7 billion to 70 billion parameters.\nOur ' + 'fine-tuned LLMs, called LLAMA 2-CHAT, are optimized for ' + 'dialogue use cases. Our models outperform open-source chat ' + 'models on most benchmarks we tested, and based on our human ' + 'evaluations for helpfulness and safety, may be a suitable ' + 'substitute for closedsource models. We provide a detailed ' + 'description of our approach to fine-tuning and safety ' + 'improvements of LLAMA 2-CHAT in order to enable the community ' + 'to build on our work and contribute to the responsible ' + 'development of LLMs.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=arxiv_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + 
by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_code_alpaca_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_code_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..0ab192f26b38ed3a1b8384e4cd20283d34c86e92 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_code_alpaca_e3.py @@ -0,0 +1,216 @@ +# Copyright (c) OpenMMLab. All rights reserved.
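+# NOTE: the LR schedule below is defined in epochs and converted to iterations
+# via `convert_to_iter_based=True`: LinearLR warms the LR up over
+# warmup_ratio * max_epochs = 0.03 * 3 = 0.09 epoch, after which
+# CosineAnnealingLR decays it from 2e-4 towards 0 for the remaining
+# 2.91 epochs. A worked example, assuming an illustrative dataloader length:
+#
+#     iters_per_epoch = 10_000                     # len(train_dataloader)
+#     warmup_iters = int(0.09 * iters_per_epoch)   # -> 900 warm-up iterations
+#     cosine_iters = 3 * iters_per_epoch - warmup_iters   # -> 29_100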
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import code_alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-chat-7b' +use_varlen_attn = False + +# Data +data_path = 'HuggingFaceH4/CodeAlpaca_20K' +prompt_template = PROMPT_TEMPLATE.internlm_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 100 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=code_alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + 
pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_colorist_e5.py b/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_colorist_e5.py new file mode 100644 index 0000000000000000000000000000000000000000..bf0a83d87ac562f77754e357f7306e1ab7f53297 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_colorist_e5.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import colors_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-chat-7b' +use_varlen_attn = False + +# Data +data_path = 'burkelibbey/colors' +prompt_template = PROMPT_TEMPLATE.internlm_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 5 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 200 +SYSTEM = SYSTEM_TEMPLATE.colorist +evaluation_inputs = [ + '请给我一个像天空一样清澈透明的蓝色。', 'Please give me a clear blue like the sky.' 
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=colors_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. 
+ timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_lawyer_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_lawyer_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..f3504de6de06a357377bc3ad75839b8bcb62f664 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_lawyer_e3.py @@ -0,0 +1,236 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (crime_kg_assitant_map_fn, + law_reference_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-chat-7b' +use_varlen_attn = False + +# Data +# download data from https://github.com/LiuHC0428/LAW-GPT +crime_kg_assitant_path = './data/CrimeKgAssitant清洗后_52k.json' +law_reference_data_path = './data/训练数据_带法律依据_92k.json' +prompt_template = PROMPT_TEMPLATE.internlm_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.lawyer +evaluation_inputs = 
['请问离婚需要准备什么材料?', '销售鳄鱼皮包违法吗?'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +crime_kg_assitant = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=crime_kg_assitant_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=crime_kg_assitant_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +law_reference_data = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=law_reference_data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=law_reference_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[crime_kg_assitant, law_reference_data]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### 
+# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_medical_e1.py b/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_medical_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..7709ae3abc9713e7a2ff329bbc313edc906734a8 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_medical_e1.py @@ -0,0 +1,214 @@ +# Copyright (c) OpenMMLab. All rights reserved.
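+# NOTE: unlike most configs in this folder, this dataset requires an explicit
+# Hugging Face config name. Outside the lazy config, the equivalent plain
+# `datasets` call (a sketch; it needs network access to the Hub) would be:
+#
+#     from datasets import load_dataset
+#     ds = load_dataset('shibing624/medical', name='finetune')
+#     print(ds['train'][0])  # inspect one raw record before the map_fn runs
+#
+# which is what `dataset=dict(type=load_dataset, path=data_path,
+# name=data_config_name)` in PART 3 builds.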
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import medical_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-chat-7b' +use_varlen_attn = False + +# Data +data_path = 'shibing624/medical' +data_config_name = 'finetune' +prompt_template = PROMPT_TEMPLATE.internlm_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.medical +evaluation_inputs = [ + '我有家族遗传性的过敏,请问可以可以献血吗?', '我爷爷有高血压,请问他可以喝咖啡吗?', + '我女儿今年3岁了,从昨天晚上九点开始腹泻,到现在已经八个小时了,请问应该怎么办?' 
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path, name=data_config_name), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=medical_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. 
+ timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_oasst1_512_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_oasst1_512_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..378591a1b2438abb7d1b61294b135514bbc4a595 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_oasst1_512_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-chat-7b' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.internlm_chat +max_length = 512 +pack_to_max_length = False + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer #
+####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. 
+ logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_oasst1_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..af8170d371692fe7e102492fa5c51045174fa7a2 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_oasst1_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-chat-7b' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.internlm_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + 
type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_open_platypus_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_open_platypus_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..c57ff374668d28168b6a60cf664069f2bf54c1d9
--- /dev/null
+++ b/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_open_platypus_e3.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'internlm/internlm-chat-7b'
+use_varlen_attn = False
+
+# Data
+data_path = 'garage-bAInd/Open-Platypus'
+prompt_template = PROMPT_TEMPLATE.internlm_chat
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = SYSTEM_TEMPLATE.alpaca
+evaluation_inputs = [
+    '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai'
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
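+    # QLoRA recipe: the base LLM below is loaded in 4-bit NF4 via
+    # quantization_config, and only the rank-64 adapters in `lora` are trained.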
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=data_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=alpaca_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_openorca_e1.py b/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_openorca_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..2056ee5b2c07d87bfe5ac26022edc96212603b84 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_openorca_e1.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openorca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-chat-7b' +use_varlen_attn = False + +# Data +data_path = 'Open-Orca/OpenOrca' +prompt_template = PROMPT_TEMPLATE.internlm_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 5000 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + 
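        # 4-bit NF4 storage with double quantization while matmuls run in fp16
+        # (bnb_4bit_compute_dtype): the standard QLoRA memory/speed trade-off.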
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=data_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=openorca_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_sql_e3.py b/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_sql_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..23b928b08817b11306256fe3317adf5235bbf946 --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_sql_e3.py @@ -0,0 +1,216 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import sql_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-chat-7b' +use_varlen_attn = False + +# Data +data_path = 'b-mc2/sql-create-context' +prompt_template = PROMPT_TEMPLATE.internlm_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.sql +evaluation_inputs = [ + ('CREATE TABLE station (name VARCHAR, lat VARCHAR, city VARCHAR)\n' + 'Find the name, latitude, and city of stations with latitude ' + 'above 50.'), + ('CREATE TABLE weather (zip_code VARCHAR, mean_visibility_miles ' + 'INTEGER)\n找到mean_visibility_miles最大的zip_code。') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + 
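    # SupervisedFinetune wraps the quantized HF causal LM together with the
+    # LoRA adapters configured below; only adapter weights receive gradients.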
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=data_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=sql_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_tiny_codes_e1.py b/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_tiny_codes_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..ac788632afc6fa2d2e533c711c1f258156c7718d --- /dev/null +++ b/data/xtuner/xtuner/configs/internlm/internlm_chat_7b/internlm_chat_7b_qlora_tiny_codes_e1.py @@ -0,0 +1,216 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import template_map_fn_factory, tiny_codes_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm-chat-7b' +use_varlen_attn = False + +# Data +data_path = 'nampdn-ai/tiny-codes' +prompt_template = PROMPT_TEMPLATE.internlm_chat +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + 
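    # right padding is the usual choice for causal-LM fine-tuning; left
+    # padding is mainly needed for batched generation at inference time.
+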
padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=tiny_codes_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/internvl/README.md b/data/xtuner/xtuner/configs/internvl/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..1f1acf191b819febeee000eba80c76d2f8bdc916
--- /dev/null
+++ b/data/xtuner/xtuner/configs/internvl/README.md
@@ -0,0 +1,152 @@
+# InternVL Full Pipeline
+
+English | [简体中文](./README_zh-CN.md)
+
+## InternVL 2
+
+> [InternVL-2: Better than the Best—Expanding Performance Boundaries of Open-Source Multimodal Models with the Progressive Scaling Strategy](https://internvl.github.io/blog/2024-07-02-InternVL-2.0/)
+
+We introduce InternVL-2, currently the most powerful open-source Multimodal Large Language Model (MLLM). The InternVL-2 family includes models ranging from a 2B model, suitable for edge devices, to a 108B model, which is significantly more powerful. With larger-scale language models, InternVL-2-Pro demonstrates outstanding multimodal understanding capabilities, matching the performance of commercial closed-source models across various benchmarks.
+
+The InternVL-2 family is built upon the following designs:
+
+- Progressive alignment with larger language models: We introduce a progressive alignment training strategy, resulting in the first vision foundation model aligned with large language models. By employing a progressive training strategy where the model scales from small to large while the data is refined from coarse to fine, we have completed the training of large models at a relatively low cost. This approach has demonstrated excellent performance even with limited resources.
+- Multimodal input: With one set of parameters, our model supports multiple modalities of input, including text, images, video, audio, and 3D point clouds.
+- Multitask output: Our model supports various output formats, such as images, bounding boxes, and masks, demonstrating extensive versatility. By connecting the MLLM with multiple downstream task decoders, InternVL-2 can be generalized to hundreds of vision-language tasks while achieving performance comparable to expert models.
+
+### Basic Introduction
+
+- `./v2/` contains the configuration files for training InternVL 2
+- Fine-tuning of the InternVL 2 2B/4B/8B/26B models is currently supported in full/LoRA/QLoRA single-image mode; fine-tuning on multiple images and videos will be supported as soon as possible
+- After training, you can use the `./v1_5/convert_to_official.py` script to convert a model trained by XTuner to the official format, so as to reuse all the officially supported toolchains
+- All configurations assume 8xA100 80G graphics cards: 2B/4B can train with ZERO1, the 8B model can use ZERO2, and the 26B model must run ZERO3. The parameters have not been heavily tuned, so you can modify them according to your own needs
+- The configs are currently validated with LLaVA SFT data, which cannot fully reflect fine-tuning performance; you can customize the data according to your own needs, and we will provide a relatively fair fine-tuning dataset later
+
+### Data preparation
+
+If you also want to use the LLaVA SFT dataset for training, please refer to the [document](../../../docs/en/user_guides/dataset_prepare.md#llava-dataset) to prepare the data.
+
+Custom data is supported in multiple json and jsonl formats; the internal organization can follow the LLaVA SFT format, and data sampling operations are also supported.
+
+**(1) Support multiple json or jsonl data**
+
+```text
+llava_dataset = dict(
+    type=InternVL_V1_5_Dataset,
+    model_path=path,
+    data_paths=['a.json','b.jsonl','c.json'],
+    image_folders=['a',None,'c'],
+    template=prompt_template,
+    max_length=max_length)
+```
+
+**(2) Support custom sampling**
+
+```text
+llava_dataset = dict(
+    type=InternVL_V1_5_Dataset,
+    model_path=path,
+    data_paths=['a.json','b.jsonl','c.json'],
+    image_folders=['a',None,'c'],
+    repeat_times=[2,0.5,3.5],
+    template=prompt_template,
+    max_length=max_length)
+```
+
+### Training
+
+The provided configurations are mainly intended for fine-tuning on top of the official weights. After preparing the data, you can start training with the following command:
+
+```bash
+NPROC_PER_NODE=8 xtuner train internvl_v2_internlm2_5_8b_lora_finetune --deepspeed deepspeed_zero2
+```
+
+Checkpoints are saved in `./work_dirs/internvl_v2_internlm2_5_8b_lora_finetune/` by default.
+
+### Model Conversion
+
+Training produces a set of weights at `./work_dirs/internvl_v2_internlm2_5_8b_lora_finetune/iter_xxx.pth`; to make evaluation and dialogue easier, we can convert them to official weights.
+
+```bash
+python xtuner/configs/internvl/v1_5/convert_to_official.py xtuner/configs/internvl/v2/internvl_v2_internlm2_5_8b_lora_finetune.py ./work_dirs/internvl_v2_internlm2_5_8b_lora_finetune/iter_xxx.pth ./work_dirs/internvl_v2_internlm2_5_8b_lora_finetune/convert_model/
+```
+
+This generates a complete set of official-format weights, configuration included, under `./work_dirs/internvl_v2_internlm2_5_8b_lora_finetune/convert_model`; you can then use the [official toolchain](https://huggingface.co/OpenGVLab/InternVL2-8B) for evaluation and dialogue.
+
+If you encounter any problems during use, please feel free to contact us!!!
+
+## InternVL 1.5
+
+> [How Far Are We to GPT-4V? Closing the Gap to Commercial Multimodal Models with Open-Source Suites](https://arxiv.org/abs/2404.16821)
+
+In this report, we introduce InternVL 1.5, an open-source multimodal large language model (MLLM) to bridge the capability gap between open-source and proprietary commercial models in multimodal understanding.
We introduce three simple improvements: (1) Strong Vision Encoder: we explored a continuous learning strategy for the large-scale vision foundation model -- InternViT-6B, boosting its visual understanding capabilities and enabling it to be transferred and reused across different LLMs. (2) Dynamic High-Resolution: we divide images into 1 to 40 tiles of 448×448 pixels according to the aspect ratio and resolution of the input images, which supports input up to 4K resolution. (3) High-Quality Bilingual Dataset: we carefully collected a high-quality bilingual dataset that covers common scenes and document images, annotated with English and Chinese question-answer pairs, significantly enhancing performance in OCR- and Chinese-related tasks. We evaluate InternVL 1.5 through a series of benchmarks and comparative studies. Compared to both open-source and proprietary models, InternVL 1.5 shows competitive performance, achieving state-of-the-art results in 8 of 18 benchmarks.
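+
+As a back-of-the-envelope check of the tile budget (our own arithmetic, not a figure from the paper): 40 tiles arranged as an 8×5 grid of 448-pixel tiles span 3584×2240 pixels, which is how the tiling scheme accommodates roughly 4K-resolution input.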
+
+### Basic Introduction
+
+- `./v1_5/` contains the configuration files for training InternVL 1.5
+- Full/LoRA/QLoRA fine-tuning of the InternVL 2B/4B/26B models is supported; considering both efficiency and performance, it is recommended to choose the 4B model first
+- After training, you can use the `./v1_5/convert_to_official.py` script to convert a model trained by XTuner to the official format, so as to reuse all the officially supported toolchains
+- All configurations assume 8xA100 80G graphics cards: 2B/4B can train with ZERO1 and the 26B model must run ZERO3. The parameters have not been heavily tuned, so you can modify them according to your own needs
+- The configs are currently validated with LLaVA SFT data, which cannot fully reflect fine-tuning performance; you can customize the data according to your own needs, and we will provide a relatively fair fine-tuning dataset later
+
+### Data preparation
+
+If you also want to use the LLaVA SFT dataset for training, please refer to the [document](../../../docs/en/user_guides/dataset_prepare.md#llava-dataset) to prepare the data.
+
+Custom data is supported in multiple json and jsonl formats; the internal organization can follow the LLaVA SFT format, and data sampling operations are also supported.
+
+**(1) Support multiple json or jsonl data**
+
+```text
+llava_dataset = dict(
+    type=InternVL_V1_5_Dataset,
+    model_path=path,
+    data_paths=['a.json','b.jsonl','c.json'],
+    image_folders=['a',None,'c'],
+    template=prompt_template,
+    max_length=max_length)
+```
+
+**(2) Support custom sampling**
+
+```text
+llava_dataset = dict(
+    type=InternVL_V1_5_Dataset,
+    model_path=path,
+    data_paths=['a.json','b.jsonl','c.json'],
+    image_folders=['a',None,'c'],
+    repeat_times=[2,0.5,3.5],
+    template=prompt_template,
+    max_length=max_length)
+```
+
+### Training
+
+The provided configurations are mainly intended for fine-tuning on top of the official weights. After preparing the data, you can start training with the following command:
+
+```bash
+NPROC_PER_NODE=8 xtuner train internvl_v1_5_phi3_4b_lora_finetune --deepspeed deepspeed_zero1
+# NPROC_PER_NODE=8 xtuner train internvl_v1_5_internlm2_26b_lora_finetune.py --deepspeed deepspeed_zero3
+```
+
+Checkpoints are saved in `./work_dirs/internvl_v1_5_phi3_4b_lora_finetune/` by default.
+
+### Model Conversion
+
+Training produces a set of weights at `./work_dirs/internvl_v1_5_phi3_4b_lora_finetune/iter_xxx.pth`; to make evaluation and dialogue easier, we can convert them to official weights.
+
+```bash
+python xtuner/configs/internvl/v1_5/convert_to_official.py xtuner/configs/internvl/v1_5/internvl_v1_5_phi3_4b_lora_finetune.py ./work_dirs/internvl_v1_5_phi3_4b_lora_finetune/iter_xxx.pth ./work_dirs/internvl_v1_5_phi3_4b_lora_finetune/internvl_v1_5_phi3_4b/
+```
+
+This generates a complete set of official-format weights, configuration included, under `./work_dirs/internvl_v1_5_phi3_4b_lora_finetune/internvl_v1_5_phi3_4b/`; you can then use the [official toolchain](https://github.com/OpenGVLab/InternVL) for evaluation and dialogue (a quick sanity-check sketch follows below).
+
+If you encounter any problems during use, please feel free to contact us!!!
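+
+As a quick smoke test of the converted folder (a minimal sketch, assuming the converted directory behaves like a standard `trust_remote_code` Hugging Face checkpoint; the path below is illustrative):
+
+```python
+import torch
+from transformers import AutoModel, AutoTokenizer
+
+path = './work_dirs/internvl_v1_5_phi3_4b_lora_finetune/internvl_v1_5_phi3_4b/'
+# trust_remote_code loads the InternVL modeling code shipped with the weights
+model = AutoModel.from_pretrained(
+    path, torch_dtype=torch.bfloat16, trust_remote_code=True).eval()
+tokenizer = AutoTokenizer.from_pretrained(path, trust_remote_code=True)
+print(type(model).__name__)  # expect the official InternVL chat model class
+```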
diff --git a/data/xtuner/xtuner/configs/internvl/README_zh-CN.md b/data/xtuner/xtuner/configs/internvl/README_zh-CN.md new file mode 100644 index 0000000000000000000000000000000000000000..cdaa59348a42cd65a014a0a93446b9f324853497 --- /dev/null +++ b/data/xtuner/xtuner/configs/internvl/README_zh-CN.md @@ -0,0 +1,152 @@ +# InterVL 全流程 + +[English](./README.md) | 简体中文 + +## InterVL 2 + +> [InternVL-2: Better than the Best—Expanding Performance Boundaries of Open-Source Multimodal Models with the Progressive Scaling Strategy](https://internvl.github.io/blog/2024-07-02-InternVL-2.0/) + +我们引入了 InternVL-2,目前最强大的开源多模态大语言模型(MLLM)。InternVL-2 系列包括从适合于边缘设备的 2B 模型到强大的 108B 模型等多种规模的模型。借助更大规模的语言模型,InternVL-2-Pro 展现出了出色的多模态理解能力,在各种基准测试中的性能与商业闭源模型相匹配。 + +InternVL-2 系列基于以下设计: + +- 渐进式的大型语言模型:我们引入了一种渐进式对齐训练策略,实现了首个与大型语言模型对齐的视觉基础模型。通过采用从小到大模型扩展、从粗到细数据优化的渐进式训练策略,我们以较低的成本完成了大模型的训练。这种方法已经展示了出色的性能,资源有限的情况下也能取得良好的结果。 +- 多模态输入:使用一套参数,我们的模型支持文本、图像、视频、音频和 3D 点云等多种输入模态。 +- 多任务输出:我们的模型支持图像、边界框和掩码等各种输出格式,展现出广泛的多功能性。通过将 MLLM 与多个下游任务解码器相连接,InternVL-2 可以泛化到数百个视觉语言任务,并取得与专家模型相当的性能。 + +
+ +### 基本说明 + +- `./v2/` 包含着 InterVL 2 训练配置的配置文件 +- 支持了 InternVL 2B/4B/8B/26B 模型全量/LoRA/QLoRA 单图模式的微调,会尽快支持多图和视频的微调。 +- 在训练完成后,可以使用 `./v1_5/convert_to_official.py` 脚本将 XTuner 训练的模型转换为官方格式,从而复用官方所支持的所有工具链 +- 目前所有配置都是以 8xA100 80G 显卡为基准,2B/4B 可以使用 ZERO1 训练,8B 模型要 ZERO2 运行,26B 模型必须要 ZERO3,并且没有对参数进行过多的调整,你可以按照你自己的需求进行修改 +- 目前是以 LLaVA SFT 数据进行验证,无法充分反应微调性能,你可以根据自己的需求进行数据自定义,后续我们会提供一个相对公平的微调数据集 + +### 数据准备 + +如果你也想使用 LLaVA SFT 数据集进行训练,请参考[文档](../../../docs/zh_cn/user_guides/dataset_prepare.md#llava-dataset) 准备数据。 + +对于自定义数据,支持多种 json 和 jsonl 格式,内部数据组织可以参考 LLaVA SFT 格式,且支持数据采样操作。 + +**(1) 支持多个 json 或者 jsonl 数据** + +```text +llava_dataset = dict( + type=InternVL_V1_5_Dataset, + model_path=path, + data_paths=['a.json','b.jsonl','c.json'], + image_folders=['a',None,'c'], + template=prompt_template, + max_length=max_length) +``` + +**(2) 支持自定义采样** + +```text +llava_dataset = dict( + type=InternVL_V1_5_Dataset, + model_path=path, + data_paths=['a.json','b.jsonl','c.json'], + image_folders=['a',None,'c'], + repeat_times=[2,0.5,3.5], + template=prompt_template, + max_length=max_length) +``` + +### 训练流程 + +所提供的配置主要用于基于官方权重继续微调。在准备好数据后,你可以使用以下命令进行训练: + +```bash +NPROC_PER_NODE=8 xtuner train internvl_v2_internlm2_5_8b_lora_finetune --deepspeed deepspeed_zero2 +``` + +默认保存在 `./work_dirs/internvl_v2_internlm2_5_8b_lora_finetune/`。 + +### 模型转换 + +训练后,我们将获得一组权重即 `./work_dirs/internvl_v2_internlm2_5_8b_lora_finetune/iter_xxx.pth`,为了方便评测和对话,可以将其转换为官方权重。 + +```bash +python xtuner/configs/internvl/v1_5/convert_to_official.py xtuner/configs/internvl/v2/internvl_v2_internlm2_5_8b_lora_finetune.py ./work_dirs/internvl_v2_internlm2_5_8b_lora_finetune/iter_xxx.pth ./work_dirs/internvl_v2_internlm2_5_8b_lora_finetune/convert_model/ +``` + +此时,会在 `./work_dirs/internvl_v2_internlm2_5_8b_lora_finetune/convert_model` 下生成一组包括配置的完整官方权重,你可以使用[官方工具链](https://huggingface.co/OpenGVLab/InternVL2-8B)进行评测和对话。 + +如果你在使用中碰到任何问题,欢迎联系我们!!! + +## InterVL 1.5 + +> [How Far Are We to GPT-4V? Closing the Gap to Commercial Multimodal Models with Open-Source Suites](https://arxiv.org/abs/2404.16821) + +在本报告中,我们介绍了开源多模态大语言模型 InternVL 1.5,以弥补开源模型与商业专有模型在多模态理解能力上的差距。我们引入了三项简单的改进:(1) 强大的视觉编码器:我们探索了大规模视觉基础模型 InternViT-6B 的连续学习策略,提升了其视觉理解能力,并使其可以在不同的大语言模型中进行迁移和重复利用。(2) 动态高分辨率:我们根据输入图像的长宽比和分辨率,将图像划分为从1到40个448×448像素的瓦片,支持高达4K分辨率的输入。(3) 高质量双语数据集:我们精心收集了一个高质量的双语数据集,涵盖了常见场景、文档图像,并用英语和中文问答对进行了注释,显著提升了在OCR和中文相关任务中的性能。我们通过一系列基准测试和对比研究评估了 InternVL 1.5。与开源和专有模型相比,InternVL 1.5 表现出了竞争力,在18个基准中的8个中取得了最先进的结果。 + +
+
+### 基本说明
+
+- `./v1_5/` 包含着 InterVL 1.5 训练配置的配置文件
+- 支持 InternVL 2B/4B/26B 模型全量/LoRA/QLoRA 微调,综合考虑效率性能,建议你优先选择 4B 模型
+- 在训练完成后,可以使用 `./v1_5/convert_to_official.py` 脚本将 XTuner 训练的模型转换为官方格式,从而复用官方所支持的所有工具链
+- 目前所有配置都是以 8xA100 80G 显卡为基准,2B/4B 可以使用 ZERO1 训练,26B 模型必须要 ZERO3 运行,并且没有对参数进行过多的调整,你可以按照你自己的需求进行修改
+- 目前是以 LLaVA SFT 数据进行验证,无法充分反应微调性能,你可以根据自己的需求进行数据自定义,后续我们会提供一个相对公平的微调数据集
+
+### 数据准备
+
+如果你也想使用 LLaVA SFT 数据集进行训练,请参考[文档](../../../docs/zh_cn/user_guides/dataset_prepare.md#llava-dataset) 准备数据。
+
+对于自定义数据,支持多种 json 和 jsonl 格式,内部数据组织可以参考 LLaVA SFT 格式,且支持数据采样操作。
+
+**(1) 支持多个 json 或者 jsonl 数据**
+
+```text
+llava_dataset = dict(
+    type=InternVL_V1_5_Dataset,
+    model_path=path,
+    data_paths=['a.json','b.jsonl','c.json'],
+    image_folders=['a',None,'c'],
+    template=prompt_template,
+    max_length=max_length)
+```
+
+**(2) 支持自定义采样**
+
+```text
+llava_dataset = dict(
+    type=InternVL_V1_5_Dataset,
+    model_path=path,
+    data_paths=['a.json','b.jsonl','c.json'],
+    image_folders=['a',None,'c'],
+    repeat_times=[2,0.5,3.5],
+    template=prompt_template,
+    max_length=max_length)
+```
+
+### 训练流程
+
+所提供的配置主要用于基于官方权重继续微调。在准备好数据后,你可以使用以下命令进行训练:
+
+```bash
+NPROC_PER_NODE=8 xtuner train internvl_v1_5_phi3_4b_lora_finetune --deepspeed deepspeed_zero1
+# NPROC_PER_NODE=8 xtuner train internvl_v1_5_internlm2_26b_lora_finetune.py --deepspeed deepspeed_zero3
+```
+
+默认保存在 `./work_dirs/internvl_v1_5_phi3_4b_lora_finetune/`。
+
+### 模型转换
+
+训练后,我们将获得一组权重即 `./work_dirs/internvl_v1_5_phi3_4b_lora_finetune/iter_xxx.pth`,为了方便评测和对话,可以将其转换为官方权重。
+
+```bash
+python xtuner/configs/internvl/v1_5/convert_to_official.py xtuner/configs/internvl/v1_5/internvl_v1_5_phi3_4b_lora_finetune.py ./work_dirs/internvl_v1_5_phi3_4b_lora_finetune/iter_xxx.pth ./work_dirs/internvl_v1_5_phi3_4b_lora_finetune/internvl_v1_5_phi3_4b/
+```
+
+此时,会在 `./work_dirs/internvl_v1_5_phi3_4b_lora_finetune/internvl_v1_5_phi3_4b/` 下生成一组包括配置的完整官方权重,你可以使用[官方工具链](https://github.com/OpenGVLab/InternVL)进行评测和对话。
+
+如果你在使用中碰到任何问题,欢迎联系我们!!!
diff --git a/data/xtuner/xtuner/configs/internvl/v1_5/convert_to_official.py b/data/xtuner/xtuner/configs/internvl/v1_5/convert_to_official.py new file mode 100644 index 0000000000000000000000000000000000000000..765855daad063c0c9d11ca482a5992d60a0adb1b --- /dev/null +++ b/data/xtuner/xtuner/configs/internvl/v1_5/convert_to_official.py @@ -0,0 +1,56 @@ +import argparse +import os.path as osp + +import torch +from mmengine.config import Config +from transformers import AutoTokenizer + +from xtuner.model.utils import LoadWoInit +from xtuner.registry import BUILDER + + +def convert_to_official(config, trained_path, save_path): + cfg = Config.fromfile(config) + cfg.model.pretrained_pth = trained_path + cfg.model.quantization_vit = False + cfg.model.quantization_llm = False + + with LoadWoInit(): + model = BUILDER.build(cfg.model) + model.to(torch.bfloat16) + + if model.use_visual_encoder_lora: + vision_model = model.model.vision_model.merge_and_unload() + model.model.vision_model = vision_model + + if model.use_llm_lora: + language_model = model.model.language_model.merge_and_unload() + model.model.language_model = language_model + + model.model.save_pretrained(save_path) + + tokenizer = AutoTokenizer.from_pretrained( + cfg.model.model_path, trust_remote_code=True) + tokenizer.save_pretrained(save_path) + + print(model) + + +def main(): + parser = argparse.ArgumentParser( + description='Convert the pth model to HuggingFace model') + parser.add_argument('config', help='config file name or path.') + parser.add_argument('trained_model_pth', help='The trained model path.') + parser.add_argument( + 'save_path', help='The path to save the converted model.') + args = parser.parse_args() + + if osp.realpath(args.trained_model_pth) == osp.realpath(args.save_path): + raise ValueError( + 'The trained path and save path should not be the same.') + + convert_to_official(args.config, args.trained_model_pth, args.save_path) + + +if __name__ == '__main__': + main() diff --git a/data/xtuner/xtuner/configs/internvl/v1_5/internvl_v1_5_internlm2_26b_finetune.py b/data/xtuner/xtuner/configs/internvl/v1_5/internvl_v1_5_internlm2_26b_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..d5eec78294c29c4f17715a67d239f5c94b24ceed --- /dev/null +++ b/data/xtuner/xtuner/configs/internvl/v1_5/internvl_v1_5_internlm2_26b_finetune.py @@ -0,0 +1,170 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
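+# Full-parameter fine-tune of InternVL-Chat-V1-5 on the LLaVA mix665k data:
+# the LLM is trained (freeze_llm=False) while the ViT stays frozen; see PART 2.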
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoTokenizer + +from xtuner.dataset import InternVL_V1_5_Dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import InternVL_V1_5 +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +path = 'OpenGVLab/InternVL-Chat-V1-5' +prompt_template = PROMPT_TEMPLATE.internlm2_chat + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +max_length = 4096 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 8 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +# official 1024 -> 2e-5 +lr = 1e-6 +betas = (0.9, 0.999) +weight_decay = 0.01 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 1000 +save_total_limit = 1 # Maximum checkpoints to keep (-1 means unlimited) + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +model = dict( + type=InternVL_V1_5, + model_path=path, + freeze_llm=False, + freeze_visual_encoder=True # or False +) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=InternVL_V1_5_Dataset, + model_path=path, + data_paths=data_path, + image_folders=image_folder, + template=prompt_template, + max_length=max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # 
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=path,
+    trust_remote_code=True)
+
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        save_optimizer=False,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/internvl/v1_5/internvl_v1_5_internlm2_26b_lora_finetune.py b/data/xtuner/xtuner/configs/internvl/v1_5/internvl_v1_5_internlm2_26b_lora_finetune.py
new file mode 100644
index 0000000000000000000000000000000000000000..0fb511d42a3cc2b467a28f9793aadc45cb4248cc
--- /dev/null
+++ b/data/xtuner/xtuner/configs/internvl/v1_5/internvl_v1_5_internlm2_26b_lora_finetune.py
@@ -0,0 +1,183 @@
+# Copyright (c) OpenMMLab. All rights reserved.
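+# LoRA variant of the 26B recipe: both LLM and ViT stay frozen and a rank-128
+# (alpha 256) LoRA is attached to the language model; see PART 2 below.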
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import AutoTokenizer
+
+from xtuner.dataset import InternVL_V1_5_Dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.samplers import LengthGroupedSampler
+from xtuner.engine.hooks import DatasetInfoHook
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import InternVL_V1_5
+from xtuner.utils import PROMPT_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+path = 'OpenGVLab/InternVL-Chat-V1-5'
+prompt_template = PROMPT_TEMPLATE.internlm2_chat
+
+# Data
+data_root = './data/llava_data/'
+data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json'
+image_folder = data_root + 'llava_images'
+max_length = 4096
+
+# Scheduler & Optimizer
+batch_size = 2  # per_device
+accumulative_counts = 4
+dataloader_num_workers = 4
+max_epochs = 1
+optim_type = AdamW
+# official 1024 -> 2e-5
+lr = 1e-6
+betas = (0.9, 0.999)
+weight_decay = 0.01
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 1000
+save_total_limit = 1  # Maximum checkpoints to keep (-1 means unlimited)
+
+#######################################################################
+#            PART 2  Model & Tokenizer & Image Processor              #
+#######################################################################
+model = dict(
+    type=InternVL_V1_5,
+    model_path=path,
+    freeze_llm=True,
+    freeze_visual_encoder=True,
+    # comment the following lines if you don't want to use Lora in llm
+    llm_lora=dict(
+        type=LoraConfig,
+        r=128,
+        lora_alpha=256,
+        lora_dropout=0.05,
+        target_modules=None,
+        task_type='CAUSAL_LM'),
+    # uncomment the following lines if you want to use Lora in visual encoder  # noqa
+    # visual_encoder_lora=dict(
+    #     type=LoraConfig, r=64, lora_alpha=16, lora_dropout=0.05,
+    #     target_modules=['attn.qkv', 'attn.proj', 'mlp.fc1', 'mlp.fc2'])
+)
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+llava_dataset = dict(
+    type=InternVL_V1_5_Dataset,
+    model_path=path,
+    data_paths=data_path,
+    image_folders=image_folder,
+    template=prompt_template,
+    max_length=max_length)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=llava_dataset,
+    sampler=dict(
+        type=LengthGroupedSampler,
+        length_property='modality_length',
+        per_device_batch_size=batch_size * accumulative_counts),
+    collate_fn=dict(type=default_collate_fn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
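+        # i.e. the lr ramps linearly from lr * 1e-5 up to lr over the warmup
+        # window (warmup_ratio * max_epochs epochs), then cosine-decays to 0.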
by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=path, + trust_remote_code=True) + +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save a checkpoint every `save_steps` iterations. + checkpoint=dict( + type=CheckpointHook, + save_optimizer=False, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Default to a random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internvl/v1_5/internvl_v1_5_internlm2_26b_qlora_finetune.py b/data/xtuner/xtuner/configs/internvl/v1_5/internvl_v1_5_internlm2_26b_qlora_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..8d994c81d065b6b7ee31b5d0ffb4d126bd7feb69 --- /dev/null +++ b/data/xtuner/xtuner/configs/internvl/v1_5/internvl_v1_5_internlm2_26b_qlora_finetune.py @@ -0,0 +1,185 @@ +# Copyright (c) OpenMMLab. All rights reserved.
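This QLoRA config differs from the LoRA one above only by `quantization_llm=True`: the frozen base weights are additionally loaded quantized, so only the LoRA adapters remain in higher precision. Outside xtuner the same recipe is commonly written with bitsandbytes plus peft; a rough analogue follows (the real InternVL checkpoint is multimodal and loads through xtuner's own wrapper, so the model name here is only a stand-in for the shape of the recipe):

from transformers import AutoModelForCausalLM, BitsAndBytesConfig
from peft import LoraConfig, get_peft_model

base = AutoModelForCausalLM.from_pretrained(
    'OpenGVLab/InternVL-Chat-V1-5',
    quantization_config=BitsAndBytesConfig(load_in_4bit=True),  # frozen 4-bit base
    trust_remote_code=True)
model = get_peft_model(base, LoraConfig(
    r=128, lora_alpha=256, lora_dropout=0.05, task_type='CAUSAL_LM'))
model.print_trainable_parameters()  # only the adapters are trainable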
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import AutoTokenizer + +from xtuner.dataset import InternVL_V1_5_Dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import InternVL_V1_5 +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +path = 'OpenGVLab/InternVL-Chat-V1-5' +prompt_template = PROMPT_TEMPLATE.internlm2_chat + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +max_length = 4096 + +# Scheduler & Optimizer +batch_size = 2 # per_device +accumulative_counts = 4 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +# official 1024 -> 2e-5 +lr = 1e-6 +betas = (0.9, 0.999) +weight_decay = 0.01 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 1000 +save_total_limit = 1 # Maximum checkpoints to keep (-1 means unlimited) + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +model = dict( + type=InternVL_V1_5, + model_path=path, + freeze_llm=True, + freeze_visual_encoder=True, + quantization_llm=True, # or False + quantization_vit=False, # or True and uncomment visual_encoder_lora + # comment the following lines if you don't want to use LoRA in the llm + llm_lora=dict( + type=LoraConfig, + r=128, + lora_alpha=256, + lora_dropout=0.05, + target_modules=None, + task_type='CAUSAL_LM'), + # uncomment the following lines if you want to use LoRA in the visual encoder # noqa + # visual_encoder_lora=dict( + # type=LoraConfig, r=64, lora_alpha=16, lora_dropout=0.05, + # target_modules=['attn.qkv', 'attn.proj', 'mlp.fc1', 'mlp.fc2']) +) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=InternVL_V1_5_Dataset, + model_path=path, + data_paths=data_path, + image_folders=image_folder, + template=prompt_template, + max_length=max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information:
https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=path, + trust_remote_code=True) + +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save a checkpoint every `save_steps` iterations. + checkpoint=dict( + type=CheckpointHook, + save_optimizer=False, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Default to a random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internvl/v1_5/internvl_v1_5_internlm2_2b_finetune.py b/data/xtuner/xtuner/configs/internvl/v1_5/internvl_v1_5_internlm2_2b_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..09fb01e3f032dfbbb37a8e8f4637bf5c808f8f82 --- /dev/null +++ b/data/xtuner/xtuner/configs/internvl/v1_5/internvl_v1_5_internlm2_2b_finetune.py @@ -0,0 +1,170 @@ +# Copyright (c) OpenMMLab. All rights reserved.
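The recurring `official 1024 -> 4e-5` comment reads as a linear learning-rate scaling hint: the reference recipe used a global batch of 1024 at lr 4e-5, and the global batch of any run here is per-device batch size times accumulation times world size. One way to apply the rule (an interpretation for illustration, not xtuner code):

def scaled_lr(ref_lr=4e-5, ref_batch=1024, per_device=4, accum=4, world_size=8):
    # Linear scaling: keep lr proportional to the global batch size.
    global_batch = per_device * accum * world_size   # 128 in this example
    return ref_lr * global_batch / ref_batch

print(scaled_lr())  # ~5e-06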
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoTokenizer + +from xtuner.dataset import InternVL_V1_5_Dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import InternVL_V1_5 +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +path = 'OpenGVLab/Mini-InternVL-Chat-2B-V1-5' +prompt_template = PROMPT_TEMPLATE.internlm2_chat + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +max_length = 8192 + +# Scheduler & Optimizer +batch_size = 4 # per_device +accumulative_counts = 4 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +# official 1024 -> 4e-5 +lr = 1e-6 +betas = (0.9, 0.999) +weight_decay = 0.05 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 1000 +save_total_limit = 1 # Maximum checkpoints to keep (-1 means unlimited) + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +model = dict( + type=InternVL_V1_5, + model_path=path, + freeze_llm=False, + freeze_visual_encoder=True # or False +) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=InternVL_V1_5_Dataset, + model_path=path, + data_paths=data_path, + image_folders=image_folder, + template=prompt_template, + max_length=max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # 
+####################################################################### +# Log the dialogue periodically during the training process, optional +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=path, + trust_remote_code=True) + +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save a checkpoint every `save_steps` iterations. + checkpoint=dict( + type=CheckpointHook, + save_optimizer=False, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Default to a random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internvl/v1_5/internvl_v1_5_internlm2_2b_lora_finetune.py b/data/xtuner/xtuner/configs/internvl/v1_5/internvl_v1_5_internlm2_2b_lora_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..193e2f269dd534e2cef612e6bb9f33f8b84ce094 --- /dev/null +++ b/data/xtuner/xtuner/configs/internvl/v1_5/internvl_v1_5_internlm2_2b_lora_finetune.py @@ -0,0 +1,183 @@ +# Copyright (c) OpenMMLab. All rights reserved.
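For the LoRA settings used throughout these files, each adapted weight W is applied at run time as W + (lora_alpha / r) * B A with a rank-r bottleneck, so r=128 and lora_alpha=256 give a scaling factor of 2, and only A and B are trained. A minimal forward pass of one adapted linear layer (a sketch of the math, not peft's implementation):

import torch

def lora_linear(x, W, A, B, r=128, alpha=256):
    # Frozen path x @ W.T plus the trained low-rank update, scaled by alpha/r.
    return x @ W.T + (alpha / r) * (x @ A.T) @ B.T

x = torch.randn(2, 512)        # activations
W = torch.randn(1024, 512)     # frozen pretrained weight
A = torch.randn(128, 512)      # trainable down-projection
B = torch.zeros(1024, 128)     # trainable up-projection, zero-initialised
print(lora_linear(x, W, A, B).shape)  # torch.Size([2, 1024])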
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import AutoTokenizer + +from xtuner.dataset import InternVL_V1_5_Dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import InternVL_V1_5 +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +path = 'OpenGVLab/Mini-InternVL-Chat-2B-V1-5' +prompt_template = PROMPT_TEMPLATE.internlm2_chat + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +max_length = 8192 + +# Scheduler & Optimizer +batch_size = 8 # per_device +accumulative_counts = 2 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +# official 1024 -> 4e-5 +lr = 1e-6 +betas = (0.9, 0.999) +weight_decay = 0.05 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 1000 +save_total_limit = 1 # Maximum checkpoints to keep (-1 means unlimited) + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +model = dict( + type=InternVL_V1_5, + model_path=path, + freeze_llm=True, + freeze_visual_encoder=True, + # comment the following lines if you don't want to use LoRA in the llm + llm_lora=dict( + type=LoraConfig, + r=128, + lora_alpha=256, + lora_dropout=0.05, + target_modules=None, + task_type='CAUSAL_LM'), + # uncomment the following lines if you want to use LoRA in the visual encoder # noqa + # visual_encoder_lora=dict( + # type=LoraConfig, r=64, lora_alpha=16, lora_dropout=0.05, + # target_modules=['attn.qkv', 'attn.proj', 'mlp.fc1', 'mlp.fc2']) +) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=InternVL_V1_5_Dataset, + model_path=path, + data_paths=data_path, + image_folders=image_folder, + template=prompt_template, + max_length=max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR,
start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=path, + trust_remote_code=True) + +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save a checkpoint every `save_steps` iterations. + checkpoint=dict( + type=CheckpointHook, + save_optimizer=False, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Default to a random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internvl/v1_5/internvl_v1_5_internlm2_2b_qlora_finetune.py b/data/xtuner/xtuner/configs/internvl/v1_5/internvl_v1_5_internlm2_2b_qlora_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..6bb28e490abfae77d81fe3ca7ad3c211ae37e4b4 --- /dev/null +++ b/data/xtuner/xtuner/configs/internvl/v1_5/internvl_v1_5_internlm2_2b_qlora_finetune.py @@ -0,0 +1,185 @@ +# Copyright (c) OpenMMLab. All rights reserved.
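All of these configs pair the dataloader with a LengthGroupedSampler whose `per_device_batch_size` is batch_size * accumulative_counts, so each gradient-accumulation window is drawn from samples of similar `modality_length` and padding waste stays low. The core idea, stripped of the shuffling and distributed details of the real sampler:

def length_grouped_batches(lengths, mega_batch):
    # Sort sample indices by length, then cut into accumulation-sized groups.
    order = sorted(range(len(lengths)), key=lengths.__getitem__)
    return [order[i:i + mega_batch] for i in range(0, len(order), mega_batch)]

print(length_grouped_batches([5, 120, 7, 118, 6, 121], mega_batch=2))
# [[0, 4], [2, 3], [1, 5]]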
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import AutoTokenizer + +from xtuner.dataset import InternVL_V1_5_Dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import InternVL_V1_5 +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +path = 'OpenGVLab/Mini-InternVL-Chat-2B-V1-5' +prompt_template = PROMPT_TEMPLATE.internlm2_chat + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +max_length = 8192 + +# Scheduler & Optimizer +batch_size = 8 # per_device +accumulative_counts = 2 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +# official 1024 -> 4e-5 +lr = 1e-6 +betas = (0.9, 0.999) +weight_decay = 0.05 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 1000 +save_total_limit = 1 # Maximum checkpoints to keep (-1 means unlimited) + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +model = dict( + type=InternVL_V1_5, + model_path=path, + freeze_llm=True, + freeze_visual_encoder=True, + quantization_llm=True, # or False + quantization_vit=False, # or True and uncomment visual_encoder_lora + # comment the following lines if you don't want to use LoRA in the llm + llm_lora=dict( + type=LoraConfig, + r=128, + lora_alpha=256, + lora_dropout=0.05, + target_modules=None, + task_type='CAUSAL_LM'), + # uncomment the following lines if you want to use LoRA in the visual encoder # noqa + # visual_encoder_lora=dict( + # type=LoraConfig, r=64, lora_alpha=16, lora_dropout=0.05, + # target_modules=['attn.qkv', 'attn.proj', 'mlp.fc1', 'mlp.fc2']) +) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=InternVL_V1_5_Dataset, + model_path=path, + data_paths=data_path, + image_folders=image_folder, + template=prompt_template, + max_length=max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information:
https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=path, + trust_remote_code=True) + +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save a checkpoint every `save_steps` iterations. + checkpoint=dict( + type=CheckpointHook, + save_optimizer=False, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Default to a random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internvl/v1_5/internvl_v1_5_phi3_4b_finetune.py b/data/xtuner/xtuner/configs/internvl/v1_5/internvl_v1_5_phi3_4b_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..5d34a928bbb400b6905e48df329cf5e7e2ace9cd --- /dev/null +++ b/data/xtuner/xtuner/configs/internvl/v1_5/internvl_v1_5_phi3_4b_finetune.py @@ -0,0 +1,170 @@ +# Copyright (c) OpenMMLab. All rights reserved.
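Because the next config wraps a Phi-3-based checkpoint, `prompt_template` switches from internlm2_chat to phi3_chat: the template supplies the control tokens that frame each dialogue turn before tokenization. Roughly, using the published Phi-3 chat markers (simplified; the exact strings live in xtuner.utils.PROMPT_TEMPLATE and may differ in detail):

PHI3_TURN = '<|user|>\n{input}<|end|>\n<|assistant|>\n'

def render_turn(user_msg):
    # Wrap one user turn; the model's reply is generated after <|assistant|>.
    return PHI3_TURN.format(input=user_msg)

print(render_turn('Describe the image.'))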
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoTokenizer + +from xtuner.dataset import InternVL_V1_5_Dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import InternVL_V1_5 +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +path = 'OpenGVLab/Mini-InternVL-Chat-4B-V1-5' + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.phi3_chat +max_length = 8192 + +# Scheduler & Optimizer +batch_size = 4 # per_device +accumulative_counts = 4 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +# official 1024 -> 4e-5 +lr = 1e-6 +betas = (0.9, 0.999) +weight_decay = 0.05 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 1000 +save_total_limit = 1 # Maximum checkpoints to keep (-1 means unlimited) + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +model = dict( + type=InternVL_V1_5, + model_path=path, + freeze_llm=False, + freeze_visual_encoder=True # or False +) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=InternVL_V1_5_Dataset, + model_path=path, + data_paths=data_path, + image_folders=image_folder, + template=prompt_template, + max_length=max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # 
+####################################################################### +# Log the dialogue periodically during the training process, optional +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=path, + trust_remote_code=True) + +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save a checkpoint every `save_steps` iterations. + checkpoint=dict( + type=CheckpointHook, + save_optimizer=False, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Default to a random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internvl/v1_5/internvl_v1_5_phi3_4b_lora_finetune.py b/data/xtuner/xtuner/configs/internvl/v1_5/internvl_v1_5_phi3_4b_lora_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..19588cb95a7fff280de536a24e97fbd27f300900 --- /dev/null +++ b/data/xtuner/xtuner/configs/internvl/v1_5/internvl_v1_5_phi3_4b_lora_finetune.py @@ -0,0 +1,183 @@ +# Copyright (c) OpenMMLab. All rights reserved.
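The AmpOptimWrapper used in every config bundles fp16 autocast, dynamic loss scaling, gradient accumulation, and gradient clipping. Its per-step behaviour corresponds roughly to the standard torch.cuda.amp pattern; a sketch with a toy model (not mmengine's code):

import torch

model = torch.nn.Linear(4, 4).cuda()
opt = torch.optim.AdamW(model.parameters(), lr=1e-6)
scaler = torch.cuda.amp.GradScaler()          # loss_scale='dynamic'
accum, max_norm = 2, 1.0

for step in range(4):
    with torch.autocast('cuda', dtype=torch.float16):
        loss = model(torch.randn(8, 4, device='cuda')).square().mean() / accum
    scaler.scale(loss).backward()             # backward on the scaled loss
    if (step + 1) % accum == 0:
        scaler.unscale_(opt)                  # real grads before clipping
        torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm)
        scaler.step(opt)
        scaler.update()
        opt.zero_grad()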
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import AutoTokenizer + +from xtuner.dataset import InternVL_V1_5_Dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import InternVL_V1_5 +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +path = 'OpenGVLab/Mini-InternVL-Chat-4B-V1-5' + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.phi3_chat +max_length = 8192 + +# Scheduler & Optimizer +batch_size = 8 # per_device +accumulative_counts = 2 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +# official 1024 -> 4e-5 +lr = 1e-6 +betas = (0.9, 0.999) +weight_decay = 0.05 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 1000 +save_total_limit = 1 # Maximum checkpoints to keep (-1 means unlimited) + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +model = dict( + type=InternVL_V1_5, + model_path=path, + freeze_llm=True, + freeze_visual_encoder=True, + # comment the following lines if you don't want to use LoRA in the llm + llm_lora=dict( + type=LoraConfig, + r=128, + lora_alpha=256, + lora_dropout=0.05, + target_modules=None, + task_type='CAUSAL_LM'), + # uncomment the following lines if you want to use LoRA in the visual encoder # noqa + # visual_encoder_lora=dict( + # type=LoraConfig, r=64, lora_alpha=16, lora_dropout=0.05, + # target_modules=['attn.qkv', 'attn.proj', 'mlp.fc1', 'mlp.fc2']) +) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=InternVL_V1_5_Dataset, + model_path=path, + data_paths=data_path, + image_folders=image_folder, + template=prompt_template, + max_length=max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5,
by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=path, + trust_remote_code=True) + +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save a checkpoint every `save_steps` iterations. + checkpoint=dict( + type=CheckpointHook, + save_optimizer=False, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Default to a random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internvl/v1_5/internvl_v1_5_phi3_4b_qlora_finetune.py b/data/xtuner/xtuner/configs/internvl/v1_5/internvl_v1_5_phi3_4b_qlora_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..cb150f0c48b1266d22398d60ab2ceed266108671 --- /dev/null +++ b/data/xtuner/xtuner/configs/internvl/v1_5/internvl_v1_5_phi3_4b_qlora_finetune.py @@ -0,0 +1,185 @@ +# Copyright (c) OpenMMLab. All rights reserved.
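The two-stage `param_scheduler` shared by these configs means: linear warmup from lr times 1e-5 over the first warmup_ratio of training, then cosine decay to zero, with both phases converted from epoch to iteration units. The resulting curve, approximately (the exact mmengine interpolation differs slightly at the boundaries):

import math

def lr_at(t, total_iters, base_lr=1e-6, warmup_ratio=0.03, start_factor=1e-5):
    warm = int(total_iters * warmup_ratio)
    if t < warm:  # LinearLR: factor ramps from start_factor up to 1
        return base_lr * (start_factor + (1 - start_factor) * t / warm)
    # CosineAnnealingLR: decay from base_lr to eta_min=0 over the remainder
    return base_lr * 0.5 * (1 + math.cos(math.pi * (t - warm) / (total_iters - warm)))

print(lr_at(0, 1000), lr_at(30, 1000), lr_at(999, 1000))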
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import AutoTokenizer + +from xtuner.dataset import InternVL_V1_5_Dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import InternVL_V1_5 +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +path = 'OpenGVLab/Mini-InternVL-Chat-4B-V1-5' + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.phi3_chat +max_length = 8192 + +# Scheduler & Optimizer +batch_size = 8 # per_device +accumulative_counts = 2 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +# official 1024 -> 4e-5 +lr = 1e-6 +betas = (0.9, 0.999) +weight_decay = 0.05 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 1000 +save_total_limit = 1 # Maximum checkpoints to keep (-1 means unlimited) + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +model = dict( + type=InternVL_V1_5, + model_path=path, + freeze_llm=True, + freeze_visual_encoder=True, + quantization_llm=True, # or False + quantization_vit=False, # or True and uncomment visual_encoder_lora + # comment the following lines if you don't want to use LoRA in the llm + llm_lora=dict( + type=LoraConfig, + r=128, + lora_alpha=256, + lora_dropout=0.05, + target_modules=None, + task_type='CAUSAL_LM'), + # uncomment the following lines if you want to use LoRA in the visual encoder # noqa + # visual_encoder_lora=dict( + # type=LoraConfig, r=64, lora_alpha=16, lora_dropout=0.05, + # target_modules=['attn.qkv', 'attn.proj', 'mlp.fc1', 'mlp.fc2']) +) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=InternVL_V1_5_Dataset, + model_path=path, + data_paths=data_path, + image_folders=image_folder, + template=prompt_template, + max_length=max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information:
https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=path, + trust_remote_code=True) + +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save a checkpoint every `save_steps` iterations. + checkpoint=dict( + type=CheckpointHook, + save_optimizer=False, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Default to a random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_internlm2_26b_finetune.py b/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_internlm2_26b_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..0916df44a182d1b634920f7c5be9b76cb7cf5d44 --- /dev/null +++ b/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_internlm2_26b_finetune.py @@ -0,0 +1,170 @@ +# Copyright (c) OpenMMLab. All rights reserved.
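Unlike the LoRA and QLoRA variants, this InternVL2-26B config sets freeze_llm=False and trains all LLM parameters, which is why the per-device batch drops to 1 with 8-step accumulation. A back-of-envelope estimate of why full finetuning is so much heavier (rough assumptions: fp16 weights plus fp32 AdamW master weights and two moments, before activations or any sharding):

params = 26e9
bytes_per_param = 2 + 4 + 4 + 4      # fp16 weight + fp32 copy + Adam m + Adam v
print(f'{params * bytes_per_param / 2**30:.0f} GiB')  # ~339 GiB of state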
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoTokenizer + +from xtuner.dataset import InternVL_V1_5_Dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import InternVL_V1_5 +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +path = 'OpenGVLab/InternVL2-26B' + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 8192 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 8 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +# official 1024 -> 4e-5 +lr = 1e-6 +betas = (0.9, 0.999) +weight_decay = 0.05 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 1000 +save_total_limit = 1 # Maximum checkpoints to keep (-1 means unlimited) + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +model = dict( + type=InternVL_V1_5, + model_path=path, + freeze_llm=False, + freeze_visual_encoder=True # or False +) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=InternVL_V1_5_Dataset, + model_path=path, + data_paths=data_path, + image_folders=image_folder, + template=prompt_template, + max_length=max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # 
+####################################################################### +# Log the dialogue periodically during the training process, optional +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=path, + trust_remote_code=True) + +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save a checkpoint every `save_steps` iterations. + checkpoint=dict( + type=CheckpointHook, + save_optimizer=False, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Default to a random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_internlm2_26b_lora_finetune.py b/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_internlm2_26b_lora_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..045fd70556ba4d656e270a93cdb19d6166e34d41 --- /dev/null +++ b/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_internlm2_26b_lora_finetune.py @@ -0,0 +1,183 @@ +# Copyright (c) OpenMMLab. All rights reserved.
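The CheckpointHook settings shared by all these configs mean: with by_epoch=False the `interval` counts iterations, save_optimizer=False drops optimizer state to shrink checkpoint files (at the cost of not restoring AdamW moments on resume), and max_keep_ckpts rotates old checkpoints away. The rotation amounts to something like this sketch, assuming iteration-stamped filenames:

import os

def save_and_rotate(ckpt_dir, iter_idx, save_steps=1000, keep=1):
    if iter_idx % save_steps != 0:
        return
    # ... write f'iter_{iter_idx:08d}.pth' here ...
    ckpts = sorted(f for f in os.listdir(ckpt_dir) if f.endswith('.pth'))
    for old in ckpts[:-keep]:                 # keep only the newest `keep`
        os.remove(os.path.join(ckpt_dir, old))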
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import AutoTokenizer + +from xtuner.dataset import InternVL_V1_5_Dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import InternVL_V1_5 +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +path = 'OpenGVLab/InternVL2-26B' + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 8192 + +# Scheduler & Optimizer +batch_size = 2 # per_device +accumulative_counts = 4 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +# official 1024 -> 4e-5 +lr = 1e-6 +betas = (0.9, 0.999) +weight_decay = 0.05 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 1000 +save_total_limit = 1 # Maximum checkpoints to keep (-1 means unlimited) + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +model = dict( + type=InternVL_V1_5, + model_path=path, + freeze_llm=True, + freeze_visual_encoder=True, + # comment the following lines if you don't want to use LoRA in the llm + llm_lora=dict( + type=LoraConfig, + r=128, + lora_alpha=256, + lora_dropout=0.05, + target_modules=None, + task_type='CAUSAL_LM'), + # uncomment the following lines if you want to use LoRA in the visual encoder # noqa + # visual_encoder_lora=dict( + # type=LoraConfig, r=64, lora_alpha=16, lora_dropout=0.05, + # target_modules=['attn.qkv', 'attn.proj', 'mlp.fc1', 'mlp.fc2']) +) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=InternVL_V1_5_Dataset, + model_path=path, + data_paths=data_path, + image_folders=image_folder, + template=prompt_template, + max_length=max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5,
by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=path, + trust_remote_code=True) + +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save a checkpoint every `save_steps` iterations. + checkpoint=dict( + type=CheckpointHook, + save_optimizer=False, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Default to a random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_internlm2_26b_qlora_finetune.py b/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_internlm2_26b_qlora_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..60717b312a54e019cac51d1186f6918710147979 --- /dev/null +++ b/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_internlm2_26b_qlora_finetune.py @@ -0,0 +1,185 @@ +# Copyright (c) OpenMMLab. All rights reserved.
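The runtime blocks also pin down the distributed environment: dist_cfg selects the NCCL backend, opencv_num_threads=0 together with the 'fork' start method keeps dataloader workers from oversubscribing CPU threads, and DistSamplerSeedHook re-seeds the sampler each epoch so every rank shuffles consistently. The hook's effect is essentially this (a simplified sketch):

def on_train_epoch_start(dataloader, epoch):
    # What DistSamplerSeedHook boils down to: give the DistributedSampler a
    # new, rank-consistent shuffle for each epoch.
    sampler = dataloader.sampler
    if hasattr(sampler, 'set_epoch'):
        sampler.set_epoch(epoch)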
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import AutoTokenizer + +from xtuner.dataset import InternVL_V1_5_Dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import InternVL_V1_5 +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +path = 'OpenGVLab/InternVL2-26B' + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 8192 + +# Scheduler & Optimizer +batch_size = 2 # per_device +accumulative_counts = 4 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +# official 1024 -> 4e-5 +lr = 1e-6 +betas = (0.9, 0.999) +weight_decay = 0.05 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 1000 +save_total_limit = 1 # Maximum checkpoints to keep (-1 means unlimited) + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +model = dict( + type=InternVL_V1_5, + model_path=path, + freeze_llm=True, + freeze_visual_encoder=True, + quantization_llm=True, # or False + quantization_vit=False, # or True and uncomment visual_encoder_lora + # comment the following lines if you don't want to use LoRA in the llm + llm_lora=dict( + type=LoraConfig, + r=128, + lora_alpha=256, + lora_dropout=0.05, + target_modules=None, + task_type='CAUSAL_LM'), + # uncomment the following lines if you want to use LoRA in the visual encoder # noqa + # visual_encoder_lora=dict( + # type=LoraConfig, r=64, lora_alpha=16, lora_dropout=0.05, + # target_modules=['attn.qkv', 'attn.proj', 'mlp.fc1', 'mlp.fc2']) +) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=InternVL_V1_5_Dataset, + model_path=path, + data_paths=data_path, + image_folders=image_folder, + template=prompt_template, + max_length=max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information:
https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during training (optional) +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=path, + trust_remote_code=True) + +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + save_optimizer=False, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Default to a random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_internlm2_2b_finetune.py b/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_internlm2_2b_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..a921cf0c0e5098e8da824b3e8ff0a67b68093b22 --- /dev/null +++ b/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_internlm2_2b_finetune.py @@ -0,0 +1,170 @@ +# Copyright (c) OpenMMLab. All rights reserved.
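+# Effective-batch-size sketch for the settings below (the GPU count is an illustrative assumption, not part of this config): +#   batch_size (4) * accumulative_counts (4) = 16 samples per GPU per optimizer step +#   with, say, 8 data-parallel GPUs: 16 * 8 = 128 samples per global optimizer step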
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoTokenizer + +from xtuner.dataset import InternVL_V1_5_Dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import InternVL_V1_5 +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +path = 'OpenGVLab/InternVL2-2B' + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 8192 + +# Scheduler & Optimizer +batch_size = 4 # per_device +accumulative_counts = 4 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +# official 1024 -> 4e-5 +lr = 1e-6 +betas = (0.9, 0.999) +weight_decay = 0.05 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 1000 +save_total_limit = 1 # Maximum checkpoints to keep (-1 means unlimited) + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +model = dict( + type=InternVL_V1_5, + model_path=path, + freeze_llm=False, + freeze_visual_encoder=True # or False +) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=InternVL_V1_5_Dataset, + model_path=path, + data_paths=data_path, + image_folders=image_folder, + template=prompt_template, + max_length=max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # 
+####################################################################### +# Log the dialogue periodically during training (optional) +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=path, + trust_remote_code=True) + +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + save_optimizer=False, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Default to a random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_internlm2_2b_lora_finetune.py b/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_internlm2_2b_lora_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..44b3c39445d730f5a499c63caf199a98b41f9f7f --- /dev/null +++ b/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_internlm2_2b_lora_finetune.py @@ -0,0 +1,183 @@ +# Copyright (c) OpenMMLab. All rights reserved.
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import AutoTokenizer + +from xtuner.dataset import InternVL_V1_5_Dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import InternVL_V1_5 +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +path = 'OpenGVLab/InternVL2-2B' + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 8192 + +# Scheduler & Optimizer +batch_size = 8 # per_device +accumulative_counts = 2 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +# official 1024 -> 4e-5 +lr = 1e-6 +betas = (0.9, 0.999) +weight_decay = 0.05 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 1000 +save_total_limit = 1 # Maximum checkpoints to keep (-1 means unlimited) + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +model = dict( + type=InternVL_V1_5, + model_path=path, + freeze_llm=True, + freeze_visual_encoder=True, + # comment the following lines if you don't want to use LoRA in the LLM + llm_lora=dict( + type=LoraConfig, + r=128, + lora_alpha=256, + lora_dropout=0.05, + target_modules=None, + task_type='CAUSAL_LM'), + # uncomment the following lines if you want to use LoRA in the visual encoder # noqa + # visual_encoder_lora=dict( + # type=LoraConfig, r=64, lora_alpha=16, lora_dropout=0.05, + # target_modules=['attn.qkv', 'attn.proj', 'mlp.fc1', 'mlp.fc2']) +) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=InternVL_V1_5_Dataset, + model_path=path, + data_paths=data_path, + image_folders=image_folder, + template=prompt_template, + max_length=max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, +
by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during training (optional) +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=path, + trust_remote_code=True) + +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + save_optimizer=False, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Default to a random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_internlm2_2b_qlora_finetune.py b/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_internlm2_2b_qlora_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..5840a593f82a2636b80b2522c41842d888852dc9 --- /dev/null +++ b/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_internlm2_2b_qlora_finetune.py @@ -0,0 +1,185 @@ +# Copyright (c) OpenMMLab. All rights reserved.
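+# Note on the QLoRA variant below (a summary of the flags, not new behavior): quantization_llm=True loads the frozen LLM weights in quantized form to save memory, while only the small LoRA adapter (r=128, lora_alpha=256) trains in higher precision; quantization_vit stays False since the frozen visual encoder carries no LoRA by default.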
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import AutoTokenizer + +from xtuner.dataset import InternVL_V1_5_Dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import InternVL_V1_5 +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +path = 'OpenGVLab/InternVL2-2B' + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 8192 + +# Scheduler & Optimizer +batch_size = 8 # per_device +accumulative_counts = 2 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +# official 1024 -> 4e-5 +lr = 1e-6 +betas = (0.9, 0.999) +weight_decay = 0.05 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 1000 +save_total_limit = 1 # Maximum checkpoints to keep (-1 means unlimited) + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +model = dict( + type=InternVL_V1_5, + model_path=path, + freeze_llm=True, + freeze_visual_encoder=True, + quantization_llm=True, # or False + quantization_vit=False, # or True and uncomment visual_encoder_lora + # comment the following lines if you don't want to use LoRA in the LLM + llm_lora=dict( + type=LoraConfig, + r=128, + lora_alpha=256, + lora_dropout=0.05, + target_modules=None, + task_type='CAUSAL_LM'), + # uncomment the following lines if you want to use LoRA in the visual encoder # noqa + # visual_encoder_lora=dict( + # type=LoraConfig, r=64, lora_alpha=16, lora_dropout=0.05, + # target_modules=['attn.qkv', 'attn.proj', 'mlp.fc1', 'mlp.fc2']) +) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=InternVL_V1_5_Dataset, + model_path=path, + data_paths=data_path, + image_folders=image_folder, + template=prompt_template, + max_length=max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information:
https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during training (optional) +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=path, + trust_remote_code=True) + +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + save_optimizer=False, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Default to a random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_internlm2_5_8b_finetune.py b/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_internlm2_5_8b_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..2a92c017f64d3b038f4761dfbc222cf136a78f92 --- /dev/null +++ b/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_internlm2_5_8b_finetune.py @@ -0,0 +1,170 @@ +# Copyright (c) OpenMMLab. All rights reserved.
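+# What the LinearLR -> CosineAnnealingLR pair below works out to (the iteration count is a hypothetical example, not from this file): with warmup_ratio = 0.03, max_epochs = 1 and convert_to_iter_based=True, a 10,000-iteration run warms the learning rate up over the first 0.03 * 10000 = 300 iterations, then cosine-decays it to eta_min = 0.0 by the end.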
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoTokenizer + +from xtuner.dataset import InternVL_V1_5_Dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import InternVL_V1_5 +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +path = 'OpenGVLab/InternVL2-8B' + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 8192 + +# Scheduler & Optimizer +batch_size = 4 # per_device +accumulative_counts = 4 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +# official 1024 -> 4e-5 +lr = 1e-6 +betas = (0.9, 0.999) +weight_decay = 0.05 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 1000 +save_total_limit = 1 # Maximum checkpoints to keep (-1 means unlimited) + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +model = dict( + type=InternVL_V1_5, + model_path=path, + freeze_llm=False, + freeze_visual_encoder=True # or False +) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=InternVL_V1_5_Dataset, + model_path=path, + data_paths=data_path, + image_folders=image_folder, + template=prompt_template, + max_length=max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # 
+####################################################################### +# Log the dialogue periodically during training (optional) +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=path, + trust_remote_code=True) + +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + save_optimizer=False, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Default to a random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_internlm2_5_8b_lora_finetune.py b/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_internlm2_5_8b_lora_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..d9fa7ab3a2ba3508a8a1e63b7b7b2b5168a61a94 --- /dev/null +++ b/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_internlm2_5_8b_lora_finetune.py @@ -0,0 +1,183 @@ +# Copyright (c) OpenMMLab. All rights reserved.
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import AutoTokenizer + +from xtuner.dataset import InternVL_V1_5_Dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import InternVL_V1_5 +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +path = 'OpenGVLab/InternVL2-8B' + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 8192 + +# Scheduler & Optimizer +batch_size = 8 # per_device +accumulative_counts = 2 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +# official 1024 -> 4e-5 +lr = 1e-6 +betas = (0.9, 0.999) +weight_decay = 0.05 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 1000 +save_total_limit = 1 # Maximum checkpoints to keep (-1 means unlimited) + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +model = dict( + type=InternVL_V1_5, + model_path=path, + freeze_llm=True, + freeze_visual_encoder=True, + # comment the following lines if you don't want to use LoRA in the LLM + llm_lora=dict( + type=LoraConfig, + r=128, + lora_alpha=256, + lora_dropout=0.05, + target_modules=None, + task_type='CAUSAL_LM'), + # uncomment the following lines if you want to use LoRA in the visual encoder # noqa + # visual_encoder_lora=dict( + # type=LoraConfig, r=64, lora_alpha=16, lora_dropout=0.05, + # target_modules=['attn.qkv', 'attn.proj', 'mlp.fc1', 'mlp.fc2']) +) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=InternVL_V1_5_Dataset, + model_path=path, + data_paths=data_path, + image_folders=image_folder, + template=prompt_template, + max_length=max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, +
by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during training (optional) +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=path, + trust_remote_code=True) + +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + save_optimizer=False, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Default to a random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_internlm2_5_8b_qlora_finetune.py b/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_internlm2_5_8b_qlora_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..b3d04bb43c927d40dc4e314c18d2c536f0e28df4 --- /dev/null +++ b/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_internlm2_5_8b_qlora_finetune.py @@ -0,0 +1,185 @@ +# Copyright (c) OpenMMLab. All rights reserved.
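+# Post-training sketch (hedged; paths are placeholders): LoRA/QLoRA checkpoints saved by this config are usually exported before deployment, e.g. +#   xtuner convert pth_to_hf <this_config>.py <work_dir>/iter_1000.pth <adapter_dir> +# which converts the saved .pth adapter into a Hugging Face-format directory.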
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import AutoTokenizer + +from xtuner.dataset import InternVL_V1_5_Dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import InternVL_V1_5 +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +path = 'OpenGVLab/InternVL2-8B' + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 8192 + +# Scheduler & Optimizer +batch_size = 8 # per_device +accumulative_counts = 2 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +# official 1024 -> 4e-5 +lr = 1e-6 +betas = (0.9, 0.999) +weight_decay = 0.05 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 1000 +save_total_limit = 1 # Maximum checkpoints to keep (-1 means unlimited) + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +model = dict( + type=InternVL_V1_5, + model_path=path, + freeze_llm=True, + freeze_visual_encoder=True, + quantization_llm=True, # or False + quantization_vit=False, # or True and uncomment visual_encoder_lora + # comment the following lines if you don't want to use LoRA in the LLM + llm_lora=dict( + type=LoraConfig, + r=128, + lora_alpha=256, + lora_dropout=0.05, + target_modules=None, + task_type='CAUSAL_LM'), + # uncomment the following lines if you want to use LoRA in the visual encoder # noqa + # visual_encoder_lora=dict( + # type=LoraConfig, r=64, lora_alpha=16, lora_dropout=0.05, + # target_modules=['attn.qkv', 'attn.proj', 'mlp.fc1', 'mlp.fc2']) +) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=InternVL_V1_5_Dataset, + model_path=path, + data_paths=data_path, + image_folders=image_folder, + template=prompt_template, + max_length=max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information:
https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during training (optional) +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=path, + trust_remote_code=True) + +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + save_optimizer=False, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Default to a random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_phi3_4b_finetune.py b/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_phi3_4b_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..41a7125693867d68a67f63352d9909d808f1e14f --- /dev/null +++ b/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_phi3_4b_finetune.py @@ -0,0 +1,170 @@ +# Copyright (c) OpenMMLab. All rights reserved.
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoTokenizer + +from xtuner.dataset import InternVL_V1_5_Dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import InternVL_V1_5 +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +path = 'OpenGVLab/InternVL2-4B' + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.phi3_chat +max_length = 8192 + +# Scheduler & Optimizer +batch_size = 4 # per_device +accumulative_counts = 4 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +# official 1024 -> 4e-5 +lr = 1e-6 +betas = (0.9, 0.999) +weight_decay = 0.05 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 1000 +save_total_limit = 1 # Maximum checkpoints to keep (-1 means unlimited) + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +model = dict( + type=InternVL_V1_5, + model_path=path, + freeze_llm=False, + freeze_visual_encoder=True # or False +) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=InternVL_V1_5_Dataset, + model_path=path, + data_paths=data_path, + image_folders=image_folder, + template=prompt_template, + max_length=max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # 
+####################################################################### +# Log the dialogue periodically during training (optional) +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=path, + trust_remote_code=True) + +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + save_optimizer=False, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Default to a random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_phi3_4b_lora_finetune.py b/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_phi3_4b_lora_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..64a20450fc77069adbd5ad3a57d936b45d45d9b5 --- /dev/null +++ b/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_phi3_4b_lora_finetune.py @@ -0,0 +1,183 @@ +# Copyright (c) OpenMMLab. All rights reserved.
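+# LoRA sizing note for the adapter below (standard peft convention, stated as background): the update applied to a frozen weight W is W + (lora_alpha / r) * B @ A, so r=128 with lora_alpha=256 scales the adapter output by 256 / 128 = 2.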
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import AutoTokenizer + +from xtuner.dataset import InternVL_V1_5_Dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import InternVL_V1_5 +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +path = 'OpenGVLab/InternVL2-4B' + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.phi3_chat +max_length = 8192 + +# Scheduler & Optimizer +batch_size = 8 # per_device +accumulative_counts = 2 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +# official 1024 -> 4e-5 +lr = 1e-6 +betas = (0.9, 0.999) +weight_decay = 0.05 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 1000 +save_total_limit = 1 # Maximum checkpoints to keep (-1 means unlimited) + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +model = dict( + type=InternVL_V1_5, + model_path=path, + freeze_llm=True, + freeze_visual_encoder=True, + # comment the following lines if you don't want to use LoRA in the LLM + llm_lora=dict( + type=LoraConfig, + r=128, + lora_alpha=256, + lora_dropout=0.05, + target_modules=None, + task_type='CAUSAL_LM'), + # uncomment the following lines if you want to use LoRA in the visual encoder # noqa + # visual_encoder_lora=dict( + # type=LoraConfig, r=64, lora_alpha=16, lora_dropout=0.05, + # target_modules=['attn.qkv', 'attn.proj', 'mlp.fc1', 'mlp.fc2']) +) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=InternVL_V1_5_Dataset, + model_path=path, + data_paths=data_path, + image_folders=image_folder, + template=prompt_template, + max_length=max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True,
+ begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during training (optional) +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=path, + trust_remote_code=True) + +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + save_optimizer=False, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Default to a random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_phi3_4b_qlora_finetune.py b/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_phi3_4b_qlora_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..8302fa5cc2d07ce4768b31d0a3d5ff96b0850d54 --- /dev/null +++ b/data/xtuner/xtuner/configs/internvl/v2/internvl_v2_phi3_4b_qlora_finetune.py @@ -0,0 +1,185 @@ +# Copyright (c) OpenMMLab. All rights reserved.
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import AutoTokenizer + +from xtuner.dataset import InternVL_V1_5_Dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import InternVL_V1_5 +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +path = 'OpenGVLab/InternVL2-4B' + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.phi3_chat +max_length = 8192 + +# Scheduler & Optimizer +batch_size = 8 # per_device +accumulative_counts = 2 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +# official 1024 -> 4e-5 +lr = 1e-6 +betas = (0.9, 0.999) +weight_decay = 0.05 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 1000 +save_total_limit = 1 # Maximum checkpoints to keep (-1 means unlimited) + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +model = dict( + type=InternVL_V1_5, + model_path=path, + freeze_llm=True, + freeze_visual_encoder=True, + quantization_llm=True, # or False + quantization_vit=False, # or True and uncomment visual_encoder_lora + # comment the following lines if you don't want to use LoRA in the LLM + llm_lora=dict( + type=LoraConfig, + r=128, + lora_alpha=256, + lora_dropout=0.05, + target_modules=None, + task_type='CAUSAL_LM'), + # uncomment the following lines if you want to use LoRA in the visual encoder # noqa + # visual_encoder_lora=dict( + # type=LoraConfig, r=64, lora_alpha=16, lora_dropout=0.05, + # target_modules=['attn.qkv', 'attn.proj', 'mlp.fc1', 'mlp.fc2']) +) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=InternVL_V1_5_Dataset, + model_path=path, + data_paths=data_path, + image_folders=image_folder, + template=prompt_template, + max_length=max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information:
https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during training (optional) +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=path, + trust_remote_code=True) + +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + save_optimizer=False, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Default to a random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama2_70b/llama2_70b_full_wizardlm_e1.py b/data/xtuner/xtuner/configs/llama/llama2_70b/llama2_70b_full_wizardlm_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..2a2f481a49dcea8ccf91a6a4ffb53f9a0a4ef576 --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_70b/llama2_70b_full_wizardlm_e1.py @@ -0,0 +1,201 @@ +# Copyright (c) OpenMMLab. All rights reserved.
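+# Reading note for the parallel settings below: when sequence_parallel_size > 1, the config swaps DefaultSampler for SequenceParallelSampler and multiplies accumulative_counts by the same factor, keeping the global batch size (1 bs * 4 acc * 32 gpus = 128, per the inline comment) constant.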
+from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import template_map_fn_factory, wizardlm_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + ThroughputHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-70b-hf' +use_varlen_attn = False + +# Data +data_path = 'WizardLM/WizardLM_evol_instruct_V2_196k' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 4 # 1bs * 4acc * 32gpu = 128 batchsize +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=wizardlm_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer #
+####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', +) + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template), + dict(type=ThroughputHook) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama2_70b/llama2_70b_int8_lora_open_platypus_e1.py b/data/xtuner/xtuner/configs/llama/llama2_70b/llama2_70b_int8_lora_open_platypus_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..27f8b262aa937e2ec2dada796d05da6de8099048 --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_70b/llama2_70b_int8_lora_open_platypus_e1.py @@ -0,0 +1,211 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
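The two `param_scheduler` entries above chain linear warmup into cosine decay; `convert_to_iter_based=True` translates the epoch-valued `begin`/`end` boundaries into iterations. A self-contained approximation of the resulting curve (`iters_per_epoch` is an invented number for illustration; each config supplies its own `lr`):

import math

def lr_at(it, base_lr=2e-5, max_epochs=3, warmup_ratio=0.03,
          iters_per_epoch=1000, start_factor=1e-5, eta_min=0.0):
    """Approximate LinearLR warmup followed by CosineAnnealingLR decay."""
    total = max_epochs * iters_per_epoch
    warmup = int(warmup_ratio * total)
    if it < warmup:
        # LinearLR: multiplicative factor ramps from start_factor to 1.0
        factor = start_factor + (1 - start_factor) * it / max(warmup, 1)
        return base_lr * factor
    # CosineAnnealingLR: decay from base_lr to eta_min over the remainder
    progress = (it - warmup) / max(total - warmup, 1)
    return eta_min + (base_lr - eta_min) * 0.5 * (1 + math.cos(math.pi * progress))

# With these toy numbers: lr_at(0) ≈ 2e-10, lr_at(90) == 2e-5 at the
# warmup boundary, then a cosine glide back toward eta_min.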
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-70b-hf' +use_varlen_attn = False + +# Data +data_path = 'garage-bAInd/Open-Platypus' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 3e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + load_in_8bit=True), + lora=dict( + type=LoraConfig, + r=16, + lora_alpha=16, + lora_dropout=0.05, + target_modules=['gate_proj', 'down_proj', 'up_proj'], + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + 
sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama2_70b/llama2_70b_int8_lora_open_platypus_e1_hf.py b/data/xtuner/xtuner/configs/llama/llama2_70b/llama2_70b_int8_lora_open_platypus_e1_hf.py new file mode 100644 index 0000000000000000000000000000000000000000..1819ea5448988f15b47a26141b090504b0e077d2 --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_70b/llama2_70b_int8_lora_open_platypus_e1_hf.py @@ -0,0 +1,71 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
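The `*_hf` variant that follows expresses the same int8 LoRA recipe for the plain `transformers` `Trainer`. Outside of xtuner, the model half of that recipe is a few lines of `peft`; this sketch mirrors the config's values and is not part of the patch:

import torch
from peft import LoraConfig, get_peft_model
from transformers import AutoModelForCausalLM

model = AutoModelForCausalLM.from_pretrained(
    'meta-llama/Llama-2-70b-hf',     # checkpoint named in the config
    torch_dtype=torch.float16,
    load_in_8bit=True)               # bitsandbytes int8 weights
lora_cfg = LoraConfig(
    r=16, lora_alpha=16, lora_dropout=0.05,
    target_modules=['gate_proj', 'down_proj', 'up_proj'],
    bias='none', task_type='CAUSAL_LM')
model = get_peft_model(model, lora_cfg)  # freeze the base, train adapters only
model.print_trainable_parameters()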
+import torch +from datasets import load_dataset +from peft import LoraConfig +from transformers import (AutoModelForCausalLM, AutoTokenizer, Trainer, + TrainingArguments) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.utils import PROMPT_TEMPLATE + +framework = 'huggingface' +pretrained_model_name_or_path = 'meta-llama/Llama-2-70b-hf' +dataset_name_or_path = 'garage-bAInd/Open-Platypus' +max_length = 2048 +pack_to_max_length = True +prompt_template = PROMPT_TEMPLATE.llama2_chat + +trainer = Trainer + +training_args = dict( + type=TrainingArguments, + do_train=True, + learning_rate=3e-4, + weight_decay=0, + lr_scheduler_type='cosine', + warmup_steps=100, + per_device_train_batch_size=1, + gradient_accumulation_steps=16, + num_train_epochs=1, + fp16=True, + logging_steps=1, + optim='adamw_torch', + save_strategy='steps', + save_steps=1000, + save_total_limit=2, + ddp_find_unused_parameters=False) + +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + load_in_8bit=True) + +lora = dict( + type=LoraConfig, + r=16, + lora_alpha=16, + lora_dropout=0.05, + target_modules=['gate_proj', 'down_proj', 'up_proj'], + bias='none', + task_type='CAUSAL_LM') + +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=dataset_name_or_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) diff --git a/data/xtuner/xtuner/configs/llama/llama2_70b/llama2_70b_qlora_open_platypus_e1.py b/data/xtuner/xtuner/configs/llama/llama2_70b/llama2_70b_qlora_open_platypus_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..75278d3c43c08dbaa835c9bb864448e50cf34abe --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_70b/llama2_70b_qlora_open_platypus_e1.py @@ -0,0 +1,220 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
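The QLoRA config that follows declares 4-bit NF4 quantization through a nested `BitsAndBytesConfig` dict. Written directly against the `transformers` API, the equivalent load looks roughly like this (a sketch, not patch content):

import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

bnb = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type='nf4',             # 4-bit NormalFloat weights
    bnb_4bit_use_double_quant=True,        # also quantize the quant constants
    bnb_4bit_compute_dtype=torch.float16)  # matmuls still run in fp16
model = AutoModelForCausalLM.from_pretrained(
    'meta-llama/Llama-2-70b-hf',
    quantization_config=bnb,
    torch_dtype=torch.float16)
# LoRA adapters (r=64 in these QLoRA configs) are then attached on top.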
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-70b-hf' +use_varlen_attn = False + +# Data +data_path = 'garage-bAInd/Open-Platypus' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 3e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + target_modules=['gate_proj', 'down_proj', 'up_proj'], + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + 
pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
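+    # DistSamplerSeedHook calls set_epoch() on the sampler at each epoch so
+    # all ranks reshuffle consistently instead of repeating one fixed order.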
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama2_70b/llama2_70b_qlora_open_platypus_e1_hf.py b/data/xtuner/xtuner/configs/llama/llama2_70b/llama2_70b_qlora_open_platypus_e1_hf.py new file mode 100644 index 0000000000000000000000000000000000000000..7ee147c03bf807767a8473856da243aae8c0630f --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_70b/llama2_70b_qlora_open_platypus_e1_hf.py @@ -0,0 +1,79 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from peft import LoraConfig +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig, Trainer, TrainingArguments) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.utils import PROMPT_TEMPLATE + +framework = 'huggingface' +pretrained_model_name_or_path = 'meta-llama/Llama-2-70b-hf' +dataset_name_or_path = 'garage-bAInd/Open-Platypus' +max_length = 2048 +pack_to_max_length = True +prompt_template = PROMPT_TEMPLATE.llama2_chat + +trainer = Trainer + +training_args = dict( + type=TrainingArguments, + do_train=True, + learning_rate=3e-4, + weight_decay=0, + lr_scheduler_type='cosine', + warmup_steps=100, + per_device_train_batch_size=1, + gradient_accumulation_steps=16, + num_train_epochs=1, + fp16=True, + logging_steps=1, + optim='adamw_torch', + save_strategy='steps', + save_steps=1000, + save_total_limit=2, + ddp_find_unused_parameters=False) + +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')) + +lora = dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + target_modules=['gate_proj', 'down_proj', 'up_proj'], + bias='none', + task_type='CAUSAL_LM') + +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=dataset_name_or_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_full_pgbooks_400iters_sp1.py b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_full_pgbooks_400iters_sp1.py new file mode 100644 index 
0000000000000000000000000000000000000000..e45badc09ebe408fb351160833636ae6b848e14b --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_full_pgbooks_400iters_sp1.py @@ -0,0 +1,203 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.engine.hooks import (DatasetInfoHook, ThroughputHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-hf' +use_varlen_attn = False +sequence_parallel_size = 1 + +# Data +data_path = 'emozilla/pg_books-tokenized-bos-eos-chunked-65536' +data_files = [ + 'data/train-00000-of-00136-877a1768c20d5900.parquet', + 'data/train-00001-of-00136-70d7d139dca61754.parquet', + 'data/train-00002-of-00136-62d53594e098f3d8.parquet', + 'data/train-00003-of-00136-8bd300fecc4c720e.parquet', + 'data/train-00004-of-00136-2a9456b5f975ae95.parquet', + 'data/train-00005-of-00136-ca38cf7907bb7555.parquet', + 'data/train-00006-of-00136-1ae2e4c63f3966da.parquet', + 'data/train-00007-of-00136-a00cc39a4ee65ab6.parquet', +] +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 65536 +max_position_embeddings = 65536 +pack_to_max_length = False + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 8 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.05 + +# Save +save_steps = 500 +save_total_limit = 1 # Maximum checkpoints to keep (-1 means unlimited) + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + max_position_embeddings=max_position_embeddings, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.bfloat16, + attn_implementation='flash_attention_2')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path=data_path, + data_files=data_files, + ignore_verifications=True), + do_dataset_tokenization=False, + remove_unused_columns=True, + 
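+    # The pg_books shards ship pre-tokenized (BOS/EOS added, chunked to
+    # 65536 tokens), so do_dataset_tokenization=False skips re-encoding and
+    # each sample keeps its full 65536-token length.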
pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1 / 40, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=lr * 0.15, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict(type=ThroughputHook) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=1), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + save_optimizer=False, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
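+    # Logging every iteration (interval=1), together with the window_size=1
+    # log processor at the bottom of this file, reports per-step loss and
+    # tflops from ThroughputHook, since 65536-token steps are slow.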
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict( + by_epoch=False, + window_size=1, + mean_pattern=r'.*(loss|time|data_time|grad_norm|tflops).*') diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_full_pgbooks_400iters_sp4.py b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_full_pgbooks_400iters_sp4.py new file mode 100644 index 0000000000000000000000000000000000000000..ceefa96f2cfdb8a2b0e6415559729847fe8c27aa --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_full_pgbooks_400iters_sp4.py @@ -0,0 +1,202 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.engine.hooks import (DatasetInfoHook, ThroughputHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-hf' +use_varlen_attn = False + +# Data +data_path = 'emozilla/pg_books-tokenized-bos-eos-chunked-65536' +data_files = [ + 'data/train-00000-of-00136-877a1768c20d5900.parquet', + 'data/train-00001-of-00136-70d7d139dca61754.parquet', + 'data/train-00002-of-00136-62d53594e098f3d8.parquet', + 'data/train-00003-of-00136-8bd300fecc4c720e.parquet', + 'data/train-00004-of-00136-2a9456b5f975ae95.parquet', + 'data/train-00005-of-00136-ca38cf7907bb7555.parquet', + 'data/train-00006-of-00136-1ae2e4c63f3966da.parquet', + 'data/train-00007-of-00136-a00cc39a4ee65ab6.parquet', +] +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 65536 +max_position_embeddings = 65536 +pack_to_max_length = False + +# parallel +sequence_parallel_size = 4 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 8 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.05 + +# Save +save_steps = 500 +save_total_limit = 1 # Maximum checkpoints to keep (-1 means unlimited) + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### 
+tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + max_position_embeddings=max_position_embeddings, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.bfloat16, + attn_implementation='flash_attention_2')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path=data_path, + data_files=data_files, + ignore_verifications=True), + do_dataset_tokenization=False, + remove_unused_columns=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1 / 40, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=lr * 0.15, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict(type=ThroughputHook) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=1), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + save_optimizer=False, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
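+    # Sequence parallel: with sequence_parallel_size=4 the sampler above
+    # becomes SequenceParallelSampler and each group of 4 ranks shares one
+    # 65536-token sample, shrinking the data-parallel world 4x. Scaling
+    # accumulative_counts by 4 (8 -> 32) keeps the global batch unchanged,
+    # e.g. on a hypothetical 32-GPU run: 1 bs * 32 acc * (32/4) groups = 256
+    # samples, the same as the sp1 recipe's 1 * 8 * 32.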
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict( + by_epoch=False, + window_size=1, + mean_pattern=r'.*(loss|time|data_time|grad_norm|tflops).*') diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_full_wizardlm_e1.py b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_full_wizardlm_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..0418b1d8fe0d640130f5cbfd8b1ac277d7db526d --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_full_wizardlm_e1.py @@ -0,0 +1,201 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import template_map_fn_factory, wizardlm_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + ThroughputHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-hf' +use_varlen_attn = False + +# Data +data_path = 'WizardLM/WizardLM_evol_instruct_V2_196k' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 2 # per_device +accumulative_counts = 16 # 2bs * 16acc * 4gpu = 128 batchsize +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 200 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + 
use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=wizardlm_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template), + dict(type=ThroughputHook) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..00422406cc9a0a40e86b937f367da756c69a2ff9 --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_alpaca_e3.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-hf' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + 
type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
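+    # With by_epoch=False the interval counts training iterations, so a
+    # checkpoint is written every save_steps=500 iterations and only the
+    # newest save_total_limit=2 are kept on disk.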
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_alpaca_enzh_e3.py b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_alpaca_enzh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..eae023551941173607edd35d3b74d6d69a84a9cd --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_alpaca_enzh_e3.py @@ -0,0 +1,236 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-hf' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer 
= dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + 
tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_alpaca_enzh_oasst1_e3.py b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_alpaca_enzh_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..5de71331bcb3aa6ac30c576efd10ad6a3804b2ad --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_alpaca_enzh_oasst1_e3.py @@ -0,0 +1,251 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
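The config that follows mixes three instruction datasets (alpaca_en, alpaca_zh, oasst1), each normalized by its own map_fn before concatenation. xtuner ships its own ConcatDataset; the toy below uses torch.utils.data.ConcatDataset with made-up rows purely to show the mechanics:

from torch.utils.data import ConcatDataset, Dataset

class Mapped(Dataset):
    """Apply a per-source map_fn so heterogeneous schemas line up."""
    def __init__(self, rows, map_fn):
        self.rows, self.map_fn = rows, map_fn
    def __len__(self):
        return len(self.rows)
    def __getitem__(self, i):
        return self.map_fn(self.rows[i])

en = Mapped([{'instruction': 'hi', 'output': 'hello'}], lambda r: r)
zh = Mapped([{'instruction_zh': '你好', 'output_zh': '您好'}],
            lambda r: {'instruction': r['instruction_zh'],
                       'output': r['output_zh']})
mixed = ConcatDataset([en, zh])  # one dataset; the sampler shuffles across both
assert len(mixed) == len(en) + len(zh)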
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + oasst1_map_fn, template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-hf' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +oasst1_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, 
template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+alpaca_zh = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=alpaca_zh_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=alpaca_zh_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+oasst1 = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=oasst1_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=oasst1_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataset = dict(
+    type=ConcatDataset, datasets=[alpaca_en, alpaca_zh, oasst1])
+
+sampler = SequenceParallelSampler \
+    if sequence_parallel_size > 1 else DefaultSampler
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=sampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_alpaca_zh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..e6d9f65f90f0aa299227232670dfedd10c10638e --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_alpaca_zh_e3.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_zh_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-hf' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + 
llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_zh, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_arxiv_gentitle_e3.py b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_arxiv_gentitle_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..f3451d916f6c7cd9894ee575c8df5eeab52b7ba7
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_arxiv_gentitle_e3.py
@@ -0,0 +1,254 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import arxiv_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.parallel.sequence import SequenceParallelSampler
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-hf'
+use_varlen_attn = False
+
+# Data
+# 1. Download data from https://kaggle.com/datasets/Cornell-University/arxiv
+# 2. Process data by `xtuner preprocess arxiv ${DOWNLOADED_DATA} ./data/arxiv_data.json [optional arguments]`  # noqa: E501
+data_path = './data/arxiv_data.json'
+prompt_template = PROMPT_TEMPLATE.llama2_chat
+max_length = 2048
+pack_to_max_length = True
+
+# parallel
+sequence_parallel_size = 1
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+accumulative_counts *= sequence_parallel_size
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = SYSTEM_TEMPLATE.arxiv_gentile
+evaluation_inputs = [
+    ('We present InternLM, a multilingual foundational language '
+     'model with 104B parameters. 
InternLM is pre-trained on a large ' + 'corpora with 1.6T tokens with a multi-phase progressive ' + 'process, and then fine-tuned to align with human preferences. ' + 'We also developed a training system called Uniscale-LLM for ' + 'efficient large language model training. The evaluation on a ' + 'number of benchmarks shows that InternLM achieves ' + 'state-of-the-art performance in multiple aspects, including ' + 'knowledge understanding, reading comprehension, mathematics, ' + 'and coding. With such well-rounded capabilities, InternLM ' + 'achieves outstanding performances on comprehensive exams, ' + 'including MMLU, AGIEval, C-Eval and GAOKAO-Bench, without ' + 'resorting to external tools. On these benchmarks, InternLM ' + 'not only significantly outperforms open-source models, but ' + 'also obtains superior performance compared to ChatGPT. Also, ' + 'InternLM demonstrates excellent capability of understanding ' + 'Chinese language and Chinese culture, which makes it a ' + 'suitable foundation model to support Chinese-oriented language ' + 'applications. This manuscript gives a detailed study of ' + 'our results, with benchmarks and examples across a diverse ' + 'set of knowledge domains and tasks.'), + ('In this work, we develop and release Llama 2, a collection of ' + 'pretrained and fine-tuned large language models (LLMs) ranging ' + 'in scale from 7 billion to 70 billion parameters.\nOur ' + 'fine-tuned LLMs, called LLAMA 2-CHAT, are optimized for ' + 'dialogue use cases. Our models outperform open-source chat ' + 'models on most benchmarks we tested, and based on our human ' + 'evaluations for helpfulness and safety, may be a suitable ' + 'substitute for closedsource models. We provide a detailed ' + 'description of our approach to fine-tuning and safety ' + 'improvements of LLAMA 2-CHAT in order to enable the community ' + 'to build on our work and contribute to the responsible ' + 'development of LLMs.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=arxiv_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + 
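+# SequenceParallelSampler is only needed when sequence_parallel_size > 1;
+# with the default value of 1, the plain mmengine DefaultSampler is selected.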
+sampler = SequenceParallelSampler \
+    if sequence_parallel_size > 1 else DefaultSampler
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=sampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_code_alpaca_e3.py b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_code_alpaca_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..5aa84be9fac46b57881a0a64f21e83976da127fe
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_code_alpaca_e3.py
@@ -0,0 +1,223 @@
+# Copyright (c) OpenMMLab. 
All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import code_alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-hf' +use_varlen_attn = False + +# Data +data_path = 'HuggingFaceH4/CodeAlpaca_20K' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 100 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=code_alpaca_map_fn, 
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+sampler = SequenceParallelSampler \
+    if sequence_parallel_size > 1 else DefaultSampler
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=sampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_colorist_e5.py b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_colorist_e5.py new file mode 100644 index 0000000000000000000000000000000000000000..a9ac2e71a87dd0e2af22cde2c69a780494eb07ec --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_colorist_e5.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import colors_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-hf' +use_varlen_attn = False + +# Data +data_path = 'burkelibbey/colors' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 5 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 200 +SYSTEM = SYSTEM_TEMPLATE.colorist +evaluation_inputs = [ + '请给我一个像天空一样清澈透明的蓝色。', 'Please give me a clear blue like the sky.' 
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=colors_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time 
of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_lawyer_e3.py b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_lawyer_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..98c2ff58015b276c8d8911d86f202b79b53e4342
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_lawyer_e3.py
@@ -0,0 +1,243 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import ConcatDataset, process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import (crime_kg_assitant_map_fn,
+                                    law_reference_map_fn,
+                                    template_map_fn_factory)
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.parallel.sequence import SequenceParallelSampler
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-hf'
+use_varlen_attn = False
+
+# Data
+# download data from https://github.com/LiuHC0428/LAW-GPT
+crime_kg_assitant_path = './data/CrimeKgAssitant清洗后_52k.json'
+law_reference_data_path = './data/训练数据_带法律依据_92k.json'
+prompt_template = PROMPT_TEMPLATE.llama2_chat
+max_length = 2048
+pack_to_max_length = True
+
+# parallel
+sequence_parallel_size = 1
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+accumulative_counts *= sequence_parallel_size
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the 
generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.lawyer +evaluation_inputs = ['请问离婚需要准备什么材料?', '销售鳄鱼皮包违法吗?'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +crime_kg_assitant = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=crime_kg_assitant_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=crime_kg_assitant_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +law_reference_data = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=law_reference_data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=law_reference_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[crime_kg_assitant, law_reference_data]) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + 
end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_medical_e1.py b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_medical_e1.py
new file mode 100644
index 0000000000000000000000000000000000000000..66aa31cb2d2d5c0c0fae0f50b65a4655bfe90f04
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_medical_e1.py
@@ -0,0 +1,221 @@
+# Copyright (c) OpenMMLab. All rights reserved.
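+# Single-epoch QLoRA run on the 'finetune' subset of shibing624/medical;
+# apart from PART 1 (data path, config name, max_epochs=1) it mirrors the
+# Llama-2-7B QLoRA configs above.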
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import medical_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-hf' +use_varlen_attn = False + +# Data +data_path = 'shibing624/medical' +data_config_name = 'finetune' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.medical +evaluation_inputs = [ + '我有家族遗传性的过敏,请问可以可以献血吗?', '我爷爷有高血压,请问他可以喝咖啡吗?', + '我女儿今年3岁了,从昨天晚上九点开始腹泻,到现在已经八个小时了,请问应该怎么办?' 
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path, name=data_config_name), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=medical_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = 
dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_moss_sft_all_e1.py b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_moss_sft_all_e1.py
new file mode 100644
index 0000000000000000000000000000000000000000..037cf43836930fff60fb3b8598edd7991de7b4e8
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_moss_sft_all_e1.py
@@ -0,0 +1,224 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import ConcatDataset, MOSSSFTDataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.parallel.sequence import SequenceParallelSampler
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-hf'
+bot_name = 'Llama2'
+use_varlen_attn = False
+
+# Data
+# Download data from https://huggingface.co/datasets/fnlp/moss-003-sft-data
+moss_sft_no_plugins_path = './data/moss-003-sft-no-tools.jsonl'
+moss_sft_plugins_path = './data/conversations_with_tools_with_inner_instruction_no_text2image_train_all_random_meta0.5_0.1_0.01_moss_0709.jsonl'  # noqa: E501
+max_length = 2048
+
+# parallel
+sequence_parallel_size = 1
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+accumulative_counts *= sequence_parallel_size
+dataloader_num_workers = 0
+max_epochs = 1
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
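+# (MOSS-style SFT: the system prompt and chat template below come from
+# xtuner's built-in moss_sft templates.)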
+SYSTEM = SYSTEM_TEMPLATE.moss_sft +prompt_template = PROMPT_TEMPLATE.moss_sft +evaluation_freq = 500 +evaluation_inputs = [ + '一个球体的表面积是384平方厘米,求它的体积。', '今有鸡兔同笼,上有二十头,下有六十二足, 问鸡兔各几何?', '介绍一下比尔盖茨' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +moss_sft_no_plugins = dict( + type=MOSSSFTDataset, + data_file=moss_sft_no_plugins_path, + bot_name=bot_name, + tokenizer=tokenizer, + max_length=max_length) + +moss_sft_plugins = dict( + type=MOSSSFTDataset, + data_file=moss_sft_plugins_path, + bot_name=bot_name, + tokenizer=tokenizer, + max_length=max_length) + +train_dataset = dict( + type=ConcatDataset, datasets=[moss_sft_no_plugins, moss_sft_plugins]) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + stop_words=[''], + 
evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_moss_sft_all_e2_gpu8.py b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_moss_sft_all_e2_gpu8.py new file mode 100644 index 0000000000000000000000000000000000000000..598ec5094ca289d20e9e1763f3fa07065ef044e0 --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_moss_sft_all_e2_gpu8.py @@ -0,0 +1,224 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
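The file just above trains with a per-device batch of 1 and 16 gradient-accumulation steps, while the `_e2_gpu8` variant that starts here flips this to a per-device batch of 8 with no accumulation, spread across 8 GPUs. A minimal sketch of the global-batch arithmetic, assuming one GPU for the `e1` file and eight for the `gpu8` file (the GPU counts are read off the file names, not stated in the configs):

```python
# Hypothetical helper, not part of xtuner: samples consumed per optimizer step.
def global_batch_size(per_device: int, accumulative_counts: int, num_gpus: int) -> int:
    # Each optimizer step consumes per_device * accumulative_counts samples
    # on every rank, replicated across num_gpus data-parallel ranks.
    return per_device * accumulative_counts * num_gpus

assert global_batch_size(1, 16, 1) == 16   # ..._moss_sft_all_e1.py
assert global_batch_size(8, 1, 8) == 64    # ..._moss_sft_all_e2_gpu8.py
```

Besides the batching, the `gpu8` variant also raises `max_epochs` to 2 and `dataloader_num_workers` to 2; the rest of the file is essentially identical.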
+import torch +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, MOSSSFTDataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-hf' +bot_name = 'Llama2' +use_varlen_attn = False + +# Data +# Download data from https://huggingface.co/datasets/fnlp/moss-003-sft-data +moss_sft_no_plugins_path = './data/moss-003-sft-no-tools.jsonl' +moss_sft_plugins_path = './data/conversations_with_tools_with_inner_instruction_no_text2image_train_all_random_meta0.5_0.1_0.01_moss_0709.jsonl' # noqa: E501 +max_length = 2048 + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 8 # per_device +accumulative_counts = 1 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 2 +max_epochs = 2 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +SYSTEM = SYSTEM_TEMPLATE.moss_sft +prompt_template = PROMPT_TEMPLATE.moss_sft +evaluation_freq = 500 +evaluation_inputs = [ + '一个球体的表面积是384平方厘米,求它的体积。', '今有鸡兔同笼,上有二十头,下有六十二足, 问鸡兔各几何?', '介绍一下比尔盖茨' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +moss_sft_no_plugins = dict( + type=MOSSSFTDataset, + data_file=moss_sft_no_plugins_path, + bot_name=bot_name, + tokenizer=tokenizer, + max_length=max_length) + +moss_sft_plugins = dict( + type=MOSSSFTDataset, + data_file=moss_sft_plugins_path, + 
bot_name=bot_name, + tokenizer=tokenizer, + max_length=max_length) + +train_dataset = dict( + type=ConcatDataset, datasets=[moss_sft_no_plugins, moss_sft_plugins]) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + stop_words=[''], + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_moss_sft_plugins_e1.py b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_moss_sft_plugins_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..b4130fb994a5d830d43b808bf93777d630567b81 --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_moss_sft_plugins_e1.py @@ -0,0 +1,213 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import MOSSSFTDataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-hf' +bot_name = 'Llama2' +use_varlen_attn = False + +# Data +# Download data from https://huggingface.co/datasets/fnlp/moss-003-sft-data +moss_sft_plugins_path = './data/conversations_with_tools_with_inner_instruction_no_text2image_train_all_random_meta0.5_0.1_0.01_moss_0709.jsonl' # noqa: E501 +max_length = 2048 + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +SYSTEM = SYSTEM_TEMPLATE.moss_sft +prompt_template = PROMPT_TEMPLATE.moss_sft +evaluation_freq = 500 +evaluation_inputs = [ + '一个球体的表面积是384平方厘米,求它的体积。', '今有鸡兔同笼,上有二十头,下有六十二足, 问鸡兔各几何?', '介绍一下比尔盖茨' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') 
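Note the declaration style used throughout these configs: objects are described as plain dicts whose `type` key holds a callable and whose remaining keys become its kwargs, and nothing is instantiated until the runner builds the config. As a rough eager equivalent of the `tokenizer` dict above (a sketch of what gets built, not of how xtuner builds it):

```python
from transformers import AutoTokenizer

# Eager counterpart of the lazy dict: call the `type` with the other keys.
tokenizer_obj = AutoTokenizer.from_pretrained(
    'meta-llama/Llama-2-7b-hf',   # pretrained_model_name_or_path
    trust_remote_code=True,
    padding_side='right',         # right padding suits causal-LM SFT batches
)
```

This lazy style is also what lets the same dict be reused: the `tokenizer` dict is passed again into the dataset and hook definitions further down.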
+ +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=MOSSSFTDataset, + data_file=moss_sft_plugins_path, + bot_name=bot_name, + tokenizer=tokenizer, + max_length=max_length) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + stop_words=[''], + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_msagent_react_e3_gpu8.py b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_msagent_react_e3_gpu8.py new file mode 100644 index 0000000000000000000000000000000000000000..603bfc04dd4e810594bfdd81537df8b8b1f08dfb --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_msagent_react_e3_gpu8.py @@ -0,0 +1,237 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from modelscope.msdatasets import MsDataset +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_ms_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (msagent_react_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-hf' +use_varlen_attn = False + +# Data +data_path = 'damo/MSAgent-Bench' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = False + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 8 # per_device +accumulative_counts = 1 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 2 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = ( + '你是一个可以调用外部工具的助手,可以使用的工具包括:\n' + "{{\'GoogleSearch\': \'一个可以从谷歌搜索结果的API。\\n" + '当你需要对于一个特定问题找到简短明了的回答时,可以使用它。\\n' + "输入应该是一个搜索查询。\\n\\n\'," + "\'PythonInterpreter\': \"用来执行Python代码。代码必须是一个函数,\\n" + "函数名必须得是 \'solution\',代码对应你的思考过程。代码实例格式如下:\\n" + '```python\\n# import 依赖包\\nimport xxx\\ndef solution():' + '\\n # 初始化一些变量\\n variable_names_with_real_meaning = xxx' + '\\n # 步骤一\\n mid_variable = func(variable_names_with_real_meaning)' + '\\n # 步骤 x\\n mid_variable = func(mid_variable)\\n # 最后结果' + '\\n final_answer = 
func(mid_variable)\\n return final_answer' + "\\n```\\n\"}}\n" + '如果使用工具请遵循以下格式回复:\n```\n' + 'Thought:思考你当前步骤需要解决什么问题,是否需要使用工具\n' + "Action:工具名称,你的工具必须从 [[\'GoogleSearch\', \'PythonInterpreter\']] 选择" + '\nAction Input:工具输入参数\n```\n工具返回按照以下格式回复:\n' + '```\nResponse:调用工具后的结果\n```' + '\n如果你已经知道了答案,或者你不需要工具,请遵循以下格式回复\n```' + '\nThought:给出最终答案的思考过程\nFinal Answer:最终答案\n```\n开始!\n') +evaluation_inputs = ['上海明天天气怎么样?'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_ms_dataset, + dataset=dict(type=MsDataset.load, dataset_name=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=msagent_react_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the 
training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_oasst1_512_e3.py b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_oasst1_512_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..89a4f9624350f5d3f7b87517f6f57e9bc2db2b3d --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_oasst1_512_e3.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
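All of these configs share the same two-phase learning-rate policy: `LinearLR` warms up from `start_factor * lr` over the first `warmup_ratio` fraction of training, then `CosineAnnealingLR` decays to `eta_min = 0`. A self-contained sketch of the curve this produces, under the assumption that the by-epoch bounds convert cleanly to iterations (the conversion is what `convert_to_iter_based=True` asks mmengine to do):

```python
import math

def lr_at(step: int, total_iters: int, base_lr: float = 2e-4,
          warmup_ratio: float = 0.03, start_factor: float = 1e-5) -> float:
    """Approximate LR under LinearLR warmup followed by cosine decay to 0."""
    warmup_iters = int(warmup_ratio * total_iters)
    if step < warmup_iters:
        # LinearLR: multiplier ramps from start_factor up to 1.0
        frac = step / max(warmup_iters, 1)
        return base_lr * (start_factor + (1.0 - start_factor) * frac)
    # CosineAnnealingLR: decay from base_lr to 0 over the remaining steps
    frac = (step - warmup_iters) / max(total_iters - warmup_iters, 1)
    return base_lr * 0.5 * (1.0 + math.cos(math.pi * frac))

# warmup start, end of warmup, end of training:
print(lr_at(0, 10_000), lr_at(300, 10_000), lr_at(9_999, 10_000))
```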
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-hf' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 512 +pack_to_max_length = False + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if 
sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_oasst1_e3.py b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..1ea76dbed1166de67da56dd71715fa339a7bd547 --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_oasst1_e3.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
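The `_512_e3` file above keeps one sample per (at most 512-token) sequence with `pack_to_max_length = False`; the plain `_e3` file beginning here instead packs tokenized samples back-to-back into full 2048-token blocks, trading per-sample boundaries for less padding waste. A toy illustration of the packing idea only; xtuner's real pipeline also shuffles before packing (`shuffle_before_pack=True`), carries labels alongside tokens, and can expose segment boundaries to varlen attention:

```python
from typing import Iterable, List

def pack(token_lists: Iterable[List[int]], max_length: int = 2048) -> List[List[int]]:
    """Greedily concatenate samples and slice into max_length-sized blocks."""
    blocks, current = [], []
    for tokens in token_lists:
        current.extend(tokens)
        while len(current) >= max_length:
            blocks.append(current[:max_length])
            current = current[max_length:]
    if current:
        blocks.append(current)   # trailing partial block
    return blocks

print([len(b) for b in pack([[1] * 1500, [2] * 1500, [3] * 100])])  # [2048, 1052]
```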
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-hf' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if 
sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_open_platypus_e3.py b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_open_platypus_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..cb661af57a1ff6c2ce26f280591d1379a92e1b25 --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_open_platypus_e3.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
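The nested `model` dict repeated in these files is the QLoRA recipe: the base model is loaded frozen in 4-bit NF4 with double quantization and fp16 compute, and trainable LoRA adapters (r=64, alpha=16, dropout 0.1) are attached on top. Built eagerly with the same hyperparameters, it corresponds roughly to the sketch below; xtuner's `SupervisedFinetune` additionally wires in the SFT loss and the optional varlen-attention path:

```python
import torch
from peft import LoraConfig, get_peft_model
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

base = AutoModelForCausalLM.from_pretrained(
    'meta-llama/Llama-2-7b-hf',
    torch_dtype=torch.float16,
    quantization_config=BitsAndBytesConfig(
        load_in_4bit=True,
        bnb_4bit_compute_dtype=torch.float16,
        bnb_4bit_use_double_quant=True,
        bnb_4bit_quant_type='nf4'))

lora_model = get_peft_model(base, LoraConfig(
    r=64, lora_alpha=16, lora_dropout=0.1,
    bias='none', task_type='CAUSAL_LM'))
lora_model.print_trainable_parameters()  # only the adapter weights train
```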
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-hf' +use_varlen_attn = False + +# Data +data_path = 'garage-bAInd/Open-Platypus' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = 
SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_openorca_e1.py b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_openorca_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..425cf016089fcddb6f095022b354fdc9992c0df4 --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_openorca_e1.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
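Checkpointing in these configs is iteration-based: with `by_epoch=False`, `CheckpointHook` writes a checkpoint every `save_steps = 500` optimizer steps and keeps at most `save_total_limit = 2` of them, deleting older ones as new ones land (-1 would keep everything). A toy model of that retention rule, for illustration only:

```python
def checkpoints_kept(total_iters: int, save_steps: int = 500,
                     max_keep: int = 2) -> list:
    """Iterations whose checkpoints survive a keep-last-k retention policy."""
    saved = list(range(save_steps, total_iters + 1, save_steps))
    return saved[-max_keep:] if max_keep > 0 else saved

print(checkpoints_kept(2_600))   # -> [2000, 2500]
```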
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openorca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-hf' +use_varlen_attn = False + +# Data +data_path = 'Open-Orca/OpenOrca' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 5000 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openorca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if 
sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_sql_e3.py b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_sql_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..dd1c4f3f0eda1abe4ac8f64112c95609d5f7b77e --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_sql_e3.py @@ -0,0 +1,223 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
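Each dataset plugs its own `dataset_map_fn` into `process_hf_dataset`, normalizing raw rows into xtuner's conversation format before the prompt template is applied. For the `b-mc2/sql-create-context` rows used by the config beginning here, a plausible sketch of such a mapper is shown below; the field names come from that dataset, but the exact input formatting inside xtuner's real `sql_map_fn` may differ:

```python
def sql_map_fn_sketch(example: dict) -> dict:
    """Hypothetical mapper: one SQL row -> one single-turn conversation."""
    return {
        'conversation': [{
            'input': f"{example['context']}\n{example['question']}",
            'output': example['answer'],
        }]
    }

row = {'context': 'CREATE TABLE station (name VARCHAR, lat VARCHAR)',
       'question': 'Find the names of stations with lat above 50.',
       'answer': 'SELECT name FROM station WHERE lat > 50'}
print(sql_map_fn_sketch(row)['conversation'][0]['input'])
```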
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import sql_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-hf' +use_varlen_attn = False + +# Data +data_path = 'b-mc2/sql-create-context' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.sql +evaluation_inputs = [ + ('CREATE TABLE station (name VARCHAR, lat VARCHAR, city VARCHAR)\n' + 'Find the name, latitude, and city of stations with latitude ' + 'above 50.'), + ('CREATE TABLE weather (zip_code VARCHAR, mean_visibility_miles ' + 'INTEGER)\n找到mean_visibility_miles最大的zip_code。') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=sql_map_fn, + template_map_fn=dict( + 
type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_tiny_codes_e1.py b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_tiny_codes_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..9e1106680aa4061676adee24aa04225fb7313391 --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_7b/llama2_7b_qlora_tiny_codes_e1.py @@ -0,0 +1,223 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import template_map_fn_factory, tiny_codes_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-hf' +use_varlen_attn = False + +# Data +data_path = 'nampdn-ai/tiny-codes' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + 
pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=tiny_codes_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. 
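# ------------------------------------------------------------------- #
# Note on PROMPT_TEMPLATE.llama2_chat used above: it renders turns in
# Llama-2's chat markup, with the system prompt inside <<SYS>> tags in
# the first [INST] block. A sketch of that convention (the exact
# whitespace in xtuner's template may differ):

def render_llama2_chat(system: str, user: str) -> str:
    return f"[INST] <<SYS>>\n{system}\n<</SYS>>\n\n{user} [/INST]"

print(render_llama2_chat('You are a coding assistant.',
                         'Write a hello-world program in Python.'))
# ------------------------------------------------------------------- #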
+ param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..b761b473e48aa4784e67c958a867d4fa5e1b1ad7 --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_alpaca_e3.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-chat-hf' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # 
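# ------------------------------------------------------------------- #
# Note on the config style used throughout these files: objects are
# declared lazily as dict(type=SomeClassOrCallable, **kwargs) and only
# instantiated by the mmengine runner at startup. A minimal sketch of
# that build step, assuming nested dicts carrying a 'type' key are
# built recursively (illustrative, not mmengine's actual builder):

def build_from_cfg_sketch(cfg: dict):
    cfg = dict(cfg)             # copy, so popping 'type' is side-effect free
    obj_type = cfg.pop('type')  # the class or callable to invoke
    kwargs = {
        k: build_from_cfg_sketch(v)
        if isinstance(v, dict) and 'type' in v else v
        for k, v in cfg.items()
    }
    return obj_type(**kwargs)

# e.g. build_from_cfg_sketch(dict(type=dict, a=1)) returns {'a': 1}
# ------------------------------------------------------------------- #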
+####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. 
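# ------------------------------------------------------------------- #
# Rough weight-memory arithmetic for the QLoRA setup above: the frozen
# 7B base model is held in 4-bit NF4 while only the r=64 LoRA adapters
# train in fp16. (Illustrative counts; which modules are adapted
# depends on LoraConfig.target_modules defaults, and activations,
# adapter optimizer state and quantization block overhead are ignored.)

base_params = 7e9                          # Llama-2-7B weight count
print(f'base weights  ~{base_params * 0.5 / 2**30:.1f} GiB')  # 4 bits each

layers, r, d = 32, 64, 4096
n_per_layer = 7                            # q,k,v,o + MLP projections (approx.)
lora_params = layers * n_per_layer * r * (d + d)  # r*(d_in+d_out), square assumed
print(f'LoRA adapters ~{lora_params * 2 / 2**20:.0f} MiB (fp16)')
# ------------------------------------------------------------------- #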
+ logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_alpaca_enzh_e3.py b/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_alpaca_enzh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..606eca42e1b1e29d443cc4b00799380322604809 --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_alpaca_enzh_e3.py @@ -0,0 +1,236 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-chat-hf' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please 
tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # 
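# ------------------------------------------------------------------- #
# Note on the param_scheduler above: the LR warms up linearly over the
# first warmup_ratio * max_epochs = 0.03 * 3 = 0.09 epochs (i.e. 3% of
# training), then cosine-decays to 0. A plain-PyTorch sketch of the
# same schedule shape, assuming `total_iters` training iterations
# (illustrative numbers):

import torch
from torch.optim import AdamW
from torch.optim.lr_scheduler import (CosineAnnealingLR, LinearLR,
                                      SequentialLR)

total_iters = 10_000
warmup_iters = int(0.03 * total_iters)    # warmup_ratio share of training
opt = AdamW([torch.nn.Parameter(torch.zeros(1))], lr=2e-4)
sched = SequentialLR(
    opt,
    schedulers=[
        LinearLR(opt, start_factor=1e-5, total_iters=warmup_iters),
        CosineAnnealingLR(opt, T_max=total_iters - warmup_iters, eta_min=0.0),
    ],
    milestones=[warmup_iters])
# ------------------------------------------------------------------- #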
+####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_alpaca_enzh_oasst1_e3.py b/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_alpaca_enzh_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..d05afa68c8af53f82f45ad38c9802be4a7b08cb5 --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_alpaca_enzh_oasst1_e3.py @@ -0,0 +1,251 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
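# ------------------------------------------------------------------- #
# This config mixes three corpora (alpaca_en, alpaca_zh and oasst1; see
# PART 3 below) via ConcatDataset, which indexes all sources as one
# pool so the shuffled dataloader interleaves them. A minimal sketch of
# the behaviour using PyTorch's ConcatDataset:

from torch.utils.data import ConcatDataset, Dataset

class ToyDataset(Dataset):
    def __init__(self, items):
        self.items = items
    def __len__(self):
        return len(self.items)
    def __getitem__(self, i):
        return self.items[i]

mixed = ConcatDataset([ToyDataset(['en_0', 'en_1']),
                       ToyDataset(['zh_0']),
                       ToyDataset(['oasst_0'])])
print(len(mixed), [mixed[i] for i in range(len(mixed))])
# -> 4 ['en_0', 'en_1', 'zh_0', 'oasst_0']
# ------------------------------------------------------------------- #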
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + oasst1_map_fn, template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-chat-hf' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +oasst1_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, 
template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +oasst1 = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=oasst1_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[alpaca_en, alpaca_zh, oasst1]) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
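# ------------------------------------------------------------------- #
# Note on EvaluateChatHook (configured in custom_hooks above): every
# evaluation_freq iterations it generates answers to the fixed
# evaluation_inputs so training progress can be eyeballed in the logs.
# The control flow amounts to this sketch (`generate` stands in for the
# model's decode step):

def maybe_eval_chat(iteration, every_n_iters, inputs, generate):
    if iteration % every_n_iters != 0:
        return
    for prompt in inputs:
        print(f'[iter {iteration}] {prompt!r} -> {generate(prompt)!r}')

maybe_eval_chat(500, 500,
                ['Please tell me five scenic spots in Shanghai'],
                generate=lambda p: '<model output>')
# ------------------------------------------------------------------- #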
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_alpaca_zh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..536e375bdcc9b2ab94d1aee2ebc29274fbd5c099 --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_alpaca_zh_e3.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_zh_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-chat-hf' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + 
use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_zh, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
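# ------------------------------------------------------------------- #
# Note on the checkpoint entry that follows: with by_epoch=False the
# hook counts iterations, so a checkpoint lands every save_steps=500
# steps and max_keep_ckpts=2 retains only the two newest. A sketch of
# that retention policy (hypothetical helper, not xtuner code):

from collections import deque

def checkpoint_rotation(total_iters, save_steps, max_keep):
    kept = deque(maxlen=max_keep)   # oldest entry falls out automatically
    for it in range(1, total_iters + 1):
        if it % save_steps == 0:
            kept.append(f'iter_{it}.pth')
    return list(kept)

print(checkpoint_rotation(2100, 500, 2))  # ['iter_1500.pth', 'iter_2000.pth']
# ------------------------------------------------------------------- #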
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_arxiv_gentitle_e3.py b/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_arxiv_gentitle_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..fe18efd88a92e1de7124a33534b2c8745461520b --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_arxiv_gentitle_e3.py @@ -0,0 +1,254 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import arxiv_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-chat-hf' +use_varlen_attn = False + +# Data +# 1. Download data from https://kaggle.com/datasets/Cornell-University/arxiv +# 2. Process data by `xtuner preprocess arxiv ${DOWNLOADED_DATA} ./data/arxiv_data.json [optional arguments]` # noqa: E501 +data_path = './data/arxiv_data.json' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.arxiv_gentile +evaluation_inputs = [ + ('We present InternLM, a multilingual foundational language ' + 'model with 104B parameters. 
InternLM is pre-trained on a large ' + 'corpora with 1.6T tokens with a multi-phase progressive ' + 'process, and then fine-tuned to align with human preferences. ' + 'We also developed a training system called Uniscale-LLM for ' + 'efficient large language model training. The evaluation on a ' + 'number of benchmarks shows that InternLM achieves ' + 'state-of-the-art performance in multiple aspects, including ' + 'knowledge understanding, reading comprehension, mathematics, ' + 'and coding. With such well-rounded capabilities, InternLM ' + 'achieves outstanding performances on comprehensive exams, ' + 'including MMLU, AGIEval, C-Eval and GAOKAO-Bench, without ' + 'resorting to external tools. On these benchmarks, InternLM ' + 'not only significantly outperforms open-source models, but ' + 'also obtains superior performance compared to ChatGPT. Also, ' + 'InternLM demonstrates excellent capability of understanding ' + 'Chinese language and Chinese culture, which makes it a ' + 'suitable foundation model to support Chinese-oriented language ' + 'applications. This manuscript gives a detailed study of ' + 'our results, with benchmarks and examples across a diverse ' + 'set of knowledge domains and tasks.'), + ('In this work, we develop and release Llama 2, a collection of ' + 'pretrained and fine-tuned large language models (LLMs) ranging ' + 'in scale from 7 billion to 70 billion parameters.\nOur ' + 'fine-tuned LLMs, called LLAMA 2-CHAT, are optimized for ' + 'dialogue use cases. Our models outperform open-source chat ' + 'models on most benchmarks we tested, and based on our human ' + 'evaluations for helpfulness and safety, may be a suitable ' + 'substitute for closedsource models. We provide a detailed ' + 'description of our approach to fine-tuning and safety ' + 'improvements of LLAMA 2-CHAT in order to enable the community ' + 'to build on our work and contribute to the responsible ' + 'development of LLMs.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=arxiv_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + 
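# ------------------------------------------------------------------- #
# Unlike the Hub-hosted datasets in the sibling configs, train_dataset
# above reads a local JSON file produced by `xtuner preprocess arxiv`,
# through the datasets library's generic 'json' loader. A minimal
# sketch of that loading path (field names here are illustrative):

import json
import tempfile

from datasets import load_dataset

rows = [{'abstract': 'We present ...', 'title': 'InternLM'}]
with tempfile.NamedTemporaryFile('w', suffix='.json', delete=False) as f:
    for row in rows:                      # one JSON object per line
        f.write(json.dumps(row) + '\n')
    path = f.name

ds = load_dataset('json', data_files=dict(train=path))
print(ds['train'][0]['title'])            # -> InternLM
# ------------------------------------------------------------------- #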
+sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
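# ------------------------------------------------------------------- #
# Note on the batch settings used throughout these configs: with
# batch_size=1 per device and accumulative_counts=16, gradients are
# accumulated over 16 forward/backward passes before each optimizer
# step. Worked numbers (world_size=1 assumed):

batch_size, accumulative_counts, world_size = 1, 16, 1
max_length = 2048
effective_batch = batch_size * accumulative_counts * world_size
print(effective_batch)                 # 16 sequences per optimizer step
print(effective_batch * max_length)    # 32768 tokens when packed to max_length
# ------------------------------------------------------------------- #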
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_code_alpaca_e3.py b/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_code_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..546e0b6b4b0c13c5e195d3ba03335c2435d6448c --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_code_alpaca_e3.py @@ -0,0 +1,223 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import code_alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-chat-hf' +use_varlen_attn = False + +# Data +data_path = 'HuggingFaceH4/CodeAlpaca_20K' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 100 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + 
type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=code_alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. 
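# ------------------------------------------------------------------- #
# Note on pack_to_max_length=True (PART 3 above): tokenized samples are
# concatenated back-to-back and the stream is cut into fixed max_length
# windows, so no padding is wasted. A naive sketch of the packing step
# (xtuner also shuffles first and, with use_varlen_attn, tracks
# per-sample attention boundaries; both omitted here):

def pack_sequences(samples, max_length):
    stream = [tok for sample in samples for tok in sample]  # concatenate
    # Slice into fixed-size windows; the ragged tail is dropped here.
    return [stream[i:i + max_length]
            for i in range(0, len(stream) - max_length + 1, max_length)]

print(pack_sequences([[1, 2, 3], [4, 5], [6, 7, 8, 9]], 4))
# -> [[1, 2, 3, 4], [5, 6, 7, 8]]
# ------------------------------------------------------------------- #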
+ param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_colorist_e5.py b/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_colorist_e5.py new file mode 100644 index 0000000000000000000000000000000000000000..398f24b6dc31393686f5bb2d069f8dbfc24c425c --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_colorist_e5.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import colors_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-chat-hf' +use_varlen_attn = False + +# Data +data_path = 'burkelibbey/colors' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 5 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 200 +SYSTEM = SYSTEM_TEMPLATE.colorist +evaluation_inputs = [ + '请给我一个像天空一样清澈透明的蓝色。', 'Please give me a clear blue like the sky.' 
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=colors_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time 
of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to a random seed with `deterministic` disabled
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_lawyer_e3.py b/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_lawyer_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..da3b5d9dd42343f201911fc54b21bba6c350d415
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_lawyer_e3.py
@@ -0,0 +1,243 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import ConcatDataset, process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import (crime_kg_assitant_map_fn,
+                                    law_reference_map_fn,
+                                    template_map_fn_factory)
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.parallel.sequence import SequenceParallelSampler
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-chat-hf'
+use_varlen_attn = False
+
+# Data
+# download data from https://github.com/LiuHC0428/LAW-GPT
+crime_kg_assitant_path = './data/CrimeKgAssitant清洗后_52k.json'
+law_reference_data_path = './data/训练数据_带法律依据_92k.json'
+prompt_template = PROMPT_TEMPLATE.llama2_chat
+max_length = 2048
+pack_to_max_length = True
+
+# parallel
+sequence_parallel_size = 1
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+accumulative_counts *= sequence_parallel_size
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means
unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.lawyer +evaluation_inputs = ['请问离婚需要准备什么材料?', '销售鳄鱼皮包违法吗?'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +crime_kg_assitant = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=crime_kg_assitant_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=crime_kg_assitant_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +law_reference_data = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=law_reference_data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=law_reference_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[crime_kg_assitant, law_reference_data]) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + 
begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_medical_e1.py b/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_medical_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..c326602812fda0634f860091215967db8a0ad55e --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_medical_e1.py @@ -0,0 +1,221 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
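All of the configs in this diff share one QLoRA recipe: the base model is loaded through a 4-bit NF4 `BitsAndBytesConfig`, and a `LoraConfig(r=64, lora_alpha=16)` adapter is trained on top, with everything expressed as lazy `dict(type=...)` specs that xtuner builds at runtime. As a minimal standalone sketch of the same stack (eager code using only `transformers` and `peft`, not xtuner's actual build path; the model name is copied from the configs above):

```python
import torch
from peft import LoraConfig, get_peft_model
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

# 4-bit NF4 quantization with fp16 compute, mirroring the config dicts.
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type='nf4',
    bnb_4bit_compute_dtype=torch.float16,
    bnb_4bit_use_double_quant=True,
)

# Frozen, quantized base model (name taken from the configs; any causal LM works).
base = AutoModelForCausalLM.from_pretrained(
    'meta-llama/Llama-2-7b-chat-hf',
    quantization_config=bnb_config,
    torch_dtype=torch.float16,
    trust_remote_code=True,
)

# Attach the trainable low-rank adapter; only the LoRA weights receive gradients.
lora_config = LoraConfig(
    r=64, lora_alpha=16, lora_dropout=0.1, bias='none', task_type='CAUSAL_LM')
model = get_peft_model(base, lora_config)
model.print_trainable_parameters()  # a small fraction of the 7B base weights
```

This is why the same 7B recipe fits on a single consumer GPU: the quantized base is frozen, and only the rank-64 adapter matrices are updated in fp16.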
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import medical_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-chat-hf' +use_varlen_attn = False + +# Data +data_path = 'shibing624/medical' +data_config_name = 'finetune' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.medical +evaluation_inputs = [ + '我有家族遗传性的过敏,请问可以可以献血吗?', '我爷爷有高血压,请问他可以喝咖啡吗?', + '我女儿今年3岁了,从昨天晚上九点开始腹泻,到现在已经八个小时了,请问应该怎么办?' 
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path, name=data_config_name), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=medical_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = 
dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_oasst1_512_e3.py b/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_oasst1_512_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..b0673b8ca40c6a0549b13a310b118a2298de8963 --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_oasst1_512_e3.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-chat-hf' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 512 +pack_to_max_length = False + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell 
me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks 
+default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_oasst1_e3.py b/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..e3d1f4bb05bd8004e383dddd07456da69adb28ae --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_oasst1_e3.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-chat-hf' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please 
tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks 
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to a random seed with `deterministic` disabled
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_open_platypus_e3.py b/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_open_platypus_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..04ac0a0f4e6094399950a52719abecefacde8716
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_open_platypus_e3.py
@@ -0,0 +1,219 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.parallel.sequence import SequenceParallelSampler
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-chat-hf'
+use_varlen_attn = False
+
+# Data
+data_path = 'garage-bAInd/Open-Platypus'
+prompt_template = PROMPT_TEMPLATE.llama2_chat
+max_length = 2048
+pack_to_max_length = True
+
+# parallel
+sequence_parallel_size = 1
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+accumulative_counts *= sequence_parallel_size
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during training
+evaluation_freq = 500
+SYSTEM = SYSTEM_TEMPLATE.alpaca
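Every config in this diff pairs a `LinearLR` warmup over the first `warmup_ratio` (3%) of training with `CosineAnnealingLR` decay to zero, both specified in epochs and converted to iterations by `convert_to_iter_based=True`. A plain-Python sketch of the resulting curve (an illustration of the schedule shape only, not mmengine's implementation; `total_iters` stands for the dataloader length times `max_epochs`):

```python
import math

def lr_at(step: int, total_iters: int, lr: float = 2e-4,
          warmup_ratio: float = 0.03, start_factor: float = 1e-5) -> float:
    """LR after `step` optimizer updates: linear warmup, then cosine decay to 0."""
    warmup_iters = int(warmup_ratio * total_iters)
    if step < warmup_iters:
        # LinearLR: the scale factor ramps from start_factor up to 1.
        factor = start_factor + (1 - start_factor) * step / max(warmup_iters, 1)
        return lr * factor
    # CosineAnnealingLR with eta_min=0 over the remaining iterations.
    progress = (step - warmup_iters) / max(total_iters - warmup_iters, 1)
    return lr * 0.5 * (1 + math.cos(math.pi * progress))

# e.g. with 10_000 total iterations, warmup ends at iteration 300:
print(lr_at(0, 10_000), lr_at(300, 10_000), lr_at(10_000, 10_000))
# ~2e-9 (lr * start_factor), 2e-4 (peak), 0.0 (end of cosine decay)
```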
+evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += 
[dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_openorca_e1.py b/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_openorca_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..b72776d216410255bfd7ebae3f376219d573f48c --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_openorca_e1.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openorca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-chat-hf' +use_varlen_attn = False + +# Data +data_path = 'Open-Orca/OpenOrca' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 
5000 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openorca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += 
[dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_sql_e3.py b/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_sql_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..d5d4dbdcd80171b54f3793e596d5097ca2a1802b --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_sql_e3.py @@ -0,0 +1,223 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import sql_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-chat-hf' +use_varlen_attn = False + +# Data +data_path = 'b-mc2/sql-create-context' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq 
= 500 +SYSTEM = SYSTEM_TEMPLATE.sql +evaluation_inputs = [ + ('CREATE TABLE station (name VARCHAR, lat VARCHAR, city VARCHAR)\n' + 'Find the name, latitude, and city of stations with latitude ' + 'above 50.'), + ('CREATE TABLE weather (zip_code VARCHAR, mean_visibility_miles ' + 'INTEGER)\n找到mean_visibility_miles最大的zip_code。') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=sql_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + 
type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to a random seed with `deterministic` disabled
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_tiny_codes_e1.py b/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_tiny_codes_e1.py
new file mode 100644
index 0000000000000000000000000000000000000000..1513d4f8aa8cc3ef5f75c388b3ec197daaa105cc
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llama/llama2_7b_chat/llama2_7b_chat_qlora_tiny_codes_e1.py
@@ -0,0 +1,223 @@
+# Copyright (c) OpenMMLab. All rights reserved.
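This tiny-codes config, like most of the others here, sets `pack_to_max_length = True`, so `process_hf_dataset` concatenates tokenized samples and slices the stream into fixed 2048-token blocks rather than padding each sample individually. A simplified sketch of the idea (a hypothetical `pack_to_blocks` helper, not xtuner's implementation, which additionally tracks per-sample boundaries for loss masking and `use_varlen_attn`):

```python
from typing import Iterable

def pack_to_blocks(tokenized_samples: Iterable[list[int]],
                   max_length: int = 2048) -> list[list[int]]:
    """Concatenate token-id lists and cut the stream into max_length blocks."""
    stream: list[int] = []
    blocks: list[list[int]] = []
    for ids in tokenized_samples:
        stream.extend(ids)
        while len(stream) >= max_length:
            blocks.append(stream[:max_length])
            stream = stream[max_length:]
    return blocks  # any tail shorter than max_length is dropped in this sketch

# Three toy "samples" become one block of 8 tokens plus a discarded 2-token tail.
print(pack_to_blocks([[1, 2, 3], [4, 5, 6, 7], [8, 9, 10]], max_length=8))
```

Packing keeps every position in the batch carrying real tokens, which is why the only config that disables it (the 512-length oasst1 variant) is also the one with the short `max_length`.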
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import template_map_fn_factory, tiny_codes_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-7b-chat-hf' +use_varlen_attn = False + +# Data +data_path = 'nampdn-ai/tiny-codes' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=tiny_codes_map_fn, + template_map_fn=dict( + 
type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama3_70b_instruct/llama3_70b_instruct_qlora_alpaca_e3_2k_gpu8.py b/data/xtuner/xtuner/configs/llama/llama3_70b_instruct/llama3_70b_instruct_qlora_alpaca_e3_2k_gpu8.py new file mode 100644 index 0000000000000000000000000000000000000000..89feac44ebefc7983fc956915aed8bd013a725ec --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama3_70b_instruct/llama3_70b_instruct_qlora_alpaca_e3_2k_gpu8.py @@ -0,0 +1,220 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Meta-Llama-3-70B-Instruct' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.llama3_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 2 # total bs = 1 bs_per_device * 8 gpus * 2 acc = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 4 +max_epochs = 3 +optim_type = AdamW +lr = 1e-4 # 70B model use smaller lr +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 50 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + 
pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4', + bnb_4bit_quant_storage=torch.float16)), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. 
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/llama/llama3_8b/README.md b/data/xtuner/xtuner/configs/llama/llama3_8b/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..f77193dab2ff6c851a372c6301f3e823d495bb77
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llama/llama3_8b/README.md
@@ -0,0 +1,51 @@
+# Llama3 8B
+
+## Install
+
+```bash
+# Install the latest xtuner
+pip install -U 'xtuner[deepspeed]'
+
+# Install the latest transformers
+pip install -U transformers
+```
+
+## QLoRA Fine-tune
+
+QLoRA fine-tuning needs only a single A100-80G
+
+```bash
+xtuner train llama3_8b_instruct_qlora_alpaca_e3
+```
+
+## Full Parameter Fine-tune
+
+Full-parameter fine-tuning of Llama3 8B with an 8k context requires only 2 * A100-80G
+
+### torchrun
+
+```bash
+NPROC_PER_NODE=${GPU_NUM} xtuner train llama3_8b_instruct_full_alpaca_e3 --deepspeed deepspeed_zero2
+```
+
+### slurm
+
+```bash
+srun ${SRUN_ARGS} xtuner train llama3_8b_instruct_full_alpaca_e3 --launcher slurm --deepspeed deepspeed_zero3
+```
+
+### Speed
+
+|   Model   | Sequence Length | GPU Number |  ZeRO  | Sequence Parallel | Tokens per Second | TFLOPs |
+| :-------: | :-------------: | :--------: | :----: | :---------------: | :---------------: | :----: |
+| Llama3 8B |       8k        |     2      | ZeRO-3 |         2         |      1037.0       |  76.8  |
+| Llama3 8B |       8k        |     4      | ZeRO-3 |         1         |      2331.3       | 172.6  |
+| Llama3 8B |       8k        |     8      | ZeRO-3 |         1         |      2771.2       | 205.1  |
+
+|   Model   | Sequence Length | GPU Number |  ZeRO  | Sequence Parallel | Tokens per Second | TFLOPs |
+| :-------: | :-------------: | :--------: | :----: | :---------------: | :---------------: | :----: |
+| Llama3 8B |       8k        |     8      | ZeRO-3 |         1         |      2771.2       | 205.1  |
+| Llama3 8B |       16k       |     8      | ZeRO-3 |         2         |      2320.7       | 191.7  |
+| Llama3 8B |       32k       |     8      | ZeRO-3 |         4         |      1870.2       | 186.6  |
+| Llama3 8B |       64k       |     8      | ZeRO-3 |         8         |      1356.4       | 182.0  |
+| Llama3 8B |      128k       |     8      | ZeRO-3 |         8         |       875.7       | 177.7  |
diff --git a/data/xtuner/xtuner/configs/llama/llama3_8b/llama3_8b_full_alpaca_e3.py b/data/xtuner/xtuner/configs/llama/llama3_8b/llama3_8b_full_alpaca_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..04f2e4dabd2eaa42446f34eac695a47a6efb864c
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llama/llama3_8b/llama3_8b_full_alpaca_e3.py
@@ -0,0 +1,199 @@
+# Copyright (c) OpenMMLab. All rights reserved.
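Before the full-parameter config continues, it is worth making the batch-size arithmetic behind these files explicit. The 70B config above comments `total bs = 1 bs_per_device * 8 gpus * 2 acc = 16`; a minimal sketch of that relationship in plain Python (the GPU count is an assumption read off the `gpu8` suffix of the file name — at runtime xtuner takes it from the torchrun/slurm launcher, not from the config):

```python
# Minimal sketch of the effective-batch-size arithmetic used in these configs.
# num_gpus is an assumption taken from the `gpu8` config name; the real device
# count comes from the launcher, not the config file.
batch_size = 1           # per-device batch size, as in PART 1 Settings
accumulative_counts = 2  # gradient-accumulation steps (70B config)
num_gpus = 8             # assumed: matches the `gpu8` file-name suffix

effective_batch_size = batch_size * num_gpus * accumulative_counts
print(effective_batch_size)  # 16, matching the comment in the 70B config
```

The same product explains why the single-GPU QLoRA configs pair `batch_size = 1` with `accumulative_counts = 16`: the effective batch still comes out to 16.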
+from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Meta-Llama-3-8B' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.llama3_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # 
+####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama3_8b_instruct/llama3_8b_instruct_full_alpaca_e3.py b/data/xtuner/xtuner/configs/llama/llama3_8b_instruct/llama3_8b_instruct_full_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..613ecad1ecca6b0b1576dc1e9fbe6e3a3bf6ac1f --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama3_8b_instruct/llama3_8b_instruct_full_alpaca_e3.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
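Every config in this series pairs a `LinearLR` warmup with a `CosineAnnealingLR` decay, split at `warmup_ratio` of the training run (`convert_to_iter_based=True` makes both schedules tick per iteration rather than per epoch). As a rough sketch of the trajectory those two entries describe — plain math, no mmengine, using the full-parameter config's `lr = 2e-5`:

```python
import math

# Rough sketch of the warmup + cosine schedule: linear ramp from
# start_factor * base_lr up to base_lr over the first warmup_ratio of
# training, then cosine decay down to eta_min = 0.
def lr_at(progress, base_lr=2e-5, warmup_ratio=0.03, start_factor=1e-5):
    """progress runs from 0.0 (start of training) to 1.0 (end)."""
    if progress < warmup_ratio:
        t = progress / warmup_ratio  # position inside the warmup phase
        return base_lr * (start_factor + (1 - start_factor) * t)
    t = (progress - warmup_ratio) / (1 - warmup_ratio)  # position in decay
    return base_lr * 0.5 * (1 + math.cos(math.pi * t))

for p in (0.0, 0.03, 0.5, 1.0):
    print(f"progress {p:.2f}: lr {lr_at(p):.2e}")
```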
+from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Meta-Llama-3-8B-Instruct' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.llama3_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # 
+####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama3_8b_instruct/llama3_8b_instruct_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/llama/llama3_8b_instruct/llama3_8b_instruct_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..0373d41db9485678e46f94d7a77e8921b2a84a35 --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama3_8b_instruct/llama3_8b_instruct_qlora_alpaca_e3.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
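A pattern worth spelling out once: every `dict(type=..., ...)` in these files is a lazy config, not an instantiated object. The runner builds each dict on demand, which is why a config can reference `AutoTokenizer.from_pretrained` or `SupervisedFinetune` without downloading or allocating anything at import time. A simplified sketch of such a builder (mmengine's real one also handles registries and scopes):

```python
# Toy sketch of the mmengine-style lazy-config pattern: a dict stores a
# callable under `type` plus its kwargs; nested dicts that carry a `type`
# key are built recursively. Illustrative only.
def build(cfg):
    cfg = dict(cfg)  # copy so the original config stays untouched
    factory = cfg.pop('type')
    kwargs = {
        k: build(v) if isinstance(v, dict) and 'type' in v else v
        for k, v in cfg.items()
    }
    return factory(**kwargs)

# Stand-in types instead of AutoTokenizer/SupervisedFinetune:
cfg = dict(type=dict, a=1, nested=dict(type=list))
print(build(cfg))  # {'a': 1, 'nested': []}
```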
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Meta-Llama-3-8B-Instruct' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.llama3_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler 
= SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..80976df2397f2898b310bf218871d996b427cb97 --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_alpaca_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
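For readers who want the QLoRA recipe outside of xtuner: the `quantization_config` and `lora` blocks above map fairly directly onto plain transformers/peft calls. This is a hedged sketch of that equivalent setup, not xtuner's code path (xtuner routes the same dicts through `SupervisedFinetune` instead):

```python
import torch
from peft import LoraConfig, get_peft_model
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

# 4-bit NF4 quantization with double quantization and fp16 compute — the
# same knobs the config's quantization_config dict sets.
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_compute_dtype=torch.float16,
    bnb_4bit_use_double_quant=True,
    bnb_4bit_quant_type='nf4')

model = AutoModelForCausalLM.from_pretrained(
    'meta-llama/Meta-Llama-3-8B-Instruct',
    torch_dtype=torch.float16,
    quantization_config=bnb_config)

# Rank-64 LoRA adapters on top of the frozen 4-bit base model.
lora_config = LoraConfig(
    r=64, lora_alpha=16, lora_dropout=0.1,
    bias='none', task_type='CAUSAL_LM')
model = get_peft_model(model, lora_config)
model.print_trainable_parameters()  # only the adapters are trainable
```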
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'huggyllama/llama-7b' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=DefaultSampler, shuffle=True), + 
collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_alpaca_enzh_e3.py b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_alpaca_enzh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..fc52f0b10318cd9e2f8174fa9c95032539f77f17 --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_alpaca_enzh_e3.py @@ -0,0 +1,229 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
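All of these dataset blocks set `pack_to_max_length=True`. Conceptually, packing concatenates the tokenized samples into one stream and cuts it into fixed `max_length` blocks, so no training step wastes compute on padding. A toy sketch of the idea (illustrative only — xtuner's real implementation also shuffles before packing and, with `use_varlen_attn`, records sample boundaries for the attention kernel):

```python
# Toy sketch of sample packing: concatenate token streams, emit fixed-size
# blocks. Not xtuner's implementation.
def pack(samples, max_length=2048):
    buffer, packed = [], []
    for token_ids in samples:
        buffer.extend(token_ids)
        while len(buffer) >= max_length:
            packed.append(buffer[:max_length])
            buffer = buffer[max_length:]
    return packed  # any incomplete trailing block is dropped here

blocks = pack([[1] * 1500, [2] * 1500, [3] * 1100], max_length=2048)
print(len(blocks))  # 2 full blocks from 4100 tokens; 4 tokens left over
```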
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'huggyllama/llama-7b' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, 
path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_alpaca_enzh_oasst1_e3.py b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_alpaca_enzh_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..1e69191bc48da599ee065c47eeec6252d1eabc4b --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_alpaca_enzh_oasst1_e3.py @@ -0,0 +1,244 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + oasst1_map_fn, template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'huggyllama/llama-7b' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +oasst1_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + 
llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +oasst1 = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=oasst1_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[alpaca_en, alpaca_zh, oasst1]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + 
dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_alpaca_zh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..e5025e3f37725c5c8f5dfe4f4fac9e5950b3d123 --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_alpaca_zh_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
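The `ConcatDataset` entry above chains the three processed datasets end to end, so one epoch walks every sample of every source, and `DefaultSampler(shuffle=True)` interleaves them at the batch level. A toy illustration using torch's `ConcatDataset` (used here for illustration; the config imports xtuner's own `ConcatDataset` from `xtuner.dataset`, which exposes the same chained-indexing behaviour):

```python
from torch.utils.data import ConcatDataset, Dataset

class ToyDataset(Dataset):
    """Stand-in for a processed HF dataset."""
    def __init__(self, items):
        self.items = items
    def __len__(self):
        return len(self.items)
    def __getitem__(self, index):
        return self.items[index]

mixed = ConcatDataset([
    ToyDataset(['en'] * 3),      # stand-in for alpaca_en
    ToyDataset(['zh'] * 2),      # stand-in for alpaca_zh
    ToyDataset(['oasst1'] * 1),  # stand-in for oasst1
])
print(len(mixed))  # 6 — one epoch covers every sample of every source
print(mixed[5])    # 'oasst1' — indices run across dataset boundaries
```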
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import alpaca_zh_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'huggyllama/llama-7b'
+use_varlen_attn = False
+
+# Data
+alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese'
+prompt_template = PROMPT_TEMPLATE.default
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = SYSTEM_TEMPLATE.alpaca
+evaluation_inputs = [
+    '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai'
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                     PART 3  Dataset & Dataloader                    #
+#######################################################################
+alpaca_zh = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=alpaca_zh_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=alpaca_zh_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=alpaca_zh,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_arxiv_gentitle_e3.py b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_arxiv_gentitle_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..e32bde6c8859122291e0f69f745863d3bb5d8da3
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_arxiv_gentitle_e3.py
@@ -0,0 +1,247 @@
+# Copyright (c) OpenMMLab. All rights reserved.
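+# Data-shape sketch: after the `xtuner preprocess arxiv` step referenced
+# below, each record in ./data/arxiv_data.json is assumed to carry at
+# least an abstract and a title, roughly
+#   {"abstract": "We present InternLM ...", "title": "InternLM: ..."}
+# `arxiv_map_fn` then treats the abstract as the model input and the
+# title as the target output for the title-generation task.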
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import arxiv_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'huggyllama/llama-7b'
+use_varlen_attn = False
+
+# Data
+# 1. Download data from https://kaggle.com/datasets/Cornell-University/arxiv
+# 2. Process data by `xtuner preprocess arxiv ${DOWNLOADED_DATA} ./data/arxiv_data.json [optional arguments]`  # noqa: E501
+data_path = './data/arxiv_data.json'
+prompt_template = PROMPT_TEMPLATE.default
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = SYSTEM_TEMPLATE.arxiv_gentile
+evaluation_inputs = [
+    ('We present InternLM, a multilingual foundational language '
+     'model with 104B parameters. InternLM is pre-trained on a large '
+     'corpora with 1.6T tokens with a multi-phase progressive '
+     'process, and then fine-tuned to align with human preferences. '
+     'We also developed a training system called Uniscale-LLM for '
+     'efficient large language model training. The evaluation on a '
+     'number of benchmarks shows that InternLM achieves '
+     'state-of-the-art performance in multiple aspects, including '
+     'knowledge understanding, reading comprehension, mathematics, '
+     'and coding. With such well-rounded capabilities, InternLM '
+     'achieves outstanding performances on comprehensive exams, '
+     'including MMLU, AGIEval, C-Eval and GAOKAO-Bench, without '
+     'resorting to external tools. On these benchmarks, InternLM '
+     'not only significantly outperforms open-source models, but '
+     'also obtains superior performance compared to ChatGPT. Also, '
+     'InternLM demonstrates excellent capability of understanding '
+     'Chinese language and Chinese culture, which makes it a '
+     'suitable foundation model to support Chinese-oriented language '
+     'applications. This manuscript gives a detailed study of '
+     'our results, with benchmarks and examples across a diverse '
+     'set of knowledge domains and tasks.'),
+    ('In this work, we develop and release Llama 2, a collection of '
+     'pretrained and fine-tuned large language models (LLMs) ranging '
+     'in scale from 7 billion to 70 billion parameters.\nOur '
+     'fine-tuned LLMs, called LLAMA 2-CHAT, are optimized for '
+     'dialogue use cases. Our models outperform open-source chat '
+     'models on most benchmarks we tested, and based on our human '
+     'evaluations for helpfulness and safety, may be a suitable '
+     'substitute for closed-source models. We provide a detailed '
+     'description of our approach to fine-tuning and safety '
+     'improvements of LLAMA 2-CHAT in order to enable the community '
+     'to build on our work and contribute to the responsible '
+     'development of LLMs.')
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                     PART 3  Dataset & Dataloader                    #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(
+        type=load_dataset, path='json', data_files=dict(train=data_path)),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=arxiv_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_code_alpaca_e3.py b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_code_alpaca_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..a38ddf943937fcd927fb0e515c701abc0ffc257f
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_code_alpaca_e3.py
@@ -0,0 +1,216 @@
+# Copyright (c) OpenMMLab. All rights reserved.
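+# The dict-style `model` below is built lazily by the config system; a
+# rough eager-mode equivalent of the 4-bit QLoRA loading (a sketch,
+# assuming `transformers`, `bitsandbytes` and `peft` are installed):
+#   from transformers import AutoModelForCausalLM, BitsAndBytesConfig
+#   llm = AutoModelForCausalLM.from_pretrained(
+#       'huggyllama/llama-7b',
+#       torch_dtype=torch.float16,
+#       quantization_config=BitsAndBytesConfig(
+#           load_in_4bit=True,
+#           bnb_4bit_compute_dtype=torch.float16,
+#           bnb_4bit_use_double_quant=True,
+#           bnb_4bit_quant_type='nf4'))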
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import code_alpaca_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'huggyllama/llama-7b'
+use_varlen_attn = False
+
+# Data
+data_path = 'HuggingFaceH4/CodeAlpaca_20K'
+prompt_template = PROMPT_TEMPLATE.default
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 100
+SYSTEM = SYSTEM_TEMPLATE.coder
+evaluation_inputs = [
+    ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的'
+     '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'),
+    ('Write a Python function that takes a hexadecimal color code '
+     '(e.g., #0066ee) as input and converts it into the corresponding '
+     'red, green, and blue (RGB) color component values.')
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                     PART 3  Dataset & Dataloader                    #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=data_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=code_alpaca_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_colorist_e5.py b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_colorist_e5.py
new file mode 100644
index 0000000000000000000000000000000000000000..50c803bdae05599e458cca5a92b29c60137a695d
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_colorist_e5.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
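+# Batch-size arithmetic (sketch): with the settings below, one optimizer
+# step consumes batch_size * accumulative_counts = 1 * 16 = 16 packed
+# 2048-token sequences per device; multiply by the number of GPUs for
+# the global effective batch size when launched distributed.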
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import colors_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'huggyllama/llama-7b'
+use_varlen_attn = False
+
+# Data
+data_path = 'burkelibbey/colors'
+prompt_template = PROMPT_TEMPLATE.default
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 5
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 200
+SYSTEM = SYSTEM_TEMPLATE.colorist
+evaluation_inputs = [
+    '请给我一个像天空一样清澈透明的蓝色。', 'Please give me a clear blue like the sky.'
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                     PART 3  Dataset & Dataloader                    #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=data_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=colors_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_lawyer_e3.py b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_lawyer_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..65d922a145cc7a58fff744efaec7936a7109fd6e
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_lawyer_e3.py
@@ -0,0 +1,236 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import ConcatDataset, process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import (crime_kg_assitant_map_fn,
+                                    law_reference_map_fn,
+                                    template_map_fn_factory)
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'huggyllama/llama-7b'
+use_varlen_attn = False
+
+# Data
+# download data from https://github.com/LiuHC0428/LAW-GPT
+crime_kg_assitant_path = './data/CrimeKgAssitant清洗后_52k.json'
+law_reference_data_path = './data/训练数据_带法律依据_92k.json'
+prompt_template = PROMPT_TEMPLATE.default
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = SYSTEM_TEMPLATE.lawyer
+evaluation_inputs = ['请问离婚需要准备什么材料?', '销售鳄鱼皮包违法吗?']
+
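+# The two corpora declared above are merged in PART 3 below via
+# ConcatDataset; the same pattern as a standalone sketch (assuming the
+# xtuner package is importable):
+#   from xtuner.dataset import ConcatDataset
+#   train_dataset = dict(
+#       type=ConcatDataset, datasets=[dataset_a, dataset_b])
+# where `dataset_a` and `dataset_b` are illustrative placeholders for
+# process_hf_dataset configs like the ones defined in PART 3.
+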
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                     PART 3  Dataset & Dataloader                    #
+#######################################################################
+crime_kg_assitant = dict(
+    type=process_hf_dataset,
+    dataset=dict(
+        type=load_dataset,
+        path='json',
+        data_files=dict(train=crime_kg_assitant_path)),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=crime_kg_assitant_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+law_reference_data = dict(
+    type=process_hf_dataset,
+    dataset=dict(
+        type=load_dataset,
+        path='json',
+        data_files=dict(train=law_reference_data_path)),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=law_reference_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataset = dict(
+    type=ConcatDataset, datasets=[crime_kg_assitant, law_reference_data])
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_medical_e1.py b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_medical_e1.py
new file mode 100644
index 0000000000000000000000000000000000000000..eb9fee20822cbc27ab44b9d6a8da2c06cb5d2fa8
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_medical_e1.py
@@ -0,0 +1,214 @@
+# Copyright (c) OpenMMLab. All rights reserved.
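+# Dataset-loading sketch: `name=data_config_name` in PART 3 selects the
+# 'finetune' subset of shibing624/medical; the lazy dict there is
+# roughly equivalent to the eager call (assuming `datasets` is
+# installed):
+#   from datasets import load_dataset
+#   ds = load_dataset('shibing624/medical', name='finetune')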
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import medical_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'huggyllama/llama-7b'
+use_varlen_attn = False
+
+# Data
+data_path = 'shibing624/medical'
+data_config_name = 'finetune'
+prompt_template = PROMPT_TEMPLATE.default
+max_length = 2048
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 1
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = SYSTEM_TEMPLATE.medical
+evaluation_inputs = [
+    '我有家族遗传性的过敏,请问可以可以献血吗?', '我爷爷有高血压,请问他可以喝咖啡吗?',
+    '我女儿今年3岁了,从昨天晚上九点开始腹泻,到现在已经八个小时了,请问应该怎么办?'
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                     PART 3  Dataset & Dataloader                    #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=data_path, name=data_config_name),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=medical_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_moss_sft_all_e1.py b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_moss_sft_all_e1.py
new file mode 100644
index 0000000000000000000000000000000000000000..5cddeb99f545bceea73d4b7f56ebfccc3674d6be
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_moss_sft_all_e1.py
@@ -0,0 +1,217 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import ConcatDataset, MOSSSFTDataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'huggyllama/llama-7b'
+bot_name = 'Llama'
+use_varlen_attn = False
+
+# Data
+# Download data from https://huggingface.co/datasets/fnlp/moss-003-sft-data
+moss_sft_no_plugins_path = './data/moss-003-sft-no-tools.jsonl'
+moss_sft_plugins_path = './data/conversations_with_tools_with_inner_instruction_no_text2image_train_all_random_meta0.5_0.1_0.01_moss_0709.jsonl'  # noqa: E501
+max_length = 2048
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 1
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+SYSTEM = SYSTEM_TEMPLATE.moss_sft
+prompt_template = PROMPT_TEMPLATE.moss_sft
+evaluation_freq = 500
+evaluation_inputs = [
+    '一个球体的表面积是384平方厘米,求它的体积。', '今有鸡兔同笼,上有二十头,下有六十二足, 问鸡兔各几何?',
+    '介绍一下比尔盖茨'
+]
+
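+# Scheduler arithmetic (sketch): with max_epochs = 1 and
+# warmup_ratio = 0.03, the LinearLR warmup in PART 4 lasts
+# 0.03 * 1 = 0.03 epochs, i.e. about 3% of all iterations once
+# convert_to_iter_based=True is applied; CosineAnnealingLR then decays
+# the learning rate from 2e-4 toward 0 over the remaining ~97%.
+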
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                     PART 3  Dataset & Dataloader                    #
+#######################################################################
+moss_sft_no_plugins = dict(
+    type=MOSSSFTDataset,
+    data_file=moss_sft_no_plugins_path,
+    bot_name=bot_name,
+    tokenizer=tokenizer,
+    max_length=max_length)
+
+moss_sft_plugins = dict(
+    type=MOSSSFTDataset,
+    data_file=moss_sft_plugins_path,
+    bot_name=bot_name,
+    tokenizer=tokenizer,
+    max_length=max_length)
+
+train_dataset = dict(
+    type=ConcatDataset, datasets=[moss_sft_no_plugins, moss_sft_plugins])
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        stop_words=['<eoc>', '<eom>'],
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_moss_sft_all_e2_gpu8.py b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_moss_sft_all_e2_gpu8.py
new file mode 100644
index 0000000000000000000000000000000000000000..d97b4c9b63df8118ff5fb26c5045eb9b2bafa530
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_moss_sft_all_e2_gpu8.py
@@ -0,0 +1,217 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import ConcatDataset, MOSSSFTDataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'huggyllama/llama-7b'
+bot_name = 'Llama'
+use_varlen_attn = False
+
+# Data
+# Download data from https://huggingface.co/datasets/fnlp/moss-003-sft-data
+moss_sft_no_plugins_path = './data/moss-003-sft-no-tools.jsonl'
+moss_sft_plugins_path = './data/conversations_with_tools_with_inner_instruction_no_text2image_train_all_random_meta0.5_0.1_0.01_moss_0709.jsonl'  # noqa: E501
+max_length = 2048
+
+# Scheduler & Optimizer
+batch_size = 8  # per_device
+accumulative_counts = 1
+dataloader_num_workers = 2
+max_epochs = 2
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+SYSTEM = SYSTEM_TEMPLATE.moss_sft
+prompt_template = PROMPT_TEMPLATE.moss_sft
+evaluation_freq = 500
+evaluation_inputs = [
+    '一个球体的表面积是384平方厘米,求它的体积。', '今有鸡兔同笼,上有二十头,下有六十二足, 问鸡兔各几何?',
+    '介绍一下比尔盖茨'
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                     PART 3  Dataset & Dataloader                    #
+#######################################################################
+moss_sft_no_plugins = dict(
+    type=MOSSSFTDataset,
+    data_file=moss_sft_no_plugins_path,
+    bot_name=bot_name,
+    tokenizer=tokenizer,
+    max_length=max_length)
+
+moss_sft_plugins = dict(
+    type=MOSSSFTDataset,
+    data_file=moss_sft_plugins_path,
+    bot_name=bot_name,
+    tokenizer=tokenizer,
+    max_length=max_length)
+
+train_dataset = dict(
+    type=ConcatDataset, datasets=[moss_sft_no_plugins, moss_sft_plugins])
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        stop_words=['<eoc>', '<eom>'],
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_moss_sft_plugins_e1.py b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_moss_sft_plugins_e1.py
new file mode 100644
index 0000000000000000000000000000000000000000..043e03b5562bf6ed39bcda6c0b862e87b3c6078f
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_moss_sft_plugins_e1.py
@@ -0,0 +1,206 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import MOSSSFTDataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'huggyllama/llama-7b'
+bot_name = 'Llama'
+use_varlen_attn = False
+
+# Data
+# Download data from https://huggingface.co/datasets/fnlp/moss-003-sft-data
+moss_sft_plugins_path = './data/conversations_with_tools_with_inner_instruction_no_text2image_train_all_random_meta0.5_0.1_0.01_moss_0709.jsonl'  # noqa: E501
+max_length = 2048
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 1
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+SYSTEM = SYSTEM_TEMPLATE.moss_sft
+prompt_template = PROMPT_TEMPLATE.moss_sft
+evaluation_freq = 500
+evaluation_inputs = [
+    '一个球体的表面积是384平方厘米,求它的体积。', '今有鸡兔同笼,上有二十头,下有六十二足, 问鸡兔各几何?',
+    '介绍一下比尔盖茨'
+]
+
+#######################################################################
PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=MOSSSFTDataset, + data_file=moss_sft_plugins_path, + bot_name=bot_name, + tokenizer=tokenizer, + max_length=max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + stop_words=[''], + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_oasst1_512_e3.py b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_oasst1_512_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..8415d36aa97068a4d09d6dd434f05d2fd5d3d89e --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_oasst1_512_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'huggyllama/llama-7b' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.default +max_length = 512 +pack_to_max_length = False + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + 
type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_oasst1_e3.py b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..75589757ea44c83beaf14e629096601d9e7cd9e3 --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_oasst1_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'huggyllama/llama-7b' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + 
type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=data_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=oasst1_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_open_platypus_e3.py b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_open_platypus_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..a4ed8859825045a66292cea32e1b2715e71220ed --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_open_platypus_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'huggyllama/llama-7b' +use_varlen_attn = False + +# Data +data_path = 'garage-bAInd/Open-Platypus' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + 
quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=data_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=alpaca_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_openorca_e1.py b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_openorca_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..13e5e57d5d6b6ce4b36d6bf453c790c9c9d661d0 --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_openorca_e1.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openorca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'huggyllama/llama-7b' +use_varlen_attn = False + +# Data +data_path = 'Open-Orca/OpenOrca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 5000 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + 
load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=data_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=openorca_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_sql_e3.py b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_sql_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..bcce8a2aec73538fdcb4f0da30bd2ab38ee8cf4d --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_sql_e3.py @@ -0,0 +1,216 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import sql_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'huggyllama/llama-7b' +use_varlen_attn = False + +# Data +data_path = 'b-mc2/sql-create-context' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.sql +evaluation_inputs = [ + ('CREATE TABLE station (name VARCHAR, lat VARCHAR, city VARCHAR)\n' + 'Find the name, latitude, and city of stations with latitude ' + 'above 50.'), + ('CREATE TABLE weather (zip_code VARCHAR, mean_visibility_miles ' + 'INTEGER)\n找到mean_visibility_miles最大的zip_code。') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + 
llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=data_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=sql_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_tiny_codes_e1.py b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_tiny_codes_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..bfcd0dcd41e5845e5ad2022f591be823ed6c9ed4 --- /dev/null +++ b/data/xtuner/xtuner/configs/llama/llama_7b/llama_7b_qlora_tiny_codes_e1.py @@ -0,0 +1,216 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import template_map_fn_factory, tiny_codes_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'huggyllama/llama-7b' +use_varlen_attn = False + +# Data +data_path = 'nampdn-ai/tiny-codes' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + 
use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+train_dataset = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=data_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=tiny_codes_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llama_speed_benchmark/llama2_70b/llama2_70b_full_alpaca_enzh_128k_sp8.py b/data/xtuner/xtuner/configs/llama_speed_benchmark/llama2_70b/llama2_70b_full_alpaca_enzh_128k_sp8.py new file mode 100644 index 0000000000000000000000000000000000000000..74554b4691be492ba9863131a679f59cbdfb98af --- /dev/null +++ b/data/xtuner/xtuner/configs/llama_speed_benchmark/llama2_70b/llama2_70b_full_alpaca_enzh_128k_sp8.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import ThroughputHook, VarlenAttnArgsToMessageHubHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-70b-hf' +use_varlen_attn = False +sequence_parallel_size = 8 + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 131072 # 128k +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +# Suppose I aim to employ a training strategy using a batch size per device +# of 1 with a maximum length of `max_length` on N GPUs. +# Upon setting the sequence parallelism dimension to `SP`, +# the accumulative counts have to be adjusted to `SP` times the original value. +# This modification is essential to assure training equivalence, +# as the sequence of `max_length` length will be segmented into `SP` parts, +# with each part being allocated to its respective GPU among the `SP` GPUs +# for parallelized training. 
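+# (Worked example of the note above, using this file's numbers: with
+# max_length = 131072 and sequence_parallel_size = 8, each 128k-token
+# sequence is split into 8 chunks of 16384 tokens, one chunk per GPU in a
+# sequence-parallel group; 32 GPUs therefore form 32 / 8 = 4 data-parallel
+# groups, and accumulative_counts is scaled up to 8 so that the global
+# batch stays at 32 packed sequences per optimizer step.)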
+# bs = 32 gpus * 1 batch_size_per_device * 8 acc / 8 sequence parallel +accumulative_counts = 8 +dataloader_num_workers = 4 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 +log_interval = 1 + +# Save +save_steps = -1 # speed only +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 50 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=SequenceParallelSampler, seed=1024), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + 
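The same bookkeeping applies to the other speed-benchmark variants of this config in the patch. A minimal sanity-check sketch of the rule (an editorial illustration only; the helper below is not part of xtuner, and the 32-GPU figure comes from the `bs = 32 gpus ...` comment in the file):

def global_batch(num_gpus: int, batch_per_device: int,
                 accumulative_counts: int, sp: int) -> int:
    # num_gpus // sp data-parallel groups, each holding `batch_per_device`
    # sequences, with one optimizer step per `accumulative_counts`
    # micro-batches.
    return (num_gpus // sp) * batch_per_device * accumulative_counts

assert global_batch(32, 1, 8, 8) == 32    # this file: 128k context, sp = 8
assert global_batch(32, 1, 16, 16) == 32  # the 256k variant below: sp = 16

Doubling the sequence-parallel size halves the number of data-parallel groups, which is exactly why `accumulative_counts` scales with `SP`.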
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [dict(type=ThroughputHook)]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(
+        type=LoggerHook, log_metric_by_epoch=False, interval=log_interval),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=-1,
+        save_last=False,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False, window_size=log_interval)
diff --git a/data/xtuner/xtuner/configs/llama_speed_benchmark/llama2_70b/llama2_70b_full_alpaca_enzh_256k_sp16.py b/data/xtuner/xtuner/configs/llama_speed_benchmark/llama2_70b/llama2_70b_full_alpaca_enzh_256k_sp16.py
new file mode 100644
index 0000000000000000000000000000000000000000..f0c2139451b1127f5c684a5b6b5db0faf5d9d3b7
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llama_speed_benchmark/llama2_70b/llama2_70b_full_alpaca_enzh_256k_sp16.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from datasets import load_dataset +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import ThroughputHook, VarlenAttnArgsToMessageHubHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Llama-2-70b-hf' +use_varlen_attn = False +sequence_parallel_size = 16 + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 262144 # 256k +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +# Suppose I aim to employ a training strategy using a batch size per device +# of 1 with a maximum length of `max_length` on N GPUs. +# Upon setting the sequence parallelism dimension to `SP`, +# the accumulative counts have to be adjusted to `SP` times the original value. +# This modification is essential to assure training equivalence, +# as the sequence of `max_length` length will be segmented into `SP` parts, +# with each part being allocated to its respective GPU among the `SP` GPUs +# for parallelized training. 
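+# (Worked example of the note above: max_length = 262144 with
+# sequence_parallel_size = 16 means 16 chunks of 16384 tokens per sequence;
+# 32 GPUs form 32 / 16 = 2 data-parallel groups, hence
+# accumulative_counts = 16 to hold the global batch at 32 sequences.)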
+# bs = 32 gpus * 1 batch_size_per_device * 16 acc / 16 sequence parallel +accumulative_counts = 16 +dataloader_num_workers = 4 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 +log_interval = 1 + +# Save +save_steps = -1 # speed only +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 50 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=SequenceParallelSampler, seed=1024), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + 
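As in the other configs, `convert_to_iter_based=True` maps the epoch-based boundaries of the two schedulers above onto iterations at runtime. A rough sketch of that arithmetic (the iterations-per-epoch figure is an assumption for illustration; the real value depends on the packed dataset length and world size):

max_epochs = 3
warmup_ratio = 0.03
iters_per_epoch = 1000  # assumed; mmengine derives this from the dataloader

warmup_end = warmup_ratio * max_epochs * iters_per_epoch  # 90 iterations
total_iters = max_epochs * iters_per_epoch                # 3000 iterations
# LinearLR scales the lr from lr * 1e-5 up to lr over iterations [0, 90);
# CosineAnnealingLR then decays it from lr to eta_min = 0.0 over [90, 3000].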
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# ThroughputHook records training throughput; optional
+custom_hooks = [dict(type=ThroughputHook)]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every `log_interval` iterations.
+    logger=dict(
+        type=LoggerHook, log_metric_by_epoch=False, interval=log_interval),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # checkpoint saving is disabled (interval=-1): these configs only
+    # benchmark training speed.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=-1,
+        save_last=False,
+        max_keep_ckpts=save_total_limit),
+    # set the sampler seed in the distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi-process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# use a random seed by default and disable `deterministic` mode
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False, window_size=log_interval)
diff --git a/data/xtuner/xtuner/configs/llama_speed_benchmark/llama2_70b/llama2_70b_full_alpaca_enzh_32k_sp4.py b/data/xtuner/xtuner/configs/llama_speed_benchmark/llama2_70b/llama2_70b_full_alpaca_enzh_32k_sp4.py
new file mode 100644
index 0000000000000000000000000000000000000000..679e89107cf1f37916b937d0e4feb5beefe03a1b
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llama_speed_benchmark/llama2_70b/llama2_70b_full_alpaca_enzh_32k_sp4.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from datasets import load_dataset
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from torch.optim import AdamW
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+from xtuner.dataset import ConcatDataset, process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn,
+                                    template_map_fn_factory)
+from xtuner.engine.hooks import ThroughputHook, VarlenAttnArgsToMessageHubHook
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.parallel.sequence import SequenceParallelSampler
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'meta-llama/Llama-2-70b-hf'
+use_varlen_attn = False
+sequence_parallel_size = 4
+
+# Data
+alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese'
+alpaca_en_path = 'tatsu-lab/alpaca'
+prompt_template = PROMPT_TEMPLATE.llama2_chat
+max_length = 32768
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+# Suppose we train with a per-device batch size of 1 and a maximum length of
+# `max_length` on N GPUs. When the sequence parallelism dimension is set to
+# `SP`, the accumulative counts must be scaled to `SP` times the original
+# value to ensure training equivalence: each sequence of `max_length` tokens
+# is split into `SP` segments, and each segment is assigned to one of the
+# `SP` GPUs for parallel computation.
+# bs = 32 gpus * 1 batch_size_per_device * 4 acc / 4 sequence parallel +accumulative_counts = 4 +dataloader_num_workers = 4 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 +log_interval = 1 + +# Save +save_steps = -1 # speed only +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 50 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=SequenceParallelSampler, seed=1024), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + 
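The two-phase schedule above is written in epochs but runs per-iteration because of `convert_to_iter_based=True`. A rough sketch of that conversion, assuming a hypothetical `iters_per_epoch` (the runner derives the real value from the dataloader length):

```python
# Illustrative arithmetic only; iters_per_epoch is a made-up number.
warmup_ratio, max_epochs = 0.03, 3
iters_per_epoch = 1000  # hypothetical

warmup_end_epoch = warmup_ratio * max_epochs            # 0.09 epochs
warmup_iters = int(warmup_end_epoch * iters_per_epoch)  # 90 iterations
total_iters = max_epochs * iters_per_epoch              # 3000 iterations

# LinearLR ramps the lr from lr * 1e-5 up to lr over the first 90 iterations;
# CosineAnnealingLR then decays it to eta_min=0.0 over the remaining 2910.
print(warmup_iters, total_iters - warmup_iters)  # 90 2910
```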
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# ThroughputHook records training throughput; optional
+custom_hooks = [dict(type=ThroughputHook)]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every `log_interval` iterations.
+    logger=dict(
+        type=LoggerHook, log_metric_by_epoch=False, interval=log_interval),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # checkpoint saving is disabled (interval=-1): these configs only
+    # benchmark training speed.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=-1,
+        save_last=False,
+        max_keep_ckpts=save_total_limit),
+    # set the sampler seed in the distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi-process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# use a random seed by default and disable `deterministic` mode
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False, window_size=log_interval)
diff --git a/data/xtuner/xtuner/configs/llama_speed_benchmark/llama2_70b/llama2_70b_full_alpaca_enzh_8k_sp1.py b/data/xtuner/xtuner/configs/llama_speed_benchmark/llama2_70b/llama2_70b_full_alpaca_enzh_8k_sp1.py
new file mode 100644
index 0000000000000000000000000000000000000000..7ddc66215d8c8ca0e156d3630abba0acad939c73
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llama_speed_benchmark/llama2_70b/llama2_70b_full_alpaca_enzh_8k_sp1.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from datasets import load_dataset
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from torch.optim import AdamW
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+from xtuner.dataset import ConcatDataset, process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn,
+                                    template_map_fn_factory)
+from xtuner.engine.hooks import ThroughputHook, VarlenAttnArgsToMessageHubHook
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.parallel.sequence import SequenceParallelSampler
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'meta-llama/Llama-2-70b-hf'
+use_varlen_attn = False
+sequence_parallel_size = 1
+
+# Data
+alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese'
+alpaca_en_path = 'tatsu-lab/alpaca'
+prompt_template = PROMPT_TEMPLATE.llama2_chat
+max_length = 8192
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+# Suppose we train with a per-device batch size of 1 and a maximum length of
+# `max_length` on N GPUs. When the sequence parallelism dimension is set to
+# `SP`, the accumulative counts must be scaled to `SP` times the original
+# value to ensure training equivalence: each sequence of `max_length` tokens
+# is split into `SP` segments, and each segment is assigned to one of the
+# `SP` GPUs for parallel computation.
+# bs = 32 gpus * 1 batch_size_per_device * 1 acc / 1 sequence parallel +accumulative_counts = 1 +dataloader_num_workers = 4 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 +log_interval = 1 + +# Save +save_steps = -1 # speed only +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 50 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=SequenceParallelSampler, seed=1024), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + 
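Note that `tokenizer` and `model` above are plain dicts, not objects: mmengine configs are lazy, and the runner instantiates them at build time. A minimal sketch of that build step, simplified from what mmengine actually does (the real builder goes through registries and handles scopes):

```python
# Simplified sketch of mmengine-style lazy building; not mmengine's real code.
def build(cfg):
    cfg = dict(cfg)             # don't mutate the config in place
    obj_type = cfg.pop('type')  # a callable, e.g. AutoTokenizer.from_pretrained
    # recursively build nested configs that carry their own `type`
    kwargs = {k: build(v) if isinstance(v, dict) and 'type' in v else v
              for k, v in cfg.items()}
    return obj_type(**kwargs)

# e.g. build(tokenizer) would end up calling
# AutoTokenizer.from_pretrained(pretrained_model_name_or_path=...,
#                               trust_remote_code=True, padding_side='right')
```

This is why the configs can be imported and inspected without downloading a 70B checkpoint: nothing heavy happens until the runner builds the dicts.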
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# ThroughputHook records training throughput; optional
+custom_hooks = [dict(type=ThroughputHook)]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every `log_interval` iterations.
+    logger=dict(
+        type=LoggerHook, log_metric_by_epoch=False, interval=log_interval),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # checkpoint saving is disabled (interval=-1): these configs only
+    # benchmark training speed.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=-1,
+        save_last=False,
+        max_keep_ckpts=save_total_limit),
+    # set the sampler seed in the distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi-process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# use a random seed by default and disable `deterministic` mode
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False, window_size=log_interval)
diff --git a/data/xtuner/xtuner/configs/llama_speed_benchmark/llama2_7b/llama2_7b_full_alpaca_enzh_128k_sp8.py b/data/xtuner/xtuner/configs/llama_speed_benchmark/llama2_7b/llama2_7b_full_alpaca_enzh_128k_sp8.py
new file mode 100644
index 0000000000000000000000000000000000000000..6be9ef2df3e347d0d39f8f80aebe327a2f1497a2
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llama_speed_benchmark/llama2_7b/llama2_7b_full_alpaca_enzh_128k_sp8.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from datasets import load_dataset
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from torch.optim import AdamW
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+from xtuner.dataset import ConcatDataset, process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn,
+                                    template_map_fn_factory)
+from xtuner.engine.hooks import ThroughputHook, VarlenAttnArgsToMessageHubHook
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.parallel.sequence import SequenceParallelSampler
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'meta-llama/Llama-2-7b'
+use_varlen_attn = False
+sequence_parallel_size = 8
+
+# Data
+alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese'
+alpaca_en_path = 'tatsu-lab/alpaca'
+prompt_template = PROMPT_TEMPLATE.llama2_chat
+max_length = 131072  # 128k
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+# Suppose we train with a per-device batch size of 1 and a maximum length of
+# `max_length` on N GPUs. When the sequence parallelism dimension is set to
+# `SP`, the accumulative counts must be scaled to `SP` times the original
+# value to ensure training equivalence: each sequence of `max_length` tokens
+# is split into `SP` segments, and each segment is assigned to one of the
+# `SP` GPUs for parallel computation.
+# bs = 8 gpus * 1 batch_size_per_device * 8 acc / 8 sequence parallel +accumulative_counts = 8 +dataloader_num_workers = 4 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 +log_interval = 1 + +# Save +save_steps = -1 # speed only +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 50 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=SequenceParallelSampler, seed=1024), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + 
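Since `pack_to_max_length=True`, short Alpaca samples are concatenated into full 128k-token windows so no compute is wasted on padding. A toy sketch of the idea (simplified; xtuner's real packing also tracks per-sample boundaries, which matter when `use_varlen_attn` is enabled):

```python
# Toy packing sketch, not xtuner's implementation.
def pack(samples, max_length):
    """Concatenate tokenized samples, then cut into full windows."""
    flat = [tok for sample in samples for tok in sample]
    return [flat[i:i + max_length]
            for i in range(0, len(flat) - max_length + 1, max_length)]

print(pack([[1, 2, 3], [4, 5], [6, 7, 8, 9]], max_length=4))
# -> [[1, 2, 3, 4], [5, 6, 7, 8]]  (the short tail is dropped)
```

`shuffle_before_pack=True` shuffles the samples before this concatenation so each epoch produces differently composed windows.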
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# ThroughputHook records training throughput; optional
+custom_hooks = [dict(type=ThroughputHook)]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every `log_interval` iterations.
+    logger=dict(
+        type=LoggerHook, log_metric_by_epoch=False, interval=log_interval),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # checkpoint saving is disabled (interval=-1): these configs only
+    # benchmark training speed.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=-1,
+        save_last=False,
+        max_keep_ckpts=save_total_limit),
+    # set the sampler seed in the distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi-process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# use a random seed by default and disable `deterministic` mode
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False, window_size=log_interval)
diff --git a/data/xtuner/xtuner/configs/llama_speed_benchmark/llama2_7b/llama2_7b_full_alpaca_enzh_1M_sp16.py b/data/xtuner/xtuner/configs/llama_speed_benchmark/llama2_7b/llama2_7b_full_alpaca_enzh_1M_sp16.py
new file mode 100644
index 0000000000000000000000000000000000000000..7827c9dfb71c329a86cbea079a4397381d4c2a3c
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llama_speed_benchmark/llama2_7b/llama2_7b_full_alpaca_enzh_1M_sp16.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from datasets import load_dataset
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from torch.optim import AdamW
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+from xtuner.dataset import ConcatDataset, process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn,
+                                    template_map_fn_factory)
+from xtuner.engine.hooks import ThroughputHook, VarlenAttnArgsToMessageHubHook
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.parallel.sequence import SequenceParallelSampler
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'meta-llama/Llama-2-7b'
+use_varlen_attn = False
+sequence_parallel_size = 16
+
+# Data
+alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese'
+alpaca_en_path = 'tatsu-lab/alpaca'
+prompt_template = PROMPT_TEMPLATE.llama2_chat
+max_length = 1048576  # 1M
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+# Suppose we train with a per-device batch size of 1 and a maximum length of
+# `max_length` on N GPUs. When the sequence parallelism dimension is set to
+# `SP`, the accumulative counts must be scaled to `SP` times the original
+# value to ensure training equivalence: each sequence of `max_length` tokens
+# is split into `SP` segments, and each segment is assigned to one of the
+# `SP` GPUs for parallel computation.
+# bs = 32 gpus * 1 batch_size_per_device * 16 acc / 16 sequence parallel +accumulative_counts = 16 +dataloader_num_workers = 4 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 +log_interval = 1 + +# Save +save_steps = -1 # speed only +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 50 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=SequenceParallelSampler, seed=1024), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + 
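At `max_length = 1048576`, a single GPU cannot hold one sequence's activations, which is what `sequence_parallel_size = 16` addresses. The core idea, sketched with plain `torch.chunk` (illustrative only; xtuner's sequence-parallel attention additionally exchanges partial results across ranks via collectives):

```python
# Sketch of the sequence-parallel split; not xtuner's implementation.
import torch

sp = 16
seq = torch.randint(0, 32000, (1, 1048576))  # one (batch, max_length) sample
shards = torch.chunk(seq, sp, dim=1)         # 16 shards of 65536 tokens each

assert len(shards) == sp
assert shards[0].shape == (1, 1048576 // sp)
# rank i keeps shards[i]; attention over the full sequence is reassembled
# through communication between the sp ranks.
```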
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# ThroughputHook records training throughput; optional
+custom_hooks = [dict(type=ThroughputHook)]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every `log_interval` iterations.
+    logger=dict(
+        type=LoggerHook, log_metric_by_epoch=False, interval=log_interval),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # checkpoint saving is disabled (interval=-1): these configs only
+    # benchmark training speed.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=-1,
+        save_last=False,
+        max_keep_ckpts=save_total_limit),
+    # set the sampler seed in the distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi-process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# use a random seed by default and disable `deterministic` mode
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False, window_size=log_interval)
diff --git a/data/xtuner/xtuner/configs/llama_speed_benchmark/llama2_7b/llama2_7b_full_alpaca_enzh_256k_sp8.py b/data/xtuner/xtuner/configs/llama_speed_benchmark/llama2_7b/llama2_7b_full_alpaca_enzh_256k_sp8.py
new file mode 100644
index 0000000000000000000000000000000000000000..ba0c94bb6b2cb0f26d5dc8a14c748fd19221992c
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llama_speed_benchmark/llama2_7b/llama2_7b_full_alpaca_enzh_256k_sp8.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from datasets import load_dataset
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from torch.optim import AdamW
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+from xtuner.dataset import ConcatDataset, process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn,
+                                    template_map_fn_factory)
+from xtuner.engine.hooks import ThroughputHook, VarlenAttnArgsToMessageHubHook
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.parallel.sequence import SequenceParallelSampler
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'meta-llama/Llama-2-7b'
+use_varlen_attn = False
+sequence_parallel_size = 8
+
+# Data
+alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese'
+alpaca_en_path = 'tatsu-lab/alpaca'
+prompt_template = PROMPT_TEMPLATE.llama2_chat
+max_length = 262144  # 256k
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+# Suppose we train with a per-device batch size of 1 and a maximum length of
+# `max_length` on N GPUs. When the sequence parallelism dimension is set to
+# `SP`, the accumulative counts must be scaled to `SP` times the original
+# value to ensure training equivalence: each sequence of `max_length` tokens
+# is split into `SP` segments, and each segment is assigned to one of the
+# `SP` GPUs for parallel computation.
+# bs = 8 gpus * 1 batch_size_per_device * 8 acc / 8 sequence parallel +accumulative_counts = 8 +dataloader_num_workers = 4 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 +log_interval = 1 + +# Save +save_steps = -1 # speed only +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 50 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=SequenceParallelSampler, seed=1024), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + 
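The `optim_wrapper` above bundles float16 autocast, dynamic loss scaling, gradient accumulation, and gradient clipping. In plain PyTorch, the step it configures looks roughly like this (a sketch: `model`, `loader`, and `optimizer` are passed in as assumptions; mmengine hides all of this behind `optim_wrapper.update_params`):

```python
# Plain-PyTorch sketch of the AmpOptimWrapper training step; simplified.
import torch

def train_amp(model, loader, optimizer,
              accumulative_counts: int = 8, max_norm: float = 1.0):
    scaler = torch.cuda.amp.GradScaler()  # loss_scale='dynamic'
    for step, batch in enumerate(loader):
        with torch.autocast('cuda', dtype=torch.float16):  # dtype='float16'
            loss = model(**batch).loss / accumulative_counts
        scaler.scale(loss).backward()
        if (step + 1) % accumulative_counts == 0:
            scaler.unscale_(optimizer)  # clip the true, unscaled gradients
            torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm)
            scaler.step(optimizer)      # skips the step on inf/nan grads
            scaler.update()
            optimizer.zero_grad()
```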
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# ThroughputHook records training throughput; optional
+custom_hooks = [dict(type=ThroughputHook)]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every `log_interval` iterations.
+    logger=dict(
+        type=LoggerHook, log_metric_by_epoch=False, interval=log_interval),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # checkpoint saving is disabled (interval=-1): these configs only
+    # benchmark training speed.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=-1,
+        save_last=False,
+        max_keep_ckpts=save_total_limit),
+    # set the sampler seed in the distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi-process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# use a random seed by default and disable `deterministic` mode
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False, window_size=log_interval)
diff --git a/data/xtuner/xtuner/configs/llama_speed_benchmark/llama2_7b/llama2_7b_full_alpaca_enzh_32k_sp1.py b/data/xtuner/xtuner/configs/llama_speed_benchmark/llama2_7b/llama2_7b_full_alpaca_enzh_32k_sp1.py
new file mode 100644
index 0000000000000000000000000000000000000000..b871ce6f5edc9208b27508fff9a815bb7931cab2
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llama_speed_benchmark/llama2_7b/llama2_7b_full_alpaca_enzh_32k_sp1.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from datasets import load_dataset
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from torch.optim import AdamW
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+from xtuner.dataset import ConcatDataset, process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn,
+                                    template_map_fn_factory)
+from xtuner.engine.hooks import ThroughputHook, VarlenAttnArgsToMessageHubHook
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.parallel.sequence import SequenceParallelSampler
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'meta-llama/Llama-2-7b'
+use_varlen_attn = False
+sequence_parallel_size = 1
+
+# Data
+alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese'
+alpaca_en_path = 'tatsu-lab/alpaca'
+prompt_template = PROMPT_TEMPLATE.llama2_chat
+max_length = 32768
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+# Suppose we train with a per-device batch size of 1 and a maximum length of
+# `max_length` on N GPUs. When the sequence parallelism dimension is set to
+# `SP`, the accumulative counts must be scaled to `SP` times the original
+# value to ensure training equivalence: each sequence of `max_length` tokens
+# is split into `SP` segments, and each segment is assigned to one of the
+# `SP` GPUs for parallel computation.
+# bs = 8 gpus * 1 batch_size_per_device * 1 acc / 1 sequence parallel +accumulative_counts = 1 +dataloader_num_workers = 4 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 +log_interval = 1 + +# Save +save_steps = -1 # speed only +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 50 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=SequenceParallelSampler, seed=1024), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + 
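Because `save_steps = -1` and checkpointing is disabled, the main number these benchmark configs produce is the throughput logged by `ThroughputHook`. In spirit it reduces to tokens processed per device per second; a back-of-the-envelope helper (ours, not xtuner's API):

```python
# Rough throughput estimate; illustrative only.
def tokens_per_second(max_length, batch_per_device, iter_time_s):
    """Tokens processed by one device per second."""
    return max_length * batch_per_device / iter_time_s

# e.g. a 32k packed sequence finishing an iteration in 10 s:
print(tokens_per_second(32768, 1, 10.0))  # 3276.8 tokens/s per GPU
```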
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# ThroughputHook records training throughput; optional
+custom_hooks = [dict(type=ThroughputHook)]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every `log_interval` iterations.
+    logger=dict(
+        type=LoggerHook, log_metric_by_epoch=False, interval=log_interval),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # checkpoint saving is disabled (interval=-1): these configs only
+    # benchmark training speed.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=-1,
+        save_last=False,
+        max_keep_ckpts=save_total_limit),
+    # set the sampler seed in the distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi-process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# use a random seed by default and disable `deterministic` mode
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False, window_size=log_interval)
diff --git a/data/xtuner/xtuner/configs/llama_speed_benchmark/llama2_7b/llama2_7b_full_alpaca_enzh_8k_sp1.py b/data/xtuner/xtuner/configs/llama_speed_benchmark/llama2_7b/llama2_7b_full_alpaca_enzh_8k_sp1.py
new file mode 100644
index 0000000000000000000000000000000000000000..d6178015bce67f69750b388e7e7149b0f2d2f58e
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llama_speed_benchmark/llama2_7b/llama2_7b_full_alpaca_enzh_8k_sp1.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from datasets import load_dataset
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from torch.optim import AdamW
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+from xtuner.dataset import ConcatDataset, process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn,
+                                    template_map_fn_factory)
+from xtuner.engine.hooks import ThroughputHook, VarlenAttnArgsToMessageHubHook
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.parallel.sequence import SequenceParallelSampler
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'meta-llama/Llama-2-7b'
+use_varlen_attn = False
+sequence_parallel_size = 1
+
+# Data
+alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese'
+alpaca_en_path = 'tatsu-lab/alpaca'
+prompt_template = PROMPT_TEMPLATE.llama2_chat
+max_length = 8192
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+# Suppose we train with a per-device batch size of 1 and a maximum length of
+# `max_length` on N GPUs. When the sequence parallelism dimension is set to
+# `SP`, the accumulative counts must be scaled to `SP` times the original
+# value to ensure training equivalence: each sequence of `max_length` tokens
+# is split into `SP` segments, and each segment is assigned to one of the
+# `SP` GPUs for parallel computation.
+# bs = 8 gpus * 1 batch_size_per_device * 1 acc / 1 sequence parallel +accumulative_counts = 1 +dataloader_num_workers = 4 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 +log_interval = 1 + +# Save +save_steps = -1 # speed only +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 50 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=SequenceParallelSampler, seed=1024), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + 
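All of these benchmark configs keep `use_varlen_attn = False`, so attention may cross the boundaries of samples packed into one window. When it is enabled, variable-length attention kernels instead receive cumulative sequence lengths so each packed sample only attends to itself; a sketch of what those arguments look like (illustrative, following the flash-attention `cu_seqlens` convention):

```python
# Building cu_seqlens for one packed window; illustrative only.
import torch

# three samples of lengths 3, 2 and 4 packed into one window of 9 tokens
sample_lens = torch.tensor([3, 2, 4])
cu_seqlens = torch.cat(
    [torch.zeros(1, dtype=torch.long), sample_lens.cumsum(0)])
print(cu_seqlens)  # tensor([0, 3, 5, 9])
# a varlen kernel restricts attention to [cu_seqlens[i], cu_seqlens[i+1]);
# VarlenAttnArgsToMessageHubHook is what delivers such args to the model.
```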
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# ThroughputHook records training throughput; optional
+custom_hooks = [dict(type=ThroughputHook)]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every `log_interval` iterations.
+    logger=dict(
+        type=LoggerHook, log_metric_by_epoch=False, interval=log_interval),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # checkpoint saving is disabled (interval=-1): these configs only
+    # benchmark training speed.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=-1,
+        save_last=False,
+        max_keep_ckpts=save_total_limit),
+    # set the sampler seed in the distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi-process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# use a random seed by default and disable `deterministic` mode
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False, window_size=log_interval)
diff --git a/data/xtuner/xtuner/configs/llama_speed_benchmark/yi_34b/yi_34b_200k_full_alpaca_enzh_128k_sp8.py b/data/xtuner/xtuner/configs/llama_speed_benchmark/yi_34b/yi_34b_200k_full_alpaca_enzh_128k_sp8.py
new file mode 100644
index 0000000000000000000000000000000000000000..60de99deb8c896740d6ac7415eaa981a39564f56
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llama_speed_benchmark/yi_34b/yi_34b_200k_full_alpaca_enzh_128k_sp8.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from datasets import load_dataset
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from torch.optim import AdamW
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+from xtuner.dataset import ConcatDataset, process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn,
+                                    template_map_fn_factory)
+from xtuner.engine.hooks import ThroughputHook, VarlenAttnArgsToMessageHubHook
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.parallel.sequence import SequenceParallelSampler
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = '01-ai/Yi-34B-200K'
+use_varlen_attn = False
+sequence_parallel_size = 8
+
+# Data
+alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese'
+alpaca_en_path = 'tatsu-lab/alpaca'
+prompt_template = PROMPT_TEMPLATE.llama2_chat
+max_length = 131072  # 128k
+pack_to_max_length = True
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+# Suppose we train with a per-device batch size of 1 and a maximum length of
+# `max_length` on N GPUs. When the sequence parallelism dimension is set to
+# `SP`, the accumulative counts must be scaled to `SP` times the original
+# value to ensure training equivalence: each sequence of `max_length` tokens
+# is split into `SP` segments, and each segment is assigned to one of the
+# `SP` GPUs for parallel computation.
+# bs = 32 gpus * 1 batch_size_per_device * 8 acc / 8 sequence parallel +accumulative_counts = 8 +dataloader_num_workers = 4 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 +log_interval = 1 + +# Save +save_steps = -1 # speed only +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 50 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=SequenceParallelSampler, seed=1024), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + 
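A quick sanity check of what this Yi-34B config asks of each GPU, reproducing the `bs = 32 gpus * 1 batch_size_per_device * 8 acc / 8 sequence parallel` comment above (plain arithmetic, no xtuner API):

```python
# Per-rank workload for this config; illustrative arithmetic only.
max_length, sp, gpus = 131072, 8, 32
tokens_per_rank = max_length // sp    # 16384 tokens of each sequence per GPU
replicas = gpus // sp                 # 4 data-parallel groups
global_batch = replicas * 1 * 8       # batch_size * accumulative_counts
print(tokens_per_rank, global_batch)  # 16384 32
```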
+#######################################################################
+#                            PART 5 Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [dict(type=ThroughputHook)]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every `log_interval` iterations.
+    logger=dict(
+        type=LoggerHook, log_metric_by_epoch=False, interval=log_interval),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # checkpoint saving is disabled (interval=-1); this config benchmarks
+    # training speed only.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=-1,
+        save_last=False,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False, window_size=log_interval)
diff --git a/data/xtuner/xtuner/configs/llama_speed_benchmark/yi_34b/yi_34b_200k_full_alpaca_enzh_256k_sp8.py b/data/xtuner/xtuner/configs/llama_speed_benchmark/yi_34b/yi_34b_200k_full_alpaca_enzh_256k_sp8.py
new file mode 100644
index 0000000000000000000000000000000000000000..86303fb5244d58c47c2d47f57dfa799b20b7cb54
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llama_speed_benchmark/yi_34b/yi_34b_200k_full_alpaca_enzh_256k_sp8.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
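+# Speed benchmark: full-parameter fine-tuning of Yi-34B-200K on the
+# English/Chinese Alpaca datasets with a 262144-token (256k) packed context
+# and sequence parallel size 8.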
+from datasets import load_dataset +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import ThroughputHook, VarlenAttnArgsToMessageHubHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = '01-ai/Yi-34B-200K' +use_varlen_attn = False +sequence_parallel_size = 8 + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 262144 # 256k +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +# Suppose I aim to employ a training strategy using a batch size per device +# of 1 with a maximum length of `max_length` on N GPUs. +# Upon setting the sequence parallelism dimension to `SP`, +# the accumulative counts have to be adjusted to `SP` times the original value. +# This modification is essential to assure training equivalence, +# as the sequence of `max_length` length will be segmented into `SP` parts, +# with each part being allocated to its respective GPU among the `SP` GPUs +# for parallelized training. 
+# bs = 32 gpus * 1 batch_size_per_device * 8 acc / 8 sequence parallel +accumulative_counts = 8 +dataloader_num_workers = 4 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 +log_interval = 1 + +# Save +save_steps = -1 # speed only +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 50 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=SequenceParallelSampler, seed=1024), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + 
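+# Worked example, assuming the 32-GPU setup from the `bs` comment above:
+# 32 GPUs / sp8 = 4 sequence-parallel groups, each holding one 262144-token
+# sequence, so one optimizer step covers 4 groups * 8 gradient-accumulation
+# steps = 32 sequences, i.e. 32 * 262144 = 8,388,608 packed tokens.
+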
+#######################################################################
+#                            PART 5 Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [dict(type=ThroughputHook)]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every `log_interval` iterations.
+    logger=dict(
+        type=LoggerHook, log_metric_by_epoch=False, interval=log_interval),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # checkpoint saving is disabled (interval=-1); this config benchmarks
+    # training speed only.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=-1,
+        save_last=False,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False, window_size=log_interval)
diff --git a/data/xtuner/xtuner/configs/llama_speed_benchmark/yi_34b/yi_34b_200k_full_alpaca_enzh_32k_sp2.py b/data/xtuner/xtuner/configs/llama_speed_benchmark/yi_34b/yi_34b_200k_full_alpaca_enzh_32k_sp2.py
new file mode 100644
index 0000000000000000000000000000000000000000..452f999f6bf87ba100e19d587755c16efd7aed9f
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llama_speed_benchmark/yi_34b/yi_34b_200k_full_alpaca_enzh_32k_sp2.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
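+# Speed benchmark: full-parameter fine-tuning of Yi-34B-200K on the
+# English/Chinese Alpaca datasets with a 32768-token (32k) packed context
+# and sequence parallel size 2.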
+from datasets import load_dataset +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import ThroughputHook, VarlenAttnArgsToMessageHubHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = '01-ai/Yi-34B-200K' +use_varlen_attn = False +sequence_parallel_size = 2 + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 32768 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +# Suppose I aim to employ a training strategy using a batch size per device +# of 1 with a maximum length of `max_length` on N GPUs. +# Upon setting the sequence parallelism dimension to `SP`, +# the accumulative counts have to be adjusted to `SP` times the original value. +# This modification is essential to assure training equivalence, +# as the sequence of `max_length` length will be segmented into `SP` parts, +# with each part being allocated to its respective GPU among the `SP` GPUs +# for parallelized training. 
+# bs = 32 gpus * 1 batch_size_per_device * 2 acc / 2 sequence parallel +accumulative_counts = 2 +dataloader_num_workers = 4 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 +log_interval = 1 + +# Save +save_steps = -1 # speed only +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 50 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=SequenceParallelSampler, seed=1024), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + 
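+# Worked example, assuming the 32-GPU setup from the `bs` comment above:
+# 32 GPUs / sp2 = 16 sequence-parallel groups, each holding one 32768-token
+# sequence, so one optimizer step covers 16 groups * 2 gradient-accumulation
+# steps = 32 sequences, i.e. 32 * 32768 = 1,048,576 packed tokens.
+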
+#######################################################################
+#                            PART 5 Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [dict(type=ThroughputHook)]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every `log_interval` iterations.
+    logger=dict(
+        type=LoggerHook, log_metric_by_epoch=False, interval=log_interval),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # checkpoint saving is disabled (interval=-1); this config benchmarks
+    # training speed only.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=-1,
+        save_last=False,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False, window_size=log_interval)
diff --git a/data/xtuner/xtuner/configs/llama_speed_benchmark/yi_34b/yi_34b_200k_full_alpaca_enzh_8k_sp1.py b/data/xtuner/xtuner/configs/llama_speed_benchmark/yi_34b/yi_34b_200k_full_alpaca_enzh_8k_sp1.py
new file mode 100644
index 0000000000000000000000000000000000000000..28e8c919cb5a56aba51aeaa71e0ac9cc84520306
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llama_speed_benchmark/yi_34b/yi_34b_200k_full_alpaca_enzh_8k_sp1.py
@@ -0,0 +1,212 @@
+# Copyright (c) OpenMMLab. All rights reserved.
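+# Speed benchmark: full-parameter fine-tuning of Yi-34B-200K on the
+# English/Chinese Alpaca datasets with an 8192-token (8k) packed context
+# and no sequence parallelism (sp1); this is the baseline configuration.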
+from datasets import load_dataset +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import ThroughputHook, VarlenAttnArgsToMessageHubHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = '01-ai/Yi-34B-200K' +use_varlen_attn = False +sequence_parallel_size = 1 + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.llama2_chat +max_length = 8192 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +# Suppose I aim to employ a training strategy using a batch size per device +# of 1 with a maximum length of `max_length` on N GPUs. +# Upon setting the sequence parallelism dimension to `SP`, +# the accumulative counts have to be adjusted to `SP` times the original value. +# This modification is essential to assure training equivalence, +# as the sequence of `max_length` length will be segmented into `SP` parts, +# with each part being allocated to its respective GPU among the `SP` GPUs +# for parallelized training. 
+# bs = 32 gpus * 1 batch_size_per_device * 1 acc / 1 sequence parallel +accumulative_counts = 1 +dataloader_num_workers = 4 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 +log_interval = 1 + +# Save +save_steps = -1 # speed only +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 50 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=SequenceParallelSampler, seed=1024), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + 
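+# Worked example, assuming the 32-GPU setup from the `bs` comment above:
+# with sp1, each of the 32 GPUs holds one full 8192-token sequence, and with
+# no gradient accumulation one optimizer step covers 32 sequences,
+# i.e. 32 * 8192 = 262,144 packed tokens.
+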
+#######################################################################
+#                            PART 5 Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [dict(type=ThroughputHook)]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every `log_interval` iterations.
+    logger=dict(
+        type=LoggerHook, log_metric_by_epoch=False, interval=log_interval),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # checkpoint saving is disabled (interval=-1); this config benchmarks
+    # training speed only.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=-1,
+        save_last=False,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False, window_size=log_interval)
diff --git a/data/xtuner/xtuner/configs/llava/README.md b/data/xtuner/xtuner/configs/llava/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..8d9db0f77185d8f6ba3d917ab838ffc4316e07d9
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llava/README.md
@@ -0,0 +1,125 @@
+# LLaVA Full Pipeline
+
+English | [简体中文](./README_zh-CN.md)
+
+## Configs
+
+- `./${LLM}_${ViT}/` contains configs that align with LLaVA-InternLM settings (*i.e.*, using LoRA / QLoRA).
+- `./official/` contains configs that align with the official LLaVA settings.
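+
+For example, the LLaVA-InternLM2-7B recipe referenced in the Results table below lives at:
+
+```
+internlm2_chat_7b_clip_vit_large_p14_336/
+├── pretrain/llava_internlm2_chat_7b_clip_vit_large_p14_336_e1_gpu8_pretrain.py
+└── finetune/llava_internlm2_chat_7b_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py
+```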
+ +## Results + +XTuner primarily promotes the LLM-QLoRA / ViT-LoRA LLaVA architecture, and the evaluation results on various datasets are as follows: + +| Model | MMBench Test (EN) | MMBench Dev (EN) | MMBench Test (CN) | MMBench Dev (CN) | CCBench Dev | MME | SEEDBench_IMG | MMVet | MMMU Dev | MathVista MiniTest | HallusionBench aAcc | Configs | Pretrained Projector Checkpoints | Fine-tuned LLaVA Checkpoints | +| :--------------------------- | :---------------: | :--------------: | :---------------: | :--------------: | :---------: | :--: | :-----------: | :---: | :------: | :----------------: | :-----------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | -------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :------------------------------------------------------------------------------------------------------------------------------------------------: | +| LLaVA-v1.5-7B (XTuner) | 67.7 | 69.2 | 61.0 | 59.7 | 28.4 | 1716 | 66.4 | 32.2 | 33.7 | 24.2 | 46.2 | [Pretrain](./vicuna_7b_v15_clip_vit_large_p14_336/pretrain/llava_vicuna_7b_v15_clip_vit_large_p14_336_e1_gpu8_pretrain.py) / [Fine-tune](./vicuna_7b_v15_clip_vit_large_p14_336/finetune/llava_vicuna_7b_v15_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py) | 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-v1.5-7b-xtuner-pretrain) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-v1.5-7b-xtuner-pretrain) | 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-v1.5-7b-xtuner) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-v1.5-7b-xtuner) | +| LLaVA-v1.5-13B (XTuner) | 68.8 | 69.5 | 64.7 | 63.1 | 32.9 | 1766 | 67.9 | 35.9 | 35.2 | 26.2 | 46.9 | [Pretrain](./vicuna_13b_v15_clip_vit_large_p14_336/pretrain/llava_vicuna_13b_v15_clip_vit_large_p14_336_e1_gpu8_pretrain.py) / [Fine-tune](./vicuna_13b_v15_clip_vit_large_p14_336/finetune/llava_vicuna_13b_v15_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py) | 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-v1.5-13b-xtuner-pretrain) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-v1.5-13b-xtuner-pretrain) | 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-v1.5-13b-xtuner) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-v1.5-13b-xtuner) | +| LLaVA-InternLM-7B (XTuner) | 69.0 | 68.5 | 66.7 | 63.8 | 37.3 | 1637 | 65.7 | 32.4 | 36.9 | 26.3 | 49.1 | [Pretrain](./internlm_chat_7b_clip_vit_large_p14_336/pretrain/llava_internlm_chat_7b_clip_vit_large_p14_336_e1_gpu8_pretrain.py) / [Fine-tune](./internlm_chat_7b_clip_vit_large_p14_336/finetune/llava_internlm_chat_7b_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py) | 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-internlm-7b-pretrain) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-internlm-7b-pretrain) | 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-internlm-7b) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-internlm-7b) | +| LLaVA-InternLM2-7B (XTuner) | 73.3 | 74.6 | 71.7 | 72.0 | 42.5 | 1700 | 71.2 | 35.9 | 40.1 | 25.5 | 46.8 | [Pretrain](./internlm2_chat_7b_clip_vit_large_p14_336/pretrain/llava_internlm2_chat_7b_clip_vit_large_p14_336_e1_gpu8_pretrain.py) / 
[Fine-tune](./internlm2_chat_7b_clip_vit_large_p14_336/finetune/llava_internlm2_chat_7b_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py) | 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-internlm2-7b-pretrain) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-internlm2-7b-pretrain) | 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-internlm2-7b) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-internlm2-7b) | +| LLaVA-InternLM2-20B (XTuner) | 75.1 | 73.5 | 73.7 | 72.8 | 46.3 | 1868 | 70.2 | 37.2 | 39.4 | 24.6 | 47.7 | [Pretrain](./internlm2_chat_20b_clip_vit_large_p14_336/pretrain/llava_internlm2_chat_20b_clip_vit_large_p14_336_e1_gpu8_pretrain.py) / [Fine-tune](./internlm2_chat_20b_clip_vit_large_p14_336/finetune/llava_internlm2_chat_20b_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py) | 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-internlm2-20b-pretrain) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-internlm2-20b-pretrain) | 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-internlm2-20b) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-internlm2-20b) | + +When aligned completely with the official training settings, the results are as follows: + +| Model | Framework | MMBench Test (EN) | MMBench Dev (EN) | MMBench Test (CN) | MMBench Dev (CN) | CCBench Dev | MME | SEEDBench_IMG | MMVet | Configs | +| :------------ | :-------: | :---------------: | :--------------: | :---------------: | :--------------: | :---------: | :--: | :-----------: | :---: | :--------------------------------------------------------------------------------------------------------------------------: | +| LLaVA-v1.5-7B | Official | 65.2 | 63.0 | 57.3 | 57.4 | 25.2 | 1775 | 65.6 | 32.7 | - | +| LLaVA-v1.5-7B | XTuner | 68.6 | 68.0 | 61.5 | 61.4 | 26.5 | 1786 | 65.8 | 31.4 | [Pretrain](./official/llava_v15_7b/llava_v15_7b_pretrain.py) / [Fine-tune](./official/llava_v15_7b/llava_v15_7b_finetune.py) | + +## Data Preparation + +Please refer to the [docs](../../../docs/en/user_guides/dataset_prepare.md#llava-dataset). + +## Training + +The training of LLaVA consists of two steps: alignment module (i.e., MLP) pretraining and instruction following fine-tuning + +Note: this guide takes 8-card training LLaVA-InternLM2-7B as an example, if there are insufficient GPU resources or memory during actual use, you can reduce the batchsize appropriately to decrease memory consumption. The Pretrained projector is saved and re-loaded by default in `./work_dirs/llava_internlm2_chat_7b_clip_vit_large_p14_336_e1_gpu8_pretrain/iter_2181.pth`. + +1. Alignment module pretraining (saved by default in `./work_dirs/`) + +```bash +NPROC_PER_NODE=8 xtuner train llava_internlm2_chat_7b_clip_vit_large_p14_336_e1_gpu8_pretrain --deepspeed deepspeed_zero2 +``` + +2. Instruction following fine-tuning (saved by default in `./work_dirs/`) + +```bash +NPROC_PER_NODE=8 xtuner train llava_internlm2_chat_7b_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune --deepspeed deepspeed_zero2 +``` + +## Model Conversion (and Merge) + +After training, we will obtain a set of weights (*i.e.*, `iter_xxx.pth`), which are not in the universal HuggingFace format. We first need to convert them. + +```bash +xtuner convert pth_to_hf $FINETUNE_CFG $PTH_PATH $SAVE_PATH +# e.g., xtuner convert pth_to_hf llava_internlm2_chat_7b_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune ./iter_5198.pth ./iter_5198_hf +``` + +At this point, we have obtained the relevant model (LLM or the corresponding LoRA). 
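+
+As a quick sanity check (a hypothetical snippet, not part of the official XTuner pipeline), the converted folder should load with the standard `transformers` / `peft` APIs, since the conversion produces HuggingFace-format weights:
+
+```python
+from peft import PeftModel
+from transformers import AutoModelForCausalLM
+
+# Load the base LLM, then attach the converted LoRA adapter.
+llm = AutoModelForCausalLM.from_pretrained(
+    'internlm/internlm2-chat-7b', trust_remote_code=True)
+# The adapter sub-directory name below is illustrative; inspect the
+# converted folder (e.g. ./iter_5198_hf) for the actual layout.
+model = PeftModel.from_pretrained(llm, './iter_5198_hf/llm_adapter')
+```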
+
+Afterwards, if you want to merge the LoRA into the LLM or CLIP-ViT, please use the following command:
+
+```bash
+(LLM) xtuner convert merge $LLM $LLM_ADAPTER $SAVE_PATH
+(CLIP) xtuner convert merge $CLIP $CLIP_ADAPTER $SAVE_PATH --is-clip
+```
+
+## Chat
+
+You can download the released LLaVA-InternLM2-7B model from 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-internlm2-7b) or 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-internlm2-7b), and run image-text question answering with the following command:
+
+```bash
+xtuner chat internlm/internlm2-chat-7b \
+    --visual-encoder openai/clip-vit-large-patch14-336 \
+    --llava xtuner/llava-internlm2-7b \
+    --prompt-template internlm2_chat \
+    --image $IMAGE_PATH
+```
+
+Here, `--llava` is the weight converted in the step above (in our example, it is `./iter_5198_hf`).
+
+## Evaluation
+
+XTuner's LLaVA models can be evaluated using [VLMEvalKit](https://github.com/open-compass/VLMEvalKit).
+
+For convenience, XTuner also integrates the [MMBench](https://mmbench.opencompass.org.cn/home) evaluation.
+
+Users can download the MMBench datasets with
+
+```
+wget https://opencompass.openxlab.space/utils/VLMEval/MMBench_DEV_EN.tsv
+wget https://opencompass.openxlab.space/utils/VLMEval/MMBench_TEST_EN.tsv
+wget https://opencompass.openxlab.space/utils/VLMEval/MMBench_DEV_CN.tsv
+wget https://opencompass.openxlab.space/utils/VLMEval/MMBench_TEST_CN.tsv
+wget https://opencompass.openxlab.space/utils/VLMEval/CCBench.tsv
+```
+
+After that, the evaluations can be run with
+
+```bash
+xtuner mmbench internlm/internlm2-chat-7b \
+    --visual-encoder openai/clip-vit-large-patch14-336 \
+    --llava xtuner/llava-internlm2-7b \
+    --prompt-template internlm2_chat \
+    --data-path $DATA_PATH \
+    --work-dir $RESULT_PATH
+```
+
+Here, `$DATA_PATH` refers to one of the datasets downloaded above, such as `MMBench_DEV_EN.tsv`.
+
+After the evaluation completes, the results of a development set are printed directly; for a test set, you need to submit `mmbench_result.xlsx` to the official MMBench site for final scoring to obtain the accuracy results.
+
+### RefCOCO
+
+To evaluate your model on RefCOCO, first download the evaluation data files from [this link](https://github.com/Vision-CAIR/MiniGPT-4/tree/main/eval_scripts/eval_data). Then, evaluate your model with the following command.
+ +```bash +xtuner eval_refcoco $LLM \ + --visual-encoder $VISUAL_ENCODER \ + --llava $LLAVA_PATH \ + --prompt-template $PROMPT_TEMPLATE \ + --data-path $DATA_PATH \ + --work-dir $RESULT_PATH +``` diff --git a/data/xtuner/xtuner/configs/llava/README_zh-CN.md b/data/xtuner/xtuner/configs/llava/README_zh-CN.md new file mode 100644 index 0000000000000000000000000000000000000000..b8f2595208aabf67742af31884b03fe3640c3be0 --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/README_zh-CN.md @@ -0,0 +1,123 @@ +# LLaVA 全流程 + +[English](./README.md) | 简体中文 + +## 配置文件 + +- `./${LLM}_${ViT}/` 包含着与 LLaVA-InternLM 训练配置对齐的配置文件(即使用 LoRA / QLoRA)。 +- `./official/` 包含着与 LLaVA-v1.5 官方训练配置对齐的配置文件。 + +## 结果 + +XTuner 推荐使用基于 LLM-QLoRA / ViT-LoRA 的 LLaVA 架构,其在各个数据集的评测结果如下: + +| 模型 | MMBench Test (EN) | MMBench Dev (EN) | MMBench Test (CN) | MMBench Dev (CN) | CCBench Dev | MME | SEEDBench_IMG | MMVet | MMMU Dev | MathVista MiniTest | HallusionBench aAcc | 配置文件 | 预训练 Projector 权重 | 微调 LLaVA 权重 | +| :--------------------------- | :---------------: | :--------------: | :---------------: | :--------------: | :---------: | :--: | :-----------: | :---: | :------: | :----------------: | :-----------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | -------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :------------------------------------------------------------------------------------------------------------------------------------------------: | +| LLaVA-v1.5-7B (XTuner) | 67.7 | 69.2 | 61.0 | 59.7 | 28.4 | 1716 | 66.4 | 32.2 | 33.7 | 24.2 | 46.2 | [Pretrain](./vicuna_7b_v15_clip_vit_large_p14_336/pretrain/llava_vicuna_7b_v15_clip_vit_large_p14_336_e1_gpu8_pretrain.py) / [Fine-tune](./vicuna_7b_v15_clip_vit_large_p14_336/finetune/llava_vicuna_7b_v15_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py) | 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-v1.5-7b-xtuner-pretrain) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-v1.5-7b-xtuner-pretrain) | 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-v1.5-7b-xtuner) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-v1.5-7b-xtuner) | +| LLaVA-v1.5-13B (XTuner) | 68.8 | 69.5 | 64.7 | 63.1 | 32.9 | 1766 | 67.9 | 35.9 | 35.2 | 26.2 | 46.9 | [Pretrain](./vicuna_13b_v15_clip_vit_large_p14_336/pretrain/llava_vicuna_13b_v15_clip_vit_large_p14_336_e1_gpu8_pretrain.py) / [Fine-tune](./vicuna_13b_v15_clip_vit_large_p14_336/finetune/llava_vicuna_13b_v15_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py) | 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-v1.5-13b-xtuner-pretrain) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-v1.5-13b-xtuner-pretrain) | 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-v1.5-13b-xtuner) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-v1.5-13b-xtuner) | +| LLaVA-InternLM-7B (XTuner) | 69.0 | 68.5 | 66.7 | 63.8 | 37.3 | 1637 | 65.7 | 32.4 | 36.9 | 26.3 | 49.1 | [Pretrain](./internlm_chat_7b_clip_vit_large_p14_336/pretrain/llava_internlm_chat_7b_clip_vit_large_p14_336_e1_gpu8_pretrain.py) / [Fine-tune](./internlm_chat_7b_clip_vit_large_p14_336/finetune/llava_internlm_chat_7b_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py) | 🤗 
[HuggingFace](https://huggingface.co/xtuner/llava-internlm-7b-pretrain) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-internlm-7b-pretrain) | 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-internlm-7b) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-internlm-7b) | +| LLaVA-InternLM2-7B (XTuner) | 73.3 | 74.6 | 71.7 | 72.0 | 42.5 | 1700 | 71.2 | 35.9 | 40.1 | 25.5 | 46.8 | [Pretrain](./internlm2_chat_7b_clip_vit_large_p14_336/pretrain/llava_internlm2_chat_7b_clip_vit_large_p14_336_e1_gpu8_pretrain.py) / [Fine-tune](./internlm2_chat_7b_clip_vit_large_p14_336/finetune/llava_internlm2_chat_7b_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py) | 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-internlm2-7b-pretrain) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-internlm2-7b-pretrain) | 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-internlm2-7b) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-internlm2-7b) | +| LLaVA-InternLM2-20B (XTuner) | 75.1 | 73.5 | 73.7 | 72.8 | 46.3 | 1868 | 70.2 | 37.2 | 39.4 | 24.6 | 47.7 | [Pretrain](./internlm2_chat_20b_clip_vit_large_p14_336/pretrain/llava_internlm2_chat_20b_clip_vit_large_p14_336_e1_gpu8_pretrain.py) / [Fine-tune](./internlm2_chat_20b_clip_vit_large_p14_336/finetune/llava_internlm2_chat_20b_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py) | 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-internlm2-20b-pretrain) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-internlm2-20b-pretrain) | 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-internlm2-20b) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-internlm2-20b) | + +当与 LLaVA 官方训练架构对齐时,其评测结果如下: + +| 模型 | 框架 | MMBench Test (EN) | MMBench Dev (EN) | MMBench Test (CN) | MMBench Dev (CN) | CCBench Dev | MME | SEEDBench_IMG | MMVet | 配置文件 | +| :------------ | :------: | :---------------: | :--------------: | :---------------: | :--------------: | :---------: | :--: | :-----------: | :---: | :--------------------------------------------------------------------------------------------------------------------------: | +| LLaVA-v1.5-7B | Official | 65.2 | 63.0 | 57.3 | 57.4 | 25.2 | 1775 | 65.6 | 32.7 | - | +| LLaVA-v1.5-7B | XTuner | 68.6 | 68.0 | 61.5 | 61.4 | 26.5 | 1786 | 65.8 | 31.4 | [Pretrain](./official/llava_v15_7b/llava_v15_7b_pretrain.py) / [Fine-tune](./official/llava_v15_7b/llava_v15_7b_finetune.py) | + +## 数据准备 + +请参考[文档](../../../docs/zh_cn/user_guides/dataset_prepare.md#llava-dataset)。 + +## 训练流程 + +LLaVA 训练一共分为两步:对齐模块预训练、指令跟随微调(本指南以 8 卡训练 LLaVA-InternLM2-7B 为例,实际使用时如遇到显卡数量不足、显存不足等情况可以适当调低 batchsize 来降低显存开销) + +预训练的 Projector 默认保存在 `./work_dirs/llava_internlm2_chat_7b_clip_vit_large_p14_336_e1_gpu8_pretrain`,并且指令微调阶段将默认在此路径载入 Projector 权重 (`iter_2181.pth`)。 + +1. 对齐模块训练(默认保存在 `./work_dirs/`) + +```bash +NPROC_PER_NODE=8 xtuner train llava_internlm2_chat_7b_clip_vit_large_p14_336_e1_gpu8_pretrain --deepspeed deepspeed_zero2 +``` + +2. 
指令跟随微调(默认保存在 `./work_dirs/`) + +```bash +NPROC_PER_NODE=8 xtuner train llava_internlm2_chat_7b_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune --deepspeed deepspeed_zero2 +``` + +## 模型转换(和合并) + +训练后,我们将获得一组权重(即,`iter_xxx.pth`,但它并不是通用的 HuggingFace 格式。我们需要对其进行转换。 + +```bash +xtuner convert pth_to_hf $FINETUNE_CFG $PTH_PATH $SAVE_PATH +# e.g., xtuner convert pth_to_hf llava_internlm2_chat_7b_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune ./iter_5198.pth ./iter_5198_hf +``` + +此时,我们将获得所需要的模型(LLM或对应的 LoRA)。 + +之后,如果想要合并 LoRA 至 LLM 或 CLIP-ViT 中,请使用下列命令: + +```bash +(LLM) xtuner convert merge $LLM $LLM_ADAPTER $SAVE_PATH +(CLIP) xtuner convert merge $CLIP $CLIP_ADAPTER $SAVE_PATH --is-clip +``` + +## 对话测试 + +开源的 LLaVA-InternLM2-7B 模型在 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-internlm2-7b) 和 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-internlm2-7b) 都可以下载,您可以利用下列命令实现图文问答! + +```bash +xtuner chat internlm/internlm2-chat-7b \ + --visual-encoder openai/clip-vit-large-patch14-336 \ + --llava xtuner/llava-internlm2-7b \ + --prompt-template internlm2_chat \ + --image $IMAGE_PATH +``` + +此处, `--llava` 请传入模型转换阶段所获得的权重(示例中为 `./iter_5198_hf`)。 + +## 评测 + +XTuner 的 LLaVA 模型可以利用 [VLMEvalKit](https://github.com/open-compass/VLMEvalKit) 进行评测。 + +同时,为了方便使用,XTuner 内也集成了 MMBench 评测,您可以通过下列命令下载 MMBench 评测数据集: + +``` +wget https://opencompass.openxlab.space/utils/VLMEval/MMBench_DEV_EN.tsv +wget https://opencompass.openxlab.space/utils/VLMEval/MMBench_TEST_EN.tsv +wget https://opencompass.openxlab.space/utils/VLMEval/MMBench_DEV_CN.tsv +wget https://opencompass.openxlab.space/utils/VLMEval/MMBench_TEST_CN.tsv +wget https://opencompass.openxlab.space/utils/VLMEval/CCBench.tsv +``` + +之后,您可以利用下列命令实现评测: + +```bash +xtuner mmbench internlm/internlm2-chat-7b \ + --visual-encoder openai/clip-vit-large-patch14-336 \ + --llava xtuner/llava-internlm2-7b \ + --prompt-template internlm2_chat \ + --data-path $DATA_PATH \ + --work-dir $RESULT_PATH +``` + +其中,`$DATA_PATH` 指上一步骤所下载的某一个 tsv 文件,如 `MMBench_DEV_EN.tsv`。 + +评测完成后,若为开发集则会直接打印出结果;若为测试集,则需将 mmbench_result.xlsx 提交至 MMBench 官方完成评测取得精度结果! + +### Refcoco + +若您想要评测 Refcoco 数据集,您需要下载评测数据文件 [链接](https://github.com/Vision-CAIR/MiniGPT-4/tree/main/eval_scripts/eval_data). 之后,您可以利用下列命令实现评测: + +```bash +xtuner eval_refcoco $LLM \ + --visual-encoder $VISUAL_ENCODER \ + --llava $LLAVA_PATH \ + --prompt-template $PROMPT_TEMPLATE \ + --data-path $DATA_PATH \ + --work-dir $RESULT_PATH +``` diff --git a/data/xtuner/xtuner/configs/llava/internlm2_chat_1_8b_clip_vit_large_p14_336/finetune/llava_internlm2_chat_1_8b_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py b/data/xtuner/xtuner/configs/llava/internlm2_chat_1_8b_clip_vit_large_p14_336/finetune/llava_internlm2_chat_1_8b_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..96e18e0e18e39ae8f330f128f5899f99528be2eb --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/internlm2_chat_1_8b_clip_vit_large_p14_336/finetune/llava_internlm2_chat_1_8b_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py @@ -0,0 +1,227 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
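+# LLaVA fine-tuning config: QLoRA on the InternLM2-Chat-1.8B LLM plus LoRA
+# on the CLIP ViT-Large/14-336 visual encoder, trained for 1 epoch on 8 GPUs.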
+import torch +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig, CLIPImageProcessor, + CLIPVisionModel) + +from xtuner.dataset import LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'internlm/internlm2-chat-1_8b' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' +# Specify the pretrained pth +pretrained_pth = './work_dirs/llava_internlm2_chat_1_8b_clip_vit_large_p14_336_e1_gpu8_pretrain/iter_2181.pth' # noqa: E501 + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = int(2048 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 16 # per_device +accumulative_counts = 1 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=True, + freeze_visual_encoder=True, + pretrained_pth=pretrained_pth, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + llm_lora=dict( + type=LoraConfig, + r=512, + lora_alpha=256, + lora_dropout=0.05, + bias='none', + task_type='CAUSAL_LM'), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path), + visual_encoder_lora=dict( + type=LoraConfig, r=64, lora_alpha=16, lora_dropout=0.05, bias='none')) + 
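+# Note on `max_length` in PART 1: a 336x336 image with 14x14 patches yields
+# (336 / 14) ** 2 = 576 visual tokens, so 2048 - 576 = 1472 positions remain
+# for text within the 2048-token context window.
+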
+#######################################################################
+#                     PART 3 Dataset & Dataloader                     #
+#######################################################################
+llava_dataset = dict(
+    type=LLaVADataset,
+    data_path=data_path,
+    image_folder=image_folder,
+    tokenizer=tokenizer,
+    image_processor=image_processor,
+    dataset_map_fn=llava_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    max_length=max_length,
+    pad_image_to_square=True)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    pin_memory=True,
+    dataset=llava_dataset,
+    sampler=dict(
+        type=LengthGroupedSampler,
+        length_property='modality_length',
+        per_device_batch_size=batch_size * accumulative_counts),
+    collate_fn=dict(type=default_collate_fn))
+
+#######################################################################
+#                    PART 4 Scheduler & Optimizer                     #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                            PART 5 Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        image_processor=image_processor,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        evaluation_images=evaluation_images,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llava/internlm2_chat_1_8b_clip_vit_large_p14_336/pretrain/llava_internlm2_chat_1_8b_clip_vit_large_p14_336_e1_gpu8_pretrain.py b/data/xtuner/xtuner/configs/llava/internlm2_chat_1_8b_clip_vit_large_p14_336/pretrain/llava_internlm2_chat_1_8b_clip_vit_large_p14_336_e1_gpu8_pretrain.py new file mode 100644 index 0000000000000000000000000000000000000000..e14cdc91ab9701482e4fb2136d06456c5392bb65 --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/internlm2_chat_1_8b_clip_vit_large_p14_336/pretrain/llava_internlm2_chat_1_8b_clip_vit_large_p14_336_e1_gpu8_pretrain.py @@ -0,0 +1,211 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig, CLIPImageProcessor, + CLIPVisionModel) + +from xtuner.dataset import LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'internlm/internlm2-chat-1_8b' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Pretrain/blip_laion_cc_sbu_558k.json' +image_folder = data_root + 'LLaVA-Pretrain/images' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = int(2048 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 32 # per_device +accumulative_counts = 1 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 1e-3 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + 
pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=True, + freeze_visual_encoder=True, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=LLaVADataset, + data_path=data_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=False) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + pin_memory=True, + dataset=llava_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. 
+ param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llava/internlm2_chat_20b_clip_vit_large_p14_336/finetune/llava_internlm2_chat_20b_clip_vit_large_p14_336_e1_gpu8_finetune.py b/data/xtuner/xtuner/configs/llava/internlm2_chat_20b_clip_vit_large_p14_336/finetune/llava_internlm2_chat_20b_clip_vit_large_p14_336_e1_gpu8_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..ff4e20ce3dee1e4c5accdadebcd8ebb4115cd383 --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/internlm2_chat_20b_clip_vit_large_p14_336/finetune/llava_internlm2_chat_20b_clip_vit_large_p14_336_e1_gpu8_finetune.py @@ -0,0 +1,207 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + CLIPImageProcessor, CLIPVisionModel) + +from xtuner.dataset import LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'internlm/internlm2-chat-20b' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' +# Specify the pretrained pth +pretrained_pth = './work_dirs/llava_internlm2_chat_20b_clip_vit_large_p14_336_e1_gpu8_pretrain/iter_2181.pth' # noqa: E501 + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = int(2048 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 4 # per_device +accumulative_counts = 4 + +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_images = 
'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=False, + freeze_visual_encoder=True, + pretrained_pth=pretrained_pth, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float32), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=LLaVADataset, + data_path=data_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. 
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Use a random seed by default and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/llava/internlm2_chat_20b_clip_vit_large_p14_336/finetune/llava_internlm2_chat_20b_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py b/data/xtuner/xtuner/configs/llava/internlm2_chat_20b_clip_vit_large_p14_336/finetune/llava_internlm2_chat_20b_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py
new file mode 100644
index 0000000000000000000000000000000000000000..1dacbeb923dd6de1426ab2a5c4f39a4d17e4bbea
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llava/internlm2_chat_20b_clip_vit_large_p14_336/finetune/llava_internlm2_chat_20b_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py
@@ -0,0 +1,227 @@
+# Copyright (c) OpenMMLab. All rights reserved.
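+# Summary (inferred from the filename and the settings below): QLoRA
+# fine-tuning of LLaVA with InternLM2-Chat-20B loaded in 4-bit, plus LoRA on
+# the CLIP-ViT-Large-P14-336 vision encoder, for 1 epoch on 8 GPUs.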
+import torch +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig, CLIPImageProcessor, + CLIPVisionModel) + +from xtuner.dataset import LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'internlm/internlm2-chat-20b' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' +# Specify the pretrained pth +pretrained_pth = './work_dirs/llava_internlm2_chat_20b_clip_vit_large_p14_336_e1_gpu8_pretrain/iter_2181.pth' # noqa: E501 + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = int(2048 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 8 # per_device +accumulative_counts = 2 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=True, + freeze_visual_encoder=True, + pretrained_pth=pretrained_pth, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + llm_lora=dict( + type=LoraConfig, + r=512, + lora_alpha=256, + lora_dropout=0.05, + bias='none', + task_type='CAUSAL_LM'), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path), + visual_encoder_lora=dict( + type=LoraConfig, r=64, lora_alpha=16, lora_dropout=0.05, bias='none')) + 
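+# The model above realises QLoRA: the 20B LLM is loaded as frozen 4-bit NF4
+# weights (with double quantization) and trained only through the injected
+# LoRA adapters (r=512, lora_alpha=256); the frozen CLIP encoder likewise
+# gets a smaller LoRA (r=64, lora_alpha=16).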
+####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=LLaVADataset, + data_path=data_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + pin_memory=True, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llava/internlm2_chat_20b_clip_vit_large_p14_336/pretrain/llava_internlm2_chat_20b_clip_vit_large_p14_336_e1_gpu8_pretrain.py b/data/xtuner/xtuner/configs/llava/internlm2_chat_20b_clip_vit_large_p14_336/pretrain/llava_internlm2_chat_20b_clip_vit_large_p14_336_e1_gpu8_pretrain.py new file mode 100644 index 0000000000000000000000000000000000000000..3cc2839a94d8bf69fd5da5c2c19d8de0acbc22ec --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/internlm2_chat_20b_clip_vit_large_p14_336/pretrain/llava_internlm2_chat_20b_clip_vit_large_p14_336_e1_gpu8_pretrain.py @@ -0,0 +1,211 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig, CLIPImageProcessor, + CLIPVisionModel) + +from xtuner.dataset import LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'internlm/internlm2-chat-20b' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Pretrain/blip_laion_cc_sbu_558k.json' +image_folder = data_root + 'LLaVA-Pretrain/images' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = int(2048 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 32 # per_device +accumulative_counts = 1 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 1e-3 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + 
pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=True, + freeze_visual_encoder=True, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=LLaVADataset, + data_path=data_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=False) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + pin_memory=True, + dataset=llava_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. 
+ param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llava/internlm2_chat_7b_clip_vit_large_p14_336/finetune/llava_internlm2_chat_7b_clip_vit_large_p14_336_e1_gpu8_finetune.py b/data/xtuner/xtuner/configs/llava/internlm2_chat_7b_clip_vit_large_p14_336/finetune/llava_internlm2_chat_7b_clip_vit_large_p14_336_e1_gpu8_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..e9f4d8b5f9980756ea151252bd3b0cc521453ff7 --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/internlm2_chat_7b_clip_vit_large_p14_336/finetune/llava_internlm2_chat_7b_clip_vit_large_p14_336_e1_gpu8_finetune.py @@ -0,0 +1,206 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + CLIPImageProcessor, CLIPVisionModel) + +from xtuner.dataset import LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'internlm/internlm2-chat-7b' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' +# Specify the pretrained pth +pretrained_pth = './work_dirs/llava_internlm2_chat_7b_clip_vit_large_p14_336_e1_gpu8_pretrain/iter_2181.pth' # noqa: E501 + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = int(2048 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 8 # per_device +accumulative_counts = 2 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_images = 
'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=False, + freeze_visual_encoder=True, + pretrained_pth=pretrained_pth, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float32), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=LLaVADataset, + data_path=data_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. 
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Use a random seed by default and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/llava/internlm2_chat_7b_clip_vit_large_p14_336/finetune/llava_internlm2_chat_7b_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py b/data/xtuner/xtuner/configs/llava/internlm2_chat_7b_clip_vit_large_p14_336/finetune/llava_internlm2_chat_7b_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py
new file mode 100644
index 0000000000000000000000000000000000000000..3652333c91c555807daf4034674fa56858c71885
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llava/internlm2_chat_7b_clip_vit_large_p14_336/finetune/llava_internlm2_chat_7b_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py
@@ -0,0 +1,227 @@
+# Copyright (c) OpenMMLab. All rights reserved.
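+# Summary (inferred from the filename and the settings below): the 7B
+# counterpart of the QLoRA recipe above, with InternLM2-Chat-7B in 4-bit
+# plus LoRA on the CLIP vision encoder, for 1 epoch on 8 GPUs.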
+import torch +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig, CLIPImageProcessor, + CLIPVisionModel) + +from xtuner.dataset import LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'internlm/internlm2-chat-7b' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' +# Specify the pretrained pth +pretrained_pth = './work_dirs/llava_internlm2_chat_7b_clip_vit_large_p14_336_e1_gpu8_pretrain/iter_2181.pth' # noqa: E501 + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = int(2048 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 16 # per_device +accumulative_counts = 1 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=True, + freeze_visual_encoder=True, + pretrained_pth=pretrained_pth, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + llm_lora=dict( + type=LoraConfig, + r=512, + lora_alpha=256, + lora_dropout=0.05, + bias='none', + task_type='CAUSAL_LM'), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path), + visual_encoder_lora=dict( + type=LoraConfig, r=64, lora_alpha=16, lora_dropout=0.05, bias='none')) + 
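+# With batch_size=16, accumulative_counts=1 and the 8 GPUs implied by the
+# filename, the effective global batch size works out to 16 * 1 * 8 = 128.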
+####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=LLaVADataset, + data_path=data_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + pin_memory=True, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llava/internlm2_chat_7b_clip_vit_large_p14_336/pretrain/llava_internlm2_chat_7b_clip_vit_large_p14_336_e1_gpu8_pretrain.py b/data/xtuner/xtuner/configs/llava/internlm2_chat_7b_clip_vit_large_p14_336/pretrain/llava_internlm2_chat_7b_clip_vit_large_p14_336_e1_gpu8_pretrain.py new file mode 100644 index 0000000000000000000000000000000000000000..72d69b4b3747fd279300c88d0a20a907660d839c --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/internlm2_chat_7b_clip_vit_large_p14_336/pretrain/llava_internlm2_chat_7b_clip_vit_large_p14_336_e1_gpu8_pretrain.py @@ -0,0 +1,211 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig, CLIPImageProcessor, + CLIPVisionModel) + +from xtuner.dataset import LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'internlm/internlm2-chat-7b' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Pretrain/blip_laion_cc_sbu_558k.json' +image_folder = data_root + 'LLaVA-Pretrain/images' +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = int(2048 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 32 # per_device +accumulative_counts = 1 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 1e-3 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + 
pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=True, + freeze_visual_encoder=True, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=LLaVADataset, + data_path=data_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=False) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + pin_memory=True, + dataset=llava_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. 
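+    # (this hook steps the LinearLR warmup followed by the CosineAnnealingLR
+    # decay defined in PART 4)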
+ param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llava/internlm_chat_7b_clip_vit_large_p14_336/finetune/llava_internlm_chat_7b_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py b/data/xtuner/xtuner/configs/llava/internlm_chat_7b_clip_vit_large_p14_336/finetune/llava_internlm_chat_7b_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..e25dc4cc1ebffce7e743e744d1321519f3701b3f --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/internlm_chat_7b_clip_vit_large_p14_336/finetune/llava_internlm_chat_7b_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py @@ -0,0 +1,227 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig, CLIPImageProcessor, + CLIPVisionModel) + +from xtuner.dataset import LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'internlm/internlm-chat-7b' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' +# Specify the pretrained pth +pretrained_pth = './work_dirs/llava_internlm_chat_7b_clip_vit_large_p14_336_e1_gpu8_pretrain/iter_2181.pth' # noqa: E501 + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.internlm_chat +max_length = int(2048 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 16 # per_device +accumulative_counts = 1 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training 
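+# (consumed by the EvaluateChatHook in PART 5: every `evaluation_freq`
+# iterations it generates replies to the prompts below for the sample image,
+# so generation quality can be checked in the training log)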
+evaluation_freq = 500 +SYSTEM = '' +evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=True, + freeze_visual_encoder=True, + pretrained_pth=pretrained_pth, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + llm_lora=dict( + type=LoraConfig, + r=512, + lora_alpha=256, + lora_dropout=0.05, + bias='none', + task_type='CAUSAL_LM'), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path), + visual_encoder_lora=dict( + type=LoraConfig, r=64, lora_alpha=16, lora_dropout=0.05, bias='none')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=LLaVADataset, + data_path=data_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + pin_memory=True, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # 
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        image_processor=image_processor,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        evaluation_images=evaluation_images,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Use a random seed by default and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/llava/internlm_chat_7b_clip_vit_large_p14_336/pretrain/llava_internlm_chat_7b_clip_vit_large_p14_336_e1_gpu8_pretrain.py b/data/xtuner/xtuner/configs/llava/internlm_chat_7b_clip_vit_large_p14_336/pretrain/llava_internlm_chat_7b_clip_vit_large_p14_336_e1_gpu8_pretrain.py
new file mode 100644
index 0000000000000000000000000000000000000000..fbbbeb5ff15736b488e31a79b094c6dfd9ea5e71
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llava/internlm_chat_7b_clip_vit_large_p14_336/pretrain/llava_internlm_chat_7b_clip_vit_large_p14_336_e1_gpu8_pretrain.py
@@ -0,0 +1,211 @@
+# Copyright (c) OpenMMLab. All rights reserved.
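+# Summary (inferred from the filename and the settings below): LLaVA
+# pretraining that aligns a frozen, 4-bit InternLM-Chat-7B with a frozen
+# CLIP-ViT-Large-P14-336 encoder, so only the vision-language projector
+# remains trainable, for 1 epoch on 8 GPUs.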
+import torch +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig, CLIPImageProcessor, + CLIPVisionModel) + +from xtuner.dataset import LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'internlm/internlm-chat-7b' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Pretrain/blip_laion_cc_sbu_558k.json' +image_folder = data_root + 'LLaVA-Pretrain/images' +prompt_template = PROMPT_TEMPLATE.internlm_chat +max_length = int(2048 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 32 # per_device +accumulative_counts = 1 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 1e-3 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=True, + freeze_visual_encoder=True, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=LLaVADataset, + data_path=data_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=False) + 
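+# max_length above reserves (336 / 14)**2 = 576 image-patch tokens out of
+# the 2048-token context, leaving 1472 tokens for text. Pretraining also
+# keeps the plain shuffling DefaultSampler below, whereas the fine-tune
+# configs switch to LengthGroupedSampler to batch samples of similar
+# modality_length and reduce padding.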
+train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + pin_memory=True, + dataset=llava_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/llava/llama3_70b_instruct_clip_vit_large_p14_336/pretrain/llava_llama3_70b_instruct_quant_clip_vit_large_p14_336_e1_gpu8_pretrain.py b/data/xtuner/xtuner/configs/llava/llama3_70b_instruct_clip_vit_large_p14_336/pretrain/llava_llama3_70b_instruct_quant_clip_vit_large_p14_336_e1_gpu8_pretrain.py
new file mode 100644
index 0000000000000000000000000000000000000000..e3ef732979294ca9515a3c9120fc1d853d803ab2
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llava/llama3_70b_instruct_clip_vit_large_p14_336/pretrain/llava_llama3_70b_instruct_quant_clip_vit_large_p14_336_e1_gpu8_pretrain.py
@@ -0,0 +1,210 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig, CLIPImageProcessor,
+                          CLIPVisionModel)
+
+from xtuner.dataset import LLaVADataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import LLaVAModel
+from xtuner.utils import PROMPT_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+llm_name_or_path = 'meta-llama/Meta-Llama-3-70B-Instruct'
+visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336'
+
+# Data
+data_root = './data/llava_data/'
+data_path = data_root + 'LLaVA-Pretrain/blip_laion_cc_sbu_558k.json'
+image_folder = data_root + 'LLaVA-Pretrain/images'
+prompt_template = PROMPT_TEMPLATE.llama3_chat
+max_length = int(2048 - (336 / 14)**2)
+
+# Scheduler & Optimizer
+batch_size = 32  # per_device
+accumulative_counts = 1
+dataloader_num_workers = 4
+max_epochs = 1
+optim_type = AdamW
+lr = 5e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = ''
+evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg'
+evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture']
+
+#######################################################################
+#            PART 2  Model & Tokenizer & Image Processor              #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=True, + freeze_visual_encoder=True, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=LLaVADataset, + data_path=data_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=False) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=llava_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/README.md b/data/xtuner/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/README.md new file mode 100644 index 0000000000000000000000000000000000000000..f0112fe57d3b42a846405fc8831bfd9a14c01c03 --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/README.md @@ -0,0 +1,424 @@ +# LLaVA-Llama-3-8B + +## Results + +
+ +| Model | MMBench Test (EN) | MMBench Test (CN) | CCBench Dev | MMMU Val | SEED-IMG | AI2D Test | ScienceQA Test | HallusionBench aAcc | POPE | GQA | TextVQA | MME | MMStar | Configs | +| :-------------------- | :---------------: | :---------------: | :---------: | :-------: | :------: | :-------: | :------------: | :-----------------: | :--: | :--: | :-----: | :------: | :----: | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| LLaVA-v1.5-7B | 66.5 | 59.0 | 27.5 | 35.3 | 60.5 | 54.8 | 70.4 | 44.9 | 85.9 | 62.0 | 58.2 | 1511/348 | 30.3 | - | +| LLaVA-Llama-3-8B | 68.9 | 61.6 | 30.4 | 36.8 | 69.8 | 60.9 | 73.3 | 47.3 | 87.2 | 63.5 | 58.0 | 1506/295 | 38.2 | [Pretrain](./pretrain/llava_llama3_8b_instruct_clip_vit_large_p14_336_e1_gpu8_pretrain.py) / [Fine-tune](./finetune/llava_llama3_8b_instruct_full_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py) | +| LLaVA-Llama-3-8B-v1.1 | 72.3 | 66.4 | 31.6 | 36.8 | 70.1 | 70.0 | 72.9 | 47.7 | 86.4 | 62.6 | 59.0 | 1469/349 | 45.1 | [Pretrain](./pretrain/llava_llama3_8b_instruct_clip_vit_large_p14_336_e1_gpu8_sharegpt4v_pretrain.py) / [Fine-tune](./finetune/llava_llama3_8b_instruct_full_clip_vit_large_p14_336_lora_e1_gpu8_internvl_finetune.py) | + +## Resources + +- LLaVA-Llama-3-8B-v1.1 + + - Official LLaVA format model (`xtuner/llava-llama-3-8b-v1_1-hf`): 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-llama-3-8b-v1_1-hf) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-llama-3-8b-v1_1-hf) + - HuggingFace LLaVA format model (`xtuner/llava-llama-3-8b-v1_1-transformers`): 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-llama-3-8b-v1_1-transformers) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-llama-3-8b-v1_1-transformers) + - XTuner LLaVA format model (`xtuner/llava-llama-3-8b-v1_1`): 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-llama-3-8b-v1_1) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-llama-3-8b-v1_1) + - GGUF model (`xtuner/llava-llama-3-8b-v1_1-gguf`): 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-llama-3-8b-v1_1-gguf) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-llama-3-8b-v1_1-gguf) + - Pretrained projector weights: 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-llama-3-8b-v1_1-pretrain) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-llama-3-8b-v1_1-pretrain) + +- LLaVA-Llama-3-8B + + - Official LLaVA format model (`xtuner/llava-llama-3-8b-hf`): 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-llama-3-8b-hf) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-llama-3-8b-hf) + - HuggingFace LLaVA format model (`xtuner/llava-llama-3-8b-transformers`): 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-llama-3-8b-transformers) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-llama-3-8b-transformers) + - XTuner LLaVA format model (`xtuner/llava-llama-3-8b`): 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-llama-3-8b) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-llama-3-8b) + - Pretrained projector weights: 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-llama-3-8b-pretrain) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-llama-3-8b-pretrain) + +## Data Preparation + +### LLaVA dataset + +#### File structure + +``` +./data/llava_data +├── LLaVA-Pretrain +│   ├── blip_laion_cc_sbu_558k.json +│   ├── blip_laion_cc_sbu_558k_meta.json +│   
└── images
+├── LLaVA-Instruct-150K
+│   └── llava_v1_5_mix665k.json
+└── llava_images
+    ├── coco
+    │   └── train2017
+    ├── gqa
+    │   └── images
+    ├── ocr_vqa
+    │   └── images
+    ├── textvqa
+    │   └── train_images
+    └── vg
+        ├── VG_100K
+        └── VG_100K_2
+```
+
+#### Pretrain
+
+LLaVA-Pretrain
+
+```shell
+# Make sure you have git-lfs installed (https://git-lfs.com)
+git lfs install
+git clone https://huggingface.co/datasets/liuhaotian/LLaVA-Pretrain --depth=1
+```
+
+#### Finetune
+
+1. Text data
+
+   1. LLaVA-Instruct-150K
+
+      ```shell
+      # Make sure you have git-lfs installed (https://git-lfs.com)
+      git lfs install
+      git clone https://huggingface.co/datasets/liuhaotian/LLaVA-Instruct-150K --depth=1
+      ```
+
+2. Image data
+
+   1. COCO (coco): [download url](http://images.cocodataset.org/zips/train2017.zip)
+
+   2. GQA (gqa): [download url](https://downloads.cs.stanford.edu/nlp/data/gqa/images.zip)
+
+   3. OCR-VQA (ocr_vqa): [download script](https://drive.google.com/drive/folders/1_GYPY5UkUy7HIcR0zq3ZCFgeZN7BAfm_?usp=sharing)
+
+      1. ⚠️ Rename OCR-VQA's images so that every file keeps the `.jpg` extension!
+
+         ```shell
+         #!/bin/bash
+         ocr_vqa_path=""  # directory with the downloaded OCR-VQA images
+
+         find "$ocr_vqa_path" -type f | while read file; do
+             extension="${file##*.}"
+             if [ "$extension" != "jpg" ]
+             then
+                 cp -- "$file" "${file%.*}.jpg"
+             fi
+         done
+         ```
+
+   4. TextVQA (textvqa): [download url](https://dl.fbaipublicfiles.com/textvqa/images/train_val_images.zip)
+
+   5. VisualGenome (VG): [part1](https://cs.stanford.edu/people/rak248/VG_100K_2/images.zip), [part2](https://cs.stanford.edu/people/rak248/VG_100K_2/images2.zip)
+
+### ShareGPT4V dataset
+
+> Reference: https://github.com/InternLM/InternLM-XComposer/blob/main/projects/ShareGPT4V/docs/Data.md
+
+#### File structure
+
+```
+./data/sharegpt4v
+├── share-captioner_coco_lcs_sam_1246k_1107.json
+├── sharegpt4v_instruct_gpt4-vision_cap100k.json
+├── sharegpt4v_mix665k_cap23k_coco-ap9k_lcs3k_sam9k_div2k.json
+└── data
+    ├── sam
+    │   └── images
+    ├── share_textvqa
+    │   └── images
+    ├── web-celebrity
+    │   └── images
+    ├── web-landmark
+    │   └── images
+    ├── wikiart
+    │   └── images
+    ├── llava
+    │   └── llava_pretrain
+    │       └── images -> ../../../../llava_data/LLaVA-Pretrain/images
+    ├── coco -> ../../llava_data/llava_images/coco
+    ├── gqa -> ../../llava_data/llava_images/gqa
+    ├── ocr_vqa -> ../../llava_data/llava_images/ocr_vqa
+    ├── textvqa -> ../../llava_data/llava_images/textvqa
+    └── vg -> ../../llava_data/llava_images/vg
+```
+
+#### Download
+
+1. Text data
+
+   ```shell
+   wget https://huggingface.co/datasets/Lin-Chen/ShareGPT4V/resolve/main/sharegpt4v_instruct_gpt4-vision_cap100k.json
+   wget https://huggingface.co/datasets/Lin-Chen/ShareGPT4V/resolve/main/share-captioner_coco_lcs_sam_1246k_1107.json
+   wget https://huggingface.co/datasets/Lin-Chen/ShareGPT4V/resolve/main/sharegpt4v_mix665k_cap23k_coco-ap9k_lcs3k_sam9k_div2k.json
+   ```
+
+2. Image data
+
+   1. SAM (sam): [download url](https://drive.google.com/file/d/1dKumdOKSXtV7lIXdrG7jsIK_z2vZv2gs/view?usp=drive_link)
+
+   2. ShareTextVQA (share_textvqa): [download url](https://drive.google.com/file/d/1f4v_3e1OJtyYqam1CEp6RenCNTU5_mG2/view?usp=share_link)
+
+   3. Web-Celebrity (web-celebrity): [download url](https://drive.google.com/file/d/1-SB71C3j1mVg0kDDXwj2IWGEoBoRUD-J/view?usp=share_link)
+
+   4. Web-Landmark (web-landmark): [download url](https://drive.google.com/file/d/1JpJkN7ZMA50xAhMx9O-rVb5yLhfGm3_o/view?usp=share_link)
+   5. WikiArt (wikiart): [download url](https://drive.google.com/file/d/1FxB2Nw-vWUcTUSI_dBpPIykb-uGYoEqV/view?usp=share_link)
+
+   6. llava, coco, gqa, ocr_vqa, textvqa, vg: Please refer to the preparation of LLaVA dataset.
+
+### InternVL-SFT
+
+> Reference: https://github.com/OpenGVLab/InternVL/tree/main/internvl_chat#prepare-training-datasets
+
+#### File structure
+
+```
+./data/internvl_sft
+├── sharegpt4v_instruct_gpt4-vision_cap100k.jsonl
+├── llava_instruct_150k_zh.jsonl
+├── sharegpt4v_mix665k_cap23k_coco-ap9k_lcs3k_sam9k_div2k.jsonl
+├── dvqa_train_200k.jsonl
+├── chartqa_train_18k.jsonl
+├── ai2d_train_12k.jsonl
+├── docvqa_train_10k.jsonl
+├── geoqa+.jsonl
+├── synthdog_en.jsonl
+└── data
+    ├── ai2d
+    │   ├── abc_images
+    │   └── images
+    ├── chartqa
+    │   ├── test
+    │   ├── train
+    │   └── val
+    ├── docvqa
+    │   ├── test
+    │   ├── train
+    │   └── val
+    ├── dvqa
+    │   └── images
+    ├── synthdog-en
+    │   └── images
+    ├── geoqa+
+    │   └── images
+    ├── llava
+    │   └── llava_pretrain
+    │       └── images -> ../../../../llava_data/LLaVA-Pretrain/images
+    ├── coco -> ../../llava_data/llava_images/coco
+    ├── gqa -> ../../llava_data/llava_images/gqa
+    ├── ocr_vqa -> ../../llava_data/llava_images/ocr_vqa
+    ├── textvqa -> ../../llava_data/llava_images/textvqa
+    ├── vg -> ../../llava_data/llava_images/vg
+    ├── sam -> ../../sharegpt4v/data/sam
+    ├── share_textvqa -> ../../sharegpt4v/data/share_textvqa
+    ├── web-celebrity -> ../../sharegpt4v/data/web-celebrity
+    ├── web-landmark -> ../../sharegpt4v/data/web-landmark
+    └── wikiart -> ../../sharegpt4v/data/wikiart
+```
+
+#### Download
+
+1. Text data
+
+   ```shell
+   wget https://huggingface.co/OpenGVLab/InternVL/resolve/main/playground.zip
+   unzip ./playground.zip
+   ```
+
+2. Image data
+
+   1. AI2D (ai2d): [download url](https://drive.google.com/file/d/1dqqa3MnrxMXaU_K9JA6C83je32ibwdOY/view?usp=sharing)
+
+   2. ChartQA (chartqa): [download url](https://huggingface.co/datasets/ahmed-masry/ChartQA/resolve/main/ChartQA%20Dataset.zip)
+
+   3. DocVQA (docvqa): [train](https://datasets.cvc.uab.es/rrc/DocVQA/train.tar.gz), [val](https://datasets.cvc.uab.es/rrc/DocVQA/val.tar.gz), [test](https://datasets.cvc.uab.es/rrc/DocVQA/test.tar.gz)
+
+   4. DVQA (dvqa): [download url](https://drive.google.com/file/d/1iKH2lTi1-QxtNUVRxTUWFvUvRHq6HAsZ/view)
+
+   5. SynthDoG-EN (synthdog-en): [download url](https://huggingface.co/OpenGVLab/InternVL/resolve/main/synthdog-en-images.zip)
+
+   6. GeoQA+ (geoqa+): [download url](https://huggingface.co/OpenGVLab/InternVL/resolve/main/geoqa%2B_images.zip)
+
+   7. llava, coco, gqa, ocr_vqa, textvqa, vg: Please refer to the preparation of LLaVA dataset.
+
+   8. sam, share_textvqa, web-celebrity, web-landmark, wikiart: Please refer to the preparation of ShareGPT4V dataset.
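+The `->` entries in the file trees above are symlinks, so the three datasets share a single copy of the images. A minimal Python sketch that creates the ShareGPT4V links (paths are assumed to match the trees above; the InternVL-SFT links follow the same pattern, adjust if your layout differs):
+
+```python
+import os
+
+# (link, target) pairs as in the ShareGPT4V tree above; each target is
+# relative to the directory that contains the link.
+links = {
+    './data/sharegpt4v/data/llava/llava_pretrain/images':
+        '../../../../llava_data/LLaVA-Pretrain/images',
+    './data/sharegpt4v/data/coco': '../../llava_data/llava_images/coco',
+    './data/sharegpt4v/data/gqa': '../../llava_data/llava_images/gqa',
+    './data/sharegpt4v/data/ocr_vqa': '../../llava_data/llava_images/ocr_vqa',
+    './data/sharegpt4v/data/textvqa': '../../llava_data/llava_images/textvqa',
+    './data/sharegpt4v/data/vg': '../../llava_data/llava_images/vg',
+}
+for link, target in links.items():
+    os.makedirs(os.path.dirname(link), exist_ok=True)
+    if not os.path.lexists(link):  # skip links that already exist
+        os.symlink(target, link)
+```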
+## Training
+
+### LLaVA-Llama-3-8B
+
+1. Pretrain (saved by default in `./work_dirs/llava_llama3_8b_instruct_clip_vit_large_p14_336_e1_gpu8_pretrain/`)
+
+```bash
+NPROC_PER_NODE=8 xtuner train llava_llama3_8b_instruct_clip_vit_large_p14_336_e1_gpu8_pretrain --deepspeed deepspeed_zero2 --seed 1024
+```
+
+2. Fine-tune (saved by default in `./work_dirs/llava_llama3_8b_instruct_full_clip_vit_large_p14_336_lora_e1_gpu8_finetune/`)
+
+```bash
+NPROC_PER_NODE=8 xtuner train llava_llama3_8b_instruct_full_clip_vit_large_p14_336_lora_e1_gpu8_finetune --deepspeed deepspeed_zero2 --seed 1024
+```
+
+### LLaVA-Llama-3-8B-v1.1 (Recommended)
+
+1. Pretrain (saved by default in `./work_dirs/llava_llama3_8b_instruct_clip_vit_large_p14_336_e1_gpu8_sharegpt4v_pretrain/`)
+
+```bash
+NPROC_PER_NODE=8 xtuner train llava_llama3_8b_instruct_clip_vit_large_p14_336_e1_gpu8_sharegpt4v_pretrain --deepspeed deepspeed_zero2 --seed 1024
+```
+
+2. Fine-tune (saved by default in `./work_dirs/llava_llama3_8b_instruct_full_clip_vit_large_p14_336_lora_e1_gpu8_internvl_finetune/`)
+
+```bash
+NPROC_PER_NODE=8 xtuner train llava_llama3_8b_instruct_full_clip_vit_large_p14_336_lora_e1_gpu8_internvl_finetune --deepspeed deepspeed_zero2 --seed 1024
+```
+
+### Single card?
+
+XTuner also supports single-card training of LLaVA-Llama-3-8B (Youth Edition): a single 20GB GPU is enough to complete the entire multi-modal training process.
+
+1. Pretrain (saved by default in `./work_dirs/llava_llama3_8b_instruct_quant_clip_vit_large_p14_336_e1_gpu1_pretrain/`)
+
+```bash
+xtuner train llava_llama3_8b_instruct_quant_clip_vit_large_p14_336_e1_gpu1_pretrain --deepspeed deepspeed_zero2 --seed 1024
+```
+
+2. Fine-tune (saved by default in `./work_dirs/llava_llama3_8b_instruct_qlora_clip_vit_large_p14_336_e1_gpu1_finetune/`)
+
+```bash
+xtuner train llava_llama3_8b_instruct_qlora_clip_vit_large_p14_336_e1_gpu1_finetune --deepspeed deepspeed_zero2 --seed 1024
+```
+
+## Model Conversion
+
+After training, we obtain a set of weights (*i.e.*, `iter_xxx.pth`) that are not in the universal HuggingFace format, so we first need to convert them to a LLaVA model.
+
+### Convert `.pth` file to LLaVA model in xtuner format ([xtuner/llava-llama-3-8b-v1_1](https://huggingface.co/xtuner/llava-llama-3-8b-v1_1))
+
+```bash
+xtuner convert pth_to_hf $FINETUNE_CFG $PTH_PATH $SAVE_PATH
+# e.g., xtuner convert pth_to_hf llava_llama3_8b_instruct_full_clip_vit_large_p14_336_lora_e1_gpu8_internvl_finetune ./iter_39620.pth ./iter_39620_xtuner
+```
+
+At this point, we have obtained the relevant model (LLM or the corresponding LoRA).
+If you use the default configuration of LLaVA-Llama-3-8B, you will obtain the following file structure after converting.
+It includes the full-finetuned LLM weights, projector weights, and LoRA weights of the visual encoder.
+
+```
+./iter_39620_xtuner
+├── config.json
+├── generation_config.json
+├── model-00001-of-00009.safetensors
+├── model-00002-of-00009.safetensors
+├── model-00003-of-00009.safetensors
+├── model-00004-of-00009.safetensors
+├── model-00005-of-00009.safetensors
+├── model-00006-of-00009.safetensors
+├── model-00007-of-00009.safetensors
+├── model-00008-of-00009.safetensors
+├── model-00009-of-00009.safetensors
+├── model.safetensors.index.json
+├── projector
+│   ├── config.json
+│   ├── configuration_projector.py
+│   ├── modeling_projector.py
+│   └── model.safetensors
+├── special_tokens_map.json
+├── tokenizer_config.json
+├── tokenizer.json
+└── visual_encoder_adapter
+    ├── adapter_config.json
+    ├── adapter_model.safetensors
+    └── README.md
+```
+
+The LLaVA model in xtuner format can chat via `xtuner chat`:
+
+```bash
+xtuner chat ./iter_39620_xtuner \
+  --visual-encoder openai/clip-vit-large-patch14-336 \
+  --llava ./iter_39620_xtuner \
+  --prompt-template llama3_chat \
+  --image $IMAGE_PATH
+```
+
+and run MMBench evaluation via:
+
+```bash
+xtuner mmbench ./iter_39620_xtuner \
+  --visual-encoder openai/clip-vit-large-patch14-336 \
+  --llava ./iter_39620_xtuner \
+  --prompt-template llama3_chat \
+  --data-path $DATA_PATH \
+  --work-dir $RESULT_PATH
+```
+
+Here, `$DATA_PATH` refers to one of the MMBench datasets. You can download the expected data by
+
+```bash
+wget https://opencompass.openxlab.space/utils/VLMEval/MMBench_DEV_EN.tsv
+wget https://opencompass.openxlab.space/utils/VLMEval/MMBench_TEST_EN.tsv
+wget https://opencompass.openxlab.space/utils/VLMEval/MMBench_DEV_CN.tsv
+wget https://opencompass.openxlab.space/utils/VLMEval/MMBench_TEST_CN.tsv
+wget https://opencompass.openxlab.space/utils/VLMEval/CCBench.tsv
+```
+
+### Convert `.pth` file to LLaVA model in official format ([xtuner/llava-llama-3-8b-v1_1-hf](https://huggingface.co/xtuner/llava-llama-3-8b-v1_1-hf))
+
+```bash
+xtuner convert pth_to_hf $FINETUNE_CFG $PTH_PATH $SAVE_PATH --save-format official
+# e.g., xtuner convert pth_to_hf llava_llama3_8b_instruct_full_clip_vit_large_p14_336_lora_e1_gpu8_internvl_finetune ./iter_39620.pth ./iter_39620_official --save-format official
+```
+
+Here, the converted LLaVA model in official LLaVA format is saved to `./iter_39620_official`.
+
+```
+./iter_39620_official
+├── config.json
+├── generation_config.json
+├── model-00001-of-00009.safetensors
+├── model-00002-of-00009.safetensors
+├── model-00003-of-00009.safetensors
+├── model-00004-of-00009.safetensors
+├── model-00005-of-00009.safetensors
+├── model-00006-of-00009.safetensors
+├── model-00007-of-00009.safetensors
+├── model-00008-of-00009.safetensors
+├── model-00009-of-00009.safetensors
+├── model.safetensors.index.json
+├── preprocessor_config.json
+├── special_tokens_map.json
+├── tokenizer_config.json
+└── tokenizer.json
+```
+
+### Convert `.pth` file to LLaVA model in HuggingFace format ([xtuner/llava-llama-3-8b-v1_1-transformers](https://huggingface.co/xtuner/llava-llama-3-8b-v1_1-transformers))
+
+```bash
+xtuner convert pth_to_hf $FINETUNE_CFG $PTH_PATH $SAVE_PATH --save-format huggingface
+# e.g., xtuner convert pth_to_hf llava_llama3_8b_instruct_full_clip_vit_large_p14_336_lora_e1_gpu8_internvl_finetune ./iter_39620.pth ./iter_39620_huggingface --save-format huggingface
+```
+
+Here, the converted LLaVA model in HuggingFace LLaVA format is saved to `./iter_39620_huggingface`.
+
+```
+./iter_39620_huggingface
+├── config.json
+├── generation_config.json
+├── model-00001-of-00004.safetensors
+├── model-00002-of-00004.safetensors
+├── model-00003-of-00004.safetensors
+├── model-00004-of-00004.safetensors
+├── model.safetensors.index.json
+├── preprocessor_config.json
+├── special_tokens_map.json
+├── tokenizer_config.json
+└── tokenizer.json
+```
+
+## Chat
+
+- XTuner LLaVA format [docs](https://huggingface.co/xtuner/llava-llama-3-8b-v1_1#quickstart)
+- Official LLaVA format [docs](https://huggingface.co/xtuner/llava-llama-3-8b-v1_1-hf#quickstart)
+- HuggingFace LLaVA format [docs](https://huggingface.co/xtuner/llava-llama-3-8b-v1_1-transformers#quickstart)
+- GGUF format [docs](https://huggingface.co/xtuner/llava-llama-3-8b-v1_1-gguf#quickstart)
+
+## Deployment
+
+[LMDeploy](https://github.com/InternLM/lmdeploy) now supports the deployment of official LLaVA format models (e.g., [xtuner/llava-llama-3-8b-v1_1-hf](https://huggingface.co/xtuner/llava-llama-3-8b-v1_1-hf)). For specifics, please refer to [here](https://huggingface.co/xtuner/llava-llama-3-8b-v1_1-hf#chat-by-lmdeploy).
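+
+For quick reference, a minimal Python sketch of chatting with the HuggingFace-format model from above. This is a sketch, not the official quickstart: it assumes `transformers` >= 4.39 with LLaVA support, a local image file, and that the converted model uses Llama-3 chat markup with an `<image>` placeholder (see the quickstart docs linked in the Chat section for the canonical usage):
+
+```python
+import torch
+from PIL import Image
+from transformers import AutoProcessor, LlavaForConditionalGeneration
+
+path = './iter_39620_huggingface'  # output of the conversion above
+model = LlavaForConditionalGeneration.from_pretrained(
+    path, torch_dtype=torch.float16, device_map='auto')
+processor = AutoProcessor.from_pretrained(path)
+
+# Llama-3 chat markup with the <image> placeholder (assumed template).
+prompt = ('<|start_header_id|>user<|end_header_id|>\n\n<image>\n'
+          'Please describe this picture<|eot_id|>'
+          '<|start_header_id|>assistant<|end_header_id|>\n\n')
+inputs = processor(images=Image.open('view.jpg'), text=prompt,
+                   return_tensors='pt').to(model.device)
+out = model.generate(**inputs, max_new_tokens=128)
+print(processor.decode(out[0], skip_special_tokens=True))
+```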
diff --git a/data/xtuner/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/convert_xtuner_weights_to_hf.py b/data/xtuner/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/convert_xtuner_weights_to_hf.py
new file mode 100644
index 0000000000000000000000000000000000000000..17c5eb2ef3c52baef81dcd845738663c2a38c18d
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/convert_xtuner_weights_to_hf.py
@@ -0,0 +1,143 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+# Modified from https://github.com/huggingface/transformers/blob/v4.40.1/src/transformers/models/llava/convert_llava_weights_to_hf.py  # noqa: E501
+import argparse
+
+import torch
+from safetensors import safe_open
+from transformers import (AddedToken, AutoConfig, AutoModelForCausalLM,
+                          CLIPImageProcessor, CLIPVisionModel,
+                          LlamaTokenizerFast, LlavaConfig,
+                          LlavaForConditionalGeneration, LlavaProcessor)
+
+KEYS_TO_MODIFY_MAPPING_LLM = {
+    'model': 'language_model.model',
+    'lm_head': 'language_model.lm_head',
+}
+KEYS_TO_MODIFY_MAPPING_VIT = {
+    'vision_model': 'vision_tower.vision_model',
+}
+KEYS_TO_MODIFY_MAPPING_PROJECTOR = {
+    'model.0': 'multi_modal_projector.linear_1',
+    'model.2': 'multi_modal_projector.linear_2',
+}
+
+
+def convert_state_dict_to_hf(state_dict, mapping):
+    new_state_dict = {}
+    for key, value in state_dict.items():
+        if key.endswith('.inv_freq'):
+            continue
+        for key_to_modify, new_key in mapping.items():
+            if key_to_modify in key:
+                key = key.replace(key_to_modify, new_key)
+
+        new_state_dict[key] = value
+    return new_state_dict
+
+
+def convert_to_hf(text_model_id, vision_model_id, projector_weight, save_path):
+    torch.set_default_dtype(torch.float16)
+    text_config = AutoConfig.from_pretrained(
+        text_model_id, trust_remote_code=True)
+    vision_config = AutoConfig.from_pretrained(vision_model_id)
+    if hasattr(vision_config, 'vision_config'):
+        vision_config = vision_config.vision_config
+
+    tokenizer = LlamaTokenizerFast.from_pretrained(text_model_id)
+    tokenizer.add_tokens(
+        AddedToken('<image>', special=True, normalized=False),
+        special_tokens=True)
+    tokenizer.add_special_tokens({'pad_token': '<pad>'})
+
+    image_processor = CLIPImageProcessor.from_pretrained(vision_model_id)
+
+    processor = LlavaProcessor(
+        tokenizer=tokenizer, image_processor=image_processor)
+
+    config = LlavaConfig(
+        text_config=text_config,
+        vision_config=vision_config,
+        attn_implementation='eager')
+
+    with torch.device('meta'):
+        model = LlavaForConditionalGeneration(config)
+
+    # Pad to 64 for performance reasons
+    pad_shape = 64
+
+    projector_state_dict = {}
+    with safe_open(projector_weight, framework='pt', device='cpu') as f:
+        for key in f.keys():
+            projector_state_dict[key] = f.get_tensor(key)
+
+    ori_llm = AutoModelForCausalLM.from_pretrained(
+        text_model_id, trust_remote_code=True)
+    ori_vit = CLIPVisionModel.from_pretrained(vision_model_id)
+
+    llm_state_dict = ori_llm.state_dict()
+    vit_state_dict = ori_vit.state_dict()
+
+    projector_state_dict = convert_state_dict_to_hf(
+        projector_state_dict, KEYS_TO_MODIFY_MAPPING_PROJECTOR)
+    llm_state_dict = convert_state_dict_to_hf(llm_state_dict,
+                                              KEYS_TO_MODIFY_MAPPING_LLM)
+    vit_state_dict = convert_state_dict_to_hf(vit_state_dict,
+                                              KEYS_TO_MODIFY_MAPPING_VIT)
+    state_dict = {**projector_state_dict, **llm_state_dict, **vit_state_dict}
+    model.load_state_dict(state_dict, strict=True, assign=True)
+
+    pre_expansion_embeddings = \
+        model.language_model.model.embed_tokens.weight.data
+    mu = torch.mean(pre_expansion_embeddings, dim=0).float()
+    n = pre_expansion_embeddings.size()[0]
+    sigma = ((pre_expansion_embeddings - mu).T
+             @ (pre_expansion_embeddings - mu)) / n
+    dist = torch.distributions.multivariate_normal.MultivariateNormal(
+        mu, covariance_matrix=1e-5 * sigma)
+
+    # We add an image token so we resize the model
+    ori_vocab_size = config.text_config.vocab_size
+    tokenizer_vocab_size = tokenizer.encode('<pad>')[-1]
+    added_token = tokenizer_vocab_size - ori_vocab_size
+
+    if added_token > 0:
+        model.resize_token_embeddings(ori_vocab_size + added_token, pad_shape)
+        model.language_model.model.embed_tokens.weight.data[
+            ori_vocab_size:] = torch.stack(
+                tuple(dist.sample()
+                      for _ in range(model.language_model.model.embed_tokens.
+                                     weight.data[ori_vocab_size:].shape[0])),
+                dim=0,
+            )
+        model.language_model.lm_head.weight.data[
+            ori_vocab_size:] = torch.stack(
+                tuple(dist.sample()
+                      for _ in range(model.language_model.lm_head.weight.
+                                     data[ori_vocab_size:].shape[0])),
+                dim=0,
+            )
+
+    model.config.image_token_index = tokenizer.encode('<image>')[-1]
+    model.config.pad_token_id = tokenizer.encode('<pad>')[-1]
+
+    if ori_vit.__class__.__name__ == 'SiglipVisionModel':
+        model.config.vision_feature_select_strategy = 'full'
+
+    model.save_pretrained(save_path)
+    processor.save_pretrained(save_path)
+    print(f'Saved to {save_path}')
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--text_model_id')
+    parser.add_argument('--vision_model_id')
+    parser.add_argument('--projector_weight')
+    parser.add_argument('--save_path')
+    args = parser.parse_args()
+    convert_to_hf(args.text_model_id, args.vision_model_id,
+                  args.projector_weight, args.save_path)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/data/xtuner/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/convert_xtuner_weights_to_llava.py b/data/xtuner/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/convert_xtuner_weights_to_llava.py
new file mode 100644
index 0000000000000000000000000000000000000000..8a1df62330ca8367cf031afef72af5f0ae84e6ab
--- /dev/null
+++ b/data/xtuner/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/convert_xtuner_weights_to_llava.py
@@ -0,0 +1,106 @@
+# Copyright (c) OpenMMLab. All rights reserved.
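+# Overview (added note): this script rebuilds an official LLaVA
+# (haotian-liu/LLaVA, which must be installed; see the ImportError hint
+# below) checkpoint from XTuner outputs. It remaps the trained projector
+# weights to `model.mm_projector.*`, moves the CLIP weights under the
+# `model.vision_tower.vision_tower.*` namespace, merges both with the LLM
+# state dict, and loads the result into a meta-initialized
+# LlavaLlamaForCausalLM via `load_state_dict(..., assign=True)` before
+# saving in 2 GB shards.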
+import argparse + +import torch + +try: + from llava.model import LlavaConfig, LlavaLlamaForCausalLM + from llava.utils import disable_torch_init +except ImportError: + raise ImportError( + 'Please install llava with ' + '`pip install git+https://github.com/haotian-liu/LLaVA.git ' + '--no-deps`.') +from safetensors import safe_open +from transformers import (AutoConfig, AutoModelForCausalLM, AutoTokenizer, + CLIPImageProcessor, CLIPVisionModel) + +KEYS_TO_MODIFY_MAPPING_VIT = { + 'vision_model': 'model.vision_tower.vision_tower.vision_model', +} +KEYS_TO_MODIFY_MAPPING_PROJECTOR = { + 'model.0': 'model.mm_projector.0', + 'model.2': 'model.mm_projector.2', +} + + +def convert_state_dict_to_hf(state_dict, mapping): + new_state_dict = {} + for key, value in state_dict.items(): + if key.endswith('.inv_freq'): + continue + for key_to_modify, new_key in mapping.items(): + if key_to_modify in key: + key = key.replace(key_to_modify, new_key) + new_state_dict[key] = value + return new_state_dict + + +def convert_to_llava(text_model_id, vision_model_id, projector_weight, + save_path): + disable_torch_init() + torch.set_default_dtype(torch.float16) + + projector_state_dict = {} + with safe_open(projector_weight, framework='pt', device='cpu') as f: + for key in f.keys(): + projector_state_dict[key] = f.get_tensor(key) + + ori_llm = AutoModelForCausalLM.from_pretrained( + text_model_id, trust_remote_code=True, device_map='auto') + ori_vit = CLIPVisionModel.from_pretrained(vision_model_id) + llm_state_dict = ori_llm.state_dict() + vit_state_dict = ori_vit.state_dict() + + projector_state_dict = convert_state_dict_to_hf( + projector_state_dict, KEYS_TO_MODIFY_MAPPING_PROJECTOR) + vit_state_dict = convert_state_dict_to_hf(vit_state_dict, + KEYS_TO_MODIFY_MAPPING_VIT) + state_dict = {**projector_state_dict, **llm_state_dict, **vit_state_dict} + + tokenizer = AutoTokenizer.from_pretrained(text_model_id) + text_config = AutoConfig.from_pretrained( + text_model_id, trust_remote_code=True) + + ori_config = text_config.__dict__.copy() + ori_config.update( + dict( + image_aspect_ratio='pad', + mm_hidden_size=ori_vit.config.hidden_size, + mm_projector_type='mlp2x_gelu', + mm_use_im_patch_token=False, + mm_use_im_start_end=False, + mm_vision_select_feature='patch', + mm_vision_select_layer=-2, + mm_vision_tower=vision_model_id, + unfreeze_mm_vision_tower=True, + model_type='llava', + use_cache=True, + use_mm_proj=True)) + config = LlavaConfig(**ori_config) + + with torch.device('meta'): + model = LlavaLlamaForCausalLM(config) + + image_processor = CLIPImageProcessor.from_pretrained(vision_model_id) + + model.load_state_dict(state_dict, strict=True, assign=True) + model.save_pretrained(save_path, max_shard_size='2GB') + image_processor.save_pretrained(save_path) + tokenizer.save_pretrained(save_path) + print(f'Saved to {save_path}') + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--text_model_id') + parser.add_argument('--vision_model_id') + parser.add_argument('--projector_weight') + parser.add_argument('--save_path') + args = parser.parse_args() + convert_to_llava(args.text_model_id, args.vision_model_id, + args.projector_weight, args.save_path) + + +if __name__ == '__main__': + main() diff --git a/data/xtuner/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/finetune/llava_llama3_8b_instruct_full_clip_vit_large_p14_336_e1_gpu8_finetune.py 
b/data/xtuner/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/finetune/llava_llama3_8b_instruct_full_clip_vit_large_p14_336_e1_gpu8_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..6db8ed31b124eeae665b3ae4403569f4d536a697 --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/finetune/llava_llama3_8b_instruct_full_clip_vit_large_p14_336_e1_gpu8_finetune.py @@ -0,0 +1,205 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + CLIPImageProcessor, CLIPVisionModel) + +from xtuner.dataset import LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'meta-llama/Meta-Llama-3-8B-Instruct' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' +# Specify the pretrained pth +pretrained_pth = './work_dirs/llava_llama3_8b_instruct_clip_vit_large_p14_336_e1_gpu8_pretrain/iter_2181.pth' # noqa: E501 + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.llama3_chat +max_length = int(2048 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 8 # per_device +accumulative_counts = 2 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 1000 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 1000 +SYSTEM = '' +evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=False, + freeze_visual_encoder=True, + pretrained_pth=pretrained_pth, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path)) + +####################################################################### +# PART 3 Dataset & Dataloader # 
+####################################################################### +llava_dataset = dict( + type=LLaVADataset, + data_path=data_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + pin_memory=True, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/finetune/llava_llama3_8b_instruct_full_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py b/data/xtuner/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/finetune/llava_llama3_8b_instruct_full_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..e35984b5e83ead1e48203669ecc8634e38e5d6b0 --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/finetune/llava_llama3_8b_instruct_full_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py @@ -0,0 +1,208 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + CLIPImageProcessor, CLIPVisionModel) + +from xtuner.dataset import LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'meta-llama/Meta-Llama-3-8B-Instruct' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' +# Specify the pretrained pth +pretrained_pth = './work_dirs/llava_llama3_8b_instruct_clip_vit_large_p14_336_e1_gpu8_pretrain/iter_2181.pth' # noqa: E501 + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.llama3_chat +max_length = int(2048 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 8 # per_device +accumulative_counts = 2 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 1000 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 1000 +SYSTEM = '' +evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer 
& Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=False, + freeze_visual_encoder=True, + pretrained_pth=pretrained_pth, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path), + visual_encoder_lora=dict( + type=LoraConfig, r=64, lora_alpha=16, lora_dropout=0.05, bias='none')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=LLaVADataset, + data_path=data_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + pin_memory=True, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. 
+ logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/finetune/llava_llama3_8b_instruct_full_clip_vit_large_p14_336_lora_e1_gpu8_internvl_finetune.py b/data/xtuner/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/finetune/llava_llama3_8b_instruct_full_clip_vit_large_p14_336_lora_e1_gpu8_internvl_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..98cddc939a4f7ff787437069f39c0d3c38184dbb --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/finetune/llava_llama3_8b_instruct_full_clip_vit_large_p14_336_lora_e1_gpu8_internvl_finetune.py @@ -0,0 +1,337 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
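+# Overview (added note): this config fine-tunes on the InternVL-SFT mix:
+# nine datasets (ShareGPT4V captions and mix, LLaVA-Instruct zh, DVQA,
+# ChartQA, AI2D, DocVQA, GeoQA+, SynthDoG-EN) concatenated with
+# ConcatDataset and batched by LengthGroupedSampler on modality length.
+# The LLM is fully unfrozen, while the frozen CLIP encoder is adapted
+# through LoRA (r=64).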
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + CLIPImageProcessor, CLIPVisionModel) + +from xtuner.dataset import ConcatDataset, LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'meta-llama/Meta-Llama-3-8B-Instruct' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' +# Specify the pretrained pth +pretrained_pth = './work_dirs/llava_llama3_8b_instruct_clip_vit_large_p14_336_e1_gpu8_sharegpt4v_pretrain/iter_9742.pth' # noqa: E501 +# Data +data_root = './data/internvl_sft/' + +sharegpt4v_caption_data_path = data_root + 'sharegpt4v_instruct_gpt4-vision_cap100k.jsonl' # noqa: E501 +sharegpt4v_caption_image_folder = data_root + 'data' + +llava_data_path = data_root + 'llava_instruct_150k_zh.jsonl' +llava_image_folder = data_root + 'data/coco' + +sharegpt4v_data_path = data_root + 'sharegpt4v_mix665k_cap23k_coco-ap9k_lcs3k_sam9k_div2k.jsonl' # noqa: E501 +sharegpt4v_image_folder = data_root + 'data' + +dvqa_data_path = data_root + 'dvqa_train_200k.jsonl' +dvqa_image_folder = data_root + 'data/dvqa' + +chartqa_data_path = data_root + 'chartqa_train_18k.jsonl' +chartqa_image_folder = data_root + 'data/chartqa' + +ai2d_data_path = data_root + 'ai2d_train_12k.jsonl' +ai2d_image_folder = data_root + 'data/ai2d' + +docvqa_data_path = data_root + 'docvqa_train_10k.jsonl' +docvqa_image_folder = data_root + 'data/docvqa' + +geoqa_data_path = data_root + 'geoqa+.jsonl' +geoqa_image_folder = data_root + 'data/geoqa+' + +synthdog_data_path = data_root + 'synthdog_en.jsonl' +synthdog_image_folder = data_root + 'data/synthdog-en' + +prompt_template = PROMPT_TEMPLATE.llama3_chat +max_length = int(4096 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 4 # per_device +accumulative_counts = 4 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 5000 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 5000 +SYSTEM = '' +evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = 
dict( + type=LLaVAModel, + freeze_llm=False, + freeze_visual_encoder=True, + pretrained_pth=pretrained_pth, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path), + visual_encoder_lora=dict( + type=LoraConfig, r=64, lora_alpha=16, lora_dropout=0.05, bias='none')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +sharegpt4v_caption_dataset = dict( + type=LLaVADataset, + data_path=sharegpt4v_caption_data_path, + image_folder=sharegpt4v_caption_image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +llava_dataset = dict( + type=LLaVADataset, + data_path=llava_data_path, + image_folder=llava_image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +sharegpt4v_dataset = dict( + type=LLaVADataset, + data_path=sharegpt4v_data_path, + image_folder=sharegpt4v_image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +dvqa_dataset = dict( + type=LLaVADataset, + data_path=dvqa_data_path, + image_folder=dvqa_image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +chartqa_dataset = dict( + type=LLaVADataset, + data_path=chartqa_data_path, + image_folder=chartqa_image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +ai2d_dataset = dict( + type=LLaVADataset, + data_path=ai2d_data_path, + image_folder=ai2d_image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +docvqa_dataset = dict( + type=LLaVADataset, + data_path=docvqa_data_path, + image_folder=docvqa_image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +geoqa_dataset = dict( + type=LLaVADataset, + data_path=geoqa_data_path, + image_folder=geoqa_image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +synthdog_dataset = dict( + type=LLaVADataset, + data_path=synthdog_data_path, + image_folder=synthdog_image_folder, + tokenizer=tokenizer, + 
image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +train_dataset = dict( + type=ConcatDataset, + datasets=[ + sharegpt4v_caption_dataset, llava_dataset, sharegpt4v_dataset, + dvqa_dataset, chartqa_dataset, ai2d_dataset, docvqa_dataset, + geoqa_dataset, synthdog_dataset + ]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + pin_memory=True, + dataset=train_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/finetune/llava_llama3_8b_instruct_qlora_clip_vit_large_p14_336_e1_gpu1_finetune.py b/data/xtuner/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/finetune/llava_llama3_8b_instruct_qlora_clip_vit_large_p14_336_e1_gpu1_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..99d20900534316417b5102c4b40b86cfcb8e2e97 --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/finetune/llava_llama3_8b_instruct_qlora_clip_vit_large_p14_336_e1_gpu1_finetune.py @@ -0,0 +1,224 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig, CLIPImageProcessor, + CLIPVisionModel) + +from xtuner.dataset import LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'meta-llama/Meta-Llama-3-8B-Instruct' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' +# Specify the pretrained pth +pretrained_pth = './work_dirs/llava_llama3_8b_instruct_quant_clip_vit_large_p14_336_e1_gpu1_pretrain/558128.pth' # noqa: E501 + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.llama3_chat +max_length = int(2048 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 128 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 50000 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 50000 +SYSTEM = '' +evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + 
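
Two derived values in the settings block above are easy to misread, so here is the arithmetic spelled out. CLIP ViT-L/14 at 336 px emits (336 / 14)² = 576 image tokens, which the config subtracts from the 2048-token context window; and with a per-device batch of 1, the accumulation count of 128 sets the effective optimizer batch. A minimal sketch restating those numbers (nothing new beyond the config):

```python
# Restates values from the settings above; illustrative only.
image_tokens = int((336 / 14) ** 2)      # 24 x 24 patches -> 576 image tokens
max_length = 2048 - image_tokens         # text budget: 2048 - 576 = 1472

batch_size = 1                           # per device
accumulative_counts = 128
effective_batch = batch_size * accumulative_counts   # samples per optimizer step

assert (image_tokens, max_length, effective_batch) == (576, 1472, 128)
```
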
+####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=True, + freeze_visual_encoder=True, + pretrained_pth=pretrained_pth, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + llm_lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.05, + bias='none', + task_type='CAUSAL_LM'), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=LLaVADataset, + data_path=data_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + 
evaluation_inputs=evaluation_inputs, + evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/pretrain/llava_llama3_8b_instruct_clip_vit_large_p14_336_e1_gpu8_pretrain.py b/data/xtuner/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/pretrain/llava_llama3_8b_instruct_clip_vit_large_p14_336_e1_gpu8_pretrain.py new file mode 100644 index 0000000000000000000000000000000000000000..342348370ce0ca95f3d3f2543a0af45621acff8a --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/pretrain/llava_llama3_8b_instruct_clip_vit_large_p14_336_e1_gpu8_pretrain.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
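
A note on the idiom used throughout these config files: every component is declared as a plain `dict` whose `type` key holds the actual class or function to call, and mmengine instantiates the tree lazily when the runner starts. The sketch below is a simplified re-implementation of that pattern for illustration, not mmengine's actual builder:

```python
def build(cfg):
    """Recursively instantiate a ``dict(type=callable, **kwargs)`` config node."""
    if isinstance(cfg, dict) and 'type' in cfg:
        kwargs = {k: build(v) for k, v in cfg.items() if k != 'type'}
        return cfg['type'](**kwargs)      # e.g. AutoTokenizer.from_pretrained(...)
    if isinstance(cfg, list):
        return [build(item) for item in cfg]
    return cfg
```

Deferring construction this way is what lets one file describe tokenizer, model, datasets, and hooks without loading any weights until training actually begins.
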
+from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + CLIPImageProcessor, CLIPVisionModel) + +from xtuner.dataset import LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'meta-llama/Meta-Llama-3-8B-Instruct' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Pretrain/blip_laion_cc_sbu_558k.json' +image_folder = data_root + 'LLaVA-Pretrain/images' +prompt_template = PROMPT_TEMPLATE.llama3_chat +max_length = int(2048 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 32 # per_device +accumulative_counts = 1 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 1e-3 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=True, + freeze_visual_encoder=True, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=LLaVADataset, + data_path=data_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=False) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + pin_memory=True, + dataset=llava_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & 
Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/pretrain/llava_llama3_8b_instruct_clip_vit_large_p14_336_e1_gpu8_sharegpt4v_pretrain.py b/data/xtuner/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/pretrain/llava_llama3_8b_instruct_clip_vit_large_p14_336_e1_gpu8_sharegpt4v_pretrain.py new file mode 100644 index 0000000000000000000000000000000000000000..6e2e324318fb1db54bc0d7bd547b4c3bb256ea3f --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/pretrain/llava_llama3_8b_instruct_clip_vit_large_p14_336_e1_gpu8_sharegpt4v_pretrain.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
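
The `param_scheduler` block shared by these configs produces a two-phase curve: a linear warmup over the first `warmup_ratio` of training, then cosine decay to `eta_min`. A small sketch of the resulting learning rate as a function of training progress, assuming `convert_to_iter_based=True` simply maps the epoch fractions onto iterations:

```python
import math

def lr_at(progress, base_lr=1e-3, warmup_ratio=0.03, start_factor=1e-5, eta_min=0.0):
    """Learning rate at training progress in [0, 1] for linear warmup + cosine decay."""
    if progress < warmup_ratio:
        # LinearLR: ramp from base_lr * start_factor up to base_lr.
        frac = progress / warmup_ratio
        return base_lr * (start_factor + (1 - start_factor) * frac)
    # CosineAnnealingLR: decay from base_lr down to eta_min.
    frac = (progress - warmup_ratio) / (1 - warmup_ratio)
    return eta_min + (base_lr - eta_min) * 0.5 * (1 + math.cos(math.pi * frac))

print(lr_at(0.0))    # ~1e-8 at the first iteration
print(lr_at(0.03))   # 1e-3 at the end of warmup
print(lr_at(1.0))    # ~0.0 at the end of training
```
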
+from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + CLIPImageProcessor, CLIPVisionModel) + +from xtuner.dataset import LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'meta-llama/Meta-Llama-3-8B-Instruct' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' + +# Data +data_root = './data/sharegpt4v/' +data_path = data_root + 'share-captioner_coco_lcs_sam_1246k_1107.json' +image_folder = data_root + 'data' +prompt_template = PROMPT_TEMPLATE.llama3_chat +max_length = int(4096 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 16 # per_device +accumulative_counts = 2 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 1e-3 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 1000 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 1000 +SYSTEM = '' +evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=True, + freeze_visual_encoder=True, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=LLaVADataset, + data_path=data_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=False) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + pin_memory=True, + dataset=llava_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # 
+####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/pretrain/llava_llama3_8b_instruct_quant_clip_vit_large_p14_336_e1_gpu1_pretrain.py b/data/xtuner/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/pretrain/llava_llama3_8b_instruct_quant_clip_vit_large_p14_336_e1_gpu1_pretrain.py new file mode 100644 index 0000000000000000000000000000000000000000..98a4813e257f1d774432dc8bb506de97093e06d4 --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/pretrain/llava_llama3_8b_instruct_quant_clip_vit_large_p14_336_e1_gpu1_pretrain.py @@ -0,0 +1,210 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
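
The `pretrained_pth` at the top of this section points at `iter_9742.pth` from the ShareGPT4V pretrain run the preceding config defines, and that iteration count is consistent with the effective batch implied by the file name: 16 samples per device across 8 GPUs is 128 samples per dataloader step (gradient accumulation changes optimizer steps, not the iteration counter). The exact caption count is not in this diff; `1_246_901` below is a hypothetical stand-in consistent with the dataset name's "1246k":

```python
import math

per_device_batch = 16            # batch_size in the sharegpt4v pretrain config
num_gpus = 8                     # from the config file name: ..._e1_gpu8_...
samples_per_iter = per_device_batch * num_gpus    # 128

num_samples = 1_246_901          # hypothetical; the dataset name only says "1246k"
iters_per_epoch = math.ceil(num_samples / samples_per_iter)
print(iters_per_epoch)           # -> 9742, matching iter_9742.pth
```
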
+import torch +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig, CLIPImageProcessor, + CLIPVisionModel) + +from xtuner.dataset import LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'meta-llama/Meta-Llama-3-8B-Instruct' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Pretrain/blip_laion_cc_sbu_558k.json' +image_folder = data_root + 'LLaVA-Pretrain/images' +prompt_template = PROMPT_TEMPLATE.llama3_chat +max_length = int(2048 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 256 +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 1e-3 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 50000 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 50000 +SYSTEM = '' +evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=True, + freeze_visual_encoder=True, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=LLaVADataset, + data_path=data_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + 
pad_image_to_square=False) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=llava_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llava/official/llava_v15_13b/llava_v15_13b_finetune.py b/data/xtuner/xtuner/configs/llava/official/llava_v15_13b/llava_v15_13b_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..183b73a9e0d94344bc67cb987378fb92eb719b25 --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/official/llava_v15_13b/llava_v15_13b_finetune.py @@ -0,0 +1,205 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
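
The quantized pretrain config that precedes this file wraps its LLM in a 4-bit NF4 `BitsAndBytesConfig`. For reference, the same setup expressed directly against `transformers`, outside the config system — a sketch restating the values already in the config (loading the 8B checkpoint additionally requires access to the gated repo):

```python
import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

quant_cfg = BitsAndBytesConfig(
    load_in_4bit=True,
    llm_int8_threshold=6.0,
    llm_int8_has_fp16_weight=False,
    bnb_4bit_compute_dtype=torch.float16,
    bnb_4bit_use_double_quant=True,
    bnb_4bit_quant_type='nf4',
)
llm = AutoModelForCausalLM.from_pretrained(
    'meta-llama/Meta-Llama-3-8B-Instruct',
    trust_remote_code=True,
    torch_dtype=torch.float16,
    quantization_config=quant_cfg,
)
```
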
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + CLIPImageProcessor, CLIPVisionModel) + +from xtuner.dataset import LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'lmsys/vicuna-13b-v1.5' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' +# Specify the pretrained pth +pretrained_pth = './work_dirs/llava_v15_13b_pretrain/iter_2181.pth' + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.vicuna +max_length = int(2048 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 16 # per_device +accumulative_counts = 1 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=False, + freeze_visual_encoder=True, + pretrained_pth=pretrained_pth, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=LLaVADataset, + data_path=data_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + pin_memory=True, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + 
length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llava/official/llava_v15_13b/llava_v15_13b_finetune_lora.py b/data/xtuner/xtuner/configs/llava/official/llava_v15_13b/llava_v15_13b_finetune_lora.py new file mode 100644 index 0000000000000000000000000000000000000000..2384bbf716902531a6e1aba04aa39a014517393d --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/official/llava_v15_13b/llava_v15_13b_finetune_lora.py @@ -0,0 +1,213 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
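
The finetune configs above swap the default shuffling sampler for `LengthGroupedSampler` over `modality_length`: samples of similar length land in the same batch, so less of each batch is padding. Note the grouping granularity is `batch_size * accumulative_counts`, keeping every accumulated optimizer step homogeneous in length. A toy version of the idea (my illustration, not xtuner's implementation):

```python
import random

def length_grouped_indices(lengths, mega_batch_size):
    """Shuffle globally, then sort by length inside each mega-batch."""
    indices = list(range(len(lengths)))
    random.shuffle(indices)
    megas = [indices[i:i + mega_batch_size]
             for i in range(0, len(indices), mega_batch_size)]
    return [i for mega in megas for i in sorted(mega, key=lambda j: lengths[j])]

lengths = [5, 900, 12, 850, 30, 870, 7, 880]
print(length_grouped_indices(lengths, mega_batch_size=4))
```
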
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + CLIPImageProcessor, CLIPVisionModel) + +from xtuner.dataset import LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'lmsys/vicuna-13b-v1.5' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' +# Specify the pretrained pth +pretrained_pth = './work_dirs/llava_v15_13b_pretrain/iter_2181.pth' + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.vicuna +max_length = int(2048 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 16 # per_device +accumulative_counts = 1 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=True, + freeze_visual_encoder=True, + pretrained_pth=pretrained_pth, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True), + llm_lora=dict( + type=LoraConfig, + r=128, + lora_alpha=256, + lora_dropout=0.05, + bias='none', + task_type='CAUSAL_LM'), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=LLaVADataset, + data_path=data_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +train_dataloader = dict( + batch_size=batch_size, + 
num_workers=dataloader_num_workers, + pin_memory=True, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llava/official/llava_v15_13b/llava_v15_13b_pretrain.py b/data/xtuner/xtuner/configs/llava/official/llava_v15_13b/llava_v15_13b_pretrain.py new file mode 100644 index 0000000000000000000000000000000000000000..358f09934c754082048d9721e6fa4788ee6925cf --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/official/llava_v15_13b/llava_v15_13b_pretrain.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
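
In the LoRA finetune above, the adapter contributes `delta_W = (lora_alpha / r) * B @ A` on top of the frozen weight, so `r=128, lora_alpha=256` applies a scaling factor of 2. The sketch below follows the standard LoRA initialization (B zeroed so training starts from the pretrained behavior); the layer sizes are toy values, not the 13B model's:

```python
import torch

d_out, d_in = 512, 512           # toy sizes; the real 13B model uses 5120
r, lora_alpha = 128, 256         # values from the config above

A = torch.randn(r, d_in) * 0.01  # small random init
B = torch.zeros(d_out, r)        # zero init -> delta_W starts at exactly 0
scaling = lora_alpha / r         # = 2.0

delta_W = scaling * (B @ A)      # added to the frozen base weight W
assert torch.all(delta_W == 0)   # the model is unchanged before training
```
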
+from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + CLIPImageProcessor, CLIPVisionModel) + +from xtuner.dataset import LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'lmsys/vicuna-13b-v1.5' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Pretrain/blip_laion_cc_sbu_558k.json' +image_folder = data_root + 'LLaVA-Pretrain/images' +prompt_template = PROMPT_TEMPLATE.vicuna +max_length = int(2048 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 32 # per_device +accumulative_counts = 1 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 1e-3 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=True, + freeze_visual_encoder=True, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=LLaVADataset, + data_path=data_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=False) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + pin_memory=True, + dataset=llava_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # 
+####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llava/official/llava_v15_7b/llava_v15_7b_finetune.py b/data/xtuner/xtuner/configs/llava/official/llava_v15_7b/llava_v15_7b_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..7bef64a4e9d2aa2edf0ab97d8887d1b5b38e9e47 --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/official/llava_v15_7b/llava_v15_7b_finetune.py @@ -0,0 +1,205 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
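
Every config in this set trains through `AmpOptimWrapper` with `dtype='float16'` and `loss_scale='dynamic'`, i.e. standard mixed-precision training with dynamic loss scaling plus the norm clipping configured via `clip_grad`. A roughly equivalent plain-PyTorch step, assuming `model`, `optimizer`, and a `batch` dict already exist (a sketch of the semantics, not mmengine's code):

```python
import torch

scaler = torch.cuda.amp.GradScaler()        # dynamic loss scaling

def train_step(model, optimizer, batch, max_norm=1.0):
    with torch.autocast('cuda', dtype=torch.float16):
        loss = model(**batch).loss
    scaler.scale(loss).backward()
    scaler.unscale_(optimizer)               # so clipping sees true gradient norms
    torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm)
    scaler.step(optimizer)                   # silently skipped if grads overflowed
    scaler.update()
    optimizer.zero_grad(set_to_none=True)
```
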
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + CLIPImageProcessor, CLIPVisionModel) + +from xtuner.dataset import LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'lmsys/vicuna-7b-v1.5' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' +# Specify the pretrained pth +pretrained_pth = './work_dirs/llava_v15_7b_pretrain/iter_2181.pth' + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.vicuna +max_length = int(2048 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 16 # per_device +accumulative_counts = 1 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=False, + freeze_visual_encoder=True, + pretrained_pth=pretrained_pth, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=LLaVADataset, + data_path=data_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + pin_memory=True, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + 
length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llava/official/llava_v15_7b/llava_v15_7b_finetune_lora.py b/data/xtuner/xtuner/configs/llava/official/llava_v15_7b/llava_v15_7b_finetune_lora.py new file mode 100644 index 0000000000000000000000000000000000000000..b17974f5d78ce13f007a379d14c57e73816e9cad --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/official/llava_v15_7b/llava_v15_7b_finetune_lora.py @@ -0,0 +1,213 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
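A side note on a line that recurs in every config here: `max_length = int(2048 - (336 / 14)**2)`. CLIP-ViT-L/14 at 336 px resolution cuts the image into a 24 x 24 grid of 14 px patches, so each image contributes 576 tokens to the LLM input; subtracting them reserves space inside the 2048-token (or, for the InternVL fine-tune config further below, 4096-token) context window. The arithmetic, spelled out:

```python
patch_size = 14
image_size = 336
context_window = 2048

patches_per_side = image_size // patch_size   # 24
image_tokens = patches_per_side ** 2          # 576 visual tokens per image
max_text_length = context_window - image_tokens
print(image_tokens, max_text_length)          # 576 1472
```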
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + CLIPImageProcessor, CLIPVisionModel) + +from xtuner.dataset import LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'lmsys/vicuna-7b-v1.5' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' +# Specify the pretrained pth +pretrained_pth = './work_dirs/llava_v15_7b_pretrain/iter_2181.pth' + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.vicuna +max_length = int(2048 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 16 # per_device +accumulative_counts = 1 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=True, + freeze_visual_encoder=True, + pretrained_pth=pretrained_pth, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True), + llm_lora=dict( + type=LoraConfig, + r=128, + lora_alpha=256, + lora_dropout=0.05, + bias='none', + task_type='CAUSAL_LM'), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=LLaVADataset, + data_path=data_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +train_dataloader = dict( + batch_size=batch_size, + 
num_workers=dataloader_num_workers, + pin_memory=True, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llava/official/llava_v15_7b/llava_v15_7b_pretrain.py b/data/xtuner/xtuner/configs/llava/official/llava_v15_7b/llava_v15_7b_pretrain.py new file mode 100644 index 0000000000000000000000000000000000000000..a30457cf81ebbd77a0da892836c2ccb461160966 --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/official/llava_v15_7b/llava_v15_7b_pretrain.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
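In the LoRA variant above, `llm_lora` freezes the LLM (`freeze_llm=True`) and trains low-rank adapters with `r=128` and `lora_alpha=256`, giving a scaling factor of `lora_alpha / r = 2`. A toy sketch of the update LoRA learns, with made-up dimensions (the real adapters sit inside the frozen model's linear layers):

```python
import torch

d, r, alpha = 1024, 128, 256
W = torch.randn(d, d)          # frozen base weight
A = torch.randn(r, d) * 0.01   # trainable low-rank factor
B = torch.zeros(d, r)          # trainable, zero-initialized

x = torch.randn(d)
# LoRA forward pass: y = Wx + (alpha / r) * B A x
y = W @ x + (alpha / r) * (B @ (A @ x))
print(torch.allclose(y, W @ x))  # True at init: B = 0 makes the update a no-op
```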
+from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + CLIPImageProcessor, CLIPVisionModel) + +from xtuner.dataset import LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'lmsys/vicuna-7b-v1.5' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Pretrain/blip_laion_cc_sbu_558k.json' +image_folder = data_root + 'LLaVA-Pretrain/images' +prompt_template = PROMPT_TEMPLATE.vicuna +max_length = int(2048 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 32 # per_device +accumulative_counts = 1 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 1e-3 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=True, + freeze_visual_encoder=True, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=LLaVADataset, + data_path=data_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=False) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + pin_memory=True, + dataset=llava_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # 
+####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llava/phi3_mini_4k_instruct_clip_vit_large_p14_336/README.md b/data/xtuner/xtuner/configs/llava/phi3_mini_4k_instruct_clip_vit_large_p14_336/README.md new file mode 100644 index 0000000000000000000000000000000000000000..00c39b26ccbeb8ac47407df6c4f24eaa118dd087 --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/phi3_mini_4k_instruct_clip_vit_large_p14_336/README.md @@ -0,0 +1,179 @@ +# LLaVA-Phi-3-mini + +## Results + +
+<!-- Results figure -->
+ +| Model | MMBench Test (EN) | MMMU Val | SEED-IMG | AI2D Test | ScienceQA Test | HallusionBench aAcc | POPE | GQA | TextVQA | MME | MMStar | Configs | +| :-------------------- | :---------------: | :-------: | :------: | :-------: | :------------: | :-----------------: | :--: | :--: | :-----: | :------: | :----: | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| LLaVA-v1.5-7B | 66.5 | 35.3 | 60.5 | 54.8 | 70.4 | 44.9 | 85.9 | 62.0 | 58.2 | 1511/348 | 30.3 | - | +| LLaVA-Llama-3-8B | 68.9 | 36.8 | 69.8 | 60.9 | 73.3 | 47.3 | 87.2 | 63.5 | 58.0 | 1506/295 | 38.2 | [Pretrain](https://github.com/InternLM/xtuner/blob/main/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/pretrain/llava_llama3_8b_instruct_clip_vit_large_p14_336_e1_gpu8_pretrain.py) / [Fine-tune](https://github.com/InternLM/xtuner/blob/main/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/finetune/llava_llama3_8b_instruct_full_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py) | +| LLaVA-Llama-3-8B-v1.1 | 72.3 | 37.1 | 70.1 | 70.0 | 72.9 | 47.7 | 86.4 | 62.6 | 59.0 | 1469/349 | 45.1 | [Pretrain](https://github.com/InternLM/xtuner/blob/main/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/pretrain/llava_llama3_8b_instruct_clip_vit_large_p14_336_e1_gpu8_sharegpt4v_pretrain.py) / [Fine-tune](https://github.com/InternLM/xtuner/blob/main/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336/finetune/llava_llama3_8b_instruct_full_clip_vit_large_p14_336_lora_e1_gpu8_internvl_finetune.py) | +| **LLaVA-Phi-3-mini** | 69.2 | 41.4 | 70.0 | 69.3 | 73.7 | 49.8 | 87.3 | 61.5 | 57.8 | 1477/313 | 43.7 | [Pretrain](./pretrain/llava_phi3_mini_4k_instruct_clip_vit_large_p14_336_e1_gpu8_sharegpt4v_pretrain.py) / [Fine-tune](./finetune/llava_phi3_mini_4k_instruct_full_clip_vit_large_p14_336_full_e2_gpu8_internvl_finetune.py) | + +## Resources + +- Official LLaVA format model (`xtuner/llava-phi-3-mini`): 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-phi-3-mini) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-phi-3-mini) +- HuggingFace LLaVA format model (`xtuner/llava-phi-3-mini-hf`): 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-phi-3-mini-hf) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-phi-3-mini-hf) +- XTuner LLaVA format model (`xtuner/llava-phi-3-mini-xtuner`): 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-phi-3-mini-xtuner) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-phi-3-mini-xtuner) +- GGUF model (`xtuner/llava-phi-3-mini-gguf`): 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-phi-3-mini-gguf) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-phi-3-mini-gguf) +- Pretrained projector weights: 🤗 [HuggingFace](https://huggingface.co/xtuner/llava-phi-3-mini-pretrain) / 🤖 [ModelScope](https://modelscope.cn/models/xtuner/llava-phi-3-mini-pretrain) + +## Data Preparation + +Please refer to [here](https://github.com/InternLM/xtuner/tree/main/xtuner/configs/llava/llama3_8b_instruct_clip_vit_large_p14_336#data-preparation). + +## Training + +### LLaVA-Phi-3-mini + +1. 
Pretrain + +```bash +NPROC_PER_NODE=8 xtuner train llava_phi3_mini_4k_instruct_clip_vit_large_p14_336_e1_gpu8_sharegpt4v_pretrain --deepspeed deepspeed_zero2 --seed 1024 +``` + +2. Fine-tune + +```bash +NPROC_PER_NODE=8 xtuner train llava_phi3_mini_4k_instruct_full_clip_vit_large_p14_336_full_e2_gpu8_internvl_finetune --deepspeed deepspeed_zero2 --seed 1024 +``` + +## Model Conversion + +### Step 0. Convert `.pth` file to LLaVA model in xtuner format ([LLaVA-Phi-3-mini-xtuner](https://huggingface.co/xtuner/llava-phi-3-mini-xtuner)) + +After training, we will obtain a set of weights (*i.e.*, `iter_xxx.pth`), which are not in the universal HuggingFace format. We first need to convert them to an xtuner-format LLaVA model. + +```bash +xtuner convert pth_to_hf $FINETUNE_CFG $PTH_PATH $SAVE_PATH +# e.g., xtuner convert pth_to_hf llava_phi3_mini_4k_instruct_full_clip_vit_large_p14_336_full_e2_gpu8_internvl_finetune ./iter_39620.pth ./iter_39620_xtuner +``` + +``` +./iter_39620_xtuner +├── added_tokens.json +├── config.json +├── model-00001-of-00004.safetensors +├── model-00002-of-00004.safetensors +├── model-00003-of-00004.safetensors +├── model-00004-of-00004.safetensors +├── model.safetensors.index.json +├── projector +│   ├── config.json +│   ├── configuration_projector.py +│   ├── modeling_projector.py +│   └── model.safetensors +├── special_tokens_map.json +├── tokenizer_config.json +├── tokenizer.json +├── tokenizer.model +└── visual_encoder +    ├── config.json +    ├── model.safetensors +    └── preprocessor_config.json +``` + +At this point, the xtuner-format LLaVA model can already hold a conversation via `xtuner chat`: + +```bash +xtuner chat ./iter_39620_xtuner \ + --llava ./iter_39620_xtuner \ + --prompt-template phi3_chat \ + --image $IMAGE_PATH +``` + +and can be evaluated on MMBench via `xtuner mmbench`: + +```bash +xtuner mmbench ./iter_39620_xtuner \ + --llava ./iter_39620_xtuner \ + --prompt-template phi3_chat \ + --data-path $DATA_PATH \ + --work-dir $RESULT_PATH +``` + +Here, `$DATA_PATH` refers to one of the MMBench datasets, which can be downloaded with: + +```bash +wget https://opencompass.openxlab.space/utils/VLMEval/MMBench_DEV_EN.tsv +wget https://opencompass.openxlab.space/utils/VLMEval/MMBench_TEST_EN.tsv +wget https://opencompass.openxlab.space/utils/VLMEval/MMBench_DEV_CN.tsv +wget https://opencompass.openxlab.space/utils/VLMEval/MMBench_TEST_CN.tsv +wget https://opencompass.openxlab.space/utils/VLMEval/CCBench.tsv +``` + +### Step 1. Convert LLaVA in xtuner format to official LLaVA format or HuggingFace LLaVA format + +- The official LLaVA format is structured similarly to the architecture of the [liuhaotian/llava-v1.5-7b](https://huggingface.co/liuhaotian/llava-v1.5-7b) model. +- The HuggingFace LLaVA format is structured similarly to the architecture of the [llava-hf/llava-1.5-7b-hf](https://huggingface.co/llava-hf/llava-1.5-7b-hf) model. + +Since the official LLaVA format and the HuggingFace LLaVA format only support the Llama architecture as the LLM, we first need to convert the phi-3 model to an equivalent Llama LLM. + +```bash +python ./convert_phi_to_llama.py --phi_path ./iter_39620_xtuner --save_path ./iter_39620_xtuner_llama_llm +``` + +Here, `--phi_path` should specify the path to the phi-3 model, *i.e.*, the xtuner-format LLaVA model obtained in Step 0, and `--save_path` should specify the save path for the converted Llama LLM.
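As a quick sanity check (our suggestion, not part of the original guide), the converted directory should load as a plain Llama model without `trust_remote_code`, since the rewritten `config.json` now declares the `LlamaForCausalLM` architecture:

```python
from transformers import AutoConfig, AutoModelForCausalLM

path = "./iter_39620_xtuner_llama_llm"
config = AutoConfig.from_pretrained(path)
assert config.model_type == "llama"
model = AutoModelForCausalLM.from_pretrained(path)
print(type(model).__name__)  # LlamaForCausalLM
```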
+ +#### To official LLaVA format ([LLaVA-Phi-3-mini](https://huggingface.co/xtuner/llava-phi-3-mini)) + +We can utilize the following command to obtain the LLaVA model in the official LLaVA format. + +```bash +python ./convert_xtuner_weights_to_llava.py --text_model_id ./iter_39620_xtuner_llama_llm --vision_model_id ./iter_39620_xtuner/visual_encoder --projector_weight ./iter_39620_xtuner/projector/model.safetensors --save_path ./iter_39620_llava +``` + +Here, the converted LLaVA model in official LLaVA format is saved to `./iter_39620_llava`. + +``` +./iter_39620_llava +├── added_tokens.json +├── config.json +├── generation_config.json +├── model-00001-of-00005.safetensors +├── model-00002-of-00005.safetensors +├── model-00003-of-00005.safetensors +├── model-00004-of-00005.safetensors +├── model-00005-of-00005.safetensors +├── model.safetensors.index.json +├── preprocessor_config.json +├── special_tokens_map.json +├── tokenizer_config.json +├── tokenizer.json +└── tokenizer.model +``` + +#### To HuggingFace LLaVA format ([LLaVA-Phi-3-mini-hf](https://huggingface.co/xtuner/llava-phi-3-mini-hf)) + +We can utilize the following command to obtain the LLaVA model in the HuggingFace LLaVA format. + +```bash +python ./convert_xtuner_weights_to_hf.py --text_model_id ./iter_39620_xtuner_llama_llm --vision_model_id ./iter_39620_xtuner/visual_encoder --projector_weight ./iter_39620_xtuner/projector/model.safetensors --save_path ./iter_39620_hf +``` + +Here, the converted LLaVA model in HuggingFace LLaVA format is saved to `./iter_39620_hf`. + +``` +./iter_39620_hf +├── added_tokens.json +├── config.json +├── generation_config.json +├── model-00001-of-00002.safetensors +├── model-00002-of-00002.safetensors +├── model.safetensors.index.json +├── preprocessor_config.json +├── special_tokens_map.json +├── tokenizer_config.json +├── tokenizer.json +└── tokenizer.model +``` + +## Chat + +- XTuner LLaVA format [docs](https://huggingface.co/xtuner/llava-phi-3-mini-xtuner#quickstart) +- Official LLaVA format [docs](https://huggingface.co/xtuner/llava-phi-3-mini#quickstart) +- HuggingFace LLaVA format [docs](https://huggingface.co/xtuner/llava-phi-3-mini-hf#quickstart) +- GGUF format [docs](https://huggingface.co/xtuner/llava-phi-3-mini-gguf#quickstart) diff --git a/data/xtuner/xtuner/configs/llava/phi3_mini_4k_instruct_clip_vit_large_p14_336/convert_phi_to_llama.py b/data/xtuner/xtuner/configs/llava/phi3_mini_4k_instruct_clip_vit_large_p14_336/convert_phi_to_llama.py new file mode 100644 index 0000000000000000000000000000000000000000..fea4a58f92268fc7a0243546dce15dd2ea176e16 --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/phi3_mini_4k_instruct_clip_vit_large_p14_336/convert_phi_to_llama.py @@ -0,0 +1,100 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
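The `convert_phi_to_llama.py` script that follows works because Phi-3 stores its attention and MLP projections as fused tensors (`qkv_proj`, `gate_up_proj`) while a Llama checkpoint keeps them separate; the conversion is essentially a re-chunking of weights plus a `config.json` rewrite. A toy illustration of the two splits, with made-up shapes:

```python
import torch

hidden = 8
# Fused attention projection: rows are [q; k; v] stacked along dim 0
qkv_proj = torch.randn(3 * hidden, hidden)
q_proj, k_proj, v_proj = qkv_proj.chunk(3, dim=0)

# Fused MLP projection: rows are [gate; up] stacked along dim 0
gate_up_proj = torch.randn(2 * 4 * hidden, hidden)
gate_proj, up_proj = gate_up_proj.chunk(2, dim=0)

print(q_proj.shape, k_proj.shape, v_proj.shape)  # three (8, 8) matrices
print(gate_proj.shape, up_proj.shape)            # two (32, 8) matrices
```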
+import argparse +import json +import os + +from mmengine.utils import mkdir_or_exist +from safetensors import safe_open +from safetensors.torch import save_file +from tqdm import tqdm +from transformers import AutoTokenizer + + +def convert_phi_to_llama(phi_path, save_path): + files = [f for f in os.listdir(phi_path) if f.endswith('safetensors')] + mkdir_or_exist(save_path) + + index_json = os.path.join(phi_path, 'model.safetensors.index.json') + config_json = os.path.join(phi_path, 'config.json') + + with open(index_json) as f: + index = json.load(f) + + with open(config_json) as f: + config = json.load(f) + + config.pop('_name_or_path') + if 'auto_map' in config: + config.pop('auto_map') + config.pop('embd_pdrop') + config.pop('resid_pdrop') + config['architectures'] = ['LlamaForCausalLM'] + config['model_type'] = 'llama' + + for file in tqdm(files, desc='Convert'): + tensors = {} + new_path = os.path.join(save_path, file) + old_path = os.path.join(phi_path, file) + with safe_open(old_path, framework='pt', device='cpu') as f: + for key in f.keys(): + + if 'qkv_proj' in key: + qkv = f.get_tensor(key) + + q, k, v = qkv.chunk(3, dim=0) + q_name = key.replace('qkv_proj', 'q_proj') + k_name = key.replace('qkv_proj', 'k_proj') + v_name = key.replace('qkv_proj', 'v_proj') + + tensors[q_name] = q + tensors[k_name] = k + tensors[v_name] = v + + index['weight_map'].pop(key) + + filename = os.path.basename(new_path) + index['weight_map'][q_name] = filename + index['weight_map'][k_name] = filename + index['weight_map'][v_name] = filename + + elif 'gate_up_proj' in key: + gate_up_proj = f.get_tensor(key) + gate_proj, up_proj = gate_up_proj.chunk(2, dim=0) + + gate_name = key.replace('gate_up_proj', 'gate_proj') + up_name = key.replace('gate_up_proj', 'up_proj') + tensors[gate_name] = gate_proj + tensors[up_name] = up_proj + + index['weight_map'].pop(key) + filename = os.path.basename(new_path) + index['weight_map'][gate_name] = filename + index['weight_map'][up_name] = filename + else: + tensors[key] = f.get_tensor(key) + metadata = f.metadata() + save_file(tensors, new_path, metadata=metadata) + + new_config_json = os.path.join(save_path, 'config.json') + with open(new_config_json, 'w') as f: + json.dump(config, f, indent=2) + + new_index_json = os.path.join(save_path, 'model.safetensors.index.json') + with open(new_index_json, 'w') as f: + json.dump(index, f, indent=2) + + tokenizer = AutoTokenizer.from_pretrained(phi_path, trust_remote_code=True) + tokenizer.save_pretrained(save_path) + print(f'Saved to {save_path}') + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--phi_path') + parser.add_argument('--save_path') + args = parser.parse_args() + convert_phi_to_llama(args.phi_path, args.save_path) + + +if __name__ == '__main__': + main() diff --git a/data/xtuner/xtuner/configs/llava/phi3_mini_4k_instruct_clip_vit_large_p14_336/convert_xtuner_weights_to_hf.py b/data/xtuner/xtuner/configs/llava/phi3_mini_4k_instruct_clip_vit_large_p14_336/convert_xtuner_weights_to_hf.py new file mode 100644 index 0000000000000000000000000000000000000000..e14ca29cd378e62e803dadca6429f18903d32e8c --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/phi3_mini_4k_instruct_clip_vit_large_p14_336/convert_xtuner_weights_to_hf.py @@ -0,0 +1,140 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
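`convert_xtuner_weights_to_hf.py`, which follows, also has to grow the vocabulary for the added image and padding special tokens. New embedding rows are drawn from a multivariate normal fitted to the mean and covariance of the existing embedding matrix, the same recipe as the upstream `convert_llava_weights_to_hf.py` it is modified from. A toy-scale sketch of that initialization:

```python
import torch

vocab, dim, new_tokens = 1000, 64, 2
embeddings = torch.randn(vocab, dim)

# Fit a Gaussian to the existing rows, then sample the new ones from it
mu = embeddings.mean(dim=0)
sigma = (embeddings - mu).T @ (embeddings - mu) / vocab
dist = torch.distributions.MultivariateNormal(mu, covariance_matrix=1e-5 * sigma)

new_rows = torch.stack([dist.sample() for _ in range(new_tokens)])
embeddings = torch.cat([embeddings, new_rows], dim=0)
print(embeddings.shape)  # torch.Size([1002, 64])
```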
+# Modified from https://github.com/huggingface/transformers/blob/v4.40.1/src/transformers/models/llava/convert_llava_weights_to_hf.py # noqa: E501 +import argparse + +import torch +from safetensors import safe_open +from transformers import (AddedToken, AutoConfig, AutoModel, + AutoModelForCausalLM, CLIPImageProcessor, + LlamaTokenizerFast, LlavaConfig, + LlavaForConditionalGeneration, LlavaProcessor) + +KEYS_TO_MODIFY_MAPPING_LLM = { + 'model': 'language_model.model', + 'lm_head': 'language_model.lm_head', +} +KEYS_TO_MODIFY_MAPPING_VIT = { + 'vision_model': 'vision_tower.vision_model', +} +KEYS_TO_MODIFY_MAPPING_PROJECTOR = { + 'model.0': 'multi_modal_projector.linear_1', + 'model.2': 'multi_modal_projector.linear_2', +} + + +def convert_state_dict_to_hf(state_dict, mapping): + new_state_dict = {} + for key, value in state_dict.items(): + if key.endswith('.inv_freq'): + continue + for key_to_modify, new_key in mapping.items(): + if key_to_modify in key: + key = key.replace(key_to_modify, new_key) + + new_state_dict[key] = value + return new_state_dict + + +def convert_to_hf(text_model_id, vision_model_id, projector_weight, save_path): + torch.set_default_dtype(torch.float16) + text_config = AutoConfig.from_pretrained( + text_model_id, trust_remote_code=True) + vision_config = AutoConfig.from_pretrained(vision_model_id) + + tokenizer = LlamaTokenizerFast.from_pretrained(text_model_id) + tokenizer.add_tokens( + AddedToken('', special=True, normalized=False), + special_tokens=True) + tokenizer.add_special_tokens({'pad_token': ''}) + + image_processor = CLIPImageProcessor.from_pretrained(vision_model_id) + + processor = LlavaProcessor( + tokenizer=tokenizer, image_processor=image_processor) + + config = LlavaConfig( + text_config=text_config, + vision_config=vision_config, + attn_implementation='eager') + + with torch.device('meta'): + model = LlavaForConditionalGeneration(config) + + # Pad to 64 for performance reasons + pad_shape = 64 + + projector_state_dict = {} + with safe_open(projector_weight, framework='pt', device='cpu') as f: + for key in f.keys(): + projector_state_dict[key] = f.get_tensor(key) + + ori_llm = AutoModelForCausalLM.from_pretrained( + text_model_id, trust_remote_code=True) + ori_vit = AutoModel.from_pretrained(vision_model_id) + llm_state_dict = ori_llm.state_dict() + vit_state_dict = ori_vit.state_dict() + + projector_state_dict = convert_state_dict_to_hf( + projector_state_dict, KEYS_TO_MODIFY_MAPPING_PROJECTOR) + llm_state_dict = convert_state_dict_to_hf(llm_state_dict, + KEYS_TO_MODIFY_MAPPING_LLM) + vit_state_dict = convert_state_dict_to_hf(vit_state_dict, + KEYS_TO_MODIFY_MAPPING_VIT) + state_dict = {**projector_state_dict, **llm_state_dict, **vit_state_dict} + model.load_state_dict(state_dict, strict=True, assign=True) + + pre_expansion_embeddings = \ + model.language_model.model.embed_tokens.weight.data + mu = torch.mean(pre_expansion_embeddings, dim=0).float() + n = pre_expansion_embeddings.size()[0] + sigma = ((pre_expansion_embeddings - mu).T + @ (pre_expansion_embeddings - mu)) / n + dist = torch.distributions.multivariate_normal.MultivariateNormal( + mu, covariance_matrix=1e-5 * sigma) + + # We add an image token so we resize the model + ori_vocab_size = config.text_config.vocab_size + tokenizer_vocab_size = tokenizer.encode('')[-1] + added_token = tokenizer_vocab_size - ori_vocab_size + + if added_token > 0: + model.resize_token_embeddings(ori_vocab_size + added_token, pad_shape) + model.language_model.model.embed_tokens.weight.data[ + ori_vocab_size:] 
= torch.stack( + tuple(dist.sample() + for _ in range(model.language_model.model.embed_tokens. + weight.data[ori_vocab_size:].shape[0])), + dim=0, + ) + model.language_model.lm_head.weight.data[ + ori_vocab_size:] = torch.stack( + tuple(dist.sample() + for _ in range(model.language_model.lm_head.weight. + data[ori_vocab_size:].shape[0])), + dim=0, + ) + + model.config.image_token_index = tokenizer.encode('')[-1] + model.config.pad_token_id = tokenizer.encode('')[-1] + + if ori_vit.__class__.__name__ == 'SiglipVisionModel': + model.config.vision_feature_select_strategy = 'full' + + model.save_pretrained(save_path) + processor.save_pretrained(save_path) + print(f'Saved to {save_path}') + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--text_model_id') + parser.add_argument('--vision_model_id') + parser.add_argument('--projector_weight') + parser.add_argument('--save_path') + args = parser.parse_args() + convert_to_hf(args.text_model_id, args.vision_model_id, + args.projector_weight, args.save_path) + + +if __name__ == '__main__': + main() diff --git a/data/xtuner/xtuner/configs/llava/phi3_mini_4k_instruct_clip_vit_large_p14_336/convert_xtuner_weights_to_llava.py b/data/xtuner/xtuner/configs/llava/phi3_mini_4k_instruct_clip_vit_large_p14_336/convert_xtuner_weights_to_llava.py new file mode 100644 index 0000000000000000000000000000000000000000..8a1df62330ca8367cf031afef72af5f0ae84e6ab --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/phi3_mini_4k_instruct_clip_vit_large_p14_336/convert_xtuner_weights_to_llava.py @@ -0,0 +1,106 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import argparse + +import torch + +try: + from llava.model import LlavaConfig, LlavaLlamaForCausalLM + from llava.utils import disable_torch_init +except ImportError: + raise ImportError( + 'Please install llava with ' + '`pip install git+https://github.com/haotian-liu/LLaVA.git ' + '--no-deps`.') +from safetensors import safe_open +from transformers import (AutoConfig, AutoModelForCausalLM, AutoTokenizer, + CLIPImageProcessor, CLIPVisionModel) + +KEYS_TO_MODIFY_MAPPING_VIT = { + 'vision_model': 'model.vision_tower.vision_tower.vision_model', +} +KEYS_TO_MODIFY_MAPPING_PROJECTOR = { + 'model.0': 'model.mm_projector.0', + 'model.2': 'model.mm_projector.2', +} + + +def convert_state_dict_to_hf(state_dict, mapping): + new_state_dict = {} + for key, value in state_dict.items(): + if key.endswith('.inv_freq'): + continue + for key_to_modify, new_key in mapping.items(): + if key_to_modify in key: + key = key.replace(key_to_modify, new_key) + new_state_dict[key] = value + return new_state_dict + + +def convert_to_llava(text_model_id, vision_model_id, projector_weight, + save_path): + disable_torch_init() + torch.set_default_dtype(torch.float16) + + projector_state_dict = {} + with safe_open(projector_weight, framework='pt', device='cpu') as f: + for key in f.keys(): + projector_state_dict[key] = f.get_tensor(key) + + ori_llm = AutoModelForCausalLM.from_pretrained( + text_model_id, trust_remote_code=True, device_map='auto') + ori_vit = CLIPVisionModel.from_pretrained(vision_model_id) + llm_state_dict = ori_llm.state_dict() + vit_state_dict = ori_vit.state_dict() + + projector_state_dict = convert_state_dict_to_hf( + projector_state_dict, KEYS_TO_MODIFY_MAPPING_PROJECTOR) + vit_state_dict = convert_state_dict_to_hf(vit_state_dict, + KEYS_TO_MODIFY_MAPPING_VIT) + state_dict = {**projector_state_dict, **llm_state_dict, **vit_state_dict} + + tokenizer = AutoTokenizer.from_pretrained(text_model_id) + 
text_config = AutoConfig.from_pretrained( + text_model_id, trust_remote_code=True) + + ori_config = text_config.__dict__.copy() + ori_config.update( + dict( + image_aspect_ratio='pad', + mm_hidden_size=ori_vit.config.hidden_size, + mm_projector_type='mlp2x_gelu', + mm_use_im_patch_token=False, + mm_use_im_start_end=False, + mm_vision_select_feature='patch', + mm_vision_select_layer=-2, + mm_vision_tower=vision_model_id, + unfreeze_mm_vision_tower=True, + model_type='llava', + use_cache=True, + use_mm_proj=True)) + config = LlavaConfig(**ori_config) + + with torch.device('meta'): + model = LlavaLlamaForCausalLM(config) + + image_processor = CLIPImageProcessor.from_pretrained(vision_model_id) + + model.load_state_dict(state_dict, strict=True, assign=True) + model.save_pretrained(save_path, max_shard_size='2GB') + image_processor.save_pretrained(save_path) + tokenizer.save_pretrained(save_path) + print(f'Saved to {save_path}') + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--text_model_id') + parser.add_argument('--vision_model_id') + parser.add_argument('--projector_weight') + parser.add_argument('--save_path') + args = parser.parse_args() + convert_to_llava(args.text_model_id, args.vision_model_id, + args.projector_weight, args.save_path) + + +if __name__ == '__main__': + main() diff --git a/data/xtuner/xtuner/configs/llava/phi3_mini_4k_instruct_clip_vit_large_p14_336/finetune/llava_phi3_mini_4k_instruct_full_clip_vit_large_p14_336_e1_gpu8_finetune.py b/data/xtuner/xtuner/configs/llava/phi3_mini_4k_instruct_clip_vit_large_p14_336/finetune/llava_phi3_mini_4k_instruct_full_clip_vit_large_p14_336_e1_gpu8_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..a1d3cbcd8b6ea964a9b44093531f1ebacab32458 --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/phi3_mini_4k_instruct_clip_vit_large_p14_336/finetune/llava_phi3_mini_4k_instruct_full_clip_vit_large_p14_336_e1_gpu8_finetune.py @@ -0,0 +1,205 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
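Both converters above instantiate the target model under `torch.device('meta')`, which allocates no real storage, and then materialize it with `load_state_dict(..., assign=True)`, which adopts the checkpoint tensors instead of copying them into preallocated ones. A minimal sketch of the pattern on a toy module (requires a PyTorch version with the `assign` argument, roughly 2.1+):

```python
import torch
import torch.nn as nn

state_dict = {"weight": torch.randn(4, 4), "bias": torch.zeros(4)}

with torch.device("meta"):
    layer = nn.Linear(4, 4)   # parameters exist only as shapes, no memory

layer.load_state_dict(state_dict, assign=True)  # adopt the real tensors
print(layer.weight.device)    # cpu
```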
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + CLIPImageProcessor, CLIPVisionModel) + +from xtuner.dataset import LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'microsoft/Phi-3-mini-4k-instruct' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' +# Specify the pretrained pth +pretrained_pth = './work_dirs/llava_phi3_mini_4k_instruct_clip_vit_large_p14_336_e1_gpu8_pretrain/iter_2181.pth' # noqa: E501 + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.phi3_chat +max_length = int(2048 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 8 # per_device +accumulative_counts = 2 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 1000 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 1000 +SYSTEM = '' +evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=False, + freeze_visual_encoder=True, + pretrained_pth=pretrained_pth, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=LLaVADataset, + data_path=data_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + pin_memory=True, + dataset=llava_dataset, 
+ sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llava/phi3_mini_4k_instruct_clip_vit_large_p14_336/finetune/llava_phi3_mini_4k_instruct_full_clip_vit_large_p14_336_full_e2_gpu8_internvl_finetune.py b/data/xtuner/xtuner/configs/llava/phi3_mini_4k_instruct_clip_vit_large_p14_336/finetune/llava_phi3_mini_4k_instruct_full_clip_vit_large_p14_336_full_e2_gpu8_internvl_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..7ba93bb2463ec01eb043e2634d70784990cc1b15 --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/phi3_mini_4k_instruct_clip_vit_large_p14_336/finetune/llava_phi3_mini_4k_instruct_full_clip_vit_large_p14_336_full_e2_gpu8_internvl_finetune.py @@ -0,0 +1,334 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + CLIPImageProcessor, CLIPVisionModel) + +from xtuner.dataset import ConcatDataset, LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'microsoft/Phi-3-mini-4k-instruct' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' +# Specify the pretrained pth +pretrained_pth = './work_dirs/llava_phi3_mini_4k_instruct_clip_vit_large_p14_336_e1_gpu8_sharegpt4v_pretrain/iter_9742.pth' # noqa: E501 +# Data +data_root = './data/internvl_sft/' + +sharegpt4v_caption_data_path = data_root + 'sharegpt4v_instruct_gpt4-vision_cap100k.jsonl' # noqa: E501 +sharegpt4v_caption_image_folder = data_root + 'data' + +llava_data_path = data_root + 'llava_instruct_150k_zh.jsonl' +llava_image_folder = data_root + 'data/coco' + +sharegpt4v_data_path = data_root + 'sharegpt4v_mix665k_cap23k_coco-ap9k_lcs3k_sam9k_div2k.jsonl' # noqa: E501 +sharegpt4v_image_folder = data_root + 'data' + +dvqa_data_path = data_root + 'dvqa_train_200k.jsonl' +dvqa_image_folder = data_root + 'data/dvqa' + +chartqa_data_path = data_root + 'chartqa_train_18k.jsonl' +chartqa_image_folder = data_root + 'data/chartqa' + +ai2d_data_path = data_root + 'ai2d_train_12k.jsonl' +ai2d_image_folder = data_root + 'data/ai2d' + +docvqa_data_path = data_root + 'docvqa_train_10k.jsonl' +docvqa_image_folder = data_root + 
'data/docvqa' + +geoqa_data_path = data_root + 'geoqa+.jsonl' +geoqa_image_folder = data_root + 'data/geoqa+' + +synthdog_data_path = data_root + 'synthdog_en.jsonl' +synthdog_image_folder = data_root + 'data/synthdog-en' + +prompt_template = PROMPT_TEMPLATE.phi3_chat +max_length = int(4096 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 8 # per_device +accumulative_counts = 2 +dataloader_num_workers = 4 +max_epochs = 2 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 5000 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 5000 +SYSTEM = '' +evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=False, + freeze_visual_encoder=False, + pretrained_pth=pretrained_pth, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +sharegpt4v_caption_dataset = dict( + type=LLaVADataset, + data_path=sharegpt4v_caption_data_path, + image_folder=sharegpt4v_caption_image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +llava_dataset = dict( + type=LLaVADataset, + data_path=llava_data_path, + image_folder=llava_image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +sharegpt4v_dataset = dict( + type=LLaVADataset, + data_path=sharegpt4v_data_path, + image_folder=sharegpt4v_image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +dvqa_dataset = dict( + type=LLaVADataset, + data_path=dvqa_data_path, + image_folder=dvqa_image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +chartqa_dataset = dict( + type=LLaVADataset, + data_path=chartqa_data_path, + image_folder=chartqa_image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + 
dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +ai2d_dataset = dict( + type=LLaVADataset, + data_path=ai2d_data_path, + image_folder=ai2d_image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +docvqa_dataset = dict( + type=LLaVADataset, + data_path=docvqa_data_path, + image_folder=docvqa_image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +geoqa_dataset = dict( + type=LLaVADataset, + data_path=geoqa_data_path, + image_folder=geoqa_image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +synthdog_dataset = dict( + type=LLaVADataset, + data_path=synthdog_data_path, + image_folder=synthdog_image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +train_dataset = dict( + type=ConcatDataset, + datasets=[ + sharegpt4v_caption_dataset, llava_dataset, sharegpt4v_dataset, + dvqa_dataset, chartqa_dataset, ai2d_dataset, docvqa_dataset, + geoqa_dataset, synthdog_dataset + ]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + pin_memory=True, + dataset=train_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + 
evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llava/phi3_mini_4k_instruct_clip_vit_large_p14_336/pretrain/llava_phi3_mini_4k_instruct_clip_vit_large_p14_336_e1_gpu8_pretrain.py b/data/xtuner/xtuner/configs/llava/phi3_mini_4k_instruct_clip_vit_large_p14_336/pretrain/llava_phi3_mini_4k_instruct_clip_vit_large_p14_336_e1_gpu8_pretrain.py new file mode 100644 index 0000000000000000000000000000000000000000..cdd4bb4842d806379fcf6645bd13c31033a9fdc6 --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/phi3_mini_4k_instruct_clip_vit_large_p14_336/pretrain/llava_phi3_mini_4k_instruct_clip_vit_large_p14_336_e1_gpu8_pretrain.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
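The InternVL-style fine-tune config above is the only one that trains on multiple sources at once: nine `LLaVADataset` definitions are merged through `ConcatDataset`, and `LengthGroupedSampler` then buckets the combined samples by `modality_length`. xtuner's `ConcatDataset` builds its children from config dicts, but its indexing should behave like the stock PyTorch version, sketched here on toy data:

```python
from torch.utils.data import ConcatDataset, Dataset

class ToyDataset(Dataset):
    def __init__(self, items):
        self.items = items
    def __len__(self):
        return len(self.items)
    def __getitem__(self, i):
        return self.items[i]

# Indices run through the child datasets in order, like one long list
mixed = ConcatDataset([ToyDataset(["a1", "a2"]), ToyDataset(["b1"]),
                       ToyDataset(["c1", "c2", "c3"])])
print(len(mixed), mixed[2], mixed[5])  # 6 b1 c3
```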
+from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + CLIPImageProcessor, CLIPVisionModel) + +from xtuner.dataset import LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'microsoft/Phi-3-mini-4k-instruct' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Pretrain/blip_laion_cc_sbu_558k.json' +image_folder = data_root + 'LLaVA-Pretrain/images' +prompt_template = PROMPT_TEMPLATE.phi3_chat +max_length = int(2048 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 32 # per_device +accumulative_counts = 1 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 1e-3 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=True, + freeze_visual_encoder=True, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=LLaVADataset, + data_path=data_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=False) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + pin_memory=True, + dataset=llava_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & 
Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llava/phi3_mini_4k_instruct_clip_vit_large_p14_336/pretrain/llava_phi3_mini_4k_instruct_clip_vit_large_p14_336_e1_gpu8_sharegpt4v_pretrain.py b/data/xtuner/xtuner/configs/llava/phi3_mini_4k_instruct_clip_vit_large_p14_336/pretrain/llava_phi3_mini_4k_instruct_clip_vit_large_p14_336_e1_gpu8_sharegpt4v_pretrain.py new file mode 100644 index 0000000000000000000000000000000000000000..e74b12097a101c07e456d74a2ae5f39c1a7495a4 --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/phi3_mini_4k_instruct_clip_vit_large_p14_336/pretrain/llava_phi3_mini_4k_instruct_clip_vit_large_p14_336_e1_gpu8_sharegpt4v_pretrain.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved.
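+# Note (illustrative; assumes the 8 GPUs implied by `gpu8` in the filename):
+# compared with the plain pretrain config (batch_size=32, accumulative_counts=1),
+# this recipe halves the per-device batch and doubles gradient accumulation,
+# so the effective global batch size is unchanged:
+#
+#     assert 16 * 2 * 8 == 32 * 1 * 8 == 256
+#
+# The smaller per-device batch offsets the longer ShareGPT4V captions packed
+# into max_length = 4096 - 576 = 3520 text tokens.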
+from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + CLIPImageProcessor, CLIPVisionModel) + +from xtuner.dataset import LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'microsoft/Phi-3-mini-4k-instruct' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' + +# Data +data_root = './data/sharegpt4v/' +data_path = data_root + 'share-captioner_coco_lcs_sam_1246k_1107.json' +image_folder = data_root + 'data' +prompt_template = PROMPT_TEMPLATE.phi3_chat +max_length = int(4096 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 16 # per_device +accumulative_counts = 2 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 1e-3 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 1000 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 1000 +SYSTEM = '' +evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=True, + freeze_visual_encoder=True, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=LLaVADataset, + data_path=data_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=False) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + pin_memory=True, + dataset=llava_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # 
+####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llava/vicuna_13b_v15_clip_vit_large_p14_336/finetune/llava_vicuna_13b_v15_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py b/data/xtuner/xtuner/configs/llava/vicuna_13b_v15_clip_vit_large_p14_336/finetune/llava_vicuna_13b_v15_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..a82c42c5693ffa504a4c37f8eb1621c3e81ab8d7 --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/vicuna_13b_v15_clip_vit_large_p14_336/finetune/llava_vicuna_13b_v15_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py @@ -0,0 +1,227 @@ +# Copyright (c) OpenMMLab. All rights reserved.
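+# Note (a minimal sketch of what the lazy `dict(type=...)` blocks below amount
+# to once built; the transformers/peft calls are the public APIs, while the
+# exact wiring inside LLaVAModel may differ):
+#
+#     import torch
+#     from peft import LoraConfig, get_peft_model
+#     from transformers import AutoModelForCausalLM, BitsAndBytesConfig
+#
+#     llm = AutoModelForCausalLM.from_pretrained(
+#         'lmsys/vicuna-13b-v1.5',
+#         torch_dtype=torch.float16,
+#         quantization_config=BitsAndBytesConfig(
+#             load_in_4bit=True,
+#             bnb_4bit_quant_type='nf4',
+#             bnb_4bit_compute_dtype=torch.float16,
+#             bnb_4bit_use_double_quant=True))
+#     llm = get_peft_model(
+#         llm,
+#         LoraConfig(r=512, lora_alpha=256, lora_dropout=0.05,
+#                    bias='none', task_type='CAUSAL_LM'))
+#
+# i.e. QLoRA: the base weights are frozen in 4-bit NF4, and only the LoRA
+# adapters on the LLM and visual encoder (and, typically, the projector)
+# receive fp16 gradients.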
+import torch +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig, CLIPImageProcessor, + CLIPVisionModel) + +from xtuner.dataset import LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'lmsys/vicuna-13b-v1.5' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' +# Specify the pretrained pth +pretrained_pth = './work_dirs/llava_vicuna_13b_v15_clip_vit_large_p14_336_e1_gpu8_pretrain/iter_2181.pth' # noqa: E501 + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.vicuna +max_length = int(2048 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 16 # per_device +accumulative_counts = 1 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=True, + freeze_visual_encoder=True, + pretrained_pth=pretrained_pth, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + llm_lora=dict( + type=LoraConfig, + r=512, + lora_alpha=256, + lora_dropout=0.05, + bias='none', + task_type='CAUSAL_LM'), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path), + visual_encoder_lora=dict( + type=LoraConfig, r=64, lora_alpha=16, lora_dropout=0.05, bias='none')) + +####################################################################### +# 
PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=LLaVADataset, + data_path=data_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + pin_memory=True, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llava/vicuna_13b_v15_clip_vit_large_p14_336/pretrain/llava_vicuna_13b_v15_clip_vit_large_p14_336_e1_gpu8_pretrain.py b/data/xtuner/xtuner/configs/llava/vicuna_13b_v15_clip_vit_large_p14_336/pretrain/llava_vicuna_13b_v15_clip_vit_large_p14_336_e1_gpu8_pretrain.py new file mode 100644 index 0000000000000000000000000000000000000000..d0620fe61aa5fb205e41d08f6c43670b0ca88997 --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/vicuna_13b_v15_clip_vit_large_p14_336/pretrain/llava_vicuna_13b_v15_clip_vit_large_p14_336_e1_gpu8_pretrain.py @@ -0,0 +1,211 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig, CLIPImageProcessor, + CLIPVisionModel) + +from xtuner.dataset import LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'lmsys/vicuna-13b-v1.5' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Pretrain/blip_laion_cc_sbu_558k.json' +image_folder = data_root + 'LLaVA-Pretrain/images' +prompt_template = PROMPT_TEMPLATE.vicuna +max_length = int(2048 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 32 # per_device +accumulative_counts = 1 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 1e-3 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, 
+ padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=True, + freeze_visual_encoder=True, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=LLaVADataset, + data_path=data_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=False) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + pin_memory=True, + dataset=llava_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llava/vicuna_7b_v15_clip_vit_large_p14_336/finetune/llava_vicuna_7b_v15_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py b/data/xtuner/xtuner/configs/llava/vicuna_7b_v15_clip_vit_large_p14_336/finetune/llava_vicuna_7b_v15_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..21d80a8cae066e3b0dac969aafdcd84fd593018b --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/vicuna_7b_v15_clip_vit_large_p14_336/finetune/llava_vicuna_7b_v15_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune.py @@ -0,0 +1,227 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig, CLIPImageProcessor, + CLIPVisionModel) + +from xtuner.dataset import LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'lmsys/vicuna-7b-v1.5' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' +# Specify the pretrained pth +pretrained_pth = './work_dirs/llava_vicuna_7b_v15_clip_vit_large_p14_336_e1_gpu8_pretrain/iter_2181.pth' # noqa: E501 + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.vicuna +max_length = int(2048 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 16 # per_device +accumulative_counts = 1 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs
= ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=True, + freeze_visual_encoder=True, + pretrained_pth=pretrained_pth, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + llm_lora=dict( + type=LoraConfig, + r=512, + lora_alpha=256, + lora_dropout=0.05, + bias='none', + task_type='CAUSAL_LM'), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path), + visual_encoder_lora=dict( + type=LoraConfig, r=64, lora_alpha=16, lora_dropout=0.05, bias='none')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=LLaVADataset, + data_path=data_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + pin_memory=True, + dataset=llava_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + 
dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llava/vicuna_7b_v15_clip_vit_large_p14_336/finetune/llava_vicuna_7b_v15_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune_refcoco.py b/data/xtuner/xtuner/configs/llava/vicuna_7b_v15_clip_vit_large_p14_336/finetune/llava_vicuna_7b_v15_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune_refcoco.py new file mode 100644 index 0000000000000000000000000000000000000000..c3fb0f8320748dccd8f573bb81d889933283b086 --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/vicuna_7b_v15_clip_vit_large_p14_336/finetune/llava_vicuna_7b_v15_qlora_clip_vit_large_p14_336_lora_e1_gpu8_finetune_refcoco.py @@ -0,0 +1,264 @@ +# Copyright (c) OpenMMLab. All rights reserved.
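+# Note (illustrative): this variant mixes referring-expression grounding into
+# the instruction-tuning data. RefCOCOJsonDataset maps a region to its
+# description and InvRefCOCOJsonDataset inverts the task; both are capped at
+# max_dataset_length=70000 so that, against the ~665k-entry llava mix (per
+# the filename llava_v1_5_mix665k.json), grounding stays a minority of the
+# concatenated blend:
+#
+#     assert 2 * 70000 / (2 * 70000 + 665000) < 0.2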
+import torch +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig, CLIPImageProcessor, + CLIPVisionModel) + +from xtuner.dataset import ConcatDataset, LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.dataset.refcoco_json import (InvRefCOCOJsonDataset, + RefCOCOJsonDataset) +from xtuner.dataset.samplers import LengthGroupedSampler +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'lmsys/vicuna-7b-v1.5' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' +# Specify the pretrained pth +pretrained_pth = './work_dirs/llava_vicuna_7b_v15_clip_vit_large_p14_336_e1_gpu8_pretrain/iter_2181.pth' # noqa: E501 + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Instruct-150K/llava_v1_5_mix665k.json' +refcoco_path = data_root + 'RefCOCOJson/train.json' +image_folder = data_root + 'llava_images' +prompt_template = PROMPT_TEMPLATE.vicuna +max_length = int(2048 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 16 # per_device +accumulative_counts = 1 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=True, + freeze_visual_encoder=True, + pretrained_pth=pretrained_pth, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + llm_lora=dict( + type=LoraConfig, + r=512, + lora_alpha=256, + lora_dropout=0.05, + bias='none', + task_type='CAUSAL_LM'), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path), + 
visual_encoder_lora=dict( + type=LoraConfig, r=64, lora_alpha=16, lora_dropout=0.05, bias='none')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### + +# The refcoco and inv_refcoco datasets contain more than 300k items combined, +# so we cap their length to keep them balanced with the llava dataset. +refcoco_dataset = dict( + type=RefCOCOJsonDataset, + data_path=refcoco_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True, + max_dataset_length=70000, +) +inv_refcoco_dataset = dict( + type=InvRefCOCOJsonDataset, + data_path=refcoco_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True, + max_dataset_length=70000, +) +llava_dataset = dict( + type=LLaVADataset, + data_path=data_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=True, +) +train_dataset = dict( + type=ConcatDataset, + datasets=[refcoco_dataset, inv_refcoco_dataset, llava_dataset], +) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + pin_memory=True, + dataset=train_dataset, + sampler=dict( + type=LengthGroupedSampler, + length_property='modality_length', + per_device_batch_size=batch_size * accumulative_counts), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of
every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/llava/vicuna_7b_v15_clip_vit_large_p14_336/pretrain/llava_vicuna_7b_v15_clip_vit_large_p14_336_e1_gpu8_pretrain.py b/data/xtuner/xtuner/configs/llava/vicuna_7b_v15_clip_vit_large_p14_336/pretrain/llava_vicuna_7b_v15_clip_vit_large_p14_336_e1_gpu8_pretrain.py new file mode 100644 index 0000000000000000000000000000000000000000..46c6f4c9de376ba59895c2f519bceaf6454eeade --- /dev/null +++ b/data/xtuner/xtuner/configs/llava/vicuna_7b_v15_clip_vit_large_p14_336/pretrain/llava_vicuna_7b_v15_clip_vit_large_p14_336_e1_gpu8_pretrain.py @@ -0,0 +1,211 @@ +# Copyright (c) OpenMMLab. All rights reserved.
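+# Note (illustrative): the schedulers below are declared by epoch but
+# converted to iterations, so LinearLR warms the LR up over the first
+# warmup_ratio * max_epochs = 0.03 epochs and CosineAnnealingLR then decays
+# it to 0. Using the iteration count suggested by the finetune configs'
+# checkpoint name (iter_2181.pth), warmup would span roughly:
+#
+#     assert int(0.03 * 2181) == 65   # ~65 of 2181 iterations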
+import torch +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig, CLIPImageProcessor, + CLIPVisionModel) + +from xtuner.dataset import LLaVADataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory +from xtuner.engine.hooks import DatasetInfoHook, EvaluateChatHook +from xtuner.engine.runner import TrainLoop +from xtuner.model import LLaVAModel +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +llm_name_or_path = 'lmsys/vicuna-7b-v1.5' +visual_encoder_name_or_path = 'openai/clip-vit-large-patch14-336' + +# Data +data_root = './data/llava_data/' +data_path = data_root + 'LLaVA-Pretrain/blip_laion_cc_sbu_558k.json' +image_folder = data_root + 'LLaVA-Pretrain/images' +prompt_template = PROMPT_TEMPLATE.vicuna +max_length = int(2048 - (336 / 14)**2) + +# Scheduler & Optimizer +batch_size = 32 # per_device +accumulative_counts = 1 +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 1e-3 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_images = 'https://llava-vl.github.io/static/images/view.jpg' +evaluation_inputs = ['请描述一下这张照片', 'Please describe this picture'] + +####################################################################### +# PART 2 Model & Tokenizer & Image Processor # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + padding_side='right') + +image_processor = dict( + type=CLIPImageProcessor.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path, + trust_remote_code=True) + +model = dict( + type=LLaVAModel, + freeze_llm=True, + freeze_visual_encoder=True, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=llm_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + visual_encoder=dict( + type=CLIPVisionModel.from_pretrained, + pretrained_model_name_or_path=visual_encoder_name_or_path)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +llava_dataset = dict( + type=LLaVADataset, + data_path=data_path, + image_folder=image_folder, + tokenizer=tokenizer, + image_processor=image_processor, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_length=max_length, + pad_image_to_square=False) + +train_dataloader 
= dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + pin_memory=True, + dataset=llava_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + image_processor=image_processor, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + evaluation_images=evaluation_images, + system=SYSTEM, + prompt_template=prompt_template) +] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/minicpm/1_2b/minicpm_1b_dpo_qlora.py b/data/xtuner/xtuner/configs/minicpm/1_2b/minicpm_1b_dpo_qlora.py new file mode 100644 index 0000000000000000000000000000000000000000..b0fc4556a75a4edcc26493878132a9c0226cd625 --- /dev/null +++ b/data/xtuner/xtuner/configs/minicpm/1_2b/minicpm_1b_dpo_qlora.py @@ -0,0 +1,221 @@ +# Copyright (c) OpenMMLab. All rights reserved.
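+# Note (a sketch of the objective selected by dpo_loss_type='sigmoid' below;
+# it mirrors the DPO paper's loss, not xtuner's exact internals):
+#
+#     import torch.nn.functional as F
+#
+#     def dpo_sigmoid_loss(policy_chosen, policy_rejected,
+#                          ref_chosen, ref_rejected, beta=0.1):
+#         """Inputs are summed log-probs of the chosen/rejected responses."""
+#         logits = (policy_chosen - ref_chosen) - (policy_rejected - ref_rejected)
+#         return -F.logsigmoid(beta * logits).mean()
+#
+# A common trick with LoRA-based DPO: since the base weights stay frozen, the
+# reference policy can be recovered by simply disabling the adapters.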
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset.collate_fns.preference_collate_fn import \ + preference_collate_fn +from xtuner.dataset.preference_dataset import (build_preference_dataset, + orpo_dpo_mix_40k_map_fn) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model.dpo import DPO +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'openbmb/MiniCPM-1B-sft-bf16' +use_varlen_attn = False +dpo_loss_type = 'sigmoid' # One of ['sigmoid', 'hinge', 'ipo', 'kto_pair', 'sppo_hard', 'nca_pair', 'robust'] # noqa: E501 +loss_beta = 0.1 +label_smoothing = 0.0 + +# Data +prompt_template = PROMPT_TEMPLATE.minicpm +max_length = 2048 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 5e-7 # refer to alignment handbook +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + 'What famous British author, known for his tales of mystery and the macabre, shares his initials with a common abbreviation for "rest in peace"?', # noqa: E501 + 'Please tell me five scenic spots in Shanghai', + '890729 - 425663? Only respond with math and no words.' 
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=DPO, + use_varlen_attn=use_varlen_attn, + loss_type=dpo_loss_type, + beta=loss_beta, + label_smoothing=label_smoothing, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=build_preference_dataset, + dataset=dict(type=load_dataset, path='mlabonne/orpo-dpo-mix-40k'), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=orpo_dpo_mix_40k_map_fn, + is_dpo=True, + is_reward=False, + reward_token_id=-1, + num_proc=32, + use_varlen_attn=use_varlen_attn, + shuffle_before_pack=True, +) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict( + type=preference_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. 
+ timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/minicpm/1_2b/minicpm_1b_full_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/minicpm/1_2b/minicpm_1b_full_alpaca_zh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..2c1e37ff342db406b44273a62411f464cf879093 --- /dev/null +++ b/data/xtuner/xtuner/configs/minicpm/1_2b/minicpm_1b_full_alpaca_zh_e3.py @@ -0,0 +1,201 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_zh_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'openbmb/MiniCPM-1B-sft-bf16' +use_varlen_attn = False + +# Data +alpaca_en_path = 'silk-road/alpaca-data-gpt4-chinese' +prompt_template = PROMPT_TEMPLATE.minicpm +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# 
PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right',
+    eos_token='</s>')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+alpaca_en = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=alpaca_en_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=alpaca_zh_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+sampler = SequenceParallelSampler \
+    if sequence_parallel_size > 1 else DefaultSampler
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=alpaca_en,
+    sampler=dict(type=sampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/minicpm/1_2b/minicpm_1b_lora_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/minicpm/1_2b/minicpm_1b_lora_alpaca_zh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..e0ed461479bc1f89670bf1ea2d590b72d5e92d09 --- /dev/null +++ b/data/xtuner/xtuner/configs/minicpm/1_2b/minicpm_1b_lora_alpaca_zh_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_zh_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'openbmb/MiniCPM-1B-sft-bf16' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +prompt_template = PROMPT_TEMPLATE.minicpm +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 +gradient_checkpointing = True +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='') + +model = dict( + type=SupervisedFinetune, + 
use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + ), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_zh, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/minicpm/1_2b/minicpm_1b_qlora_alpaca_enzh_e3.py b/data/xtuner/xtuner/configs/minicpm/1_2b/minicpm_1b_qlora_alpaca_enzh_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..0adc91aec38500540c16d1edc2762f7c53cfc9a2
--- /dev/null
+++ b/data/xtuner/xtuner/configs/minicpm/1_2b/minicpm_1b_qlora_alpaca_enzh_e3.py
@@ -0,0 +1,238 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import ConcatDataset, process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn,
+                                    template_map_fn_factory)
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.parallel.sequence import SequenceParallelSampler
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'openbmb/MiniCPM-1B-sft-bf16'
+use_varlen_attn = False
+
+# Data
+alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese'
+alpaca_en_path = 'tatsu-lab/alpaca'
+prompt_template = PROMPT_TEMPLATE.minicpm
+max_length = 2048
+pack_to_max_length = True
+
+# parallel
+sequence_parallel_size = 1
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+accumulative_counts *= sequence_parallel_size
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = SYSTEM_TEMPLATE.alpaca
+evaluation_inputs = [
+    '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai'
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right',
+    eos_token='</s>')
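+
+# QLoRA recipe: the base model is loaded via bitsandbytes in 4-bit NF4
+# (double quantization, fp16 compute dtype) and kept frozen; only the LoRA
+# adapter defined below (r=64, lora_alpha=16) receives gradients.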
+ +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + 
custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/minicpm/1_2b/minicpm_1b_qlora_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/minicpm/1_2b/minicpm_1b_qlora_alpaca_zh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..ca7816c0a4007a30b6467a388bb7bf1dcf0c60e9 --- /dev/null +++ b/data/xtuner/xtuner/configs/minicpm/1_2b/minicpm_1b_qlora_alpaca_zh_e3.py @@ -0,0 +1,221 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_zh_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'openbmb/MiniCPM-1B-sft-bf16' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +prompt_template = PROMPT_TEMPLATE.minicpm +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 +gradient_checkpointing = True +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation 
performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_zh, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + 
prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/minicpm/2b/minicpm_2b_dpo_qlora.py b/data/xtuner/xtuner/configs/minicpm/2b/minicpm_2b_dpo_qlora.py new file mode 100644 index 0000000000000000000000000000000000000000..abf1e7ef9b55a8237e6122f240b23301af86492c --- /dev/null +++ b/data/xtuner/xtuner/configs/minicpm/2b/minicpm_2b_dpo_qlora.py @@ -0,0 +1,221 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset.collate_fns.preference_collate_fn import \ + preference_collate_fn +from xtuner.dataset.preference_dataset import (build_preference_dataset, + orpo_dpo_mix_40k_map_fn) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model.dpo import DPO +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'openbmb/MiniCPM-2B-sft-bf16' +use_varlen_attn = False +dpo_loss_type = 'sigmoid' # One of ['sigmoid', 'hinge', 'ipo', 'kto_pair', 'sppo_hard', 'nca_pair', 'robust'] # noqa: E501 +loss_beta = 0.1 +label_smoothing = 0.0 + +# Data +prompt_template = PROMPT_TEMPLATE.minicpm +max_length = 2048 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 5e-7 # refer to alignment handbook +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 
500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + 'What famous British author, known for his tales of mystery and the macabre, shares his initials with a common abbreviation for "rest in peace"?', # noqa: E501 + 'Please tell me five scenic spots in Shanghai', + '890729 - 425663? Only respond with math and no words.' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=DPO, + use_varlen_attn=use_varlen_attn, + loss_type=dpo_loss_type, + beta=loss_beta, + label_smoothing=label_smoothing, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=build_preference_dataset, + dataset=dict(type=load_dataset, path='mlabonne/orpo-dpo-mix-40k'), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=orpo_dpo_mix_40k_map_fn, + is_dpo=True, + is_reward=False, + reward_token_id=-1, + num_proc=32, + use_varlen_attn=use_varlen_attn, + shuffle_before_pack=True, +) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict( + type=preference_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + 
every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/minicpm/2b/minicpm_2b_full_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/minicpm/2b/minicpm_2b_full_alpaca_zh_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..c699ff876b82ef940df39a66eb7ce4b1cd750e80
--- /dev/null
+++ b/data/xtuner/xtuner/configs/minicpm/2b/minicpm_2b_full_alpaca_zh_e3.py
@@ -0,0 +1,201 @@
+# Copyright (c) OpenMMLab. All rights reserved.
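+# Full-parameter SFT variant: no LoRA or quantization wrapper is applied to
+# the llm below, so all MiniCPM-2B weights are updated; hence the smaller
+# learning rate (2e-5) than in the LoRA/QLoRA variants (2e-4).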
+from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_zh_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'openbmb/MiniCPM-2B-sft-bf16' +use_varlen_attn = False + +# Data +alpaca_en_path = 'silk-road/alpaca-data-gpt4-chinese' +prompt_template = PROMPT_TEMPLATE.minicpm +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # 
+####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/minicpm/2b/minicpm_2b_lora_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/minicpm/2b/minicpm_2b_lora_alpaca_zh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..a50fe91abc02c9fe4e92c7b3a4e13c86d619c34d --- /dev/null +++ b/data/xtuner/xtuner/configs/minicpm/2b/minicpm_2b_lora_alpaca_zh_e3.py @@ -0,0 +1,212 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
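+# LoRA variant: the base model is loaded in fp16 without quantization and
+# kept frozen; only the LoRA adapter (r=64, lora_alpha=16, lora_dropout=0.1)
+# is trained.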
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_zh_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'openbmb/MiniCPM-2B-sft-bf16' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +prompt_template = PROMPT_TEMPLATE.minicpm +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 +gradient_checkpointing = True +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + ), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_zh, + sampler=dict(type=sampler, 
shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/minicpm/2b/minicpm_2b_qlora_alpaca_enzh_e3.py b/data/xtuner/xtuner/configs/minicpm/2b/minicpm_2b_qlora_alpaca_enzh_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..2082e4c242b03a73ccca17130887fa37f99e2915
--- /dev/null
+++ b/data/xtuner/xtuner/configs/minicpm/2b/minicpm_2b_qlora_alpaca_enzh_e3.py
@@ -0,0 +1,238 @@
+# Copyright (c) OpenMMLab. All rights reserved.
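+# Bilingual QLoRA variant: the English (tatsu-lab/alpaca) and Chinese
+# (silk-road/alpaca-data-gpt4-chinese) Alpaca corpora are combined with
+# ConcatDataset and used to train a LoRA adapter over a 4-bit NF4 quantized
+# base model.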
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'openbmb/MiniCPM-2B-sft-bf16' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.minicpm +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + 
shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/minicpm/2b/minicpm_2b_qlora_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/minicpm/2b/minicpm_2b_qlora_alpaca_zh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..86d3564da362dc483e66a99ba3d644b16f4a8035 --- /dev/null +++ b/data/xtuner/xtuner/configs/minicpm/2b/minicpm_2b_qlora_alpaca_zh_e3.py @@ -0,0 +1,221 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_zh_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'openbmb/MiniCPM-2B-sft-bf16' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +prompt_template = PROMPT_TEMPLATE.minicpm +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 +gradient_checkpointing = True +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='') + +model = dict( + type=SupervisedFinetune, + 
use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_zh, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/minicpm/minicpm3_4b/minicpm3_4b_dpo_qlora.py b/data/xtuner/xtuner/configs/minicpm/minicpm3_4b/minicpm3_4b_dpo_qlora.py new file mode 100644 index 0000000000000000000000000000000000000000..dcb3344db075547452143402f67fe29120b68395 --- /dev/null +++ b/data/xtuner/xtuner/configs/minicpm/minicpm3_4b/minicpm3_4b_dpo_qlora.py @@ -0,0 +1,221 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset.collate_fns.preference_collate_fn import \ + preference_collate_fn +from xtuner.dataset.preference_dataset import (build_preference_dataset, + orpo_dpo_mix_40k_map_fn) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model.dpo import DPO +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'openbmb/MiniCPM3-4B' +use_varlen_attn = False +dpo_loss_type = 'sigmoid' # One of ['sigmoid', 'hinge', 'ipo', 'kto_pair', 'sppo_hard', 'nca_pair', 'robust'] # noqa: E501 +loss_beta = 0.1 +label_smoothing = 0.0 + +# Data +prompt_template = PROMPT_TEMPLATE.minicpm +max_length = 2048 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_steps = 3 +optim_type = AdamW +lr = 5e-7 # refer to alignment handbook +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + 'What famous British author, known for his tales of mystery and the macabre, shares his initials with a common abbreviation for "rest in peace"?', # noqa: E501 + 'Please tell me five scenic spots in Shanghai', + '890729 - 425663? Only respond with math and no words.' 
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=DPO, + use_varlen_attn=use_varlen_attn, + loss_type=dpo_loss_type, + beta=loss_beta, + label_smoothing=label_smoothing, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=build_preference_dataset, + dataset=dict(type=load_dataset, path='mlabonne/orpo-dpo-mix-40k'), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=orpo_dpo_mix_40k_map_fn, + is_dpo=True, + is_reward=False, + reward_token_id=-1, + num_proc=32, + use_varlen_attn=use_varlen_attn, + shuffle_before_pack=True, +) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict( + type=preference_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_steps, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_steps, + end=max_steps, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_iters=max_steps) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. 
+ timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/minicpm/minicpm3_4b/minicpm3_4b_full_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/minicpm/minicpm3_4b/minicpm3_4b_full_alpaca_zh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..1a9e249a66573adc18b639f097009896e82bae0e --- /dev/null +++ b/data/xtuner/xtuner/configs/minicpm/minicpm3_4b/minicpm3_4b_full_alpaca_zh_e3.py @@ -0,0 +1,201 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_zh_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'openbmb/MiniCPM3-4B' +use_varlen_attn = False + +# Data +alpaca_en_path = 'silk-road/alpaca-data-gpt4-chinese' +prompt_template = PROMPT_TEMPLATE.minicpm3 +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_steps = 10000 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + 
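+# NOTE (added for clarity, not upstream code): with `pack_to_max_length=True`
+# every training sample is a packed block of `max_length` tokens, so one
+# optimizer step consumes roughly
+#     batch_size * accumulative_counts * world_size * max_length
+# tokens, i.e. 1 * 16 * 1 * 2048 = 32768 tokens per step on a single GPU.
+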
+####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_steps, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_steps, + end=max_steps, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_iters=max_steps) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/mistral/mistral_7b_full_finetune_custom_dataset_e1.py b/data/xtuner/xtuner/configs/mistral/mistral_7b_full_finetune_custom_dataset_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..72c7a50aa43e915c006ebfa683ed47f2ea2d6c47 --- /dev/null +++ b/data/xtuner/xtuner/configs/mistral/mistral_7b_full_finetune_custom_dataset_e1.py @@ -0,0 +1,229 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Data format: +[ + { + "conversation": [ + { + "system": "", + "input": "xxx", + "output": "xxx" + }, + { + "input": "xxx", + "output": "xxx" + } + ] + }, +... +] +Please refer to https://github.com/InternLM/xtuner/blob/main/docs/en/user_guides/dataset_format.md for details. +""" # noqa: E501 +import torch +from datasets import load_dataset +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR +from torch.optim import AdamW +from torch.utils.data import BatchSampler +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import template_map_fn_factory +from xtuner.dataset.samplers import InternRepoSampler +from xtuner.engine import (DatasetInfoHook, EvaluateChatHook, ThroughputHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'mistralai/Mistral-7B-v0.1' +use_varlen_attn = True + +# Data +data_files = ['/path/to/json/file.json'] +prompt_template = PROMPT_TEMPLATE.mistral +max_length = 32768 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +# batch size per device, set to 1 if `use_varlen_attn` = True +# To clarify, enlarging the batch size essentially enlarges the `max_length`. 
+# For example, doubling the max length is tantamount to doubling the batch size +batch_size = 1 +accumulative_counts = 1 # 1bs * 1acc * 64gpu = 64 batchsize +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 4 +max_epochs = 1 +optim_type = AdamW +lr = 4e-5 +betas = (0.9, 0.95) +weight_decay = 0.01 +max_norm = 1 # grad clip +warm_up_ratio = 0.025 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.bfloat16, + attn_implementation='flash_attention_2')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + use_varlen_attn=use_varlen_attn, + dataset=dict(type=load_dataset, path='json', data_files=data_files), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=None, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=InternRepoSampler, shuffle=True, seed=1024), + batch_sampler=dict( + type=BatchSampler, drop_last=True, batch_size=batch_size), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', +) + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1 / 40, + by_epoch=True, + begin=0, + end=warm_up_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=lr * 0.15, + by_epoch=True, + begin=warm_up_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict( + type=DatasetInfoHook, 
tokenizer=tokenizer, + is_intern_repo_dataset=True), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template), + dict(type=ThroughputHook) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 100 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=1), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +log_processor = dict( + by_epoch=False, + window_size=1, + mean_pattern=r'.*(loss|time|data_time|grad_norm|tflops).*') diff --git a/data/xtuner/xtuner/configs/mistral/mistral_7b_qlora_skypile_pretrain_e1.py b/data/xtuner/xtuner/configs/mistral/mistral_7b_qlora_skypile_pretrain_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..e1260fe5b8aee0197581754b538502c64e3c3e20 --- /dev/null +++ b/data/xtuner/xtuner/configs/mistral/mistral_7b_qlora_skypile_pretrain_e1.py @@ -0,0 +1,211 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
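+# NOTE (added for clarity, not upstream code): this config continues
+# pre-training rather than chat SFT. `pretrain_map_fn` is paired with
+# `template_map_fn=None` below, so raw SkyPile text is packed and trained on
+# directly, without any dialogue template. Assuming the config name resolves
+# the way the README examples in this repo do, a launch would look like:
+#     xtuner train mistral_7b_qlora_skypile_pretrain_e1 --deepspeed deepspeed_zero2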
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import BitsAndBytesConfig, LlamaTokenizer, MistralForCausalLM + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import pretrain_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'mistralai/Mistral-7B-v0.1' +use_varlen_attn = False + +# Data +data_path = 'Skywork/SkyPile-150B' +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +evaluation_inputs = ['上海的景点有'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=LlamaTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=MistralForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.05, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=pretrain_map_fn, + template_map_fn=None, + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, 
use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + max_new_tokens=100) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/mistral/mistral_7b_w_tokenized_dataset.py b/data/xtuner/xtuner/configs/mistral/mistral_7b_w_tokenized_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..660a023ccfe407be2c5f29d90e81fb163a73d618 --- /dev/null +++ b/data/xtuner/xtuner/configs/mistral/mistral_7b_w_tokenized_dataset.py @@ -0,0 +1,211 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
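+# NOTE (added for clarity, not upstream code): this config trains on a
+# pre-tokenized intern-repo dataset. `load_intern_repo_tokenized_dataset`
+# reads `.bin` token files from `dataset_folder` as-is, and
+# `build_packed_dataset` packs them into `max_length` (32k) blocks, so no
+# tokenization or map_fn runs at training time. `/new/tokenizer/path` and
+# `/path/to/sft/data/folder` below are placeholders to fill in first.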
+import torch
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR
+from torch.optim import AdamW
+from torch.utils.data import BatchSampler
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.intern_repo import (build_packed_dataset,
+                                        load_intern_repo_tokenized_dataset)
+from xtuner.dataset.samplers import InternRepoSampler
+from xtuner.engine import (DatasetInfoHook, EvaluateChatHook, ThroughputHook,
+                           VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.utils import PROMPT_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'mistralai/Mistral-7B-v0.1'
+# The original Mistral chat template has been overridden with the InternLM2
+# one, and the new tokenizer already contains the special tokens it uses.
+# See docs/zh_cn/user_guides/finetune_custom_dataset.md for details.
+tokenizer_path = '/new/tokenizer/path'
+use_varlen_attn = True
+
+# Data
+dataset_folder = '/path/to/sft/data/folder'
+# The original Mistral chat template has been overridden with the InternLM2 one
+prompt_template = PROMPT_TEMPLATE.internlm2_chat
+max_length = 32768
+pack_to_max_length = True
+
+# parallel
+sequence_parallel_size = 1
+
+# Scheduler & Optimizer
+# batch size per device, set to 1 if `use_varlen_attn` = True
+# To clarify, enlarging the batch size essentially enlarges the `max_length`.
+# For example, doubling the max length is tantamount to doubling the batch size
+batch_size = 1
+accumulative_counts = 1
+accumulative_counts *= sequence_parallel_size
+dataloader_num_workers = 0
+max_epochs = 1
+optim_type = AdamW
+lr = 4e-5
+betas = (0.9, 0.95)
+weight_decay = 0.01
+max_norm = 1  # grad clip
+warm_up_ratio = 0.025
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = ''
+evaluation_inputs = [
+    '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai'
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=tokenizer_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    tokenizer=tokenizer,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.bfloat16,
+        attn_implementation='flash_attention_2'))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+train_dataset = dict(
+    type=build_packed_dataset,
+    dataset_cfg=dict(
+        type=load_intern_repo_tokenized_dataset,
+        data_order_path=None,
+        folder=dataset_folder,
+        min_length=0,
+        file_type='.bin'),
+    packed_length=max_length,
+    seed=1024)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=InternRepoSampler, shuffle=True, seed=1024),
+    batch_sampler=dict(
+
type=BatchSampler, drop_last=True, batch_size=batch_size), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1 / 40, + by_epoch=True, + begin=0, + end=warm_up_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=lr * 0.15, + by_epoch=True, + begin=warm_up_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +custom_hooks = [ + dict( + type=DatasetInfoHook, tokenizer=tokenizer, + is_intern_repo_dataset=True), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template), + dict(type=ThroughputHook) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 100 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=1), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +log_processor = dict( + by_epoch=False, + window_size=1, + mean_pattern=r'.*(loss|time|data_time|grad_norm|tflops).*') diff --git a/data/xtuner/xtuner/configs/mistral/mistral_7b_w_untokenized_dataset.py b/data/xtuner/xtuner/configs/mistral/mistral_7b_w_untokenized_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..e1bbe93043ef19da1a3114e64a96780f60af4c81 --- /dev/null +++ b/data/xtuner/xtuner/configs/mistral/mistral_7b_w_untokenized_dataset.py @@ -0,0 +1,208 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
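+# NOTE (added for clarity, not upstream code): same packed-dataset recipe as
+# mistral_7b_w_tokenized_dataset.py, but starting from raw `.json`
+# conversations: `load_intern_repo_untokenized_dataset` tokenizes them and
+# applies `PROMPT_TEMPLATE.mistral` before packing into 32k blocks. The
+# `/mnt/petrelfs/...` paths below are cluster-specific and must be replaced.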
+import torch +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR +from torch.optim import AdamW +from torch.utils.data import BatchSampler +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.intern_repo import (build_packed_dataset, + load_intern_repo_untokenized_dataset) +from xtuner.dataset.map_fns import template_map_fn_factory +from xtuner.dataset.samplers import InternRepoSampler +from xtuner.engine import (DatasetInfoHook, EvaluateChatHook, ThroughputHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = '/mnt/petrelfs/share_data/basemodel/checkpoints/llm/hf_hub/models--mistralai--Mistral-7B-v0.1/snapshots/5e9c98b96d071dce59368012254c55b0ec6f8658' # noqa: E501 +use_varlen_attn = True + +# Data +dataset_folder = '/mnt/petrelfs/share_data/caoweihan/v1_sample_with_legal_cate' +prompt_template = PROMPT_TEMPLATE.mistral +max_length = 32768 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +# batch size per device, set to 1 if `use_varlen_attn` = True +# To clarify, enlarging the batch size essentially enlarges the `max_length`. +# For example, doubling the max length is tantamount to doubling the batch size +batch_size = 1 +accumulative_counts = 1 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 4e-5 +betas = (0.9, 0.95) +weight_decay = 0.01 +max_norm = 1 # grad clip +warm_up_ratio = 0.025 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.bfloat16, + attn_implementation='flash_attention_2')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=build_packed_dataset, + dataset_cfg=dict( + type=load_intern_repo_untokenized_dataset, + folder=dataset_folder, + tokenizer=tokenizer, + max_length=max_length, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + file_type='.json'), + packed_length=max_length, + seed=1024) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + 
sampler=dict(type=InternRepoSampler, shuffle=True, seed=1024), + batch_sampler=dict( + type=BatchSampler, drop_last=True, batch_size=batch_size), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1 / 40, + by_epoch=True, + begin=0, + end=warm_up_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=lr * 0.15, + by_epoch=True, + begin=warm_up_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +custom_hooks = [ + dict( + type=DatasetInfoHook, tokenizer=tokenizer, + is_intern_repo_dataset=True), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template), + dict(type=ThroughputHook) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 100 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=1), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+log_processor = dict(
+    by_epoch=False,
+    window_size=1,
+    mean_pattern=r'.*(loss|time|data_time|grad_norm|tflops).*')
diff --git a/data/xtuner/xtuner/configs/mixtral/README.md b/data/xtuner/xtuner/configs/mixtral/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..eaee3324d760821074d09e1d6c11416dc0b8941a
--- /dev/null
+++ b/data/xtuner/xtuner/configs/mixtral/README.md
@@ -0,0 +1,57 @@
+# Mixtral 8x7B
+
+## Install
+
+```bash
+# Install the latest xtuner
+pip install -U 'xtuner[deepspeed]'
+
+# Mixtral requires flash-attn
+pip install flash-attn
+
+# Install the latest transformers
+pip install -U transformers
+```
+
+## QLoRA Fine-tune
+
+QLoRA fine-tuning needs only a single A100-80G GPU.
+
+```bash
+xtuner train mixtral_8x7b_instruct_qlora_oasst1_e3 --deepspeed deepspeed_zero2
+```
+
+## Full Parameter Fine-tune
+
+Full-parameter fine-tuning needs 16 A100-80G GPUs.
+
+### slurm
+
+Note: `$PARTITION` is the Slurm virtual partition.
+
+```bash
+srun -p $PARTITION --job-name=mixtral --nodes=2 --gres=gpu:8 --ntasks-per-node=8 xtuner train mixtral_8x7b_instruct_full_oasst1_e3 --deepspeed deepspeed_zero3 --launcher slurm
+```
+
+### torchrun
+
+Note: `$NODE_0_ADDR` is the IP address of the node_0 machine.
+
+```bash
+# execute on node 0
+NPROC_PER_NODE=8 NNODES=2 PORT=29600 ADDR=$NODE_0_ADDR NODE_RANK=0 xtuner train mixtral_8x7b_instruct_full_oasst1_e3 --deepspeed deepspeed_zero3
+
+# execute on node 1
+NPROC_PER_NODE=8 NNODES=2 PORT=29600 ADDR=$NODE_0_ADDR NODE_RANK=1 xtuner train mixtral_8x7b_instruct_full_oasst1_e3 --deepspeed deepspeed_zero3
+```
+
+### Speed
+
+16 * A100 80G:
+
+| Model        | Sequence Length | Use Varlen Attn | Sequence Parallel World Size | Tokens per Second |
+| :----------: | :-------------: | :-------------: | :--------------------------: | :---------------: |
+| mixtral_8x7b | 32k             | False           | 1                            | 853.7             |
+| mixtral_8x7b | 32k             | True            | 1                            | 910.1             |
+| mixtral_8x7b | 32k             | False           | 2                            | 635.2             |
+| mixtral_8x7b | 32k             | True            | 2                            | 650.9             |
diff --git a/data/xtuner/xtuner/configs/mixtral/mixtral_8x7b/mixtral_8x7b_full_oasst1_e3.py b/data/xtuner/xtuner/configs/mixtral/mixtral_8x7b/mixtral_8x7b_full_oasst1_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..784879ac24149c7761ad75acb351b3ef9ccc1ce0
--- /dev/null
+++ b/data/xtuner/xtuner/configs/mixtral/mixtral_8x7b/mixtral_8x7b_full_oasst1_e3.py
@@ -0,0 +1,202 @@
+# Copyright (c) OpenMMLab. All rights reserved.
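+# NOTE (added for clarity, not upstream code): full-parameter fine-tune of
+# Mixtral-8x7B on openassistant-guanaco; per the README above this needs
+# 16 A100-80G GPUs. A Slurm launch for this config would look like:
+#     srun -p $PARTITION --job-name=mixtral --nodes=2 --gres=gpu:8 \
+#         --ntasks-per-node=8 xtuner train mixtral_8x7b_full_oasst1_e3 \
+#         --deepspeed deepspeed_zero3 --launcher slurm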
+from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + ThroughputHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'mistralai/Mixtral-8x7B-v0.1' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.mixtral +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # 
+####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template), + dict(type=ThroughputHook) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/mixtral/mixtral_8x7b/mixtral_8x7b_qlora_oasst1_e3.py b/data/xtuner/xtuner/configs/mixtral/mixtral_8x7b/mixtral_8x7b_qlora_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..cb11f102f4c4dfc07149ac36c7fd97c69fdda8df --- /dev/null +++ b/data/xtuner/xtuner/configs/mixtral/mixtral_8x7b/mixtral_8x7b_qlora_oasst1_e3.py @@ -0,0 +1,223 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
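+# NOTE (added for clarity, not upstream code): the QLoRA variant of the
+# config above. Base weights are loaded 4-bit (nf4 with double quantization)
+# and LoRA adapters (r=64, lora_alpha=16) are attached to the attention
+# projections plus the MoE expert MLPs ('w1', 'w2', 'w3'); per the README
+# above, QLoRA fits on a single A100-80G. A launch would look like:
+#     xtuner train mixtral_8x7b_qlora_oasst1_e3 --deepspeed deepspeed_zero2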
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'mistralai/Mixtral-8x7B-v0.1' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.mixtral +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + target_modules=[ + 'q_proj', 'k_proj', 'v_proj', 'o_proj', 'w1', 'w2', 'w3' + ], + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + 
use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/mixtral/mixtral_8x7b_instruct/mixtral_8x7b_instruct_full_oasst1_e3.py b/data/xtuner/xtuner/configs/mixtral/mixtral_8x7b_instruct/mixtral_8x7b_instruct_full_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..0093d0d9a42f10de993fe9d48231a542ff5532d9 --- /dev/null +++ b/data/xtuner/xtuner/configs/mixtral/mixtral_8x7b_instruct/mixtral_8x7b_instruct_full_oasst1_e3.py @@ -0,0 +1,202 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + ThroughputHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'mistralai/Mixtral-8x7B-Instruct-v0.1' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.mixtral +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + 
type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template), + dict(type=ThroughputHook) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/mixtral/mixtral_8x7b_instruct/mixtral_8x7b_instruct_qlora_oasst1_e3.py b/data/xtuner/xtuner/configs/mixtral/mixtral_8x7b_instruct/mixtral_8x7b_instruct_qlora_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..3f348f9d931dbc5d8a3311b70e006796af6e0c3c --- /dev/null +++ b/data/xtuner/xtuner/configs/mixtral/mixtral_8x7b_instruct/mixtral_8x7b_instruct_qlora_oasst1_e3.py @@ -0,0 +1,223 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'mistralai/Mixtral-8x7B-Instruct-v0.1' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.mixtral +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + 
use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + target_modules=[ + 'q_proj', 'k_proj', 'v_proj', 'o_proj', 'w1', 'w2', 'w3' + ], + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/orpo/internlm/internlm2_chat_1_8b_orpo_full.py b/data/xtuner/xtuner/configs/orpo/internlm/internlm2_chat_1_8b_orpo_full.py new file mode 100644 index 0000000000000000000000000000000000000000..52881739ae8385b41b5e644d7344c2f2ad7078aa --- /dev/null +++ b/data/xtuner/xtuner/configs/orpo/internlm/internlm2_chat_1_8b_orpo_full.py @@ -0,0 +1,197 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset.collate_fns.preference_collate_fn import \ + preference_collate_fn +from xtuner.dataset.preference_dataset import (build_preference_dataset, + orpo_dpo_mix_40k_map_fn) +from xtuner.engine.hooks import (EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model.orpo import ORPO +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-chat-1_8b-sft' +use_varlen_attn = False +loss_beta = 0.1 + +# Data +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 2048 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 5e-6 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + 'What famous British author, known for his tales of mystery and the macabre, shares his initials with a common abbreviation for "rest in peace"?', # noqa: E501 + 'Please tell me five scenic spots in Shanghai', + '890729 - 425663? Only respond with math and no words.' 
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=ORPO, + beta=loss_beta, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=build_preference_dataset, + dataset=dict(type=load_dataset, path='mlabonne/orpo-dpo-mix-40k'), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=orpo_dpo_mix_40k_map_fn, + is_dpo=True, + is_reward=False, + reward_token_id=-1, + num_proc=32, + use_varlen_attn=use_varlen_attn, + shuffle_before_pack=True, +) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict( + type=preference_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + # dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/orpo/internlm/internlm2_chat_1_8b_orpo_full_varlenattn.py b/data/xtuner/xtuner/configs/orpo/internlm/internlm2_chat_1_8b_orpo_full_varlenattn.py new file mode 100644 index 0000000000000000000000000000000000000000..d4cf3d65a7f34d4fd28937702f220482a5960d61 --- /dev/null +++ b/data/xtuner/xtuner/configs/orpo/internlm/internlm2_chat_1_8b_orpo_full_varlenattn.py @@ -0,0 +1,207 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset.collate_fns.preference_collate_fn import \ + preference_collate_fn +from xtuner.dataset.preference_dataset import (build_preference_dataset, + orpo_dpo_mix_40k_map_fn) +from xtuner.engine.hooks import (EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model.orpo import ORPO +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-chat-1_8b-sft' +use_varlen_attn = True +loss_beta = 0.1 + +# parallel +sequence_parallel_size = 1 + +# Data +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 2048 +max_packed_length = max_length * 2 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 5e-6 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + 'What famous British author, known for his tales of mystery and the macabre, shares his initials with a common abbreviation for "rest in peace"?', # noqa: E501 + 'Please tell me five scenic spots in Shanghai', + '890729 - 425663? Only respond with math and no words.' 
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=ORPO, + use_varlen_attn=use_varlen_attn, + beta=loss_beta, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataset = dict( + type=build_preference_dataset, + dataset=dict(type=load_dataset, path='mlabonne/orpo-dpo-mix-40k'), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=orpo_dpo_mix_40k_map_fn, + is_dpo=True, + is_reward=False, + reward_token_id=-1, + num_proc=32, + use_varlen_attn=use_varlen_attn, + max_packed_length=max_packed_length, + shuffle_before_pack=True, +) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict( + type=preference_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + # dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/orpo/internlm/internlm2_chat_1_8b_orpo_full_varlenattn_jsonl_dataset.py b/data/xtuner/xtuner/configs/orpo/internlm/internlm2_chat_1_8b_orpo_full_varlenattn_jsonl_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..126ff4bd8dc6f3ff3099d58dfb56e6d833542418 --- /dev/null +++ b/data/xtuner/xtuner/configs/orpo/internlm/internlm2_chat_1_8b_orpo_full_varlenattn_jsonl_dataset.py @@ -0,0 +1,211 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset.collate_fns.preference_collate_fn import \ + preference_collate_fn +from xtuner.dataset.preference_dataset import (build_preference_dataset, + load_jsonl_dataset) +from xtuner.engine.hooks import (EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model.orpo import ORPO +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-chat-1_8b-sft' +use_varlen_attn = True +loss_beta = 0.1 + +# Data +prompt_template = PROMPT_TEMPLATE.internlm2_chat +max_length = 2048 +max_packed_length = max_length * 2 + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 5e-6 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + 'What famous British author, known for his tales of mystery and the macabre, shares his initials with a common abbreviation for "rest in peace"?', # noqa: E501 + 'Please tell me five scenic spots in Shanghai', + '890729 - 425663? Only respond with math and no words.' 
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=ORPO, + use_varlen_attn=use_varlen_attn, + beta=loss_beta, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataset = dict( + type=build_preference_dataset, + dataset=dict( + type=load_jsonl_dataset, + data_files=[ + '/your/jsonl/path/here.jsonl', + '/your/another/jsonl/path/here.jsonl' + ]), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=None, + is_dpo=True, + is_reward=False, + reward_token_id=-1, + num_proc=32, + use_varlen_attn=use_varlen_attn, + max_packed_length=max_packed_length, + shuffle_before_pack=True, +) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict( + type=preference_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + # dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/orpo/internlm/internlm2_chat_7b_orpo_qlora_varlenattn_ultrafeedback_e5.py b/data/xtuner/xtuner/configs/orpo/internlm/internlm2_chat_7b_orpo_qlora_varlenattn_ultrafeedback_e5.py new file mode 100644 index 0000000000000000000000000000000000000000..2e7cdaa0a827328734bc2f86d74f48481e3ff3a8 --- /dev/null +++ b/data/xtuner/xtuner/configs/orpo/internlm/internlm2_chat_7b_orpo_qlora_varlenattn_ultrafeedback_e5.py @@ -0,0 +1,229 @@ +# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset.collate_fns.preference_collate_fn import \
+    preference_collate_fn
+from xtuner.dataset.preference_dataset import (build_preference_dataset,
+                                               orpo_dpo_mix_40k_map_fn)
+from xtuner.engine.hooks import (EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model.orpo import ORPO
+from xtuner.parallel.sequence import SequenceParallelSampler
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+# Matches the 7B model named in this config's filename.
+pretrained_model_name_or_path = 'internlm/internlm2-chat-7b-sft'
+use_varlen_attn = True
+loss_beta = 0.1
+
+# Data
+prompt_template = PROMPT_TEMPLATE.internlm2_chat
+max_length = 2048
+max_packed_length = max_length * 2
+
+# parallel
+sequence_parallel_size = 1
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+accumulative_counts *= sequence_parallel_size
+dataloader_num_workers = 0
+max_epochs = 5  # refer to the ORPO repo
+optim_type = AdamW
+lr = 5e-6
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.01
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+SYSTEM = SYSTEM_TEMPLATE.alpaca
+evaluation_inputs = [
+    'What famous British author, known for his tales of mystery and the macabre, shares his initials with a common abbreviation for "rest in peace"?',  # noqa: E501
+    'Please tell me five scenic spots in Shanghai',
+    '890729 - 425663? Only respond with math and no words.'
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=ORPO, + use_varlen_attn=use_varlen_attn, + beta=loss_beta, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataset = dict( + type=build_preference_dataset, + dataset=dict( + type=load_dataset, + path='argilla/ultrafeedback-binarized-preferences-cleaned'), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=orpo_dpo_mix_40k_map_fn, + is_dpo=True, + is_reward=False, + reward_token_id=-1, + num_proc=32, + use_varlen_attn=use_varlen_attn, + max_packed_length=max_packed_length, + shuffle_before_pack=True, +) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict( + type=preference_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + # dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks 
+default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/orpo/llama/llama3_8b_instruct_orpo_qlora_varlenattn_ultrafeedback_e5.py b/data/xtuner/xtuner/configs/orpo/llama/llama3_8b_instruct_orpo_qlora_varlenattn_ultrafeedback_e5.py new file mode 100644 index 0000000000000000000000000000000000000000..00608c621227b40ba0828c4153a90ee5add51902 --- /dev/null +++ b/data/xtuner/xtuner/configs/orpo/llama/llama3_8b_instruct_orpo_qlora_varlenattn_ultrafeedback_e5.py @@ -0,0 +1,229 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset.collate_fns.preference_collate_fn import \ + preference_collate_fn +from xtuner.dataset.preference_dataset import (build_preference_dataset, + orpo_dpo_mix_40k_map_fn) +from xtuner.engine.hooks import (EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model.orpo import ORPO +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Meta-Llama-3-8B-Instruct' +use_varlen_attn = True +loss_beta = 0.1 + +# Data +prompt_template = PROMPT_TEMPLATE.llama3_chat +max_length = 2048 +max_packed_length = max_length * 2 + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 5 # refer to orpo repo +optim_type = AdamW +lr = 5e-6 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.01 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = 
SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + 'What famous British author, known for his tales of mystery and the macabre, shares his initials with a common abbreviation for "rest in peace"?', # noqa: E501 + 'Please tell me five scenic spots in Shanghai', + '890729 - 425663? Only respond with math and no words.' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=ORPO, + use_varlen_attn=use_varlen_attn, + beta=loss_beta, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataset = dict( + type=build_preference_dataset, + dataset=dict( + type=load_dataset, + path='argilla/ultrafeedback-binarized-preferences-cleaned'), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=orpo_dpo_mix_40k_map_fn, + is_dpo=True, + is_reward=False, + reward_token_id=-1, + num_proc=32, + use_varlen_attn=use_varlen_attn, + max_packed_length=max_packed_length, + shuffle_before_pack=True, +) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict( + type=preference_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + # dict(type=DatasetInfoHook, 
tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/phi/phi3/phi3_mini_128k_instruct_full_alpaca_e3.py b/data/xtuner/xtuner/configs/phi/phi3/phi3_mini_128k_instruct_full_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..d60f675331431cc329f8e3d9d684cd0c051d6f07 --- /dev/null +++ b/data/xtuner/xtuner/configs/phi/phi3/phi3_mini_128k_instruct_full_alpaca_e3.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
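+# NOTE: this config packs tatsu-lab/alpaca samples into sequences of
+# max_length = 128 * 1024 = 131072 tokens, matching the 128K context window
+# of Phi-3-mini-128k-instruct. Full-parameter training at that length is
+# memory-hungry; a hedged launch sketch with a ZeRO-3 preset (preset name
+# assumed, mirroring the xtuner CLI used for the other configs):
+#
+#   xtuner train phi3_mini_128k_instruct_full_alpaca_e3.py --deepspeed deepspeed_zero3
+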
+from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'microsoft/Phi-3-mini-128k-instruct' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.phi3_chat +max_length = 128 * 1024 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # 
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+# PART 5 Runtime #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/phi/phi3/phi3_mini_128k_instruct_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/phi/phi3/phi3_mini_128k_instruct_qlora_alpaca_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..f528da716520b04019e7005b1a25d1afc7e1ea0e
--- /dev/null
+++ b/data/xtuner/xtuner/configs/phi/phi3/phi3_mini_128k_instruct_qlora_alpaca_e3.py
@@ -0,0 +1,219 @@
+# Copyright (c) OpenMMLab. All rights reserved.
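+# QLoRA counterpart of the full fine-tune above: the frozen base model is
+# loaded in 4-bit NF4 through bitsandbytes and only the LoRA adapters
+# (r=64, lora_alpha=16) receive gradients, so the learning rate rises to
+# 2e-4 while the schedule stays the same. Roughly, the lazy `model` dict
+# below resolves at build time to something like this sketch (simplified;
+# the real path goes through xtuner's SupervisedFinetune wrapper):
+#
+#     quant = BitsAndBytesConfig(
+#         load_in_4bit=True,
+#         bnb_4bit_quant_type='nf4',
+#         bnb_4bit_compute_dtype=torch.float16,
+#         bnb_4bit_use_double_quant=True)
+#     llm = AutoModelForCausalLM.from_pretrained(
+#         pretrained_model_name_or_path,
+#         quantization_config=quant,
+#         torch_dtype=torch.float16,
+#         trust_remote_code=True)
+#     # the LoraConfig is then attached, e.g. peft.get_peft_model(llm, lora)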
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'microsoft/Phi-3-mini-128k-instruct' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.phi3_chat +max_length = 128 * 1024 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + 
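+# Note on the config idiom: every dict carrying a `type=` key in this file
+# (tokenizer, model, alpaca_en, ...) is a lazy spec rather than a live
+# object; mmengine materialises it when the runner is built. Conceptually
+# (a simplified sketch; the real resolution goes through mmengine's
+# Config/registry machinery):
+#
+#     def build(cfg):
+#         kwargs = dict(cfg)
+#         fn = kwargs.pop('type')  # a class or plain callable
+#         return fn(**kwargs)      # e.g. process_hf_dataset(...)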
+sampler = SequenceParallelSampler \
+    if sequence_parallel_size > 1 else DefaultSampler
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=alpaca_en,
+    sampler=dict(type=sampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+# PART 4 Scheduler & Optimizer #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+# PART 5 Runtime #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/phi/phi3/phi3_mini_4k_instruct_full_alpaca_e3.py b/data/xtuner/xtuner/configs/phi/phi3/phi3_mini_4k_instruct_full_alpaca_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..64f198d34e977bc18a6f08bc4337344bf7671749
--- /dev/null
+++ b/data/xtuner/xtuner/configs/phi/phi3/phi3_mini_4k_instruct_full_alpaca_e3.py
@@ -0,0 +1,199 @@
+# Copyright (c) OpenMMLab.
All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'microsoft/Phi-3-mini-4k-instruct' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.phi3_chat +max_length = 4096 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # 
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+# PART 5 Runtime #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/phi/phi3/phi3_mini_4k_instruct_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/phi/phi3/phi3_mini_4k_instruct_qlora_alpaca_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..e90e17a14844b33b3e45c5a19cdc53c5abe48753
--- /dev/null
+++ b/data/xtuner/xtuner/configs/phi/phi3/phi3_mini_4k_instruct_qlora_alpaca_e3.py
@@ -0,0 +1,219 @@
+# Copyright (c) OpenMMLab. All rights reserved.
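+# Same QLoRA recipe as the 128k variant, paired with the 4k-context base
+# model and max_length = 4096. Because pack_to_max_length=True turns every
+# sample into one full 4096-token block, the token budget per optimizer
+# step on a single device works out to roughly:
+#
+#     batch_size * accumulative_counts * max_length
+#     = 1 * 16 * 4096
+#     = 65536 tokens
+#
+# (multiply by the number of data-parallel ranks for the global figure).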
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'microsoft/Phi-3-mini-4k-instruct' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.phi3_chat +max_length = 4096 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = 
SequenceParallelSampler \
+    if sequence_parallel_size > 1 else DefaultSampler
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=alpaca_en,
+    sampler=dict(type=sampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+# PART 4 Scheduler & Optimizer #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+# PART 5 Runtime #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_1_8b/qwen_1_8b_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_1_8b/qwen_1_8b_qlora_alpaca_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..9245722b64938dd1e00caa20e7b9db9d54348c51
--- /dev/null
+++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_1_8b/qwen_1_8b_qlora_alpaca_e3.py
@@ -0,0 +1,221 @@
+# Copyright (c) OpenMMLab.
All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-1_8B' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + 
use_varlen_attn=use_varlen_attn)
+
+sampler = SequenceParallelSampler \
+    if sequence_parallel_size > 1 else DefaultSampler
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=alpaca_en,
+    sampler=dict(type=sampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+# PART 4 Scheduler & Optimizer #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+# PART 5 Runtime #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_1_8b/qwen_1_8b_qlora_alpaca_enzh_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_1_8b/qwen_1_8b_qlora_alpaca_enzh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..88b8225142949eab67ce4b8ccc81f135553ccd85 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_1_8b/qwen_1_8b_qlora_alpaca_enzh_e3.py @@ -0,0 +1,238 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-1_8B' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + 
eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) 
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_1_8b/qwen_1_8b_qlora_alpaca_enzh_oasst1_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_1_8b/qwen_1_8b_qlora_alpaca_enzh_oasst1_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..bce10312882e1e8773177a2d7047df4ba06fd2b8
--- /dev/null
+++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_1_8b/qwen_1_8b_qlora_alpaca_enzh_oasst1_e3.py
@@ -0,0 +1,253 @@
+# Copyright (c) OpenMMLab. All rights reserved.
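+# Three-corpus variant: English Alpaca, a GPT-4-generated Chinese Alpaca
+# set and OpenAssistant-Guanaco are tokenized, templated and packed
+# independently, each with its own map_fn (alpaca_map_fn, alpaca_zh_map_fn,
+# oasst1_map_fn), then merged with ConcatDataset so one shuffled dataloader
+# draws from all three. Structurally the merged dataset amounts to:
+#
+#     train_dataset = ConcatDataset(datasets=[alpaca_en, alpaca_zh, oasst1])
+#
+# (built lazily from the dicts defined in PART 3 below).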
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + oasst1_map_fn, template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-1_8B' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +oasst1_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, 
template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+alpaca_zh = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=alpaca_zh_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=alpaca_zh_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+oasst1 = dict(
+    type=process_hf_dataset,
+    dataset=dict(type=load_dataset, path=oasst1_path),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=oasst1_map_fn,
+    template_map_fn=dict(
+        type=template_map_fn_factory, template=prompt_template),
+    remove_unused_columns=True,
+    shuffle_before_pack=True,
+    pack_to_max_length=pack_to_max_length,
+    use_varlen_attn=use_varlen_attn)
+
+train_dataset = dict(
+    type=ConcatDataset, datasets=[alpaca_en, alpaca_zh, oasst1])
+
+sampler = SequenceParallelSampler \
+    if sequence_parallel_size > 1 else DefaultSampler
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=sampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+# PART 4 Scheduler & Optimizer #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+# PART 5 Runtime #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_1_8b/qwen_1_8b_qlora_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_1_8b/qwen_1_8b_qlora_alpaca_zh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..332cff37bf5b3a5c9e902aea1282dec542386951 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_1_8b/qwen_1_8b_qlora_alpaca_zh_e3.py @@ -0,0 +1,221 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_zh_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-1_8B' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + 
use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_zh, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_1_8b/qwen_1_8b_qlora_code_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_1_8b/qwen_1_8b_qlora_code_alpaca_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..d7c087735e9271d476c59d53775ef6e2ae11f596
--- /dev/null
+++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_1_8b/qwen_1_8b_qlora_code_alpaca_e3.py
@@ -0,0 +1,225 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import code_alpaca_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.parallel.sequence import SequenceParallelSampler
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+# PART 1 Settings #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'Qwen/Qwen-1_8B'
+use_varlen_attn = False
+
+# Data
+data_path = 'HuggingFaceH4/CodeAlpaca_20K'
+prompt_template = PROMPT_TEMPLATE.default
+max_length = 2048
+pack_to_max_length = True
+
+# parallel
+sequence_parallel_size = 1
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+accumulative_counts *= sequence_parallel_size
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 100
+SYSTEM = SYSTEM_TEMPLATE.coder
+evaluation_inputs = [
+    ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的'
+     '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'),
+    ('Write a Python function that takes a hexadecimal color code '
+     '(e.g., #0066ee) as input and converts it into the corresponding '
+     'red, green, and blue (RGB) color component values.')
+]
+
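+# The two evaluation prompts above (one Chinese, one English, both asking
+# for a hex-to-RGB conversion function) are sampled by EvaluateChatHook
+# every `evaluation_freq` = 100 iterations, a tighter cadence than the 500
+# used in the general-chat configs, so code quality can be inspected as
+# training progresses. For reference, a correct answer looks like this
+# (illustrative only, not part of the config):
+#
+#     def hex_to_rgb(code: str) -> tuple:
+#         code = code.lstrip('#')
+#         return tuple(int(code[i:i + 2], 16) for i in (0, 2, 4))
+#
+#     assert hex_to_rgb('#0066ee') == (0, 102, 238)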
+####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=code_alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks 
+default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_1_8b_chat/qwen_1_8b_chat_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_1_8b_chat/qwen_1_8b_chat_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..24c0040fab9591eb32b3fc3f52698489e46b56f3 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_1_8b_chat/qwen_1_8b_chat_qlora_alpaca_e3.py @@ -0,0 +1,221 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-1_8B-Chat' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ 
+ '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|im_end|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += 
[dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_1_8b_chat/qwen_1_8b_chat_qlora_alpaca_enzh_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_1_8b_chat/qwen_1_8b_chat_qlora_alpaca_enzh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..366958d49da16ca6288023956c45be986ca9aa89 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_1_8b_chat/qwen_1_8b_chat_qlora_alpaca_enzh_e3.py @@ -0,0 +1,238 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-1_8B-Chat' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum 
checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|im_end|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + 
convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Periodically log sample dialogues during training (optional)
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Default to a random seed with `deterministic` disabled
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_1_8b_chat/qwen_1_8b_chat_qlora_alpaca_enzh_oasst1_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_1_8b_chat/qwen_1_8b_chat_qlora_alpaca_enzh_oasst1_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..60bdd3dca0e95e16af70df79f4739ed4bdd2076e
--- /dev/null
+++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_1_8b_chat/qwen_1_8b_chat_qlora_alpaca_enzh_oasst1_e3.py
@@ -0,0 +1,253 @@
+# Copyright (c) OpenMMLab. All rights reserved.
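+# (editor's summary) QLoRA fine-tune of Qwen-1.8B-Chat on the concatenation of
+# English Alpaca, Chinese Alpaca-GPT4 and OpenAssistant-Guanaco; the three
+# datasets are merged with ConcatDataset in PART 3 below.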
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + oasst1_map_fn, template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-1_8B-Chat' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +oasst1_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|im_end|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, 
template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +oasst1 = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=oasst1_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[alpaca_en, alpaca_zh, oasst1]) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
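+    # (editor) i.e. DistSamplerSeedHook keeps the shuffled sample order in
+    # sync across ranks in multi-GPU runs; it is harmless on a single GPU.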
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_1_8b_chat/qwen_1_8b_chat_qlora_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_1_8b_chat/qwen_1_8b_chat_qlora_alpaca_zh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..058e200eef9504dd9ed2fb4236c3c523bca0c75a --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_1_8b_chat/qwen_1_8b_chat_qlora_alpaca_zh_e3.py @@ -0,0 +1,221 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_zh_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-1_8B-Chat' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|im_end|>') + +model = dict( + 
type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_zh, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
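+    # (editor) with by_epoch=False the `interval` below is counted in
+    # optimizer iterations, and max_keep_ckpts prunes the oldest checkpoints.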
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Default to a random seed with `deterministic` disabled
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_1_8b_chat/qwen_1_8b_chat_qlora_code_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_1_8b_chat/qwen_1_8b_chat_qlora_code_alpaca_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..c50519930a061012991070e46f3789a3c143971e
--- /dev/null
+++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_1_8b_chat/qwen_1_8b_chat_qlora_code_alpaca_e3.py
@@ -0,0 +1,225 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import code_alpaca_map_fn, template_map_fn_factory
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.parallel.sequence import SequenceParallelSampler
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'Qwen/Qwen-1_8B-Chat'
+use_varlen_attn = False
+
+# Data
+data_path = 'HuggingFaceH4/CodeAlpaca_20K'
+prompt_template = PROMPT_TEMPLATE.qwen_chat
+max_length = 2048
+pack_to_max_length = True
+
+# parallel
+sequence_parallel_size = 1
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+accumulative_counts *= sequence_parallel_size
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate generation performance during training
+evaluation_freq = 100
+SYSTEM = SYSTEM_TEMPLATE.coder
+evaluation_inputs = [
+    ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的'
+     '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'),
+    ('Write a Python function that takes a hexadecimal color code '
+     '(e.g., #0066ee) as input and converts it into the corresponding '
+     'red, green, and blue (RGB) color component values.')
+]
+
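+# NOTE (editor): a minimal launch sketch, assuming xtuner's standard CLI and
+# that this file keeps its upstream config name (the --deepspeed flag is
+# optional):
+#
+#   xtuner train qwen_1_8b_chat_qlora_code_alpaca_e3 --deepspeed deepspeed_zero2
+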
+####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|im_end|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=code_alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks 
= dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_72b/qwen_72b_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_72b/qwen_72b_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..9f4d5ceb9431afddc309ea10719641244c3d2c53 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_72b/qwen_72b_qlora_alpaca_e3.py @@ -0,0 +1,221 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-72B' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in 
Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks 
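+# (editor) The runtime section is identical across the 1.8B and 72B configs;
+# only the model, prompt template and tokenizer settings in PART 1/2 differ.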
+default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_72b/qwen_72b_qlora_alpaca_enzh_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_72b/qwen_72b_qlora_alpaca_enzh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..f985d04c4103e810eed80ffa9f3c4dc7362904d6 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_72b/qwen_72b_qlora_alpaca_enzh_e3.py @@ -0,0 +1,238 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-72B' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training 
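+# (editor) EvaluateChatHook (PART 5) generates completions for the prompts
+# below every `evaluation_freq` iterations and writes them to the training log.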
+evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, 
max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Periodically log sample dialogues during training (optional)
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Default to a random seed with `deterministic` disabled
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_72b/qwen_72b_qlora_alpaca_enzh_oasst1_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_72b/qwen_72b_qlora_alpaca_enzh_oasst1_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..2c5b951b06cc9e8af02f9344ce2fd6bf9a317cbb
--- /dev/null
+++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_72b/qwen_72b_qlora_alpaca_enzh_oasst1_e3.py
@@ -0,0 +1,253 @@
+# Copyright (c) OpenMMLab. All rights reserved.
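+# (editor's summary) Same three-dataset recipe as the 1.8B-Chat variant above,
+# applied to the Qwen-72B base model with the default prompt template.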
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + oasst1_map_fn, template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-72B' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +oasst1_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, 
template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +oasst1 = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=oasst1_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[alpaca_en, alpaca_zh, oasst1]) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_72b/qwen_72b_qlora_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_72b/qwen_72b_qlora_alpaca_zh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..4c3f85eb41f76201325341493f5d5487d2628d72 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_72b/qwen_72b_qlora_alpaca_zh_e3.py @@ -0,0 +1,221 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_zh_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-72B' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + 
use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_zh, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_72b/qwen_72b_qlora_code_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_72b/qwen_72b_qlora_code_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..5cc74fe06d1cf212c2187f1d1e256a86f74cb8c6 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_72b/qwen_72b_qlora_code_alpaca_e3.py @@ -0,0 +1,225 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import code_alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-72B' +use_varlen_attn = False + +# Data +data_path = 'HuggingFaceH4/CodeAlpaca_20K' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 100 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + 
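One detail of the Settings block above that is easy to miss: `accumulative_counts *= sequence_parallel_size` keeps the number of samples per optimizer step constant when sequence parallelism splits each sample across GPUs. A hedged back-of-envelope of what one optimizer step consumes, assuming an illustrative 8-GPU node (the GPU count is not part of the config):

```python
# Hedged back-of-envelope: data consumed per optimizer step.
# n_gpus = 8 is an illustrative assumption, not stated in the config.
batch_size = 1                  # per-device micro-batch
accumulative_counts = 16        # gradient-accumulation steps
sequence_parallel_size = 1
accumulative_counts *= sequence_parallel_size
max_length = 2048               # packed sequence length
n_gpus = 8

data_parallel_ranks = n_gpus // sequence_parallel_size
seqs_per_step = batch_size * accumulative_counts * data_parallel_ranks
tokens_per_step = seqs_per_step * max_length
print(seqs_per_step, tokens_per_step)   # 128 sequences, 262144 tokens
```

Because `pack_to_max_length=True`, each of those sequences is a full 2048-token pack of concatenated samples, so token throughput per step stays fixed regardless of individual sample lengths.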
+####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=code_alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks 
+default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..c2e267f0ca0804589d9f1904e37ec5d46c55c5a0 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_alpaca_e3.py @@ -0,0 +1,221 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic 
spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default 
hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_alpaca_enzh_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_alpaca_enzh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..77af4d90305de7f0bfa9afb86115e2959a877f0f --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_alpaca_enzh_e3.py @@ -0,0 +1,238 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq 
= 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + 
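The `param_scheduler` pair above is a standard warmup-then-cosine recipe: `LinearLR` ramps the learning rate from `lr * 1e-5` up to `lr` over the first 3% of training (`warmup_ratio * max_epochs`), then `CosineAnnealingLR` decays it to zero over the remainder, with `convert_to_iter_based=True` translating the epoch boundaries into iteration counts. A hedged sketch of the resulting per-iteration value (a simplified reimplementation for intuition, not mmengine's exact scheduler code):

```python
import math

# Simplified sketch of the warmup + cosine LR shape configured above;
# for intuition only, not mmengine's exact scheduler implementation.
def lr_at(iteration, total_iters, lr=2e-4, warmup_ratio=0.03,
          start_factor=1e-5, eta_min=0.0):
    warmup_iters = int(warmup_ratio * total_iters)
    if iteration < warmup_iters:
        # LinearLR: scale factor ramps linearly from start_factor to 1.0
        t = iteration / max(warmup_iters, 1)
        return lr * (start_factor + (1.0 - start_factor) * t)
    # CosineAnnealingLR: cosine decay from lr down to eta_min
    t = (iteration - warmup_iters) / max(total_iters - warmup_iters, 1)
    return eta_min + (lr - eta_min) * 0.5 * (1.0 + math.cos(math.pi * t))

print(lr_at(0, 10_000))       # ~2e-9   (start of warmup)
print(lr_at(300, 10_000))     # 2e-4    (warmup complete)
print(lr_at(10_000, 10_000))  # 0.0     (end of training)
```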
+####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_alpaca_enzh_oasst1_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_alpaca_enzh_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..9a84fa1bfd8cbb4dda4a6e7636050e55810b8edb --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_alpaca_enzh_oasst1_e3.py @@ -0,0 +1,253 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
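A rough sense of why the QLoRA recipe shared by these configs (frozen base weights quantized to 4-bit NF4, with only rank-64 LoRA adapters trained in higher precision) makes even large bases trainable on modest hardware, using round illustrative parameter counts rather than measured numbers:

```python
# Hedged, order-of-magnitude memory estimate for 4-bit quantized weights.
# Parameter counts are round illustrative numbers, not measured values,
# and the total excludes activations, LoRA params, optimizer state, and
# quantization metadata (double quantization trims that overhead a bit).
def frozen_weight_gib(n_params_billion, bits=4):
    return n_params_billion * 1e9 * bits / 8 / 1024**3

for n in (7, 72):
    print(f'{n}B base in 4-bit NF4: ~{frozen_weight_gib(n):.1f} GiB')
# 7B  -> ~3.3 GiB
# 72B -> ~33.5 GiB
```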
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + oasst1_map_fn, template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +oasst1_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, 
template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +oasst1 = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=oasst1_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[alpaca_en, alpaca_zh, oasst1]) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_alpaca_zh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..e4967ac51008f23d05be6f41388e4523eaf14881 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_alpaca_zh_e3.py @@ -0,0 +1,221 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_zh_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + 
use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_zh, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_arxiv_gentitle_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_arxiv_gentitle_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..256a2dfc388bedaf954a679887273e646108b915 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_arxiv_gentitle_e3.py @@ -0,0 +1,256 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import arxiv_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B' +use_varlen_attn = False + +# Data +# 1. Download data from https://kaggle.com/datasets/Cornell-University/arxiv +# 2. Process data by `xtuner preprocess arxiv ${DOWNLOADED_DATA} ./data/arxiv_data.json [optional arguments]` # noqa: E501 +data_path = './data/arxiv_data.json' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.arxiv_gentile +evaluation_inputs = [ + ('We present InternLM, a multilingual foundational language ' + 'model with 104B parameters. 
InternLM is pre-trained on a large ' + 'corpora with 1.6T tokens with a multi-phase progressive ' + 'process, and then fine-tuned to align with human preferences. ' + 'We also developed a training system called Uniscale-LLM for ' + 'efficient large language model training. The evaluation on a ' + 'number of benchmarks shows that InternLM achieves ' + 'state-of-the-art performance in multiple aspects, including ' + 'knowledge understanding, reading comprehension, mathematics, ' + 'and coding. With such well-rounded capabilities, InternLM ' + 'achieves outstanding performances on comprehensive exams, ' + 'including MMLU, AGIEval, C-Eval and GAOKAO-Bench, without ' + 'resorting to external tools. On these benchmarks, InternLM ' + 'not only significantly outperforms open-source models, but ' + 'also obtains superior performance compared to ChatGPT. Also, ' + 'InternLM demonstrates excellent capability of understanding ' + 'Chinese language and Chinese culture, which makes it a ' + 'suitable foundation model to support Chinese-oriented language ' + 'applications. This manuscript gives a detailed study of ' + 'our results, with benchmarks and examples across a diverse ' + 'set of knowledge domains and tasks.'), + ('In this work, we develop and release Llama 2, a collection of ' + 'pretrained and fine-tuned large language models (LLMs) ranging ' + 'in scale from 7 billion to 70 billion parameters.\nOur ' + 'fine-tuned LLMs, called LLAMA 2-CHAT, are optimized for ' + 'dialogue use cases. Our models outperform open-source chat ' + 'models on most benchmarks we tested, and based on our human ' + 'evaluations for helpfulness and safety, may be a suitable ' + 'substitute for closedsource models. We provide a detailed ' + 'description of our approach to fine-tuning and safety ' + 'improvements of LLAMA 2-CHAT in order to enable the community ' + 'to build on our work and contribute to the responsible ' + 'development of LLMs.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=arxiv_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + 
use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_code_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_code_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..853cd63bce25af12b741cf0615dc47593db00ce6 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_code_alpaca_e3.py @@ -0,0 +1,225 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import code_alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B' +use_varlen_attn = False + +# Data +data_path = 'HuggingFaceH4/CodeAlpaca_20K' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 100 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + 
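# [Editor's note] Throughout these configs, `dict(type=<callable>, ...)`
# is a lazy build spec rather than an immediate call: mmengine's
# registry later invokes AutoTokenizer.from_pretrained(**kwargs) with
# the keyword arguments collected below, so nothing heavy is loaded at
# config-import time.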
pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=code_alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. 
+ param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_colorist_e5.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_colorist_e5.py new file mode 100644 index 0000000000000000000000000000000000000000..631441e764798a840db901a4e03f795914e1f8fe --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_colorist_e5.py @@ -0,0 +1,221 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import colors_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B' +use_varlen_attn = False + +# Data +data_path = 'burkelibbey/colors' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 5 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 200 +SYSTEM = SYSTEM_TEMPLATE.colorist +evaluation_inputs = [ + '请给我一个像天空一样清澈透明的蓝色。', 'Please give me a clear blue like the sky.' 
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=colors_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks 
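# [Editor's note] A sketch (hypothetical iteration count, not from the
# upstream config) of how the LinearLR/CosineAnnealingLR pair above
# splits this colorist run. With warmup_ratio = 0.03 and max_epochs = 5,
# warm-up covers the first 0.03 * 5 = 0.15 epoch; because both entries
# set convert_to_iter_based=True, mmengine turns the epoch boundaries
# into iteration counts at runtime:
#
#     iters_per_epoch = 1000                     # hypothetical
#     warmup_end = 0.03 * 5 * iters_per_epoch    # = 150 iterations
#     total_end = 5 * iters_per_epoch            # = 5000 iterations
#
# i.e. 150 linear warm-up iterations followed by 4850 iterations of
# cosine decay down to eta_min=0.0. End of note; the default hooks of
# this config follow.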
+default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_lawyer_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_lawyer_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..9c1b64f847f48dc9000add6af53f2261a4053ff1 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_lawyer_e3.py @@ -0,0 +1,245 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (crime_kg_assitant_map_fn, + law_reference_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B' +use_varlen_attn = False + +# Data +# download data from https://github.com/LiuHC0428/LAW-GPT +crime_kg_assitant_path = './data/CrimeKgAssitant清洗后_52k.json' +law_reference_data_path = './data/训练数据_带法律依据_92k.json' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means 
unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.lawyer +evaluation_inputs = ['请问离婚需要准备什么材料?', '销售鳄鱼皮包违法吗?'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +crime_kg_assitant = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=crime_kg_assitant_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=crime_kg_assitant_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +law_reference_data = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=law_reference_data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=law_reference_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataset = dict( + type=ConcatDataset, datasets=[crime_kg_assitant, law_reference_data]) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + 
by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_medical_e1.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_medical_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..c8b657d039e0034a8b24edbe130de44f7ec8d022 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_medical_e1.py @@ -0,0 +1,223 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
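# [Editor's note] The `model` dict repeated in each of these QLoRA
# configs (4-bit NF4 base weights plus a rank-64 LoRA adapter)
# corresponds roughly to the direct transformers/peft calls below. This
# is a minimal standalone sketch, assuming `transformers`, `peft` and
# `bitsandbytes` are installed; it is not part of the upstream file.
import torch
from peft import LoraConfig, get_peft_model
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

_bnb = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type='nf4',
    bnb_4bit_compute_dtype=torch.float16,
    bnb_4bit_use_double_quant=True)
_base = AutoModelForCausalLM.from_pretrained(
    'Qwen/Qwen-7B', quantization_config=_bnb, trust_remote_code=True)
_lora = get_peft_model(
    _base,
    LoraConfig(r=64, lora_alpha=16, lora_dropout=0.1,
               bias='none', task_type='CAUSAL_LM'))
# Only the adapter weights are trainable; the 4-bit base stays frozen.
_lora.print_trainable_parameters()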
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import medical_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B' +use_varlen_attn = False + +# Data +data_path = 'shibing624/medical' +data_config_name = 'finetune' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.medical +evaluation_inputs = [ + '我有家族遗传性的过敏,请问可以可以献血吗?', '我爷爷有高血压,请问他可以喝咖啡吗?', + '我女儿今年3岁了,从昨天晚上九点开始腹泻,到现在已经八个小时了,请问应该怎么办?' 
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path, name=data_config_name), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=medical_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure 
default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_moss_sft_all_e1.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_moss_sft_all_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..6ae00805cb0b480d91b6b7fbc6a4ce0fccfa7f18 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_moss_sft_all_e1.py @@ -0,0 +1,226 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, MOSSSFTDataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B' +bot_name = 'Qwen' +use_varlen_attn = False + +# Data +# Download data from https://huggingface.co/datasets/fnlp/moss-003-sft-data +moss_sft_no_plugins_path = './data/moss-003-sft-no-tools.jsonl' +moss_sft_plugins_path = './data/conversations_with_tools_with_inner_instruction_no_text2image_train_all_random_meta0.5_0.1_0.01_moss_0709.jsonl' # noqa: E501 +max_length = 2048 + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance 
during the training +SYSTEM = SYSTEM_TEMPLATE.moss_sft +prompt_template = PROMPT_TEMPLATE.moss_sft +evaluation_freq = 500 +evaluation_inputs = [ + '一个球体的表面积是384平方厘米,求它的体积。', '今有鸡兔同笼,上有二十头,下有六十二足, 问鸡兔各几何?', '介绍一下比尔盖茨' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +moss_sft_no_plugins = dict( + type=MOSSSFTDataset, + data_file=moss_sft_no_plugins_path, + bot_name=bot_name, + tokenizer=tokenizer, + max_length=max_length) + +moss_sft_plugins = dict( + type=MOSSSFTDataset, + data_file=moss_sft_plugins_path, + bot_name=bot_name, + tokenizer=tokenizer, + max_length=max_length) + +train_dataset = dict( + type=ConcatDataset, datasets=[moss_sft_no_plugins, moss_sft_plugins]) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + 
every_n_iters=evaluation_freq, + stop_words=[''], + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_moss_sft_all_e2_gpu8.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_moss_sft_all_e2_gpu8.py new file mode 100644 index 0000000000000000000000000000000000000000..99cfdc985b09b6bac6b236cba7c7b1fbc1d8a25d --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_moss_sft_all_e2_gpu8.py @@ -0,0 +1,225 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
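# [Editor's note] The `_e2_gpu8` variant below trades gradient
# accumulation for real data parallelism. The effective batch per
# optimizer step is batch_size * accumulative_counts * num_gpus, so
# (assuming single-GPU use of the e1 config above):
#
#     moss e1,  1 GPU : 1 * 16 * 1 = 16 samples per step
#     this one, 8 GPUs: 8 *  1 * 8 = 64 samples per step
#
# which is why this config can drop accumulative_counts to 1.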
+import torch +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, MOSSSFTDataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B' +bot_name = 'Qwen' +use_varlen_attn = False + +# Data +# Download data from https://huggingface.co/datasets/fnlp/moss-003-sft-data +moss_sft_no_plugins_path = './data/moss-003-sft-no-tools.jsonl' +moss_sft_plugins_path = './data/conversations_with_tools_with_inner_instruction_no_text2image_train_all_random_meta0.5_0.1_0.01_moss_0709.jsonl' # noqa: E501 +max_length = 2048 + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 8 # per_device +accumulative_counts = 1 +dataloader_num_workers = 2 +max_epochs = 2 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +SYSTEM = SYSTEM_TEMPLATE.moss_sft +prompt_template = PROMPT_TEMPLATE.moss_sft +evaluation_freq = 500 +evaluation_inputs = [ + '一个球体的表面积是384平方厘米,求它的体积。', '今有鸡兔同笼,上有二十头,下有六十二足, 问鸡兔各几何?', '介绍一下比尔盖茨' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +moss_sft_no_plugins = dict( + type=MOSSSFTDataset, + data_file=moss_sft_no_plugins_path, + bot_name=bot_name, + tokenizer=tokenizer, + max_length=max_length) + +moss_sft_plugins = dict( + type=MOSSSFTDataset, + data_file=moss_sft_plugins_path, + bot_name=bot_name, + tokenizer=tokenizer, 
+    max_length=max_length)
+
+train_dataset = dict(
+    type=ConcatDataset, datasets=[moss_sft_no_plugins, moss_sft_plugins])
+
+sampler = SequenceParallelSampler \
+    if sequence_parallel_size > 1 else DefaultSampler
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=sampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        stop_words=[''],
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_moss_sft_plugins_e1.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_moss_sft_plugins_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..3f391dc332a61a45b1e56bd69502467b8ed765a7 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_moss_sft_plugins_e1.py @@ -0,0 +1,215 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import MOSSSFTDataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B' +bot_name = 'Qwen' +use_varlen_attn = False + +# Data +# Download data from https://huggingface.co/datasets/fnlp/moss-003-sft-data +moss_sft_plugins_path = './data/conversations_with_tools_with_inner_instruction_no_text2image_train_all_random_meta0.5_0.1_0.01_moss_0709.jsonl' # noqa: E501 +max_length = 2048 + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +SYSTEM = SYSTEM_TEMPLATE.moss_sft +prompt_template = PROMPT_TEMPLATE.moss_sft +evaluation_freq = 500 +evaluation_inputs = [ + '一个球体的表面积是384平方厘米,求它的体积。', '今有鸡兔同笼,上有二十头,下有六十二足, 问鸡兔各几何?', '介绍一下比尔盖茨' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + 
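# [Editor's note, not part of the upstream file] Qwen-1 checkpoints do
# not appear to ship standard special tokens, so these configs pin
# eos_token explicitly on the next line; right padding keeps causal-LM
# training batches aligned with their attention masks.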
eos_token='<|endoftext|>')
+
+model = dict(
+    type=SupervisedFinetune,
+    use_varlen_attn=use_varlen_attn,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='CAUSAL_LM'))
+
+#######################################################################
+#                     PART 3  Dataset & Dataloader                    #
+#######################################################################
+train_dataset = dict(
+    type=MOSSSFTDataset,
+    data_file=moss_sft_plugins_path,
+    bot_name=bot_name,
+    tokenizer=tokenizer,
+    max_length=max_length)
+
+sampler = SequenceParallelSampler \
+    if sequence_parallel_size > 1 else DefaultSampler
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=sampler, shuffle=True),
+    collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = [
+    dict(type=DatasetInfoHook, tokenizer=tokenizer),
+    dict(
+        type=EvaluateChatHook,
+        tokenizer=tokenizer,
+        every_n_iters=evaluation_freq,
+        stop_words=[''],
+        evaluation_inputs=evaluation_inputs,
+        system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_oasst1_512_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_oasst1_512_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..ec7704e6f34ba32d434089229a0b8d772df8a728 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_oasst1_512_e3.py @@ -0,0 +1,221 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.default +max_length = 512 +pack_to_max_length = False + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + 
type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_oasst1_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..080e4cfc99b34eaab6b5971c7fec9d1981891ef8 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_oasst1_e3.py @@ -0,0 +1,221 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + 
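+    # Qwen-1 ships its tokenizer implementation as remote code on the Hub,
+    # hence trust_remote_code=True.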
padding_side='right', + eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_open_platypus_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_open_platypus_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..bead036543a1bda5132ca69c6113dcd3b121e801 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_open_platypus_e3.py @@ -0,0 +1,221 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B' +use_varlen_attn = False + +# Data +data_path = 'garage-bAInd/Open-Platypus' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + 
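+    # mmengine lazy-config style: `type` holds the callable and the remaining
+    # keys become its kwargs when the runner builds the object.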
pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. 
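+    # (ParamSchedulerHook steps the LinearLR and CosineAnnealingLR schedules
+    # from PART 4 once per training iteration.)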
+ param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_openorca_e1.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_openorca_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..bbe3f18e0aedd6069db15b1a903407eeef393e66 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_openorca_e1.py @@ -0,0 +1,221 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openorca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B' +use_varlen_attn = False + +# Data +data_path = 'Open-Orca/OpenOrca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 5000 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + 
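+    # eos_token is pinned explicitly below; Qwen's remote tokenizer does not
+    # define one on its own, and supervised fine-tuning needs a concrete
+    # end-of-text token.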
pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openorca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. 
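+    # (Through this hook the PART 4 policy takes effect: linear warmup over
+    # the first 3% of training, then cosine decay of the LR to zero.)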
+ param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_sql_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_sql_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..19de9c3c4a3a1507657162bdb0457d909f4d650f --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_sql_e3.py @@ -0,0 +1,225 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import sql_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B' +use_varlen_attn = False + +# Data +data_path = 'b-mc2/sql-create-context' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.sql +evaluation_inputs = [ + ('CREATE TABLE station (name VARCHAR, lat VARCHAR, city VARCHAR)\n' + 'Find the name, latitude, and city of stations with latitude ' + 'above 50.'), + ('CREATE TABLE weather (zip_code VARCHAR, mean_visibility_miles ' + 'INTEGER)\n找到mean_visibility_miles最大的zip_code。') +] + 
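These settings fix the effective batch size: per-device batch size times gradient-accumulation steps times the number of data-parallel ranks, and with `pack_to_max_length=True` every sample fills a full `max_length` window. A minimal sketch of that arithmetic (the `effective_batch` helper is ours, for illustration only, not part of xtuner; `world_size` here means data-parallel ranks):

```python
def effective_batch(batch_size: int, accumulative_counts: int,
                    world_size: int, max_length: int):
    """Return (sequences, approximate tokens) consumed per optimizer step."""
    seqs = batch_size * accumulative_counts * world_size
    # Token count assumes pack_to_max_length=True, i.e. full windows.
    return seqs, seqs * max_length

# This config on a single GPU: 1 * 16 * 1 = 16 sequences per optimizer step,
# i.e. roughly 16 * 2048 = 32768 tokens.
print(effective_batch(1, 16, 1, 2048))  # (16, 32768)
```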
+####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=sql_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = 
dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_tiny_codes_e1.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_tiny_codes_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..c2391f8bc94da23a2414c743c4e54a085979ff19 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b/qwen_7b_qlora_tiny_codes_e1.py @@ -0,0 +1,225 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import template_map_fn_factory, tiny_codes_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B' +use_varlen_attn = False + +# Data +data_path = 'nampdn-ai/tiny-codes' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + 
'红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|endoftext|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=tiny_codes_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + 
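+        # Generates answers to `evaluation_inputs` with the current weights
+        # every `evaluation_freq` iterations, for a quick qualitative check
+        # of training progress.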
every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..eda0f5c9e0c353ebc4aa50df32943a230c70cf10 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_alpaca_e3.py @@ -0,0 +1,221 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
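+# QLoRA fine-tune of Qwen-7B-Chat on tatsu-lab/alpaca. Unlike the base-model
+# configs above, this one uses the qwen_chat prompt template and '<|im_end|>'
+# as the EOS token.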
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B-Chat' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|im_end|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + 
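Nothing above executes at import time: `alpaca_en`, `tokenizer` and friends are lazy recipes that the runner materializes on startup. A simplified sketch of that build step, with `build` as our illustrative stand-in for mmengine's real builder (which also resolves registries, scopes and nested lists):

```python
def build(cfg):
    """Recursively instantiate an mmengine-style lazy config dict."""
    if isinstance(cfg, dict) and 'type' in cfg:
        # Build nested configs first, then call the `type` callable
        # with everything else as keyword arguments.
        kwargs = {k: build(v) for k, v in cfg.items() if k != 'type'}
        return cfg['type'](**kwargs)
    return cfg

# build(tokenizer) effectively runs:
# AutoTokenizer.from_pretrained('Qwen/Qwen-7B-Chat', trust_remote_code=True,
#                               padding_side='right', eos_token='<|im_end|>')
```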
+sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_alpaca_enzh_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_alpaca_enzh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..e6d5c76e625dc8452e7cba698f3fd447bbaa94bb --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_alpaca_enzh_e3.py @@ -0,0 +1,238 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B-Chat' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + 
padding_side='right', + eos_token='<|im_end|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + 
prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_alpaca_enzh_oasst1_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_alpaca_enzh_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..e9ee0420ac0d20b12b9e2bdeb5e894711c6a6acf --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_alpaca_enzh_oasst1_e3.py @@ -0,0 +1,253 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
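+# Same recipe again, now trained on three corpora merged with ConcatDataset:
+# English Alpaca, its GPT-4 Chinese counterpart, and OpenAssistant-Guanaco.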
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + oasst1_map_fn, template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B-Chat' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +oasst1_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|im_end|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, 
template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +oasst1 = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=oasst1_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[alpaca_en, alpaca_zh, oasst1]) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_alpaca_zh_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_alpaca_zh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..4aa6bac4f68749d09a00dd442a8ee3669fbaa9d0 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_alpaca_zh_e3.py @@ -0,0 +1,221 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_zh_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B-Chat' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|im_end|>') + +model = dict( + 
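+    # [Editor's gloss - a rough imperative sketch of what mmengine builds lazily
+    # from this nested dict; the names below only mirror the config itself]
+    #   base = AutoModelForCausalLM.from_pretrained(
+    #       'Qwen/Qwen-7B-Chat',
+    #       quantization_config=BitsAndBytesConfig(load_in_4bit=True,
+    #                                              bnb_4bit_quant_type='nf4'))
+    #   qlora = SupervisedFinetune(llm=base, lora=LoraConfig(r=64, lora_alpha=16))
+    # i.e. QLoRA: a frozen 4-bit NF4 base model plus trainable rank-64 adapters.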
type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_zh, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
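+    # [Editor's note] with by_epoch=False the interval is counted in iterations:
+    # a weight file is written every save_steps=500 iters and max_keep_ckpts=2
+    # deletes all but the two newest checkpoints to bound disk usage.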
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_arxiv_gentitle_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_arxiv_gentitle_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..be1b36849649268b4b82fed9732bda2283c2d414 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_arxiv_gentitle_e3.py @@ -0,0 +1,256 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import arxiv_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B-Chat' +use_varlen_attn = False + +# Data +# 1. Download data from https://kaggle.com/datasets/Cornell-University/arxiv +# 2. Process data by `xtuner preprocess arxiv ${DOWNLOADED_DATA} ./data/arxiv_data.json [optional arguments]` # noqa: E501 +data_path = './data/arxiv_data.json' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.arxiv_gentile +evaluation_inputs = [ + ('We present InternLM, a multilingual foundational language ' + 'model with 104B parameters. 
InternLM is pre-trained on a large ' + 'corpora with 1.6T tokens with a multi-phase progressive ' + 'process, and then fine-tuned to align with human preferences. ' + 'We also developed a training system called Uniscale-LLM for ' + 'efficient large language model training. The evaluation on a ' + 'number of benchmarks shows that InternLM achieves ' + 'state-of-the-art performance in multiple aspects, including ' + 'knowledge understanding, reading comprehension, mathematics, ' + 'and coding. With such well-rounded capabilities, InternLM ' + 'achieves outstanding performances on comprehensive exams, ' + 'including MMLU, AGIEval, C-Eval and GAOKAO-Bench, without ' + 'resorting to external tools. On these benchmarks, InternLM ' + 'not only significantly outperforms open-source models, but ' + 'also obtains superior performance compared to ChatGPT. Also, ' + 'InternLM demonstrates excellent capability of understanding ' + 'Chinese language and Chinese culture, which makes it a ' + 'suitable foundation model to support Chinese-oriented language ' + 'applications. This manuscript gives a detailed study of ' + 'our results, with benchmarks and examples across a diverse ' + 'set of knowledge domains and tasks.'), + ('In this work, we develop and release Llama 2, a collection of ' + 'pretrained and fine-tuned large language models (LLMs) ranging ' + 'in scale from 7 billion to 70 billion parameters.\nOur ' + 'fine-tuned LLMs, called LLAMA 2-CHAT, are optimized for ' + 'dialogue use cases. Our models outperform open-source chat ' + 'models on most benchmarks we tested, and based on our human ' + 'evaluations for helpfulness and safety, may be a suitable ' + 'substitute for closedsource models. We provide a detailed ' + 'description of our approach to fine-tuning and safety ' + 'improvements of LLAMA 2-CHAT in order to enable the community ' + 'to build on our work and contribute to the responsible ' + 'development of LLMs.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|im_end|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, path='json', data_files=dict(train=data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=arxiv_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + 
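+    # [Editor's note] shuffle_before_pack + pack_to_max_length concatenate many
+    # short tokenized abstracts into dense 2048-token training blocks; without
+    # packing, short samples would spend most of each block on padding.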
use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_code_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_code_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..46ea7f28f0268413ba4383eaebb6fd2913f30b02 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_code_alpaca_e3.py @@ -0,0 +1,225 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import code_alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B-Chat' +use_varlen_attn = False + +# Data +data_path = 'HuggingFaceH4/CodeAlpaca_20K' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 100 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + 
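+    # [Editor's aside] the hex-to-RGB evaluation prompt in PART 1 above has a
+    # compact reference answer, handy for eyeballing the model's generations
+    # (illustrative only, not used anywhere in this config):
+    #   def hex_to_rgb(code):
+    #       code = code.lstrip('#')  # '#0066ee' -> '0066ee'
+    #       return tuple(int(code[i:i + 2], 16) for i in range(0, 6, 2))
+    #   hex_to_rgb('#0066ee')  # -> (0, 102, 238)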
type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|im_end|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=code_alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. 
+ logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_colorist_e5.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_colorist_e5.py new file mode 100644 index 0000000000000000000000000000000000000000..59eed5896ce4dcc525fb761fc81537bb5ff9f100 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_colorist_e5.py @@ -0,0 +1,221 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import colors_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B-Chat' +use_varlen_attn = False + +# Data +data_path = 'burkelibbey/colors' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 5 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 200 +SYSTEM = SYSTEM_TEMPLATE.colorist +evaluation_inputs = [ + '请给我一个像天空一样清澈透明的蓝色。', 'Please give me a clear blue like the sky.' 
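+    # [Editor's note] burkelibbey/colors maps free-form colour descriptions to hex
+    # codes, so a good answer to the prompt above lands near a light sky blue such
+    # as '#87ceeb' - treat the exact value as illustrative, not a gold label.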
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|im_end|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=colors_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks 
= dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_lawyer_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_lawyer_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..b2cd75040837c8d43da22a7df54ec0bb9d5996e7 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_lawyer_e3.py @@ -0,0 +1,245 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (crime_kg_assitant_map_fn, + law_reference_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B-Chat' +use_varlen_attn = False + +# Data +# download data from https://github.com/LiuHC0428/LAW-GPT +crime_kg_assitant_path = './data/CrimeKgAssitant清洗后_52k.json' +law_reference_data_path = './data/训练数据_带法律依据_92k.json' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to 
keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.lawyer +evaluation_inputs = ['请问离婚需要准备什么材料?', '销售鳄鱼皮包违法吗?'] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|im_end|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +crime_kg_assitant = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=crime_kg_assitant_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=crime_kg_assitant_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +law_reference_data = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path='json', + data_files=dict(train=law_reference_data_path)), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=law_reference_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict( + type=ConcatDataset, datasets=[crime_kg_assitant, law_reference_data]) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + 
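+        # [Editor's note] warmup takes warmup_ratio * max_epochs = 0.03 * 3 = 0.09
+        # epochs (LinearLR above), after which this cosine phase decays the LR from
+        # 2e-4 towards eta_min; convert_to_iter_based=True maps both to iterations.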
eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_medical_e1.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_medical_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..a3037d86f13233ba41e490615b1f1f96cc956699 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_medical_e1.py @@ -0,0 +1,223 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
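+# [Editor's note] the qwen_7b_chat configs in this directory share one QLoRA
+# recipe and differ only in PART 1 and PART 3: to retarget the pipeline, swap
+# data_path and the dataset_map_fn (medical_map_fn here) and, if needed, adjust
+# max_epochs and the evaluation prompts; everything else can stay as-is.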
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import medical_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B-Chat' +use_varlen_attn = False + +# Data +data_path = 'shibing624/medical' +data_config_name = 'finetune' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.medical +evaluation_inputs = [ + '我有家族遗传性的过敏,请问可以可以献血吗?', '我爷爷有高血压,请问他可以喝咖啡吗?', + '我女儿今年3岁了,从昨天晚上九点开始腹泻,到现在已经八个小时了,请问应该怎么办?' 
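+    # [Editor's gloss] the Chinese prompts above ask: "I have hereditary allergies -
+    # can I donate blood?", "My grandfather has hypertension - can he drink coffee?",
+    # and "My 3-year-old daughter has had diarrhoea since 9 pm yesterday, eight hours
+    # now - what should I do?"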
+] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|im_end|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path, name=data_config_name), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=medical_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure 
default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_oasst1_512_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_oasst1_512_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..899939b24587e0b56d1c475afbb67f13ffafc675 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_oasst1_512_e3.py @@ -0,0 +1,221 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B-Chat' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 512 +pack_to_max_length = False + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = '' +evaluation_inputs = [ + 
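+    # [Editor's note] this *_512_e3 variant trains on raw samples truncated at
+    # max_length=512 with pack_to_max_length=False, trading throughput for a much
+    # smaller activation footprint than the packed 2048-token configs above.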
'请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|im_end|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += 
[dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_oasst1_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_oasst1_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..20eb1f806639d7cb0773a3fe8bebc9ff25c1c0ca --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_oasst1_e3.py @@ -0,0 +1,221 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import oasst1_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B-Chat' +use_varlen_attn = False + +# Data +data_path = 'timdettmers/openassistant-guanaco' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 
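+# [Editor's note] SYSTEM is left empty here because openassistant-guanaco samples
+# already carry complete '### Human:'/'### Assistant:' conversations, so no extra
+# system instruction is prepended during the periodic chat evaluation.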
+SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|im_end|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=oasst1_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + 
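+    # [Editor's note] when use_varlen_attn=True this hook publishes each packed
+    # batch's cumulative sequence lengths to the message hub so attention stays
+    # confined to the original samples inside a pack; it is skipped here (False).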
custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_open_platypus_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_open_platypus_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..aa09ec408417dcaa0065a3078227d380a8a354b7 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_open_platypus_e3.py @@ -0,0 +1,221 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B-Chat' +use_varlen_attn = False + +# Data +data_path = 'garage-bAInd/Open-Platypus' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation 
performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|im_end|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + 
system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_openorca_e1.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_openorca_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..1abd4ec504e9ee6785e25d02dd13ba94325bc5b3 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_openorca_e1.py @@ -0,0 +1,221 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
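A note on the idiom used throughout these config files: every component is described lazily as a dict whose 'type' key holds the class or factory, and the runner instantiates it later. The helper below is a minimal illustrative sketch of that pattern, not xtuner's actual builder (the real one lives in mmengine's registry machinery):

import torch
from torch.optim import AdamW

def build(cfg):
    cfg = dict(cfg)                      # copy so the config stays reusable
    factory = cfg.pop('type')            # the class or function to call
    # nested dicts that carry their own 'type' are built recursively
    kwargs = {k: build(v) if isinstance(v, dict) and 'type' in v else v
              for k, v in cfg.items()}
    return factory(**kwargs)

# Example: an optimizer described lazily, then built on demand.
params = [torch.nn.Parameter(torch.zeros(2))]
opt_cfg = dict(type=AdamW, params=params, lr=2e-4, betas=(0.9, 0.999))
optimizer = build(opt_cfg)
print(type(optimizer).__name__)          # -> AdamW

Keeping configs as plain dicts is what lets a single file describe models, datasets, and hooks without importing or allocating anything until training actually starts.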
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import openorca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B-Chat' +use_varlen_attn = False + +# Data +data_path = 'Open-Orca/OpenOrca' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 5000 +SYSTEM = '' +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|im_end|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openorca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + 
if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_sql_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_sql_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..8f5a6fe4d02e61c1e36d3212829402773a2a426c --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_sql_e3.py @@ -0,0 +1,225 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
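Before the next config, a sketch of what the two-phase param_scheduler in these files works out to: a linear warmup over warmup_ratio * max_epochs, then cosine decay to zero, with mmengine converting the epoch-based begin/end into iterations when convert_to_iter_based=True. The iteration count below is a made-up assumption, since it depends on dataset size and batch settings:

import math

warmup_ratio, max_epochs, base_lr = 0.03, 3, 2e-4
iters_per_epoch = 10_000                 # assumption, for illustration only
warmup_iters = int(warmup_ratio * max_epochs * iters_per_epoch)  # 900
total_iters = max_epochs * iters_per_epoch

def lr_at(step, start_factor=1e-5):
    if step < warmup_iters:              # LinearLR: factor ramps start_factor -> 1
        f = start_factor + (1 - start_factor) * step / warmup_iters
        return base_lr * f
    t = (step - warmup_iters) / (total_iters - warmup_iters)
    return 0.5 * base_lr * (1 + math.cos(math.pi * t))  # cosine to eta_min=0.0

print(lr_at(0), lr_at(warmup_iters), lr_at(total_iters))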
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import sql_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B-Chat' +use_varlen_attn = False + +# Data +data_path = 'b-mc2/sql-create-context' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.sql +evaluation_inputs = [ + ('CREATE TABLE station (name VARCHAR, lat VARCHAR, city VARCHAR)\n' + 'Find the name, latitude, and city of stations with latitude ' + 'above 50.'), + ('CREATE TABLE weather (zip_code VARCHAR, mean_visibility_miles ' + 'INTEGER)\n找到mean_visibility_miles最大的zip_code。') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|im_end|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=sql_map_fn, + template_map_fn=dict( 
+ type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +#                    PART 4  Scheduler & Optimizer                    # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +#                           PART 5  Runtime                           # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed environment.
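The sampler_seed hook configured here exists so that, under distributed data parallelism, every rank reshuffles identically each epoch. In plain PyTorch terms it amounts to calling set_epoch() on the sampler; in this self-contained sketch, num_replicas and rank are passed explicitly so it runs without torch.distributed being initialized:

import torch
from torch.utils.data import DataLoader, DistributedSampler, TensorDataset

dataset = TensorDataset(torch.arange(8))
sampler = DistributedSampler(dataset, num_replicas=1, rank=0, shuffle=True)
loader = DataLoader(dataset, batch_size=2, sampler=sampler)

for epoch in range(2):
    sampler.set_epoch(epoch)   # reseeds the shuffle deterministically per epoch
    for (batch,) in loader:
        pass                   # the training step would go here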
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_tiny_codes_e1.py b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_tiny_codes_e1.py new file mode 100644 index 0000000000000000000000000000000000000000..f0044f04392d6ae4cebc740729f8ea6ff003f7f6 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1/qwen_7b_chat/qwen_7b_chat_qlora_tiny_codes_e1.py @@ -0,0 +1,225 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import template_map_fn_factory, tiny_codes_map_fn +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen-7B-Chat' +use_varlen_attn = False + +# Data +data_path = 'nampdn-ai/tiny-codes' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.coder +evaluation_inputs = [ + ('写一个Python函数,将十六进制颜色代码(如#0066ee)转换为对应的' + '红、绿、蓝(RGB)三个颜色分量值,并以元组的形式返回。'), + ('Write a Python function that takes a hexadecimal color code ' + '(e.g., #0066ee) as input and converts it into the corresponding ' + 'red, green, and blue (RGB) color component values.') +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + 
type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right', + eos_token='<|im_end|>') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=data_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=tiny_codes_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. 
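On pack_to_max_length=True, used by the dataset section above: tokenized samples are concatenated and re-cut into fixed-length blocks so every training sequence is exactly max_length tokens, which keeps GPU utilization steady. A greedy sketch of the idea, not xtuner's exact implementation (which also shuffles first when shuffle_before_pack=True):

# Sketch: greedy packing of token streams into fixed-length blocks.
def pack(token_lists, max_length=2048):
    packed, buf = [], []
    for ids in token_lists:          # each `ids` is a list of token ids
        buf.extend(ids)
        while len(buf) >= max_length:
            packed.append(buf[:max_length])
            buf = buf[max_length:]
    return packed                    # any short tail left in buf is handled separately

print(len(pack([[1] * 1500, [2] * 1500, [3] * 1500], max_length=2048)))  # -> 2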
+ logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_0_5b/qwen1_5_0_5b_full_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_0_5b/qwen1_5_0_5b_full_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..dec0ed76ef7337b46be16d1d3ea83602d6a5e280 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_0_5b/qwen1_5_0_5b_full_alpaca_e3.py @@ -0,0 +1,200 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-0.5B' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # 
+####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
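Note the contrast between this full-finetune config and its QLoRA siblings: all weights of Qwen1.5-0.5B train, so the learning rate drops from 2e-4 to 2e-5 and no quantization_config or lora block appears in the model section. A back-of-envelope estimate of why full finetuning is so much heavier, using the common AMP + AdamW rule of thumb (assumed byte counts; activations and framework overhead ignored):

# Rough optimizer-state footprint for full finetuning under AMP with AdamW.
n_params = 0.5e9                       # Qwen1.5-0.5B, approximately
bytes_per_param = 2 + 2 + 4 + 4 + 4    # fp16 weight + fp16 grad + fp32 master
                                       # copy + two fp32 Adam moments = 16 B
print(f'{n_params * bytes_per_param / 2**30:.1f} GiB of training state')  # ~7.5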
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_0_5b/qwen1_5_0_5b_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_0_5b/qwen1_5_0_5b_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..341544eb982cab58e3c0875ce69e3bbcc61c84d1 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_0_5b/qwen1_5_0_5b_qlora_alpaca_e3.py @@ -0,0 +1,220 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-0.5B' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + 
llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
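The checkpoint hook configured here saves by iteration rather than by epoch and rotates old files. A sketch of the retention bookkeeping implied by interval=save_steps and max_keep_ckpts=save_total_limit; CheckpointHook does this for real, the loop below only illustrates the policy:

import os
from collections import deque

save_steps, save_total_limit = 500, 2
kept = deque()

def maybe_save(step):
    if step == 0 or step % save_steps:
        return
    path = f'work_dirs/iter_{step}.pth'
    # torch.save(state_dict, path)  # the actual save is elided in this sketch
    kept.append(path)
    while len(kept) > save_total_limit:   # rotate: drop the oldest checkpoint
        stale = kept.popleft()
        if os.path.exists(stale):
            os.remove(stale)

for step in range(1, 2001):
    maybe_save(step)
print(list(kept))  # -> ['work_dirs/iter_1500.pth', 'work_dirs/iter_2000.pth']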
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_0_5b_chat/qwen1_5_0_5b_chat_full_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_0_5b_chat/qwen1_5_0_5b_chat_full_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..fcd9c24d239bbb3ec373878332bc03c1e2d003d3 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_0_5b_chat/qwen1_5_0_5b_chat_full_alpaca_e3.py @@ -0,0 +1,200 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-0.5B-Chat' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + 
trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
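On the use_varlen_attn toggle that keeps reappearing in these configs: when enabled, packed samples keep their boundaries and variable-length attention kernels receive cumulative sequence lengths instead of a dense attention mask; VarlenAttnArgsToMessageHubHook forwards those values through mmengine's MessageHub. Only the bookkeeping idea, sketched:

import torch

# e.g. three packed samples of lengths 5, 3 and 8 become offsets [0, 5, 8, 16],
# so each sample attends only within its own [start, end) slice.
def cumulative_seqlens(seqlens):
    return torch.cumsum(torch.tensor([0] + list(seqlens)), dim=0)

print(cumulative_seqlens([5, 3, 8]))  # tensor([ 0,  5,  8, 16])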
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_0_5b_chat/qwen1_5_0_5b_chat_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_0_5b_chat/qwen1_5_0_5b_chat_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..129b12752a8f6c82da48266672f6f703266c6b55 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_0_5b_chat/qwen1_5_0_5b_chat_qlora_alpaca_e3.py @@ -0,0 +1,220 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-0.5B-Chat' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + 
use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
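The lazy llm/lora dicts above encode the standard QLoRA recipe: a 4-bit NF4 base model with a LoRA adapter on top. Built eagerly with transformers + peft it looks roughly like the sketch below; this is not xtuner's actual code path (SupervisedFinetune wires it up internally), and running it needs a CUDA device with bitsandbytes installed:

import torch
from peft import LoraConfig, get_peft_model, prepare_model_for_kbit_training
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

bnb = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_compute_dtype=torch.float16,
    bnb_4bit_use_double_quant=True,
    bnb_4bit_quant_type='nf4')
model = AutoModelForCausalLM.from_pretrained(
    'Qwen/Qwen1.5-0.5B-Chat', quantization_config=bnb,
    torch_dtype=torch.float16, trust_remote_code=True)
model = prepare_model_for_kbit_training(model)
model = get_peft_model(model, LoraConfig(
    r=64, lora_alpha=16, lora_dropout=0.1, bias='none',
    task_type='CAUSAL_LM'))
model.print_trainable_parameters()  # only the LoRA adapters are trainable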
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_110b/qwen1_5_110b_full_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_110b/qwen1_5_110b_full_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..b16660ec064aa6cc16e096c52b5adfd9ca9cb206 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_110b/qwen1_5_110b_full_alpaca_e3.py @@ -0,0 +1,203 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + ThroughputHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-110B' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +# total batch = 32gpus * batch_size_per_device 1 * acc 1 = 32 +accumulative_counts = 1 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 4 +max_epochs = 3 +optim_type = AdamW +lr = 1e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + 
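+    # mmengine builds this dict lazily: `type` is the callable and the
+    # remaining keys are its kwargs, so nothing is downloaded at import time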
pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template), + dict(type=ThroughputHook) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
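+    # (DistSamplerSeedHook re-seeds the sampler every epoch so all ranks
+    # shuffle the data consistently under distributed training)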
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_110b/qwen1_5_110b_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_110b/qwen1_5_110b_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..747d0fe1732d407f8a4325f74131169bb766cf51 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_110b/qwen1_5_110b_qlora_alpaca_e3.py @@ -0,0 +1,223 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + ThroughputHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-110B' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 1 # total bs = 1 bs_per_device * 8 gpus * 1 acc = 8 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 4 +max_epochs = 3 +optim_type = AdamW +lr = 1e-4 # 110B model use smaller lr +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + 
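+    # pad on the right so supervised labels stay aligned with their inputs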
padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4', + bnb_4bit_quant_storage=torch.float16)), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict(type=ThroughputHook), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. 
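+    # (steps the LinearLR warmup over the first warmup_ratio * max_epochs =
+    # 0.03 * 3 = 0.09 epochs, then the cosine decay towards 0)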
+ param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_110b_chat/README.md b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_110b_chat/README.md new file mode 100644 index 0000000000000000000000000000000000000000..fc78ad510341cf1ba3330630a45b75eb860f8bb9 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_110b_chat/README.md @@ -0,0 +1,26 @@ +# Qwen 110B + +## Install + +```bash +# Install the latest xtuner +pip install -U 'xtuner[deepspeed]' + +# We recommend installing flash_attn +# pip install flash-attn + +# install the latest transformers +pip install -U transformers +``` + +## QLoRA Fine-tune + +Training Qwen 110B with 32k context capability requires only 2 * A100 80G. + +```bash +xtuner train xtuner/configs/qwen/qwen1_5/qwen1_5_110b_chat/qwen1_5_110b_chat_qlora_alpaca_e3_16k_2gpus.py --deepspeed deepspeed_zero3 +``` + +
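+After training, the saved `.pth` adapter can be converted to the Hugging Face
+format with xtuner's converter (the checkpoint path below is a placeholder):
+
+```bash
+xtuner convert pth_to_hf \
+    xtuner/configs/qwen/qwen1_5/qwen1_5_110b_chat/qwen1_5_110b_chat_qlora_alpaca_e3_16k_2gpus.py \
+    ${PTH_CHECKPOINT} ${SAVE_PATH}
+```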
diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_110b_chat/qwen1_5_110b_chat_full_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_110b_chat/qwen1_5_110b_chat_full_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..9e16cc04d136e0ecf1aaa7ab8d81e53bf2bfdc36 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_110b_chat/qwen1_5_110b_chat_full_alpaca_e3.py @@ -0,0 +1,203 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + ThroughputHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-110B-Chat' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +# total batch = 32gpus * batch_size_per_device 1 * acc 1 = 32 +accumulative_counts = 1 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 4 +max_epochs = 3 +optim_type = AdamW +lr = 1e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + 
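+    # packing concatenates several short Alpaca samples into a single
+    # max_length-token sequence, so batches carry little padding waste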
pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template), + dict(type=ThroughputHook) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_110b_chat/qwen1_5_110b_chat_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_110b_chat/qwen1_5_110b_chat_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..2abcf1d721931b0205830f0ada7d749f5b9f66a6 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_110b_chat/qwen1_5_110b_chat_qlora_alpaca_e3.py @@ -0,0 +1,223 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + ThroughputHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-110B-Chat' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 1 # total bs = 1 bs_per_device * 8 gpus * 1 acc = 8 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 1e-4 # 110B model use smaller lr +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + 
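+    # trust_remote_code lets transformers execute custom modeling code
+    # shipped with the Hub checkpoint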
trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4', + bnb_4bit_quant_storage=torch.float16)), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict(type=ThroughputHook), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. 
+ param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_110b_chat/qwen1_5_110b_chat_qlora_alpaca_e3_16k_2gpus.py b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_110b_chat/qwen1_5_110b_chat_qlora_alpaca_e3_16k_2gpus.py new file mode 100644 index 0000000000000000000000000000000000000000..ef8c7b6e61b1a4b7b6e426a9337a2dcaa4547e7c --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_110b_chat/qwen1_5_110b_chat_qlora_alpaca_e3_16k_2gpus.py @@ -0,0 +1,223 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + ThroughputHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-110B-Chat' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 16384 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 2 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 1 # total bs = 1 bs_per_device * 2 gpus * 1 acc = 2 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 1e-4 # 110B model use smaller lr +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 50 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + 
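+# With sequence_parallel_size = 2, every packed 16384-token sequence is
+# sharded across the two GPUs, roughly halving per-rank activation memory;
+# together with 4-bit QLoRA this is what fits a 16k context on 2 x A100 80G.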
+####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4', + bnb_4bit_quant_storage=torch.float16)), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict(type=ThroughputHook), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# 
configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=1), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False, window_size=1) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_14b/qwen1_5_14b_full_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_14b/qwen1_5_14b_full_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..ff77e391fd39d00eff36827344eea7d31e5276e8 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_14b/qwen1_5_14b_full_alpaca_e3.py @@ -0,0 +1,200 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-14B' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic 
spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_14b/qwen1_5_14b_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_14b/qwen1_5_14b_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..dc2acd8b271615a820d85ec1881578d0eb519b50 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_14b/qwen1_5_14b_qlora_alpaca_e3.py @@ -0,0 +1,220 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-14B' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + 
pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. 
+ param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_14b_chat/qwen1_5_14b_chat_full_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_14b_chat/qwen1_5_14b_chat_full_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..c217888b309bcdb9457d1d5c322de82d0adf3571 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_14b_chat/qwen1_5_14b_chat_full_alpaca_e3.py @@ -0,0 +1,200 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-14B-Chat' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + 
type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_14b_chat/qwen1_5_14b_chat_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_14b_chat/qwen1_5_14b_chat_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..36cff5aac69039d84b2fad2c1427d5e125f60e78 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_14b_chat/qwen1_5_14b_chat_qlora_alpaca_e3.py @@ -0,0 +1,220 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-14B-Chat' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + 
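+    # QLoRA recipe: the base LLM below is loaded with frozen 4-bit NF4 weights
+    # via BitsAndBytesConfig, and only the LoRA adapters declared in
+    # `lora=dict(...)` receive gradients.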
use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_1_8b/qwen1_5_1_8b_full_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_1_8b/qwen1_5_1_8b_full_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..4afdc0a7516ae3ef8f8830a011c02a2c013d1be0 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_1_8b/qwen1_5_1_8b_full_alpaca_e3.py @@ -0,0 +1,200 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-1.8B' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + 
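+    # right padding is the conventional choice for supervised fine-tuning
+    # batches; left padding only becomes important at generation time.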
padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_1_8b/qwen1_5_1_8b_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_1_8b/qwen1_5_1_8b_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..a4687d7ae96e7e91866d603de2878c9f4eef8cb7 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_1_8b/qwen1_5_1_8b_qlora_alpaca_e3.py @@ -0,0 +1,220 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-1.8B' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + 
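+    # only the LoRA adapters train here; with r=64 and lora_alpha=16 the
+    # standard peft scaling factor works out to lora_alpha / r = 0.25.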
llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_1_8b_chat/qwen1_5_1_8b_chat_full_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_1_8b_chat/qwen1_5_1_8b_chat_full_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..2ef12cb79eb2c1a34af81a1bb893297fb6093a69 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_1_8b_chat/qwen1_5_1_8b_chat_full_alpaca_e3.py @@ -0,0 +1,200 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-1.8B-Chat' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + 
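+    # trust_remote_code permits loading any custom tokenizer code shipped in
+    # the Hub repo; recent transformers releases also support Qwen1.5 natively.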
trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_1_8b_chat/qwen1_5_1_8b_chat_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_1_8b_chat/qwen1_5_1_8b_chat_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..804bbbf9672e07c8f271b27aa6492d7191c2c625 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_1_8b_chat/qwen1_5_1_8b_chat_qlora_alpaca_e3.py @@ -0,0 +1,220 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-1.8B-Chat' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + 
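+    # SupervisedFinetune wraps the llm (plus the optional LoRA config) and
+    # computes the causal-LM loss; prompt tokens are masked to -100 by the
+    # dataset pipeline, so the loss covers response tokens only.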
use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_4b/qwen1_5_4b_full_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_4b/qwen1_5_4b_full_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..32dea90ddc7d5456f6a92185dd7ffce57a515881 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_4b/qwen1_5_4b_full_alpaca_e3.py @@ -0,0 +1,200 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-4B' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + 
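+# A possible launch command for this full fine-tune (assumes the `xtuner` CLI
+# and DeepSpeed are installed; full-parameter training of a 4B model generally
+# needs ZeRO sharding to fit in GPU memory):
+#   NPROC_PER_NODE=8 xtuner train qwen1_5_4b_full_alpaca_e3.py --deepspeed deepspeed_zero2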
+model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_4b/qwen1_5_4b_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_4b/qwen1_5_4b_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..8f8b90229cb34df12c709e6038d4517e4a4ec9d0 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_4b/qwen1_5_4b_qlora_alpaca_e3.py @@ -0,0 +1,220 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-4B' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + 
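+        # base weights are quantized to 4-bit NF4 with fp16 compute and double
+        # quantization, per quantization_config below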
type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_4b/qwen1_5_4b_qlora_alpaca_e3_openmind.py b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_4b/qwen1_5_4b_qlora_alpaca_e3_openmind.py new file mode 100644 index 0000000000000000000000000000000000000000..b1446eb48eb712ecc50380cdbf1ae41cfa6f1251 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_4b/qwen1_5_4b_qlora_alpaca_e3_openmind.py @@ -0,0 +1,230 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE +from openmind_hub import snapshot_download +from openmind import OmDataset + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Tianjin_Ascend/Qwen1.5-4B' +model_resource = { + "fn": snapshot_download, + "args":{ + # "token":"xxxxxxxxxx" + } +} +use_varlen_attn = False + +# Data +alpaca_en_path = 'AI_Connect/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# 
PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + # NPU does not support quantization + # quantization_config=dict( + # type=BitsAndBytesConfig, + # load_in_4bit=True, + # load_in_8bit=False, + # llm_int8_threshold=6.0, + # llm_int8_has_fp16_weight=False, + # bnb_4bit_compute_dtype=torch.float16, + # bnb_4bit_use_double_quant=True, + # bnb_4bit_quant_type='nf4') + ), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=OmDataset.load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every 
iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_4b_chat/qwen1_5_4b_chat_full_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_4b_chat/qwen1_5_4b_chat_full_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..b959a1cd90ce08e2dd951050f6f04fb9a434d7dc --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_4b_chat/qwen1_5_4b_chat_full_alpaca_e3.py @@ -0,0 +1,200 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-4B-Chat' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + 
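+# Rough arithmetic for these settings: each optimizer step consumes
+# batch_size * accumulative_counts = 1 * 16 packed 2048-token sequences per
+# GPU (gradient accumulation is applied by AmpOptimWrapper in PART 4), so the
+# global batch scales linearly with the number of GPUs.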
+####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
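+    # note: with `by_epoch=False`, `interval` below is counted in
+    # training iterations, so a checkpoint is written every `save_steps`
+    # iterations and only the newest `save_total_limit` are kept.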
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_4b_chat/qwen1_5_4b_chat_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_4b_chat/qwen1_5_4b_chat_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..5fb502e35a523bf199fa24664ef0340388081e53 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_4b_chat/qwen1_5_4b_chat_qlora_alpaca_e3.py @@ -0,0 +1,220 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-4B-Chat' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + 
pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. 
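+    # ParamSchedulerHook advances the two schedulers defined in
+    # `param_scheduler` above: linear warmup for warmup_ratio * max_epochs
+    # = 0.03 * 3 = 0.09 epochs, then cosine decay towards eta_min=0.0.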
+ param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_72b/qwen1_5_72b_full_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_72b/qwen1_5_72b_full_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..84235486e8ad462f9ea230ee5c43e83f65bf67ce --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_72b/qwen1_5_72b_full_alpaca_e3.py @@ -0,0 +1,200 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-72B' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + 
pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
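+    # DistSamplerSeedHook calls `set_epoch` on the sampler at the start
+    # of each epoch so shuffling stays consistent across ranks.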
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_72b/qwen1_5_72b_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_72b/qwen1_5_72b_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..373db51871200c0569360836f29479542d996f91 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_72b/qwen1_5_72b_qlora_alpaca_e3.py @@ -0,0 +1,220 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-72B' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + 
type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
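+    # note: iterations are dataloader steps; with accumulative_counts=16
+    # an optimizer update happens every 16 of them, so save_steps=500
+    # corresponds to roughly 31 optimizer updates per checkpoint.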
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_72b_chat/qwen1_5_72b_chat_full_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_72b_chat/qwen1_5_72b_chat_full_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..1de7c92b4d2196d60fcc436bf11e51b039b6e661 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_72b_chat/qwen1_5_72b_chat_full_alpaca_e3.py @@ -0,0 +1,200 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-72B-Chat' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + 
trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
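+    # harmless on a single device; this hook only takes effect when the
+    # run is launched with a distributed backend.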
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_72b_chat/qwen1_5_72b_chat_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_72b_chat/qwen1_5_72b_chat_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..94786106d06959067a99fe09da1cc3c7a9ab43ca --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_72b_chat/qwen1_5_72b_chat_qlora_alpaca_e3.py @@ -0,0 +1,220 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-72B-Chat' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + 
use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
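+    # each kept checkpoint stores the full runner state (model weights
+    # plus optimizer state), so `max_keep_ckpts` also bounds disk usage.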
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_7b/qwen1_5_7b_full_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_7b/qwen1_5_7b_full_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..f4c7b1be34a27c30a808b6170988e6d9c6859534 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_7b/qwen1_5_7b_full_alpaca_e3.py @@ -0,0 +1,200 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-7B' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + 
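+# Note: a config like this is usually launched through the xtuner CLI,
+# for example (the flag is optional; see `xtuner train --help`):
+#
+#     xtuner train qwen1_5_7b_full_alpaca_e3.py --deepspeed deepspeed_zero2
+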
+model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_7b/qwen1_5_7b_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_7b/qwen1_5_7b_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..03cd6f6cbc3d67ef7d255ed37c128e72a592277e --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_7b/qwen1_5_7b_qlora_alpaca_e3.py @@ -0,0 +1,220 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-7B' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + 
type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
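+    # the saved .pth files are runner states; to export a HuggingFace-
+    # format LoRA adapter afterwards, use xtuner's converter, e.g.
+    # `xtuner convert pth_to_hf <config> <pth> <save_dir>`.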
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_7b_chat/qwen1_5_7b_chat_full_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_7b_chat/qwen1_5_7b_chat_full_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..62bf9ed31743081122230715ac516e6c82263526 --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_7b_chat/qwen1_5_7b_chat_full_alpaca_e3.py @@ -0,0 +1,200 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-7B-Chat' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + 
trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_7b_chat/qwen1_5_7b_chat_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_7b_chat/qwen1_5_7b_chat_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..5b42c8d70b2d8138327f9492a58a2c767186001b --- /dev/null +++ b/data/xtuner/xtuner/configs/qwen/qwen1_5/qwen1_5_7b_chat/qwen1_5_7b_chat_qlora_alpaca_e3.py @@ -0,0 +1,220 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'Qwen/Qwen1.5-7B-Chat' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.qwen_chat +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + 
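+    # when `use_varlen_attn` is True, packed samples attend only within their
+    # own original sequence; it is False here, so samples packed into one
+    # `max_length` block share a plain causal mask.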
use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
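+    # with `save_steps=500` and `save_total_limit=2`, a checkpoint is written
+    # every 500 iterations and only the two most recent are kept on disk.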
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/qwen_moe/qwen1_5/qwen1_5_moe_a2_7_b_chat/qwen1_5_moe_a2_7_b_chat_full_alpaca_e3.py b/data/xtuner/xtuner/configs/qwen_moe/qwen1_5/qwen1_5_moe_a2_7_b_chat/qwen1_5_moe_a2_7_b_chat_full_alpaca_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..6e8c2fb0060132f80cd01bd21d0b3ff25d8a22a1
--- /dev/null
+++ b/data/xtuner/xtuner/configs/qwen_moe/qwen1_5/qwen1_5_moe_a2_7_b_chat/qwen1_5_moe_a2_7_b_chat_full_alpaca_e3.py
@@ -0,0 +1,219 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from torch.optim import AdamW
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+from xtuner.dataset import ConcatDataset, process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn,
+                                    template_map_fn_factory)
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 ThroughputHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.parallel.sequence import SequenceParallelSampler
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'Qwen/Qwen1.5-MoE-A2.7B-Chat'
+use_varlen_attn = False
+
+# Data
+alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese'
+alpaca_en_path = 'tatsu-lab/alpaca'
+prompt_template = PROMPT_TEMPLATE.qwen_chat
+max_length = 32768
+pack_to_max_length = True
+
+# parallel
+sequence_parallel_size = 1
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 1
+accumulative_counts *= sequence_parallel_size
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-5
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 50
+SYSTEM = SYSTEM_TEMPLATE.alpaca
+evaluation_inputs = [
+    '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai'
+]
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True)) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=False, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=False, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template), + dict(type=ThroughputHook), +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time 
of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every iteration.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=1),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False, window_size=1)
diff --git a/data/xtuner/xtuner/configs/reward_model/internlm/internlm2_chat_1_8b_reward_full_ultrafeedback.py b/data/xtuner/xtuner/configs/reward_model/internlm/internlm2_chat_1_8b_reward_full_ultrafeedback.py
new file mode 100644
index 0000000000000000000000000000000000000000..ce48f5cda37a85b842604dbd0a28dbcf82000cde
--- /dev/null
+++ b/data/xtuner/xtuner/configs/reward_model/internlm/internlm2_chat_1_8b_reward_full_ultrafeedback.py
@@ -0,0 +1,184 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from torch.optim import AdamW
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+from xtuner.dataset.collate_fns.preference_collate_fn import \
+    preference_collate_fn
+from xtuner.dataset.preference_dataset import (build_preference_dataset,
+                                               orpo_dpo_mix_40k_map_fn)
+from xtuner.engine.hooks import VarlenAttnArgsToMessageHubHook
+from xtuner.engine.runner import TrainLoop
+from xtuner.model.reward import RewardModel
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = 'internlm/internlm2-chat-1_8b-sft'
+use_varlen_attn = False
+reward_token_id = 92527  # use [UNUSED_TOKEN_130] as reward token
+loss_type = 'focal'
+penalty_type = 'log_barrier'
+
+# Data
+max_length = 2048
+
+# Scheduler & Optimizer
+batch_size = 4  # per_device
+accumulative_counts = 16
+dataloader_num_workers = 0
+max_epochs = 1  # reward model should not be trained for more than 1 epoch to avoid overfitting  # noqa: E501
+optim_type = AdamW
+lr = 2e-5
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit = 2  # Maximum checkpoints to keep (-1 means unlimited)
+
+# Evaluate the generation performance during the training
+evaluation_freq = 500
+
+#######################################################################
+#                      PART 2  Model & Tokenizer                      #
+#######################################################################
+tokenizer = dict(
+    type=AutoTokenizer.from_pretrained,
+    pretrained_model_name_or_path=pretrained_model_name_or_path,
+    trust_remote_code=True,
+    padding_side='right')
+
+model = dict(
+    type=RewardModel,
+    use_varlen_attn=use_varlen_attn,
+    loss_type=loss_type,
+    penalty_type=penalty_type,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+train_dataset = dict(
+    type=build_preference_dataset,
+    dataset=dict(
+        type=load_dataset,
+        path='argilla/ultrafeedback-binarized-preferences-cleaned'),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=orpo_dpo_mix_40k_map_fn,
+    is_dpo=False,
+    is_reward=True,
+    reward_token_id=reward_token_id,
+    num_proc=32,
+    use_varlen_attn=use_varlen_attn,
+    shuffle_before_pack=True,
+)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=DefaultSampler, shuffle=True),
+    collate_fn=dict(
+        type=preference_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = []
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/reward_model/internlm/internlm2_chat_1_8b_reward_full_varlenattn_jsonl_dataset.py b/data/xtuner/xtuner/configs/reward_model/internlm/internlm2_chat_1_8b_reward_full_varlenattn_jsonl_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..fc10c318971e458531e4d0e05bb31d58757f848d --- /dev/null +++ b/data/xtuner/xtuner/configs/reward_model/internlm/internlm2_chat_1_8b_reward_full_varlenattn_jsonl_dataset.py @@ -0,0 +1,197 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset.collate_fns.preference_collate_fn import \ + preference_collate_fn +from xtuner.dataset.preference_dataset import (build_preference_dataset, + load_jsonl_dataset) +from xtuner.engine.hooks import VarlenAttnArgsToMessageHubHook +from xtuner.engine.runner import TrainLoop +from xtuner.model.reward import RewardModel +from xtuner.parallel.sequence import SequenceParallelSampler + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-chat-1_8b-sft' +use_varlen_attn = True +reward_token_id = 92527 # use [UNUSED_TOKEN_130] as reward token +loss_type = 'focal' +penalty_type = 'log_barrier' + +# Data +max_length = 2048 +max_packed_length = max_length * 2 + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 1 # reward model should not be trained for more than 1 epoch to avoid overfitting # noqa: E501 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +# TODO: eval +# evaluation_freq = 500 + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=RewardModel, + use_varlen_attn=use_varlen_attn, + loss_type=loss_type, + penalty_type=penalty_type, + llm=dict( + 
type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+sampler = SequenceParallelSampler \
+    if sequence_parallel_size > 1 else DefaultSampler
+
+train_dataset = dict(
+    type=build_preference_dataset,
+    dataset=dict(
+        type=load_jsonl_dataset,
+        data_files=[
+            '/your/jsonl/path/here.jsonl',
+            '/your/another/jsonl/path/here.jsonl'
+        ]),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=None,
+    is_dpo=False,
+    is_reward=True,
+    reward_token_id=reward_token_id,
+    num_proc=32,
+    use_varlen_attn=use_varlen_attn,
+    max_packed_length=max_packed_length,
+    shuffle_before_pack=True,
+)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=sampler, shuffle=True),
+    collate_fn=dict(
+        type=preference_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = []
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/reward_model/internlm/internlm2_chat_1_8b_reward_full_varlenattn_ultrafeedback.py b/data/xtuner/xtuner/configs/reward_model/internlm/internlm2_chat_1_8b_reward_full_varlenattn_ultrafeedback.py new file mode 100644 index 0000000000000000000000000000000000000000..b2c7ebed7fbca6e7803dbc689a7bab18937ad09f --- /dev/null +++ b/data/xtuner/xtuner/configs/reward_model/internlm/internlm2_chat_1_8b_reward_full_varlenattn_ultrafeedback.py @@ -0,0 +1,195 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset.collate_fns.preference_collate_fn import \ + preference_collate_fn +from xtuner.dataset.preference_dataset import (build_preference_dataset, + orpo_dpo_mix_40k_map_fn) +from xtuner.engine.hooks import VarlenAttnArgsToMessageHubHook +from xtuner.engine.runner import TrainLoop +from xtuner.model.reward import RewardModel +from xtuner.parallel.sequence import SequenceParallelSampler + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-chat-1_8b-sft' +use_varlen_attn = True +reward_token_id = 92527 # use [UNUSED_TOKEN_130] as reward token +loss_type = 'focal' +penalty_type = 'log_barrier' + +# Data +max_length = 2048 +max_packed_length = max_length * 2 + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 1 # reward model should not be trained for more than 1 epoch to avoid overfitting # noqa: E501 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +# TODO: eval +# evaluation_freq = 500 + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=RewardModel, + use_varlen_attn=use_varlen_attn, + loss_type=loss_type, + penalty_type=penalty_type, + 
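+    # 'focal' re-weights the ranking loss toward hard preference pairs and
+    # 'log_barrier' penalises large reward magnitudes (the InternLM2
+    # reward-model recipe).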
llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+sampler = SequenceParallelSampler \
+    if sequence_parallel_size > 1 else DefaultSampler
+
+train_dataset = dict(
+    type=build_preference_dataset,
+    dataset=dict(
+        type=load_dataset,
+        path='argilla/ultrafeedback-binarized-preferences-cleaned'),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=orpo_dpo_mix_40k_map_fn,
+    is_dpo=False,
+    is_reward=True,
+    reward_token_id=reward_token_id,
+    num_proc=32,
+    use_varlen_attn=use_varlen_attn,
+    max_packed_length=max_packed_length,
+    shuffle_before_pack=True,
+)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=sampler, shuffle=True),
+    collate_fn=dict(
+        type=preference_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = []
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/reward_model/internlm/internlm2_chat_1_8b_reward_qlora_varlenattn_ultrafeedback.py b/data/xtuner/xtuner/configs/reward_model/internlm/internlm2_chat_1_8b_reward_qlora_varlenattn_ultrafeedback.py new file mode 100644 index 0000000000000000000000000000000000000000..ffcf30cefd1cf270bae5317adc8dc3d6e5edbf88 --- /dev/null +++ b/data/xtuner/xtuner/configs/reward_model/internlm/internlm2_chat_1_8b_reward_qlora_varlenattn_ultrafeedback.py @@ -0,0 +1,215 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset.collate_fns.preference_collate_fn import \ + preference_collate_fn +from xtuner.dataset.preference_dataset import (build_preference_dataset, + orpo_dpo_mix_40k_map_fn) +from xtuner.engine.hooks import VarlenAttnArgsToMessageHubHook +from xtuner.engine.runner import TrainLoop +from xtuner.model.reward import RewardModel +from xtuner.parallel.sequence import SequenceParallelSampler + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'internlm/internlm2-chat-1_8b-sft' +use_varlen_attn = True +reward_token_id = 92527 # use [UNUSED_TOKEN_130] as reward token +loss_type = 'focal' +penalty_type = 'log_barrier' + +# Data +max_length = 2048 +max_packed_length = max_length * 2 + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 1 # reward model should not be trained for more than 1 epoch to avoid overfitting # noqa: E501 +optim_type = AdamW +lr = 1e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +# TODO: eval +# evaluation_freq = 500 + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=RewardModel, + 
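+    # QLoRA variant: the backbone below is loaded in 4-bit NF4 and kept
+    # frozen; only the LoRA adapters are trained.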
use_varlen_attn=use_varlen_attn,
+    loss_type=loss_type,
+    penalty_type=penalty_type,
+    llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True,
+        torch_dtype=torch.float16,
+        quantization_config=dict(
+            type=BitsAndBytesConfig,
+            load_in_4bit=True,
+            load_in_8bit=False,
+            llm_int8_threshold=6.0,
+            llm_int8_has_fp16_weight=False,
+            bnb_4bit_compute_dtype=torch.float16,
+            bnb_4bit_use_double_quant=True,
+            bnb_4bit_quant_type='nf4')),
+    lora=dict(
+        type=LoraConfig,
+        r=64,
+        lora_alpha=16,
+        lora_dropout=0.1,
+        bias='none',
+        task_type='FEATURE_EXTRACTION'))  # this setting is important
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+sampler = SequenceParallelSampler \
+    if sequence_parallel_size > 1 else DefaultSampler
+
+train_dataset = dict(
+    type=build_preference_dataset,
+    dataset=dict(
+        type=load_dataset,
+        path='argilla/ultrafeedback-binarized-preferences-cleaned'),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=orpo_dpo_mix_40k_map_fn,
+    is_dpo=False,
+    is_reward=True,
+    reward_token_id=reward_token_id,
+    num_proc=32,
+    use_varlen_attn=use_varlen_attn,
+    max_packed_length=max_packed_length,
+    shuffle_before_pack=True,
+)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=sampler, shuffle=True),
+    collate_fn=dict(
+        type=preference_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = []
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/reward_model/llama/llama3_8b_instruct_reward_full_varlenattn_ultrafeedback.py b/data/xtuner/xtuner/configs/reward_model/llama/llama3_8b_instruct_reward_full_varlenattn_ultrafeedback.py new file mode 100644 index 0000000000000000000000000000000000000000..57d822a058ee9dea258ef9fe22457dece13e7f61 --- /dev/null +++ b/data/xtuner/xtuner/configs/reward_model/llama/llama3_8b_instruct_reward_full_varlenattn_ultrafeedback.py @@ -0,0 +1,195 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from torch.optim import AdamW +from transformers import AutoModelForCausalLM, AutoTokenizer + +from xtuner.dataset.collate_fns.preference_collate_fn import \ + preference_collate_fn +from xtuner.dataset.preference_dataset import (build_preference_dataset, + orpo_dpo_mix_40k_map_fn) +from xtuner.engine.hooks import VarlenAttnArgsToMessageHubHook +from xtuner.engine.runner import TrainLoop +from xtuner.model.reward import RewardModel +from xtuner.parallel.sequence import SequenceParallelSampler + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'meta-llama/Meta-Llama-3-8B-Instruct' +use_varlen_attn = True +reward_token_id = 128002 # use <|reserved_special_token_0|> as reward token +loss_type = 'focal' +penalty_type = 'log_barrier' + +# Data +max_length = 2048 +max_packed_length = max_length * 2 + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 1 # reward model should not be trained for more than 1 epoch to avoid overfitting # noqa: E501 +optim_type = AdamW +lr = 2e-5 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +# TODO: eval +# evaluation_freq = 500 + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=RewardModel, + use_varlen_attn=use_varlen_attn, + loss_type=loss_type, + penalty_type=penalty_type, + 
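+    # full-parameter training of the Llama-3-8B-Instruct backbone; unlike the
+    # QLoRA reward config above, no quantisation or adapters are used.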
llm=dict(
+        type=AutoModelForCausalLM.from_pretrained,
+        pretrained_model_name_or_path=pretrained_model_name_or_path,
+        trust_remote_code=True))
+
+#######################################################################
+#                      PART 3  Dataset & Dataloader                   #
+#######################################################################
+sampler = SequenceParallelSampler \
+    if sequence_parallel_size > 1 else DefaultSampler
+
+train_dataset = dict(
+    type=build_preference_dataset,
+    dataset=dict(
+        type=load_dataset,
+        path='argilla/ultrafeedback-binarized-preferences-cleaned'),
+    tokenizer=tokenizer,
+    max_length=max_length,
+    dataset_map_fn=orpo_dpo_mix_40k_map_fn,
+    is_dpo=False,
+    is_reward=True,
+    reward_token_id=reward_token_id,
+    num_proc=32,
+    use_varlen_attn=use_varlen_attn,
+    max_packed_length=max_packed_length,
+    shuffle_before_pack=True,
+)
+
+train_dataloader = dict(
+    batch_size=batch_size,
+    num_workers=dataloader_num_workers,
+    dataset=train_dataset,
+    sampler=dict(type=sampler, shuffle=True),
+    collate_fn=dict(
+        type=preference_collate_fn, use_varlen_attn=use_varlen_attn))
+
+#######################################################################
+#                    PART 4  Scheduler & Optimizer                    #
+#######################################################################
+# optimizer
+optim_wrapper = dict(
+    type=AmpOptimWrapper,
+    optimizer=dict(
+        type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay),
+    clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False),
+    accumulative_counts=accumulative_counts,
+    loss_scale='dynamic',
+    dtype='float16')
+
+# learning policy
+# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md  # noqa: E501
+param_scheduler = [
+    dict(
+        type=LinearLR,
+        start_factor=1e-5,
+        by_epoch=True,
+        begin=0,
+        end=warmup_ratio * max_epochs,
+        convert_to_iter_based=True),
+    dict(
+        type=CosineAnnealingLR,
+        eta_min=0.0,
+        by_epoch=True,
+        begin=warmup_ratio * max_epochs,
+        end=max_epochs,
+        convert_to_iter_based=True)
+]
+
+# train, val, test setting
+train_cfg = dict(type=TrainLoop, max_epochs=max_epochs)
+
+#######################################################################
+#                           PART 5  Runtime                           #
+#######################################################################
+# Log the dialogue periodically during the training process, optional
+custom_hooks = []
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/starcoder/starcoder_qlora_stack_exchange_example.py b/data/xtuner/xtuner/configs/starcoder/starcoder_qlora_stack_exchange_example.py new file mode 100644 index 0000000000000000000000000000000000000000..688fabb30be8063d27627f881490f769b4cc07e6 --- /dev/null +++ b/data/xtuner/xtuner/configs/starcoder/starcoder_qlora_stack_exchange_example.py @@ -0,0 +1,221 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (stack_exchange_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.utils import PROMPT_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'bigcode/starcoder' +use_varlen_attn = False + +# Data +data_path = 'ArmelR/stack-exchange-instruction' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +# randomly select 20000 samples from the original dataset +max_dataset_length = 20000 +pack_to_max_length = True + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 # 1bs * 16acc * 1gpu = 16 batchsize +dataloader_num_workers = 0 +max_epochs = 1 +optim_type = AdamW +lr = 1e-4 +betas = (0.9, 0.999) +weight_decay = 0.05 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 200 +SYSTEM = '' +evaluation_inputs = [ + 'from typing import List def has_close_elements(numbers: List[float], threshold: float) -> bool: """ Check if in given list of numbers, are any two numbers closer to each other than given threshold. 
>>> has_close_elements([1.0, 2.0, 3.0], 0.5) False >>> has_close_elements([1.0, 2.8, 3.0, 4.0, 5.0, 2.0], 0.3) True """' # noqa: E501 +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=16, + lora_alpha=32, + lora_dropout=0.05, + bias='none', + target_modules=['c_proj', 'c_attn', 'q_attn'], + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +train_dataset = dict( + type=process_hf_dataset, + dataset=dict( + type=load_dataset, + path=data_path, + data_dir='data/finetune', + split='train'), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=stack_exchange_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + max_dataset_length=max_dataset_length, + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=DefaultSampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + 
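+        # generates a completion for the HumanEval-style prompt above every
+        # `evaluation_freq` (200) iterations as a quick qualitative check.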
system=SYSTEM,
+        prompt_template=prompt_template)
+]
+
+if use_varlen_attn:
+    custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)]
+
+# configure default hooks
+default_hooks = dict(
+    # record the time of every iteration.
+    timer=dict(type=IterTimerHook),
+    # print log every 10 iterations.
+    logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10),
+    # enable the parameter scheduler.
+    param_scheduler=dict(type=ParamSchedulerHook),
+    # save checkpoint per `save_steps`.
+    checkpoint=dict(
+        type=CheckpointHook,
+        by_epoch=False,
+        interval=save_steps,
+        max_keep_ckpts=save_total_limit),
+    # set sampler seed in distributed environment.
+    sampler_seed=dict(type=DistSamplerSeedHook),
+)
+
+# configure environment
+env_cfg = dict(
+    # whether to enable cudnn benchmark
+    cudnn_benchmark=False,
+    # set multi process parameters
+    mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
+    # set distributed parameters
+    dist_cfg=dict(backend='nccl'),
+)
+
+# set visualizer
+visualizer = None
+
+# set log level
+log_level = 'INFO'
+
+# load from which checkpoint
+load_from = None
+
+# whether to resume training from the loaded checkpoint
+resume = False
+
+# Defaults to use random seed and disable `deterministic`
+randomness = dict(seed=None, deterministic=False)
+
+# set log processor
+log_processor = dict(by_epoch=False)
diff --git a/data/xtuner/xtuner/configs/yi/yi_34b/yi_34b_qlora_alpaca_enzh_e3.py b/data/xtuner/xtuner/configs/yi/yi_34b/yi_34b_qlora_alpaca_enzh_e3.py
new file mode 100644
index 0000000000000000000000000000000000000000..960980e2eb948fd9b960ee208107a01404f528f0
--- /dev/null
+++ b/data/xtuner/xtuner/configs/yi/yi_34b/yi_34b_qlora_alpaca_enzh_e3.py
@@ -0,0 +1,236 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+from datasets import load_dataset
+from mmengine.dataset import DefaultSampler
+from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
+                            LoggerHook, ParamSchedulerHook)
+from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
+from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
+from xtuner.dataset import ConcatDataset, process_hf_dataset
+from xtuner.dataset.collate_fns import default_collate_fn
+from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn,
+                                    template_map_fn_factory)
+from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook,
+                                 VarlenAttnArgsToMessageHubHook)
+from xtuner.engine.runner import TrainLoop
+from xtuner.model import SupervisedFinetune
+from xtuner.parallel.sequence import SequenceParallelSampler
+from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE
+
+#######################################################################
+#                          PART 1  Settings                           #
+#######################################################################
+# Model
+pretrained_model_name_or_path = '01-ai/Yi-34B'
+use_varlen_attn = False
+
+# Data
+alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese'
+alpaca_en_path = 'tatsu-lab/alpaca'
+prompt_template = PROMPT_TEMPLATE.default
+max_length = 2048
+pack_to_max_length = True
+
+# parallel
+sequence_parallel_size = 1
+
+# Scheduler & Optimizer
+batch_size = 1  # per_device
+accumulative_counts = 16
+accumulative_counts *= sequence_parallel_size
+dataloader_num_workers = 0
+max_epochs = 3
+optim_type = AdamW
+lr = 2e-4
+betas = (0.9, 0.999)
+weight_decay = 0
+max_norm = 1  # grad clip
+warmup_ratio = 0.03
+
+# Save
+save_steps = 500
+save_total_limit =
2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + 
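+ # NOTE: with warmup_ratio=0.03 and max_epochs=3 as set above, the LinearLR + # phase warms the lr up over the first 0.09 epochs, and this cosine phase + # then decays it from lr=2e-4 to eta_min=0.0 by epoch 3; both epoch ranges + # are converted to iterations below.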
convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/yi/yi_6b/yi_6b_qlora_alpaca_enzh_e3.py b/data/xtuner/xtuner/configs/yi/yi_6b/yi_6b_qlora_alpaca_enzh_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..897c80b3aaf6a4ed73bc9360b2743ec106232ea5 --- /dev/null +++ b/data/xtuner/xtuner/configs/yi/yi_6b/yi_6b_qlora_alpaca_enzh_e3.py @@ -0,0 +1,236 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
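+# NOTE: this config mirrors yi_34b_qlora_alpaca_enzh_e3.py above, with the +# base model swapped for '01-ai/Yi-6B'. A hedged launch sketch (assuming the +# standard `xtuner train` CLI; flags can differ across versions): +# +# xtuner train yi_6b_qlora_alpaca_enzh_e3 --deepspeed deepspeed_zero2 +# +# The effective batch size is batch_size * accumulative_counts * num_gpus +# (1 * 16 * num_gpus with the settings below).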
+import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import ConcatDataset, process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import (alpaca_map_fn, alpaca_zh_map_fn, + template_map_fn_factory) +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = '01-ai/Yi-6B' +use_varlen_attn = False + +# Data +alpaca_zh_path = 'silk-road/alpaca-data-gpt4-chinese' +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.default +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + 
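+ # NOTE: with pack_to_max_length=True below, the shuffled samples are + # concatenated into fixed blocks of max_length (2048) tokens, so very + # little compute is spent on padding.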
pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +alpaca_zh = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_zh_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_zh_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +train_dataset = dict(type=ConcatDataset, datasets=[alpaca_en, alpaca_zh]) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=train_dataset, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. + checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. 
+ sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/configs/zephyr/zephyr_7b_beta_qlora_alpaca_e3.py b/data/xtuner/xtuner/configs/zephyr/zephyr_7b_beta_qlora_alpaca_e3.py new file mode 100644 index 0000000000000000000000000000000000000000..ffb46151890a2ef9f7146ae8e8d482ed5984d54f --- /dev/null +++ b/data/xtuner/xtuner/configs/zephyr/zephyr_7b_beta_qlora_alpaca_e3.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from datasets import load_dataset +from mmengine.dataset import DefaultSampler +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR +from peft import LoraConfig +from torch.optim import AdamW +from transformers import (AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig) + +from xtuner.dataset import process_hf_dataset +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.dataset.map_fns import alpaca_map_fn, template_map_fn_factory +from xtuner.engine.hooks import (DatasetInfoHook, EvaluateChatHook, + VarlenAttnArgsToMessageHubHook) +from xtuner.engine.runner import TrainLoop +from xtuner.model import SupervisedFinetune +from xtuner.parallel.sequence import SequenceParallelSampler +from xtuner.utils import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +####################################################################### +# PART 1 Settings # +####################################################################### +# Model +pretrained_model_name_or_path = 'HuggingFaceH4/zephyr-7b-beta' +use_varlen_attn = False + +# Data +alpaca_en_path = 'tatsu-lab/alpaca' +prompt_template = PROMPT_TEMPLATE.zephyr +max_length = 2048 +pack_to_max_length = True + +# parallel +sequence_parallel_size = 1 + +# Scheduler & Optimizer +batch_size = 1 # per_device +accumulative_counts = 16 +accumulative_counts *= sequence_parallel_size +dataloader_num_workers = 0 +max_epochs = 3 +optim_type = AdamW +lr = 2e-4 +betas = (0.9, 0.999) +weight_decay = 0 +max_norm = 1 # grad clip +warmup_ratio = 0.03 + +# Save +save_steps = 500 +save_total_limit = 2 # Maximum checkpoints to keep (-1 means unlimited) + +# Evaluate the generation performance during the training +evaluation_freq = 500 +SYSTEM = SYSTEM_TEMPLATE.alpaca +evaluation_inputs = [ + '请给我介绍五个上海的景点', 'Please tell me five scenic spots in Shanghai' +] + +####################################################################### +# PART 2 Model & Tokenizer # +####################################################################### +tokenizer = dict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + padding_side='right') + +model = dict( + type=SupervisedFinetune, + use_varlen_attn=use_varlen_attn, + llm=dict( + 
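+ # NOTE: QLoRA: the frozen base model is loaded in 4-bit NF4 with double + # quantization (see quantization_config below), and only the fp16 LoRA + # adapters defined after it are trained.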
type=AutoModelForCausalLM.from_pretrained, + pretrained_model_name_or_path=pretrained_model_name_or_path, + trust_remote_code=True, + torch_dtype=torch.float16, + quantization_config=dict( + type=BitsAndBytesConfig, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4')), + lora=dict( + type=LoraConfig, + r=64, + lora_alpha=16, + lora_dropout=0.1, + bias='none', + task_type='CAUSAL_LM')) + +####################################################################### +# PART 3 Dataset & Dataloader # +####################################################################### +alpaca_en = dict( + type=process_hf_dataset, + dataset=dict(type=load_dataset, path=alpaca_en_path), + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=alpaca_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=prompt_template), + remove_unused_columns=True, + shuffle_before_pack=True, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn) + +sampler = SequenceParallelSampler \ + if sequence_parallel_size > 1 else DefaultSampler +train_dataloader = dict( + batch_size=batch_size, + num_workers=dataloader_num_workers, + dataset=alpaca_en, + sampler=dict(type=sampler, shuffle=True), + collate_fn=dict(type=default_collate_fn, use_varlen_attn=use_varlen_attn)) + +####################################################################### +# PART 4 Scheduler & Optimizer # +####################################################################### +# optimizer +optim_wrapper = dict( + type=AmpOptimWrapper, + optimizer=dict( + type=optim_type, lr=lr, betas=betas, weight_decay=weight_decay), + clip_grad=dict(max_norm=max_norm, error_if_nonfinite=False), + accumulative_counts=accumulative_counts, + loss_scale='dynamic', + dtype='float16') + +# learning policy +# More information: https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/param_scheduler.md # noqa: E501 +param_scheduler = [ + dict( + type=LinearLR, + start_factor=1e-5, + by_epoch=True, + begin=0, + end=warmup_ratio * max_epochs, + convert_to_iter_based=True), + dict( + type=CosineAnnealingLR, + eta_min=0.0, + by_epoch=True, + begin=warmup_ratio * max_epochs, + end=max_epochs, + convert_to_iter_based=True) +] + +# train, val, test setting +train_cfg = dict(type=TrainLoop, max_epochs=max_epochs) + +####################################################################### +# PART 5 Runtime # +####################################################################### +# Log the dialogue periodically during the training process, optional +custom_hooks = [ + dict(type=DatasetInfoHook, tokenizer=tokenizer), + dict( + type=EvaluateChatHook, + tokenizer=tokenizer, + every_n_iters=evaluation_freq, + evaluation_inputs=evaluation_inputs, + system=SYSTEM, + prompt_template=prompt_template) +] + +if use_varlen_attn: + custom_hooks += [dict(type=VarlenAttnArgsToMessageHubHook)] + +# configure default hooks +default_hooks = dict( + # record the time of every iteration. + timer=dict(type=IterTimerHook), + # print log every 10 iterations. + logger=dict(type=LoggerHook, log_metric_by_epoch=False, interval=10), + # enable the parameter scheduler. + param_scheduler=dict(type=ParamSchedulerHook), + # save checkpoint per `save_steps`. 
+ checkpoint=dict( + type=CheckpointHook, + by_epoch=False, + interval=save_steps, + max_keep_ckpts=save_total_limit), + # set sampler seed in distributed evrionment. + sampler_seed=dict(type=DistSamplerSeedHook), +) + +# configure environment +env_cfg = dict( + # whether to enable cudnn benchmark + cudnn_benchmark=False, + # set multi process parameters + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + # set distributed parameters + dist_cfg=dict(backend='nccl'), +) + +# set visualizer +visualizer = None + +# set log level +log_level = 'INFO' + +# load from which checkpoint +load_from = None + +# whether to resume training from the loaded checkpoint +resume = False + +# Defaults to use random seed and disable `deterministic` +randomness = dict(seed=None, deterministic=False) + +# set log processor +log_processor = dict(by_epoch=False) diff --git a/data/xtuner/xtuner/dataset/__init__.py b/data/xtuner/xtuner/dataset/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8f679a8cd937f0483f6fd8c65fc03392e2ef665d --- /dev/null +++ b/data/xtuner/xtuner/dataset/__init__.py @@ -0,0 +1,29 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import warnings + +from .concat_dataset import ConcatDataset +from .huggingface import process_hf_dataset +from .intern_repo import (build_packed_dataset, + load_intern_repo_tokenized_dataset, + load_intern_repo_untokenized_dataset) +from .internvl_dataset import InternVL_V1_5_Dataset +from .json_dataset import load_json_file +from .llava import LLaVADataset +from .modelscope import process_ms_dataset +from .moss_sft import MOSSSFTDataset +from .refcoco_json import (InvRefCOCOJsonDataset, RefCOCOJsonDataset, + RefCOCOJsonEvalDataset) +from .utils import decode_base64_to_image, expand2square, load_image + +# ignore FutureWarning in hf datasets +warnings.simplefilter(action='ignore', category=FutureWarning) + +__all__ = [ + 'process_hf_dataset', 'ConcatDataset', 'MOSSSFTDataset', + 'process_ms_dataset', 'LLaVADataset', 'expand2square', + 'decode_base64_to_image', 'load_image', + 'load_intern_repo_tokenized_dataset', + 'load_intern_repo_untokenized_dataset', 'build_packed_dataset', + 'RefCOCOJsonDataset', 'RefCOCOJsonEvalDataset', 'InvRefCOCOJsonDataset', + 'load_json_file', 'InternVL_V1_5_Dataset' +] diff --git a/data/xtuner/xtuner/dataset/collate_fns/__init__.py b/data/xtuner/xtuner/dataset/collate_fns/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..96652b2599c75353faad7d54b11622f7ccee7eb3 --- /dev/null +++ b/data/xtuner/xtuner/dataset/collate_fns/__init__.py @@ -0,0 +1,5 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .default_collate_fn import default_collate_fn +from .mmlu_collate_fn import mmlu_collate_fn + +__all__ = ['default_collate_fn', 'mmlu_collate_fn'] diff --git a/data/xtuner/xtuner/dataset/collate_fns/default_collate_fn.py b/data/xtuner/xtuner/dataset/collate_fns/default_collate_fn.py new file mode 100644 index 0000000000000000000000000000000000000000..3d9fe18fb166c5849ae9d1d658f516c4e4b0590c --- /dev/null +++ b/data/xtuner/xtuner/dataset/collate_fns/default_collate_fn.py @@ -0,0 +1,99 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
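+# NOTE: a minimal usage sketch (hedged; the two sample dicts are invented, +# and no sequence parallelism is assumed to be initialized): +# +# batch = [ +# {'input_ids': [1, 2, 3], 'labels': [-100, 2, 3]}, +# {'input_ids': [4, 5], 'labels': [-100, 5]}, +# ] +# out = default_collate_fn(batch) +# out['data']['input_ids'].shape # torch.Size([2, 3]), padded +# out['data']['attention_mask'] # True for real tokens, False for pad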
+from typing import Dict, Sequence + +import torch +from torch.nn.utils.rnn import pad_sequence + +from xtuner.parallel.sequence import (get_sequence_parallel_world_size, + pad_for_sequence_parallel) +from xtuner.utils import DEFAULT_PAD_TOKEN_INDEX, IGNORE_INDEX + + +def default_collate_fn(instances: Sequence[Dict], + pad_index: int = DEFAULT_PAD_TOKEN_INDEX, + return_hf_format: bool = False, + use_varlen_attn: bool = False): + seq_parallel_world_size = get_sequence_parallel_world_size() + + input_ids, labels = [], [] + has_image = any(inst.get('pixel_values') is not None for inst in instances) + if use_varlen_attn: + position_ids, cumulative_len = [], [] + assert len(instances) == 1, ( + f'If utilizing varlen attention, the batch size should be' + f' set to 1, but got {len(instances)}') + assert not has_image, ('Currently, it is not configured to ' + 'accommodate the use of varlen Attention in multimodal training') + + if has_image: + pixel_values = [] + + for example in instances: + input_ids.append(torch.LongTensor(example['input_ids'])) + labels.append(torch.LongTensor(example['labels'])) + if use_varlen_attn: + cumulative_len.append(torch.IntTensor(example['cumulative_len'])) + position_ids.append(torch.LongTensor(example['position_ids'])) + + if has_image: + pixel_values.append(example['pixel_values']) + + ori_length = [len(ids) for ids in input_ids] + if len(instances) > 1: + input_ids = pad_sequence( + input_ids, batch_first=True, padding_value=pad_index) + labels = pad_sequence( + labels, batch_first=True, padding_value=IGNORE_INDEX) + else: + input_ids = torch.stack(input_ids) + labels = torch.stack(labels) + + if use_varlen_attn: + assert input_ids.size(1) % seq_parallel_world_size == 0 + attention_mask = None + position_ids = torch.stack(position_ids, dim=0) + else: + # Some tokenizers have the same eos token and pad token, so input_ids + # cannot be masked directly based on the pad token id. + attention_mask = torch.zeros_like(input_ids).bool() + for i, length in enumerate(ori_length): + attention_mask[i, :length] = True + + bs, seq_len = input_ids.shape + position_ids = torch.arange(seq_len).unsqueeze(0).long().repeat(bs, 1) + + if seq_parallel_world_size > 1: + input_ids = pad_for_sequence_parallel(input_ids, pad_index) + labels = pad_for_sequence_parallel(labels, IGNORE_INDEX) + position_ids = pad_for_sequence_parallel(position_ids, 0) + if attention_mask is not None: + attention_mask = pad_for_sequence_parallel(attention_mask, 0) + + if use_varlen_attn: + max_seqlen = ( + cumulative_len[0][1:] - # noqa: W504 + cumulative_len[0][:-1]).max().item() + data_dict = { + 'input_ids': input_ids, + 'cumulative_len': cumulative_len, + 'position_ids': position_ids, + 'labels': labels, + 'max_seqlen': max_seqlen + } + else: + data_dict = { + 'input_ids': input_ids, + 'attention_mask': attention_mask, + 'position_ids': position_ids, + 'labels': labels + } + + if has_image: + if all(x.shape == pixel_values[0].shape for x in pixel_values): + pixel_values = torch.stack(pixel_values, dim=0) + data_dict['pixel_values'] = pixel_values + + if return_hf_format: + return data_dict + else: + return {'data': data_dict, 'data_samples': None} diff --git a/data/xtuner/xtuner/dataset/collate_fns/mmlu_collate_fn.py b/data/xtuner/xtuner/dataset/collate_fns/mmlu_collate_fn.py new file mode 100644 index 0000000000000000000000000000000000000000..5c0e2a9894f897cbe7ed80680b15b364e767a33c --- /dev/null +++ b/data/xtuner/xtuner/dataset/collate_fns/mmlu_collate_fn.py @@ -0,0 +1,39 @@ +# Copyright (c) OpenMMLab.
All rights reserved. +from typing import Dict, Sequence + +import torch +from torch.nn.utils.rnn import pad_sequence + +from xtuner.utils import DEFAULT_PAD_TOKEN_INDEX, IGNORE_INDEX + + +def mmlu_collate_fn(instances: Sequence[Dict], + pad_index: int = DEFAULT_PAD_TOKEN_INDEX, + return_hf_format: bool = False) -> Dict[str, torch.Tensor]: + input_ids = [] + labels = [] + data_samples = {'labels': [], 'subjects': []} + for example in instances: + input_ids.append(torch.tensor(example['input_ids'])) + labels.append(torch.tensor(example['labels'])) + data_samples['labels'].append(example['output']) + data_samples['subjects'].append(example['subject']) + if len(instances) > 1: + input_ids = pad_sequence( + input_ids, batch_first=True, padding_value=pad_index) + labels = pad_sequence( + labels, batch_first=True, padding_value=IGNORE_INDEX) + else: + input_ids = torch.stack(input_ids) + labels = torch.stack(labels) + + data_dict = { + 'input_ids': input_ids, + 'attention_mask': input_ids.ne(pad_index), + 'labels': labels + } + + if return_hf_format: + return data_dict + else: + return {'data': data_dict, 'data_samples': data_samples} diff --git a/data/xtuner/xtuner/dataset/collate_fns/preference_collate_fn.py b/data/xtuner/xtuner/dataset/collate_fns/preference_collate_fn.py new file mode 100644 index 0000000000000000000000000000000000000000..4b6a7f5c3eacdeb97b402ad340f3d67a6d7fbccb --- /dev/null +++ b/data/xtuner/xtuner/dataset/collate_fns/preference_collate_fn.py @@ -0,0 +1,109 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Dict, Sequence + +import torch +from torch.nn.utils.rnn import pad_sequence + +from xtuner.parallel.sequence import (get_sequence_parallel_world_size, + pad_cumulative_len_for_sequence_parallel, + pad_for_sequence_parallel) +from xtuner.utils import DEFAULT_PAD_TOKEN_INDEX, IGNORE_INDEX + + +def preference_collate_fn(instances: Sequence[Dict], + pad_index: int = DEFAULT_PAD_TOKEN_INDEX, + return_hf_format: bool = False, + use_varlen_attn: bool = False): + seq_parallel_world_size = get_sequence_parallel_world_size() + ds_names = [] + if not use_varlen_attn: + # split chosen and rejected into two instances + splited_instances = [] + for d in instances: + splited_instances.append({ + 'input_ids': d['chosen_ids'], + 'labels': d['chosen_labels'] + }) + splited_instances.append({ + 'input_ids': d['rejected_ids'], + 'labels': d['rejected_labels'] + }) + ds_names.append(d.get('ds_name', None)) + instances = splited_instances + + input_ids, labels = [], [] + if use_varlen_attn: + position_ids, cumulative_len = [], [] + assert len(instances) == 1, ( + f'If utilizing varlen attention, the batch size should be' + f' set to 1, but got {len(instances)}') + + for example in instances: + input_ids.append(torch.LongTensor(example['input_ids'])) + labels.append(torch.LongTensor(example['labels'])) + if use_varlen_attn: + cumulative_len.append(torch.IntTensor(example['cumulative_len'])) + position_ids.append(torch.LongTensor(example['position_ids'])) + num_samples = (len(example['cumulative_len']) - 1) // 2 + ds_names.extend(example.get('ds_names', [None] * num_samples)) + + ori_length = [len(ids) for ids in input_ids] + if len(instances) > 1: + input_ids = pad_sequence( + input_ids, batch_first=True, padding_value=pad_index) + labels = pad_sequence( + labels, batch_first=True, padding_value=IGNORE_INDEX) + else: + input_ids = torch.stack(input_ids) + labels = torch.stack(labels) + + if use_varlen_attn: + attention_mask = None + position_ids = torch.stack(position_ids, 
dim=0) + else: + # Some tokenizers have the same eos token and pad token, so input_ids + # cannot be masked directly based on the pad token id. + attention_mask = torch.zeros_like(input_ids).bool() + for i, length in enumerate(ori_length): + attention_mask[i, :length] = True + + bs, seq_len = input_ids.shape + position_ids = torch.arange(seq_len).unsqueeze(0).long().repeat(bs, 1) + + if seq_parallel_world_size > 1: + input_ids = pad_for_sequence_parallel(input_ids, pad_index) + labels = pad_for_sequence_parallel(labels, IGNORE_INDEX) + position_ids = pad_for_sequence_parallel(position_ids, 0) + if attention_mask is not None: + attention_mask = pad_for_sequence_parallel(attention_mask, 0) + if use_varlen_attn: + # We use attention_mask to distinguish `input_ids` from + # (sequence parallel) pad tokens in `get_var_len_atten_logps` + # method of class `DPO` and `ORPO` + (cumulative_len, attention_mask + ) = pad_cumulative_len_for_sequence_parallel(cumulative_len) + + if use_varlen_attn: + max_seqlen = ( + cumulative_len[0][1:] - # noqa: W504 + cumulative_len[0][:-1]).max().item() + data_dict = { + 'input_ids': input_ids, + 'attention_mask': attention_mask, + 'cumulative_len': cumulative_len, + 'position_ids': position_ids, + 'labels': labels, + 'max_seqlen': max_seqlen + } + else: + data_dict = { + 'input_ids': input_ids, + 'attention_mask': attention_mask, + 'position_ids': position_ids, + 'labels': labels + } + + if return_hf_format: + return data_dict + else: + return {'data': data_dict, 'data_samples': {'ds_names': ds_names}} diff --git a/data/xtuner/xtuner/dataset/concat_dataset.py b/data/xtuner/xtuner/dataset/concat_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..18d0a4c2f1d68755768132aa97d6852ac7b311e1 --- /dev/null +++ b/data/xtuner/xtuner/dataset/concat_dataset.py @@ -0,0 +1,19 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from torch.utils.data import ConcatDataset as TorchConcatDataset + +from xtuner.registry import BUILDER + + +class ConcatDataset(TorchConcatDataset): + + def __init__(self, datasets): + datasets_instance = [] + for cfg in datasets: + datasets_instance.append(BUILDER.build(cfg)) + super().__init__(datasets=datasets_instance) + + def __repr__(self): + main_str = 'Dataset as a concatenation of multiple datasets. \n' + main_str += ',\n'.join( + [f'{repr(dataset)}' for dataset in self.datasets]) + return main_str diff --git a/data/xtuner/xtuner/dataset/huggingface.py b/data/xtuner/xtuner/dataset/huggingface.py new file mode 100644 index 0000000000000000000000000000000000000000..c44e88688d3157359f620dc43d4543dcff00e200 --- /dev/null +++ b/data/xtuner/xtuner/dataset/huggingface.py @@ -0,0 +1,315 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
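+# NOTE: typical usage, mirroring the QLoRA configs earlier in this diff (a +# hedged sketch; `tokenizer` and `prompt_template` are assumed to be defined +# as in those configs, and the map fns come from xtuner.dataset.map_fns): +# +# alpaca_en = process_hf_dataset( +# dataset=dict(type=load_dataset, path='tatsu-lab/alpaca'), +# tokenizer=tokenizer, +# max_length=2048, +# dataset_map_fn=alpaca_map_fn, +# template_map_fn=dict( +# type=template_map_fn_factory, template=prompt_template), +# pack_to_max_length=True)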
+import logging +import os +from datetime import timedelta +from functools import partial + +import numpy as np +from datasets import DatasetDict, concatenate_datasets +from mmengine import print_log +from mmengine.config import Config, ConfigDict +from mmengine.utils.misc import get_object_from_string +from torch import distributed as dist + +from xtuner.registry import BUILDER, MAP_FUNC +from .utils import Packer, encode_fn + + +def get_lengths(example): + return {'length': len(example['input_ids'])} + + +def build_origin_dataset(dataset, split): + if isinstance(dataset, DatasetDict): + if split is None: + dataset = concatenate_datasets(dataset.values()) + else: + dataset = dataset[split] + elif isinstance(dataset, dict) or isinstance( + dataset, Config) or isinstance(dataset, ConfigDict): + dataset = BUILDER.build(dataset) + if isinstance(dataset, DatasetDict): + if split is None: + dataset = concatenate_datasets(dataset.values()) + else: + dataset = dataset[split] + return dataset + + +def map_dataset(dataset, dataset_map_fn, map_num_proc): + if isinstance(dataset_map_fn, str): + map_fn_obj = MAP_FUNC.get(dataset_map_fn) or get_object_from_string( + dataset_map_fn) + if map_fn_obj is not None: + dataset_map_fn = map_fn_obj + else: + raise TypeError('dataset_map_fn must be a function or a ' + "registered function's string in MAP_FUNC, " + f"but got a string of '{dataset_map_fn}'") + + dataset = dataset.map(dataset_map_fn, num_proc=map_num_proc) + return dataset + + +def add_template_to_dataset(dataset, template_map_fn, map_num_proc): + if isinstance(template_map_fn, + dict) or isinstance(template_map_fn, Config) or isinstance( + template_map_fn, ConfigDict): + template_map_fn = BUILDER.build(template_map_fn) + dataset = dataset.map(template_map_fn, num_proc=map_num_proc) + # remove invalid data + dataset = dataset.filter( + lambda example: len(example['conversation']) > 0, + num_proc=map_num_proc) + return dataset + + +def tokenize_dataset(dataset, tokenizer, max_length, with_image_token, + input_ids_with_output, remove_unused_columns, + map_num_proc): + assert (tokenizer is not None) and (max_length is not None), \ + f'({tokenizer}, {max_length})' + if isinstance(tokenizer, dict) or isinstance( + tokenizer, Config) or isinstance(tokenizer, ConfigDict): + tokenizer = BUILDER.build(tokenizer) + dataset = dataset.map( + partial( + encode_fn, + tokenizer=tokenizer, + max_length=max_length, + with_image_token=with_image_token, + input_ids_with_output=input_ids_with_output), + remove_columns=list(dataset.column_names) + if remove_unused_columns else None, + num_proc=map_num_proc) + return dataset + + +def pack_dataset(dataset, max_length, use_varlen_attn, shuffle_before_pack, + map_num_proc): + if shuffle_before_pack: + dataset = dataset.shuffle() + dataset = dataset.flatten_indices(num_proc=map_num_proc) + dataset = dataset.map( + Packer(max_length, use_varlen_attn=use_varlen_attn), + batched=True, + num_proc=map_num_proc) + return dataset + + +def process(dataset, + do_dataset_tokenization=True, + tokenizer=None, + max_length=None, + dataset_map_fn=None, + template_map_fn=None, + max_dataset_length=None, + split='train', + remove_unused_columns=False, + rename_maps=[], + shuffle_before_pack=True, + pack_to_max_length=True, + use_varlen_attn=False, + input_ids_with_output=True, + with_image_token=False, + map_num_proc=32): + """Post-process the dataset loaded from the Hugging Face Hub, or a local + dataset. + + Args: + dataset: The dataset to be post-processed. 
+ do_dataset_tokenization: Whether the dataset needs to be tokenized + in this function. Defaults to True. + tokenizer: The tokenizer processes some raw text as input and outputs + an Encoding. If `do_dataset_tokenization` is True, this argument + should not be None. Defaults to None. + max_length: Max length of the sequence. If `do_dataset_tokenization` + or `pack_to_max_length` is True, this argument should not be None. + Defaults to None. + dataset_map_fn: Map the original dataset format to the one defined + by xTuner. + template_map_fn: Add the prompt template to the dataset. + max_dataset_length: If the dataset is too large, we can randomly + extract `max_dataset_length` samples from it. + split: Which split of the data to load. + If `None`, will return a single concatenated dataset with all + splits (typically `datasets.Split.TRAIN` and + `datasets.Split.TEST`). + If given, will return a single Dataset. + remove_unused_columns: Whether to remove columns from the dataset + that are not used during training. + rename_maps: Rename the column names of the dataset. + shuffle_before_pack: Whether to shuffle the dataset before + packing it. + pack_to_max_length: Whether to pack the dataset to `max_length`. + This usually improves GPU utilization and therefore reduces + training time. + use_varlen_attn: If True, attention is computed over the actual + length of each sub-sequence in a packed sample rather than over + the whole packed sequence. + input_ids_with_output: Whether to put the ground-truth output + corresponding to the question into the dataset. Typically set + it to True during training and False during testing. + with_image_token: Whether to convert DEFAULT_IMAGE_TOKEN to + IMAGE_TOKEN_INDEX. Typically set it to True during the training + of VLM. + map_num_proc: Max number of processes when mapping the dataset. + """ + if use_varlen_attn: + assert pack_to_max_length, \ + '`pack_to_max_length` in `process_hf_dataset` should be set to ' \ + 'True if `use_varlen_attn` is True.' + if pack_to_max_length: + assert split == 'train' or split is None, \ + ('`split` should be `train` or `None` if `pack_to_max_length` is ' + f'True, but got {split}.') + + dataset = build_origin_dataset(dataset, split) + + # sample `max_dataset_length` items from the original dataset to + # save the time consumed by the map function + if max_dataset_length is not None: + max_dataset_length = min(max_dataset_length, len(dataset)) + indices = np.random.choice( + len(dataset), max_dataset_length, replace=False) + dataset = dataset.select(indices) + + # Extract the useful data for training from the original dataset.
+ if dataset_map_fn is not None: + dataset = map_dataset(dataset, dataset_map_fn, map_num_proc) + + # Add prompt template, such as <|System|>: xxx <|User|>: xxx <|Bot|>: xxx + if template_map_fn is not None: + dataset = add_template_to_dataset(dataset, template_map_fn, + map_num_proc) + + for old, new in rename_maps: + dataset = dataset.rename_column(old, new) + + # remove unused columns + if pack_to_max_length and (not remove_unused_columns): + print_log( + 'We have to remove unused columns if ' + '`pack_to_max_length` is set to True.', + logger='current', + level=logging.WARNING) + remove_unused_columns = True + + if do_dataset_tokenization: + dataset = tokenize_dataset(dataset, tokenizer, max_length, + with_image_token, input_ids_with_output, + remove_unused_columns, map_num_proc) + + if input_ids_with_output: + assert {'input_ids', 'labels'}.issubset(dataset.column_names) + # remove data that does not have valid labels. + dataset = dataset.filter( + lambda example: any(label >= 0 for label in example['labels']), + num_proc=map_num_proc) + + # pack to max length + if pack_to_max_length: + dataset = pack_dataset(dataset, max_length, use_varlen_attn, + shuffle_before_pack, map_num_proc) + + # add 'length' + dataset = dataset.map(get_lengths, num_proc=map_num_proc) + setattr(dataset, 'length', dataset['length']) + + return dataset + + +def process_hf_dataset(dataset, + do_dataset_tokenization=True, + tokenizer=None, + max_length=None, + dataset_map_fn=None, + template_map_fn=None, + max_dataset_length=None, + split='train', + remove_unused_columns=False, + rename_maps=[], + shuffle_before_pack=True, + pack_to_max_length=True, + use_varlen_attn=False, + input_ids_with_output=True, + with_image_token=False, + map_num_proc=32): + """Post-process the dataset loaded from the Hugging Face Hub, or a local + dataset. + + Args: + dataset: The dataset to be post-processed. + do_dataset_tokenization: Whether the dataset needs to be tokenized + in this function. Defaults to True. + tokenizer: The tokenizer processes some raw text as input and outputs + an Encoding. If `do_dataset_tokenization` is True, this argument + should not be None. Defaults to None. + max_length: Max length of the sequence. If `do_dataset_tokenization` + or `pack_to_max_length` is True, this argument should not be None. + Defaults to None. + dataset_map_fn: Map the original dataset format to the one defined + by xTuner. + template_map_fn: Add the prompt template to the dataset. + max_dataset_length: If the dataset is too large, we can randomly + extract `max_dataset_length` samples from it. + split: Which split of the data to load. + If `None`, will return a single concatenated dataset with all + splits (typically `datasets.Split.TRAIN` and + `datasets.Split.TEST`). + If given, will return a single Dataset. + remove_unused_columns: Whether to remove columns from the dataset + that are not used during training. + rename_maps: Rename the column names of the dataset. + shuffle_before_pack: Whether to shuffle the dataset before + packing it. + pack_to_max_length: Whether to pack the dataset to `max_length`. + This usually improves GPU utilization and therefore reduces + training time. + use_varlen_attn: If True, attention is computed over the actual + length of each sub-sequence in a packed sample rather than over + the whole packed sequence. + input_ids_with_output: Whether to put the ground-truth output + corresponding to the question into the dataset. Typically set + it to True during training and False during testing.
+ with_image_token: Whether to convert DEFAULT_IMAGE_TOKEN to + IMAGE_TOKEN_INDEX. Typically set it to True during the training + of VLM. + map_num_proc: Max number of processes when mapping the dataset. + """ + kwargs = dict( + dataset=dataset, + do_dataset_tokenization=do_dataset_tokenization, + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=dataset_map_fn, + template_map_fn=template_map_fn, + max_dataset_length=max_dataset_length, + split=split, + remove_unused_columns=remove_unused_columns, + rename_maps=rename_maps, + shuffle_before_pack=shuffle_before_pack, + pack_to_max_length=pack_to_max_length, + use_varlen_attn=use_varlen_attn, + input_ids_with_output=input_ids_with_output, + with_image_token=with_image_token, + map_num_proc=map_num_proc) + if not (dist.is_available() and dist.is_initialized()): + return process(**kwargs) + + xtuner_dataset_timeout = timedelta( + minutes=int(os.getenv('XTUNER_DATASET_TIMEOUT', default=60))) + print_log( + f'xtuner_dataset_timeout = {xtuner_dataset_timeout}', logger='current') + # monitored barrier requires gloo process group to perform host-side sync. + group_gloo = dist.new_group(backend='gloo', timeout=xtuner_dataset_timeout) + + if dist.get_rank() == 0: + dataset = process(**kwargs) + objects = [dataset] + else: + objects = [None] + + dist.monitored_barrier(group=group_gloo, timeout=xtuner_dataset_timeout) + dist.broadcast_object_list(objects, src=0) + return objects[0] diff --git a/data/xtuner/xtuner/dataset/intern_repo.py b/data/xtuner/xtuner/dataset/intern_repo.py new file mode 100644 index 0000000000000000000000000000000000000000..95cd7cf99ad65da9880ae54235e7791cb6016fd5 --- /dev/null +++ b/data/xtuner/xtuner/dataset/intern_repo.py @@ -0,0 +1,362 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import itertools as it +import json +import mmap +import operator +import os +import threading +from pathlib import Path + +import numpy as np +import torch +from datasets import Dataset, load_dataset, load_from_disk +from mmengine import print_log +from torch import distributed as dist +from torch.utils.data import ConcatDataset + +from xtuner.dataset.map_fns import openai_map_fn +from xtuner.registry import BUILDER +from .huggingface import process + + +class JsonlDataset(torch.utils.data.Dataset): + """ + + JSONL format is expected to roughly follow that of The Pile. 
+ One-line-per-document of the form: + ``` + { + "input_ids": List[int], + "labels": List[int] + } + ``` + + """ + + def __init__(self, path: str, min_length=50): + self.path = path + self.threadlocal = threading.local() + resolved_path = Path(path).resolve() + self.resolved_path = resolved_path + self.meta = Path(f'{resolved_path}.meta') + + # only build the cache in on the primary worker to prevent + # overloading nfs + assert os.path.exists( + self.meta + ), f'The cache file:{self.meta} is not found for file:{self.path}' + try: + with open(self.meta, 'rb') as f: + meta = np.load(f) + except Exception as e: + print(f'Cannot load file {self.meta}...') + raise e + self.offsets = meta[:, 0] + self.length = meta[:, -1] + + if min_length > 0: + mask = self.length >= min_length + self.offsets = self.offsets[mask] + self.length = self.length[mask] + + def __getitem__(self, idx): + f = self._get_mmap() + position = self.offsets[idx] + f.seek(position) + item = f.readline().decode('utf-8') + try: + item = json.loads(item) + item['input_ids'] = item['tokens'] + del item['tokens'] + labels = [x if x > 0 else -100 for x in item['input_ids']] + item['input_ids'] = [abs(x) for x in item['input_ids']] + item['labels'] = labels + item['length'] = len(item['input_ids']) # add a length info + except Exception as err: + raise json.decoder.JSONDecodeError( + doc=self.path, + pos=position, + msg=(f'Error while loading JSONL line in file {self.path} ' + f'at byte {position}. Contents of line:\n{item}\n{err}'), + ) + return item + + def get_dataset_name(self): + return str(self.resolved_path) + + def _get_mmap(self): + if not hasattr(self.threadlocal, 'handles'): + with open(self.path, 'rb') as f: + mm = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) + self.threadlocal.handles = [f, mm] + if self.path.endswith('.gz') or self.path.endswith( + '.bz') or self.path.endswith('.bz2'): + raise NotImplementedError( + 'Compressed files are not supported because .seek() ' + 'would require rereading the entire file, making ' + 'performance too slow.') + return self.threadlocal.handles[-1] + + def __setstate__(self, state): + self.__dict__ = state + self.threadlocal = threading.local() + + def __getstate__(self): + d = {} + for i, v in self.__dict__.items(): + if i != 'threadlocal': + d[i] = v + return d + + def __del__(self): + if hasattr(self.threadlocal, 'handles'): + # cleanup files we opened on initialization + while self.threadlocal.handles: + self.threadlocal.handles.pop().close() + + @staticmethod + def exists(path): + return os.path.exists(path) + + def __len__(self): + # Virtual length of the dataset depends on the epoch number + # if the number of documents is not perfectly divisible by the + # data_subshard_count + return len(self.offsets) + + +class PackedDataset(torch.utils.data.Dataset): + """The class PackedDataset takes in a dataset and aggregates samples of + different lengths together based on the packed_length. + + Args: + dataset: The original dataset to pack. + packed_length: The length of each packed sample. Default is 8192. 
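+ seed: The random seed used to shuffle samples before packing. + Default is 1024. + + Example (illustrative numbers): if the wrapped dataset holds 100_000 + tokens in total and packed_length is 8192, then ``len(packed_ds) == + 100_000 // 8192 == 12``, and each item is a dict whose 'input_ids', + 'position_ids' and 'labels' hold exactly 8192 entries, with + 'cumulative_len' marking the sample boundaries inside the pack.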
+ """ + + def __init__(self, dataset, packed_length: int = 8192, seed: int = 1024): + self.dataset = dataset + self.packed_length = packed_length + if isinstance(dataset, JsonlDataset): + self.length = dataset.length + elif isinstance(dataset, Dataset): + if hasattr(dataset, 'length'): + length = dataset.length + else: + length = [len(i['input_ids']) for i in dataset] + self.length = length + else: + raise NotImplementedError + self.seed = seed + + rng = np.random.RandomState(self.seed) + shuffled_indices = np.arange(len(self.length)) + rng.shuffle(shuffled_indices) + self.shuffled_indices = shuffled_indices.tolist() + self.shuffled_samples_len = list( + map(self.length.__getitem__, shuffled_indices)) + self.shuffled_accumulated_samples_len = list( + it.accumulate(self.shuffled_samples_len, operator.add)) + self.num_tokens = sum(self.length) + + def __len__(self): + return self.num_tokens // self.packed_length + + def search_sample_index(self, pack_idx: int = 0): + assert pack_idx >= 0 + length_train = (pack_idx + 1) * self.packed_length + sample_index = np.searchsorted( + self.shuffled_accumulated_samples_len, length_train, side='left') + return sample_index + + def mapping(self, pack_idx: int = 0): + begin_sample_idx, begin_token_id = 0, 0 + if pack_idx > 0: + begin_sample_idx = self.search_sample_index(pack_idx - 1) + # The position where the previous packed data ends + begin_token_id = self.shuffled_samples_len[begin_sample_idx] - ( + self.shuffled_accumulated_samples_len[begin_sample_idx] + - # noqa: W504,W503 + (pack_idx) * self.packed_length) + if begin_token_id == self.shuffled_samples_len[begin_sample_idx]: + begin_sample_idx += 1 + begin_token_id = 0 + + end_sample_idx = self.search_sample_index(pack_idx) + end_token_id = self.shuffled_samples_len[end_sample_idx] - ( + self.shuffled_accumulated_samples_len[end_sample_idx] + - # noqa: W504,W503 + (pack_idx + 1) * self.packed_length) + return begin_sample_idx, begin_token_id, end_sample_idx, end_token_id + + def build_pack(self, begin_sample_idx: int, begin_token_id: int, + end_sample_idx: int, end_token_id: int): + pack, cumulative_len, position_ids, labels = [], [0], [], [] + + while begin_sample_idx < end_sample_idx: + sample_idx = self.shuffled_indices[begin_sample_idx] + sample = self.dataset[sample_idx] + chunk = sample['input_ids'][begin_token_id:] + pack.extend(chunk) + _labels = sample['labels'][begin_token_id:] + assert len(_labels) == len(chunk), (_labels, chunk) + labels.extend(_labels) + cumulative_len.append(cumulative_len[-1] + len(chunk)) + position_ids.extend(list(range(len(chunk)))) + begin_sample_idx = begin_sample_idx + 1 + begin_token_id = 0 + + sample_idx = self.shuffled_indices[end_sample_idx] + sample = self.dataset[sample_idx] + chunk = sample['input_ids'][begin_token_id: + end_token_id] # fragment of a sample + _labels = sample['labels'][begin_token_id:end_token_id] + pack.extend(chunk) + assert len(_labels) == len(chunk), (_labels, chunk) + labels.extend(_labels) + cumulative_len.append(cumulative_len[-1] + len(chunk)) + position_ids.extend(list(range(len(chunk)))) + + out = { + 'input_ids': pack, + 'cumulative_len': cumulative_len, + 'position_ids': position_ids, + 'labels': labels + } + return out + + def __getitem__(self, item: int): + pos_before, token_id_before, pos_after, token_id_after = self.mapping( + item) + return self.build_pack(pos_before, token_id_before, pos_after, + token_id_after) + + +def load_intern_repo_tokenized_dataset(folder, + min_length=0, + data_order_path=None, + 
file_type='.bin'): + assert os.path.exists(folder), f'{folder} does not exist.' + datasets = [] + + if data_order_path is not None: + data_order = load_dataset( + 'text', data_files=data_order_path, split='train')['text'] + for i, fp in enumerate(data_order): + data_order[i] = os.path.join(folder, fp) + else: + triples = list(os.walk(folder, followlinks=True)) + data_order = [] + for root, dirs, files in triples: + dirs.sort() + for fn in sorted(files): + if fn.endswith(file_type): + fp = os.path.join(root, fn) + data_order.append(fp) + + for fp in data_order: + print_log(f'Reading {fp}...', logger='current') + ds = JsonlDataset(fp, min_length=min_length) + + if len(ds) == 0: + continue + datasets.append(ds) + + return datasets + + +def load_intern_repo_untokenized_dataset(processed_dataset_dict_path=None, + folder=None, + tokenizer=None, + max_length=None, + template_map_fn=None, + data_order_path=None, + file_type='.json'): + + assert processed_dataset_dict_path or (folder and tokenizer and max_length) + + if processed_dataset_dict_path is not None: + ds = load_from_disk(processed_dataset_dict_path) + datasets = [] + for key, data in ds.items(): + datasets.append((key, data)) + datasets = sorted(datasets, key=lambda x: int(x[0])) + datasets = [x[1] for x in datasets] + return datasets + + assert os.path.exists(folder), f'{folder} does not exist.' + datasets = [] + + if data_order_path is not None: + data_order = load_dataset( + 'text', data_files=data_order_path, split='train')['text'] + for i, fp in enumerate(data_order): + data_order[i] = os.path.join(folder, fp) + else: + triples = list(os.walk(folder, followlinks=True)) + data_order = [] + for root, dirs, files in triples: + dirs.sort() + for fn in sorted(files): + if fn.endswith(file_type): + fp = os.path.join(root, fn) + data_order.append(fp) + + for fp in data_order: + print_log(f'Reading {fp}...', logger='current') + dataset = [] + with open(fp) as file: + lines = file.readlines() + for line in lines: + line = json.loads(line) + dataset.append({'messages': line}) + dataset = Dataset.from_list(dataset) + dataset = process( + dataset, + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=openai_map_fn, + template_map_fn=template_map_fn, + remove_unused_columns=True, + pack_to_max_length=False, + map_num_proc=32) + + if len(dataset) == 0: + continue + + datasets.append(dataset) + + return datasets + + +def build_packed_dataset_rank0(dataset_cfg, packed_length=8192, seed=1024): + if isinstance(dataset_cfg, dict): + datasets = BUILDER.build(dataset_cfg) + else: + datasets = dataset_cfg + + if not isinstance(datasets, list): + datasets = [datasets] + + packed_datasets = [] + + for dataset in datasets: + ds = PackedDataset(dataset, packed_length, seed=seed) + packed_datasets.append(ds) + + dataset = ConcatDataset(datasets=packed_datasets) + + return dataset + + +def build_packed_dataset(*args, **kwargs): + if not (dist.is_available() and dist.is_initialized()): + return build_packed_dataset_rank0(*args, **kwargs) + + if dist.get_rank() == 0: + dataset = build_packed_dataset_rank0(*args, **kwargs) + objects = [dataset] + else: + objects = [None] + dist.broadcast_object_list(objects, src=0) + return objects[0] diff --git a/data/xtuner/xtuner/dataset/internvl_dataset.py b/data/xtuner/xtuner/dataset/internvl_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..82904ae8777bd8a6eab9f9fc3b4ed929b6d350ce --- /dev/null +++ b/data/xtuner/xtuner/dataset/internvl_dataset.py @@ -0,0 +1,409 @@ +import copy 
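+# NOTE: the helpers below implement InternVL-V1.5-style dynamic tiling: an +# image is resized to the closest supported aspect ratio and cut into up to +# max_num tiles of image_size x image_size (plus an optional thumbnail), and +# each tile later contributes `patch_token` context tokens to the prompt.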
+import io +import json +import os +import random +import warnings + +import numpy as np +import torch +import torchvision.transforms as T +from mmengine import print_log +from mmengine.fileio import get +from PIL import Image +from torch.utils.data import Dataset +from torchvision.transforms.functional import InterpolationMode +from transformers import AutoConfig, AutoTokenizer + +from xtuner.utils import IGNORE_INDEX + + +# Referenced from InternVL +def find_closest_aspect_ratio(aspect_ratio, target_ratios, width, height, + image_size): + best_ratio_diff = float('inf') + best_ratio = (1, 1) + area = width * height + for ratio in target_ratios: + target_aspect_ratio = ratio[0] / ratio[1] + ratio_diff = abs(aspect_ratio - target_aspect_ratio) + if ratio_diff < best_ratio_diff: + best_ratio_diff = ratio_diff + best_ratio = ratio + elif ratio_diff == best_ratio_diff: + if area > 0.5 * image_size * image_size * ratio[0] * ratio[1]: + best_ratio = ratio + return best_ratio + + +def dynamic_preprocess(image, + min_num=1, + max_num=6, + image_size=448, + use_thumbnail=False): + orig_width, orig_height = image.size + aspect_ratio = orig_width / orig_height + + # calculate the existing image aspect ratio + target_ratios = {(i, j) + for n in range(min_num, max_num + 1) + for i in range(1, n + 1) for j in range(1, n + 1) + if i * j <= max_num and i * j >= min_num} + target_ratios = sorted(target_ratios, key=lambda x: x[0] * x[1]) + + # find the closest aspect ratio to the target + target_aspect_ratio = find_closest_aspect_ratio(aspect_ratio, + target_ratios, orig_width, + orig_height, image_size) + + # calculate the target width and height + target_width = image_size * target_aspect_ratio[0] + target_height = image_size * target_aspect_ratio[1] + blocks = target_aspect_ratio[0] * target_aspect_ratio[1] + + # resize the image + resized_img = image.resize((target_width, target_height)) + processed_images = [] + for i in range(blocks): + box = ((i % (target_width // image_size)) * image_size, + (i // (target_width // image_size)) * image_size, + ((i % (target_width // image_size)) + 1) * image_size, + ((i // (target_width // image_size)) + 1) * image_size) + # split the image + split_img = resized_img.crop(box) + processed_images.append(split_img) + assert len(processed_images) == blocks + if use_thumbnail and len(processed_images) != 1: + thumbnail_img = image.resize((image_size, image_size)) + processed_images.append(thumbnail_img) + return processed_images + + +def total_image_token(orig_size, + min_num=1, + max_num=12, + image_size=448, + use_thumbnail=True): + orig_width, orig_height = orig_size + + aspect_ratio = orig_width / orig_height + + # calculate the existing image aspect ratio + target_ratios = {(i, j) + for n in range(min_num, max_num + 1) + for i in range(1, n + 1) for j in range(1, n + 1) + if max_num >= i * j >= min_num} + target_ratios = sorted(target_ratios, key=lambda x: x[0] * x[1]) + + # find the closest aspect ratio to the target + target_aspect_ratio = find_closest_aspect_ratio(aspect_ratio, + target_ratios, orig_width, + orig_height, image_size) + blocks = target_aspect_ratio[0] * target_aspect_ratio[1] + + if use_thumbnail: + blocks += 1 + + return blocks + + +def load_json_or_jsonl(json_path): + if json_path.endswith('.json'): + with open(json_path) as f: + data = json.load(f) + elif json_path.endswith('.jsonl'): + with open(json_path) as f: + data = [json.loads(line) for line in f] + else: + raise ValueError(f'Unsupported file format: {json_path}, ' + f'only support .json 
and .jsonl.') + return data + + +class InternVL_V1_5_Dataset(Dataset): + os.environ['TOKENIZERS_PARALLELISM'] = 'true' + IMG_CONTEXT_TOKEN = '<IMG_CONTEXT>' + IMG_START_TOKEN = '<img>' + IMG_END_TOKEN = '</img>' + + IMAGENET_MEAN = (0.485, 0.456, 0.406) + IMAGENET_STD = (0.229, 0.224, 0.225) + + def __init__(self, + model_path, + template, + data_paths, + image_folders=None, + repeat_times=1, + max_length=8192): + self.template = template + self.max_length = max_length + + self.cfg = AutoConfig.from_pretrained( + model_path, trust_remote_code=True) + + # The following modifications are only to ensure full + # consistency with the official template, + # without investigating the impact on performance. + if self.cfg.llm_config.architectures[0] == 'Phi3ForCausalLM': + self._system = 'You are an AI assistant whose name is Phi-3.' + self.template[ + 'INSTRUCTION'] = '<|user|>\n{input}<|end|><|assistant|>\n' + elif self.cfg.llm_config.architectures[0] == 'InternLM2ForCausalLM': + self._system = 'You are an AI assistant whose name ' \ + 'is InternLM (书生·浦语).' + self.template['SYSTEM'] = '<|im_start|>system\n{system}<|im_end|>' + self.template[ + 'INSTRUCTION'] = '<|im_start|>user\n{input}' \ + '<|im_end|><|im_start|>assistant\n' + else: + raise NotImplementedError + + self.min_dynamic_patch = self.cfg.min_dynamic_patch + self.max_dynamic_patch = self.cfg.max_dynamic_patch + self.downsample_ratio = self.cfg.downsample_ratio + self.image_size = self.cfg.force_image_size + self.use_thumbnail = self.cfg.use_thumbnail + patch_size = self.cfg.vision_config.patch_size + self.patch_token = int( + (self.image_size // patch_size)**2 * (self.downsample_ratio**2)) + self.tokenizer = AutoTokenizer.from_pretrained( + model_path, trust_remote_code=True) + self.transformer = T.Compose([ + T.Lambda(lambda img: img.convert('RGB') + if img.mode != 'RGB' else img), + T.Resize((self.image_size, self.image_size), + interpolation=InterpolationMode.BICUBIC), + T.ToTensor(), + T.Normalize(mean=self.IMAGENET_MEAN, std=self.IMAGENET_STD) + ]) + + if not isinstance(data_paths, (list, tuple)): + data_paths = [data_paths] + if not isinstance(image_folders, (list, tuple)): + image_folders = [image_folders] + if not isinstance(repeat_times, (list, tuple)): + repeat_times = [repeat_times] + assert len(data_paths) == len(image_folders) == len(repeat_times) + + print_log('Starting to load data and calc length', logger='current') + self.data = [] + self.image_folder = [] + self.group_length = [] + self.conv2length_text = { + } # using dict to speed up the calculation of token length + + for data_file, image_folder, repeat_time in zip( + data_paths, image_folders, repeat_times): + print_log( + f'=======Starting to process {data_file} =======', + logger='current') + assert repeat_time > 0 + json_data = load_json_or_jsonl(data_file) + if repeat_time < 1: + json_data = random.sample(json_data, + int(len(json_data) * repeat_time)) + elif repeat_time > 1: + int_repeat_time = int(repeat_time) + remaining_repeat_time = repeat_time - int_repeat_time + if remaining_repeat_time > 0: + remaining_json_data = random.sample( + json_data, int(len(json_data) * remaining_repeat_time)) + json_data = json_data * int_repeat_time + json_data.extend(remaining_json_data) + else: + json_data = json_data * int_repeat_time + + self.data.extend(json_data) + self.image_folder.extend([image_folder] * len(json_data)) + + # TODO: multi process + for data_item in json_data: + if 'length' in data_item: + token_length = data_item['length'] # include image token + else: + conversations = 
'\n'.join( + [temp['value'] for temp in data_item['conversations']]) + str_length = len(conversations) + + if str_length not in self.conv2length_text: + token_length = self.tokenizer( + conversations, + return_tensors='pt', + padding=False, + truncation=False, + ).input_ids.size(1) + self.conv2length_text[str_length] = token_length + else: + token_length = self.conv2length_text[str_length] + + if 'image' in data_item and data_item['image'] is not None: + if 'image_wh' in data_item and data_item[ + 'image_wh'] is not None: + # more accurate calculation of image token + image_wh = data_item['image_wh'] + if isinstance(image_wh[0], list): + image_wh = image_wh[0] + image_token = total_image_token( + image_wh, self.min_dynamic_patch, + self.max_dynamic_patch, self.image_size, + self.use_thumbnail) + image_token = self.patch_token * image_token + else: + # max_dynamic_patch + use_thumbnail + image_token = self.patch_token * ( + self.max_dynamic_patch + self.use_thumbnail) + + token_length = token_length + image_token + else: + token_length = -token_length + + self.group_length.append(token_length) + print_log( + f'=======total {len(json_data)} samples of {data_file}=======', + logger='current') + + assert len(self.group_length) == len(self.data) + print_log('end loading data and calc length', logger='current') + print_log( + f'=======total {len(self.data)} samples=======', logger='current') + self._max_refetch = 1000 + + def __getitem__(self, index): + for _ in range(self._max_refetch + 1): + data = self.prepare_data(index) + # Broken images may cause the returned data to be None + if data is None: + index = self._rand_another() + continue + return data + + def __len__(self): + return len(self.data) + + @property + def modality_length(self): + return self.group_length + + @property + def length(self): + group_length = np.array(self.group_length) + group_length = np.abs(group_length).tolist() + return group_length + + def prepare_data(self, index): + data_dict: dict = self.data[index] + image_folder = self.image_folder[index] + + out_data_dict = {} + if data_dict.get('image', None) is not None: + image_file = data_dict['image'] + if isinstance(image_file, (list, tuple)): + assert len(image_file) == 1 + image_file = image_file[0] + + try: + image = self.get_image(os.path.join(image_folder, image_file)) + except Exception as e: + print(f'Error: {e}', flush=True) + print_log(f'Error: {e}', logger='current') + return None + + images = dynamic_preprocess(image, self.min_dynamic_patch, + self.max_dynamic_patch, + self.image_size, self.use_thumbnail) + pixel_values = [self.transformer(image) for image in images] + pixel_values = torch.stack(pixel_values) + out_data_dict['pixel_values'] = pixel_values + + num_image_tokens = pixel_values.shape[0] * self.patch_token + image_token_str = f'{self.IMG_START_TOKEN}' \ + f'{self.IMG_CONTEXT_TOKEN * num_image_tokens}' \ + f'{self.IMG_END_TOKEN}' + token_dict = self.get_inputid_labels(data_dict['conversations'], + image_token_str) + out_data_dict.update(token_dict) + else: + token_dict = self.get_inputid_labels(data_dict['conversations'], + None) + out_data_dict.update(token_dict) + out_data_dict['pixel_values'] = torch.zeros( + 1, 3, self.image_size, self.image_size) + return out_data_dict + + def _rand_another(self) -> int: + return np.random.randint(0, len(self.data)) + + def get_image(self, path): + if 's3://' in path: + img_bytes = get(path) + with io.BytesIO(img_bytes) as buff: + img = Image.open(buff).convert('RGB') + return img + else: + return 
Image.open(path).convert('RGB') + + def get_inputid_labels(self, conversations, image_token_str) -> dict: + input = '' + out_conversation = [] + while conversations and conversations[0]['from'] == 'gpt': + # Skip the first one if it is from gpt + conversations = conversations[1:] + for msg in conversations: + if msg['from'] == 'human': + if image_token_str is None and '<image>' in msg['value']: + warnings.warn( + f'The current data << {msg["value"]} >> is ' + f'in plain text mode, but ' + 'there are <image> tags present in the data. ' + 'We need to remove the <image> tags.') + msg['value'] = msg['value'].replace('<image>', '') + if '<image>' in msg['value']: + msg['value'] = msg['value'].replace('<image>', '').strip() + msg['value'] = image_token_str + '\n' + msg['value'] + msg['value'] = msg['value'].strip() + input += msg['value'].strip() + elif msg['from'] == 'gpt': + out_conversation.append({ + 'input': input, + 'output': msg['value'].strip() + }) + input = '' + else: + raise NotImplementedError + + input_ids, labels = [], [] + for i, single_turn_conversation in enumerate(out_conversation): + input = single_turn_conversation.get('input', '') + if input is None: + input = '' + input_text = self.template.INSTRUCTION.format( + input=input, round=i + 1) + + if i == 0: + system = self.template.SYSTEM.format(system=self._system) + input_text = system + input_text + input_encode = self.tokenizer.encode( + input_text, add_special_tokens=True) + else: + input_encode = self.tokenizer.encode( + input_text, add_special_tokens=False) + input_ids += input_encode + labels += [IGNORE_INDEX] * len(input_encode) + + output_text = single_turn_conversation.get('output', '') + if self.template.get('SUFFIX', None): + output_text += self.template.SUFFIX + output_encode = self.tokenizer.encode( + output_text, add_special_tokens=False) + input_ids += output_encode + labels += copy.deepcopy(output_encode) + + if len(input_ids) > self.max_length: + input_ids = input_ids[:self.max_length] + labels = labels[:self.max_length] + print_log( + f'Warning: input_ids length({len(input_ids)}) ' + f'is longer than max_length, cut to {self.max_length}', + logger='current') + return {'input_ids': input_ids, 'labels': labels} diff --git a/data/xtuner/xtuner/dataset/json_dataset.py b/data/xtuner/xtuner/dataset/json_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..1c7ca016300c94d19acb14bf9934d49c156a7987 --- /dev/null +++ b/data/xtuner/xtuner/dataset/json_dataset.py @@ -0,0 +1,24 @@ +import json +import os + +from datasets import Dataset, concatenate_datasets + + +def load_json_file(data_files=None, data_dir=None, suffix=None): + assert (data_files is not None) != (data_dir is not None) + if data_dir is not None: + data_files = os.listdir(data_dir) + data_files = [os.path.join(data_dir, fn) for fn in data_files] + if suffix is not None: + data_files = [fp for fp in data_files if fp.endswith(suffix)] + elif isinstance(data_files, str): + data_files = [data_files] + + dataset_list = [] + for fp in data_files: + with open(fp, encoding='utf-8') as file: + data = json.load(file) + ds = Dataset.from_list(data) + dataset_list.append(ds) + dataset = concatenate_datasets(dataset_list) + return dataset diff --git a/data/xtuner/xtuner/dataset/llava.py b/data/xtuner/xtuner/dataset/llava.py new file mode 100644 index 0000000000000000000000000000000000000000..0fab0258af8fa507aac81a45734cee7d71ff63e3 --- /dev/null +++ b/data/xtuner/xtuner/dataset/llava.py @@ -0,0 +1,122 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
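# A hedged usage sketch for `load_json_file` above; the directory and file
# names are made up, and each file must contain a JSON *list* of records,
# since the loader calls `Dataset.from_list` on the parsed content.
from xtuner.dataset.json_dataset import load_json_file

ds = load_json_file(data_dir='data/sft', suffix='.json')  # hypothetical dir
print(len(ds), ds.column_names)
# exactly one of data_files/data_dir may be set, enforced by the assert
ds = load_json_file(data_files='data/sft/part0.json')  # hypothetical file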
+import json +import logging +import os + +import torch +from datasets import Dataset as HFDataset +from datasets import DatasetDict, load_from_disk +from mmengine import print_log +from mmengine.config import Config, ConfigDict +from PIL import Image +from torch.utils.data import Dataset + +from xtuner.registry import BUILDER +from .huggingface import process_hf_dataset +from .utils import expand2square + + +def load_jsonl(json_file): + with open(json_file) as f: + lines = f.readlines() + data = [] + for line in lines: + data.append(json.loads(line)) + return data + + +class LLaVADataset(Dataset): + + def __init__(self, + image_folder, + image_processor, + data_path=None, + tokenizer=None, + offline_processed_text_folder=None, + max_dataset_length=None, + dataset_map_fn=None, + template_map_fn=None, + max_length=2048, + pad_image_to_square=False): + super().__init__() + + assert offline_processed_text_folder or (data_path and tokenizer) + if offline_processed_text_folder and data_path: + print_log( + 'Both `offline_processed_text_folder` and ' + '`data_path` are set, and we load dataset from' + '`offline_processed_text_folder` ' + f'({offline_processed_text_folder})', + logger='current', + level=logging.WARNING) + + if offline_processed_text_folder is not None: + self.text_data = load_from_disk(offline_processed_text_folder) + else: + if data_path.endswith('.json'): + json_data = json.load(open(data_path)) + elif data_path.endswith('.jsonl'): + json_data = load_jsonl(data_path) + else: + raise NotImplementedError + + for idx in range(len(json_data)): + if isinstance(json_data[idx]['id'], int): + json_data[idx]['id'] = str(json_data[idx]['id']) + json_data = DatasetDict({'train': HFDataset.from_list(json_data)}) + self.text_data = process_hf_dataset( + dataset=json_data, + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=dataset_map_fn, + template_map_fn=template_map_fn, + split='train', + max_dataset_length=max_dataset_length, + remove_unused_columns=False, + pack_to_max_length=False, + with_image_token=True) + + self.image_folder = image_folder + if isinstance(image_processor, dict) or isinstance( + image_processor, Config) or isinstance(image_processor, + ConfigDict): + self.image_processor = BUILDER.build(image_processor) + else: + self.image_processor = image_processor + self.pad_image_to_square = pad_image_to_square + + @property + def modality_length(self): + length_list = [] + for data_dict in self.text_data: + cur_len = len(data_dict['input_ids']) + if data_dict.get('image', None) is None: + cur_len = -cur_len + length_list.append(cur_len) + return length_list + + def __len__(self): + return len(self.text_data) + + def __getitem__(self, index): + data_dict = self.text_data[index] + if data_dict.get('image', None) is not None: + image_file = data_dict['image'] + image = Image.open(os.path.join(self.image_folder, + image_file)).convert('RGB') + if self.pad_image_to_square: + image = expand2square( + image, + tuple( + int(x * 255) for x in self.image_processor.image_mean)) + image = self.image_processor.preprocess( + image, return_tensors='pt')['pixel_values'][0] + data_dict['pixel_values'] = image + else: + if hasattr(self.image_processor, 'crop_size'): + crop_size = self.image_processor.crop_size + else: + crop_size = self.image_processor.size + data_dict['pixel_values'] = torch.zeros(3, crop_size['height'], + crop_size['width']) + return data_dict diff --git a/data/xtuner/xtuner/dataset/map_fns/__init__.py b/data/xtuner/xtuner/dataset/map_fns/__init__.py new file 
mode 100644 index 0000000000000000000000000000000000000000..4a488c53eab57eedcd0437c2f239faec445292cf --- /dev/null +++ b/data/xtuner/xtuner/dataset/map_fns/__init__.py @@ -0,0 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .dataset_map_fns import * # noqa: F401, F403 +from .template_map_fn import template_map_fn # noqa: F401 +from .template_map_fn import template_map_fn_factory # noqa: F401 diff --git a/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/__init__.py b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..449b7b4f20efec582e419fb15f7fcc45f200a585 --- /dev/null +++ b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/__init__.py @@ -0,0 +1,53 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .alpaca_map_fn import alpaca_map_fn +from .alpaca_zh_map_fn import alpaca_zh_map_fn +from .arxiv_map_fn import arxiv_map_fn +from .code_alpaca_map_fn import code_alpaca_map_fn +from .colors_map_fn import colors_map_fn +from .crime_kg_assitant_map_fn import crime_kg_assitant_map_fn +from .default_map_fn import default_map_fn +from .law_reference_map_fn import law_reference_map_fn +from .llava_map_fn import llava_image_only_map_fn, llava_map_fn +from .medical_map_fn import medical_map_fn +from .msagent_map_fn import msagent_react_map_fn +from .oasst1_map_fn import oasst1_map_fn +from .openai_map_fn import openai_map_fn +from .openorca_map_fn import openorca_map_fn +from .pretrain_map_fn import pretrain_map_fn +from .sql_map_fn import sql_map_fn +from .stack_exchange_map_fn import stack_exchange_map_fn +from .tiny_codes_map_fn import tiny_codes_map_fn +from .wizardlm_map_fn import wizardlm_map_fn + +DATASET_FORMAT_MAPPING = dict( + alpaca=alpaca_map_fn, + alpaca_zh=alpaca_zh_map_fn, + arxiv=arxiv_map_fn, + code_alpaca=code_alpaca_map_fn, + colors=colors_map_fn, + crime_kg_assitan=crime_kg_assitant_map_fn, + default=default_map_fn, + law_reference=law_reference_map_fn, + llava_image_only=llava_image_only_map_fn, + llava=llava_map_fn, + medical=medical_map_fn, + msagent_react=msagent_react_map_fn, + oasst1=oasst1_map_fn, + openai=openai_map_fn, + openorca=openorca_map_fn, + pretrain=pretrain_map_fn, + sql=sql_map_fn, + stack_exchange=stack_exchange_map_fn, + tiny_codes=tiny_codes_map_fn, + wizardlm=wizardlm_map_fn, +) + +__all__ = [ + 'alpaca_map_fn', 'alpaca_zh_map_fn', 'oasst1_map_fn', 'arxiv_map_fn', + 'medical_map_fn', 'openorca_map_fn', 'code_alpaca_map_fn', + 'tiny_codes_map_fn', 'colors_map_fn', 'law_reference_map_fn', + 'crime_kg_assitant_map_fn', 'sql_map_fn', 'openai_map_fn', + 'wizardlm_map_fn', 'stack_exchange_map_fn', 'msagent_react_map_fn', + 'pretrain_map_fn', 'default_map_fn', 'llava_image_only_map_fn', + 'llava_map_fn', 'DATASET_FORMAT_MAPPING' +] diff --git a/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/alpaca_map_fn.py b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/alpaca_map_fn.py new file mode 100644 index 0000000000000000000000000000000000000000..d64ac3a1cb6f2d5ee5c84b2f5cb08f84d5001ac5 --- /dev/null +++ b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/alpaca_map_fn.py @@ -0,0 +1,13 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
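# `DATASET_FORMAT_MAPPING` above lets a config refer to a dataset map
# function by name; a quick check with a fabricated Alpaca-style record.
from xtuner.dataset.map_fns import DATASET_FORMAT_MAPPING

map_fn = DATASET_FORMAT_MAPPING['alpaca']
example = {'instruction': 'Add the numbers.', 'input': '2 and 3', 'output': '5'}
print(map_fn(example))
# {'conversation': [{'input': 'Add the numbers.\n2 and 3', 'output': '5'}]}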
+ + +def alpaca_map_fn(example): + if example.get('output') == '': + return {'conversation': []} + else: + return { + 'conversation': [{ + 'input': f"{example['instruction']}\n{example['input']}", + 'output': example['output'] + }] + } diff --git a/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/alpaca_zh_map_fn.py b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/alpaca_zh_map_fn.py new file mode 100644 index 0000000000000000000000000000000000000000..5e17cfa048325af7feadc1fd0452481d65b64cd8 --- /dev/null +++ b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/alpaca_zh_map_fn.py @@ -0,0 +1,10 @@ +# Copyright (c) OpenMMLab. All rights reserved. + + +def alpaca_zh_map_fn(example): + return { + 'conversation': [{ + 'input': f"{example['instruction_zh']}\n{example['input_zh']}", + 'output': example['output_zh'] + }] + } diff --git a/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/arxiv_map_fn.py b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/arxiv_map_fn.py new file mode 100644 index 0000000000000000000000000000000000000000..52bcc4e341708d51d474a3d9db6dcc2ad65df454 --- /dev/null +++ b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/arxiv_map_fn.py @@ -0,0 +1,12 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from xtuner.utils import SYSTEM_TEMPLATE + + +def arxiv_map_fn(example): + return { + 'conversation': [{ + 'system': SYSTEM_TEMPLATE.arxiv_gentile, + 'input': example['abstract'], + 'output': example['title'] + }] + } diff --git a/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/code_alpaca_map_fn.py b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/code_alpaca_map_fn.py new file mode 100644 index 0000000000000000000000000000000000000000..ece86ff209807d6e8a555eef95a3205d62aa5144 --- /dev/null +++ b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/code_alpaca_map_fn.py @@ -0,0 +1,12 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from xtuner.utils import SYSTEM_TEMPLATE + + +def code_alpaca_map_fn(example): + return { + 'conversation': [{ + 'system': SYSTEM_TEMPLATE.coder, + 'input': example['prompt'], + 'output': example['completion'] + }] + } diff --git a/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/colors_map_fn.py b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/colors_map_fn.py new file mode 100644 index 0000000000000000000000000000000000000000..17d08bf207cc02d74c2833f1d24da7962e4cd629 --- /dev/null +++ b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/colors_map_fn.py @@ -0,0 +1,13 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from xtuner.utils import SYSTEM_TEMPLATE + + +def colors_map_fn(example): + desc = ':'.join(example['description'].split(':')[1:]).strip() + return { + 'conversation': [{ + 'system': SYSTEM_TEMPLATE.colorist, + 'input': desc, + 'output': example['color'] + }] + } diff --git a/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/crime_kg_assitant_map_fn.py b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/crime_kg_assitant_map_fn.py new file mode 100644 index 0000000000000000000000000000000000000000..b7511a98d94d53aea340a216d9f323c9ae166a41 --- /dev/null +++ b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/crime_kg_assitant_map_fn.py @@ -0,0 +1,12 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
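# `colors_map_fn` above keeps only the text after the first ':' in the
# description field; a quick check with a made-up description string.
desc = 'Description: a warm, muted red with a hint of brown'
print(':'.join(desc.split(':')[1:]).strip())
# a warm, muted red with a hint of brown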
+from xtuner.utils import SYSTEM_TEMPLATE + + +def crime_kg_assitant_map_fn(example): + return { + 'conversation': [{ + 'system': SYSTEM_TEMPLATE.lawyer, + 'input': example['input'], + 'output': example['output'] + }] + } diff --git a/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/default_map_fn.py b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/default_map_fn.py new file mode 100644 index 0000000000000000000000000000000000000000..0424b884839cd20168ef9c8d26e4363eb8850503 --- /dev/null +++ b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/default_map_fn.py @@ -0,0 +1,8 @@ +# Copyright (c) OpenMMLab. All rights reserved. +def default_map_fn(example): + return { + 'conversation': [{ + 'input': example['input'], + 'output': example['output'] + }] + } diff --git a/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/law_reference_map_fn.py b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/law_reference_map_fn.py new file mode 100644 index 0000000000000000000000000000000000000000..297086fa082c9c045e6f67af4d74568029b4ffd6 --- /dev/null +++ b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/law_reference_map_fn.py @@ -0,0 +1,12 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from xtuner.utils import SYSTEM_TEMPLATE + + +def law_reference_map_fn(example): + return { + 'conversation': [{ + 'system': SYSTEM_TEMPLATE.lawyer, + 'input': example['question'], + 'output': example['answer'] + }] + } diff --git a/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/llava_map_fn.py b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/llava_map_fn.py new file mode 100644 index 0000000000000000000000000000000000000000..a08ca395b6c4fd208a944d97e98e94fa235c15e4 --- /dev/null +++ b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/llava_map_fn.py @@ -0,0 +1,46 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
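# Any of these map functions can be plugged into `datasets.map`; a minimal
# sketch with `default_map_fn` and a fabricated one-row dataset.
from datasets import Dataset
from xtuner.dataset.map_fns import default_map_fn

ds = Dataset.from_list([{'input': '1+1?', 'output': '2'}])
ds = ds.map(default_map_fn)  # adds a 'conversation' column per example
print(ds[0]['conversation'])  # [{'input': '1+1?', 'output': '2'}]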
+from xtuner.utils import DEFAULT_IMAGE_TOKEN + + +def llava_image_only_map_fn(example): + # input contains the DEFAULT_IMAGE_TOKEN only + messages = example['conversations'] + input = '' + conversation = [] + while messages and messages[0]['from'] == 'gpt': + # Skip the first one if it is from gpt + messages = messages[1:] + for msg in messages: + if msg['from'] == 'human': + assert DEFAULT_IMAGE_TOKEN in msg['value'] + input += DEFAULT_IMAGE_TOKEN + elif msg['from'] == 'gpt': + conversation.append({'input': input, 'output': msg['value']}) + input = '' + else: + raise NotImplementedError + return {'conversation': conversation} + + +def llava_map_fn(example): + messages = example['conversations'] + input = '' + conversation = [] + while messages and messages[0]['from'] == 'gpt': + # Skip the first one if it is from gpt + messages = messages[1:] + for msg in messages: + if msg['from'] == 'human': + if DEFAULT_IMAGE_TOKEN in msg['value']: + msg['value'] = msg['value'].replace(DEFAULT_IMAGE_TOKEN, + '').strip() + msg['value'] = DEFAULT_IMAGE_TOKEN + '\n' + msg['value'] + msg['value'] = msg['value'].strip() + input += msg['value'] + + elif msg['from'] == 'gpt': + conversation.append({'input': input, 'output': msg['value']}) + input = '' + else: + raise NotImplementedError + return {'conversation': conversation} diff --git a/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/medical_map_fn.py b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/medical_map_fn.py new file mode 100644 index 0000000000000000000000000000000000000000..60a955454bee80e283ac950ef561e642affc6eef --- /dev/null +++ b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/medical_map_fn.py @@ -0,0 +1,12 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from xtuner.utils import SYSTEM_TEMPLATE + + +def medical_map_fn(example): + return { + 'conversation': [{ + 'system': SYSTEM_TEMPLATE.medical, + 'input': '{instruction}\n{input}'.format(**example), + 'output': example['output'] + }] + } diff --git a/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/msagent_map_fn.py b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/msagent_map_fn.py new file mode 100644 index 0000000000000000000000000000000000000000..fef8b1c5c680b58bf4a6817a6881b1adb021b3f4 --- /dev/null +++ b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/msagent_map_fn.py @@ -0,0 +1,129 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
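# `llava_map_fn` above moves a stray image placeholder onto its own leading
# line; a fabricated sample, assuming xtuner's DEFAULT_IMAGE_TOKEN is
# '<image>'.
from xtuner.dataset.map_fns import llava_map_fn

sample = {'conversations': [
    {'from': 'human', 'value': 'What is shown here? <image>'},
    {'from': 'gpt', 'value': 'A cat on a sofa.'},
]}
print(llava_map_fn(sample))
# {'conversation': [{'input': '<image>\nWhat is shown here?',
#                    'output': 'A cat on a sofa.'}]}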
+import json +import re + +think_regex = r'(.*?)(<\|startofthink\|\>)(.*?)(<\|endofthink\|\>)' +exec_regex = r'(<\|startofexec\|\>)(.*?)(<\|endofexec\|\>)(.*?)$' + + +def replace_think(match): + out_text = '' + if match.group(1).strip() != '': + out_text += f'Thought:{match.group(1).strip()}\n' + think_text = match.group(3).replace('```JSON', + '').replace('```', + '').replace('\n', '') + think_json = json.loads(think_text) + out_text += (f"Action:{think_json['api_name']}\n" + f"Action Input:{think_json['parameters']}\n") + return out_text + + +def replace_exec(match): + out_text = '' + exec_text = match.group(2).replace('```JSON', + '').replace('```', + '').replace('\n', '') + exec_json = json.loads(exec_text) + out_text += f'Response:{exec_json}\n' + if match.group(4).strip() != '': + out_text += f'Final Answer:{match.group(4).strip()}\n' + return out_text + + +def extract_json_objects(text, decoder=json.JSONDecoder()): + pos = 0 + results = [] + while True: + match = text.find('{', pos) + if match == -1: + break + try: + result, index = decoder.raw_decode(text[match:]) + if 'name' in result and 'description' in result: + results.append(result) + pos = match + index + else: + pos = match + 1 + except ValueError: + pos = match + 1 + return results + + +def msagent_react_map_fn(example): + text = example['conversations'] + if isinstance(text, str): + text = eval(text) + if len(text) < 2: # Filter out invalid data + return {'conversation': []} + conversation = [] + system_text = '' + input_text = '' + for t in text: + if t['from'] == 'system': + system_text += '你是一个可以调用外部工具的助手,可以使用的工具包括:\n' + json_objects = extract_json_objects(t['value']) + api_dict = {} + for obj in json_objects: + api_dict[obj['name']] = obj['description'] + try: + params = { + i['name']: i['description'] + for i in obj['paths'][0]['parameters'] + } + api_dict[obj['name']] += f'\n输入参数: {params}' + except Exception: + pass + system_text += f'{api_dict}\n' + system_text += ( + '如果使用工具请遵循以下格式回复:\n```\n' + 'Thought:思考你当前步骤需要解决什么问题,是否需要使用工具\n' + f'Action:工具名称,你的工具必须从 [{str(list(api_dict.keys()))}] 选择\n' + 'Action Input:工具输入参数\n```\n工具返回按照以下格式回复:\n```\n' + 'Response:调用工具后的结果\n```\n如果你已经知道了答案,或者你不需要工具,' + '请遵循以下格式回复\n```\n' + 'Thought:给出最终答案的思考过程\n' + 'Final Answer:最终答案\n```\n开始!\n') + elif t['from'] == 'user': + input_text += f"{t['value']}\n" + elif t['from'] == 'assistant': + output = t['value'] + output_response = None + try: + if '<|startofexec|>' in output: + output, output_response = output.split('<|startofexec|>') + output_response = '<|startofexec|>' + output_response + output, think_cnt = re.subn( + think_regex, replace_think, output, flags=re.DOTALL) + except Exception: + return {'conversation': []} + + if think_cnt == 0: + output = f'Final Answer:{output}\n' + else: + output = f'{output}\n' + conversation.append({ + 'system': system_text, + 'input': input_text, + 'output': output + }) + system_text = '' + input_text = '' + if output_response is not None: + try: + output_response, exec_cnt = re.subn( + exec_regex, + replace_exec, + output_response, + flags=re.DOTALL) + if 'Final Answer:' in output_response: + output_response, output_answer = output_response.split( + 'Final Answer:') + output_answer = 'Final Answer:' + output_answer + conversation.append({ + 'system': output_response, + 'output': output_answer + }) + except Exception: + pass + return {'conversation': conversation} diff --git a/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/oasst1_map_fn.py 
b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/oasst1_map_fn.py new file mode 100644 index 0000000000000000000000000000000000000000..e1e13a01525c8beacc03cc27bb36745dbe63da58 --- /dev/null +++ b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/oasst1_map_fn.py @@ -0,0 +1,38 @@ +# Copyright (c) OpenMMLab. All rights reserved. +def oasst1_map_fn(example): + r"""Example before preprocessing: + example['text'] = '### Human: Can you explain xxx' + '### Assistant: Sure! xxx' + '### Human: I didn't understand how xxx' + '### Assistant: It has to do with a process xxx.' + + Example after preprocessing: + example['conversation'] = [ + { + 'input': 'Can you explain xxx', + 'output': 'Sure! xxx' + }, + { + 'input': 'I didn't understand how xxx', + 'output': 'It has to do with a process xxx.' + } + ] + """ + data = [] + for sentence in example['text'].strip().split('###'): + sentence = sentence.strip() + if sentence[:6] == 'Human:': + data.append(sentence[6:].strip()) + elif sentence[:10] == 'Assistant:': + data.append(sentence[10:].strip()) + if len(data) % 2: + # The last round of conversation solely consists of input + # without any output. + # Discard the input part of the last round, as this part is ignored in + # the loss calculation. + data.pop() + conversation = [] + for i in range(0, len(data), 2): + single_turn_conversation = {'input': data[i], 'output': data[i + 1]} + conversation.append(single_turn_conversation) + return {'conversation': conversation} diff --git a/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/openai_map_fn.py b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/openai_map_fn.py new file mode 100644 index 0000000000000000000000000000000000000000..468e738f707e0ecae75e89e6a18b91f39b466d56 --- /dev/null +++ b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/openai_map_fn.py @@ -0,0 +1,48 @@ +# Copyright (c) OpenMMLab. All rights reserved. +def openai_map_fn(example): + """ + Example before preprocessing: + example["messages"] = [ + { "role": "system", "content": "You are an assistant that + occasionally misspells words." }, + { "role": "user", "content": "Tell me a story." }, + { "role": "assistant", "content": "One day a student + went to schoool." } + ] + Example after preprocessing: + example["conversation"] = [ + { + "system": "You are an assistant that occasionally misspells + words.", + "input": "Tell me a story.", + "output": "One day a student went to schoool." 
+ } + ] + """ + messages = example['messages'] + system = '' + input = '' + conversation = [] + while messages and messages[0]['role'] == 'assistant': + # Skip the first one if it is from assistant + messages = messages[1:] + for msg in messages: + if msg['role'] == 'system': + system = msg['content'] + elif msg['role'] == 'user': + input += msg['content'] + elif msg['role'] == 'assistant': + output_with_loss = msg.get('loss', 'True') + output_with_loss = str(output_with_loss) + output_with_loss = output_with_loss.lower() == 'true' + conversation.append({ + 'system': system, + 'input': input, + 'output': msg['content'], + 'output_with_loss': output_with_loss + }) + system = '' + input = '' + else: + raise NotImplementedError + return {'conversation': conversation} diff --git a/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/openorca_map_fn.py b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/openorca_map_fn.py new file mode 100644 index 0000000000000000000000000000000000000000..45e58f3b9dd8e495c27050573eac4271eb7c746c --- /dev/null +++ b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/openorca_map_fn.py @@ -0,0 +1,9 @@ +# Copyright (c) OpenMMLab. All rights reserved. +def openorca_map_fn(example): + return { + 'conversation': [{ + 'system': example['system_prompt'], + 'input': example['question'], + 'output': example['response'] + }] + } diff --git a/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/pretrain_map_fn.py b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/pretrain_map_fn.py new file mode 100644 index 0000000000000000000000000000000000000000..861302ba8690074210ae8a751ba423075d10a240 --- /dev/null +++ b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/pretrain_map_fn.py @@ -0,0 +1,20 @@ +# Copyright (c) OpenMMLab. All rights reserved. +def pretrain_map_fn(example): + r"""Example before preprocessing: + example['text'] = 'xxx' + + Example after preprocessing: + example['conversation'] = [ + { + 'input': '', + 'output': 'xxx' + }, + ] + """ + return { + 'conversation': [{ + 'input': '', + 'output': example['text'].strip(), + 'need_eos_token': False + }] + } diff --git a/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/sql_map_fn.py b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/sql_map_fn.py new file mode 100644 index 0000000000000000000000000000000000000000..c83434f8de496a5a15f18c3038771070b0e4b608 --- /dev/null +++ b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/sql_map_fn.py @@ -0,0 +1,12 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from xtuner.utils import SYSTEM_TEMPLATE + + +def sql_map_fn(example): + return { + 'conversation': [{ + 'system': SYSTEM_TEMPLATE.sql, + 'input': '{context}\n{question}'.format(**example), + 'output': example['answer'] + }] + } diff --git a/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/stack_exchange_map_fn.py b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/stack_exchange_map_fn.py new file mode 100644 index 0000000000000000000000000000000000000000..9fc3520e2919283133afb7ec26ff009469f38475 --- /dev/null +++ b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/stack_exchange_map_fn.py @@ -0,0 +1,8 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
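# `openai_map_fn` above flattens OpenAI-style chat messages into xtuner's
# conversation format; a fabricated record with a per-message loss flag.
from xtuner.dataset.map_fns import openai_map_fn

record = {'messages': [
    {'role': 'system', 'content': 'Answer briefly.'},
    {'role': 'user', 'content': 'Capital of France?'},
    {'role': 'assistant', 'content': 'Paris.', 'loss': 'False'},
]}
print(openai_map_fn(record))
# one turn with system/input/output and output_with_loss=False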
+def stack_exchange_map_fn(example): + return { + 'conversation': [{ + 'input': example['question'], + 'output': example['response'] + }] + } diff --git a/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/tiny_codes_map_fn.py b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/tiny_codes_map_fn.py new file mode 100644 index 0000000000000000000000000000000000000000..fe0cc02b48c33ab3d9a0e717c293399f74cd6cfa --- /dev/null +++ b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/tiny_codes_map_fn.py @@ -0,0 +1,12 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from xtuner.utils import SYSTEM_TEMPLATE + + +def tiny_codes_map_fn(example): + return { + 'conversation': [{ + 'system': SYSTEM_TEMPLATE.coder, + 'input': example['prompt'], + 'output': example['response'] + }] + } diff --git a/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/wizardlm_map_fn.py b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/wizardlm_map_fn.py new file mode 100644 index 0000000000000000000000000000000000000000..0174760d006b3efe2240671da672e2367076d30b --- /dev/null +++ b/data/xtuner/xtuner/dataset/map_fns/dataset_map_fns/wizardlm_map_fn.py @@ -0,0 +1,17 @@ +# Copyright (c) OpenMMLab. All rights reserved. +def wizardlm_map_fn(example): + messages = example['conversations'] + input = '' + conversation = [] + while messages and messages[0]['from'] == 'gpt': + # Skip the first one if it is from gpt + messages = messages[1:] + for msg in messages: + if msg['from'] == 'human': + input += msg['value'] + elif msg['from'] == 'gpt': + conversation.append({'input': input, 'output': msg['value']}) + input = '' + else: + raise NotImplementedError + return {'conversation': conversation} diff --git a/data/xtuner/xtuner/dataset/map_fns/template_map_fn.py b/data/xtuner/xtuner/dataset/map_fns/template_map_fn.py new file mode 100644 index 0000000000000000000000000000000000000000..d7673b99efcdc2e1215303755401d68f570eedf2 --- /dev/null +++ b/data/xtuner/xtuner/dataset/map_fns/template_map_fn.py @@ -0,0 +1,36 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
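# `wizardlm_map_fn` above pairs each human turn with the gpt turn that
# follows it; a fabricated two-turn sample.
from xtuner.dataset.map_fns import wizardlm_map_fn

sample = {'conversations': [
    {'from': 'human', 'value': 'Hi.'},
    {'from': 'gpt', 'value': 'Hello!'},
    {'from': 'human', 'value': 'Name a color.'},
    {'from': 'gpt', 'value': 'Blue.'},
]}
print(wizardlm_map_fn(sample))
# {'conversation': [{'input': 'Hi.', 'output': 'Hello!'},
#                   {'input': 'Name a color.', 'output': 'Blue.'}]}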
+from functools import partial + +from mmengine.utils.misc import get_object_from_string + + +def template_map_fn(example, template): + conversation = example.get('conversation', []) + for i, single_turn_conversation in enumerate(conversation): + input = single_turn_conversation.get('input', '') + if input is None: + input = '' + input_text = template.INSTRUCTION.format(input=input, round=i + 1) + system = single_turn_conversation.get('system', '') + if system != '' and system is not None: + system = template.SYSTEM.format(system=system) + input_text = system + input_text + single_turn_conversation['input'] = input_text + + if template.get('SUFFIX', None): + output_text = single_turn_conversation.get('output', '') + output_text += template.SUFFIX + single_turn_conversation['output'] = output_text + + # SUFFIX_AS_EOS is False ==> need_eos_token is True + single_turn_conversation['need_eos_token'] = \ + not template.get('SUFFIX_AS_EOS', False) + single_turn_conversation['sep'] = template.get('SEP', '') + + return {'conversation': conversation} + + +def template_map_fn_factory(template): + if isinstance(template, str): # for resume + template = get_object_from_string(template) + return partial(template_map_fn, template=template) diff --git a/data/xtuner/xtuner/dataset/modelscope.py b/data/xtuner/xtuner/dataset/modelscope.py new file mode 100644 index 0000000000000000000000000000000000000000..9400050c34553dc8087a0f78e62918e47835d349 --- /dev/null +++ b/data/xtuner/xtuner/dataset/modelscope.py @@ -0,0 +1,16 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from mmengine.config import Config, ConfigDict + +from xtuner.registry import BUILDER +from .huggingface import process_hf_dataset + + +def process_ms_dataset(dataset, split='train', *args, **kwargs): + """Post-process the dataset loaded from the ModelScope Hub.""" + + if isinstance(dataset, (Config, ConfigDict)): + dataset = BUILDER.build(dataset) + if isinstance(dataset, dict): + dataset = dataset[split] + dataset = dataset.to_hf_dataset() + return process_hf_dataset(dataset, *args, **kwargs) diff --git a/data/xtuner/xtuner/dataset/moss_sft.py b/data/xtuner/xtuner/dataset/moss_sft.py new file mode 100644 index 0000000000000000000000000000000000000000..a5b7122bb700847dcab584e93b3ecc44c37404d3 --- /dev/null +++ b/data/xtuner/xtuner/dataset/moss_sft.py @@ -0,0 +1,129 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import copy +import json +import os + +import torch +from mmengine.config import Config, ConfigDict +from mmengine.logging import print_log +from torch.utils.data import Dataset +from tqdm import tqdm + +from xtuner.registry import BUILDER + + +class MOSSSFTDataset(Dataset): + + def __init__(self, data_file, tokenizer, max_length=2048, bot_name=None): + super().__init__() + self.bot_name = bot_name + self.src_data_file = data_file + if isinstance(tokenizer, dict) or isinstance( + tokenizer, Config) or isinstance(tokenizer, ConfigDict): + self.tokenizer = BUILDER.build(tokenizer) + else: + self.tokenizer = tokenizer + self.max_length = max_length + + self.data = [] + # We do not calculate losses for the meta instruction or results + # returned by plugins + # The token spans with label -100, [(span_start, span_end), ...] 
+ self.no_loss_spans = [] + self.labels = [] + + self.pre = len( + self.tokenizer.encode('<|Results|>:', add_special_tokens=False)) + self.post = len( + self.tokenizer.encode('\n', add_special_tokens=False)) + + self.load_data() + self.process_data() + + def load_data(self): + print_log('Loading MOSS SFT data...', 'current') + name = f'{self.tokenizer.__class__.__name__}_{self.bot_name}' + data_file = self.src_data_file.replace('.jsonl', f'_data_{name}') + no_loss_spans_file = self.src_data_file.replace( + '.jsonl', f'_no_loss_spans_{name}') + if os.path.exists(data_file) and os.path.exists(no_loss_spans_file): + self.data = torch.load(data_file, map_location='cpu') + self.no_loss_spans = torch.load( + no_loss_spans_file, map_location='cpu') + else: + with open(self.src_data_file) as f: + for line in tqdm(f): + sample = json.loads(line) + + chat = sample['chat'] + num_turns = int(sample['num_turns']) + + meta_instruction = sample['meta_instruction'] + if self.bot_name is not None: + meta_instruction = meta_instruction.replace( + 'MOSS', self.bot_name) + instruction_ids = self.tokenizer.encode(meta_instruction) + assert isinstance(instruction_ids, + list) and len(instruction_ids) > 0 + + input_ids = copy.deepcopy(instruction_ids) + no_loss_spans = [(0, len(instruction_ids))] + try: + for i in range(num_turns): + cur_turn_ids = [] + cur_no_loss_spans = [] + cur_turn = chat[f'turn_{i+1}'] + for key, value in cur_turn.items(): + if self.bot_name is not None: + value = value.replace( + 'MOSS', self.bot_name) + cur_ids = self.tokenizer.encode( + value, add_special_tokens=False) + if key == 'Tool Responses': + # The format tokens + # (<|Results|>:...\n) + # should have losses. + cur_no_loss_spans.append( + (len(input_ids + cur_turn_ids) + + self.pre, + len(input_ids + cur_turn_ids + + cur_ids) - self.post)) + + assert isinstance(cur_ids, + list) and len(cur_ids) > 0 + + cur_turn_ids.extend(cur_ids) + + if len(input_ids + cur_turn_ids) > self.max_length: + break + + input_ids.extend(cur_turn_ids) + no_loss_spans.extend(cur_no_loss_spans) + if len(input_ids) == len(instruction_ids): + continue + + assert len(input_ids) > 0 and len( + input_ids) <= self.max_length + + self.data.append(input_ids) + self.no_loss_spans.append(no_loss_spans) + except Exception: + pass + torch.save(self.data, data_file) + torch.save(self.no_loss_spans, no_loss_spans_file) + print_log( + f'Load data successfully, total {len(self.data)} training samples', + 'current') + + def process_data(self): + for item, no_loss in zip(self.data, self.no_loss_spans): + label = copy.deepcopy(item) + for loc in no_loss: + label[loc[0]:loc[1]] = [-100] * (loc[1] - loc[0]) + self.labels.append(label) + + def __len__(self): + return len(self.data) + + def __getitem__(self, index): + return {'input_ids': self.data[index], 'labels': self.labels[index]} diff --git a/data/xtuner/xtuner/dataset/preference_dataset.py b/data/xtuner/xtuner/dataset/preference_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..371ef829039742762ec7c725fb3a1acd4a57b420 --- /dev/null +++ b/data/xtuner/xtuner/dataset/preference_dataset.py @@ -0,0 +1,386 @@ +import copy +import json +import os +from datetime import timedelta +from functools import partial +from multiprocessing import Process, Queue +from typing import Callable, Dict, List + +import numpy as np +import torch.distributed as dist +import tqdm +from datasets import Dataset as HFDataset +from datasets import concatenate_datasets +from mmengine.config import Config, ConfigDict +from 
mmengine.logging import print_log +from mmengine.utils.misc import get_object_from_string +from torch.utils.data import Dataset +from transformers import AutoTokenizer + +from xtuner.registry import BUILDER, MAP_FUNC +from .huggingface import build_origin_dataset + + +def _worker( + tokenize_fun: Callable, + data_queue: Queue, + out_queue: Queue, +): + while True: + data_chunk = data_queue.get() + + if data_chunk is None: + out_queue.put(None) + break + chunk_results = [] + for idx, data in data_chunk: + chunk_results.append([idx, tokenize_fun(data)]) + out_queue.put(chunk_results) + + +def _chunk_data_to_queue(data_queue: Queue, data: List[Dict], chunk_size: int, + nproc): + data_iter = iter(data) + chunk_data = [] + while True: + try: + item = next(data_iter) + except StopIteration: + break + chunk_data.append(item) + if len(chunk_data) == chunk_size: + data_queue.put(chunk_data) + chunk_data = [] + if chunk_data: + data_queue.put(chunk_data) + + for _ in range(nproc): + data_queue.put(None) + + +def _multi_progress(tokenize_fun_p, dataset, nproc, task_num, chunksize, + description): + processes = [] + data_queue = Queue() + output_queue = Queue() + bar = tqdm.tqdm(total=task_num, desc=description) + # task_id = bar.add_task(total=task_num, description=description) + dataset = enumerate(dataset) + _chunk_data_to_queue(data_queue, dataset, chunksize, nproc) + for _ in range(nproc): + process = Process( + target=_worker, args=(tokenize_fun_p, data_queue, output_queue)) + process.start() + processes.append(process) + + results = [] + finished_process = 0 + while finished_process < nproc: + chunk_results = output_queue.get() + if chunk_results is None: + finished_process += 1 + continue + results.extend(chunk_results) + bar.update(len(chunk_results)) + bar.refresh() + results = map(lambda x: x[1], sorted(results, key=lambda x: x[0])) + return results + + +def load_jsonl_dataset(data_files=None, data_dir=None, suffix=None): + assert (data_files is not None) != (data_dir is not None) + if data_dir is not None: + data_files = os.listdir(data_dir) + data_files = [os.path.join(data_dir, fn) for fn in data_files] + if suffix is not None: + data_files = [fp for fp in data_files if fp.endswith(suffix)] + elif isinstance(data_files, str): + data_files = [data_files] + + dataset_list = [] + for fp in data_files: + with open(fp, encoding='utf-8') as file: + data = [json.loads(line) for line in file] + ds = HFDataset.from_list(data) + dataset_list.append(ds) + dataset = concatenate_datasets(dataset_list) + return dataset + + +def tokenize(pair: str, + tokenizer: AutoTokenizer, + max_length: int, + is_reward: bool = False, + reward_token_id: int = -1): + prompt = tokenizer.apply_chat_template( + pair['prompt'], tokenize=False, add_generation_prompt=True) + chosen = tokenizer.apply_chat_template( + pair['prompt'] + pair['chosen'], + tokenize=False, + add_generation_prompt=False) + rejected = tokenizer.apply_chat_template( + pair['prompt'] + pair['rejected'], + tokenize=False, + add_generation_prompt=False) + prompt_ids = tokenizer.encode(prompt, add_special_tokens=False) + chosen_ids = tokenizer.encode(chosen, add_special_tokens=False) + rejected_ids = tokenizer.encode(rejected, add_special_tokens=False) + + if len(chosen_ids) > max_length: + chosen_ids = chosen_ids[:max_length] + if len(rejected_ids) > max_length: + rejected_ids = rejected_ids[:max_length] + + if is_reward: + # reward label + chosen_ids = chosen_ids + [reward_token_id] + rejected_ids = rejected_ids + [reward_token_id] + chosen_labels = 
[-100] * len(chosen_ids[:-1]) + [0] + rejected_labels = [-100] * len(rejected_ids[:-1]) + [1] + else: + # dpo label + prompt_len = min(len(prompt_ids), max_length) + chosen_labels = [-100] * prompt_len + copy.deepcopy( + chosen_ids[prompt_len:]) + rejected_labels = [-100] * prompt_len + copy.deepcopy( + rejected_ids[prompt_len:]) + + return { + 'chosen_ids': chosen_ids, + 'rejected_ids': rejected_ids, + 'chosen_labels': chosen_labels, + 'rejected_labels': rejected_labels, + } + + +class PreferenceDataset(Dataset): + + def __init__( + self, + dataset: HFDataset, + tokenizer: AutoTokenizer, + max_length: int, + is_dpo: bool = True, + is_reward: bool = False, + reward_token_id: int = -1, + num_proc: int = 32, + ) -> None: + self.max_length = max_length + assert is_dpo != is_reward, \ + 'Only one of is_dpo and is_reward can be True' + if is_reward: + assert reward_token_id != -1, \ + 'reward_token_id should be set if is_reward is True' + + self.is_dpo = is_dpo + self.is_reward = is_reward + self.reward_token_id = reward_token_id + self.tokenized_pairs = [] + + for tokenized_pair in _multi_progress( + partial( + tokenize, + tokenizer=tokenizer, + max_length=max_length, + is_reward=is_reward, + reward_token_id=reward_token_id), + dataset, + nproc=num_proc, + task_num=len(dataset), + chunksize=num_proc, + description='Tokenizing dataset'): + self.tokenized_pairs.append(tokenized_pair) + + def __len__(self): + return len(self.tokenized_pairs) + + def __getitem__(self, idx): + return self.tokenized_pairs[idx] + + +class PackedDatasetWrapper(Dataset): + + def __init__(self, + dataset, + max_packed_length=16384, + shuffle_before_pack=True) -> None: + super().__init__() + self.max_packed_length = max_packed_length + self.lengths = [] + self.data = [] + + indices = np.arange(len(dataset)) + if shuffle_before_pack: + np.random.shuffle(indices) + + data_bin = [] + bin_seq_len = 0 + removed = 0 + for idx in indices: + data = dataset[int(idx)] + cur_len = len(data['chosen_ids']) + len(data['rejected_ids']) + if cur_len > max_packed_length: + print_log( + f'sequence length {cur_len} is ' + f'larger than max_packed_length {max_packed_length}', + logger='current') + removed += 1 + continue + if (bin_seq_len + + cur_len) > max_packed_length and len(data_bin) > 0: + self.data.append(data_bin) + self.lengths.append(bin_seq_len) + data_bin = [] + bin_seq_len = 0 + data_bin.append(data) + bin_seq_len += cur_len + + if len(data_bin) > 0: + self.data.append(data_bin) + self.lengths.append(bin_seq_len) + if removed > 0: + print_log( + f'removed {removed} samples because ' + f'of length larger than {max_packed_length}', + logger='current') + print_log( + f'The batch numbers of dataset is changed ' + f'from {len(dataset)} to {len(self)} after' + ' using var len attention.', + logger='current') + + def __len__(self): + return len(self.data) + + def __getitem__(self, index): + pairs = self.data[index] + input_ids, cu_seqlens, position_ids, labels = [], [0], [], [] + + for pair in pairs: + input_ids.extend(pair['chosen_ids']) + input_ids.extend(pair['rejected_ids']) + + position_ids.extend(list(range(len(pair['chosen_ids'])))) + position_ids.extend(list(range(len(pair['rejected_ids'])))) + + labels.extend(pair['chosen_labels']) + labels.extend(pair['rejected_labels']) + + cu_seqlens.append(cu_seqlens[-1] + len(pair['chosen_ids'])) + cu_seqlens.append(cu_seqlens[-1] + len(pair['rejected_ids'])) + + return { + 'input_ids': input_ids, + 'labels': labels, + 'position_ids': position_ids, + 'cumulative_len': cu_seqlens + } + + 
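# A worked check of the `cumulative_len` bookkeeping in
# `PackedDatasetWrapper.__getitem__` above, with two fabricated pairs whose
# (chosen, rejected) lengths are (3, 2) and (4, 1).
lengths = [(3, 2), (4, 1)]
cu_seqlens = [0]
for chosen_len, rejected_len in lengths:
    cu_seqlens.append(cu_seqlens[-1] + chosen_len)
    cu_seqlens.append(cu_seqlens[-1] + rejected_len)
print(cu_seqlens)  # [0, 3, 5, 9, 10] -- one boundary per packed sub-sequence
# position_ids restart at 0 at each boundary, which is what var-len
# attention uses to keep the packed sequences independent.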
+def unpack_seq(seq, cu_seqlens): + """Unpack a packed sequence to a list of sequences with different + lengths.""" + seqlens = (cu_seqlens[1:] - cu_seqlens[:-1]).tolist() + subseqs = seq.split(seqlens) + return subseqs + + +def broad_cast_dataset(dataset): + xtuner_dataset_timeout = timedelta( + minutes=int(os.getenv('XTUNER_DATASET_TIMEOUT', default=60))) + print_log( + f'xtuner_dataset_timeout = {xtuner_dataset_timeout}', logger='current') + using_dist = dist.is_available() and dist.is_initialized() + if using_dist: + # monitored barrier requires gloo process group to perform host-side sync. # noqa + group_gloo = dist.new_group( + backend='gloo', timeout=xtuner_dataset_timeout) + if not using_dist or dist.get_rank() == 0: + objects = [dataset] + else: + objects = [None] + if using_dist: + dist.monitored_barrier( + group=group_gloo, timeout=xtuner_dataset_timeout) + dist.broadcast_object_list(objects, src=0) + return objects[0] + + +def map_dataset(dataset, dataset_map_fn, map_num_proc): + if isinstance(dataset_map_fn, str): + map_fn_obj = MAP_FUNC.get(dataset_map_fn) or get_object_from_string( + dataset_map_fn) + if map_fn_obj is not None: + dataset_map_fn = map_fn_obj + else: + raise TypeError('dataset_map_fn must be a function or a ' + "registered function's string in MAP_FUNC, " + f"but got a string of '{dataset_map_fn}'") + + dataset = dataset.map(dataset_map_fn, num_proc=map_num_proc) + return dataset + + +def build_preference_dataset( + dataset: str, + tokenizer: AutoTokenizer, + max_length: int, + dataset_map_fn: Callable = None, + is_dpo: bool = True, + is_reward: bool = False, + reward_token_id: int = -1, + num_proc: int = 32, + use_varlen_attn: bool = False, + max_packed_length: int = 16384, + shuffle_before_pack: bool = True, +) -> Dataset: + using_dist = dist.is_available() and dist.is_initialized() + tokenized_ds = None + if not using_dist or dist.get_rank() == 0: + if isinstance(tokenizer, dict) or isinstance( + tokenizer, Config) or isinstance(tokenizer, ConfigDict): + tokenizer = BUILDER.build(tokenizer) + + dataset = build_origin_dataset(dataset, split='train') + if dataset_map_fn is not None: + dataset = map_dataset( + dataset, dataset_map_fn, map_num_proc=num_proc) + + tokenized_ds = PreferenceDataset( + dataset=dataset, + tokenizer=tokenizer, + max_length=max_length, + is_dpo=is_dpo, + is_reward=is_reward, + reward_token_id=reward_token_id, + num_proc=num_proc, + ) + if use_varlen_attn: + tokenized_ds = PackedDatasetWrapper( + dataset=tokenized_ds, + max_packed_length=max_packed_length, + shuffle_before_pack=shuffle_before_pack, + ) + tokenized_ds = broad_cast_dataset(tokenized_ds) + return tokenized_ds + + +def intel_orca_dpo_map_fn(example): + prompt = [{ + 'role': 'system', + 'content': example['system'] + }, { + 'role': 'user', + 'content': example['question'] + }] + chosen = [{'role': 'assistant', 'content': example['chosen']}] + rejected = [{'role': 'assistant', 'content': example['rejected']}] + return {'prompt': prompt, 'chosen': chosen, 'rejected': rejected} + + +def orpo_dpo_mix_40k_map_fn(example): + assert len(example['chosen']) == len(example['rejected']) + prompt = example['chosen'][:-1] + chosen = example['chosen'][-1:] + rejected = example['rejected'][-1:] + return {'prompt': prompt, 'chosen': chosen, 'rejected': rejected} diff --git a/data/xtuner/xtuner/dataset/refcoco_json.py b/data/xtuner/xtuner/dataset/refcoco_json.py new file mode 100644 index 0000000000000000000000000000000000000000..e32f08ae459a21697e5a1736ad8a19bafaf767e5 --- /dev/null +++ 
b/data/xtuner/xtuner/dataset/refcoco_json.py @@ -0,0 +1,496 @@ +import copy +import itertools +import json +import os +import pickle +import time +from collections import defaultdict + +import matplotlib.pyplot as plt +import numpy as np +import skimage.io as io +import torch +from datasets import Dataset as HFDataset +from datasets import DatasetDict +from matplotlib.patches import Polygon, Rectangle +from mmengine.config import Config, ConfigDict +from PIL import Image + +from xtuner.registry import BUILDER +from ..registry import BUILDER +from .huggingface import process_hf_dataset +from .llava import LLaVADataset +from .utils import expand2square + + +class RefCOCOJsonDataset(LLaVADataset): + instruction_pool = [ + '[refer] {}', + '[refer] give me the location of {}', + '[refer] where is {} ?', + '[refer] from this image, tell me the location of {}', + '[refer] the location of {} is', + '[refer] could you tell me the location for {} ?', + '[refer] where can I locate the {} ?', + ] + + def __init__( + self, + data_path, + image_folder, + tokenizer, + image_processor, + max_dataset_length=None, + dataset_map_fn=None, + template_map_fn=None, + max_length=2048, + pad_image_to_square=False, + ): + json_data = json.load(open(data_path)) + + ###################################################### + # Only this part is different from LLaVADataset.__init__ + json_data = self.reformat_data(json_data) + ###################################################### + + for idx in range(len(json_data)): + if isinstance(json_data[idx]['id'], int): + json_data[idx]['id'] = str(json_data[idx]['id']) + json_data = DatasetDict({'train': HFDataset.from_list(json_data)}) + self.text_data = process_hf_dataset( + dataset=json_data, + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=dataset_map_fn, + template_map_fn=template_map_fn, + split='train', + max_dataset_length=max_dataset_length, + remove_unused_columns=False, + pack_to_max_length=False, + with_image_token=True) + + self.image_folder = image_folder + if isinstance(image_processor, dict) or isinstance( + image_processor, Config) or isinstance(image_processor, + ConfigDict): + self.image_processor = BUILDER.build(image_processor) + else: + self.image_processor = image_processor + self.pad_image_to_square = pad_image_to_square + + def reformat_data(self, json_data): + new_json_data = [] + for sample in json_data: + for instruction_template in self.instruction_pool: + sample['conversations'] = self.gen_refcoco_conversations( + sample, instruction_template) + new_json_data.append(copy.deepcopy(sample)) + return new_json_data + + @classmethod + def gen_refcoco_conversations(cls, data, instruction_template='{}'): + """build conversition data from refcoco json data as below. 
+ + "id": "xxx", + "image": "xxx.jpg", + "conversations": [ + { + "from": "human", + "value": "xxxx" + }, + { + "from": "gpt", + "value": "xxx" + } + """ + + conversation = [ + { + 'from': 'human', + 'value': '' + }, + { + 'from': 'gpt', + 'value': '' + }, + ] + + instruction = instruction_template.format(data['sents']) + bbox = cls.normalize_bbox(data['bbox'], data['height'], data['width']) + answer = '{{<{}><{}><{}><{}>}}'.format(bbox[0], bbox[1], bbox[2], + bbox[3]) + conversation[0]['value'] = instruction + '\n' + conversation[1]['value'] = answer + return conversation + + @classmethod + def get_data_json( + cls, + ann_path, + image_path, + dataset='refcoco', + splitBy='unc', + ): + refer = REFER(ann_path, image_path, dataset, splitBy) + ref_ids = refer.getRefIds(split='train') + + data = {} + duplicate_data = defaultdict(list) + + for ref_id in ref_ids: + ref = refer.loadRefs(ref_id)[0] + + image_id = '{:0>12}'.format(ref['image_id']) + sents = [sent['raw'] for sent in ref['sentences']] + bbox = refer.getRefBox(ref['ref_id']) + + image = Image.open(image_path + '/' + image_id + '.jpg') + + for sent in sents: + sent_id = '_'.join(sent.split(' ')) + data_id = f'{dataset}-{splitBy}-{image_id}-{sent_id}' + data_item = { + 'id': data_id, + 'image': 'coco/train2017/' + image_id + '.jpg', + 'sents': sent, + 'bbox': bbox, + 'height': image.height, + 'width': image.width + } + if data_id in data: + duplicate_data[data_id].append(data_item) + else: + data[data_id] = data_item + + return list(data.values()), list(duplicate_data.values()) + + @classmethod + def normalize_bbox(cls, bbox, height, width): + x, y, w, h = bbox + + bbox = [x / width, y / height, (x + w) / width, (y + h) / height] + bbox = [int(x * 100) for x in bbox] + return bbox + + +class RefCOCOJsonEvalDataset(RefCOCOJsonDataset): + instruction_pool = ['[refer] give me the location of {}'] + + def reformat_data(self, json_data): + for sample in json_data: + # reformat img_id + img_id = sample['img_id'].split('_')[-2] + sample['image'] = 'coco/train2017/' + img_id + '.jpg' + sample['id'] = f"{img_id}-{sample['sents']}" + return super().reformat_data(json_data) + + +class InvRefCOCOJsonDataset(RefCOCOJsonDataset): + instruction_pool = [ + '[identify] {}', + '[identify] what object is in this location {}', + '[identify] identify the object present at this location {}', + '[identify] what is it in {}', + '[identify] describe this object in {}', + '[identify] this {} is', + '[identify] the object in {} is', + ] + + @classmethod + def gen_refcoco_conversations(cls, data, instruction_template='{}'): + """build conversition data from refcoco json data as below. 
+ + "id": "xxx", + "image": "xxx.jpg", + "conversations": [ + { + "from": "human", + "value": "xxxx" + }, + { + "from": "gpt", + "value": "xxx" + } + ] + """ + + conversation = [ + { + 'from': 'human', + 'value': '' + }, + { + 'from': 'gpt', + 'value': '' + }, + ] + bbox = cls.normalize_bbox(data['bbox'], data['height'], data['width']) + bbox_str = '{{<{}><{}><{}><{}>}}'.format(bbox[0], bbox[1], bbox[2], + bbox[3]) + instruction = instruction_template.format(bbox_str) + answer = data['sents'] + + conversation[0]['value'] = instruction + '\n' + conversation[1]['value'] = answer + return conversation + + +# flake8: noqa +# Refer + + +class REFER: + + def __init__(self, data_root, vis_root, dataset='refcoco', splitBy='unc'): + # provide data_root folder which contains refclef, refcoco, refcoco+ and refcocog + # also provide dataset name and splitBy information + # e.g., dataset = 'refcoco', splitBy = 'unc' + # inv dataset is stored in the same path as normal dataset + dataset = dataset.split('inv')[-1] + print('loading dataset %s into memory...' % dataset) + self.ann_dir = os.path.join(data_root, dataset) + if dataset in ['refcoco', 'refcoco+', 'refcocog']: + self.vis_root = vis_root + elif dataset == 'refclef': + raise FileNotFoundError('No RefClef image data') + else: + raise KeyError('No refer dataset is called [%s]' % dataset) + + # load refs from data/dataset/refs(dataset).json + tic = time.time() + ref_file = os.path.join(self.ann_dir, 'refs(' + splitBy + ').p') + self.data = {} + self.data['dataset'] = dataset + self.data['refs'] = pickle.load(open(ref_file, 'rb')) + + # load annotations from data/dataset/instances.json + instances_file = os.path.join(self.ann_dir, 'instances.json') + instances = json.load(open(instances_file)) + self.data['images'] = instances['images'] + self.data['annotations'] = instances['annotations'] + self.data['categories'] = instances['categories'] + + # create index + self.createIndex() + print('DONE (t=%.2fs)' % (time.time() - tic)) + + def createIndex(self): + # create sets of mapping + # 1) Refs: {ref_id: ref} + # 2) Anns: {ann_id: ann} + # 3) Imgs: {image_id: image} + # 4) Cats: {category_id: category_name} + # 5) Sents: {sent_id: sent} + # 6) imgToRefs: {image_id: refs} + # 7) imgToAnns: {image_id: anns} + # 8) refToAnn: {ref_id: ann} + # 9) annToRef: {ann_id: ref} + # 10) catToRefs: {category_id: refs} + # 11) sentToRef: {sent_id: ref} + # 12) sentToTokens: {sent_id: tokens} + print('creating index...') + # fetch info from instances + Anns, Imgs, Cats, imgToAnns = {}, {}, {}, {} + for ann in self.data['annotations']: + Anns[ann['id']] = ann + imgToAnns[ann['image_id']] = imgToAnns.get(ann['image_id'], + []) + [ann] + for img in self.data['images']: + Imgs[img['id']] = img + for cat in self.data['categories']: + Cats[cat['id']] = cat['name'] + + # fetch info from refs + Refs, imgToRefs, refToAnn, annToRef, catToRefs = {}, {}, {}, {}, {} + Sents, sentToRef, sentToTokens = {}, {}, {} + for ref in self.data['refs']: + # ids + ref_id = ref['ref_id'] + ann_id = ref['ann_id'] + category_id = ref['category_id'] + image_id = ref['image_id'] + + # add mapping related to ref + Refs[ref_id] = ref + imgToRefs[image_id] = imgToRefs.get(image_id, []) + [ref] + catToRefs[category_id] = catToRefs.get(category_id, []) + [ref] + refToAnn[ref_id] = Anns[ann_id] + annToRef[ann_id] = ref + + # add mapping of sent + for sent in ref['sentences']: + Sents[sent['sent_id']] = sent + sentToRef[sent['sent_id']] = ref + sentToTokens[sent['sent_id']] = sent['tokens'] + + # create class members + self.Refs = Refs +
self.Anns = Anns + self.Imgs = Imgs + self.Cats = Cats + self.Sents = Sents + self.imgToRefs = imgToRefs + self.imgToAnns = imgToAnns + self.refToAnn = refToAnn + self.annToRef = annToRef + self.catToRefs = catToRefs + self.sentToRef = sentToRef + self.sentToTokens = sentToTokens + print('index created.') + + def getRefIds(self, image_ids=[], cat_ids=[], ref_ids=[], split=''): + image_ids = image_ids if type(image_ids) == list else [image_ids] + cat_ids = cat_ids if type(cat_ids) == list else [cat_ids] + ref_ids = ref_ids if type(ref_ids) == list else [ref_ids] + + if len(image_ids) == len(cat_ids) == len(ref_ids) == len(split) == 0: + refs = self.data['refs'] + else: + if not len(image_ids) == 0: + refs = [self.imgToRefs[image_id] for image_id in image_ids] + else: + refs = self.data['refs'] + if not len(cat_ids) == 0: + refs = [ref for ref in refs if ref['category_id'] in cat_ids] + if not len(ref_ids) == 0: + refs = [ref for ref in refs if ref['ref_id'] in ref_ids] + if not len(split) == 0: + if split in ['testA', 'testB', 'testC']: + refs = [ref for ref in refs if split[-1] in ref['split'] + ] # we also consider testAB, testBC, ... + elif split in ['testAB', 'testBC', 'testAC']: + # rarely used I guess... + refs = [ref for ref in refs if ref['split'] == split] + elif split == 'test': + refs = [ref for ref in refs if 'test' in ref['split']] + elif split == 'train' or split == 'val': + refs = [ref for ref in refs if ref['split'] == split] + else: + raise KeyError('No such split [%s]' % split) + ref_ids = [ref['ref_id'] for ref in refs] + return ref_ids + + def getAnnIds(self, image_ids=[], cat_ids=[], ref_ids=[]): + image_ids = image_ids if type(image_ids) == list else [image_ids] + cat_ids = cat_ids if type(cat_ids) == list else [cat_ids] + ref_ids = ref_ids if type(ref_ids) == list else [ref_ids] + + if len(image_ids) == len(cat_ids) == len(ref_ids) == 0: + ann_ids = [ann['id'] for ann in self.data['annotations']] + else: + if not len(image_ids) == 0: + lists = [ + self.imgToAnns[image_id] for image_id in image_ids + if image_id in self.imgToAnns + ] # list of [anns] + anns = list(itertools.chain.from_iterable(lists)) + else: + anns = self.data['annotations'] + if not len(cat_ids) == 0: + anns = [ann for ann in anns if ann['category_id'] in cat_ids] + ann_ids = [ann['id'] for ann in anns] + if not len(ref_ids) == 0: + # keep only annotations referenced by the given ref_ids + ann_ids = list( + set(ann_ids).intersection( + {self.Refs[ref_id]['ann_id'] + for ref_id in ref_ids})) + return ann_ids + + def getImgIds(self, ref_ids=[]): + ref_ids = ref_ids if type(ref_ids) == list else [ref_ids] + + if not len(ref_ids) == 0: + image_ids = list( + {self.Refs[ref_id]['image_id'] + for ref_id in ref_ids}) + else: + image_ids = self.Imgs.keys() + return image_ids + + def getCatIds(self): + return self.Cats.keys() + + def loadRefs(self, ref_ids=[]): + if type(ref_ids) == list: + return [self.Refs[ref_id] for ref_id in ref_ids] + elif type(ref_ids) == int: + return [self.Refs[ref_ids]] + + def loadAnns(self, ann_ids=[]): + if type(ann_ids) == list: + return [self.Anns[ann_id] for ann_id in ann_ids] + elif type(ann_ids) == int: + return [self.Anns[ann_ids]] + + def loadImgs(self, image_ids=[]): + if type(image_ids) == list: + return [self.Imgs[image_id] for image_id in image_ids] + elif type(image_ids) == int: + return [self.Imgs[image_ids]] + + def loadCats(self, cat_ids=[]): + if type(cat_ids) == list: + return [self.Cats[cat_id] for cat_id in cat_ids] + elif type(cat_ids) == int: + return [self.Cats[cat_ids]] + + def getRefBox(self, ref_id): + ref = self.Refs[ref_id] + ann =
self.refToAnn[ref_id] + return ann['bbox'] # [x, y, w, h] + + def showRef(self, ref, seg_box='box'): + from matplotlib.collections import PatchCollection + + ax = plt.gca() + # show image + image = self.Imgs[ref['image_id']] + I = io.imread(os.path.join(self.vis_root, image['file_name'])) + ax.imshow(I) + # show refer expression + for sid, sent in enumerate(ref['sentences']): + print('{}. {}'.format(sid + 1, sent['sent'])) + # show segmentations + if seg_box == 'seg': + ann_id = ref['ann_id'] + ann = self.Anns[ann_id] + polygons = [] + color = [] + c = 'none' + if type(ann['segmentation'][0]) == list: + # polygon used for refcoco* + for seg in ann['segmentation']: + poly = np.array(seg).reshape((len(seg) // 2, 2)) + polygons.append(Polygon(poly, True, alpha=0.4)) + color.append(c) + p = PatchCollection( + polygons, + facecolors=color, + edgecolors=(1, 1, 0, 0), + linewidths=3, + alpha=1, + ) + ax.add_collection(p) # thick yellow polygon + p = PatchCollection( + polygons, + facecolors=color, + edgecolors=(1, 0, 0, 0), + linewidths=1, + alpha=1, + ) + ax.add_collection(p) # thin red polygon + else: + # mask used for refclef + raise NotImplementedError('RefClef is not downloaded') + # show bounding-box + elif seg_box == 'box': + ann_id = ref['ann_id'] + ann = self.Anns[ann_id] + bbox = self.getRefBox(ref['ref_id']) + box_plot = Rectangle( + (bbox[0], bbox[1]), + bbox[2], + bbox[3], + fill=False, + edgecolor='green', + linewidth=3, + ) + ax.add_patch(box_plot) diff --git a/data/xtuner/xtuner/dataset/samplers/__init__.py b/data/xtuner/xtuner/dataset/samplers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8afc9bc1e2bbaae2e00a530302c24106400f2ace --- /dev/null +++ b/data/xtuner/xtuner/dataset/samplers/__init__.py @@ -0,0 +1,4 @@ +from .intern_repo import InternlmRepoSampler, InternRepoSampler +from .length_grouped import LengthGroupedSampler + +__all__ = ['LengthGroupedSampler', 'InternRepoSampler', 'InternlmRepoSampler'] diff --git a/data/xtuner/xtuner/dataset/samplers/intern_repo.py b/data/xtuner/xtuner/dataset/samplers/intern_repo.py new file mode 100644 index 0000000000000000000000000000000000000000..933719a58e5c8efa46d14bc5080bd7ed1e9b0ce4 --- /dev/null +++ b/data/xtuner/xtuner/dataset/samplers/intern_repo.py @@ -0,0 +1,81 @@ +import logging +import warnings +from typing import Iterator, Optional, Sized + +import numpy as np +from mmengine import print_log +from torch.utils.data import Sampler + +from xtuner.parallel.sequence import (get_data_parallel_rank, + get_data_parallel_world_size) + + +class InternRepoSampler(Sampler): + + def __init__(self, + dataset: Sized, + shuffle: bool = True, + seed: Optional[int] = None) -> None: + if seed is not None and seed != 1024: + warnings.warn('For alignment accuracy, seed in InternRepoSampler ' + 'must be set to 1024.') + world_size = get_data_parallel_world_size() + rank = get_data_parallel_rank() + self.rank = rank + self.world_size = world_size + + self.dataset = dataset + self.shuffle = shuffle + self.seed = 1024 + self.epoch = 0 + + self.num_samples = len(self.dataset) // world_size + self.total_size = self.num_samples * world_size + + def __iter__(self) -> Iterator[int]: + """Iterate the indices.""" + # deterministically shuffle based on epoch and seed + if self.shuffle: + rng = np.random.RandomState(self.seed + self.epoch) + indices = np.arange(len(self.dataset)) + rng.shuffle(indices) + indices = indices.tolist() + else: + indices = np.arange(len(self.dataset)).tolist() + + self.indices =
indices[:self.total_size] + + # subsample + indices = indices[self.rank:self.total_size:self.world_size] + self.subsample_indices = indices + + return iter(indices) + + def __len__(self) -> int: + """The number of samples in this rank.""" + return self.num_samples + + def set_epoch(self, epoch: int) -> None: + """Sets the epoch for this sampler. + + When :attr:`shuffle=True`, this ensures all replicas use a different + random ordering for each epoch. Otherwise, the next iteration of this + sampler will yield the same ordering. + + Args: + epoch (int): Epoch number. + """ + self.epoch = epoch + + +class InternlmRepoSampler(InternRepoSampler): + + def __init__(self, + dataset: Sized, + shuffle: bool = True, + seed: Optional[int] = None) -> None: + super().__init__(dataset, shuffle, seed) + print_log(('InternlmRepoSampler will be deprecated in the future. ' + 'Please use InternRepoSampler instead.'), + logger='current', + level=logging.WARNING) diff --git a/data/xtuner/xtuner/dataset/samplers/length_grouped.py b/data/xtuner/xtuner/dataset/samplers/length_grouped.py new file mode 100644 index 0000000000000000000000000000000000000000..184827837cf062972d6b024940ba6d252577efd4 --- /dev/null +++ b/data/xtuner/xtuner/dataset/samplers/length_grouped.py @@ -0,0 +1,164 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import math +from typing import Iterator, Optional, Sized + +import torch +from mmengine.dist import get_dist_info, sync_random_seed +from mmengine.logging import print_log +from torch.utils.data import ConcatDataset as TorchConcatDataset +from torch.utils.data import Sampler + + +def get_length_grouped_indices(lengths, group_batch_size, generator=None): + + def process(lengths, group_batch_size, generator=None): + indices = torch.randperm(len(lengths), generator=generator) + megabatches = [ + indices[i:i + group_batch_size].tolist() + for i in range(0, len(lengths), group_batch_size) + ] + megabatches = [ + sorted(megabatch, key=lambda i: lengths[i], reverse=True) + for megabatch in megabatches + ] + return megabatches + + assert all(leng != 0 for leng in lengths), 'Should not have zero length.' + if all(leng > 0 for leng in lengths) or all(leng < 0 for leng in lengths): + # all samples are in the same modality + megabatches = process(lengths, group_batch_size, generator=generator) + else: + mm_indices, mm_lengths = zip(*[(i, l) for i, l in enumerate(lengths) + if l > 0]) + lang_indices, lang_lengths = zip(*[(i, -l) + for i, l in enumerate(lengths) + if l < 0]) + mm_megabatches = [] + for mm_megabatch in process( + mm_lengths, group_batch_size, generator=generator): + mm_megabatches.append([mm_indices[i] for i in mm_megabatch]) + lang_megabatches = [] + for lang_megabatch in process( + lang_lengths, group_batch_size, generator=generator): + lang_megabatches.append([lang_indices[i] for i in lang_megabatch]) + + last_mm = mm_megabatches[-1] + last_lang = lang_megabatches[-1] + last_batch = last_mm + last_lang + megabatches = mm_megabatches[:-1] + lang_megabatches[:-1] + + megabatch_indices = torch.randperm( + len(megabatches), generator=generator) + megabatches = [megabatches[i] for i in megabatch_indices] + + if len(last_batch) > 0: + megabatches.append( + sorted( + last_batch, key=lambda i: abs(lengths[i]), reverse=True)) + + # The rest is to get the biggest batch first.
+ # Since each megabatch is sorted by descending length, + # the longest element is the first + megabatch_maximums = [ + abs(lengths[megabatch[0]]) for megabatch in megabatches + ] + max_idx = torch.argmax(torch.tensor(megabatch_maximums)).item() + # Switch to put the longest element in first position + megabatches[0][0], megabatches[max_idx][0] = megabatches[max_idx][ + 0], megabatches[0][0] + + return [i for megabatch in megabatches for i in megabatch] + + +class LengthGroupedSampler(Sampler): + + def __init__(self, + dataset: Sized, + per_device_batch_size: int, + length_property='length', + mega_batch_mult: Optional[int] = None, + seed: Optional[int] = None, + round_up: bool = True) -> None: + print_log('LengthGroupedSampler is used.', logger='current') + rank, world_size = get_dist_info() + self.rank = rank + self.world_size = world_size + + self.dataset = dataset + if seed is None: + seed = sync_random_seed() + self.seed = seed + self.epoch = 0 + self.round_up = round_up + + if self.round_up: + num_iters = math.ceil( + len(self.dataset) / world_size / per_device_batch_size) + self.num_samples = num_iters * per_device_batch_size + self.total_size = self.num_samples * self.world_size + else: + self.num_samples = math.ceil( + (len(self.dataset) - rank) / world_size) + self.total_size = len(self.dataset) + + total_batch_size = per_device_batch_size * self.world_size + if mega_batch_mult is None: + # Default for mega_batch_mult: 50 or the number to get 4 + # megabatches, whichever is smaller. + mega_batch_mult = min( + len(self.dataset) // (total_batch_size * 4), 50) + # Just in case, for tiny datasets + if mega_batch_mult == 0: + mega_batch_mult = 1 + self.group_batch_size = mega_batch_mult * total_batch_size + + if isinstance(self.dataset, TorchConcatDataset): + length = [] + for sub_dataset in self.dataset.datasets: + length.extend(getattr(sub_dataset, length_property)) + self.length = length + else: + self.length = getattr(self.dataset, length_property) + assert isinstance(self.length, (list, tuple)) + + self.total_batch_size = total_batch_size + print_log( + f'LengthGroupedSampler construction is complete, ' + f'and the selected attribute is {length_property}', + logger='current') + + def __iter__(self) -> Iterator[int]: + """Iterate the indices.""" + generator = torch.Generator() + generator.manual_seed(self.seed + self.epoch) + indices = get_length_grouped_indices( + lengths=self.length, + group_batch_size=self.group_batch_size, + generator=generator) + assert len(set(indices)) == len(indices) + # add extra samples to make it evenly divisible + if self.round_up: + indices = ( + indices * + int(self.total_size / len(indices) + 1))[:self.total_size] + # subsample + assert len(indices) == self.total_size + indices = indices[self.rank:self.total_size:self.world_size] + assert len(indices) == self.num_samples + return iter(indices) + + def __len__(self) -> int: + """The number of samples in this rank.""" + return self.num_samples + + def set_epoch(self, epoch: int) -> None: + """Sets the epoch for this sampler. + + When :attr:`shuffle=True`, this ensures all replicas use a different + random ordering for each epoch. Otherwise, the next iteration of this + sampler will yield the same ordering. + + Args: + epoch (int): Epoch number. 
+ """ + self.epoch = epoch diff --git a/data/xtuner/xtuner/dataset/utils.py b/data/xtuner/xtuner/dataset/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..84336ddb2f61e53535ef57f5b8660279cabda055 --- /dev/null +++ b/data/xtuner/xtuner/dataset/utils.py @@ -0,0 +1,271 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import base64 +import copy +import io +from io import BytesIO +from itertools import chain + +import numpy as np +import requests +from PIL import Image + +from xtuner.utils import DEFAULT_IMAGE_TOKEN, IGNORE_INDEX, IMAGE_TOKEN_INDEX + + +def get_bos_eos_token_ids(tokenizer): + if tokenizer.__class__.__name__ in [ + 'QWenTokenizer', 'QWen2Tokenizer', 'Qwen2TokenizerFast' + ]: + bos_token_id = [] + eos_token_id = tokenizer.eos_token_id + assert eos_token_id is not None, \ + 'Please set eos_token for Qwen tokenizer!' + elif tokenizer.__class__.__name__ == 'ChatGLMTokenizer': + bos_token_id = [64790, 64792] + eos_token_id = tokenizer.eos_token_id + else: + bos_token_id = tokenizer.bos_token_id + eos_token_id = tokenizer.eos_token_id + if isinstance(bos_token_id, int): + bos_token_id = [bos_token_id] + if isinstance(eos_token_id, int): + eos_token_id = [eos_token_id] + return bos_token_id, eos_token_id + + +def encode_fn(example, + tokenizer, + max_length, + input_ids_with_output=True, + with_image_token=False): + """We only support the following three scenarios: + + 1. Incremental pretraining dataset. + example['conversation'] = [ + { + 'input': '', + 'output': '### Human: Can you write xxx' + } + ] + + 2. Single-turn conversation dataset. + example['conversation'] = [ + { + 'input': 'Give three tips for staying healthy.', + 'output': '1.Eat a balanced diet xxx' + } + ] + + 3. Multi-turn conversation dataset. + example['conversation'] = [ + { + 'input': 'Give three tips for staying healthy.', + 'output': '1.Eat a balanced diet xxx' + }, + { + 'input': 'Please expand on the second point.', + 'output': 'Here is an expanded explanation of the xxx' + } + ] + """ + bos_token_id, eos_token_id = get_bos_eos_token_ids(tokenizer) + is_multi_turn_conversation = len(example['conversation']) > 1 + if is_multi_turn_conversation: + assert input_ids_with_output + + input_ids, labels = [], [] + next_needs_bos_token = True + for single_turn_conversation in example['conversation']: + input = single_turn_conversation['input'] + if DEFAULT_IMAGE_TOKEN in input and with_image_token: + chunk_encode = [ + tokenizer.encode(chunk, add_special_tokens=False) + for chunk in input.split(DEFAULT_IMAGE_TOKEN) + ] + assert len(chunk_encode) == 2 + input_encode = [] + for idx, cur_chunk_encode in enumerate(chunk_encode): + input_encode.extend(cur_chunk_encode) + if idx != len(chunk_encode) - 1: + input_encode.append(IMAGE_TOKEN_INDEX) + else: + input_encode = tokenizer.encode(input, add_special_tokens=False) + if next_needs_bos_token: + input_ids += bos_token_id + labels += [IGNORE_INDEX] * len(bos_token_id) + input_ids += input_encode + labels += [IGNORE_INDEX] * len(input_encode) + if input_ids_with_output: + # Add output + output_with_loss = single_turn_conversation.get( + 'output_with_loss', True) + output = single_turn_conversation['output'] + output_encode = tokenizer.encode(output, add_special_tokens=False) + input_ids += output_encode + if output_with_loss: + labels += copy.deepcopy(output_encode) + else: + labels += [IGNORE_INDEX] * len(output_encode) + # Add EOS_TOKEN (with loss) + if single_turn_conversation.get('need_eos_token', True): + next_needs_bos_token = True + 
input_ids += eos_token_id + if output_with_loss: + labels += copy.deepcopy(eos_token_id) + else: + labels += [IGNORE_INDEX] * len(eos_token_id) + else: + next_needs_bos_token = False + # Add SEP (without loss) + sep = single_turn_conversation.get('sep', '') + if sep != '': + sep_encode = tokenizer.encode(sep, add_special_tokens=False) + input_ids += sep_encode + labels += [IGNORE_INDEX] * len(sep_encode) + + if len(input_ids) > max_length: + input_ids = input_ids[:max_length] + labels = labels[:max_length] + return {'input_ids': input_ids, 'labels': labels} + + +class Packer: + """Pack multiple pieces of data into one.""" + + def __init__(self, + chunk_size=2048, + use_varlen_attn=False, + drop_last=False): + self.chunk_size = chunk_size + self.residual = {'input_ids': [], 'labels': []} + self.use_varlen_attn = use_varlen_attn + self.drop_last = drop_last + if use_varlen_attn: + self.residual_cumulative_len = [0] + + def get_cumulative_len(self, chunk_num): + ptr_l = 0 + cumulative_len = [] + for chunk_idx in range(chunk_num): + length_train = (chunk_idx + 1) * self.chunk_size + ptr_r = np.searchsorted( + self.residual_cumulative_len, length_train, side='left') + if self.residual_cumulative_len[ptr_r] == length_train: + cumulative_len_cur = \ + self.residual_cumulative_len[ptr_l:ptr_r + 1] + ptr_l = ptr_r + 1 + else: + cumulative_len_cur = self.residual_cumulative_len[ + ptr_l:ptr_r] + [length_train] + ptr_l = ptr_r + cumulative_len_cur = [ + num - chunk_idx * self.chunk_size for num in cumulative_len_cur + ] + if cumulative_len_cur[0] != 0: + cumulative_len_cur = [0] + cumulative_len_cur + + cumulative_len.append(cumulative_len_cur) + + self.residual_cumulative_len = [ + num - length_train for num in self.residual_cumulative_len[ptr_l:] + ] + if len(self.residual_cumulative_len) == 0: + self.residual_cumulative_len = [0] + elif self.residual_cumulative_len[0] != 0: + self.residual_cumulative_len = [0] + self.residual_cumulative_len + + return cumulative_len + + def get_position_ids(self, cumulative_len): + position_ids = [] + for cumulative_len_cur in cumulative_len: + index_cur = [] + for i in range(len(cumulative_len_cur) - 1): + index_cur.extend( + list( + range(cumulative_len_cur[i + 1] - # noqa: W504 + cumulative_len_cur[i]))) + position_ids.append(index_cur) + return position_ids + + def __call__(self, batch): + concatenated_samples = { + k: v + list(chain(*batch[k])) + for k, v in self.residual.items() + } + + if self.use_varlen_attn: + for input_id in batch['input_ids']: + self.residual_cumulative_len.append( + self.residual_cumulative_len[-1] + len(input_id)) + + total_length = len(concatenated_samples[list( + concatenated_samples.keys())[0]]) + + if total_length >= self.chunk_size: + chunk_num = total_length // self.chunk_size + result = { + k: [ + v[i:i + self.chunk_size] for i in range( + 0, + chunk_num * # noqa: W504 + self.chunk_size, + self.chunk_size) + ] + for k, v in concatenated_samples.items() + } + self.residual = { + k: v[(chunk_num * self.chunk_size):] + for k, v in concatenated_samples.items() + } + + if self.use_varlen_attn: + cumulative_len = self.get_cumulative_len(chunk_num) + result['cumulative_len'] = cumulative_len + result['position_ids'] = self.get_position_ids(cumulative_len) + else: + if self.drop_last: + result = {k: [] for k, v in concatenated_samples.items()} + else: + result = {k: [v] for k, v in concatenated_samples.items()} + + self.residual = {k: [] for k in concatenated_samples.keys()} + + if self.use_varlen_attn: + result['cumulative_len'] = [] 
if self.drop_last else [ + self.residual_cumulative_len + ] + result['position_ids'] = [] if self.drop_last \ + else self.get_position_ids([self.residual_cumulative_len]) + self.residual_cumulative_len = [0] + + return result + + +def expand2square(pil_img, background_color): + width, height = pil_img.size + if width == height: + return pil_img + elif width > height: + result = Image.new(pil_img.mode, (width, width), background_color) + result.paste(pil_img, (0, (width - height) // 2)) + return result + else: + result = Image.new(pil_img.mode, (height, height), background_color) + result.paste(pil_img, ((height - width) // 2, 0)) + return result + + +def load_image(image_file): + if image_file.startswith('http://') or image_file.startswith('https://'): + response = requests.get(image_file) + image = Image.open(BytesIO(response.content)).convert('RGB') + else: + image = Image.open(image_file).convert('RGB') + return image + + +def decode_base64_to_image(base64_string): + image_data = base64.b64decode(base64_string) + image = Image.open(io.BytesIO(image_data)) + return image diff --git a/data/xtuner/xtuner/engine/__init__.py b/data/xtuner/xtuner/engine/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..4f50972ea9f16cf0089683769475fe7043455319 --- /dev/null +++ b/data/xtuner/xtuner/engine/__init__.py @@ -0,0 +1,10 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from ._strategy import DeepSpeedStrategy +from .hooks import (DatasetInfoHook, EvaluateChatHook, ThroughputHook, + VarlenAttnArgsToMessageHubHook) +from .runner import TrainLoop + +__all__ = [ + 'EvaluateChatHook', 'DatasetInfoHook', 'ThroughputHook', + 'VarlenAttnArgsToMessageHubHook', 'DeepSpeedStrategy', 'TrainLoop' +] diff --git a/data/xtuner/xtuner/engine/_strategy/__init__.py b/data/xtuner/xtuner/engine/_strategy/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..bac6095f977fa39655deb1d95c67d2e641e274b4 --- /dev/null +++ b/data/xtuner/xtuner/engine/_strategy/__init__.py @@ -0,0 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .deepspeed import DeepSpeedStrategy + +__all__ = ['DeepSpeedStrategy'] diff --git a/data/xtuner/xtuner/engine/_strategy/deepspeed.py b/data/xtuner/xtuner/engine/_strategy/deepspeed.py new file mode 100644 index 0000000000000000000000000000000000000000..42b7f5590dc67f1a252ea8331220700845e05584 --- /dev/null +++ b/data/xtuner/xtuner/engine/_strategy/deepspeed.py @@ -0,0 +1,70 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Optional + +from mmengine._strategy import DeepSpeedStrategy as MMEngineDeepSpeedStrategy + +from xtuner import DS_CEPH_DIR +from xtuner.parallel.sequence import init_sequence_parallel +from xtuner.utils.fileio import patch_fileio + + +class DeepSpeedStrategy(MMEngineDeepSpeedStrategy): + + def __init__(self, *args, **kwargs): + sequence_parallel_size = kwargs.pop('sequence_parallel_size', 1) + self.sequence_parallel_size = sequence_parallel_size + + super().__init__(*args, **kwargs) + + from transformers.integrations.deepspeed import HfDeepSpeedConfig + + # hf_deepspeed_config has to be saved as an attribute. + self.hf_deepspeed_config = HfDeepSpeedConfig(self.config) + + def _wrap_model(self, model): + wrapper = super()._wrap_model(model) + # hard code for deepspeed zero3 + # When utilizing Zero3, the model isn't allocated to CUDA within the + # `deepspeed.initialize` process. 
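+        # In other words, modules that DeepSpeed does not shard (here the
+        # data_preprocessor) are still on CPU after initialization, so they
+        # have to be moved to the current CUDA device manually.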
+ assert hasattr(wrapper.model, 'data_preprocessor') + wrapper.model.data_preprocessor.cuda() + return wrapper + + def save_checkpoint(self, *args, **kwargs) -> None: + if DS_CEPH_DIR: + from os import path as osp + work_dir_prefix = osp.split(self.work_dir)[0] + + filename = kwargs['filename'].replace(work_dir_prefix, DS_CEPH_DIR) + kwargs['filename'] = filename + with patch_fileio(): + super().save_checkpoint(*args, **kwargs) + else: + super().save_checkpoint(*args, **kwargs) + + def load_checkpoint(self, *args, **kwargs) -> None: + if DS_CEPH_DIR: + + with patch_fileio(): + checkpoint = super().load_checkpoint(*args, **kwargs) + else: + checkpoint = super().load_checkpoint(*args, **kwargs) + return checkpoint + + def resume(self, *args, **kwargs) -> None: + if DS_CEPH_DIR: + + with patch_fileio(): + checkpoint = super().resume(*args, **kwargs) + else: + checkpoint = super().resume(*args, **kwargs) + return checkpoint + + def _setup_distributed( # type: ignore + self, + launcher: Optional[str] = None, + backend: str = 'nccl', + **kwargs, + ): + super()._setup_distributed(launcher, backend, **kwargs) + init_sequence_parallel(self.sequence_parallel_size) diff --git a/data/xtuner/xtuner/engine/hooks/__init__.py b/data/xtuner/xtuner/engine/hooks/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..90262425d16e198429ec0d36029a52c9fbdd8ef2 --- /dev/null +++ b/data/xtuner/xtuner/engine/hooks/__init__.py @@ -0,0 +1,11 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .dataset_info_hook import DatasetInfoHook +from .evaluate_chat_hook import EvaluateChatHook +from .hf_checkpoint_hook import HFCheckpointHook +from .throughput_hook import ThroughputHook +from .varlen_attn_args_to_messagehub_hook import VarlenAttnArgsToMessageHubHook + +__all__ = [ + 'EvaluateChatHook', 'DatasetInfoHook', 'ThroughputHook', + 'VarlenAttnArgsToMessageHubHook', 'HFCheckpointHook' +] diff --git a/data/xtuner/xtuner/engine/hooks/dataset_info_hook.py b/data/xtuner/xtuner/engine/hooks/dataset_info_hook.py new file mode 100644 index 0000000000000000000000000000000000000000..84dc9498a4ce0aa2cc8175c9e317e1a35ca13fc9 --- /dev/null +++ b/data/xtuner/xtuner/engine/hooks/dataset_info_hook.py @@ -0,0 +1,65 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
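When `DS_CEPH_DIR` is set, the `save_checkpoint` override in the `DeepSpeedStrategy` above redirects checkpoints from the local work dir to remote storage by a plain prefix swap. A minimal sketch of that rewrite, with the directory values invented for illustration:

```python
from os import path as osp

# Illustrative values only; DS_CEPH_DIR normally comes from the environment.
DS_CEPH_DIR = 's3://my-bucket/ckpts'
work_dir = '/home/user/work_dirs/internlm2_exp'
filename = '/home/user/work_dirs/internlm2_exp/iter_500.pth'

# osp.split() drops the experiment-specific last component, so the prefix
# shared by all experiments is swapped while the experiment name is kept.
work_dir_prefix = osp.split(work_dir)[0]   # '/home/user/work_dirs'
print(filename.replace(work_dir_prefix, DS_CEPH_DIR))
# -> s3://my-bucket/ckpts/internlm2_exp/iter_500.pth
```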
+from mmengine.hooks import Hook + +from xtuner.registry import BUILDER +from xtuner.utils import DEFAULT_IMAGE_TOKEN, IMAGE_TOKEN_INDEX + + +def split_list(lst, value): + res = [] + tmp_res = [] + for i in lst: + if i == value: + res.append(tmp_res) + tmp_res = [] + else: + tmp_res.append(i) + res.append(tmp_res) + return res + + +class DatasetInfoHook(Hook): + + def __init__(self, tokenizer, is_intern_repo_dataset=False): + self.tokenizer = BUILDER.build(tokenizer) + self.is_intern_repo_dataset = is_intern_repo_dataset + + def log(self, runner, dataset, mode='train'): + + def _log(input_ids, log_prefix=''): + if self.is_intern_repo_dataset: + input_ids = [abs(x) for x in input_ids] + # Try to split list to be compatible with IMAGE token + input_ids = split_list(input_ids, IMAGE_TOKEN_INDEX) + text = log_prefix + for idx, ids in enumerate(input_ids): + text += self.tokenizer.decode(ids) + if idx != len(input_ids) - 1: + text += DEFAULT_IMAGE_TOKEN + runner.logger.info(text) + + runner.logger.info(f'Num {mode} samples {len(dataset)}') + runner.logger.info(f'{mode} example:') + if 'chosen_ids' in dataset[0]: + _log(dataset[0]['chosen_ids'], log_prefix='chosen: ') + _log(dataset[0]['rejected_ids'], log_prefix='rejected: ') + else: + _log(dataset[0]['input_ids']) + + def before_train(self, runner) -> None: + do_train = runner.train_loop is not None + do_eval = runner.val_loop is not None + if do_train: + train_dataset = runner.train_dataloader.dataset + self.log(runner, train_dataset, mode='train') + if do_eval: + eval_dataset = runner.val_dataloader.dataset + self.log(runner, eval_dataset, mode='eval') + + def before_val(self, runner) -> None: + eval_dataset = runner.val_dataloader.dataset + self.log(runner, eval_dataset, mode='eval') + + def before_test(self, runner) -> None: + test_dataset = runner.test_dataloader.dataset + self.log(runner, test_dataset, mode='test') diff --git a/data/xtuner/xtuner/engine/hooks/evaluate_chat_hook.py b/data/xtuner/xtuner/engine/hooks/evaluate_chat_hook.py new file mode 100644 index 0000000000000000000000000000000000000000..05d508e4c8f232a9299c1d1b7f69cfbc18262dbc --- /dev/null +++ b/data/xtuner/xtuner/engine/hooks/evaluate_chat_hook.py @@ -0,0 +1,281 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
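`split_list` in the `DatasetInfoHook` above is what makes multimodal samples printable: it cuts `input_ids` at every `IMAGE_TOKEN_INDEX` so each text chunk can be decoded on its own and the chunks rejoined with `DEFAULT_IMAGE_TOKEN`. A toy run of the same splitting logic (token ids are made up; `-200` is xtuner's image placeholder):

```python
IMAGE_TOKEN_INDEX = -200  # xtuner's sentinel id for an image slot

def split_list(lst, value):
    # Identical logic to the hook's helper: cut at every `value`.
    res, tmp = [], []
    for i in lst:
        if i == value:
            res.append(tmp)
            tmp = []
        else:
            tmp.append(i)
    res.append(tmp)
    return res

print(split_list([1, 2, IMAGE_TOKEN_INDEX, 3, 4], IMAGE_TOKEN_INDEX))
# -> [[1, 2], [3, 4]]; each sublist is decoded, then joined by '<image>'
```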
+import os +import warnings + +import torch +from mmengine.dist import master_only +from mmengine.hooks import Hook +from mmengine.model import is_model_wrapper +from mmengine.utils import mkdir_or_exist +from mmengine.utils.misc import get_object_from_string +from transformers import GenerationConfig, StoppingCriteriaList + +from xtuner.dataset.utils import expand2square, load_image +from xtuner.model.utils import prepare_inputs_labels_for_multimodal +from xtuner.registry import BUILDER +from xtuner.utils import (DEFAULT_IMAGE_TOKEN, IMAGE_TOKEN_INDEX, + StopWordStoppingCriteria) + + +class EvaluateChatHook(Hook): + + priority = 'LOW' + + def __init__(self, + tokenizer, + evaluation_inputs, + evaluation_images=None, + image_processor=None, + system='', + prompt_template=None, + every_n_iters=None, + max_new_tokens=600, + stop_word=None, + stop_words=[], + generation_kwargs={}): + self.evaluation_inputs = evaluation_inputs + if isinstance(self.evaluation_inputs, str): + self.evaluation_inputs = [self.evaluation_inputs] + self.evaluation_images = evaluation_images + if isinstance(self.evaluation_images, str): + self.evaluation_images = [self.evaluation_images] + if self.evaluation_images is not None: + assert len( + self.evaluation_images) in [1, len(self.evaluation_inputs)] + if len(self.evaluation_images) == 1: + self.evaluation_images = [self.evaluation_images[0]] * len( + self.evaluation_inputs) + self.evaluation_images = [ + load_image(img) for img in self.evaluation_images + ] + if prompt_template is None: + instruction = '{input}' + else: + if isinstance(prompt_template, str): # for resume + prompt_template = get_object_from_string(prompt_template) + instruction = prompt_template.get('INSTRUCTION', '{input}') + if system != '': + system = prompt_template.get( + 'SYSTEM', '{system}\n').format(system=system) + stop_words += prompt_template.get('STOP_WORDS', []) + if stop_word is not None: + # TODO: deprecation, v0.3.0 + warnings.warn( + ('The `stop_word` argument is deprecated and will be removed ' + 'in v0.3.0, use `stop_words` instead.'), DeprecationWarning) + stop_words.append(stop_word) + self.instruction = instruction + self.system = system + self.every_n_iters = every_n_iters + self.max_new_tokens = max_new_tokens + self.tokenizer = BUILDER.build(tokenizer) + if image_processor is not None: + self.image_processor = BUILDER.build(image_processor) + self.stop_criteria = StoppingCriteriaList() + + # default generation config + default_generation_kwargs = dict( + max_new_tokens=max_new_tokens, + do_sample=True, + temperature=0.1, + top_p=0.75, + top_k=40, + eos_token_id=self.tokenizer.eos_token_id, + pad_token_id=self.tokenizer.pad_token_id + if self.tokenizer.pad_token_id is not None else + self.tokenizer.eos_token_id) + default_generation_kwargs.update(generation_kwargs) + self.gen_config = GenerationConfig(**default_generation_kwargs) + + self.stop_criteria = StoppingCriteriaList() + for word in stop_words: + self.stop_criteria.append( + StopWordStoppingCriteria(self.tokenizer, word)) + + self.is_first_run = True + + @master_only + def _save_eval_output(self, runner, eval_outputs): + save_path = os.path.join(runner.log_dir, 'vis_data', + f'eval_outputs_iter_{runner.iter}.txt') + mkdir_or_exist(os.path.dirname(save_path)) + with open(save_path, 'w', encoding='utf-8') as f: + for i, output in enumerate(eval_outputs): + f.write(f'Eval output {i + 1}:\n{output}\n\n') + + def _eval_images(self, + runner, + model, + device, + max_new_tokens=None, + save_eval_output=False): + if 
save_eval_output: + eval_outputs = [] + + for sample_image, sample_input in zip(self.evaluation_images, + self.evaluation_inputs): + image = expand2square( + sample_image, + tuple(int(x * 255) for x in self.image_processor.image_mean)) + image = self.image_processor.preprocess( + image, return_tensors='pt')['pixel_values'][0] + image = image.to(device) + sample_input = DEFAULT_IMAGE_TOKEN + '\n' + sample_input + inputs = (self.system + self.instruction).format( + input=sample_input, round=1, **runner.cfg) + chunk_encode = [] + for idx, chunk in enumerate(inputs.split(DEFAULT_IMAGE_TOKEN)): + if idx == 0: + cur_encode = self.tokenizer.encode(chunk) + else: + cur_encode = self.tokenizer.encode( + chunk, add_special_tokens=False) + chunk_encode.append(cur_encode) + assert len(chunk_encode) == 2 + input_ids = [] + for idx, cur_chunk_encode in enumerate(chunk_encode): + input_ids.extend(cur_chunk_encode) + if idx != len(chunk_encode) - 1: + input_ids.append(IMAGE_TOKEN_INDEX) + input_ids = torch.tensor(input_ids).to(device) + visual_outputs = model.visual_encoder( + image.unsqueeze(0).to(model.visual_encoder.dtype), + output_hidden_states=True) + pixel_values = model.projector( + visual_outputs.hidden_states[model.visual_select_layer][:, 1:]) + + mm_inputs = prepare_inputs_labels_for_multimodal( + llm=model.llm, + input_ids=input_ids.unsqueeze(0), + pixel_values=pixel_values) + + generation_output = model.generate( + **mm_inputs, + max_new_tokens=max_new_tokens, + generation_config=self.gen_config, + bos_token_id=self.tokenizer.bos_token_id, + stopping_criteria=self.stop_criteria) + generation_output = self.tokenizer.decode(generation_output[0]) + runner.logger.info(f'Sample output:\n' + f'{inputs + generation_output}\n') + if save_eval_output: + eval_outputs.append(f'{inputs + generation_output}\n') + + if save_eval_output: + self._save_eval_output(runner, eval_outputs) + + def _eval_language(self, + runner, + model, + device, + max_new_tokens=None, + save_eval_output=False): + if save_eval_output: + eval_outputs = [] + + for sample_input in self.evaluation_inputs: + inputs = (self.system + self.instruction).format( + input=sample_input, round=1, **runner.cfg) + input_ids = self.tokenizer.encode(inputs, return_tensors='pt') + input_ids = input_ids.to(device) + generation_output = model.generate( + input_ids=input_ids, + max_new_tokens=max_new_tokens, + generation_config=self.gen_config, + stopping_criteria=self.stop_criteria) + generation_output = self.tokenizer.decode(generation_output[0]) + runner.logger.info(f'Sample output:\n{generation_output}\n') + if save_eval_output: + eval_outputs.append(f'{generation_output}\n') + + if save_eval_output: + self._save_eval_output(runner, eval_outputs) + + def _generate_samples(self, + runner, + max_new_tokens=None, + save_eval_output=False): + if max_new_tokens is None: + max_new_tokens = self.max_new_tokens + model = runner.model + if is_model_wrapper(model): + model = model.module + + device = next(iter(model.parameters())).device + + if self.is_first_run: + # hardcode for qlora DeepSpeed ZeRO3, put buffers and QuantState to + # device + model.to(device) + self.is_first_run = False + + is_checkpointing = model.llm.is_gradient_checkpointing + use_cache = model.llm.config.use_cache + + # Cast to inference mode + model.activation_checkpointing_disable() + model.llm.config.use_cache = True + model.eval() + if self.evaluation_images is not None: + self._eval_images(runner, model, device, max_new_tokens, + save_eval_output) + else: + 
self._eval_language(runner, model, device, max_new_tokens, + save_eval_output) + + # Cast to training mode + if is_checkpointing: + model.activation_checkpointing_enable() + model.llm.config.use_cache = use_cache + model.train() + + def before_train(self, runner): + runner.logger.info('before_train in EvaluateChatHook.') + self._generate_samples(runner, max_new_tokens=50) + + def _is_save_checkpoint(self, runner): + hooks = runner.hooks + checkpoint_hook = None + for hook in hooks: + if type(hook).__name__ == 'CheckpointHook': + checkpoint_hook = hook + break + if checkpoint_hook is None or checkpoint_hook.by_epoch: + return False + + if checkpoint_hook.every_n_train_iters( + runner, checkpoint_hook.interval, checkpoint_hook.save_begin) or \ + (checkpoint_hook.save_last and + checkpoint_hook.is_last_train_iter(runner)): + return True + + return False + + def after_train_iter(self, + runner, + batch_idx: int, + data_batch=None, + outputs=None) -> None: + if self.every_n_iters is None: + return + + save_eval_output = self._is_save_checkpoint(runner) + + do_chat = ( + save_eval_output + or self.every_n_train_iters(runner, self.every_n_iters)) + if not do_chat: + return + + runner.logger.info('after_train_iter in EvaluateChatHook.') + self._generate_samples(runner, save_eval_output=save_eval_output) + + def after_train(self, runner): + runner.logger.info('after_train in EvaluateChatHook.') + self._generate_samples(runner) + + def after_val(self, runner) -> None: + if self.every_n_iters is not None: + return + runner.logger.info('after_val in EvaluateChatHook.') + self._generate_samples(runner) diff --git a/data/xtuner/xtuner/engine/hooks/hf_checkpoint_hook.py b/data/xtuner/xtuner/engine/hooks/hf_checkpoint_hook.py new file mode 100644 index 0000000000000000000000000000000000000000..142af4cdbc27f34a0e4def644a742258542c2db0 --- /dev/null +++ b/data/xtuner/xtuner/engine/hooks/hf_checkpoint_hook.py @@ -0,0 +1,73 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
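Outside the runner, the text-only path of the `EvaluateChatHook` above reduces to ordinary `transformers` generation plus xtuner's stop-word criteria; a rough standalone equivalent (the checkpoint name and stop word are placeholders):

```python
from transformers import (AutoModelForCausalLM, AutoTokenizer,
                          GenerationConfig, StoppingCriteriaList)

from xtuner.utils import StopWordStoppingCriteria

model_name = 'internlm/internlm2-chat-7b'  # placeholder checkpoint
tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    model_name, trust_remote_code=True).eval()

# Mirrors the hook's default sampling settings.
gen_config = GenerationConfig(
    max_new_tokens=600, do_sample=True, temperature=0.1, top_p=0.75,
    top_k=40, eos_token_id=tokenizer.eos_token_id,
    pad_token_id=tokenizer.pad_token_id
    if tokenizer.pad_token_id is not None else tokenizer.eos_token_id)

# Stop words become extra stopping criteria, exactly as in the hook.
stop_criteria = StoppingCriteriaList(
    [StopWordStoppingCriteria(tokenizer, w) for w in ['<|im_end|>']])

input_ids = tokenizer.encode(
    'Give three tips for staying healthy.', return_tensors='pt')
output = model.generate(input_ids=input_ids, generation_config=gen_config,
                        stopping_criteria=stop_criteria)
print(tokenizer.decode(output[0]))
```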
+import os.path as osp +from pathlib import Path +from typing import Optional, Union + +import torch.distributed as dist +from mmengine import print_log +from mmengine._strategy import DeepSpeedStrategy +from mmengine.hooks import Hook +from mmengine.model import is_model_wrapper +from mmengine.runner import FlexibleRunner + +from xtuner.registry import BUILDER +from xtuner.utils import get_origin_state_dict + +DATA_BATCH = Optional[Union[dict, tuple, list]] + + +class HFCheckpointHook(Hook): + + priority = 95 # lower than CheckpointHook in MMEngine + + def __init__(self, out_dir: Optional[Union[str, Path]] = None) -> None: + self.out_dir = out_dir + + @staticmethod + def _use_shard_moe(llm): + config = llm.config + moe_implementation = getattr(config, 'moe_implementation', 'origin') + return moe_implementation == 'shard' + + def after_run(self, runner) -> None: + assert isinstance(runner, + FlexibleRunner), 'Runner should be `FlexibleRunner`' + assert isinstance( + runner.strategy, + DeepSpeedStrategy), 'Strategy should be `DeepSpeedStrategy`' + + if self.out_dir is None: + self.out_dir = osp.join(runner.work_dir, 'hf_model') + + wrapped_model = runner.strategy.model + if wrapped_model.zero_optimization_partition_weights(): + assert wrapped_model.zero_gather_16bit_weights_on_model_save(), \ + ('Please set `gather_16bit_weights_on_model_save=True` ' + 'in your DeepSpeed config.') + state_dict = wrapped_model._zero3_consolidated_16bit_state_dict() + else: + state_dict = wrapped_model.module_state_dict( + exclude_frozen_parameters=runner.strategy. + exclude_frozen_parameters) + + model = runner.model + if is_model_wrapper(model): + model = model.module + llm = model.llm + if (not dist.is_initialized()) or dist.get_rank() == 0: + # keys in state_dict are prefixed with 'llm.' + keys = list(state_dict.keys()) + for k in keys: + val = state_dict.pop(k) + state_dict[k[4:]] = val + + if self._use_shard_moe(llm): + print_log('recover the origin state_dict from merged one ...') + state_dict = get_origin_state_dict(state_dict, llm) + + print_log(f'Saving LLM to {self.out_dir}') + llm.save_pretrained(self.out_dir, state_dict=state_dict) + + print_log(f'Saving LLM tokenizer to {self.out_dir}') + tokenizer = BUILDER.build(runner.cfg.tokenizer) + tokenizer.save_pretrained(self.out_dir) diff --git a/data/xtuner/xtuner/engine/hooks/throughput_hook.py b/data/xtuner/xtuner/engine/hooks/throughput_hook.py new file mode 100644 index 0000000000000000000000000000000000000000..e74c0a0acf1e13498107364cc3cf3b4797159aaf --- /dev/null +++ b/data/xtuner/xtuner/engine/hooks/throughput_hook.py @@ -0,0 +1,150 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
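The key-renaming loop in `HFCheckpointHook.after_run` above simply strips the leading `'llm.'` (four characters) so the consolidated state dict lines up with the bare HF model. In isolation:

```python
# Keys as they appear in the wrapped xtuner model (values are dummies).
state_dict = {'llm.model.embed_tokens.weight': 0,
              'llm.lm_head.weight': 1}

# Same rename as the hook: pop each key, re-insert it without 'llm.'.
for k in list(state_dict.keys()):
    state_dict[k[4:]] = state_dict.pop(k)

print(sorted(state_dict))  # ['lm_head.weight', 'model.embed_tokens.weight']
```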
+import logging +from typing import Optional, Union + +import torch +from mmengine import print_log +from mmengine.hooks import Hook +from mmengine.model.wrappers import is_model_wrapper +from torch.utils._pytree import tree_flatten + +from xtuner.parallel.sequence import get_sequence_parallel_world_size + +DATA_BATCH = Optional[Union[dict, tuple, list]] + + +class ThroughputHook(Hook): + + # priority must be higher than LoggerHook (50) and lower than + # IterTimerHook (60) + priority = 55 + + def __init__(self, + use_activation_checkpointing=None, + hidden_size=None, + num_layers=None, + vocab_size=None, + mlp_ratio=None, + is_casual=None): + self.use_activation_checkpointing = use_activation_checkpointing + self.hidden_size = hidden_size + self.num_layers = num_layers + self.vocab_size = vocab_size + self.mlp_ratio = mlp_ratio + self.is_casual = is_casual + + @staticmethod + def _guess_is_casual_attn(model): + for module in model.modules(): + if hasattr(module, 'is_causal'): + return module.is_causal + print_log( + 'It\'s impossible to speculate whether casual attention was used, ' + 'and FLOPs will be calculated as `casual = True`.', 'current') + return True + + @staticmethod + def _get_batch_size_and_sequence_len(data_batch): + data_list, _ = tree_flatten(data_batch) + for data in data_list: + if isinstance(data, torch.Tensor): + return data.size(0), data.size(1) + raise RuntimeError('No tensor found in the batch') + + @staticmethod + def _guess_use_activation_checkpointing(model): + for module in model.modules(): + if hasattr(module, 'gradient_checkpointing'): + return module.gradient_checkpointing + return False + + def before_run(self, runner) -> None: + if is_model_wrapper(runner.model): + model = runner.model.module + else: + model = runner.model + self.use_activation_checkpointing = \ + (self.use_activation_checkpointing or + self._guess_use_activation_checkpointing(model)) + self.hidden_size = self.hidden_size or model.config.hidden_size + self.num_layers = self.num_layers or model.config.num_hidden_layers + self.vocab_size = self.vocab_size or model.config.vocab_size + self.mlp_ratio = self.mlp_ratio or (model.config.intermediate_size / + model.config.hidden_size) + self.mlp_ratio *= 1.5 # has gate_proj + self.is_casual = self.is_casual if self.is_casual is not None \ + else self._guess_is_casual_attn(model) + + use_varlen_attn = getattr(model, 'use_varlen_attn', False) + if use_varlen_attn: + print_log( + 'Using variable-length Flash Attention causes an inflation' + ' in the FLOPs calculation.', + 'current', + level=logging.WARNING) + + return + + def after_train_iter(self, + runner, + batch_idx: int, + data_batch: DATA_BATCH = None, + outputs: Optional[dict] = None) -> None: + """Calc flops based on the paper of Megatron + https://deepakn94.github.io/assets/papers/megatron-sc21.pdf.""" + + batch_size, sequence_len = self._get_batch_size_and_sequence_len( + data_batch) + sequence_parallel_size = get_sequence_parallel_world_size() + sequence_len /= sequence_parallel_size + + message_hub = runner.message_hub + iter_time = message_hub.get_scalar('train/time').current() + + # We consider a language model with 𝑙 transformer layers, + # hidden size h, sequence length s, vocabulary size V, and + # training batch size B. + # A $A_{mxk}$ x $X_{kxn}$ matrix multiplication requires 2𝑚 ×𝑘 ×𝑛 FLOPs + # (factor of 2 needed to account for multiplies and adds). 
+ + # Attention Layer: + # qkv_proj + o_proj: 8B * s * h^2 + # attn: 2B * s^2 * h (casual=False) and 2B * s^2 * h / 2 (casual=True) + + # MLP Layer: + # up_proj + down_proj + gate_proj: 4B * s * h^2 * mlp_ratio + # (In Llama mlp_ratio = intermediate_size / hidden_size * 1.5 + # (has gate_proj)) + + # The backward pass requires double the number of FLOPs since we + # need to calculate the gradients with respect to both input and + # weight tensors. In addition, we are using activation recomputation, + # which requires an additional forward pass before the backward pass. + + # While sequence parallel will affect the FLOPs calculation in attn. + # Suppose the sequence length in one GPU is s and the sequence + # parallel world size is `sp_size`, which means the total + # sequence length in the attention calculation is + # `s * sp_size` and the number of attention heads decrease to + # `num_heads / sp_size`. Hence, the FLOPs in attn calculation is: + # 2B * (s * sp_size)^2 * (h / sp_size) (casual=False) and + # 2B * (s * sp_size)^2 * (h / sp_size) / 2 (casual=True) + + flops_qkvo_proj = 8 * batch_size * sequence_len * self.hidden_size**2 + flops_attn = 4 * batch_size * sequence_len**2 * self.hidden_size * \ + sequence_parallel_size / (int(self.is_casual) + 1) + flops_mlp = 4 * self.mlp_ratio * batch_size * sequence_len * \ + self.hidden_size**2 + flops_wo_head = (3 + int(self.use_activation_checkpointing)) * ( + flops_qkvo_proj + flops_attn + flops_mlp) * self.num_layers + flops_head = 3 * 2 * batch_size * sequence_len * self.hidden_size * \ + self.vocab_size + flops_per_iteration = flops_wo_head + flops_head + + avg_tflops_per_gpu = flops_per_iteration / 1e12 / (iter_time + 1e-12) + tokens_per_sec_per_gpu = batch_size * sequence_len / ( + iter_time + 1e-12) + + message_hub.update_scalar('train/tflops', avg_tflops_per_gpu) + message_hub.update_scalar('train/tokens_per_sec', + tokens_per_sec_per_gpu) diff --git a/data/xtuner/xtuner/engine/hooks/varlen_attn_args_to_messagehub_hook.py b/data/xtuner/xtuner/engine/hooks/varlen_attn_args_to_messagehub_hook.py new file mode 100644 index 0000000000000000000000000000000000000000..fc31f21aecb44b666122db152ec6809dbaa41106 --- /dev/null +++ b/data/xtuner/xtuner/engine/hooks/varlen_attn_args_to_messagehub_hook.py @@ -0,0 +1,85 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
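Plugging Llama-7B-like shapes into the formulas above gives a feel for the magnitudes the hook reports; a back-of-the-envelope sketch (all sizes are illustrative, not measured):

```python
# Llama-7B-ish shapes: batch 1, seq 2048, hidden 4096, 32 layers, 32k vocab.
B, s, h, L, V = 1, 2048, 4096, 32, 32000
mlp_ratio = 11008 / 4096 * 1.5        # intermediate/hidden, x1.5 for gate_proj
is_casual, ckpt, sp_size = True, True, 1  # causal attn, recompute on, no SP

flops_qkvo = 8 * B * s * h**2
flops_attn = 4 * B * s**2 * h * sp_size / (int(is_casual) + 1)
flops_mlp = 4 * mlp_ratio * B * s * h**2
flops_wo_head = (3 + int(ckpt)) * (flops_qkvo + flops_attn + flops_mlp) * L
flops_head = 3 * 2 * B * s * h * V

print(f'{(flops_wo_head + flops_head) / 1e12:.1f} TFLOPs per iteration')
# ~112 TFLOPs; dividing by the iteration time gives the hook's train/tflops.
```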
+from typing import Optional, Union + +from mmengine import MessageHub +from mmengine.dist import get_rank +from mmengine.hooks import Hook + +DATA_BATCH = Optional[Union[dict, tuple, list]] + + +class VarlenAttnArgsToMessageHubHook(Hook): + + def before_train_iter(self, + runner, + batch_idx: int, + data_batch: dict = None) -> None: + rank = get_rank() + message_hub = MessageHub.get_instance('varlen_attn_args') + + assert 'data' in data_batch.keys() + data = data_batch['data'] + + cumulative_len = data.pop('cumulative_len') + assert len(cumulative_len) == 1 + cumulative_len = cumulative_len[0].cuda() + message_hub.update_info(f'cumulative_len_rank_{rank}', cumulative_len) + + max_seqlen = data.pop('max_seqlen') + message_hub.update_info(f'max_seqlen_rank_{rank}', max_seqlen) + + def after_train_iter(self, + runner, + batch_idx: int, + data_batch: DATA_BATCH = None, + outputs: Optional[dict] = None) -> None: + rank = get_rank() + message_hub = MessageHub.get_instance('varlen_attn_args') + message_hub.update_info(f'cumulative_len_rank_{rank}', None) + message_hub.update_info(f'max_seqlen_rank_{rank}', None) + + def before_val_iter(self, + runner, + batch_idx: int, + data_batch: DATA_BATCH = None) -> None: + """All subclasses should override this method, if they need any + operations before each validation iteration. + + Args: + runner (Runner): The runner of the validation process. + batch_idx (int): The index of the current batch in the val loop. + data_batch (dict, optional): Data from dataloader. + Defaults to None. + """ + rank = get_rank() + message_hub = MessageHub.get_instance('varlen_attn_args') + + assert 'data' in data_batch.keys() + data = data_batch['data'] + + cumulative_len = data.pop('cumulative_len') + assert len(cumulative_len) == 1 + cumulative_len = cumulative_len[0].cuda() + message_hub.update_info(f'cumulative_len_rank_{rank}', cumulative_len) + + max_seqlen = data.pop('max_seqlen') + message_hub.update_info(f'max_seqlen_rank_{rank}', max_seqlen) + + def after_val_iter(self, + runner, + batch_idx, + data_batch=None, + outputs=None) -> None: + """All subclasses should override this method, if they need any + operations after each validation iteration. + + Args: + runner (Runner): The runner of the validation process. + batch_idx (int): The index of the current batch in the val loop. + data_batch (dict or tuple or list, optional): Data from dataloader. + outputs (Sequence, optional): Outputs from model. + """ + rank = get_rank() + message_hub = MessageHub.get_instance('varlen_attn_args') + message_hub.update_info(f'cumulative_len_rank_{rank}', None) + message_hub.update_info(f'max_seqlen_rank_{rank}', None) diff --git a/data/xtuner/xtuner/engine/runner/__init__.py b/data/xtuner/xtuner/engine/runner/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..d8d1c582b531e341dfbb299e56cbbd3db0b81e16 --- /dev/null +++ b/data/xtuner/xtuner/engine/runner/__init__.py @@ -0,0 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .loops import TrainLoop + +__all__ = ['TrainLoop'] diff --git a/data/xtuner/xtuner/engine/runner/loops.py b/data/xtuner/xtuner/engine/runner/loops.py new file mode 100644 index 0000000000000000000000000000000000000000..aeb6be31ae6e09c32fb27f60c82690d4fc94b84a --- /dev/null +++ b/data/xtuner/xtuner/engine/runner/loops.py @@ -0,0 +1,40 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
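The hook above is one half of a producer/consumer pair: it stashes the packed-batch metadata in a named `MessageHub`, and xtuner's variable-length attention later fetches it by rank inside the forward pass. A minimal sketch of both sides (tensor values are illustrative):

```python
import torch
from mmengine import MessageHub
from mmengine.dist import get_rank

rank = get_rank()
hub = MessageHub.get_instance('varlen_attn_args')

# Producer (what before_train_iter does): cumulative boundaries for a
# packed sequence built from samples of lengths 3, 5 and 2.
hub.update_info(f'cumulative_len_rank_{rank}', torch.tensor([0, 3, 8, 10]))
hub.update_info(f'max_seqlen_rank_{rank}', 5)

# Consumer (what a varlen attention wrapper would do mid-forward).
cu_seqlens = hub.get_info(f'cumulative_len_rank_{rank}')
max_seqlen = hub.get_info(f'max_seqlen_rank_{rank}')
print(cu_seqlens.tolist(), max_seqlen)  # [0, 3, 8, 10] 5
```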
+from typing import Dict, Optional, Union + +from mmengine.runner import IterBasedTrainLoop +from torch.utils.data import DataLoader + + +class TrainLoop(IterBasedTrainLoop): + + def __init__(self, + runner, + dataloader: Union[DataLoader, Dict], + max_iters: Optional[int] = None, + max_epochs: Union[int, float] = None, + **kwargs) -> None: + + if max_iters is None and max_epochs is None: + raise RuntimeError('Please specify the `max_iters` or ' + '`max_epochs` in `train_cfg`.') + elif max_iters is not None and max_epochs is not None: + raise RuntimeError('Only one of `max_iters` or `max_epochs` can ' + 'exist in `train_cfg`.') + else: + if max_iters is not None: + iters = int(max_iters) + assert iters == max_iters, ('`max_iters` should be a integer ' + f'number, but get {max_iters}') + elif max_epochs is not None: + if isinstance(dataloader, dict): + diff_rank_seed = runner._randomness_cfg.get( + 'diff_rank_seed', False) + dataloader = runner.build_dataloader( + dataloader, + seed=runner.seed, + diff_rank_seed=diff_rank_seed) + iters = max_epochs * len(dataloader) + else: + raise NotImplementedError + super().__init__( + runner=runner, dataloader=dataloader, max_iters=iters, **kwargs) diff --git a/data/xtuner/xtuner/entry_point.py b/data/xtuner/xtuner/entry_point.py new file mode 100644 index 0000000000000000000000000000000000000000..2af774fd37843714f0ce78f8ac59bd0bfecb34c6 --- /dev/null +++ b/data/xtuner/xtuner/entry_point.py @@ -0,0 +1,302 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import logging +import os +import random +import subprocess +import sys + +from mmengine.logging import print_log + +import xtuner + +# Define valid modes +MODES = ('list-cfg', 'copy-cfg', 'log-dataset', 'check-custom-dataset', + 'train', 'test', 'chat', 'convert', 'preprocess', 'mmbench', + 'eval_refcoco') + +CLI_HELP_MSG = \ + f""" + Arguments received: {str(['xtuner'] + sys.argv[1:])}. xtuner commands use the following syntax: + + xtuner MODE MODE_ARGS ARGS + + Where MODE (required) is one of {MODES} + MODE_ARG (optional) is the argument for specific mode + ARGS (optional) are the arguments for specific command + + Some usages for xtuner commands: (See more by using -h for specific command!) + + 1. List all predefined configs: + xtuner list-cfg + 2. Copy a predefined config to a given path: + xtuner copy-cfg $CONFIG $SAVE_FILE + 3-1. Fine-tune LLMs by a single GPU: + xtuner train $CONFIG + 3-2. Fine-tune LLMs by multiple GPUs: + NPROC_PER_NODE=$NGPUS NNODES=$NNODES NODE_RANK=$NODE_RANK PORT=$PORT ADDR=$ADDR xtuner dist_train $CONFIG $GPUS + 4-1. Convert the pth model to HuggingFace's model: + xtuner convert pth_to_hf $CONFIG $PATH_TO_PTH_MODEL $SAVE_PATH_TO_HF_MODEL + 4-2. Merge the HuggingFace's adapter to the pretrained base model: + xtuner convert merge $LLM $ADAPTER $SAVE_PATH + xtuner convert merge $CLIP $ADAPTER $SAVE_PATH --is-clip + 4-3. Split HuggingFace's LLM to the smallest sharded one: + xtuner convert split $LLM $SAVE_PATH + 5-1. Chat with LLMs with HuggingFace's model and adapter: + xtuner chat $LLM --adapter $ADAPTER --prompt-template $PROMPT_TEMPLATE --system-template $SYSTEM_TEMPLATE + 5-2. Chat with VLMs with HuggingFace's model and LLaVA: + xtuner chat $LLM --llava $LLAVA --visual-encoder $VISUAL_ENCODER --image $IMAGE --prompt-template $PROMPT_TEMPLATE --system-template $SYSTEM_TEMPLATE + 6-1. Preprocess arxiv dataset: + xtuner preprocess arxiv $SRC_FILE $DST_FILE --start-date $START_DATE --categories $CATEGORIES + 6-2. 
Preprocess refcoco dataset: + xtuner preprocess refcoco --ann-path $RefCOCO_ANN_PATH --image-path $COCO_IMAGE_PATH --save-path $SAVE_PATH + 7-1. Log processed dataset: + xtuner log-dataset $CONFIG + 7-2. Verify the correctness of the config file for the custom dataset: + xtuner check-custom-dataset $CONFIG + 8. MMBench evaluation: + xtuner mmbench $LLM --llava $LLAVA --visual-encoder $VISUAL_ENCODER --prompt-template $PROMPT_TEMPLATE --data-path $MMBENCH_DATA_PATH + 9. Refcoco evaluation: + xtuner eval_refcoco $LLM --llava $LLAVA --visual-encoder $VISUAL_ENCODER --prompt-template $PROMPT_TEMPLATE --data-path $REFCOCO_DATA_PATH + 10. List all dataset formats which are supported in XTuner + + Run special commands: + + xtuner help + xtuner version + + GitHub: https://github.com/InternLM/xtuner + """ # noqa: E501 + + +CONVERT_HELP_MSG = \ + f""" + Arguments received: {str(['xtuner'] + sys.argv[1:])}. xtuner commands use the following syntax: + + xtuner MODE MODE_ARGS ARGS + + Where MODE (required) is one of {MODES} + MODE_ARG (optional) is the argument for specific mode + ARGS (optional) are the arguments for specific command + + Some usages for convert: (See more by using -h for specific command!) + + 1. Convert the pth model to HuggingFace's model: + xtuner convert pth_to_hf $CONFIG $PATH_TO_PTH_MODEL $SAVE_PATH_TO_HF_MODEL + 2. Merge the HuggingFace's adapter to the pretrained LLM: + xtuner convert merge $LLM $ADAPTER $SAVE_PATH + 3. Split HuggingFace's LLM to the smallest sharded one: + xtuner convert split $LLM $SAVE_PATH + + GitHub: https://github.com/InternLM/xtuner + """ # noqa: E501 + + +PREPROCESS_HELP_MSG = \ + f""" + Arguments received: {str(['xtuner'] + sys.argv[1:])}. xtuner commands use the following syntax: + + xtuner MODE MODE_ARGS ARGS + + Where MODE (required) is one of {MODES} + MODE_ARG (optional) is the argument for specific mode + ARGS (optional) are the arguments for specific command + + Some usages for preprocess: (See more by using -h for specific command!) + + 1. Preprocess arxiv dataset: + xtuner preprocess arxiv $SRC_FILE $DST_FILE --start-date $START_DATE --categories $CATEGORIES + 2. 
Preprocess refcoco dataset: + xtuner preprocess refcoco --ann-path $RefCOCO_ANN_PATH --image-path $COCO_IMAGE_PATH --save-path $SAVE_PATH + + GitHub: https://github.com/InternLM/xtuner + """ # noqa: E501 + +special = { + 'help': lambda: print_log(CLI_HELP_MSG, 'current'), + 'version': lambda: print_log(xtuner.__version__, 'current') +} +special = { + **special, + **{f'-{k[0]}': v + for k, v in special.items()}, + **{f'--{k}': v + for k, v in special.items()} +} + + +def list_dataset_format(): + from xtuner.tools import list_dataset_format + return list_dataset_format.__file__ + + +def list_cfg(): + from xtuner.tools import list_cfg + return list_cfg.__file__ + + +def copy_cfg(): + from xtuner.tools import copy_cfg + return copy_cfg.__file__ + + +def log_dataset(): + from xtuner.tools import log_dataset + return log_dataset.__file__ + + +def check_custom_dataset(): + from xtuner.tools import check_custom_dataset + return check_custom_dataset.__file__ + + +def train(): + from xtuner.tools import train + return train.__file__ + + +def test(): + from xtuner.tools import test + return test.__file__ + + +def chat(): + from xtuner.tools import chat + return chat.__file__ + + +def mmbench(): + from xtuner.tools import mmbench + return mmbench.__file__ + + +def pth_to_hf(): + from xtuner.tools.model_converters import pth_to_hf + return pth_to_hf.__file__ + + +def merge(): + from xtuner.tools.model_converters import merge + return merge.__file__ + + +def split(): + from xtuner.tools.model_converters import split + return split.__file__ + + +def arxiv_preprocess(): + from xtuner.tools.data_preprocess import arxiv as arxiv_preprocess + return arxiv_preprocess.__file__ + + +def convert_refcoco(): + from xtuner.tools.data_preprocess import convert_refcoco + return convert_refcoco.__file__ + + +def convert_help_msg(): + print_log(CONVERT_HELP_MSG, 'current') + + +def preprocess_help_msg(): + print_log(PREPROCESS_HELP_MSG, 'current') + + +def eval_refcoco(): + from xtuner.tools import eval_refcoco + return eval_refcoco.__file__ + + +modes = { + 'list-cfg': list_cfg, + 'copy-cfg': copy_cfg, + 'log-dataset': log_dataset, + 'check-custom-dataset': check_custom_dataset, + 'train': train, + 'test': test, + 'chat': chat, + 'mmbench': mmbench, + 'convert': { + 'pth_to_hf': pth_to_hf, + 'merge': merge, + 'split': split, + '--help': convert_help_msg, + '-h': convert_help_msg + }, + 'preprocess': { + 'arxiv': arxiv_preprocess, + 'refcoco': convert_refcoco, + '--help': preprocess_help_msg, + '-h': preprocess_help_msg + }, + 'eval_refcoco': eval_refcoco, + 'list-dataset-format': list_dataset_format +} + +HELP_FUNCS = [preprocess_help_msg, convert_help_msg] +MAP_FILE_FUNCS = [ + list_cfg, copy_cfg, log_dataset, check_custom_dataset, train, test, chat, + mmbench, pth_to_hf, merge, split, arxiv_preprocess, eval_refcoco, + convert_refcoco, list_dataset_format +] + + +def cli(): + args = sys.argv[1:] + if not args: # no arguments passed + print_log(CLI_HELP_MSG, 'current') + return + if args[0].lower() in special: + special[args[0].lower()]() + return + elif args[0].lower() in modes: + try: + fn_or_dict = modes[args[0].lower()] + n_arg = 0 + + if isinstance(fn_or_dict, dict): + n_arg += 1 + fn = fn_or_dict[args[n_arg].lower()] + else: + fn = fn_or_dict + + assert callable(fn) + + if fn in HELP_FUNCS: + fn() + else: + slurm_launcher = False + for i in range(n_arg + 1, len(args)): + if args[i] == '--launcher': + if i + 1 < len(args) and args[i + 1] == 'slurm': + slurm_launcher = True + break + nnodes = 
int(os.environ.get('NNODES', 1)) + nproc_per_node = int(os.environ.get('NPROC_PER_NODE', 1)) + if slurm_launcher or (nnodes == 1 and nproc_per_node == 1): + subprocess.run(['python', fn()] + args[n_arg + 1:]) + else: + port = os.environ.get('PORT', None) + if port is None: + port = random.randint(20000, 29999) + print_log(f'Use random port: {port}', 'current', + logging.WARNING) + torchrun_args = [ + f'--nnodes={nnodes}', + f"--node_rank={os.environ.get('NODE_RANK', 0)}", + f'--nproc_per_node={nproc_per_node}', + f"--master_addr={os.environ.get('ADDR', '127.0.0.1')}", + f'--master_port={port}' + ] + subprocess.run(['torchrun'] + torchrun_args + [fn()] + + args[n_arg + 1:] + + ['--launcher', 'pytorch']) + except Exception as e: + print_log(f"WARNING: command error: '{e}'!", 'current', + logging.WARNING) + print_log(CLI_HELP_MSG, 'current', logging.WARNING) + return + else: + print_log('WARNING: command error!', 'current', logging.WARNING) + print_log(CLI_HELP_MSG, 'current', logging.WARNING) + return diff --git a/data/xtuner/xtuner/evaluation/__init__.py b/data/xtuner/xtuner/evaluation/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..fba3e590598c3fe175f9d331e0da8883c1ef4ea8 --- /dev/null +++ b/data/xtuner/xtuner/evaluation/__init__.py @@ -0,0 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .metrics import MMLUMetric + +__all__ = ['MMLUMetric'] diff --git a/data/xtuner/xtuner/evaluation/metrics/__init__.py b/data/xtuner/xtuner/evaluation/metrics/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f3efc80fd5d8aa3f7b65e43ec1a8acd98a1df3bb --- /dev/null +++ b/data/xtuner/xtuner/evaluation/metrics/__init__.py @@ -0,0 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .mmlu_metric import MMLUMetric + +__all__ = ['MMLUMetric'] diff --git a/data/xtuner/xtuner/evaluation/metrics/mmlu_metric.py b/data/xtuner/xtuner/evaluation/metrics/mmlu_metric.py new file mode 100644 index 0000000000000000000000000000000000000000..ad1282056a8e7691f05f579275ad0bf990796f12 --- /dev/null +++ b/data/xtuner/xtuner/evaluation/metrics/mmlu_metric.py @@ -0,0 +1,246 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
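+# MMLUMetric scores multiple-choice MMLU answers: for each sample it takes
+# the logits at the last non-padded position, restricts them to the token ids
+# of 'A'/'B'/'C'/'D', and argmaxes over those four to get the prediction.
+# Accuracies are then aggregated per subject, per subcategory and per
+# category. A minimal sketch of wiring it into an mmengine evaluator config
+# (names here are illustrative, not taken from a shipped config):
+#
+#     val_evaluator = dict(type=MMLUMetric, tokenizer=tokenizer_cfg)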
+from typing import Any, Sequence + +import numpy as np +import torch +from mmengine.evaluator import BaseMetric +from mmengine.logging import print_log +from rich.console import Console +from rich.table import Table + +from xtuner.registry import BUILDER + + +class MMLUMetric(BaseMetric): + METAINFO = { + 'subcategories': { + 'abstract_algebra': ['math'], + 'anatomy': ['health'], + 'astronomy': ['physics'], + 'business_ethics': ['business'], + 'clinical_knowledge': ['health'], + 'college_biology': ['biology'], + 'college_chemistry': ['chemistry'], + 'college_computer_science': ['computer science'], + 'college_mathematics': ['math'], + 'college_medicine': ['health'], + 'college_physics': ['physics'], + 'computer_security': ['computer science'], + 'conceptual_physics': ['physics'], + 'econometrics': ['economics'], + 'electrical_engineering': ['engineering'], + 'elementary_mathematics': ['math'], + 'formal_logic': ['philosophy'], + 'global_facts': ['other'], + 'high_school_biology': ['biology'], + 'high_school_chemistry': ['chemistry'], + 'high_school_computer_science': ['computer science'], + 'high_school_european_history': ['history'], + 'high_school_geography': ['geography'], + 'high_school_government_and_politics': ['politics'], + 'high_school_macroeconomics': ['economics'], + 'high_school_mathematics': ['math'], + 'high_school_microeconomics': ['economics'], + 'high_school_physics': ['physics'], + 'high_school_psychology': ['psychology'], + 'high_school_statistics': ['math'], + 'high_school_us_history': ['history'], + 'high_school_world_history': ['history'], + 'human_aging': ['health'], + 'human_sexuality': ['culture'], + 'international_law': ['law'], + 'jurisprudence': ['law'], + 'logical_fallacies': ['philosophy'], + 'machine_learning': ['computer science'], + 'management': ['business'], + 'marketing': ['business'], + 'medical_genetics': ['health'], + 'miscellaneous': ['other'], + 'moral_disputes': ['philosophy'], + 'moral_scenarios': ['philosophy'], + 'nutrition': ['health'], + 'philosophy': ['philosophy'], + 'prehistory': ['history'], + 'professional_accounting': ['other'], + 'professional_law': ['law'], + 'professional_medicine': ['health'], + 'professional_psychology': ['psychology'], + 'public_relations': ['politics'], + 'security_studies': ['politics'], + 'sociology': ['culture'], + 'us_foreign_policy': ['politics'], + 'virology': ['health'], + 'world_religions': ['philosophy'], + }, + 'categories': { + 'STEM': [ + 'physics', 'chemistry', 'biology', 'computer science', 'math', + 'engineering' + ], + 'humanities': ['history', 'philosophy', 'law'], + 'social sciences': + ['politics', 'culture', 'economics', 'geography', 'psychology'], + 'other (business, health, misc.)': ['other', 'business', 'health'], + }, + } + METAINFO['subcategories_list'] = list({ + subcat + for subcats in METAINFO['subcategories'].values() for subcat in subcats + }) + + def __init__(self, tokenizer, *args, **kwargs): + super().__init__(*args, **kwargs) + tokenizer = BUILDER.build(tokenizer) + self.abcd_idx = [ + tokenizer.encode('A', add_special_tokens=False)[0], + tokenizer.encode('B', add_special_tokens=False)[0], + tokenizer.encode('C', add_special_tokens=False)[0], + tokenizer.encode('D', add_special_tokens=False)[0], + ] + + @staticmethod + def ABCD_to_0123(abcd): + return {'A': 0, 'B': 1, 'C': 2, 'D': 3}[abcd] + + @staticmethod + def find_first_zero_index(tensor): + indices = torch.nonzero(tensor == 0) + if indices.numel() > 0: + return indices[0].item() + else: + return None + + @staticmethod + def 
accuracy(preds, gts):
+        """Computes the accuracy for preds and gts."""
+        correct = [1 if pred == gt else 0 for pred, gt in zip(preds, gts)]
+        acc = np.mean(correct) * 100
+        return acc
+
+    def process(self, data_batch: Any, data_samples: Sequence[dict]) -> None:
+        """Process one batch of data samples and predictions. The processed
+        results should be stored in ``self.results``, which will be used to
+        compute the metrics when all batches have been processed.
+
+        Args:
+            data_batch (Any): A batch of data from the dataloader.
+            data_samples (Sequence[dict]): A batch of outputs from
+                the model.
+        """
+        subjects = data_batch['data_samples']['subjects']
+        gts = [
+            self.ABCD_to_0123(gt)
+            for gt in data_batch['data_samples']['labels']
+        ]
+        preds = []
+        for sample, attn_mask, subject, gt in zip(
+                data_samples, data_batch['data']['attention_mask'], subjects,
+                gts):
+            pred_logits = sample['logits']
+            first_zero_idx = self.find_first_zero_index(attn_mask)
+            pred_idx = -1 if first_zero_idx is None else first_zero_idx - 1
+            pred_logits_abcd = pred_logits[pred_idx, self.abcd_idx]
+            pred = torch.argmax(pred_logits_abcd).item()
+            preds.append(pred)
+            self.results.append((subject, pred, gt))
+
+    def compute_metrics(self, results: list) -> dict:
+        """Compute the metrics from processed results.
+
+        Args:
+            results (list): The processed results of each batch.
+
+        Returns:
+            dict: The computed metrics. The keys are the names of the metrics,
+            and the values are corresponding results.
+        """
+        subjects_results = {
+            subject: {
+                'preds': [],
+                'gts': []
+            }
+            for subject in self.METAINFO['subcategories'].keys()
+        }
+        subcats_results = {
+            subcat: {
+                'preds': [],
+                'gts': []
+            }
+            for subcat in self.METAINFO['subcategories_list']
+        }
+        cats_results = {
+            cat: {
+                'preds': [],
+                'gts': []
+            }
+            for cat in self.METAINFO['categories'].keys()
+        }
+        for subject, pred, gt in results:
+            subjects_results[subject]['preds'].append(pred)
+            subjects_results[subject]['gts'].append(gt)
+            subcats = self.METAINFO['subcategories'][subject]
+            for subcat in subcats:
+                subcats_results[subcat]['preds'].append(pred)
+                subcats_results[subcat]['gts'].append(gt)
+        for cat, subcats in self.METAINFO['categories'].items():
+            for subcat in subcats:
+                if subcat in subcats_results:
+                    cats_results[cat]['preds'].extend(
+                        subcats_results[subcat]['preds'])
+                    cats_results[cat]['gts'].extend(
+                        subcats_results[subcat]['gts'])
+
+        subjects_metrics = dict()
+        subcats_metrics = dict()
+        cats_metrics = dict()
+        for subject in self.METAINFO['subcategories'].keys():
+            assert len(subjects_results[subject]['preds']) == len(
+                subjects_results[subject]['gts'])
+            if len(subjects_results[subject]['preds']) == 0:
+                print_log(f'Skip subject {subject} for mmlu', 'current')
+            else:
+                score = self.accuracy(subjects_results[subject]['preds'],
+                                      subjects_results[subject]['gts'])
+                subjects_metrics[f'{subject}'] = score
+        for subcat in self.METAINFO['subcategories_list']:
+            assert len(subcats_results[subcat]['preds']) == len(
+                subcats_results[subcat]['gts'])
+            if len(subcats_results[subcat]['preds']) == 0:
+                print_log(f'Skip subcategory {subcat} for mmlu', 'current')
+            else:
+                score = self.accuracy(subcats_results[subcat]['preds'],
+                                      subcats_results[subcat]['gts'])
+                subcats_metrics[f'{subcat}'] = score
+        for cat in self.METAINFO['categories'].keys():
+            assert len(cats_results[cat]['preds']) == len(
+                cats_results[cat]['gts'])
+            if len(cats_results[cat]['preds']) == 0:
+                print_log(f'Skip category {cat} for mmlu', 'current')
+            else:
+                score = self.accuracy(cats_results[cat]['preds'],
+                                      cats_results[cat]['gts'])
+                cats_metrics[f'{cat}'] = score
+
+        metrics = dict()
+        metrics.update(subjects_metrics)
+        metrics.update(subcats_metrics)
+        metrics.update(cats_metrics)
+        metrics['average'] = np.mean(list(subjects_metrics.values()))
+
+        table_metrics = dict()
+        table_metrics.update(cats_metrics)
+        table_metrics['average'] = np.mean(list(subjects_metrics.values()))
+        self._print_results(table_metrics)
+        return metrics
+
+    def _print_results(self, table_metrics: dict) -> None:
+        table_title = ' MMLU Benchmark '
+        table = Table(title=table_title)
+        console = Console()
+        table.add_column('Categories', justify='left')
+        table.add_column('Accuracy (%)', justify='right')
+        for cat, acc in table_metrics.items():
+            table.add_row(cat, f'{acc:.1f}')
+        with console.capture() as capture:
+            console.print(table, end='')
+        print_log('\n' + capture.get(), 'current')
diff --git a/data/xtuner/xtuner/evaluation/metrics/reward_metric.py b/data/xtuner/xtuner/evaluation/metrics/reward_metric.py
new file mode 100644
index 0000000000000000000000000000000000000000..c5d019978c9ebbfe2debd42b113f64aba9274423
--- /dev/null
+++ b/data/xtuner/xtuner/evaluation/metrics/reward_metric.py
@@ -0,0 +1,102 @@
+import itertools
+from collections import defaultdict
+from typing import List, Optional, Sequence
+
+import torch
+from mmengine.evaluator import BaseMetric
+from mmengine.logging import print_log
+from rich.console import Console
+from rich.table import Table
+
+
+class RewardMetric(BaseMetric):
+    r"""Reward model evaluation metric."""
+
+    default_prefix: Optional[str] = ''
+
+    def __init__(self,
+                 collect_device: str = 'cpu',
+                 prefix: Optional[str] = None) -> None:
+        super().__init__(collect_device=collect_device, prefix=prefix)
+
+    def process(self, data_batch, data_samples: Sequence[dict]):
+        """Process one batch of data samples.
+
+        The processed results should be stored in ``self.results``, which will
+        be used to compute the metrics when all batches have been processed.
+
+        Args:
+            data_batch: A batch of data from the dataloader.
+            data_samples (Sequence[dict]): A batch of outputs from the model.
+        """
+        logits = torch.cat(
+            [sample['logits'].unsqueeze(0) for sample in data_samples], dim=0)
+        labels = data_batch['data']['labels']
+        ds_names = data_batch['data_samples']['ds_names']
+        chosen_idx = torch.where(labels == 0)
+        rejected_idx = torch.where(labels == 1)
+        chosen_logits = logits[chosen_idx].cpu()
+        rejected_logits = logits[rejected_idx].cpu()
+
+        correct = (chosen_logits > rejected_logits).cpu()
+        self.results.append({
+            'chosen_logits': chosen_logits,
+            'rejected_logits': rejected_logits,
+            'correct': correct,
+            'ds_names': ds_names
+        })
+
+    def compute_metrics(self, results: List):
+        """Compute the metrics from processed results.
+
+        Args:
+            results (list): The processed results of each batch.
+
+        Returns:
+            dict: The computed metrics. The keys are the names of the metrics,
+            and the values are corresponding results.
+        """
+        # NOTICE: don't access `self.results` from this method; use the
+        # gathered `results` argument instead.
+        metrics = {}
+
+        correct = torch.cat([res['correct'] for res in results])
+        chosen_logits = torch.cat([res['chosen_logits'] for res in results])
+        rejected_logits = torch.cat(
+            [res['rejected_logits'] for res in results])
+        ds_names = list(
+            itertools.chain(*[res['ds_names'] for res in results]))
+
+        # group by ds_names
+        grouped_correct = defaultdict(list)
+        grouped_chosen_logits = defaultdict(list)
+        grouped_rejected_logits = defaultdict(list)
+        for i, ds_name in enumerate(ds_names):
+            grouped_correct[ds_name].append(correct[i])
+            grouped_chosen_logits[ds_name].append(chosen_logits[i])
+            grouped_rejected_logits[ds_name].append(rejected_logits[i])
+
+        # print metrics in a rich table
+        table = Table(title='Reward Metrics')
+        table.add_column('Dataset Name')
+        table.add_column('Accuracy')
+        table.add_column('Chosen Score')
+        table.add_column('Rejected Score')
+
+        for ds_name in grouped_correct.keys():
+            correct = torch.stack(grouped_correct[ds_name])
+            chosen_logits = torch.stack(grouped_chosen_logits[ds_name])
+            rejected_logits = torch.stack(grouped_rejected_logits[ds_name])
+
+            acc = correct.float().mean()
+            metrics[f'accuracy/{ds_name}'] = acc.item()
+            metrics[f'chosen_score/{ds_name}'] = chosen_logits.mean().item()
+            metrics[f'rejected_score/{ds_name}'] = \
+                rejected_logits.mean().item()
+
+            table.add_row(ds_name, f'{acc:.4f}',
+                          f'{chosen_logits.mean():.4f}',
+                          f'{rejected_logits.mean():.4f}')
+
+        console = Console()
+        with console.capture() as capture:
+            console.print(table, end='')
+        print_log('\n' + capture.get(), 'current')
+
+        return metrics
diff --git a/data/xtuner/xtuner/model/__init__.py b/data/xtuner/xtuner/model/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..1b3a501d4a96ccb4ed2e7d5d10ab093d08892f12
--- /dev/null
+++ b/data/xtuner/xtuner/model/__init__.py
@@ -0,0 +1,6 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from .internvl import InternVL_V1_5
+from .llava import LLaVAModel
+from .sft import SupervisedFinetune
+
+__all__ = ['SupervisedFinetune', 'LLaVAModel', 'InternVL_V1_5']
diff --git a/data/xtuner/xtuner/model/dpo.py b/data/xtuner/xtuner/model/dpo.py
new file mode 100644
index 0000000000000000000000000000000000000000..faaa43402cb077ca39d9418e778b5bcbede10ace
--- /dev/null
+++ b/data/xtuner/xtuner/model/dpo.py
@@ -0,0 +1,286 @@
+# DPO Authors: Rafael Rafailov, Archit Sharma, Eric Mitchell, Stefano Ermon, Christopher D. Manning, and Chelsea Finn 2023  # noqa
+# Copyright 2023 The HuggingFace Team. All rights reserved.
+# Copyright (c) OpenMMLab. All rights reserved.
+from copy import deepcopy
+
+import torch
+import torch.distributed as dist
+import torch.nn.functional as F
+from mmengine import MessageHub
+from transformers.integrations import is_deepspeed_zero3_enabled
+
+from xtuner.parallel.sequence import (gather_forward_split_backward,
+                                      get_sequence_parallel_group,
+                                      get_sequence_parallel_world_size,
+                                      split_for_sequence_parallel)
+from .sft import SupervisedFinetune
+
+
+def disable_grad(model):
+    # freeze parameters
+    parameter_names = [n for n, _ in model.named_parameters()]
+    for param_name in parameter_names:
+        param = model.get_parameter(param_name)
+        param.requires_grad = False
+    return model.eval()
+
+
+def create_reference_model(model):
+    if is_deepspeed_zero3_enabled():
+        raise ValueError('DeepSpeed ZeRO-3 is enabled and is not compatible '
+                         'with `create_reference_model()`. 
Please instantiate '
+                         'your reference model directly with '
+                         '`AutoModelForCausalLM.from_pretrained()`.')
+    ref_model = deepcopy(model)
+    ref_model = disable_grad(ref_model)
+    return ref_model
+
+
+class DPO(SupervisedFinetune):
+    """A general class of DPO and its variants."""
+
+    def __init__(self,
+                 llm,
+                 ref_llm=None,
+                 beta=0.1,
+                 loss_type='sigmoid',
+                 label_smoothing=0.0,
+                 **kwargs):
+        super().__init__(llm, **kwargs)
+        self.loss_type = loss_type
+        self.label_smoothing = label_smoothing
+        self.beta = beta
+
+        if ref_llm is not None:
+            ref_llm = self.build_llm_from_cfg(
+                ref_llm, kwargs.get('use_varlen_attn', False),
+                kwargs.get('max_position_embeddings', None))
+            self.ref_llm = disable_grad(ref_llm)
+        else:
+            self.ref_llm = None if self.use_lora else create_reference_model(
+                self.llm)
+
+    def _gather_masked_logits(self, logits, labels, mask):
+        logits = torch.gather(
+            logits.log_softmax(-1), dim=2,
+            index=labels.unsqueeze(2)).squeeze(2)
+        return logits * mask
+
+    def get_logps(
+            self,
+            policy_logps,  # (bs, seqlen), gathered log-probs
+            ref_logps,  # (bs, seqlen), gathered log-probs
+            loss_mask,  # (bs, seqlen)
+    ):
+        policy_logps = policy_logps[:, :-1].sum(-1)
+        ref_logps = ref_logps[:, :-1].sum(-1)
+        loss_mask = loss_mask[:, :-1]
+
+        if self.loss_type == 'ipo':  # average_log_prob
+            policy_logps = policy_logps / loss_mask.sum(-1)
+            ref_logps = ref_logps / loss_mask.sum(-1)
+
+        policy_chosen_logps = policy_logps[::2]
+        policy_rejected_logps = policy_logps[1::2]
+        reference_chosen_logps = ref_logps[::2]
+        reference_rejected_logps = ref_logps[1::2]
+        return (policy_chosen_logps, policy_rejected_logps,
+                reference_chosen_logps, reference_rejected_logps)
+
+    def get_var_len_atten_logps(self, policy_logps, ref_logps, loss_mask,
+                                cu_seqlens, attention_mask):
+        seqlens = (cu_seqlens[1:] - cu_seqlens[:-1]).tolist()
+        # unpack sequence
+        unpacked_policy_logps = torch.split(policy_logps, seqlens, dim=1)
+        unpacked_ref_logps = torch.split(ref_logps, seqlens, dim=1)
+        unpacked_loss_mask = torch.split(loss_mask, seqlens, dim=1)
+        if attention_mask is not None:
+            # A non-None attention_mask indicates that the original sequence,
+            # labels, position_ids and cumulative_len were padded for
+            # sequence parallel, so the padded segments must be removed.
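+            # The pad is appended at the end of the packed batch, so it
+            # always forms the last unpacked segment; dropping the final
+            # chunk below removes it. The assert checks that padding is
+            # actually present whenever `attention_mask` is given.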
+ assert False in attention_mask + unpacked_policy_logps = unpacked_policy_logps[:-1] + unpacked_ref_logps = unpacked_ref_logps[:-1] + unpacked_loss_mask = unpacked_loss_mask[:-1] + assert len(unpacked_policy_logps) % 2 == 0 + + def compute_logps(_logps, _mask): + _logps = _logps[:, :-1].sum(-1) + _mask = _mask[:, :-1] + if self.loss_type == 'ipo': + _logps /= _mask.sum(-1) + return _logps + + (policy_chosen_logps, policy_rejected_logps, reference_chosen_logps, + reference_rejected_logps) = [], [], [], [] + for i in range(len(unpacked_policy_logps) // 2): + chosen = unpacked_policy_logps[2 * i] + rejected = unpacked_policy_logps[2 * i + 1] + chosen_ref = unpacked_ref_logps[2 * i] + rejected_ref = unpacked_ref_logps[2 * i + 1] + chosen_mask = unpacked_loss_mask[2 * i] + rejected_mask = unpacked_loss_mask[2 * i + 1] + policy_chosen_logps.append(compute_logps(chosen, chosen_mask)) + policy_rejected_logps.append( + compute_logps(rejected, rejected_mask)) + reference_chosen_logps.append( + compute_logps(chosen_ref, chosen_mask)) + reference_rejected_logps.append( + compute_logps(rejected_ref, rejected_mask)) + + return (torch.stack(policy_chosen_logps), + torch.stack(policy_rejected_logps), + torch.stack(reference_chosen_logps), + torch.stack(reference_rejected_logps)) + + @staticmethod + def _split_for_sequence_parallel(data): + # attention mask should not be split + ARGS_NEED_TO_SPLIT = ('input_ids', 'position_ids', 'labels') + sp_group = get_sequence_parallel_group() + for key in ARGS_NEED_TO_SPLIT: + val = data.get(key, None) + if val is not None: + # `dim` is 1 as the shape of tensor is (bs, seq_len, ...) + data[key] = split_for_sequence_parallel( + val, dim=1, sp_group=sp_group) + return data + + def compute_loss(self, data, data_samples=None): + # modified from https://github.com/huggingface/trl/blob/main/trl/trainer/dpo_trainer.py # noqa + # shift labels first and add a dummy label at the end, to support sequence parallel # noqa + data['labels'] = torch.cat( + (data['labels'][:, 1:], torch.zeros_like(data['labels'][:, :1])), + dim=1) + tmp_label = data['labels'].clone() + tmp_label[tmp_label == 0] = -100 + all_loss_mask = data[ + 'labels'] != -100 # loss mask of all tokens in all sp ranks # noqa + + if get_sequence_parallel_world_size() > 1: + data = self._split_for_sequence_parallel(data) + + all_logits = self.llm(**data).logits + with torch.no_grad(): + if self.ref_llm is None: + with self.llm.disable_adapter(): + all_ref_logits = self.llm(**data).logits + else: + all_ref_logits = self.ref_llm(**data).logits + + labels = data['labels'] + labels[labels == -100] = 0 + loss_mask = labels != 0 # loss mask in a single sp rank + policy_logps = self._gather_masked_logits(all_logits, labels, + loss_mask) + ref_logps = self._gather_masked_logits(all_ref_logits, labels, + loss_mask) + + if get_sequence_parallel_world_size() > 1: + policy_logps = gather_forward_split_backward( + policy_logps, + dim=1, + sp_group=get_sequence_parallel_group(), + grad_scale='up') + ref_logps = gather_forward_split_backward( + ref_logps, + dim=1, + sp_group=get_sequence_parallel_group(), + grad_scale='up') + + if not self.use_varlen_attn: + (policy_chosen_logps, policy_rejected_logps, + reference_chosen_logps, + reference_rejected_logps) = self.get_logps( + policy_logps, ref_logps, all_loss_mask) + else: + message_hub = MessageHub.get_instance('varlen_attn_args') + rank = dist.get_rank() + cu_seqlens = message_hub.get_info(f'cumulative_len_rank_{rank}') + (policy_chosen_logps, policy_rejected_logps, + 
reference_chosen_logps, + reference_rejected_logps) = self.get_var_len_atten_logps( + policy_logps, ref_logps, all_loss_mask, cu_seqlens, + data['attention_mask']) + + pi_logratios = policy_chosen_logps - policy_rejected_logps + ref_logratios = reference_chosen_logps - reference_rejected_logps + + logits = pi_logratios - ref_logratios + if self.loss_type == 'sigmoid': + loss = (-F.logsigmoid(self.beta * logits) * + (1 - self.label_smoothing) - + F.logsigmoid(-self.beta * logits) * self.label_smoothing) + elif self.loss_type == 'robust': + loss = (-F.logsigmoid(self.beta * logits) * + (1 - self.label_smoothing) + + F.logsigmoid(-self.beta * logits) * + self.label_smoothing) / (1 - 2 * self.label_smoothing) + elif self.loss_type == 'hinge': + loss = torch.relu(1 - self.beta * logits) + elif self.loss_type == 'ipo': + # eqn (17) of the paper where beta is the regularization + # parameter for the IPO loss, denoted by tau in the paper. # noqa + loss = (logits - 1 / (2 * self.beta))**2 + elif self.loss_type == 'kto_pair': + # eqn (7) of the HALOs paper + chosen_KL = (policy_chosen_logps - + reference_chosen_logps).mean().clamp(min=0) + rejected_KL = (policy_rejected_logps - + reference_rejected_logps).mean().clamp(min=0) + + chosen_logratios = policy_chosen_logps - reference_chosen_logps + rejected_logratios = \ + policy_rejected_logps - reference_rejected_logps + # As described in the KTO report, the KL term for chosen (rejected) + # is estimated using the rejected (chosen) half. # noqa + loss = torch.cat( + ( + 1 - F.sigmoid(self.beta * + (chosen_logratios - rejected_KL)), + 1 - F.sigmoid(self.beta * + (chosen_KL - rejected_logratios)), + ), + 0, + ) + elif self.loss_type == 'sppo_hard': + # In the paper (https://arxiv.org/pdf/2405.00675), + # SPPO employs a soft probability approach, + # estimated using the PairRM score. The probability calculation + # is conducted outside of the trainer class. + # The version described here is the hard probability version, + # where P in Equation (4.7) of Algorithm 1 is set to 1 for + # the winner and 0 for the loser. + a = policy_chosen_logps - reference_chosen_logps + b = policy_rejected_logps - reference_rejected_logps + + loss = (a - 0.5 / self.beta)**2 + (b + 0.5 / self.beta)**2 + elif self.loss_type == 'nca_pair': + chosen_rewards = (policy_chosen_logps - + reference_chosen_logps) * self.beta + rejected_rewards = (policy_rejected_logps - + reference_rejected_logps) * self.beta + loss = (-F.logsigmoid(chosen_rewards) - + 0.5 * F.logsigmoid(-chosen_rewards) - + 0.5 * F.logsigmoid(-rejected_rewards)) + else: + raise ValueError( + f'Unknown loss type: {self.loss_type}. Should be one of ' + "['sigmoid', 'hinge', 'ipo', 'kto_pair', " + "'sppo_hard', 'nca_pair', 'robust']") + # for logging + chosen_rewards = self.beta * ( + policy_chosen_logps - reference_chosen_logps) + rejected_rewards = self.beta * ( + policy_rejected_logps - reference_rejected_logps) + reward_acc = (chosen_rewards > rejected_rewards).float().mean() + + loss_dict = { + 'loss': loss, + 'chosen_rewards': chosen_rewards.mean(), + 'rejected_rewards': rejected_rewards.mean(), + 'reward_acc': reward_acc, + 'reward_margin': (chosen_rewards - rejected_rewards).mean(), + } + return loss_dict diff --git a/data/xtuner/xtuner/model/internvl.py b/data/xtuner/xtuner/model/internvl.py new file mode 100644 index 0000000000000000000000000000000000000000..0358266a9ff40defc650ca62179a1c496653bed7 --- /dev/null +++ b/data/xtuner/xtuner/model/internvl.py @@ -0,0 +1,320 @@ +# Copyright (c) OpenMMLab. 
All rights reserved. +from collections import OrderedDict +from typing import List, Optional, Tuple, Union + +import torch +from mmengine import print_log +from mmengine.config import Config, ConfigDict +from mmengine.model import BaseModel +from peft import get_peft_model, prepare_model_for_kbit_training +from torch.nn import CrossEntropyLoss +from transformers import (AutoConfig, AutoModel, AutoTokenizer, + BitsAndBytesConfig) +from transformers.modeling_outputs import CausalLMOutputWithPast + +from xtuner.registry import BUILDER +from .utils import (find_all_linear_names, get_peft_model_state_dict, + guess_load_checkpoint, make_inputs_require_grad) + + +class InternVL_V1_5(BaseModel): + + def __init__(self, + model_path, + freeze_llm=False, + freeze_visual_encoder=False, + llm_lora=None, + visual_encoder_lora=None, + quantization_vit=False, + quantization_llm=False, + pretrained_pth=None): + print_log('Start to load InternVL_V1_5 model.', logger='current') + super().__init__() + self.freeze_llm = freeze_llm + self.freeze_visual_encoder = freeze_visual_encoder + self.use_llm_lora = llm_lora is not None + self.use_visual_encoder_lora = visual_encoder_lora is not None + self.quantization_vit = quantization_vit + self.quantization_llm = quantization_llm + if quantization_vit: + assert visual_encoder_lora is not None + if quantization_llm: + assert quantization_llm and llm_lora is not None + + config = AutoConfig.from_pretrained(model_path, trust_remote_code=True) + if config.llm_config.model_type == 'internlm2': + config.llm_config.attn_implementation = 'flash_attention_2' + else: + config.llm_config._attn_implementation = 'flash_attention_2' + + if quantization_vit is False and quantization_llm is False: + quantization = None + else: + llm_int8_skip_modules = ['mlp1'] + if quantization_llm and not quantization_vit: + llm_int8_skip_modules.append('vision_model') + + if quantization_vit and not quantization_llm: + llm_int8_skip_modules.append('language_model') + + quantization_config = dict( + type=BitsAndBytesConfig, + llm_int8_skip_modules=llm_int8_skip_modules, + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4') + quantization_clazz = quantization_config.pop('type') + quantization = quantization_clazz(**quantization_config) + + self.model = AutoModel.from_pretrained( + model_path, + torch_dtype=torch.bfloat16, + quantization_config=quantization, + config=config, + trust_remote_code=True) + + tokenizer = AutoTokenizer.from_pretrained( + model_path, trust_remote_code=True) + img_context_token_id = tokenizer.convert_tokens_to_ids('') + self.model.img_context_token_id = img_context_token_id + + if self.freeze_llm: + self.model.language_model.requires_grad_(False) + if self.freeze_visual_encoder: + self.model.vision_model.requires_grad_(False) + + if hasattr(self.model.language_model, 'enable_input_require_grads'): + self.model.language_model.enable_input_require_grads() + else: + self.model.language_model.get_input_embeddings( + ).register_forward_hook(make_inputs_require_grad) + + self.gradient_checkpointing_enable() + + if self.use_llm_lora: + self._prepare_llm_for_lora(llm_lora) + + if self.use_visual_encoder_lora: + self._prepare_visual_encoder_for_lora(visual_encoder_lora) + + if pretrained_pth is not None: + pretrained_state_dict = guess_load_checkpoint(pretrained_pth) + + self.load_state_dict(pretrained_state_dict, strict=False) + 
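+            # NOTE: `strict=False` permits partial checkpoints, e.g. ones
+            # carrying only projector or adapter weights from an earlier
+            # pretraining stage; parameters missing from the checkpoint keep
+            # their freshly initialized values.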
print(f'Load pretrained weight from {pretrained_pth}') + + self._count = 0 + print_log(self, logger='current') + print_log('InternVL_V1_5 construction is complete', logger='current') + + def _parse_lora_config(self, lora_config): + if isinstance(lora_config, dict) or isinstance( + lora_config, Config) or isinstance(lora_config, ConfigDict): + lora_config = BUILDER.build(lora_config) + return lora_config + + def _prepare_llm_for_lora(self, + lora_config, + use_activation_checkpointing=True): + lora_config = self._parse_lora_config(lora_config) + self.model.language_model = prepare_model_for_kbit_training( + self.model.language_model, use_activation_checkpointing) + if lora_config.target_modules is None: + modules = find_all_linear_names(self.model.language_model) + lora_config.target_modules = modules + self.model.language_model = get_peft_model(self.model.language_model, + lora_config) + + def _prepare_visual_encoder_for_lora(self, lora_config): + lora_config = self._parse_lora_config(lora_config) + if lora_config.target_modules is None: + modules = find_all_linear_names(self.model.vision_model) + lora_config.target_modules = modules + self.model.vision_model = get_peft_model(self.model.vision_model, + lora_config) + + def gradient_checkpointing_enable(self): + self.activation_checkpointing_enable() + + def activation_checkpointing_enable(self): + self.model.language_model.gradient_checkpointing_enable() + + def gradient_checkpointing_disable(self): + self.activation_checkpointing_disable() + + def activation_checkpointing_disable(self): + self.model.language_model.gradient_checkpointing_disable() + + def state_dict(self, *args, **kwargs): + state_dict = super().state_dict(*args, **kwargs) + to_return = OrderedDict() + # Step 1. visual_encoder + if self.use_visual_encoder_lora: + to_return.update( + get_peft_model_state_dict( + self.model.vision_model, state_dict=state_dict)) + elif not self.freeze_visual_encoder: + to_return.update({ + k: v + for k, v in state_dict.items() if 'model.vision_model.' in k + }) + # Step 2. LLM + if self.use_llm_lora: + to_return.update( + get_peft_model_state_dict( + self.model.language_model, state_dict=state_dict)) + elif not self.freeze_llm: + to_return.update({ + k: v + for k, v in state_dict.items() if 'model.language_model.' in k + }) + # Step 3. Projector + to_return.update( + {k: v + for k, v in state_dict.items() if 'model.mlp1.' in k}) + return to_return + + def init_weights(self): + pass + + def forward(self, data, data_samples=None, mode='loss'): + pixel_values = data['pixel_values'] + + if type(pixel_values) is list or pixel_values.ndim == 5: + if type(pixel_values) is list: + pixel_values = [ + x.unsqueeze(0) if x.ndim == 3 else x for x in pixel_values + ] + # b*n, c, h, w + concat_images = torch.cat([ + image.to(self.model.vision_model.dtype) + for image in pixel_values + ], + dim=0) + else: + raise NotImplementedError() + + input_ids = data['input_ids'] + position_ids = data['position_ids'] + attention_mask = data['attention_mask'] + # sum is 0 are text + image_flags = torch.sum(concat_images, dim=(1, 2, 3)) != 0 + image_flags = image_flags.long() + + labels = data['labels'] + use_cache = False + + # Directly calling this code in LORA fine-tuning + # will result in an error,so we must rewrite it. + # TODO: Once the official is fixed, we can remove it. 
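+        # The rewritten `_llm_forward` below is identical to the official
+        # forward (kept commented out for reference) except that it clones
+        # the input embeddings before they are modified in place, which is
+        # what breaks under LoRA fine-tuning.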
+ # outputs = self.model(input_ids=input_ids, + # position_ids=position_ids, + # attention_mask=attention_mask, + # image_flags=image_flags, + # pixel_values=concat_images, + # labels=labels, + # use_cache=use_cache) + outputs = self._llm_forward( + input_ids=input_ids, + position_ids=position_ids, + attention_mask=attention_mask, + image_flags=image_flags, + pixel_values=concat_images, + labels=labels, + use_cache=use_cache) + loss_dict = {'loss': outputs.loss} + return loss_dict + + def _llm_forward( + self, + pixel_values: torch.FloatTensor, + input_ids: torch.LongTensor = None, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + image_flags: Optional[torch.LongTensor] = None, + past_key_values: Optional[List[torch.FloatTensor]] = None, + labels: Optional[torch.LongTensor] = None, + use_cache: Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + ) -> Union[Tuple, CausalLMOutputWithPast]: + return_dict = return_dict if return_dict is not None \ + else self.model.config.use_return_dict + + image_flags = image_flags.squeeze(-1) + # We only added the clone code here to avoid the error. + input_embeds = self.model.language_model.get_input_embeddings()( + input_ids).clone() + + vit_embeds = self.model.extract_feature(pixel_values) + vit_embeds = vit_embeds[image_flags == 1] + vit_batch_size = pixel_values.shape[0] + + B, N, C = input_embeds.shape + input_embeds = input_embeds.reshape(B * N, C) + + if torch.distributed.get_rank() == 0 and self._count % 100 == 0: + print(f'dynamic ViT batch size: {vit_batch_size}, ' + f'images per sample: {vit_batch_size / B}, ' + f'dynamic token length: {N}') + self._count += 1 + + input_ids = input_ids.reshape(B * N) + selected = (input_ids == self.model.img_context_token_id) + try: + input_embeds[ + selected] = input_embeds[selected] * 0.0 + vit_embeds.reshape( + -1, C) + except Exception as e: + vit_embeds = vit_embeds.reshape(-1, C) + print(f'warning: {e}, input_embeds[selected].shape=' + f'{input_embeds[selected].shape}, ' + f'vit_embeds.shape={vit_embeds.shape}') + n_token = selected.sum() + input_embeds[ + selected] = input_embeds[selected] * 0.0 + vit_embeds[:n_token] + + input_embeds = input_embeds.reshape(B, N, C) + + outputs = self.model.language_model( + inputs_embeds=input_embeds, + attention_mask=attention_mask, + position_ids=position_ids, + past_key_values=past_key_values, + use_cache=use_cache, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + logits = outputs.logits + + loss = None + if labels is not None: + # Shift so that tokens < n predict n + shift_logits = logits[..., :-1, :].contiguous() + shift_labels = labels[..., 1:].contiguous() + # Flatten the tokens + loss_fct = CrossEntropyLoss() + shift_logits = shift_logits.view( + -1, self.model.language_model.config.vocab_size) + shift_labels = shift_labels.view(-1) + # Enable model parallelism + shift_labels = shift_labels.to(shift_logits.device) + loss = loss_fct(shift_logits, shift_labels) + + if not return_dict: + output = (logits, ) + outputs[1:] + return (loss, ) + output if loss is not None else output + + return CausalLMOutputWithPast( + loss=loss, + logits=logits, + past_key_values=outputs.past_key_values, + hidden_states=outputs.hidden_states, + attentions=outputs.attentions, + ) diff --git a/data/xtuner/xtuner/model/llava.py b/data/xtuner/xtuner/model/llava.py new 
file mode 100644 index 0000000000000000000000000000000000000000..36d1833d3040e65e97700d81bd11a906fbedbebd --- /dev/null +++ b/data/xtuner/xtuner/model/llava.py @@ -0,0 +1,635 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import math +import os.path as osp +import warnings +from collections import OrderedDict + +import torch +import torch.nn as nn +from accelerate import init_empty_weights +from mmengine import print_log +from mmengine.config import Config, ConfigDict +from mmengine.model import BaseModel +from peft import get_peft_model, prepare_model_for_kbit_training +from transformers import (AddedToken, AutoConfig, CLIPImageProcessor, + CLIPVisionModel, LlamaForCausalLM, + LlamaTokenizerFast, LlavaConfig, + LlavaForConditionalGeneration, LlavaProcessor) +from transformers.integrations import is_deepspeed_zero3_enabled + +from xtuner.registry import BUILDER +from xtuner.utils import DEFAULT_IMAGE_TOKEN +from .modules import ProjectorConfig, ProjectorModel, dispatch_modules +from .modules.dispatch import SUPPORT_FLASH1, SUPPORT_FLASH2 +from .utils import (LoadWoInit, find_all_linear_names, + get_peft_model_state_dict, guess_load_checkpoint, + make_inputs_require_grad, + prepare_inputs_labels_for_multimodal, traverse_dict) + + +def convert_state_dict_to_hf(state_dict, mapping): + new_state_dict = {} + for key, value in state_dict.items(): + if key.endswith('.inv_freq'): + continue + for key_to_modify, new_key in mapping.items(): + if key_to_modify in key: + key = key.replace(key_to_modify, new_key) + new_state_dict[key] = value + return new_state_dict + + +class LLaVAModel(BaseModel): + + def __init__(self, + llm, + visual_encoder, + freeze_llm=False, + freeze_visual_encoder=False, + visual_select_layer=-2, + pretrained_pth=None, + projector_depth=2, + llm_lora=None, + visual_encoder_lora=None, + use_activation_checkpointing=True, + max_position_embeddings=None): + super().__init__() + self.freeze_llm = freeze_llm + self.freeze_visual_encoder = freeze_visual_encoder + with LoadWoInit(): + if isinstance(llm, dict): + llm = self._dispatch_lm_model_cfg(llm, max_position_embeddings) + + self.llm = self._build_from_cfg_or_module(llm) + self.visual_encoder = self._build_from_cfg_or_module( + visual_encoder) + self.llm.config.use_cache = False + dispatch_modules(self.llm) + + self.projector_depth = projector_depth + projector_config = ProjectorConfig( + visual_hidden_size=self.visual_encoder.config.hidden_size, + llm_hidden_size=self.llm.config.hidden_size, + depth=self.projector_depth) + self.projector = ProjectorModel(projector_config).to( + self.visual_encoder.dtype) + + if self.freeze_llm: + self.llm.requires_grad_(False) + if self.freeze_visual_encoder: + self.visual_encoder.requires_grad_(False) + + if use_activation_checkpointing: + # For backward compatibility + if hasattr(self.llm, 'enable_input_require_grads'): + self.llm.enable_input_require_grads() + else: + self.llm.get_input_embeddings().register_forward_hook( + make_inputs_require_grad) + if hasattr(self.visual_encoder, 'enable_input_require_grads'): + self.visual_encoder.enable_input_require_grads() + else: + self.visual_encoder.get_input_embeddings( + ).register_forward_hook(make_inputs_require_grad) + self.projector.enable_input_require_grads() + + # enable gradient (activation) checkpointing for memory efficiency + self.gradient_checkpointing_enable() + + self.use_llm_lora = llm_lora is not None + self.use_visual_encoder_lora = visual_encoder_lora is not None + + if self.use_llm_lora: + 
self._prepare_llm_for_lora(llm_lora, use_activation_checkpointing) + if self.use_visual_encoder_lora: + self._prepare_visual_encoder_for_lora( + visual_encoder_lora, use_activation_checkpointing) + + if pretrained_pth is not None: + pretrained_state_dict = guess_load_checkpoint(pretrained_pth) + + self.load_state_dict(pretrained_state_dict, strict=False) + print_log(f'Load pretrained weight from {pretrained_pth}', + 'current') + + self.visual_select_layer = visual_select_layer + + self._is_init = True + + self.is_first_iter = True + + def _parse_lora_config(self, lora_config): + if isinstance(lora_config, dict) or isinstance( + lora_config, Config) or isinstance(lora_config, ConfigDict): + lora_config = BUILDER.build(lora_config) + return lora_config + + def _prepare_llm_for_lora(self, + lora_config, + use_activation_checkpointing=True): + lora_config = self._parse_lora_config(lora_config) + self.llm = prepare_model_for_kbit_training( + self.llm, use_activation_checkpointing) + if lora_config.target_modules is None: + modules = find_all_linear_names(self.llm) + lora_config.target_modules = modules + self.llm = get_peft_model(self.llm, lora_config) + + def _prepare_visual_encoder_for_lora(self, + lora_config, + use_activation_checkpointing=True): + lora_config = self._parse_lora_config(lora_config) + if lora_config.target_modules is None: + modules = find_all_linear_names(self.visual_encoder) + lora_config.target_modules = modules + self.visual_encoder = get_peft_model(self.visual_encoder, lora_config) + + def gradient_checkpointing_enable(self): + self.activation_checkpointing_enable() + + def activation_checkpointing_enable(self): + self.llm.gradient_checkpointing_enable() + self.visual_encoder.gradient_checkpointing_enable() + self.projector.gradient_checkpointing_enable() + + def gradient_checkpointing_disable(self): + self.activation_checkpointing_disable() + + def activation_checkpointing_disable(self): + self.llm.gradient_checkpointing_disable() + self.visual_encoder.gradient_checkpointing_disable() + self.projector.gradient_checkpointing_disable() + + def init_weights(self): + pass + + def state_dict(self, *args, **kwargs): + state_dict = super().state_dict(*args, **kwargs) + to_return = OrderedDict() + # Step 1. visual_encoder + if self.use_visual_encoder_lora: + to_return.update( + get_peft_model_state_dict( + self.visual_encoder, state_dict=state_dict)) + elif not self.freeze_visual_encoder: + to_return.update({ + k: v + for k, v in state_dict.items() if 'visual_encoder.' in k + }) + # Step 2. LLM + if self.use_llm_lora: + to_return.update( + get_peft_model_state_dict(self.llm, state_dict=state_dict)) + elif not self.freeze_llm: + to_return.update( + {k: v + for k, v in state_dict.items() if 'llm.' in k}) + # Step 3. Projector + to_return.update( + {k: v + for k, v in state_dict.items() if 'projector.' 
in k}) + return to_return + + @staticmethod + def _prepare_for_long_context_training(cfg, llm_cfg, + max_position_embeddings): + + orig_rope_scaling = getattr(llm_cfg, 'rope_scaling', None) + if orig_rope_scaling is None: + orig_rope_scaling = {'factor': 1} + + orig_rope_scaling_factor = orig_rope_scaling[ + 'factor'] if 'factor' in orig_rope_scaling.keys() else 1 + orig_ctx_len = getattr(llm_cfg, 'max_position_embeddings', None) + if orig_ctx_len: + orig_ctx_len *= orig_rope_scaling_factor + if max_position_embeddings > orig_ctx_len: + scaling_factor = float( + math.ceil(max_position_embeddings / orig_ctx_len)) + llm_cfg.rope_scaling = { + 'type': 'linear', + 'factor': scaling_factor + } + + # hardcode for internlm2 + llm_cfg.attn_implementation = 'flash_attention_2' + cfg.config = llm_cfg + + return cfg, llm_cfg + + @staticmethod + def _prepare_for_flash_attn(cfg, llm_cfg): + cls_name = type(llm_cfg).__name__ + SUPPORT_SDPA_ATTN = ('LlamaConfig', 'GemmaConfig', 'MistralConfig', + 'MixtralConfig', 'Qwen2Config', 'Qwen2MoeConfig', + 'Starcoder2Config', 'Starcoder2Config', + 'Phi3Config') + SUPPORT_FLASH_ATTN2 = ('InternLM2Config', 'LlamaConfig', 'GemmaConfig', + 'MistralConfig', 'MixtralConfig', 'Qwen2Config', + 'Qwen2MoeConfig', 'Starcoder2Config', + 'Starcoder2Config', 'Phi3Config') + + torch_dtype = torch.bfloat16 if ( + torch.cuda.is_available() and torch.cuda.is_bf16_supported()) \ + else torch.float16 + + if getattr(cfg, 'attn_implementation', None) is not None: + # Flash Attention 2.0 only supports torch.float16 and + # torch.bfloat16 dtypes + if cfg.attn_implementation == 'flash_attention_2': + cfg.torch_dtype = torch_dtype + elif SUPPORT_FLASH2 and cls_name in SUPPORT_FLASH_ATTN2: + cfg.torch_dtype = torch_dtype + cfg.attn_implementation = 'flash_attention_2' + elif SUPPORT_FLASH1 and cls_name in SUPPORT_SDPA_ATTN: + cfg.attn_implementation = 'sdpa' + + return cfg, llm_cfg + + @staticmethod + def _prepare_for_qlora_zero3(cfg): + if (not is_deepspeed_zero3_enabled()) or (not hasattr( + cfg, 'quantization_config')): + return cfg + + torch_dtype = torch.bfloat16 if ( + torch.cuda.is_available() and torch.cuda.is_bf16_supported()) \ + else torch.float16 + + cfg.torch_dtype = torch_dtype + quantization_config = cfg.quantization_config + quantization_config.bnb_4bit_compute_dtype = torch_dtype + quantization_config.bnb_4bit_quant_storage = torch_dtype + + return cfg + + def _dispatch_lm_model_cfg(self, cfg, max_position_embeddings=None): + cfg = self._prepare_for_qlora_zero3(cfg) + pretrained_model_name_or_path = cfg.pretrained_model_name_or_path + llm_cfg = AutoConfig.from_pretrained( + pretrained_model_name_or_path, trust_remote_code=True) + cfg, llm_cfg = self._prepare_for_flash_attn(cfg, llm_cfg) + if max_position_embeddings is not None: + cfg, llm_cfg = self._prepare_for_long_context_training( + cfg, llm_cfg, max_position_embeddings) + return cfg + + def _build_from_cfg_or_module(self, cfg_or_mod): + if isinstance(cfg_or_mod, nn.Module): + return cfg_or_mod + elif isinstance(cfg_or_mod, dict): + traverse_dict(cfg_or_mod) + return BUILDER.build(cfg_or_mod) + else: + raise NotImplementedError + + def forward(self, data, data_samples=None, mode='loss'): + if self.is_first_iter: + # hardcode for qlora DeepSpeed ZeRO3, put buffers and QuantState to + # device + # Only required in `LLaVAModel` . + # We do not need this in `SupervisedFinetune` . 
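+            # Doing this once, on the first batch, is effectively free when
+            # all parameters are already on the right device; it mainly
+            # relocates the quantization buffers and QuantState objects that
+            # DeepSpeed ZeRO-3 does not manage.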
+ self.to(data['input_ids'].device) + self.is_first_iter = False + + if 'pixel_values' in data: + visual_outputs = self.visual_encoder( + data['pixel_values'].to(self.visual_encoder.dtype), + output_hidden_states=True) + pixel_values = self.projector( + visual_outputs.hidden_states[self.visual_select_layer][:, 1:]) + data['pixel_values'] = pixel_values + data = prepare_inputs_labels_for_multimodal(llm=self.llm, **data) + + if mode == 'loss': + return self.compute_loss(data, data_samples) + elif mode == 'predict': + return self.predict(data, data_samples) + elif mode == 'tensor': + return self._forward(data, data_samples) + else: + raise NotImplementedError + + def _forward(self, data, data_samples=None): + + outputs = self.llm(**data) + + return outputs + + def predict(self, data, data_samples=None): + outputs = self.llm(**data) + logits_dict = [{'logits': logits} for logits in outputs.logits] + return logits_dict + + def compute_loss(self, data, data_samples=None): + outputs = self.llm(**data) + loss_dict = {'loss': outputs.loss} + return loss_dict + + def __getattr__(self, name: str): + try: + return super().__getattr__(name) + except AttributeError: + return getattr(self.llm, name) + + def to_hf(self, + cfg, + save_dir, + fp32=False, + save_pretrained_kwargs={}, + save_format='xtuner', + **kwargs): + if save_format == 'xtuner': + self.to_xtuner_llava(cfg, save_dir, fp32, save_pretrained_kwargs) + elif save_format == 'huggingface': + self.to_huggingface_llava(cfg, save_dir, fp32, + save_pretrained_kwargs) + elif save_format == 'official': + self.to_official_llava(cfg, save_dir, fp32, save_pretrained_kwargs) + else: + raise NotImplementedError + + def to_xtuner_llava(self, + cfg, + save_dir, + fp32=False, + save_pretrained_kwargs={}): + # LLM + self.llm.config.use_cache = True + if not fp32: + print_log('Convert LLM to float16', 'current') + self.llm.half() + if self.use_llm_lora: + llm_path = osp.join(save_dir, 'llm_adapter') + print_log(f'Saving LLM adapter to {llm_path}', 'current') + self.llm.save_pretrained(llm_path, **save_pretrained_kwargs) + elif not self.freeze_llm: + llm_path = save_dir + print_log(f'Saving LLM tokenizer to {llm_path}', 'current') + tokenizer = BUILDER.build(cfg.tokenizer) + tokenizer.save_pretrained(llm_path, **save_pretrained_kwargs) + print_log(f'Saving LLM to {llm_path}', 'current') + self.llm.save_pretrained(llm_path, **save_pretrained_kwargs) + self.llm.config.use_cache = False + + # Visual Encoder + if self.use_visual_encoder_lora: + visual_encoder_path = osp.join(save_dir, 'visual_encoder_adapter') + print_log( + f'Saving visual_encoder adapter to {visual_encoder_path}', + 'current') + self.visual_encoder.save_pretrained(visual_encoder_path, + **save_pretrained_kwargs) + elif not self.freeze_visual_encoder: + visual_encoder_path = osp.join(save_dir, 'visual_encoder') + print_log( + 'Saving visual_encoder image_processor to' + f'{visual_encoder_path}', 'current') + image_processor = BUILDER.build(cfg.image_processor) + image_processor.save_pretrained(visual_encoder_path, + **save_pretrained_kwargs) + print_log(f'Saving visual_encoder to {visual_encoder_path}', + 'current') + self.visual_encoder.save_pretrained(visual_encoder_path, + **save_pretrained_kwargs) + + # Projector + projector_path = osp.join(save_dir, 'projector') + print_log(f'Saving projector to {projector_path}', 'current') + self.projector.save_pretrained(projector_path, + **save_pretrained_kwargs) + + def to_huggingface_llava(self, + cfg, + save_dir, + fp32=False, + 
save_pretrained_kwargs={}): + + LLM_MAPPING = { + 'model': 'language_model.model', + 'lm_head': 'language_model.lm_head', + } + VIT_MAPPING = { + 'vision_model': 'vision_tower.vision_model', + } + PROJECTOR_MAPPING = { + 'model.0': 'multi_modal_projector.linear_1', + 'model.2': 'multi_modal_projector.linear_2', + } + + assert getattr(self.llm, 'hf_quantizer', None) is None, \ + 'This conversion format does not support quantized LLM.' + + # get state_dict + llm = self.llm + if self.use_llm_lora: + llm = self.llm.merge_and_unload() + llm.config.use_cache = True + if not fp32: + print_log('Convert LLM to float16', 'current') + llm.half() + + assert isinstance(llm, LlamaForCausalLM), \ + 'This conversion format only supports LlamaForCausalLM.' + llm_state_dict = llm.state_dict() + llm_state_dict = convert_state_dict_to_hf(llm_state_dict, LLM_MAPPING) + + need_visual_encoder = (not self.freeze_visual_encoder + or self.use_visual_encoder_lora) + visual_encoder = self.visual_encoder + if self.use_visual_encoder_lora: + visual_encoder = self.visual_encoder.merge_and_unload() + assert isinstance(visual_encoder, CLIPVisionModel),\ + 'This conversion format only supports CLIPVisionModel.' + if need_visual_encoder: + visual_encoder_state_dict = visual_encoder.state_dict() + visual_encoder_state_dict = convert_state_dict_to_hf( + visual_encoder_state_dict, VIT_MAPPING) + else: + visual_encoder_state_dict = {} + + projector_state_dict = self.projector.state_dict() + projector_state_dict = convert_state_dict_to_hf( + projector_state_dict, PROJECTOR_MAPPING) + + state_dict = { + **projector_state_dict, + **llm_state_dict, + **visual_encoder_state_dict + } + + # init model + text_config = llm.config + vision_config = visual_encoder.config + config = LlavaConfig( + text_config=text_config, + vision_config=vision_config, + attn_implementation='eager') + + with init_empty_weights(): + with warnings.catch_warnings(): + warnings.filterwarnings( + 'ignore', message='.*non-meta.*', category=UserWarning) + model = LlavaForConditionalGeneration(config) + model.load_state_dict(state_dict, strict=True, assign=True) + + # processor + cfg.tokenizer.type = LlamaTokenizerFast.from_pretrained + tokenizer = BUILDER.build(cfg.tokenizer) + + tokenizer.add_tokens( + AddedToken(DEFAULT_IMAGE_TOKEN, special=True, normalized=False), + special_tokens=True) + tokenizer.add_special_tokens({'pad_token': ''}) + + image_processor = BUILDER.build(cfg.image_processor) + assert isinstance(image_processor, CLIPImageProcessor),\ + 'This conversion format only supports CLIPImageProcessor.' 
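+        # The block below mirrors HF's LLaVA conversion recipe: after the
+        # image and pad special tokens are added, the new embedding rows are
+        # sampled from a multivariate normal fitted to the existing rows
+        # (mean `mu`, covariance `sigma`) rather than left at their default
+        # initialization, and the vocabulary is padded to a multiple of 64.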
+ + processor = LlavaProcessor( + tokenizer=tokenizer, image_processor=image_processor) + + # Pad to 64 for performance reasons + pad_shape = 64 + + pre_expansion_embeddings = \ + model.language_model.model.embed_tokens.weight.data + mu = torch.mean(pre_expansion_embeddings, dim=0).float() + n = pre_expansion_embeddings.size()[0] + sigma = ((pre_expansion_embeddings - mu).T + @ (pre_expansion_embeddings - mu)) / n + dist = torch.distributions.multivariate_normal.MultivariateNormal( + mu, covariance_matrix=1e-5 * sigma) + + # We add an image token so we need to resize the model + ori_vocab_size = config.text_config.vocab_size + tokenizer_vocab_size = tokenizer.encode('')[-1] + added_token = tokenizer_vocab_size - ori_vocab_size + + if added_token > 0: + model.resize_token_embeddings(ori_vocab_size + added_token, + pad_shape) + model.language_model.model.embed_tokens.weight.data[ + ori_vocab_size:] = torch.stack( + tuple( + dist.sample() + for _ in range(model.language_model.model.embed_tokens. + weight.data[ori_vocab_size:].shape[0])), + dim=0, + ) + model.language_model.lm_head.weight.data[ + ori_vocab_size:] = torch.stack( + tuple(dist.sample() + for _ in range(model.language_model.lm_head.weight. + data[ori_vocab_size:].shape[0])), + dim=0, + ) + model.config.image_token_index = tokenizer.encode( + DEFAULT_IMAGE_TOKEN)[-1] + model.config.pad_token_id = tokenizer.encode('')[-1] + + # save + print_log(f'Saving to {save_dir}', 'current') + model.save_pretrained(save_dir, **save_pretrained_kwargs) + processor.save_pretrained(save_dir, **save_pretrained_kwargs) + + def to_official_llava(self, + cfg, + save_dir, + fp32=False, + save_pretrained_kwargs={}): + + VIT_MAPPING = { + 'vision_model': 'model.vision_tower.vision_tower.vision_model', + } + PROJECTOR_MAPPING = { + 'model.0': 'model.mm_projector.0', + 'model.2': 'model.mm_projector.2', + } + + try: + from llava.model import LlavaConfig, LlavaLlamaForCausalLM + except ImportError: + raise ImportError( + 'Please install llava with ' + '`pip install git+https://github.com/haotian-liu/LLaVA.git ' + '--no-deps`.') + + assert getattr(self.llm, 'hf_quantizer', None) is None, \ + 'This conversion format does not support quantized LLM.' + + # get state_dict + llm = self.llm + if self.use_llm_lora: + llm = self.llm.merge_and_unload() + llm.config.use_cache = True + if not fp32: + print_log('Convert LLM to float16', 'current') + llm.half() + + assert isinstance(llm, LlamaForCausalLM), \ + 'This conversion format only supports LlamaForCausalLM.' + llm_state_dict = llm.state_dict() + + need_visual_encoder = (not self.freeze_visual_encoder + or self.use_visual_encoder_lora) + visual_encoder = self.visual_encoder + if self.use_visual_encoder_lora: + visual_encoder = self.visual_encoder.merge_and_unload() + assert isinstance(visual_encoder, CLIPVisionModel),\ + 'This conversion format only supports CLIPVisionModel.' 
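+        # Same flow as `to_huggingface_llava` above, but weights are remapped
+        # to the official LLaVA repo's naming (`model.vision_tower...`,
+        # `model.mm_projector...`) and loaded into a `LlavaLlamaForCausalLM`.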
+ if need_visual_encoder: + visual_encoder_state_dict = visual_encoder.state_dict() + visual_encoder_state_dict = convert_state_dict_to_hf( + visual_encoder_state_dict, VIT_MAPPING) + else: + visual_encoder_state_dict = {} + + projector_state_dict = self.projector.state_dict() + projector_state_dict = convert_state_dict_to_hf( + projector_state_dict, PROJECTOR_MAPPING) + + state_dict = { + **projector_state_dict, + **llm_state_dict, + **visual_encoder_state_dict + } + + # init model + tokenizer = BUILDER.build(cfg.tokenizer) + image_processor = BUILDER.build(cfg.image_processor) + assert isinstance(image_processor, CLIPImageProcessor),\ + 'This conversion format only supports CLIPImageProcessor.' + + llava_config_dict = llm.config.__dict__.copy() + llava_config_dict.update( + dict( + image_aspect_ratio='pad', + mm_hidden_size=visual_encoder.config.hidden_size, + mm_projector_type=f'mlp{self.projector_depth}x_gelu', + mm_use_im_patch_token=False, + mm_use_im_start_end=False, + mm_vision_select_feature='patch', + mm_vision_select_layer=self.visual_select_layer, + mm_vision_tower=visual_encoder.config.name_or_path, + unfreeze_mm_vision_tower=need_visual_encoder, + model_type='llava', + use_cache=True, + use_mm_proj=True)) + + llava_config = LlavaConfig(**llava_config_dict) + + with init_empty_weights(): + with warnings.catch_warnings(): + warnings.filterwarnings( + 'ignore', message='.*non-meta.*', category=UserWarning) + model = LlavaLlamaForCausalLM(llava_config) + + model.load_state_dict(state_dict, strict=True, assign=True) + + # save + print_log(f'Saving to {save_dir}', 'current') + + model.save_pretrained(save_dir, **save_pretrained_kwargs) + image_processor.save_pretrained(save_dir, **save_pretrained_kwargs) + tokenizer.save_pretrained(save_dir, **save_pretrained_kwargs) diff --git a/data/xtuner/xtuner/model/modules/__init__.py b/data/xtuner/xtuner/model/modules/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1207a9249708ff22b19db94a028b8d06f86f53a8 --- /dev/null +++ b/data/xtuner/xtuner/model/modules/__init__.py @@ -0,0 +1,4 @@ +from .dispatch import dispatch_modules +from .projector import ProjectorConfig, ProjectorModel + +__all__ = ['dispatch_modules', 'ProjectorConfig', 'ProjectorModel'] diff --git a/data/xtuner/xtuner/model/modules/dispatch/__init__.py b/data/xtuner/xtuner/model/modules/dispatch/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e81ec7a3aa69fe25ee4a95759cdcb377e4e1ddd7 --- /dev/null +++ b/data/xtuner/xtuner/model/modules/dispatch/__init__.py @@ -0,0 +1,276 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import os +import types + +import torch +import transformers +from mmengine.config.lazy import LazyObject +from mmengine.utils import digit_version +from transformers.utils.import_utils import is_flash_attn_2_available + +TRANSFORMERS_VERSION = digit_version(transformers.__version__) +IS_LOW_VERSION_TRANSFORMERS = TRANSFORMERS_VERSION < digit_version('4.38') +# Transformers requires torch version >= 2.1.1 when using Torch SDPA. 
+# Refer to https://github.com/huggingface/transformers/blob/caa5c65db1f4db617cdac2ad667ba62edf94dd98/src/transformers/modeling_utils.py#L1611 # noqa: E501 +SUPPORT_FLASH1 = digit_version(torch.__version__) >= digit_version('2.1.1') +SUPPORT_FLASH2 = is_flash_attn_2_available() +SUPPORT_FLASH = SUPPORT_FLASH1 or SUPPORT_FLASH2 + +USE_TRITON_KERNEL = bool(os.getenv('USE_TRITON_KERNEL', default=0)) +SUPPORT_TRITON = False +try: + import triton # pre-check # noqa: F401 + import triton.language as tl # pre-check # noqa: F401 + SUPPORT_TRITON = True +except ImportError: + if USE_TRITON_KERNEL: + raise RuntimeError( + 'USE_TRITON_KERNEL is set to 1, but triton has not been installed.' + ' Run `pip install triton==2.1.0` to install triton.') + +NO_ATTN_WEIGHTS_MSG = ( + 'Due to the implementation of the PyTorch version of flash attention, ' + 'even when the `output_attentions` flag is set to True, it is not ' + 'possible to return the `attn_weights`.') + +LOWEST_TRANSFORMERS_VERSION = dict( + InternLM2ForCausalLM=digit_version('4.36'), + InternLMForCausalLM=digit_version('4.36'), + LlamaForCausalLM=digit_version('4.36'), + Phi3ForCausalLM=digit_version('4.39'), + MistralForCausalLM=digit_version('4.36'), + # Training mixtral with lower version may lead to nccl timeout + # Refer to https://github.com/microsoft/DeepSpeed/issues/5066 + MixtralForCausalLM=digit_version('4.40'), + CohereForCausalLM=digit_version('4.40'), + Qwen2ForCausalLM=digit_version('4.39'), + Qwen2MoeForCausalLM=digit_version('4.40'), + DeepseekV2ForCausalLM=digit_version('4.40'), +) + +ATTN_DISPATCH_MAPPING = dict( + InternLM2FlashAttention2=LazyObject( + 'xtuner.model.modules.dispatch.internlm2', 'internlm2_attn_forward'), + InternLMAttention=LazyObject('xtuner.model.modules.dispatch.internlm', + 'internlm_attn_forward'), + LlamaFlashAttention2=LazyObject('xtuner.model.modules.dispatch.llama', + 'llama_attn_forward'), + Phi3FlashAttention2=LazyObject('xtuner.model.modules.dispatch.phi3', + 'phi3_attn_forward'), + MistralFlashAttention2=LazyObject('xtuner.model.modules.dispatch.mistral', + 'mistral_attn_forward'), + MixtralFlashAttention2=LazyObject('xtuner.model.modules.dispatch.mistral', + 'mistral_attn_forward'), + CohereFlashAttention2=LazyObject('xtuner.model.modules.dispatch.cohere', + 'cohere_attn_forward'), + Qwen2FlashAttention2=LazyObject('xtuner.model.modules.dispatch.qwen2', + 'qwen2_attn_forward'), + Qwen2MoeFlashAttention2=LazyObject('xtuner.model.modules.dispatch.qwen2', + 'qwen2_attn_forward'), + DeepseekV2FlashAttention2=LazyObject( + 'xtuner.model.modules.dispatch.deepseek_v2', 'deepseek_attn_forward'), +) + +ATTN_LEGACY_DISPATCH_MAPPING = dict( + LlamaFlashAttention2=LazyObject('xtuner.model.modules.dispatch.llama', + 'llama_attn_forward_legacy'), ) + +VARLEN_ATTN_DISPATCH_MAPPING = dict( + InternLM2FlashAttention2=LazyObject( + 'xtuner.model.modules.dispatch.internlm2', + 'internlm2_varlen_attn_forward'), + InternLMAttention=LazyObject('xtuner.model.modules.dispatch.internlm', + 'internlm_varlen_attn_forward'), + LlamaFlashAttention2=LazyObject('xtuner.model.modules.dispatch.llama', + 'llama_varlen_attn_forward'), + Phi3FlashAttention2=LazyObject('xtuner.model.modules.dispatch.phi3', + 'phi3_varlen_attn_forward'), + MistralFlashAttention2=LazyObject('xtuner.model.modules.dispatch.mistral', + 'mistral_varlen_attn_forward'), + MixtralFlashAttention2=LazyObject('xtuner.model.modules.dispatch.mistral', + 'mistral_varlen_attn_forward'), + CohereFlashAttention2=None, + 
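    # Note added in this copy: `None` records that no varlen flash-attention
+    # forward is registered for this attention class in this mapping.
+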
Qwen2FlashAttention2=LazyObject('xtuner.model.modules.dispatch.qwen2', + 'qwen2_varlen_attn_forward'), + Qwen2MoeFlashAttention2=LazyObject('xtuner.model.modules.dispatch.qwen2', + 'qwen2_varlen_attn_forward'), + DeepseekV2FlashAttention2=LazyObject( + 'xtuner.model.modules.dispatch.deepseek_v2', + 'deepseek_varlen_attn_forward'), +) + +VARLEN_ATTN_LEGACY_DISPATCH_MAPPING = dict( + LlamaFlashAttention2=LazyObject('xtuner.model.modules.dispatch.llama', + 'llama_varlen_attn_forward_legacy'), ) + +RMS_DISPATCH_MAPPING = dict( + InternLM2RMSNorm=LazyObject('xtuner.model.modules.dispatch.triton_kernels', + 'rms_norm_forward'), + InternLMRMSNorm=LazyObject('xtuner.model.modules.dispatch.triton_kernels', + 'rms_norm_forward'), + LlamaRMSNorm=LazyObject('xtuner.model.modules.dispatch.triton_kernels', + 'rms_norm_forward'), + Phi3RMSNorm=LazyObject('xtuner.model.modules.dispatch.triton_kernels', + 'rms_norm_forward'), + MistralRMSNorm=LazyObject('xtuner.model.modules.dispatch.triton_kernels', + 'rms_norm_forward'), + MixtralRMSNorm=LazyObject('xtuner.model.modules.dispatch.triton_kernels', + 'rms_norm_forward'), + CohereLayerNorm=LazyObject('xtuner.model.modules.dispatch.triton_kernels', + 'layer_norm_forward'), + Qwen2RMSNorm=LazyObject('xtuner.model.modules.dispatch.triton_kernels', + 'rms_norm_forward'), + Qwen2MoeRMSNorm=LazyObject('xtuner.model.modules.dispatch.triton_kernels', + 'rms_norm_forward'), +) + +ROTE_DISPATCH_MAPPING = dict( + InternLMRotaryEmbedding=LazyObject( + 'xtuner.model.modules.dispatch.internlm', 'InternLMRotaryEmbedding'), + MistralRotaryEmbedding=LazyObject('xtuner.model.modules.dispatch.mistral', + 'MistralRotaryEmbedding'), + MixtralRotaryEmbedding=LazyObject('xtuner.model.modules.dispatch.mistral', + 'MistralRotaryEmbedding'), +) + + +def log_once(func): + logged = False + + def wrapper(*args, **kwargs): + nonlocal logged + if not logged: + logged = True + func(*args, **kwargs) + return + + return wrapper + + +def dispatch_attn_forward(model): + + if not SUPPORT_FLASH2: + return + + from mmengine import print_log + print_log = log_once(print_log) + + attn_forward = None + for module in model.modules(): + name = type(module).__name__ + if (IS_LOW_VERSION_TRANSFORMERS + and name in ATTN_LEGACY_DISPATCH_MAPPING): + if attn_forward is None: + attn_forward = ATTN_LEGACY_DISPATCH_MAPPING[name] + attn_forward = attn_forward.build() + print_log(f'Dispatch {name} legacy forward. {NO_ATTN_WEIGHTS_MSG}', + 'current') + module.forward = types.MethodType(attn_forward, module) + elif name in ATTN_DISPATCH_MAPPING: + if attn_forward is None: + attn_forward = ATTN_DISPATCH_MAPPING[name] + attn_forward = attn_forward.build() + print_log(f'Dispatch {name} forward. {NO_ATTN_WEIGHTS_MSG}', + 'current') + module.forward = types.MethodType(attn_forward, module) + + +def dispatch_varlen_attn_forward(model): + + if not SUPPORT_FLASH2: + return + + from mmengine import print_log + print_log = log_once(print_log) + + varlen_attn_forward = None + for module in model.modules(): + name = type(module).__name__ + if (IS_LOW_VERSION_TRANSFORMERS + and name in VARLEN_ATTN_LEGACY_DISPATCH_MAPPING): + if varlen_attn_forward is None: + varlen_attn_forward = VARLEN_ATTN_LEGACY_DISPATCH_MAPPING[name] + varlen_attn_forward = varlen_attn_forward.build() + print_log( + f'Dispatch legacy {name} varlen forward. 
' + f'{NO_ATTN_WEIGHTS_MSG}', 'current') + module.forward = types.MethodType(varlen_attn_forward, module) + elif name in VARLEN_ATTN_DISPATCH_MAPPING: + if varlen_attn_forward is None: + varlen_attn_forward = VARLEN_ATTN_DISPATCH_MAPPING[name] + varlen_attn_forward = varlen_attn_forward.build() + print_log(f'Dispatch {name} varlen forward. {NO_ATTN_WEIGHTS_MSG}', + 'current') + module.forward = types.MethodType(varlen_attn_forward, module) + + +def dispatch_rmsnorm_forward(model): + + if (not SUPPORT_TRITON) or (not USE_TRITON_KERNEL): + return + + from mmengine import print_log + print_log = log_once(print_log) + + rms_forward = None + for module in model.modules(): + name = type(module).__name__ + if name in RMS_DISPATCH_MAPPING: + if rms_forward is None: + rms_forward = RMS_DISPATCH_MAPPING[name] + rms_forward = rms_forward.build() + print_log(f'Dispatch {name} forward.', 'current') + module.forward = types.MethodType(rms_forward, module) + + +def replace_rote(model): + + from mmengine import print_log + print_log = log_once(print_log) + + def traverse(module): + for name, child in module.named_children(): + cls_name = type(child).__name__ + if cls_name in ROTE_DISPATCH_MAPPING: + assert hasattr(model.config, 'rope_theta'), \ + '`rope_theta` should be in the model config.' + rope_theta = model.config.rope_theta + + rote = ROTE_DISPATCH_MAPPING[cls_name] + rote = rote.build() + print_log(f'replace {cls_name}', 'current') + dim_model = child.inv_freq.shape[0] * 2 + child_new = rote(dim_model, child.max_seq_len_cached, + rope_theta).to( + device=child.inv_freq.device, + dtype=child.inv_freq.dtype) + setattr(module, name, child_new) + else: + traverse(child) + + traverse(model) + + +def dispatch_modules(model, use_varlen_attn=False): + + def check(model_name): + if 'ForCausalLM' not in model_name and model_name.endswith('Model'): + # a walkaround for reward model + model_name = model_name[:-5] + 'ForCausalLM' + msg = '{} requires transformers version at least {}, but got {}' + if model_name in LOWEST_TRANSFORMERS_VERSION: + assert TRANSFORMERS_VERSION >= LOWEST_TRANSFORMERS_VERSION[ + model_name], msg.format( + model_name, LOWEST_TRANSFORMERS_VERSION[model_name], + TRANSFORMERS_VERSION) + + check(type(model).__name__) + if use_varlen_attn: + dispatch_varlen_attn_forward(model) + else: + dispatch_attn_forward(model) + dispatch_rmsnorm_forward(model) + replace_rote(model) + + +__all__ = ['dispatch_modules'] diff --git a/data/xtuner/xtuner/model/modules/dispatch/attention.py b/data/xtuner/xtuner/model/modules/dispatch/attention.py new file mode 100644 index 0000000000000000000000000000000000000000..e89bb511cc946e521438c442caca97c1f594403b --- /dev/null +++ b/data/xtuner/xtuner/model/modules/dispatch/attention.py @@ -0,0 +1,97 @@ +from xtuner.parallel.sequence import sequence_parallel_wrapper +from .utils import upad_qkv + +SUPPORT_FLASH2 = False + +try: + from flash_attn import flash_attn_func, flash_attn_varlen_func + from flash_attn.bert_padding import pad_input + SUPPORT_FLASH2 = True +except ImportError: + pass + + +@sequence_parallel_wrapper +def flash_attn_wo_mask( + query_states, + key_states, + value_states, + dropout_p=0.0, + softmax_scale=None, + causal=True, + window_size=(-1, -1), # -1 means infinite context window +): + attn_output = flash_attn_func( + query_states, + key_states, + value_states, + dropout_p=dropout_p, + softmax_scale=softmax_scale, + causal=causal, + window_size=window_size) + return attn_output + + +@sequence_parallel_wrapper +def flash_attn_w_mask( + 
query_states, # bs, q_len, nhead, h_dim + key_states, + value_states, + attention_mask, + softmax_scale=None, + causal=True, + dropout_p=0.0, + window_size=(-1, -1), # -1 means infinite context window +): + batch_size, q_len = query_states.shape[:2] + query_states, key_states, value_states, indices_q, \ + cu_seq_lens, max_seq_lens = upad_qkv( + query_states, key_states, value_states, attention_mask, q_len) + + cu_seqlens_q, cu_seqlens_k = cu_seq_lens + max_seqlen_in_batch_q, max_seqlen_in_batch_k = max_seq_lens + attn_output_unpad = flash_attn_varlen_func( + query_states, + key_states, + value_states, + cu_seqlens_q=cu_seqlens_q, + cu_seqlens_k=cu_seqlens_k, + max_seqlen_q=max_seqlen_in_batch_q, + max_seqlen_k=max_seqlen_in_batch_k, + softmax_scale=softmax_scale, + dropout_p=dropout_p, + causal=causal, + window_size=window_size) + attn_output = pad_input(attn_output_unpad, indices_q, batch_size, q_len) + return attn_output + + +@sequence_parallel_wrapper +def varlen_flash_attn( + query_states, + key_states, + value_states, + cumulative_len, + max_seqlen, + softmax_scale=None, + dropout_p=0., + causal=True, + window_size=(-1, -1), # -1 means infinite context window +): + q_unpad, k_unpad, v_unpad = query_states.flatten(0, 1), key_states.flatten( + 0, 1), value_states.flatten(0, 1) + attn_output = flash_attn_varlen_func( + q_unpad, + k_unpad, + v_unpad, + cumulative_len, + cumulative_len, + max_seqlen, + max_seqlen, + softmax_scale=softmax_scale, + dropout_p=dropout_p, + return_attn_probs=False, + causal=causal, + window_size=window_size) + attn_output = attn_output.unsqueeze(0) + return attn_output diff --git a/data/xtuner/xtuner/model/modules/dispatch/baichuan.py b/data/xtuner/xtuner/model/modules/dispatch/baichuan.py new file mode 100644 index 0000000000000000000000000000000000000000..738c49869882a16bcea06f9efb18e41d8a76d1e8 --- /dev/null +++ b/data/xtuner/xtuner/model/modules/dispatch/baichuan.py @@ -0,0 +1,118 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
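+# Note added in this copy (not upstream xtuner): this module provides patched
+# Baichuan attention forwards built on F.scaled_dot_product_attention. As a
+# quick sketch of what `rotate_half` below computes, for a toy 4-dim head:
+#
+#     >>> import torch
+#     >>> x = torch.tensor([1., 2., 3., 4.])
+#     >>> torch.cat((-x[2:], x[:2]))  # same as rotate_half(x)
+#     tensor([-3., -4.,  1.,  2.])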
+from typing import Optional, Tuple + +import torch +import torch.nn as nn +import torch.nn.functional as F + + +def baichuan2_norm_head_forward(self, hidden_states): + norm_weight = nn.functional.normalize(self.weight) + return nn.functional.linear(hidden_states, norm_weight) + + +def rotate_half(x): + """Rotates half the hidden dims of the input.""" + x1 = x[..., :x.shape[-1] // 2] + x2 = x[..., x.shape[-1] // 2:] + return torch.cat((-x2, x1), dim=-1) + + +def apply_rotary_pos_emb(q, k, cos_, sin_, position_ids): + cos = cos_.squeeze(1).squeeze(0) # [seq_len, dim] + sin = sin_.squeeze(1).squeeze(0) # [seq_len, dim] + cos = cos[position_ids].unsqueeze(1) # [bs, 1, seq_len, dim] + sin = sin[position_ids].unsqueeze(1) # [bs, 1, seq_len, dim] + q_embed = (q.float() * cos) + (rotate_half(q.float()) * sin) + k_embed = (k.float() * cos) + (rotate_half(k.float()) * sin) + return q_embed.to(q.dtype), k_embed.to(k.dtype) + + +def baichuan_7b_attn_forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Tuple[torch.Tensor]] = None, + output_attentions: bool = False, + use_cache: bool = False, +) -> Tuple[torch.Tensor, Optional[torch.Tensor], + Optional[Tuple[torch.Tensor]]]: + bsz, q_len, _ = hidden_states.size() + + proj = self.W_pack(hidden_states) + proj = proj.unflatten(-1, (3, self.hidden_size)).unsqueeze(0).transpose( + 0, -2).squeeze(-2) + query_states = proj[0].view(bsz, q_len, self.num_heads, + self.head_dim).transpose(1, 2) + key_states = proj[1].view(bsz, q_len, self.num_heads, + self.head_dim).transpose(1, 2) + value_states = proj[2].view(bsz, q_len, self.num_heads, + self.head_dim).transpose(1, 2) + + kv_seq_len = key_states.shape[-2] + if past_key_value is not None: + kv_seq_len += past_key_value[0].shape[-2] + cos, sin = self.rotary_emb(value_states, seq_len=kv_seq_len) + query_states, key_states = apply_rotary_pos_emb(query_states, key_states, + cos, sin, position_ids) + # [bsz, nh, t, hd] + + if past_key_value is not None: + # reuse k, v, self_attention + key_states = torch.cat([past_key_value[0], key_states], dim=2) + value_states = torch.cat([past_key_value[1], value_states], dim=2) + + past_key_value = (key_states, value_states) if use_cache else None + attn_output = F.scaled_dot_product_attention( + query_states, key_states, value_states, attn_mask=attention_mask) + attn_output = attn_output.transpose(1, 2) + attn_output = attn_output.reshape(bsz, q_len, self.hidden_size) + attn_output = self.o_proj(attn_output) + return attn_output, None, past_key_value + + +def baichuan_13b_attn_forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + past_key_value: Optional[Tuple[torch.Tensor]] = None, + output_attentions: bool = False, + use_cache: bool = False, +) -> Tuple[torch.Tensor, Optional[torch.Tensor], + Optional[Tuple[torch.Tensor]]]: + bsz, q_len, _ = hidden_states.size() + + proj = self.W_pack(hidden_states) + proj = proj.unflatten(-1, (3, self.hidden_size)).unsqueeze(0).transpose( + 0, -2).squeeze(-2) + query_states = proj[0].view(bsz, q_len, self.num_heads, + self.head_dim).transpose(1, 2) + key_states = proj[1].view(bsz, q_len, self.num_heads, + self.head_dim).transpose(1, 2) + value_states = proj[2].view(bsz, q_len, self.num_heads, + self.head_dim).transpose(1, 2) + + kv_seq_len = key_states.shape[-2] + if past_key_value is not None: + kv_seq_len += past_key_value[0].shape[-2] + + if past_key_value is not None: + # 
reuse k, v, self_attention + key_states = torch.cat([past_key_value[0], key_states], dim=2) + value_states = torch.cat([past_key_value[1], value_states], dim=2) + + past_key_value = (key_states, value_states) if use_cache else None + if attention_mask is not None: + if q_len == 1: # inference with cache + if len(attention_mask.size()) == 4: + attention_mask = attention_mask[:, :, -1:, :] + else: + attention_mask = attention_mask[:, -1:, :] + attn_output = F.scaled_dot_product_attention( + query_states, key_states, value_states, attn_mask=attention_mask) + attn_output = attn_output.transpose(1, 2) + attn_output = attn_output.reshape(bsz, q_len, self.hidden_size) + attn_output = self.o_proj(attn_output) + + return attn_output, None, past_key_value diff --git a/data/xtuner/xtuner/model/modules/dispatch/cohere.py b/data/xtuner/xtuner/model/modules/dispatch/cohere.py new file mode 100644 index 0000000000000000000000000000000000000000..8acf067474409e4f5a7a108b2b86c762c2fad37c --- /dev/null +++ b/data/xtuner/xtuner/model/modules/dispatch/cohere.py @@ -0,0 +1,153 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Optional + +import torch +import torch.distributed as dist +import transformers +from mmengine.utils import digit_version +from transformers.models.cohere.modeling_cohere import apply_rotary_pos_emb + +from xtuner.parallel.sequence import get_sequence_parallel_world_size +from xtuner.parallel.sequence.attention import ( + post_process_for_sequence_parallel_attn, + pre_process_for_sequence_parallel_attn) + +try: + from transformers.cache_utils import Cache +except ImportError: + + class Cache: + pass + + +TRANSFORMERS_VERSION = digit_version(transformers.__version__) +IS_LOW_VERSION_TRANSFORMERS = TRANSFORMERS_VERSION < digit_version('4.43') + +if not IS_LOW_VERSION_TRANSFORMERS: + from transformers.modeling_flash_attention_utils import \ + _flash_attention_forward + + +def cohere_attn_forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.LongTensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Cache] = None, + output_attentions: bool = False, + use_cache: bool = False, + cache_position: Optional[torch.LongTensor] = None, + **kwargs, +): + output_attentions = False + + bsz, q_len, _ = hidden_states.size() + + query_states = self.q_proj(hidden_states) + key_states = self.k_proj(hidden_states) + value_states = self.v_proj(hidden_states) + + query_states = query_states.view(bsz, q_len, self.num_heads, self.head_dim) + key_states = key_states.view(bsz, q_len, self.num_key_value_heads, + self.head_dim) + if self.use_qk_norm: + query_states = self.q_norm(query_states) + key_states = self.k_norm(key_states) + + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.view(bsz, q_len, self.num_key_value_heads, + self.head_dim).transpose(1, 2) + + cos, sin = self.rotary_emb(value_states, position_ids) + query_states, key_states = apply_rotary_pos_emb(query_states, key_states, + cos, sin) + + past_key_value = getattr(self, 'past_key_value', past_key_value) + + if past_key_value is not None: + # sin and cos are specific to RoPE models; position_ids needed for + # the static cache + cache_kwargs = { + 'sin': sin, + 'cos': cos, + 'cache_position': cache_position + } + key_states, value_states = past_key_value.update( + key_states, value_states, self.layer_idx, cache_kwargs) + + # TODO: These transpose are quite inefficient but Flash Attention requires + # 
the layout [batch_size, sequence_length, num_heads, head_dim]. + # We would need to refactor the KV cache to be able to avoid many of + # these transpose/reshape/view. + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + + dropout_rate = self.attention_dropout if self.training else 0.0 + + # Ignore copy + # In PEFT, usually we cast the layer norms in float32 for training + # stability reasons therefore the input hidden states gets silently + # casted in float32. Hence, we need cast them back in the correct dtype + # just to be sure everything works as expected. + # This might slowdown training & inference so it is recommended to not + # cast the LayerNorms in fp32. (LlamaRMSNorm handles it correctly) + + input_dtype = query_states.dtype + if input_dtype == torch.float32: + if torch.is_autocast_enabled(): + target_dtype = torch.get_autocast_gpu_dtype() + # Handle the case where the model is quantized + elif hasattr(self.config, '_pre_quantization_dtype'): + target_dtype = self.config._pre_quantization_dtype + else: + target_dtype = self.q_proj.weight.dtype + + query_states = query_states.to(target_dtype) + key_states = key_states.to(target_dtype) + value_states = value_states.to(target_dtype) + + enable_sequence_parallel = ( + dist.is_initialized() and get_sequence_parallel_world_size() > 1 + and self.training) + if enable_sequence_parallel: + query_states, key_states, value_states = \ + pre_process_for_sequence_parallel_attn( + query_states, key_states, value_states) + # self.num_heads is used in self._upad_input method + # num_heads has been changed because of sequence parallel + ori_num_head = self.num_heads + self.num_heads = query_states.shape[-2] + + if IS_LOW_VERSION_TRANSFORMERS: + attn_output = self._flash_attention_forward( + query_states, + key_states, + value_states, + attention_mask, + query_states.shape[1], + dropout=dropout_rate) + else: + attn_output = _flash_attention_forward( + query_states, + key_states, + value_states, + attention_mask, + query_states.shape[1], + dropout=dropout_rate, + use_top_left_mask=self._flash_attn_uses_top_left_mask, + is_causal=self.is_causal, + ) + + if enable_sequence_parallel: + attn_output = post_process_for_sequence_parallel_attn(attn_output) + self.num_heads = ori_num_head + + attn_output = attn_output.reshape(bsz, q_len, self.hidden_size) + attn_output = self.o_proj(attn_output) + + if not output_attentions: + attn_weights = None + + return attn_output, attn_weights, past_key_value diff --git a/data/xtuner/xtuner/model/modules/dispatch/deepseek_v2.py b/data/xtuner/xtuner/model/modules/dispatch/deepseek_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..bfa3ebb6db8c4a7c1bb4e04a004d24e3f774755a --- /dev/null +++ b/data/xtuner/xtuner/model/modules/dispatch/deepseek_v2.py @@ -0,0 +1,308 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
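+# Note added in this copy (not upstream xtuner): DeepseekV2 uses multi-head
+# latent attention, so each query/key is the concatenation of a
+# position-independent slice (`qk_nope_head_dim`) and a RoPE slice
+# (`qk_rope_head_dim`); only the RoPE slices are rotated by
+# `apply_rotary_pos_emb` in the forwards below.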
+import warnings +from typing import Optional + +import torch +import torch.distributed as dist +import torch.nn.functional as F +from mmengine import MessageHub +from transformers.cache_utils import Cache + +from xtuner.model.transformers_models.deepseek_v2.modeling_deepseek import \ + apply_rotary_pos_emb +from xtuner.parallel.sequence import (get_sequence_parallel_world_size, + post_process_for_sequence_parallel_attn, + pre_process_for_sequence_parallel_attn) +from .attention import flash_attn_wo_mask, varlen_flash_attn + + +def deepseek_attn_forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.LongTensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Cache] = None, + output_attentions: bool = False, + use_cache: bool = False, + **kwargs, +): + # DeepseekV2FlashAttention2 attention does not support output_attentions + if 'padding_mask' in kwargs: + warnings.warn( + 'Passing `padding_mask` is deprecated and will be removed in ' + 'v4.37. Please make sure use `attention_mask` instead.`') + + # overwrite attention_mask with padding_mask + attention_mask = kwargs.pop('padding_mask') + + output_attentions = False + + bsz, q_len, _ = hidden_states.size() + + if self.q_lora_rank is None: + q = self.q_proj(hidden_states) + else: + q = self.q_b_proj(self.q_a_layernorm(self.q_a_proj(hidden_states))) + q = q.view(bsz, q_len, self.num_heads, self.q_head_dim).transpose(1, 2) + q_nope, q_pe = torch.split( + q, [self.qk_nope_head_dim, self.qk_rope_head_dim], dim=-1) + + # Flash attention requires the input to have the shape + # batch_size x seq_length x head_dim x hidden_dim + # therefore we just need to keep the original shape + compressed_kv = self.kv_a_proj_with_mqa(hidden_states) + compressed_kv, k_pe = torch.split( + compressed_kv, [self.kv_lora_rank, self.qk_rope_head_dim], dim=-1) + k_pe = k_pe.view(bsz, q_len, 1, self.qk_rope_head_dim).transpose(1, 2) + kv = ( + self.kv_b_proj(self.kv_a_layernorm(compressed_kv)).view( + bsz, q_len, self.num_heads, + self.qk_nope_head_dim + self.v_head_dim).transpose(1, 2)) + + k_nope, value_states = torch.split( + kv, [self.qk_nope_head_dim, self.v_head_dim], dim=-1) + kv_seq_len = value_states.shape[-2] + + kv_seq_len = value_states.shape[-2] + if past_key_value is not None: + kv_seq_len += past_key_value.get_usable_length(kv_seq_len, + self.layer_idx) + + assert position_ids is not None, '`position_ids` should not be None.' 
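+    # Note added in this copy: during training the cos/sin tables only need
+    # to cover the largest position id in the batch, while at inference they
+    # must cover the whole cached key/value length.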
+ if self.training: + cos, sin = self.rotary_emb( + value_states, seq_len=position_ids.max() + 1) + else: + cos, sin = self.rotary_emb(value_states, seq_len=kv_seq_len) + q_pe, k_pe = apply_rotary_pos_emb(q_pe, k_pe, cos, sin, position_ids) + + query_states = k_pe.new_empty(bsz, self.num_heads, q_len, self.q_head_dim) + query_states[:, :, :, :self.qk_nope_head_dim] = q_nope + query_states[:, :, :, self.qk_nope_head_dim:] = q_pe + + key_states = k_pe.new_empty(bsz, self.num_heads, q_len, self.q_head_dim) + key_states[:, :, :, :self.qk_nope_head_dim] = k_nope + key_states[:, :, :, self.qk_nope_head_dim:] = k_pe + + if self.q_head_dim != self.v_head_dim: + value_states = F.pad(value_states, + [0, self.q_head_dim - self.v_head_dim]) + + if past_key_value is not None: + cache_kwargs = {'sin': sin, 'cos': cos} # Specific to RoPE models + key_states, value_states = past_key_value.update( + key_states, value_states, self.layer_idx, cache_kwargs) + + # Reashape to the expected shape for Flash Attention + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + + dropout_rate = self.attention_dropout if self.training else 0.0 + + # In PEFT, usually we cast the layer norms in float32 for training + # stability reasons therefore the input hidden states gets silently + # casted in float32. Hence, we need cast them back in the correct dtype + # just to be sure everything works as expected. + # This might slowdown training & inference so it is recommended to not + # cast the LayerNorms in fp32. (DeepseekV2RMSNorm handles it correctly) + + input_dtype = query_states.dtype + if input_dtype == torch.float32: + # Handle the case where the model is quantized + if hasattr(self.config, '_pre_quantization_dtype'): + target_dtype = self.config._pre_quantization_dtype + elif torch.is_autocast_enabled(): + target_dtype = torch.get_autocast_gpu_dtype() + else: + target_dtype = self.q_a_proj.weight.dtype + + query_states = query_states.to(target_dtype) + key_states = key_states.to(target_dtype) + value_states = value_states.to(target_dtype) + + enable_sequence_parallel = ( + dist.is_initialized() and get_sequence_parallel_world_size() > 1 + and self.training) + if enable_sequence_parallel: + query_states, key_states, value_states = \ + pre_process_for_sequence_parallel_attn( + query_states, key_states, value_states) + # self.num_heads is used in self._upad_input method + # num_heads has been changed because of sequence parallel + ori_num_head = self.num_heads + self.num_heads = query_states.shape[-2] + + attn_output = self._flash_attention_forward( + query_states, + key_states, + value_states, + attention_mask, + query_states.shape[1], + dropout=dropout_rate, + softmax_scale=self.softmax_scale, + ) + + if enable_sequence_parallel: + attn_output = post_process_for_sequence_parallel_attn(attn_output) + self.num_heads = ori_num_head + + if self.q_head_dim != self.v_head_dim: + attn_output = attn_output[:, :, :, :self.v_head_dim] + + attn_output = attn_output.reshape(bsz, q_len, self.num_heads * + self.v_head_dim).contiguous() + attn_output = self.o_proj(attn_output) + + if not output_attentions: + attn_weights = None + + return attn_output, attn_weights, past_key_value + + +def deepseek_varlen_attn_forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.LongTensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Cache] = None, + output_attentions: bool = False, + use_cache: bool = 
False, + **kwargs, +): + is_training = self.training + + message_hub = MessageHub.get_instance('varlen_attn_args') + rank = dist.get_rank() + cumulative_len = message_hub.get_info(f'cumulative_len_rank_{rank}') + max_seqlen = message_hub.get_info(f'max_seqlen_rank_{rank}') + + assert is_training == (cumulative_len is not None) == ( + past_key_value is None) + + output_attentions = False + + bsz, q_len, _ = hidden_states.size() + + if self.q_lora_rank is None: + q = self.q_proj(hidden_states) + else: + q = self.q_b_proj(self.q_a_layernorm(self.q_a_proj(hidden_states))) + q = q.view(bsz, q_len, self.num_heads, self.q_head_dim).transpose(1, 2) + q_nope, q_pe = torch.split( + q, [self.qk_nope_head_dim, self.qk_rope_head_dim], dim=-1) + + # Flash attention requires the input to have the shape + # batch_size x seq_length x head_dim x hidden_dim + # therefore we just need to keep the original shape + compressed_kv = self.kv_a_proj_with_mqa(hidden_states) + compressed_kv, k_pe = torch.split( + compressed_kv, [self.kv_lora_rank, self.qk_rope_head_dim], dim=-1) + k_pe = k_pe.view(bsz, q_len, 1, self.qk_rope_head_dim).transpose(1, 2) + kv = ( + self.kv_b_proj(self.kv_a_layernorm(compressed_kv)).view( + bsz, q_len, self.num_heads, + self.qk_nope_head_dim + self.v_head_dim).transpose(1, 2)) + + k_nope, value_states = torch.split( + kv, [self.qk_nope_head_dim, self.v_head_dim], dim=-1) + kv_seq_len = value_states.shape[-2] + + kv_seq_len = value_states.shape[-2] + if past_key_value is not None: + kv_seq_len += past_key_value.get_usable_length(kv_seq_len, + self.layer_idx) + + assert position_ids is not None, '`position_ids` should not be None.' + if self.training: + cos, sin = self.rotary_emb( + value_states, seq_len=position_ids.max() + 1) + else: + cos, sin = self.rotary_emb(value_states, seq_len=kv_seq_len) + q_pe, k_pe = apply_rotary_pos_emb(q_pe, k_pe, cos, sin, position_ids) + + query_states = k_pe.new_empty(bsz, self.num_heads, q_len, self.q_head_dim) + query_states[:, :, :, :self.qk_nope_head_dim] = q_nope + query_states[:, :, :, self.qk_nope_head_dim:] = q_pe + + key_states = k_pe.new_empty(bsz, self.num_heads, q_len, self.q_head_dim) + key_states[:, :, :, :self.qk_nope_head_dim] = k_nope + key_states[:, :, :, self.qk_nope_head_dim:] = k_pe + + if self.q_head_dim != self.v_head_dim: + value_states = F.pad(value_states, + [0, self.q_head_dim - self.v_head_dim]) + + if past_key_value is not None: + cache_kwargs = {'sin': sin, 'cos': cos} # Specific to RoPE models + key_states, value_states = past_key_value.update( + key_states, value_states, self.layer_idx, cache_kwargs) + + # In PEFT, usually we cast the layer norms in float32 for training + # stability reasons therefore the input hidden states gets silently + # casted in float32. Hence, we need cast them back in the correct dtype + # just to be sure everything works as expected. + # This might slowdown training & inference so it is recommended to not + # cast the LayerNorms in fp32. 
(DeepseekV2RMSNorm handles it correctly) + + input_dtype = query_states.dtype + if input_dtype == torch.float32: + # Handle the case where the model is quantized + if hasattr(self.config, '_pre_quantization_dtype'): + target_dtype = self.config._pre_quantization_dtype + elif torch.is_autocast_enabled(): + target_dtype = torch.get_autocast_gpu_dtype() + else: + target_dtype = self.q_a_proj.weight.dtype + + query_states = query_states.to(target_dtype) + key_states = key_states.to(target_dtype) + value_states = value_states.to(target_dtype) + + # Reashape to the expected shape for Flash Attention + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + + # ----------------- varlen flash attention forward ----------------------# + dropout_rate = self.attention_dropout if self.training else 0.0 + + if not self._flash_attn_uses_top_left_mask: + causal = self.is_causal + else: + causal = self.is_causal and q_len != 1 + + if is_training: + attn_output = varlen_flash_attn( + query_states, + key_states, + value_states, + cumulative_len, + max_seqlen, + softmax_scale=self.softmax_scale, + causal=causal, + dropout_p=dropout_rate, + training=True) + else: + attn_output = flash_attn_wo_mask( + query_states, + key_states, + value_states, + softmax_scale=self.softmax_scale, + causal=causal, + dropout_p=dropout_rate, + training=False) + + # ---------------- varlen flash attention forward end ------------------ # + + if self.q_head_dim != self.v_head_dim: + attn_output = attn_output[:, :, :, :self.v_head_dim] + + attn_output = attn_output.reshape(bsz, q_len, + self.num_heads * self.v_head_dim) + attn_output = self.o_proj(attn_output) + + if not output_attentions: + attn_weights = None + + return attn_output, attn_weights, past_key_value diff --git a/data/xtuner/xtuner/model/modules/dispatch/internlm.py b/data/xtuner/xtuner/model/modules/dispatch/internlm.py new file mode 100644 index 0000000000000000000000000000000000000000..37ca9ad310e056bc357235fa935004da79a3edd7 --- /dev/null +++ b/data/xtuner/xtuner/model/modules/dispatch/internlm.py @@ -0,0 +1,227 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Optional, Tuple + +import torch +import torch.distributed as dist +import torch.nn.functional as F +from mmengine import MessageHub + +from .triton_kernels import apply_rotary_emb + +SUPPORT_FLASH2 = False + +try: + from flash_attn import flash_attn_func, flash_attn_varlen_func + + SUPPORT_FLASH2 = True +except ImportError: + pass + + +class InternLMRotaryEmbedding(torch.nn.Module): + + def __init__(self, + dim, + max_position_embeddings=2048, + base=10000, + device=None): + super().__init__() + self.inv_freq = 1.0 / ( + base**(torch.arange(0, dim, 2).float().to(device) / dim)) + + # Build here to make `torch.jit.trace` work. 
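+        # Note added in this copy: the cos/sin caches are precomputed for
+        # `max_position_embeddings` positions and rebuilt lazily in `forward`
+        # whenever a longer sequence (or a new device/dtype) shows up.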
+ self.max_seq_len_cached = max_position_embeddings + t = torch.arange( + self.max_seq_len_cached, + device=self.inv_freq.device, + dtype=self.inv_freq.dtype) + freqs = torch.einsum('i,j->ij', t, self.inv_freq) + emb = torch.cat((freqs, freqs), dim=-1) + self.cos_cached = emb.cos() + self.sin_cached = emb.sin() + + def forward(self, x, seq_len): + # x: [bs, num_attention_heads, seq_len, head_size] + if (seq_len > self.max_seq_len_cached + or self.cos_cached.device != x.device + or self.cos_cached.dtype != x.dtype): + self.max_seq_len_cached = seq_len + assert self.inv_freq.dtype == torch.float32 + t = torch.arange( + self.max_seq_len_cached, + device=x.device, + dtype=self.inv_freq.dtype) + freqs = torch.einsum('i,j->ij', t, self.inv_freq.to(t.device)) + emb = torch.cat((freqs, freqs), dim=-1).to(x.device) + self.cos_cached = emb.cos().to(x.dtype) + self.sin_cached = emb.sin().to(x.dtype) + return ( + self.cos_cached[:seq_len, ...], + self.sin_cached[:seq_len, ...], + ) + + +def rotate_half(x): + """Rotates half the hidden dims of the input.""" + x1 = x[..., :x.shape[-1] // 2] + x2 = x[..., x.shape[-1] // 2:] + return torch.cat((-x2, x1), dim=-1) + + +def apply_rotary_pos_emb(q, k, cos, sin, position_ids): + cos = cos[position_ids].unsqueeze(1) # [bs, 1, seq_len, dim] + sin = sin[position_ids].unsqueeze(1) # [bs, 1, seq_len, dim] + q_embed = (q * cos) + (rotate_half(q) * sin) + k_embed = (k * cos) + (rotate_half(k) * sin) + return q_embed, k_embed + + +def internlm_attn_forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Tuple[torch.Tensor]] = None, + output_attentions: bool = False, + use_cache: bool = False, +) -> Tuple[torch.Tensor, Optional[torch.Tensor], + Optional[Tuple[torch.Tensor]]]: + # Modified from https://huggingface.co/internlm/internlm-7b/blob/939a68c0dc1bd5f35b63c87d44af05ce33379061/modeling_internlm.py#L161 # noqa:E501 + bsz, q_len, _ = hidden_states.size() + + query_states = self.q_proj(hidden_states).view(bsz, q_len, self.num_heads, + self.head_dim).transpose( + 1, 2) + key_states = self.k_proj(hidden_states).view(bsz, q_len, self.num_heads, + self.head_dim).transpose( + 1, 2) + value_states = self.v_proj(hidden_states).view(bsz, q_len, self.num_heads, + self.head_dim).transpose( + 1, 2) + + kv_seq_len = key_states.shape[-2] + if past_key_value is not None: + kv_seq_len += past_key_value[0].shape[-2] + cos, sin = self.rotary_emb(value_states, seq_len=kv_seq_len) + query_states, key_states = apply_rotary_pos_emb(query_states, key_states, + cos, sin, position_ids) + # [bsz, nh, t, hd] + + if past_key_value is not None: + # reuse k, v, self_attention + key_states = torch.cat([past_key_value[0], key_states], dim=2) + value_states = torch.cat([past_key_value[1], value_states], dim=2) + + past_key_value = (key_states, value_states) if use_cache else None + + if SUPPORT_FLASH2: + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + attn_output = flash_attn_func( + query_states, key_states, value_states, causal=True) + attn_output = attn_output.contiguous() + else: + # use flash attention implemented by pytorch + attn_output = F.scaled_dot_product_attention( + query_states, key_states, value_states, attn_mask=attention_mask) + attn_output = attn_output.transpose(1, 2) + + attn_output = attn_output.reshape(bsz, q_len, self.hidden_size) + + attn_output = self.o_proj(attn_output) + 
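+    # Note added in this copy: attn_output is back to (bsz, q_len,
+    # hidden_size) here, matching the residual-stream layout.
+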
+ # Due to the implementation of the PyTorch version of flash attention, + # even when the output_attentions flag is set to True, it is not possible + # to return the attn_weights. + return attn_output, None, past_key_value + + +def internlm_varlen_attn_forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Tuple[torch.Tensor]] = None, + output_attentions: bool = False, + use_cache: bool = False, +) -> Tuple[torch.Tensor, Optional[torch.Tensor], + Optional[Tuple[torch.Tensor]]]: + # Modified from https://huggingface.co/internlm/internlm-7b/blob/939a68c0dc1bd5f35b63c87d44af05ce33379061/modeling_internlm.py#L161 # noqa:E501 + + message_hub = MessageHub.get_instance('varlen_attn_args') + rank = dist.get_rank() + cumulative_len = message_hub.get_info(f'cumulative_len_rank_{rank}') + # position_ids = message_hub.get_info(f'position_ids_rank_{rank}') + max_seqlen = message_hub.get_info(f'max_seqlen_rank_{rank}') + use_varlen_atten = (cumulative_len is not None) + + bsz, q_len, _ = hidden_states.size() + assert bsz == 1, (f'If utilizing local attention, the batch size should be' + f' set to 1, but got {bsz}') + + query_states = self.q_proj(hidden_states).view(bsz, q_len, self.num_heads, + self.head_dim) + key_states = self.k_proj(hidden_states).view(bsz, q_len, self.num_heads, + self.head_dim) + value_states = self.v_proj(hidden_states).view(bsz, q_len, self.num_heads, + self.head_dim) + + kv_seq_len = key_states.shape[-3] + if past_key_value is not None: + kv_seq_len += past_key_value[0].shape[-2] + + if use_varlen_atten: + cos, sin = self.rotary_emb(value_states, max_seqlen) + query_states = apply_rotary_emb(query_states, + cos[position_ids].squeeze(0), + sin[position_ids].squeeze(0)) + key_states = apply_rotary_emb(key_states, cos[position_ids].squeeze(0), + sin[position_ids].squeeze(0)) + else: + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + cos, sin = self.rotary_emb(value_states, kv_seq_len) + query_states, key_states = apply_rotary_pos_emb( + query_states, key_states, cos, sin, position_ids) + + if past_key_value is not None: + # reuse k, v, self_attention + key_states = torch.cat([past_key_value[0], key_states], dim=2) + value_states = torch.cat([past_key_value[1], value_states], dim=2) + + past_key_value = (key_states, value_states) if use_cache else None + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + + assert SUPPORT_FLASH2 + if use_varlen_atten: + q_unpad, k_unpad, v_unpad = query_states.flatten( + 0, 1), key_states.flatten(0, 1), value_states.flatten(0, 1) + cumulative_len = torch.cat(cumulative_len, dim=0) + attn_output = flash_attn_varlen_func( + q_unpad, + k_unpad, + v_unpad, + cumulative_len, + cumulative_len, + max_seqlen, + max_seqlen, + 0, + return_attn_probs=False, + causal=True, + ) + else: + attn_output = flash_attn_func( + query_states, key_states, value_states, causal=True) + + attn_output = attn_output.reshape(bsz, q_len, self.hidden_size) + + attn_output = self.o_proj(attn_output) + + # Due to the implementation of the PyTorch version of flash attention, + # even when the output_attentions flag is set to True, it is not possible + # to return the attn_weights. 
+ return attn_output, None, past_key_value diff --git a/data/xtuner/xtuner/model/modules/dispatch/internlm2.py b/data/xtuner/xtuner/model/modules/dispatch/internlm2.py new file mode 100644 index 0000000000000000000000000000000000000000..7c601f0dc66c056c979a84efbb18b9125cfb44cf --- /dev/null +++ b/data/xtuner/xtuner/model/modules/dispatch/internlm2.py @@ -0,0 +1,306 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Optional, Tuple + +import torch +import torch.distributed as dist +from einops import rearrange +from mmengine import MessageHub +from transformers.cache_utils import Cache, StaticCache + +from xtuner.parallel.sequence import (get_sequence_parallel_world_size, + post_process_for_sequence_parallel_attn, + pre_process_for_sequence_parallel_attn) +from .attention import SUPPORT_FLASH2, flash_attn_wo_mask, varlen_flash_attn + + +def rotate_half(x): + """Rotates half the hidden dims of the input.""" + x1 = x[..., :x.shape[-1] // 2] + x2 = x[..., x.shape[-1] // 2:] + return torch.cat((-x2, x1), dim=-1) + + +def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1): + cos = cos.unsqueeze(unsqueeze_dim) + sin = sin.unsqueeze(unsqueeze_dim) + q_embed = (q * cos) + (rotate_half(q) * sin) + k_embed = (k * cos) + (rotate_half(k) * sin) + return q_embed, k_embed + + +def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor: + """This is the equivalent of torch.repeat_interleave(x, dim=1, + repeats=n_rep). + + The hidden states go from (batch, num_key_value_heads, seqlen, head_dim) to + (batch, num_attention_heads, seqlen, head_dim) + """ + batch, num_key_value_heads, slen, head_dim = hidden_states.shape + if n_rep == 1: + return hidden_states + hidden_states = hidden_states[:, :, + None, :, :].expand(batch, + num_key_value_heads, + n_rep, slen, head_dim) + return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, + head_dim) + + +def repeat_kv_bshd(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor: + """The hidden states go from (batch, seqlen, num_key_value_heads, head_dim) + to (batch, seqlen, num_attention_heads, head_dim)""" + batch, slen, num_key_value_heads, head_dim = hidden_states.shape + if n_rep == 1: + return hidden_states + hidden_states = hidden_states[:, :, :, + None, :].expand(batch, slen, + num_key_value_heads, n_rep, + head_dim) + return hidden_states.reshape(batch, slen, num_key_value_heads * n_rep, + head_dim) + + +def internlm2_attn_forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.LongTensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Cache] = None, + output_attentions: bool = False, + use_cache: bool = False, + cache_position: Optional[torch.LongTensor] = None, +): + if isinstance(past_key_value, StaticCache): + raise ValueError( + '`static` cache implementation is not compatible with ' + '`attn_implementation==flash_attention_2` make sure to use `sdpa` ' + 'in the mean time, and open an issue at ' + 'https://github.com/huggingface/transformers') + + output_attentions = False + + bsz, q_len, _ = hidden_states.size() + + qkv_states = self.wqkv(hidden_states) + + qkv_states = rearrange( + qkv_states, + 'b q (h gs d) -> b q h gs d', + gs=2 + self.num_key_value_groups, + d=self.head_dim, + ) + + query_states = qkv_states[..., :self.num_key_value_groups, :] + query_states = rearrange(query_states, 'b q h gs d -> b q (h gs) d') + key_states = qkv_states[..., -2, :] + value_states = qkv_states[..., -1, :] + + query_states = 
query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + + cos, sin = self.rotary_emb(value_states, position_ids) + query_states, key_states = apply_rotary_pos_emb(query_states, key_states, + cos, sin) + + if past_key_value is not None: + # sin and cos are specific to RoPE models; + # cache_position needed for the static cache + cache_kwargs = { + 'sin': sin, + 'cos': cos, + 'cache_position': cache_position + } + key_states, value_states = past_key_value.update( + key_states, value_states, self.layer_idx, cache_kwargs) + + key_states = repeat_kv(key_states, self.num_key_value_groups) + value_states = repeat_kv(value_states, self.num_key_value_groups) + + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + + # In PEFT, usually we cast the layer norms in float32 for training + # stability reasons therefore the input hidden states gets silently + # casted in float32. Hence, we need cast them back in the correct dtype + # just to be sure everything works as expected. + # This might slowdown training & inference so it is recommended to not + # cast the LayerNorms in fp32. (InternLM2RMSNorm handles it correctly) + + input_dtype = query_states.dtype + if input_dtype == torch.float32: + if torch.is_autocast_enabled(): + target_dtype = torch.get_autocast_gpu_dtype() + # Handle the case where the model is quantized + elif hasattr(self.config, '_pre_quantization_dtype'): + target_dtype = self.config._pre_quantization_dtype + else: + target_dtype = self.wqkv.weight.dtype + + query_states = query_states.to(target_dtype) + key_states = key_states.to(target_dtype) + value_states = value_states.to(target_dtype) + + enable_sequence_parallel = ( + dist.is_initialized() and get_sequence_parallel_world_size() > 1 + and self.training) + if enable_sequence_parallel: + query_states, key_states, value_states = \ + pre_process_for_sequence_parallel_attn( + query_states, key_states, value_states) + # self.num_heads is used in self._upad_input method + # num_heads has been changed because of sequence parallel + ori_num_head = self.num_heads + self.num_heads = query_states.shape[-2] + + dropout_rate = 0.0 + attn_output = self._flash_attention_forward( + query_states, + key_states, + value_states, + attention_mask, + query_states.shape[1], + dropout=dropout_rate) + + if enable_sequence_parallel: + attn_output = post_process_for_sequence_parallel_attn(attn_output) + self.num_heads = ori_num_head + + attn_output = attn_output.reshape(bsz, q_len, self.hidden_size) + attn_output = self.wo(attn_output) + + if not output_attentions: + attn_weights = None + + return attn_output, attn_weights, past_key_value + + +def internlm2_varlen_attn_forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.LongTensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Cache] = None, + output_attentions: bool = False, + use_cache: bool = False, + cache_position: Optional[torch.LongTensor] = None, +) -> Tuple[torch.Tensor, Optional[torch.Tensor], + Optional[Tuple[torch.Tensor]]]: + + if isinstance(past_key_value, StaticCache): + raise ValueError( + '`static` cache implementation is not compatible with ' + '`attn_implementation==flash_attention_2` make sure to use `sdpa` ' + 'in the mean time, and open an issue at ' + 'https://github.com/huggingface/transformers') + + message_hub = MessageHub.get_instance('varlen_attn_args') + rank = 
dist.get_rank() + cumulative_len = message_hub.get_info(f'cumulative_len_rank_{rank}') + max_seqlen = message_hub.get_info(f'max_seqlen_rank_{rank}') + use_varlen_atten = (cumulative_len is not None) + + bsz, q_len, _ = hidden_states.size() + + assert bsz == 1, (f'If utilizing local attention, the batch size should be' + f' set to 1, but got {bsz}') + + qkv_states = self.wqkv(hidden_states) + + qkv_states = rearrange( + qkv_states, + 'b q (h gs d) -> b q h gs d', + gs=2 + self.num_key_value_groups, + d=self.head_dim, + ) + + query_states = qkv_states[..., :self.num_key_value_groups, :] + query_states = rearrange(query_states, 'b q h gs d -> b q (h gs) d') + key_states = qkv_states[..., -2, :] + value_states = qkv_states[..., -1, :] + + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + + try: + cos, sin = self.rotary_emb(value_states, position_ids) + except RuntimeError: + raise RuntimeError( + 'You are using the old version of InternLM2 model. The ' + '`modeling_internlm2.py` is outdated. Please update the InternLM2 ' + 'model.') + query_states, key_states = apply_rotary_pos_emb(query_states, key_states, + cos, sin) + + if past_key_value is not None: + # sin and cos are specific to RoPE models; + # cache_position needed for the static cache + cache_kwargs = { + 'sin': sin, + 'cos': cos, + 'cache_position': cache_position + } + key_states, value_states = past_key_value.update( + key_states, value_states, self.layer_idx, cache_kwargs) + + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + + # In PEFT, usually we cast the layer norms in float32 for training + # stability reasons therefore the input hidden states gets silently + # casted in float32. Hence, we need cast them back in the correct dtype + # just to be sure everything works as expected. + # This might slowdown training & inference so it is recommended to not + # cast the LayerNorms in fp32. (InternLM2RMSNorm handles it correctly) + + input_dtype = query_states.dtype + if input_dtype == torch.float32: + if torch.is_autocast_enabled(): + target_dtype = torch.get_autocast_gpu_dtype() + # Handle the case where the model is quantized + elif hasattr(self.config, '_pre_quantization_dtype'): + target_dtype = self.config._pre_quantization_dtype + else: + target_dtype = self.wqkv.weight.dtype + + query_states = query_states.to(target_dtype) + key_states = key_states.to(target_dtype) + value_states = value_states.to(target_dtype) + + # repeat kv for sequence parallel + key_states = repeat_kv_bshd(key_states, self.num_key_value_groups) + value_states = repeat_kv_bshd(value_states, self.num_key_value_groups) + + assert SUPPORT_FLASH2 + + dropout_rate = 0.0 + if use_varlen_atten: + attn_output = varlen_flash_attn( + query_states, + key_states, + value_states, + cumulative_len, + max_seqlen, + causal=True, + dropout_p=dropout_rate, + training=self.training) + else: + attn_output = flash_attn_wo_mask( + query_states, + key_states, + value_states, + causal=True, + dropout_p=dropout_rate, + training=self.training) + + attn_output = attn_output.reshape(bsz, q_len, self.hidden_size) + + attn_output = self.wo(attn_output) + + # Due to the implementation of the PyTorch version of flash attention, + # even when the output_attentions flag is set to True, it is not possible + # to return the attn_weights. 
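+    # Note added in this copy: the explicit `None` below fills the
+    # attn_weights slot of the (output, weights, past_key_value) tuple.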
+ return attn_output, None, past_key_value diff --git a/data/xtuner/xtuner/model/modules/dispatch/llama.py b/data/xtuner/xtuner/model/modules/dispatch/llama.py new file mode 100644 index 0000000000000000000000000000000000000000..8132096fd484f43535543ed8f6de3efe36491c7b --- /dev/null +++ b/data/xtuner/xtuner/model/modules/dispatch/llama.py @@ -0,0 +1,524 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import warnings +from typing import Optional, Tuple + +import torch +import torch.distributed as dist +from mmengine import MessageHub +from transformers.models.llama.modeling_llama import (apply_rotary_pos_emb, + repeat_kv) +from transformers.utils import is_flash_attn_greater_or_equal_2_10 + +from .attention import (SUPPORT_FLASH2, flash_attn_w_mask, flash_attn_wo_mask, + varlen_flash_attn) +from .triton_kernels import apply_rotary_emb + +try: + from transformers.cache_utils import Cache +except ImportError: + + class Cache: + pass + + +def repeat_kv_bshd(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor: + """The hidden states go from (batch, seqlen, num_key_value_heads, head_dim) + to (batch, seqlen, num_attention_heads, head_dim)""" + batch, slen, num_key_value_heads, head_dim = hidden_states.shape + if n_rep == 1: + return hidden_states + hidden_states = hidden_states[:, :, :, + None, :].expand(batch, slen, + num_key_value_heads, n_rep, + head_dim) + return hidden_states.reshape(batch, slen, num_key_value_heads * n_rep, + head_dim) + + +def llama_attn_forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.LongTensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Cache] = None, + output_attentions: bool = False, + use_cache: bool = False, + cache_position: Optional[torch.LongTensor] = None, + **kwargs, +): + # Modified from https://github.com/huggingface/transformers/blob/66ce9593fdb8e340df546ddd0774eb444f17a12c/src/transformers/models/llama/modeling_llama.py#L422 # noqa:E501 + output_attentions = False + + bsz, q_len, _ = hidden_states.size() + + query_states = self.q_proj(hidden_states) + key_states = self.k_proj(hidden_states) + value_states = self.v_proj(hidden_states) + + # Flash attention requires the input to have the shape + # batch_size x seq_length x head_dim x hidden_dim + # therefore we just need to keep the original shape + query_states = query_states.view(bsz, q_len, self.num_heads, + self.head_dim).transpose(1, 2) + key_states = key_states.view(bsz, q_len, self.num_key_value_heads, + self.head_dim).transpose(1, 2) + value_states = value_states.view(bsz, q_len, self.num_key_value_heads, + self.head_dim).transpose(1, 2) + + cos, sin = self.rotary_emb(value_states, position_ids) + query_states, key_states = apply_rotary_pos_emb(query_states, key_states, + cos, sin) + + past_key_value = getattr(self, 'past_key_value', past_key_value) + + if past_key_value is not None: + # sin and cos are specific to RoPE models; + # cache_position needed for the static cache + cache_kwargs = { + 'sin': sin, + 'cos': cos, + 'cache_position': cache_position + } + key_states, value_states = past_key_value.update( + key_states, value_states, self.layer_idx, cache_kwargs) + + key_states = repeat_kv(key_states, self.num_key_value_groups) + value_states = repeat_kv(value_states, self.num_key_value_groups) + + assert SUPPORT_FLASH2 + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + + # In PEFT, usually we cast the layer norms in float32 for 
training
+    # stability reasons, therefore the input hidden states get silently
+    # cast to float32. Hence, we need to cast them back to the correct dtype
+    # just to be sure everything works as expected.
+    # This might slow down training & inference, so it is recommended not to
+    # cast the LayerNorms to fp32. (LlamaRMSNorm handles it correctly)
+
+    input_dtype = query_states.dtype
+    if input_dtype == torch.float32:
+        if torch.is_autocast_enabled():
+            target_dtype = torch.get_autocast_gpu_dtype()
+        # Handle the case where the model is quantized
+        elif hasattr(self.config, '_pre_quantization_dtype'):
+            target_dtype = self.config._pre_quantization_dtype
+        else:
+            target_dtype = self.q_proj.weight.dtype
+
+        query_states = query_states.to(target_dtype)
+        key_states = key_states.to(target_dtype)
+        value_states = value_states.to(target_dtype)
+
+    dropout_rate = self.attention_dropout if self.training else 0.0
+
+    if is_flash_attn_greater_or_equal_2_10():
+        causal = self.is_causal
+    else:
+        # TODO: Remove the `q_len != 1` check once Flash Attention for RoCm
+        # is bumped to 2.1. For details, please see the comment in
+        # LlamaFlashAttention2 __init__.
+        causal = self.is_causal and q_len != 1
+
+    # the shape of attention_mask used by flash_attn and
+    # F.scaled_dot_product_attention are different
+    assert attention_mask is None or attention_mask.ndim == 2, \
+        ('When using flash_attn, attention_mask.ndim should equal 2. '
+         f'But got attention_mask.shape = {attention_mask.shape}. '
+         'We can pass the `attn_implementation="flash_attention_2"` flag '
+         'to the `.from_pretrained` method when instantiating a Llama '
+         'model.')
+
+    if attention_mask is not None:
+        attn_output = flash_attn_w_mask(
+            query_states,
+            key_states,
+            value_states,
+            attention_mask,
+            causal=causal,
+            dropout_p=dropout_rate,
+            training=self.training)
+    else:
+        attn_output = flash_attn_wo_mask(
+            query_states,
+            key_states,
+            value_states,
+            causal=causal,
+            dropout_p=dropout_rate,
+            training=self.training)
+
+    attn_output = attn_output.reshape(bsz, q_len, self.hidden_size)
+    attn_output = self.o_proj(attn_output)
+
+    if not output_attentions:
+        attn_weights = None
+
+    return attn_output, attn_weights, past_key_value
+
+
+def llama_attn_forward_legacy(
+    self,
+    hidden_states: torch.Tensor,
+    attention_mask: Optional[torch.Tensor] = None,
+    position_ids: Optional[torch.LongTensor] = None,
+    past_key_value: Optional[Cache] = None,
+    output_attentions: bool = False,
+    use_cache: bool = False,
+    **kwargs,
+) -> Tuple[torch.Tensor, Optional[torch.Tensor],
+           Optional[Tuple[torch.Tensor]]]:
+    # Modified from https://github.com/huggingface/transformers/blob/ced9fd86f55ebb6b656c273f6e23f8ba50652f83/src/transformers/models/llama/modeling_llama.py#L331 # noqa:E501
+    if 'padding_mask' in kwargs:
+        warnings.warn(
+            'Passing `padding_mask` is deprecated and will be removed in '
+            'v4.37.
Please make sure use `attention_mask` instead.`') + + bsz, q_len, _ = hidden_states.size() + + query_states = self.q_proj(hidden_states) + key_states = self.k_proj(hidden_states) + value_states = self.v_proj(hidden_states) + + query_states = query_states.view(bsz, q_len, self.num_heads, + self.head_dim).transpose(1, 2) + key_states = key_states.view(bsz, q_len, self.num_key_value_heads, + self.head_dim).transpose(1, 2) + value_states = value_states.view(bsz, q_len, self.num_key_value_heads, + self.head_dim).transpose(1, 2) + + kv_seq_len = key_states.shape[-2] + if past_key_value is not None: + if self.layer_idx is None: + raise ValueError( + 'The cache structure has changed since version v4.36. ' + f'If you are using {self.__class__.__name__} ' + 'for auto-regressive decoding with k/v caching, ' + 'please make sure to initialize the attention class ' + 'with a layer index.') + kv_seq_len += past_key_value.get_usable_length(kv_seq_len, + self.layer_idx) + assert position_ids is not None + if self.training: + cos, sin = self.rotary_emb( + value_states, seq_len=position_ids.max() + 1) + else: + cos, sin = self.rotary_emb(value_states, seq_len=kv_seq_len) + query_states, key_states = apply_rotary_pos_emb(query_states, key_states, + cos, sin, position_ids) + + if past_key_value is not None: + cache_kwargs = {'sin': sin, 'cos': cos} # Specific to RoPE models + key_states, value_states = past_key_value.update( + key_states, value_states, self.layer_idx, cache_kwargs) + + key_states = repeat_kv(key_states, self.num_key_value_groups) + value_states = repeat_kv(value_states, self.num_key_value_groups) + + assert SUPPORT_FLASH2 + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + + # In PEFT, usually we cast the layer norms in float32 for training + # stability reasons therefore the input hidden states gets silently + # casted in float32. Hence, we need cast them back in the correct dtype + # just to be sure everything works as expected. + # This might slowdown training & inference so it is recommended to not + # cast the LayerNorms in fp32. (LlamaRMSNorm handles it correctly) + + input_dtype = query_states.dtype + if input_dtype == torch.float32: + if torch.is_autocast_enabled(): + target_dtype = torch.get_autocast_gpu_dtype() + # Handle the case where the model is quantized + elif hasattr(self.config, '_pre_quantization_dtype'): + target_dtype = self.config._pre_quantization_dtype + else: + target_dtype = self.q_proj.weight.dtype + + query_states = query_states.to(target_dtype) + key_states = key_states.to(target_dtype) + value_states = value_states.to(target_dtype) + + dropout_rate = self.attention_dropout if self.training else 0.0 + + if is_flash_attn_greater_or_equal_2_10(): + causal = self.is_causal + else: + # TODO: Remove the `q_len != 1` check once Flash Attention for RoCm + # is bumped to 2.1. For details, please see the comment in + # LlamaFlashAttention2 __init__. + causal = self.is_causal and q_len != 1 + + # the shape of attention_mask used by flash_attn and + # F.scaled_dot_product_attention are different + assert attention_mask is None or attention_mask.ndim == 2, \ + ('When using flash_attn, attention_mask.ndim should equal to 2.' + f'But got attention_mask.shape = {attention_mask.shape}.' 
+ 'We can pass the `attn_implementation="flash_attention_2"` flag ' + 'to `.from_pretrained` method when instantiating a Internlm2 ' + 'model.') + + if attention_mask is not None: + attn_output = flash_attn_w_mask( + query_states, + key_states, + value_states, + attention_mask=attention_mask, + causal=causal, + dropout_p=dropout_rate, + training=self.training) + else: + attn_output = flash_attn_wo_mask( + query_states, + key_states, + value_states, + causal=causal, + dropout_p=dropout_rate, + training=self.training) + + attn_output = attn_output.reshape(bsz, q_len, self.hidden_size) + attn_output = self.o_proj(attn_output) + + # Due to the implementation of the PyTorch version of flash attention, + # even when the output_attentions flag is set to True, it is not possible + # to return the attn_weights. + return attn_output, None, past_key_value + + +def llama_varlen_attn_forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Cache] = None, + output_attentions: bool = False, + use_cache: bool = False, + cache_position: Optional[torch.LongTensor] = None, + **kwargs, +) -> Tuple[torch.Tensor, Optional[torch.Tensor], + Optional[Tuple[torch.Tensor]]]: + + message_hub = MessageHub.get_instance('varlen_attn_args') + rank = dist.get_rank() + cumulative_len = message_hub.get_info(f'cumulative_len_rank_{rank}') + max_seqlen = message_hub.get_info(f'max_seqlen_rank_{rank}') + use_varlen_atten = (cumulative_len is not None) + + if 'padding_mask' in kwargs: + warnings.warn('Passing `padding_mask` is deprecated and will be ' + 'removed in v4.37. Please make sure use ' + '`attention_mask` instead.`') + bsz, q_len, _ = hidden_states.size() + + query_states = self.q_proj(hidden_states) + key_states = self.k_proj(hidden_states) + value_states = self.v_proj(hidden_states) + + query_states = query_states.view(bsz, q_len, self.num_heads, + self.head_dim).transpose(1, 2) + key_states = key_states.view(bsz, q_len, self.num_key_value_heads, + self.head_dim).transpose(1, 2) + value_states = value_states.view(bsz, q_len, self.num_key_value_heads, + self.head_dim).transpose(1, 2) + + cos, sin = self.rotary_emb(value_states, position_ids) + query_states, key_states = apply_rotary_pos_emb(query_states, key_states, + cos, sin) + + past_key_value = getattr(self, 'past_key_value', past_key_value) + + if past_key_value is not None: + # sin and cos are specific to RoPE models; + # cache_position needed for the static cache + cache_kwargs = { + 'sin': sin, + 'cos': cos, + 'cache_position': cache_position + } + key_states, value_states = past_key_value.update( + key_states, value_states, self.layer_idx, cache_kwargs) + + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + + # repeat kv for sequence parallel + key_states = repeat_kv_bshd(key_states, self.num_key_value_groups) + value_states = repeat_kv_bshd(value_states, self.num_key_value_groups) + + dropout_rate = self.attention_dropout if self.training else 0.0 + + # In PEFT, usually we cast the layer norms in float32 for training + # stability reasons therefore the input hidden states gets silently casted + # in float32. Hence, we need cast them back in the correct dtype + # just to be sure everything works as expected. + # This might slowdown training & inference so it is recommended to not + # cast the LayerNorms in fp32. 
(LlamaRMSNorm handles it correctly)
+
+    input_dtype = query_states.dtype
+    if input_dtype == torch.float32:
+        if torch.is_autocast_enabled():
+            target_dtype = torch.get_autocast_gpu_dtype()
+        # Handle the case where the model is quantized
+        elif hasattr(self.config, '_pre_quantization_dtype'):
+            target_dtype = self.config._pre_quantization_dtype
+        else:
+            target_dtype = self.q_proj.weight.dtype
+
+        query_states = query_states.to(target_dtype)
+        key_states = key_states.to(target_dtype)
+        value_states = value_states.to(target_dtype)
+
+    assert SUPPORT_FLASH2
+    if use_varlen_atten:
+        attn_output = varlen_flash_attn(
+            query_states,
+            key_states,
+            value_states,
+            cumulative_len,
+            max_seqlen,
+            causal=True,
+            dropout_p=dropout_rate,
+            training=self.training)
+    else:
+        attn_output = flash_attn_wo_mask(
+            query_states,
+            key_states,
+            value_states,
+            causal=True,
+            dropout_p=dropout_rate,
+            training=self.training)
+
+    attn_output = attn_output.reshape(bsz, q_len, self.hidden_size)
+    attn_output = self.o_proj(attn_output)
+
+    return attn_output, None, past_key_value
+
+
+def llama_varlen_attn_forward_legacy(
+    self,
+    hidden_states: torch.Tensor,
+    attention_mask: Optional[torch.Tensor] = None,
+    position_ids: Optional[torch.LongTensor] = None,
+    past_key_value: Optional[Cache] = None,
+    output_attentions: bool = False,
+    use_cache: bool = False,
+    **kwargs,
+) -> Tuple[torch.Tensor, Optional[torch.Tensor],
+           Optional[Tuple[torch.Tensor]]]:
+
+    message_hub = MessageHub.get_instance('varlen_attn_args')
+    rank = dist.get_rank()
+    cumulative_len = message_hub.get_info(f'cumulative_len_rank_{rank}')
+    max_seqlen = message_hub.get_info(f'max_seqlen_rank_{rank}')
+    use_varlen_atten = (cumulative_len is not None)
+
+    if 'padding_mask' in kwargs:
+        warnings.warn('Passing `padding_mask` is deprecated and will be '
+                      'removed in v4.37. Please make sure to use '
+                      '`attention_mask` instead.')
+    bsz, q_len, _ = hidden_states.size()
+
+    query_states = self.q_proj(hidden_states)
+    key_states = self.k_proj(hidden_states)
+    value_states = self.v_proj(hidden_states)
+
+    query_states = query_states.view(bsz, q_len, self.num_heads, self.head_dim)
+    key_states = key_states.view(bsz, q_len, self.num_key_value_heads,
+                                 self.head_dim)
+    value_states = value_states.view(bsz, q_len, self.num_key_value_heads,
+                                     self.head_dim)
+
+    kv_seq_len = key_states.shape[-3]
+    if past_key_value is not None:
+        if self.layer_idx is None:
+            raise ValueError(
+                'The cache structure has changed since version v4.36.
' + f'If you are using {self.__class__.__name__} ' + 'for auto-regressive decoding with k/v caching, ' + 'please make sure to initialize the attention class ' + 'with a layer index.') + kv_seq_len += past_key_value.get_usable_length(kv_seq_len, + self.layer_idx) + + if use_varlen_atten: + cos, sin = self.rotary_emb(value_states, max_seqlen) + # position_ids (1, seq_len) + # cos, sin (1, seq_len, dim) -> (seq_len, dim) + cos = cos[position_ids].squeeze(0) + sin = sin[position_ids].squeeze(0) + query_states = apply_rotary_emb(query_states, cos, sin) + key_states = apply_rotary_emb(key_states, cos, sin) + else: + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + cos, sin = self.rotary_emb(value_states, kv_seq_len) + query_states, key_states = apply_rotary_pos_emb( + query_states, key_states, cos, sin, position_ids) + + if past_key_value is not None: + cache_kwargs = {'sin': sin, 'cos': cos} # Specific to RoPE models + key_states, value_states = past_key_value.update( + key_states, value_states, self.layer_idx, cache_kwargs) + + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + + # repeat kv for sequence parallel + key_states = repeat_kv_bshd(key_states, self.num_key_value_groups) + value_states = repeat_kv_bshd(value_states, self.num_key_value_groups) + + dropout_rate = self.attention_dropout if self.training else 0.0 + + # In PEFT, usually we cast the layer norms in float32 for training + # stability reasons therefore the input hidden states gets silently casted + # in float32. Hence, we need cast them back in the correct dtype + # just to be sure everything works as expected. + # This might slowdown training & inference so it is recommended to not + # cast the LayerNorms in fp32. (LlamaRMSNorm handles it correctly) + + input_dtype = query_states.dtype + if input_dtype == torch.float32: + if torch.is_autocast_enabled(): + target_dtype = torch.get_autocast_gpu_dtype() + # Handle the case where the model is quantized + elif hasattr(self.config, '_pre_quantization_dtype'): + target_dtype = self.config._pre_quantization_dtype + else: + target_dtype = self.q_proj.weight.dtype + + query_states = query_states.to(target_dtype) + key_states = key_states.to(target_dtype) + value_states = value_states.to(target_dtype) + + assert SUPPORT_FLASH2 + if use_varlen_atten: + attn_output = varlen_flash_attn( + query_states, + key_states, + value_states, + cumulative_len, + max_seqlen, + causal=True, + dropout_p=dropout_rate, + training=self.training) + else: + attn_output = flash_attn_wo_mask( + query_states, + key_states, + value_states, + causal=True, + dropout_p=dropout_rate, + training=self.training) + + attn_output = attn_output.reshape(bsz, q_len, self.hidden_size) + + attn_output = self.o_proj(attn_output) + + # Due to the implementation of the PyTorch version of flash attention, + # even when the output_attentions flag is set to True, it is not possible + # to return the attn_weights. + return attn_output, None, past_key_value diff --git a/data/xtuner/xtuner/model/modules/dispatch/mistral.py b/data/xtuner/xtuner/model/modules/dispatch/mistral.py new file mode 100644 index 0000000000000000000000000000000000000000..dc6c7fed827f229aeb286a35d2b290126f07e965 --- /dev/null +++ b/data/xtuner/xtuner/model/modules/dispatch/mistral.py @@ -0,0 +1,447 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
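+# The varlen forwards in this file read `cumulative_len` / `max_seqlen` from
+# a MessageHub. A minimal sketch, assuming flash-attn's `cu_seqlens`
+# convention, of how such values are typically built from packed sample
+# lengths (the helper name and the lengths are illustrative, not xtuner
+# APIs):
+def _cumulative_len_sketch(sample_lengths):
+    import torch
+
+    # flash-attn style cu_seqlens: [0, l0, l0 + l1, ...]
+    lengths = torch.as_tensor(sample_lengths, dtype=torch.int32)
+    cumulative_len = torch.cat(
+        [torch.zeros(1, dtype=torch.int32),
+         torch.cumsum(lengths, 0, dtype=torch.int32)])
+    # e.g. _cumulative_len_sketch([3, 5, 2])
+    #      -> (tensor([0, 3, 8, 10], dtype=torch.int32), 5)
+    return cumulative_len, int(lengths.max())
+
+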
+import inspect +import warnings +from typing import Optional + +import torch +import torch.distributed as dist +import torch.nn as nn +import transformers +from mmengine import MessageHub +from mmengine.utils import digit_version +from transformers.cache_utils import Cache +from transformers.models.mistral.modeling_mistral import (apply_rotary_pos_emb, + repeat_kv) + +from xtuner.parallel.sequence import get_sequence_parallel_world_size +from xtuner.parallel.sequence.attention import ( + post_process_for_sequence_parallel_attn, + pre_process_for_sequence_parallel_attn) +from .attention import flash_attn_wo_mask, varlen_flash_attn +from .triton_kernels import apply_rotary_emb + +SUPPORT_FLASH2 = False + +try: + from flash_attn import flash_attn_func + _flash_supports_window_size = 'window_size' in list( + inspect.signature(flash_attn_func).parameters) + SUPPORT_FLASH2 = True +except ImportError: + pass + +TRANSFORMERS_VERSION = digit_version(transformers.__version__) +IS_LOW_VERSION_TRANSFORMERS = TRANSFORMERS_VERSION < digit_version('4.43') + +if not IS_LOW_VERSION_TRANSFORMERS: + from transformers.modeling_flash_attention_utils import \ + _flash_attention_forward + + +class MistralRotaryEmbedding(nn.Module): + + def __init__(self, + dim, + max_position_embeddings=2048, + base=10000, + device=None): + super().__init__() + + self.dim = dim + self.max_position_embeddings = max_position_embeddings + self.base = base + self.inv_freq = 1.0 / ( + base**(torch.arange(0, self.dim, 2).float().to(device) / self.dim)) + + # Build here to make `torch.jit.trace` work. + self._set_cos_sin_cache( + seq_len=max_position_embeddings, + device=self.inv_freq.device, + dtype=torch.get_default_dtype()) + + def _set_cos_sin_cache(self, seq_len, device, dtype): + self.max_seq_len_cached = seq_len + t = torch.arange( + self.max_seq_len_cached, device=device, dtype=self.inv_freq.dtype) + freqs = torch.einsum('i,j->ij', t, self.inv_freq.to(device)) + # Different from paper, but it uses a different permutation + # in order to obtain the same calculation + emb = torch.cat((freqs, freqs), dim=-1).to(device) + self.cos_cached = emb.cos().to(dtype) + self.sin_cached = emb.sin().to(dtype) + + def forward(self, x, seq_len=None): + # x: [bs, num_attention_heads, seq_len, head_size] + if (seq_len > self.max_seq_len_cached + or self.cos_cached.device != x.device # noqa: W503 + or self.cos_cached.dtype != x.dtype): # noqa: W503 + self._set_cos_sin_cache( + seq_len=seq_len, device=x.device, dtype=x.dtype) + + return ( + self.cos_cached[:seq_len].to(dtype=x.dtype), + self.sin_cached[:seq_len].to(dtype=x.dtype), + ) + + +def repeat_kv_bshd(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor: + """The hidden states go from (batch, seqlen, num_key_value_heads, head_dim) + to (batch, seqlen, num_attention_heads, head_dim)""" + batch, slen, num_key_value_heads, head_dim = hidden_states.shape + if n_rep == 1: + return hidden_states + hidden_states = hidden_states[:, :, :, + None, :].expand(batch, slen, + num_key_value_heads, n_rep, + head_dim) + return hidden_states.reshape(batch, slen, num_key_value_heads * n_rep, + head_dim) + + +def mistral_attn_forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Cache] = None, + output_attentions: bool = False, + use_cache: bool = False, + **kwargs, +): + if 'padding_mask' in kwargs: + warnings.warn( + 'Passing `padding_mask` is deprecated and will be removed in ' + 
'v4.37. Please make sure use `attention_mask` instead.`') + + # overwrite attention_mask with padding_mask + attention_mask = kwargs.pop('padding_mask') + bsz, q_len, _ = hidden_states.size() + + query_states = self.q_proj(hidden_states) + key_states = self.k_proj(hidden_states) + value_states = self.v_proj(hidden_states) + + query_states = query_states.view(bsz, q_len, self.num_heads, + self.head_dim).transpose(1, 2) + key_states = key_states.view(bsz, q_len, self.num_key_value_heads, + self.head_dim).transpose(1, 2) + value_states = value_states.view(bsz, q_len, self.num_key_value_heads, + self.head_dim).transpose(1, 2) + + kv_seq_len = key_states.shape[-2] + if past_key_value is not None: + if self.layer_idx is None: + raise ValueError( + 'The cache structure has changed since version v4.36. ' + f'If you are using {self.__class__.__name__} ' + 'for auto-regressive decoding with k/v caching, ' + 'please make sure to initialize the attention class ' + 'with a layer index.') + kv_seq_len += past_key_value.get_usable_length(kv_seq_len, + self.layer_idx) + + assert position_ids is not None + if self.training: + cos, sin = self.rotary_emb( + value_states, seq_len=position_ids.max() + 1) + else: + cos, sin = self.rotary_emb(value_states, seq_len=kv_seq_len) + query_states, key_states = apply_rotary_pos_emb(query_states, key_states, + cos, sin, position_ids) + + use_sliding_windows = ( + _flash_supports_window_size + and getattr(self.config, 'sliding_window', None) is not None + and kv_seq_len > self.config.sliding_window) + + if past_key_value is not None: + # Activate slicing cache only if the config has a value + # `sliding_windows` attribute + cache_has_contents = past_key_value.get_seq_length(self.layer_idx) > 0 + if (getattr(self.config, 'sliding_window', None) is not None + and kv_seq_len > self.config.sliding_window + and cache_has_contents): + slicing_tokens = 1 - self.config.sliding_window + + past_key = past_key_value[self.layer_idx][0] + past_value = past_key_value[self.layer_idx][1] + + past_key = past_key[:, :, slicing_tokens:, :].contiguous() + past_value = past_value[:, :, slicing_tokens:, :].contiguous() + + if past_key.shape[-2] != self.config.sliding_window - 1: + raise ValueError( + 'past key must have a shape of (`batch_size, num_heads, ' + 'self.config.sliding_window-1, head_dim`), got' + f' {past_key.shape}') + + if attention_mask is not None: + attention_mask = attention_mask[:, slicing_tokens:] + attention_mask = torch.cat( + [attention_mask, + torch.ones_like(attention_mask[:, -1:])], + dim=-1) + + cache_kwargs = {'sin': sin, 'cos': cos} # Specific to RoPE models + key_states, value_states = past_key_value.update( + key_states, value_states, self.layer_idx, cache_kwargs) + + # repeat k/v heads if n_kv_heads < n_heads for sequence parallel + key_states = repeat_kv(key_states, self.num_key_value_groups) + value_states = repeat_kv(value_states, self.num_key_value_groups) + dropout_rate = 0.0 if not self.training else self.attention_dropout + + # In PEFT, usually we cast the layer norms in float32 for training + # stability reasons therefore the input hidden states gets silently + # casted in float32. Hence, we need cast them back in the correct dtype + # just to be sure everything works as expected. + # This might slowdown training & inference so it is recommended to not + # cast the LayerNorms in fp32. 
(LlamaRMSNorm handles it correctly) + input_dtype = query_states.dtype + if input_dtype == torch.float32: + if torch.is_autocast_enabled(): + target_dtype = torch.get_autocast_gpu_dtype() + # Handle the case where the model is quantized + elif hasattr(self.config, '_pre_quantization_dtype'): + target_dtype = self.config._pre_quantization_dtype + else: + target_dtype = self.q_proj.weight.dtype + + query_states = query_states.to(target_dtype) + key_states = key_states.to(target_dtype) + value_states = value_states.to(target_dtype) + + # Reashape to the expected shape for Flash Attention + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + + enable_sequence_parallel = ( + dist.is_initialized() and get_sequence_parallel_world_size() > 1 + and self.training) + if enable_sequence_parallel: + query_states, key_states, value_states = \ + pre_process_for_sequence_parallel_attn( + query_states, key_states, value_states) + # num_heads has been changed because of sequence parallel + # `self.num_heads`` is not used in self._flash_attention_forward + # in mistral/mixtral, we are doing this to avoid some unnecessary risk + ori_num_head = self.num_heads + self.num_heads = query_states.shape[-2] + + if IS_LOW_VERSION_TRANSFORMERS: + attn_output = self._flash_attention_forward( + query_states, + key_states, + value_states, + attention_mask, + query_length=query_states.shape[1], + dropout=dropout_rate, + use_sliding_windows=use_sliding_windows, + ) + else: + attn_output = _flash_attention_forward( + query_states, + key_states, + value_states, + attention_mask, + query_states.shape[1], + dropout=dropout_rate, + sliding_window=getattr(self.config, 'sliding_window', None), + use_top_left_mask=self._flash_attn_uses_top_left_mask, + is_causal=self.is_causal, + ) + + if enable_sequence_parallel: + attn_output = post_process_for_sequence_parallel_attn(attn_output) + self.num_heads = ori_num_head + + attn_output = attn_output.reshape(bsz, q_len, + self.hidden_size).contiguous() + attn_output = self.o_proj(attn_output) + + if not output_attentions: + attn_weights = None + + return attn_output, attn_weights, past_key_value + + +def mistral_varlen_attn_forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Cache] = None, + output_attentions: bool = False, + use_cache: bool = False, + **kwargs, +): + is_training = self.training + + message_hub = MessageHub.get_instance('varlen_attn_args') + rank = dist.get_rank() + cumulative_len = message_hub.get_info(f'cumulative_len_rank_{rank}') + max_seqlen = message_hub.get_info(f'max_seqlen_rank_{rank}') + + assert is_training == (past_key_value is None) + use_varlen_atten = (cumulative_len is not None) + + if 'padding_mask' in kwargs: + warnings.warn( + 'Passing `padding_mask` is deprecated and will be removed in v4.37' + ' Please make sure use `attention_mask` instead.`') + + # overwrite attention_mask with padding_mask + attention_mask = kwargs.pop('padding_mask') + bsz, q_len, _ = hidden_states.size() + assert bsz == 1, (f'If utilizing local attention, the batch size should be' + f' set to 1, but got {bsz}') + # attention_mask is set to None if no padding token in input_ids + assert attention_mask is None + + query_states = self.q_proj(hidden_states) + key_states = self.k_proj(hidden_states) + value_states = self.v_proj(hidden_states) + + query_states = query_states.view(bsz, 
q_len, self.num_heads, self.head_dim) + key_states = key_states.view(bsz, q_len, self.num_key_value_heads, + self.head_dim) + value_states = value_states.view(bsz, q_len, self.num_key_value_heads, + self.head_dim) + + assert _flash_supports_window_size, \ + ('The current flash attention version does not support sliding window ' + 'attention, for a more memory efficient implementation make sure ' + 'to upgrade flash-attn library.') + + kv_seq_len = key_states.shape[-3] + if past_key_value is not None: + if self.layer_idx is None: + raise ValueError( + 'The cache structure has changed since version v4.36. ' + f'If you are using {self.__class__.__name__} ' + 'for auto-regressive decoding with k/v caching, ' + 'please make sure to initialize the attention class ' + 'with a layer index.') + kv_seq_len += past_key_value.get_usable_length(kv_seq_len, + self.layer_idx) + + if use_varlen_atten: + cos, sin = self.rotary_emb(value_states, max_seqlen) + query_states = apply_rotary_emb(query_states, + cos[position_ids].squeeze(0), + sin[position_ids].squeeze(0)) + key_states = apply_rotary_emb(key_states, cos[position_ids].squeeze(0), + sin[position_ids].squeeze(0)) + else: + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + # Because the input can be padded, the absolute sequence length + # depends on the max position id. + rotary_seq_len = max(kv_seq_len, position_ids.max().item() + 1) + cos, sin = self.rotary_emb(value_states, seq_len=rotary_seq_len) + query_states, key_states = apply_rotary_pos_emb( + query_states, key_states, cos, sin, position_ids) + + # Activate slicing cache only if the config has a value + # `sliding_windows` attribute + cache_has_contents = past_key_value.get_seq_length(self.layer_idx) > 0 + if (getattr(self.config, 'sliding_window', None) is not None + and kv_seq_len > self.config.sliding_window # noqa: W503 + and cache_has_contents): # noqa: W503 + slicing_tokens = 1 - self.config.sliding_window + + past_key = past_key_value[self.layer_idx][0] + past_value = past_key_value[self.layer_idx][1] + + past_key = past_key[:, :, slicing_tokens:, :].contiguous() + past_value = past_value[:, :, slicing_tokens:, :].contiguous() + + if past_key.shape[-2] != self.config.sliding_window - 1: + raise ValueError( + 'past key must have a shape of (`batch_size, num_heads, ' + 'self.config.sliding_window-1, head_dim`), got' + f' {past_key.shape}') + + if attention_mask is not None: + attention_mask = attention_mask[:, slicing_tokens:] + attention_mask = torch.cat( + [attention_mask, + torch.ones_like(attention_mask[:, -1:])], + dim=-1) + + cache_kwargs = {'sin': sin, 'cos': cos} # Specific to RoPE models + key_states, value_states = past_key_value.update( + key_states, value_states, self.layer_idx, cache_kwargs) + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + + # repeat kv for sequence parallel + key_states = repeat_kv_bshd(key_states, self.num_key_value_groups) + value_states = repeat_kv_bshd(value_states, self.num_key_value_groups) + dropout_rate = 0.0 if not self.training else self.attention_dropout + + # In PEFT, usually we cast the layer norms in float32 for + # training stability reasons, therefore the input hidden states gets + # silently casted in float32. Hence, we need + # cast them back in float16 just to be sure everything works as expected. 
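+    # e.g. under `torch.autocast('cuda', dtype=torch.bfloat16)` the branch
+    # below resolves `target_dtype` to torch.bfloat16; otherwise it falls
+    # back to the (possibly quantization-aware) projection weight dtype.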
+ input_dtype = query_states.dtype + if input_dtype == torch.float32: + if torch.is_autocast_enabled(): + target_dtype = torch.get_autocast_gpu_dtype() + # Handle the case where the model is quantized + elif hasattr(self.config, '_pre_quantization_dtype'): + target_dtype = self.config._pre_quantization_dtype + else: + target_dtype = self.q_proj.weight.dtype + + query_states = query_states.to(target_dtype) + key_states = key_states.to(target_dtype) + value_states = value_states.to(target_dtype) + + # ----------------- flash attention forward ------------------------# + if not self._flash_attn_uses_top_left_mask: + causal = self.is_causal + else: + causal = self.is_causal and q_len != 1 + + use_sliding_windows = ( + _flash_supports_window_size and # noqa: W504 + getattr(self.config, 'sliding_window', None) is not None # noqa: W503 + and kv_seq_len > self.config.sliding_window) # noqa: W503 + window_size = (self.config.sliding_window, + self.config.sliding_window) if use_sliding_windows else (-1, + -1) + if use_varlen_atten: + attn_output = varlen_flash_attn( + query_states, + key_states, + value_states, + cumulative_len, + max_seqlen, + causal=causal, + dropout_p=dropout_rate, + window_size=window_size, + training=self.training) + else: + attn_output = flash_attn_wo_mask( + query_states, + key_states, + value_states, + causal=causal, + dropout_p=dropout_rate, + window_size=window_size, + training=self.training) + + # ---------------- flash attention forward end ------------------- # + + attn_output = attn_output.reshape(bsz, q_len, + self.hidden_size).contiguous() + attn_output = self.o_proj(attn_output) + + if not output_attentions: + attn_weights = None + + return attn_output, attn_weights, past_key_value diff --git a/data/xtuner/xtuner/model/modules/dispatch/phi3.py b/data/xtuner/xtuner/model/modules/dispatch/phi3.py new file mode 100644 index 0000000000000000000000000000000000000000..10f60f93983392643f3c1907b34af1bd48b2f03c --- /dev/null +++ b/data/xtuner/xtuner/model/modules/dispatch/phi3.py @@ -0,0 +1,480 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
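+# A minimal toy check (with illustrative names and values) of the RoPE
+# convention used below: `rotate_half` negates and swaps the two halves of
+# the last dim, and the embedding is q * cos + rotate_half(q) * sin; this
+# file's `rotate_half` / `apply_rotary_pos_emb` implement the same idea over
+# full (batch, heads, seq, head_dim) tensors.
+def _rope_toy_check():
+    import torch
+
+    q = torch.tensor([1., 2., 3., 4.])
+    q1, q2 = q[..., :2], q[..., 2:]
+    rotated = torch.cat((-q2, q1), dim=-1)  # -> [-3., -4., 1., 2.]
+    # at position 0 (cos=1, sin=0) RoPE is the identity
+    assert torch.equal(q * 1.0 + rotated * 0.0, q)
+    return rotated
+
+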
+import inspect +import warnings +from typing import Optional, Tuple + +import torch +import torch.distributed as dist +import transformers +from mmengine import MessageHub +from mmengine.utils import digit_version + +from xtuner.parallel.sequence import (get_sequence_parallel_world_size, + post_process_for_sequence_parallel_attn, + pre_process_for_sequence_parallel_attn) +from .attention import flash_attn_wo_mask, varlen_flash_attn + +try: + from transformers.cache_utils import Cache +except ImportError: + + class Cache: + pass + + +TRANSFORMERS_VERSION = digit_version(transformers.__version__) +IS_LOW_VERSION_TRANSFORMERS = TRANSFORMERS_VERSION < digit_version('4.43') + +if not IS_LOW_VERSION_TRANSFORMERS: + from transformers.modeling_flash_attention_utils import \ + _flash_attention_forward + +_flash_supports_window_size = False +try: + from flash_attn import flash_attn_func + + _flash_supports_window_size = 'window_size' in list( + inspect.signature(flash_attn_func).parameters) + + if not _flash_supports_window_size: + raise ValueError( + 'Please update flash-attention to support window size.') +# else: +except ImportError: + pass + + +# Copied from https://huggingface.co/microsoft/Phi-3-mini-4k-instruct/blob/3a811845d89f3c1b3f41b341d0f9f05104769f35/modeling_phi3.py#L302 # noqa:E501 +def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor: + """This is the equivalent of torch.repeat_interleave(x, dim=1, + repeats=n_rep). + + The hidden states go from (batch, num_key_value_heads, seqlen, head_dim) to + (batch, num_attention_heads, seqlen, head_dim) + """ + batch, num_key_value_heads, slen, head_dim = hidden_states.shape + if n_rep == 1: + return hidden_states + hidden_states = hidden_states[:, :, + None, :, :].expand(batch, + num_key_value_heads, + n_rep, slen, head_dim) + return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, + head_dim) + + +# https://huggingface.co/microsoft/Phi-3-mini-4k-instruct/blob/3a811845d89f3c1b3f41b341d0f9f05104769f35/modeling_phi3.py#L247 # noqa:E501 +def rotate_half(x): + """Rotates half the hidden dims of the input.""" + x1 = x[..., :x.shape[-1] // 2] + x2 = x[..., x.shape[-1] // 2:] + return torch.cat((-x2, x1), dim=-1) + + +# Copied from https://huggingface.co/microsoft/Phi-3-mini-4k-instruct/blob/3a811845d89f3c1b3f41b341d0f9f05104769f35/modeling_phi3.py#L255 # noqa:E501 +def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1): + """Applies Rotary Position Embedding to the query and key tensors. + + Args: + q (`torch.Tensor`): The query tensor. + k (`torch.Tensor`): The key tensor. + cos (`torch.Tensor`): The cosine part of the rotary embedding. + sin (`torch.Tensor`): The sine part of the rotary embedding. + position_ids (`torch.Tensor`, *optional*): + Deprecated and unused. + unsqueeze_dim (`int`, *optional*, defaults to 1): + The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and + sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note + that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and + k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes + cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have + the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2. 
+ Returns: + `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding. + """ # noqa:E501 + cos = cos.unsqueeze(unsqueeze_dim) + sin = sin.unsqueeze(unsqueeze_dim) + q_embed = (q * cos) + (rotate_half(q) * sin) + k_embed = (k * cos) + (rotate_half(k) * sin) + return q_embed, k_embed + + +def phi3_attn_forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.LongTensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Cache] = None, + output_attentions: bool = False, + use_cache: bool = False, + cache_position: Optional[torch.LongTensor] = None, + **kwargs, +): + if not _flash_supports_window_size: + raise ValueError( + 'The current flash attention version does not support ' + 'sliding window attention.') + + output_attentions = False + + if 'padding_mask' in kwargs: + warnings.warn( + 'Passing `padding_mask` is deprecated and will be removed in ' + 'v4.37. Please make sure use `attention_mask` instead.`') + + # overwrite attention_mask with padding_mask + attention_mask = kwargs.pop('padding_mask') + + bsz, q_len, _ = hidden_states.size() + + qkv = self.qkv_proj(hidden_states) + query_pos = self.num_heads * self.head_dim + query_states = qkv[..., :query_pos] + key_states = qkv[..., query_pos:query_pos + + self.num_key_value_heads * self.head_dim] + value_states = qkv[..., + query_pos + self.num_key_value_heads * self.head_dim:] + + # Flash attention requires the input to have the shape + # batch_size x seq_length x head_dim x hidden_dim + # therefore we just need to keep the original shape + query_states = query_states.view(bsz, q_len, self.num_heads, + self.head_dim).transpose(1, 2) + key_states = key_states.view(bsz, q_len, self.num_key_value_heads, + self.head_dim).transpose(1, 2) + value_states = value_states.view(bsz, q_len, self.num_key_value_heads, + self.head_dim).transpose(1, 2) + + kv_seq_len = key_states.shape[-2] + if past_key_value is not None: + if self.layer_idx is None: + raise ValueError( + 'The cache structure has changed since version v4.36. 
' + f'If you are using {self.__class__.__name__} ' + 'for auto-regressive decoding with k/v caching, ' + 'please make sure to initialize the attention class ' + 'with a layer index.') + kv_seq_len += past_key_value.get_usable_length(kv_seq_len, + self.layer_idx) + + rotary_seq_len = max(kv_seq_len, position_ids.max().item() + 1) + cos, sin = self.rotary_emb( + value_states, position_ids, seq_len=rotary_seq_len) + + query_states, key_states = apply_rotary_pos_emb(query_states, key_states, + cos, sin, position_ids) + + use_sliding_windows = ( + _flash_supports_window_size + and getattr(self.config, 'sliding_window', None) is not None + and kv_seq_len > self.config.sliding_window) + + if past_key_value is not None: + # Activate slicing cache only if the config has a value + # `sliding_windows` attribute + cache_has_contents = past_key_value.get_seq_length(self.layer_idx) > 0 + if (getattr(self.config, 'sliding_window', None) is not None + and kv_seq_len > self.config.sliding_window + and cache_has_contents): + slicing_tokens = 1 - self.config.sliding_window + + past_key = past_key_value[self.layer_idx][0] + past_value = past_key_value[self.layer_idx][1] + + past_key = past_key[:, :, slicing_tokens:, :].contiguous() + past_value = past_value[:, :, slicing_tokens:, :].contiguous() + + if past_key.shape[-2] != self.config.sliding_window - 1: + raise ValueError( + 'past key must have a shape of (`batch_size, num_heads, ' + 'self.config.sliding_window-1, head_dim`), got' + f' {past_key.shape}') + + if attention_mask is not None: + attention_mask = attention_mask[:, slicing_tokens:] + attention_mask = torch.cat( + [attention_mask, + torch.ones_like(attention_mask[:, -1:])], + dim=-1) + + cache_kwargs = {'sin': sin, 'cos': cos} # Specific to RoPE models + key_states, value_states = past_key_value.update( + key_states, value_states, self.layer_idx, cache_kwargs) + + # repeat k/v heads if n_kv_heads < n_heads + key_states = repeat_kv(key_states, self.num_key_value_groups) + value_states = repeat_kv(value_states, self.num_key_value_groups) + + attn_dropout = self.attention_dropout if self.training else 0.0 + + # In PEFT, usually we cast the layer norms in float32 for training + # stability reasons therefore the input hidden states gets silently + # casted in float32. Hence, we need cast them back in the correct dtype + # just to be sure everything works as expected. + # This might slowdown training & inference so it is recommended to not + # cast the LayerNorms in fp32. 
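+    # Worked example for the cache slicing above: with sliding_window=2048,
+    # slicing_tokens = 1 - 2048 = -2047, i.e. only the most recent 2047
+    # cached tokens are kept so that, together with the incoming token, at
+    # most `sliding_window` tokens are attended.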
+ + if query_states.dtype == torch.float32: + if torch.is_autocast_enabled(): + target_dtype = torch.get_autocast_gpu_dtype() + # Handle the case where the model is quantized + elif hasattr(self.config, '_pre_quantization_dtype'): + target_dtype = self.config._pre_quantization_dtype + else: + target_dtype = self.qkv_proj.weight.dtype + + query_states = query_states.to(target_dtype) + key_states = key_states.to(target_dtype) + value_states = value_states.to(target_dtype) + + # Reashape to the expected shape for Flash Attention + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + + enable_sequence_parallel = ( + dist.is_initialized() and get_sequence_parallel_world_size() > 1 + and self.training) + if enable_sequence_parallel: + # (b, s // sp_world_size, nd, dim) -> (b, s, nd // sp_world_size, dim) + query_states, key_states, value_states = \ + pre_process_for_sequence_parallel_attn( + query_states, key_states, value_states, + scatter_dim=2, gather_dim=1) + # num_heads has been changed because of sequence parallel + # `self.num_heads`` is not used in self._flash_attention_forward + # in mistral/mixtral, we are doing this to avoid some unnecessary risk + ori_num_head = self.num_heads + self.num_heads = query_states.shape[-2] + + if IS_LOW_VERSION_TRANSFORMERS: + attn_output = self._flash_attention_forward( + query_states, + key_states, + value_states, + attention_mask, + query_states.shape[1], + dropout=attn_dropout, + use_sliding_windows=use_sliding_windows, + ) + else: + attn_output = _flash_attention_forward( + query_states, + key_states, + value_states, + attention_mask, + query_states.shape[1], + dropout=attn_dropout, + sliding_window=getattr(self.config, 'sliding_window', None), + use_top_left_mask=self._flash_attn_uses_top_left_mask, + is_causal=self.is_causal, + ) + + if enable_sequence_parallel: + # (b, s, nd // sp_world_size, dim) -> (b, s // sp_world_size, nd, dim) + attn_output = post_process_for_sequence_parallel_attn( + attn_output, scatter_dim=1, gather_dim=2) + self.num_heads = ori_num_head + + attn_output = attn_output.reshape(bsz, q_len, self.hidden_size) + attn_output = self.o_proj(attn_output) + + if not output_attentions: + attn_weights = None + + return attn_output, attn_weights, past_key_value + + +def phi3_varlen_attn_forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Cache] = None, + output_attentions: bool = False, + use_cache: bool = False, + cache_position: Optional[torch.LongTensor] = None, + **kwargs, +) -> Tuple[torch.Tensor, Optional[torch.Tensor], + Optional[Tuple[torch.Tensor]]]: + if not _flash_supports_window_size: + raise ValueError( + 'The current flash attention version does not support ' + 'sliding window attention.') + + output_attentions = False + + is_training = self.training + + message_hub = MessageHub.get_instance('varlen_attn_args') + rank = dist.get_rank() + cumulative_len = message_hub.get_info(f'cumulative_len_rank_{rank}') + max_seqlen = message_hub.get_info(f'max_seqlen_rank_{rank}') + + assert is_training == (past_key_value is None) + use_varlen_atten = (cumulative_len is not None) + + if 'padding_mask' in kwargs: + warnings.warn( + 'Passing `padding_mask` is deprecated and will be removed in v4.37' + ' Please make sure use `attention_mask` instead.`') + + # overwrite attention_mask with padding_mask + attention_mask = 
kwargs.pop('padding_mask') + + bsz, q_len, _ = hidden_states.size() + assert bsz == 1, (f'If utilizing local attention, the batch size should be' + f' set to 1, but got {bsz}') + # attention_mask is set to None if no padding token in input_ids + # varlen attn need data packing so no padding tokens in input_ids + assert attention_mask is None + + qkv = self.qkv_proj(hidden_states) + query_pos = self.num_heads * self.head_dim + query_states = qkv[..., :query_pos] + key_states = qkv[..., query_pos:query_pos + + self.num_key_value_heads * self.head_dim] + value_states = qkv[..., + query_pos + self.num_key_value_heads * self.head_dim:] + + # Flash attention requires the input to have the shape + # batch_size x seq_length x head_dim x hidden_dim + # therefore we just need to keep the original shape + query_states = query_states.view(bsz, q_len, self.num_heads, + self.head_dim).transpose(1, 2) + key_states = key_states.view(bsz, q_len, self.num_key_value_heads, + self.head_dim).transpose(1, 2) + value_states = value_states.view(bsz, q_len, self.num_key_value_heads, + self.head_dim).transpose(1, 2) + + kv_seq_len = key_states.shape[-2] + if past_key_value is not None: + if self.layer_idx is None: + raise ValueError( + 'The cache structure has changed since version v4.36. ' + f'If you are using {self.__class__.__name__} ' + 'for auto-regressive decoding with k/v caching, ' + 'please make sure to initialize the attention class ' + 'with a layer index.') + kv_seq_len += past_key_value.get_usable_length(kv_seq_len, + self.layer_idx) + + assert position_ids is not None + rotary_seq_len = max(kv_seq_len, position_ids.max().item() + 1) + cos, sin = self.rotary_emb( + value_states, position_ids, seq_len=rotary_seq_len) + + query_states, key_states = apply_rotary_pos_emb(query_states, key_states, + cos, sin, position_ids) + + use_sliding_windows = ( + _flash_supports_window_size + and getattr(self.config, 'sliding_window', None) is not None + and kv_seq_len > self.config.sliding_window) + + if past_key_value is not None: + # Activate slicing cache only if the config has a value + # `sliding_windows` attribute + cache_has_contents = past_key_value.get_seq_length(self.layer_idx) > 0 + if (getattr(self.config, 'sliding_window', None) is not None + and kv_seq_len > self.config.sliding_window + and cache_has_contents): + slicing_tokens = 1 - self.config.sliding_window + + past_key = past_key_value[self.layer_idx][0] + past_value = past_key_value[self.layer_idx][1] + + past_key = past_key[:, :, slicing_tokens:, :].contiguous() + past_value = past_value[:, :, slicing_tokens:, :].contiguous() + + if past_key.shape[-2] != self.config.sliding_window - 1: + raise ValueError( + 'past key must have a shape of (`batch_size, num_heads, ' + 'self.config.sliding_window-1, head_dim`), got' + f' {past_key.shape}') + + if attention_mask is not None: + attention_mask = attention_mask[:, slicing_tokens:] + attention_mask = torch.cat( + [attention_mask, + torch.ones_like(attention_mask[:, -1:])], + dim=-1) + + cache_kwargs = {'sin': sin, 'cos': cos} # Specific to RoPE models + key_states, value_states = past_key_value.update( + key_states, value_states, self.layer_idx, cache_kwargs) + + # repeat k/v heads if n_kv_heads < n_heads + key_states = repeat_kv(key_states, self.num_key_value_groups) + value_states = repeat_kv(value_states, self.num_key_value_groups) + + # In PEFT, usually we cast the layer norms in float32 for + # training stability reasons, therefore the input hidden states gets + # silently casted in float32. 
Hence, we need + # cast them back in float16 just to be sure everything works as expected. + + if query_states.dtype == torch.float32: + if torch.is_autocast_enabled(): + target_dtype = torch.get_autocast_gpu_dtype() + # Handle the case where the model is quantized + elif hasattr(self.config, '_pre_quantization_dtype'): + target_dtype = self.config._pre_quantization_dtype + else: + target_dtype = self.qkv_proj.weight.dtype + + query_states = query_states.to(target_dtype) + key_states = key_states.to(target_dtype) + value_states = value_states.to(target_dtype) + + # Reashape to the expected shape for Flash Attention + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + + # ----------------- flash attention forward ------------------------# + + if not self._flash_attn_uses_top_left_mask: + causal = self.is_causal + else: + causal = self.is_causal and q_len != 1 + + use_sliding_windows = ( + _flash_supports_window_size + and getattr(self.config, 'sliding_window', None) is not None + and kv_seq_len > self.config.sliding_window) + + window_size = (self.config.sliding_window, + self.config.sliding_window) if use_sliding_windows else (-1, + -1) + attn_dropout = self.attention_dropout if self.training else 0.0 + + if use_varlen_atten: + attn_output = varlen_flash_attn( + query_states, + key_states, + value_states, + cumulative_len, + max_seqlen, + causal=causal, + dropout_p=attn_dropout, + window_size=window_size, + training=self.training) + else: + attn_output = flash_attn_wo_mask( + query_states, + key_states, + value_states, + causal=causal, + dropout_p=attn_dropout, + window_size=window_size, + training=self.training) + + # ---------------- flash attention forward end ------------------- # + + attn_output = attn_output.reshape(bsz, q_len, self.hidden_size) + attn_output = self.o_proj(attn_output) + + if not output_attentions: + attn_weights = None + + return attn_output, attn_weights, past_key_value diff --git a/data/xtuner/xtuner/model/modules/dispatch/qwen2.py b/data/xtuner/xtuner/model/modules/dispatch/qwen2.py new file mode 100644 index 0000000000000000000000000000000000000000..20f2f40f382e4e88daf7b40a54611d9b781460a9 --- /dev/null +++ b/data/xtuner/xtuner/model/modules/dispatch/qwen2.py @@ -0,0 +1,380 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
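+# The sequence-parallel branch below exchanges sequence and head shards via
+# all-to-all. A shape-only sketch with assumed values (sp_world_size=2,
+# 8 heads, 1024 total tokens, head_dim d):
+#
+#   before pre-processing : q/k/v (b, 512, 8, d)   # local sequence shard
+#   after pre-processing  : q/k/v (b, 1024, 4, d)  # full seq, sharded heads
+#   after attention       : out   (b, 1024, 4, d)
+#   after post-processing : out   (b, 512, 8, d)   # back to sequence shard
+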
+import inspect +import warnings +from typing import Optional + +import torch +import torch.distributed as dist +import transformers +from mmengine import MessageHub +from mmengine.utils import digit_version +from transformers.cache_utils import Cache +from transformers.models.qwen2.modeling_qwen2 import (apply_rotary_pos_emb, + repeat_kv) + +from xtuner.parallel.sequence import get_sequence_parallel_world_size +from xtuner.parallel.sequence.attention import ( + post_process_for_sequence_parallel_attn, + pre_process_for_sequence_parallel_attn) +from .attention import flash_attn_wo_mask, varlen_flash_attn + +SUPPORT_FLASH2 = False + +try: + from flash_attn import flash_attn_func + _flash_supports_window_size = 'window_size' in list( + inspect.signature(flash_attn_func).parameters) + SUPPORT_FLASH2 = True +except ImportError: + pass + +TRANSFORMERS_VERSION = digit_version(transformers.__version__) +IS_LOW_VERSION_TRANSFORMERS = TRANSFORMERS_VERSION < digit_version('4.43') + +if not IS_LOW_VERSION_TRANSFORMERS: + from transformers.modeling_flash_attention_utils import \ + _flash_attention_forward + + +def qwen2_attn_forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Cache] = None, + output_attentions: bool = False, + use_cache: bool = False, + **kwargs, +): + if 'padding_mask' in kwargs: + warnings.warn( + 'Passing `padding_mask` is deprecated and will be removed in ' + 'v4.37. Please make sure use `attention_mask` instead.`') + + # overwrite attention_mask with padding_mask + attention_mask = kwargs.pop('padding_mask') + bsz, q_len, _ = hidden_states.size() + + query_states = self.q_proj(hidden_states) + key_states = self.k_proj(hidden_states) + value_states = self.v_proj(hidden_states) + + query_states = query_states.view(bsz, q_len, self.num_heads, + self.head_dim).transpose(1, 2) + key_states = key_states.view(bsz, q_len, self.num_key_value_heads, + self.head_dim).transpose(1, 2) + value_states = value_states.view(bsz, q_len, self.num_key_value_heads, + self.head_dim).transpose(1, 2) + + kv_seq_len = key_states.shape[-2] + if past_key_value is not None: + if self.layer_idx is None: + raise ValueError( + 'The cache structure has changed since version v4.36. 
' + f'If you are using {self.__class__.__name__} ' + 'for auto-regressive decoding with k/v caching, ' + 'please make sure to initialize the attention class ' + 'with a layer index.') + kv_seq_len += past_key_value.get_usable_length(kv_seq_len, + self.layer_idx) + + assert position_ids is not None + rotary_seq_len = max(kv_seq_len, position_ids.max().item() + 1) + cos, sin = self.rotary_emb(value_states, seq_len=rotary_seq_len) + + query_states, key_states = apply_rotary_pos_emb(query_states, key_states, + cos, sin, position_ids) + + use_sliding_windows = ( + _flash_supports_window_size + and getattr(self.config, 'sliding_window', None) is not None + and kv_seq_len > self.config.sliding_window + and self.config.use_sliding_window) + + if past_key_value is not None: + # Activate slicing cache only if the config has a value + # `sliding_windows` attribute + cache_has_contents = past_key_value.get_seq_length(self.layer_idx) > 0 + if (getattr(self.config, 'sliding_window', None) is not None + and kv_seq_len > self.config.sliding_window + and cache_has_contents): + slicing_tokens = 1 - self.config.sliding_window + + past_key = past_key_value[self.layer_idx][0] + past_value = past_key_value[self.layer_idx][1] + + past_key = past_key[:, :, slicing_tokens:, :].contiguous() + past_value = past_value[:, :, slicing_tokens:, :].contiguous() + + if past_key.shape[-2] != self.config.sliding_window - 1: + raise ValueError( + 'past key must have a shape of (`batch_size, num_heads, ' + 'self.config.sliding_window-1, head_dim`), got' + f' {past_key.shape}') + + if attention_mask is not None: + attention_mask = attention_mask[:, slicing_tokens:] + attention_mask = torch.cat( + [attention_mask, + torch.ones_like(attention_mask[:, -1:])], + dim=-1) + + cache_kwargs = {'sin': sin, 'cos': cos} # Specific to RoPE models + key_states, value_states = past_key_value.update( + key_states, value_states, self.layer_idx, cache_kwargs) + + # repeat k/v heads if n_kv_heads < n_heads for sequence parallel + key_states = repeat_kv(key_states, self.num_key_value_groups) + value_states = repeat_kv(value_states, self.num_key_value_groups) + dropout_rate = 0.0 if not self.training else self.attention_dropout + + # In PEFT, usually we cast the layer norms in float32 for training + # stability reasons therefore the input hidden states gets silently + # casted in float32. Hence, we need cast them back in the correct dtype + # just to be sure everything works as expected. + # This might slowdown training & inference so it is recommended to not + # cast the LayerNorms in fp32. 
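+    # Note: unlike the Mistral/Phi-3 paths, `use_sliding_windows` above also
+    # requires `config.use_sliding_window`, so setting `sliding_window` alone
+    # does not enable SWA in the flash-attention call below.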
+ input_dtype = query_states.dtype + if input_dtype == torch.float32: + if torch.is_autocast_enabled(): + target_dtype = torch.get_autocast_gpu_dtype() + # Handle the case where the model is quantized + elif hasattr(self.config, '_pre_quantization_dtype'): + target_dtype = self.config._pre_quantization_dtype + else: + target_dtype = self.q_proj.weight.dtype + + query_states = query_states.to(target_dtype) + key_states = key_states.to(target_dtype) + value_states = value_states.to(target_dtype) + + # Reashape to the expected shape for Flash Attention + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + + enable_sequence_parallel = ( + dist.is_initialized() and get_sequence_parallel_world_size() > 1 + and self.training) + if enable_sequence_parallel: + query_states, key_states, value_states = \ + pre_process_for_sequence_parallel_attn( + query_states, key_states, value_states) + # num_heads has been changed because of sequence parallel + # `self.num_heads`` is not used in self._flash_attention_forward + # in mistral/mixtral, we are doing this to avoid some unnecessary risk + ori_num_head = self.num_heads + self.num_heads = query_states.shape[-2] + + if IS_LOW_VERSION_TRANSFORMERS: + attn_output = self._flash_attention_forward( + query_states, + key_states, + value_states, + attention_mask, + query_length=query_states.shape[1], + dropout=dropout_rate, + use_sliding_windows=use_sliding_windows, + ) + else: + if (self.config.use_sliding_window + and getattr(self.config, 'sliding_window', None) is not None + and self.layer_idx >= self.config.max_window_layers): + # There may be bugs here, but we are aligned with Transformers + sliding_window = self.config.sliding_window + else: + sliding_window = None + attn_output = _flash_attention_forward( + query_states, + key_states, + value_states, + attention_mask, + query_states.shape[1], + dropout=dropout_rate, + sliding_window=sliding_window, + is_causal=self.is_causal, + use_top_left_mask=self._flash_attn_uses_top_left_mask, + ) + + if enable_sequence_parallel: + attn_output = post_process_for_sequence_parallel_attn(attn_output) + self.num_heads = ori_num_head + + attn_output = attn_output.reshape(bsz, q_len, self.hidden_size) + attn_output = self.o_proj(attn_output) + + if not output_attentions: + attn_weights = None + + return attn_output, attn_weights, past_key_value + + +def qwen2_varlen_attn_forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Cache] = None, + output_attentions: bool = False, + use_cache: bool = False, + **kwargs, +): + is_training = self.training + + message_hub = MessageHub.get_instance('varlen_attn_args') + rank = dist.get_rank() + cumulative_len = message_hub.get_info(f'cumulative_len_rank_{rank}') + max_seqlen = message_hub.get_info(f'max_seqlen_rank_{rank}') + + assert is_training == (past_key_value is None) + use_varlen_atten = (cumulative_len is not None) + + if 'padding_mask' in kwargs: + warnings.warn( + 'Passing `padding_mask` is deprecated and will be removed in v4.37' + ' Please make sure use `attention_mask` instead.`') + + # overwrite attention_mask with padding_mask + attention_mask = kwargs.pop('padding_mask') + bsz, q_len, _ = hidden_states.size() + + query_states = self.q_proj(hidden_states) + key_states = self.k_proj(hidden_states) + value_states = self.v_proj(hidden_states) + + query_states = 
query_states.view(bsz, q_len, self.num_heads, + self.head_dim).transpose(1, 2) + key_states = key_states.view(bsz, q_len, self.num_key_value_heads, + self.head_dim).transpose(1, 2) + value_states = value_states.view(bsz, q_len, self.num_key_value_heads, + self.head_dim).transpose(1, 2) + + kv_seq_len = key_states.shape[-2] + if past_key_value is not None: + if self.layer_idx is None: + raise ValueError( + 'The cache structure has changed since version v4.36. ' + f'If you are using {self.__class__.__name__} ' + 'for auto-regressive decoding with k/v caching, ' + 'please make sure to initialize the attention class ' + 'with a layer index.') + kv_seq_len += past_key_value.get_usable_length(kv_seq_len, + self.layer_idx) + + assert position_ids is not None + rotary_seq_len = max(kv_seq_len, position_ids.max().item() + 1) + cos, sin = self.rotary_emb(value_states, seq_len=rotary_seq_len) + + query_states, key_states = apply_rotary_pos_emb(query_states, key_states, + cos, sin, position_ids) + + if past_key_value is not None: + # Activate slicing cache only if the config has a value + # `sliding_windows` attribute + cache_has_contents = past_key_value.get_seq_length(self.layer_idx) > 0 + if (getattr(self.config, 'sliding_window', None) is not None + and kv_seq_len > self.config.sliding_window + and cache_has_contents): + slicing_tokens = 1 - self.config.sliding_window + + past_key = past_key_value[self.layer_idx][0] + past_value = past_key_value[self.layer_idx][1] + + past_key = past_key[:, :, slicing_tokens:, :].contiguous() + past_value = past_value[:, :, slicing_tokens:, :].contiguous() + + if past_key.shape[-2] != self.config.sliding_window - 1: + raise ValueError( + 'past key must have a shape of (`batch_size, num_heads, ' + 'self.config.sliding_window-1, head_dim`), got' + f' {past_key.shape}') + + if attention_mask is not None: + attention_mask = attention_mask[:, slicing_tokens:] + attention_mask = torch.cat( + [attention_mask, + torch.ones_like(attention_mask[:, -1:])], + dim=-1) + + cache_kwargs = {'sin': sin, 'cos': cos} # Specific to RoPE models + key_states, value_states = past_key_value.update( + key_states, value_states, self.layer_idx, cache_kwargs) + + # repeat k/v heads if n_kv_heads < n_heads for sequence parallel + key_states = repeat_kv(key_states, self.num_key_value_groups) + value_states = repeat_kv(value_states, self.num_key_value_groups) + dropout_rate = 0.0 if not self.training else self.attention_dropout + + # In PEFT, usually we cast the layer norms in float32 for + # training stability reasons, therefore the input hidden states gets + # silently casted in float32. Hence, we need + # cast them back in float16 just to be sure everything works as expected. 
+ input_dtype = query_states.dtype + if input_dtype == torch.float32: + if torch.is_autocast_enabled(): + target_dtype = torch.get_autocast_gpu_dtype() + # Handle the case where the model is quantized + elif hasattr(self.config, '_pre_quantization_dtype'): + target_dtype = self.config._pre_quantization_dtype + else: + target_dtype = self.q_proj.weight.dtype + + query_states = query_states.to(target_dtype) + key_states = key_states.to(target_dtype) + value_states = value_states.to(target_dtype) + + # Reashape to the expected shape for Flash Attention + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + + # ----------------- flash attention forward ------------------------# + + if not self._flash_attn_uses_top_left_mask: + causal = self.is_causal + else: + causal = self.is_causal and q_len != 1 + + use_sliding_windows = ( + _flash_supports_window_size + and getattr(self.config, 'sliding_window', None) is not None + and kv_seq_len > self.config.sliding_window + and self.config.use_sliding_window) + # Decide whether to use SWA or not by layer index. + if use_sliding_windows and self.layer_idx >= self.config.max_window_layers: + use_sliding_windows = False + + window_size = (self.config.sliding_window, + self.config.sliding_window) if use_sliding_windows else (-1, + -1) + + if use_varlen_atten: + attn_output = varlen_flash_attn( + query_states, + key_states, + value_states, + cumulative_len, + max_seqlen, + causal=causal, + dropout_p=dropout_rate, + window_size=window_size, + training=self.training) + else: + attn_output = flash_attn_wo_mask( + query_states, + key_states, + value_states, + causal=causal, + dropout_p=dropout_rate, + window_size=window_size, + training=self.training) + + # ---------------- flash attention forward end ------------------- # + + attn_output = attn_output.reshape(bsz, q_len, self.hidden_size) + attn_output = self.o_proj(attn_output) + + if not output_attentions: + attn_weights = None + + return attn_output, attn_weights, past_key_value diff --git a/data/xtuner/xtuner/model/modules/dispatch/triton_kernels/__init__.py b/data/xtuner/xtuner/model/modules/dispatch/triton_kernels/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ed29f409f853172a0c90f0e81b0200972c379e66 --- /dev/null +++ b/data/xtuner/xtuner/model/modules/dispatch/triton_kernels/__init__.py @@ -0,0 +1,6 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .layer_norm import layer_norm_forward +from .rms_norm import rms_norm_forward +from .rotary import apply_rotary_emb + +__all__ = ['rms_norm_forward', 'layer_norm_forward', 'apply_rotary_emb'] diff --git a/data/xtuner/xtuner/model/modules/dispatch/triton_kernels/layer_norm.py b/data/xtuner/xtuner/model/modules/dispatch/triton_kernels/layer_norm.py new file mode 100644 index 0000000000000000000000000000000000000000..f808d6ad157a3ddbfeb6df02960c79739fcdc088 --- /dev/null +++ b/data/xtuner/xtuner/model/modules/dispatch/triton_kernels/layer_norm.py @@ -0,0 +1,12 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
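+#
+# A usage sketch (illustrative, not part of this module): the dispatcher
+# binds `layer_norm_forward` onto an existing norm module, so `self` only
+# needs to expose `.weight` and `.variance_epsilon`:
+#
+#     import types
+#     norm.forward = types.MethodType(layer_norm_forward, norm)
+#     y = norm(x)  # layer norm computed in fp32, returned in x's dtype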
+import torch +import torch.nn.functional as F + + +def layer_norm_forward(self, hidden_states): + input_dtype = hidden_states.dtype + hidden_states = hidden_states.to(torch.float32) + hidden_states = F.layer_norm( + hidden_states, (hidden_states.shape[-1], ), eps=self.variance_epsilon) + hidden_states = self.weight.to(torch.float32) * hidden_states + return hidden_states.to(input_dtype) diff --git a/data/xtuner/xtuner/model/modules/dispatch/triton_kernels/rms_norm.py b/data/xtuner/xtuner/model/modules/dispatch/triton_kernels/rms_norm.py new file mode 100644 index 0000000000000000000000000000000000000000..6191d55ba6e5e983d1e20c3e5282dffd439d2fd6 --- /dev/null +++ b/data/xtuner/xtuner/model/modules/dispatch/triton_kernels/rms_norm.py @@ -0,0 +1,220 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +import triton +import triton.language as tl + + +@triton.jit +def _rms_norm_fwd_fused( + X, # pointer to the input + Y, # pointer to the output + W, # pointer to the weights + Rstd, # pointer to the 1/std + stride, # how much to increase the pointer when moving by 1 row + N, # number of columns in X + eps, # epsilon to avoid division by zero + BLOCK_SIZE: tl.constexpr, +): + # Map the program id to the row of X and Y it should compute. + row = tl.program_id(0) + Y += row * stride + X += row * stride + # Compute variance + _var = tl.zeros([BLOCK_SIZE], dtype=tl.float32) + for off in range(0, N, BLOCK_SIZE): + cols = off + tl.arange(0, BLOCK_SIZE) + x = tl.load(X + cols, mask=cols < N, other=0.).to(tl.float32) + _var += x * x + var = tl.sum(_var, axis=0) / N + rstd = 1 / tl.sqrt(var + eps) + # Write rstd + tl.store(Rstd + row, rstd) + # Normalize and apply linear transformation + for off in range(0, N, BLOCK_SIZE): + cols = off + tl.arange(0, BLOCK_SIZE) + mask = cols < N + w = tl.load(W + cols, mask=mask) + x = tl.load(X + cols, mask=mask, other=0.).to(tl.float32) + x_hat = x * rstd + y = x_hat * w + # Write output + tl.store(Y + cols, y, mask=mask) + + +@triton.jit +def _rms_norm_bwd_dx_fused( + DX, # pointer to the input gradient + DY, # pointer to the output gradient + DW, # pointer to the partial sum of weights gradient + X, # pointer to the input + W, # pointer to the weights + Rstd, # pointer to the 1/std + Lock, # pointer to the lock + stride, # how much to increase the pointer when moving by 1 row + N, # number of columns in X + eps, # epsilon to avoid division by zero + GROUP_SIZE_M: tl.constexpr, + BLOCK_SIZE_N: tl.constexpr): + # Map the program id to the elements of X, DX, and DY it should compute. + row = tl.program_id(0) + cols = tl.arange(0, BLOCK_SIZE_N) + mask = cols < N + X += row * stride + DY += row * stride + DX += row * stride + # Offset locks and weights/biases gradient pointer for parallel reduction + lock_id = row % GROUP_SIZE_M + Lock += lock_id + Count = Lock + GROUP_SIZE_M + DW = DW + lock_id * N + cols + # Load data to SRAM + x = tl.load(X + cols, mask=mask, other=0).to(tl.float32) + dy = tl.load(DY + cols, mask=mask, other=0).to(tl.float32) + w = tl.load(W + cols, mask=mask).to(tl.float32) + rstd = tl.load(Rstd + row) + # Compute dx + xhat = x * rstd + wdy = w * dy + xhat = tl.where(mask, xhat, 0.) + wdy = tl.where(mask, wdy, 0.) 
+ c1 = tl.sum(xhat * wdy, axis=0) / N + dx = (wdy - (xhat * c1)) * rstd + # Write dx + tl.store(DX + cols, dx, mask=mask) + # Accumulate partial sums for dw/db + partial_dw = (dy * xhat).to(w.dtype) + while tl.atomic_cas(Lock, 0, 1) == 1: + pass + count = tl.load(Count) + # First store doesn't accumulate + if count == 0: + tl.atomic_xchg(Count, 1) + else: + partial_dw += tl.load(DW, mask=mask) + tl.store(DW, partial_dw, mask=mask) + # Release the lock + tl.atomic_xchg(Lock, 0) + + +@triton.jit +def _rms_norm_bwd_dwdb( + DW, # pointer to the partial sum of weights gradient + FINAL_DW, # pointer to the weights gradient + M, # GROUP_SIZE_M + N, # number of columns + BLOCK_SIZE_M: tl.constexpr, + BLOCK_SIZE_N: tl.constexpr): + # Map the program id to the elements of DW and DB it should compute. + pid = tl.program_id(0) + cols = pid * BLOCK_SIZE_N + tl.arange(0, BLOCK_SIZE_N) + dw = tl.zeros((BLOCK_SIZE_M, BLOCK_SIZE_N), dtype=tl.float32) + # Iterate through the rows of DW and DB to sum the partial sums. + for i in range(0, M, BLOCK_SIZE_M): + rows = i + tl.arange(0, BLOCK_SIZE_M) + mask = (rows[:, None] < M) & (cols[None, :] < N) + offs = rows[:, None] * N + cols[None, :] + dw += tl.load(DW + offs, mask=mask, other=0.) + # Write the final sum to the output. + sum_dw = tl.sum(dw, axis=0) + tl.store(FINAL_DW + cols, sum_dw, mask=cols < N) + + +class RMSNorm(torch.autograd.Function): + + @staticmethod + def forward(ctx, x, weight, eps): + # allocate output + y = torch.empty_like(x) + # reshape input data into 2D tensor + x_arg = x.reshape(-1, x.shape[-1]) + M, N = x_arg.shape + rstd = torch.empty((M, ), dtype=torch.float32, device='cuda') + # Less than 64KB per feature: enqueue fused kernel + MAX_FUSED_SIZE = 65536 // x.element_size() + BLOCK_SIZE = min(MAX_FUSED_SIZE, triton.next_power_of_2(N)) + if N > BLOCK_SIZE: + raise RuntimeError( + "This rms norm doesn't support feature dim >= 64KB.") + # heuristics for number of warps + num_warps = min(max(BLOCK_SIZE // 256, 1), 8) + # enqueue kernel + _rms_norm_fwd_fused[(M, )]( + x_arg, + y, + weight, + rstd, + x_arg.stride(0), + N, + eps, + BLOCK_SIZE=BLOCK_SIZE, + num_warps=num_warps, + ) + ctx.save_for_backward(x, weight, rstd) + ctx.BLOCK_SIZE = BLOCK_SIZE + ctx.num_warps = num_warps + ctx.eps = eps + return y + + @staticmethod + def backward(ctx, dy): + x, w, v = ctx.saved_tensors + # heuristics for amount of parallel reduction stream for DW/DB + N = w.shape[0] + GROUP_SIZE_M = 64 + if N <= 8192: + GROUP_SIZE_M = 96 + if N <= 4096: + GROUP_SIZE_M = 128 + if N <= 1024: + GROUP_SIZE_M = 256 + # allocate output + locks = torch.zeros(2 * GROUP_SIZE_M, dtype=torch.int32, device='cuda') + _dw = torch.empty((GROUP_SIZE_M, w.shape[0]), + dtype=x.dtype, + device=w.device) + dw = torch.empty((w.shape[0], ), dtype=w.dtype, device=w.device) + dx = torch.empty_like(dy) + # enqueue kernel using forward pass heuristics + # also compute partial sums for DW and DB + x_arg = x.reshape(-1, x.shape[-1]) + M, N = x_arg.shape + _rms_norm_bwd_dx_fused[(M, )]( + dx, + dy, + _dw, + x, + w, + v, + locks, + x_arg.stride(0), + N, + ctx.eps, + BLOCK_SIZE_N=ctx.BLOCK_SIZE, + GROUP_SIZE_M=GROUP_SIZE_M, + num_warps=ctx.num_warps) + + def grid(meta): + return [triton.cdiv(N, meta['BLOCK_SIZE_N'])] + + # accumulate partial sums in separate kernel + _rms_norm_bwd_dwdb[grid]( + _dw, + dw, + GROUP_SIZE_M, + N, + BLOCK_SIZE_M=32, + BLOCK_SIZE_N=128, + ) + return dx, dw, None + + +rms_norm = RMSNorm.apply + + +def rms_norm_forward(self, hidden_states): + if (hidden_states.device == 
torch.device('cpu') + or self.weight.device == torch.device('cpu')): + raise RuntimeError( + 'Can not use triton kernels on cpu. Please set `USE_TRITON_KERNEL`' + ' environment variable to 0 before training.') + return rms_norm(hidden_states, self.weight, self.variance_epsilon) diff --git a/data/xtuner/xtuner/model/modules/dispatch/triton_kernels/rotary.py b/data/xtuner/xtuner/model/modules/dispatch/triton_kernels/rotary.py new file mode 100644 index 0000000000000000000000000000000000000000..1e09c16628751dbc769d1ca4ce7d0650de8f835b --- /dev/null +++ b/data/xtuner/xtuner/model/modules/dispatch/triton_kernels/rotary.py @@ -0,0 +1,327 @@ +# Copyright (c) OpenMMLab. All rights reserved. +# Modified from https://github.com/Dao-AILab/flash-attention/blob/main/flash_attn/ops/triton/rotary.py # noqa:E501 +from typing import Optional, Union + +import torch +import triton +import triton.language as tl + + +@triton.jit +def rotary_kernel( + OUT, # Pointers to matrices + X, + COS, + SIN, + CU_SEQLENS, + SEQLEN_OFFSETS, # this could be int or a pointer + # Matrix dimensions + seqlen, + rotary_dim, + seqlen_ro, + # strides + stride_out_batch, + stride_out_seqlen, + stride_out_nheads, + stride_out_headdim, + stride_x_batch, + stride_x_seqlen, + stride_x_nheads, + stride_x_headdim, + # Meta-parameters + BLOCK_K: tl.constexpr, + IS_SEQLEN_OFFSETS_TENSOR: tl.constexpr, + IS_VARLEN: tl.constexpr, + INTERLEAVED: tl.constexpr, + CONJUGATE: tl.constexpr, + BLOCK_M: tl.constexpr, +): + pid_m = tl.program_id(axis=0) + pid_batch = tl.program_id(axis=1) + pid_head = tl.program_id(axis=2) + rotary_dim_half = rotary_dim // 2 + + if not IS_VARLEN: + X = X + pid_batch * stride_x_batch + pid_head * stride_x_nheads + OUT = OUT + pid_batch * stride_out_batch + pid_head * stride_out_nheads + else: + start_idx = tl.load(CU_SEQLENS + pid_batch) + seqlen = tl.load(CU_SEQLENS + pid_batch + 1) - start_idx + X = X + start_idx * stride_x_seqlen + pid_head * stride_x_nheads + OUT = OUT + start_idx * stride_out_seqlen + \ + pid_head * stride_out_nheads + + if pid_m * BLOCK_M >= seqlen: + return + rm = pid_m * BLOCK_M + tl.arange(0, BLOCK_M) + if not IS_SEQLEN_OFFSETS_TENSOR: + rm_cs = rm + SEQLEN_OFFSETS + else: + rm_cs = rm + tl.load(SEQLEN_OFFSETS + pid_batch) + rk = tl.arange(0, BLOCK_K) + rk_half = tl.arange(0, BLOCK_K // 2) + + if not INTERLEAVED: + # Load the 1st and 2nd halves of X, do calculation, + # then store to 1st and 2nd halves of OUT + X = X + ( + rm[:, None] * stride_x_seqlen + + rk_half[None, :] * stride_x_headdim) + # This is different from the official implementation as the shapes of + # the two tensors cos and sin are (seqlen_ro, rotary_dim) instead of + # (seqlen_ro, rotary_dim // 2). 
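+        # Concretely (illustrative): element (m, k) of the cos table is read
+        # from COS + m * rotary_dim + k with k < rotary_dim // 2, i.e. only
+        # the first half of each (seqlen_ro, rotary_dim) row is used, since
+        # the two halves of the head dimension share the same angle.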
+ COS = COS + (rm_cs[:, None] * rotary_dim + rk_half[None, :]) + SIN = SIN + (rm_cs[:, None] * rotary_dim + rk_half[None, :]) + cos = tl.load( + COS, + mask=(rm_cs[:, None] < seqlen_ro) & + (rk_half[None, :] < rotary_dim_half), + other=1.0).to(tl.float32) + sin = tl.load( + SIN, + mask=(rm_cs[:, None] < seqlen_ro) & + (rk_half[None, :] < rotary_dim_half), + other=0.0).to(tl.float32) + x0 = tl.load( + X, + mask=(rm[:, None] < seqlen) & (rk_half[None, :] < rotary_dim_half), + other=0.0).to(tl.float32) + x1 = tl.load( + X + rotary_dim_half * stride_x_headdim, + mask=(rm[:, None] < seqlen) & (rk_half[None, :] < rotary_dim_half), + other=0.0, + ).to(tl.float32) + if CONJUGATE: + sin = -sin + o0 = x0 * cos - x1 * sin + o1 = x0 * sin + x1 * cos + # write back result + OUT = OUT + ( + rm[:, None] * stride_out_seqlen + + rk_half[None, :] * stride_out_headdim) + tl.store( + OUT, + o0, + mask=(rm[:, None] < seqlen) & (rk_half[None, :] < rotary_dim_half)) + tl.store( + OUT + rotary_dim_half * stride_out_headdim, + o1, + mask=(rm[:, None] < seqlen) & (rk_half[None, :] < rotary_dim_half), + ) + else: + # We don't want to load X[0, 2, 4, ...] and X[1, 3, 5, ...] separately + # since both are slow. + # Instead, we load x0 = X[0, 1, 2, 3, ...] and x1 = X[1, 0, 3, 2, ...]. + # Loading x0 will be fast but x1 will be slow. + # Then we load cos = COS[0, 0, 1, 1, ...] and + # sin = SIN[0, 0, 1, 1, ...]. + # Then we do the calculation and use tl.where to pick put the right + # outputs for the even and for the odd indices. + rk_swap = rk + ((rk + 1) % 2) * 2 - 1 # 1, 0, 3, 2, 5, 4, ... + rk_repeat = tl.arange(0, BLOCK_K) // 2 + # This is different from the official implementation as the shapes of + # the two tensors cos and sin are (seqlen_ro, rotary_dim) instead of + # (seqlen_ro, rotary_dim // 2). + X0 = X + ( + rm[:, None] * stride_x_seqlen + rk[None, :] * stride_x_headdim) + X1 = X + ( + rm[:, None] * stride_x_seqlen + + rk_swap[None, :] * stride_x_headdim) + COS = COS + (rm_cs[:, None] * rotary_dim + rk_repeat[None, :]) + SIN = SIN + (rm_cs[:, None] * rotary_dim + rk_repeat[None, :]) + cos = tl.load( + COS, + mask=(rm_cs[:, None] < seqlen_ro) & + (rk_repeat[None, :] < rotary_dim_half), + other=1.0, + ).to(tl.float32) + sin = tl.load( + SIN, + mask=(rm_cs[:, None] < seqlen_ro) & + (rk_repeat[None, :] < rotary_dim_half), + other=0.0, + ).to(tl.float32) + x0 = tl.load( + X0, + mask=(rm[:, None] < seqlen) & (rk[None, :] < rotary_dim), + other=0.0).to(tl.float32) + x1 = tl.load( + X1, + mask=(rm[:, None] < seqlen) & (rk_swap[None, :] < rotary_dim), + other=0.0).to(tl.float32) + if CONJUGATE: + sin = -sin + x0_cos = x0 * cos + x1_sin = x1 * sin + out = tl.where(rk[None, :] % 2 == 0, x0_cos - x1_sin, x0_cos + x1_sin) + OUT = OUT + ( + rm[:, None] * stride_out_seqlen + rk[None, :] * stride_out_headdim) + tl.store( + OUT, out, mask=(rm[:, None] < seqlen) & (rk[None, :] < rotary_dim)) + + +def apply_rotary( + x: torch.Tensor, + cos: torch.Tensor, + sin: torch.Tensor, + seqlen_offsets: Union[int, torch.Tensor] = 0, + cu_seqlens: Optional[torch.Tensor] = None, + max_seqlen: Optional[int] = None, + interleaved=False, + inplace=False, + conjugate=False, +) -> torch.Tensor: + """ + Arguments: + x: (batch, seqlen, nheads, headdim) if cu_seqlens is None + else (total_seqlen, nheads, headdim). 
+ cos: (seqlen_ro, rotary_dim) + sin: (seqlen_ro, rotary_dim) + seqlen_offsets: integer or integer tensor of size (batch,) + cu_seqlens: (batch + 1,) or None + max_seqlen: int + Returns: + y: (batch, seqlen, nheads, headdim) + """ + is_varlen = cu_seqlens is not None + if not is_varlen: + batch, seqlen, nheads, headdim = x.shape + else: + assert max_seqlen is not None, ('If cu_seqlens is passed in, ' + 'then max_seqlen must be passed') + total_seqlen, nheads, headdim = x.shape + batch_p_1 = cu_seqlens.shape[0] + batch = batch_p_1 - 1 + seqlen = max_seqlen + seqlen_ro, rotary_dim = cos.shape + assert sin.shape == cos.shape + # rotary_dim *= 2 + assert rotary_dim <= headdim, 'rotary_dim must be <= headdim' + assert headdim <= 256, 'Only support headdim <= 256' + assert seqlen_ro >= seqlen, 'seqlen_ro must be >= seqlen' + + assert ( + cos.dtype == sin.dtype + ), f'cos and sin must have the same dtype, got {cos.dtype} and {sin.dtype}' + assert (x.dtype == cos.dtype), ( + f'Input and cos/sin must have the same dtype, ' + f'got {x.dtype} and {cos.dtype}') + + cos, sin = cos.contiguous(), sin.contiguous() + if isinstance(seqlen_offsets, torch.Tensor): + assert seqlen_offsets.shape == (batch, ) + assert seqlen_offsets.dtype in [torch.int32, torch.int64] + seqlen_offsets = seqlen_offsets.contiguous() + else: + assert seqlen_offsets + seqlen <= seqlen_ro + + output = torch.empty_like(x) if not inplace else x + if rotary_dim < headdim and not inplace: + output[..., rotary_dim:].copy_(x[..., rotary_dim:]) + + BLOCK_K = (32 if rotary_dim <= 32 else + (64 if rotary_dim <= 64 else + (128 if rotary_dim <= 128 else 256))) + + def grid(META): + return (triton.cdiv(seqlen, META['BLOCK_M']), batch, nheads) + + BLOCK_M = 4 if interleaved else (8 if rotary_dim <= 64 else 4) + + # Need this, otherwise Triton tries to launch from cuda:0 and we get + # ValueError: Pointer argument (at 0) cannot be accessed from Triton + # (cpu tensor?) 
+ with torch.cuda.device(x.device.index): + rotary_kernel[grid]( + output, # data ptrs + x, + cos, + sin, + cu_seqlens, + seqlen_offsets, + seqlen, # shapes + rotary_dim, + seqlen_ro, + output.stride(0) + if not is_varlen else 0, # batch_strides if not varlen else 0 + output.stride(-3), # seqlen_stride or total_seqlen_stride + output.stride(-2), # nheads_stride + output.stride(-1), # headdim_stride + x.stride(0) + if not is_varlen else 0, # batch_strides if not varlen else 0 + x.stride(-3), # seqlen stride or total_seqlen_stride + x.stride(-2), # nheads stride + x.stride(-1), # headdim stride + BLOCK_K, + isinstance(seqlen_offsets, torch.Tensor), + is_varlen, + interleaved, + conjugate, + BLOCK_M, + ) + return output + + +class ApplyRotaryEmb(torch.autograd.Function): + + @staticmethod + def forward( + ctx, + x, + cos, + sin, + interleaved=False, + inplace=False, + seqlen_offsets: Union[int, torch.Tensor] = 0, + cu_seqlens: Optional[torch.Tensor] = None, + max_seqlen: Optional[int] = None, + ): + out = apply_rotary( + x, + cos, + sin, + seqlen_offsets=seqlen_offsets, + cu_seqlens=cu_seqlens, + max_seqlen=max_seqlen, + interleaved=interleaved, + inplace=inplace, + ) + if isinstance(seqlen_offsets, int): + ctx.save_for_backward( + cos, sin, cu_seqlens) # Can't save int with save_for_backward + ctx.seqlen_offsets = seqlen_offsets + else: + ctx.save_for_backward(cos, sin, cu_seqlens, seqlen_offsets) + ctx.seqlen_offsets = None + ctx.interleaved = interleaved + ctx.inplace = inplace + ctx.max_seqlen = max_seqlen + return out if not inplace else x + + @staticmethod + def backward(ctx, do): + seqlen_offsets = ctx.seqlen_offsets + if seqlen_offsets is None: + cos, sin, cu_seqlens, seqlen_offsets = ctx.saved_tensors + else: + cos, sin, cu_seqlens = ctx.saved_tensors + # TD [2023-09-02]: For some reason Triton (2.0.0.post1) errors with + # "[CUDA]: invalid device context", and cloning makes it work. Idk why. + # Triton 2.1.0 works. 
+ if not ctx.interleaved and not ctx.inplace: + do = do.clone() + dx = apply_rotary( + do, + cos, + sin, + seqlen_offsets=seqlen_offsets, + cu_seqlens=cu_seqlens, + max_seqlen=ctx.max_seqlen, + interleaved=ctx.interleaved, + inplace=ctx.inplace, + conjugate=True, + ) + return dx, None, None, None, None, None, None, None + + +apply_rotary_emb = ApplyRotaryEmb.apply diff --git a/data/xtuner/xtuner/model/modules/dispatch/utils.py b/data/xtuner/xtuner/model/modules/dispatch/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..4cfa26cd1f98460a217862abe50f531389421a08 --- /dev/null +++ b/data/xtuner/xtuner/model/modules/dispatch/utils.py @@ -0,0 +1,64 @@ +import torch +import torch.nn.functional as F + +try: + from flash_attn.bert_padding import index_first_axis, unpad_input +except ImportError: + pass + + +def _get_unpad_data(attention_mask): + seqlens_in_batch = attention_mask.sum(dim=-1, dtype=torch.int32) + indices = torch.nonzero(attention_mask.flatten(), as_tuple=False).flatten() + max_seqlen_in_batch = seqlens_in_batch.max().item() + cu_seqlens = F.pad( + torch.cumsum(seqlens_in_batch, dim=0, dtype=torch.torch.int32), (1, 0)) + return ( + indices, + cu_seqlens, + max_seqlen_in_batch, + ) + + +def upad_qkv(query_layer, key_layer, value_layer, attention_mask, + query_length): + indices_k, cu_seqlens_k, max_seqlen_in_batch_k = _get_unpad_data( + attention_mask) + batch_size, kv_seq_len, num_key_value_heads, head_dim = key_layer.shape + + key_layer = index_first_axis( + key_layer.reshape(batch_size * kv_seq_len, num_key_value_heads, + head_dim), indices_k) + value_layer = index_first_axis( + value_layer.reshape(batch_size * kv_seq_len, num_key_value_heads, + head_dim), indices_k) + if query_length == kv_seq_len: + # Different from the origin version as sequence parallel change + # the number of attention heads. + query_layer = index_first_axis( + query_layer.reshape(batch_size * kv_seq_len, -1, head_dim), + indices_k) + cu_seqlens_q = cu_seqlens_k + max_seqlen_in_batch_q = max_seqlen_in_batch_k + indices_q = indices_k + elif query_length == 1: + max_seqlen_in_batch_q = 1 + cu_seqlens_q = torch.arange( + batch_size + 1, dtype=torch.int32, device=query_layer.device + ) # There is a memcpy here, that is very bad. + indices_q = cu_seqlens_q[:-1] + query_layer = query_layer.squeeze(1) + else: + # The -q_len: slice assumes left padding. + attention_mask = attention_mask[:, -query_length:] + query_layer, indices_q, cu_seqlens_q, max_seqlen_in_batch_q = \ + unpad_input(query_layer, attention_mask) + + return ( + query_layer, + key_layer, + value_layer, + indices_q, + (cu_seqlens_q, cu_seqlens_k), + (max_seqlen_in_batch_q, max_seqlen_in_batch_k), + ) diff --git a/data/xtuner/xtuner/model/modules/dispatch/yi.py b/data/xtuner/xtuner/model/modules/dispatch/yi.py new file mode 100644 index 0000000000000000000000000000000000000000..3c3e0d20ce04ee04edcf70380b8fcc220d9a7321 --- /dev/null +++ b/data/xtuner/xtuner/model/modules/dispatch/yi.py @@ -0,0 +1,99 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Optional, Tuple + +import torch +import torch.nn.functional as F + + +def rotate_half(x): + """Rotates half the hidden dims of the input.""" + x1 = x[..., :x.shape[-1] // 2] + x2 = x[..., x.shape[-1] // 2:] + return torch.cat((-x2, x1), dim=-1) + + +def apply_rotary_pos_emb(q, k, cos, sin, position_ids): + # The first two dimensions of cos and sin are always 1, + # so we can `squeeze` them. 
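+    # Shape walk-through (illustrative): with bs=2, seq_len=5, dim=128,
+    # cos starts as (1, 1, 5, 128); the two squeezes give (5, 128),
+    # indexing with position_ids of shape (2, 5) gives (2, 5, 128), and
+    # unsqueeze(1) yields (2, 1, 5, 128), which broadcasts against q/k of
+    # shape (2, num_heads, 5, 128).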
+ cos = cos.squeeze(1).squeeze(0) # [seq_len, dim] + sin = sin.squeeze(1).squeeze(0) # [seq_len, dim] + cos = cos[position_ids].unsqueeze(1) # [bs, 1, seq_len, dim] + sin = sin[position_ids].unsqueeze(1) # [bs, 1, seq_len, dim] + q_embed = (q * cos) + (rotate_half(q) * sin) + k_embed = (k * cos) + (rotate_half(k) * sin) + return q_embed, k_embed + + +def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor: + """This is the equivalent of torch.repeat_interleave(x, dim=1, + repeats=n_rep). + + The hidden states go from (batch, num_key_value_heads, seqlen, head_dim) to + (batch, num_attention_heads, seqlen, head_dim) + """ + batch, num_key_value_heads, slen, head_dim = hidden_states.shape + if n_rep == 1: + return hidden_states + hidden_states = hidden_states[:, :, + None, :, :].expand(batch, + num_key_value_heads, + n_rep, slen, head_dim) + return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, + head_dim) + + +def yi_attn_forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Tuple[torch.Tensor]] = None, + output_attentions: bool = False, + use_cache: bool = False, + **kwargs, +) -> Tuple[torch.Tensor, Optional[torch.Tensor], + Optional[Tuple[torch.Tensor]]]: + bsz, q_len, _ = hidden_states.size() + + query_states = self.q_proj(hidden_states) + key_states = self.k_proj(hidden_states) + value_states = self.v_proj(hidden_states) + + query_states = query_states.view(bsz, q_len, self.num_heads, + self.head_dim).transpose(1, 2) + key_states = key_states.view(bsz, q_len, self.num_key_value_heads, + self.head_dim).transpose(1, 2) + value_states = value_states.view(bsz, q_len, self.num_key_value_heads, + self.head_dim).transpose(1, 2) + + kv_seq_len = key_states.shape[-2] + if past_key_value is not None: + kv_seq_len += past_key_value[0].shape[-2] + cos, sin = self.rotary_emb(value_states, seq_len=kv_seq_len) + query_states, key_states = apply_rotary_pos_emb(query_states, key_states, + cos, sin, position_ids) + + if past_key_value is not None: + # reuse k, v, self_attention + key_states = torch.cat([past_key_value[0], key_states], dim=2) + value_states = torch.cat([past_key_value[1], value_states], dim=2) + + past_key_value = (key_states, value_states) if use_cache else None + + # repeat k/v heads if n_kv_heads < n_heads + key_states = repeat_kv(key_states, self.num_key_value_groups) + value_states = repeat_kv(value_states, self.num_key_value_groups) + + # use flash attention implemented by pytorch + attn_output = F.scaled_dot_product_attention( + query_states, key_states, value_states, attn_mask=attention_mask) + + attn_output = attn_output.transpose(1, 2).contiguous() + attn_output = attn_output.reshape(bsz, q_len, self.hidden_size) + + attn_output = self.o_proj(attn_output) + + # Due to the implementation of the PyTorch version of flash attention, + # even when the output_attentions flag is set to True, it is not possible + # to return the attn_weights. + return attn_output, None, past_key_value diff --git a/data/xtuner/xtuner/model/modules/projector/__init__.py b/data/xtuner/xtuner/model/modules/projector/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6196093dd5ffa4f4be0821ae2198f17a86f685f6 --- /dev/null +++ b/data/xtuner/xtuner/model/modules/projector/__init__.py @@ -0,0 +1,10 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
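+#
+# Registering the pair below with the Auto classes lets the projector be
+# loaded back like any HF model. A minimal sketch (path and sizes are
+# illustrative):
+#
+#     from transformers import AutoModel
+#     cfg = ProjectorConfig(visual_hidden_size=1024, llm_hidden_size=4096)
+#     ProjectorModel(cfg).save_pretrained('./projector')
+#     projector = AutoModel.from_pretrained('./projector')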
+from transformers import AutoConfig, AutoModel + +from .configuration_projector import ProjectorConfig +from .modeling_projector import ProjectorModel + +AutoConfig.register('projector', ProjectorConfig) +AutoModel.register(ProjectorConfig, ProjectorModel) + +__all__ = ['ProjectorConfig', 'ProjectorModel'] diff --git a/data/xtuner/xtuner/model/modules/projector/configuration_projector.py b/data/xtuner/xtuner/model/modules/projector/configuration_projector.py new file mode 100644 index 0000000000000000000000000000000000000000..f63ffdc4698bc867bd559370ea8766537270661c --- /dev/null +++ b/data/xtuner/xtuner/model/modules/projector/configuration_projector.py @@ -0,0 +1,23 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from transformers import PretrainedConfig + + +class ProjectorConfig(PretrainedConfig): + model_type = 'projector' + _auto_class = 'AutoConfig' + + def __init__( + self, + visual_hidden_size=4096, + llm_hidden_size=4096, + depth=2, + hidden_act='gelu', + bias=True, + **kwargs, + ): + self.visual_hidden_size = visual_hidden_size + self.llm_hidden_size = llm_hidden_size + self.depth = depth + self.hidden_act = hidden_act + self.bias = bias + super().__init__(**kwargs) diff --git a/data/xtuner/xtuner/model/modules/projector/modeling_projector.py b/data/xtuner/xtuner/model/modules/projector/modeling_projector.py new file mode 100644 index 0000000000000000000000000000000000000000..d55e7588c8c3d7dc3537f1bf0a7ec4c14b1901b2 --- /dev/null +++ b/data/xtuner/xtuner/model/modules/projector/modeling_projector.py @@ -0,0 +1,51 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +import torch.nn as nn +from transformers import PreTrainedModel +from transformers.activations import ACT2FN + +from .configuration_projector import ProjectorConfig + + +class ProjectorModel(PreTrainedModel): + _auto_class = 'AutoModel' + config_class = ProjectorConfig + base_model_prefix = 'model' + supports_gradient_checkpointing = True + + def __init__(self, config: ProjectorConfig) -> None: + super().__init__(config) + self.gradient_checkpointing = False + + modules = [ + nn.Linear( + config.visual_hidden_size, + config.llm_hidden_size, + bias=config.bias) + ] + for _ in range(1, config.depth): + modules.append(ACT2FN[config.hidden_act]) + modules.append( + nn.Linear( + config.llm_hidden_size, + config.llm_hidden_size, + bias=config.bias)) + self.model = nn.Sequential(*modules) + + def enable_input_require_grads(self): + + def make_inputs_require_grad(module, input, output): + output.requires_grad_(True) + + self.model.register_forward_hook(make_inputs_require_grad) + + def _set_gradient_checkpointing(self, module, value=False): + if isinstance(module, ProjectorModel): + module.gradient_checkpointing = value + + def forward(self, x): + if self.gradient_checkpointing and self.training: + layer_outputs = torch.utils.checkpoint.checkpoint(self.model, x) + else: + layer_outputs = self.model(x) + return layer_outputs diff --git a/data/xtuner/xtuner/model/orpo.py b/data/xtuner/xtuner/model/orpo.py new file mode 100644 index 0000000000000000000000000000000000000000..37264088acd7c852865e0dcd7795796bd8990eeb --- /dev/null +++ b/data/xtuner/xtuner/model/orpo.py @@ -0,0 +1,212 @@ +# ORPO Authors: Jiwoo Hong, Noah Lee, and James Thorne +# Official code: https://github.com/xfactlab/orpo +# Copyright (c) OpenMMLab. All rights reserved. 
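+#
+# Loss sketch (see `odds_ratio_loss` below; log-probs are length-normalised
+# per sequence):
+#
+#     log_odds = (logp_chosen - logp_rejected) \
+#         - (log1p(-exp(logp_chosen)) - log1p(-exp(logp_rejected)))
+#     loss = nll_chosen - beta * logsigmoid(log_odds)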
+import torch +import torch.distributed as dist +import torch.nn.functional as F +from mmengine import MessageHub +from torch import nn + +from xtuner.parallel.sequence import (gather_forward_split_backward, + get_sequence_parallel_group, + get_sequence_parallel_world_size, + split_for_sequence_parallel) +from .sft import SupervisedFinetune + + +class ORPO(SupervisedFinetune): + """ORPO: Monolithic Preference Optimization without Reference Model + https://arxiv.org/abs/2403.07691 + + Args: + beta (float): Weight of the odds_ratio_loss. Defaults to 0.1. + """ + + def __init__(self, *args, beta=0.1, **kwargs): + super().__init__(*args, **kwargs) + self.beta = beta + + def _gather_masked_logits(self, logits, labels, mask): + logits = torch.gather( + logits.log_softmax(-1), dim=2, + index=labels.unsqueeze(2)).squeeze(2) + return logits * mask + + def get_logps( + self, + all_logps, # bs, seqlen + average_log_prob, + loss_mask, # bs, seqlen + ): + all_logps = all_logps[:, :-1].sum(-1) + loss_mask = loss_mask[:, :-1] + + if average_log_prob: # average_log_prob + all_logps = all_logps / loss_mask.sum(-1) + + chosen_logps = all_logps[::2] + rejected_logps = all_logps[1::2] + return chosen_logps, rejected_logps + + def get_var_len_atten_logps(self, all_logps, average_log_prob, loss_mask, + cu_seqlens, attention_mask): + seqlens = (cu_seqlens[1:] - cu_seqlens[:-1]).tolist() + # unpack sequence + unpacked_logps = torch.split(all_logps, seqlens, dim=1) + unpacked_loss_mask = torch.split(loss_mask, seqlens, dim=1) + if attention_mask is not None: + # It indicate that we pad the original sequence, labels, + # position_ids and cumulative_len for sequence parallel if the + # attention_mask is not None. + # We then need to remove the padded segments. + assert False in attention_mask + unpacked_logps = unpacked_logps[:-1] + unpacked_loss_mask = unpacked_loss_mask[:-1] + assert len(unpacked_logps) % 2 == 0 + + def compute_logps(_logps, _mask): + _logps = _logps[:, :-1].sum(-1) + _mask = _mask[:, :-1] + if average_log_prob: + _logps /= _mask.sum(-1) + return _logps + + chosen_logps, rejected_logps = [], [] + for i in range(len(unpacked_logps) // 2): + chosen = unpacked_logps[2 * i] + rejected = unpacked_logps[2 * i + 1] + chosen_mask = unpacked_loss_mask[2 * i] + rejected_mask = unpacked_loss_mask[2 * i + 1] + chosen_logps.append(compute_logps(chosen, chosen_mask)) + rejected_logps.append(compute_logps(rejected, rejected_mask)) + + return (torch.stack(chosen_logps), torch.stack(rejected_logps)) + + def cross_entropy_loss(self, logits, labels): + logits = logits[..., :-1, :].contiguous() + # labels are already shifted, now we need to remove the last dummy label # noqa + labels = labels[..., :-1].contiguous() + # Flatten the tokens + loss_fct = nn.CrossEntropyLoss() + logits = logits.view(-1, logits.shape[-1]) + labels = labels.view(-1) + # Enable model parallelism + labels = labels.to(logits.device) + loss = loss_fct(logits, labels) + return loss + + def odds_ratio_loss( + self, + chosen_logps: torch.FloatTensor, + rejected_logps: torch.FloatTensor, + ): + # modified from https://github.com/huggingface/trl/blob/b031adfdb8708f1f295eab6c3f2cb910e8fe0c23/trl/trainer/orpo_trainer.py#L597 # noqa + # Derived from Eqs. 
(4) and (7) from https://arxiv.org/abs/2403.07691 by using log identities and exp(log(P(y|x))) = P(y|x)  # noqa
+        log_odds = (chosen_logps - rejected_logps) - (
+            torch.log1p(-torch.exp(chosen_logps)) -
+            torch.log1p(-torch.exp(rejected_logps)))
+        ratio = F.logsigmoid(log_odds)
+        ratio = ratio[~torch.isnan(ratio)]  # select valid loss
+        losses = self.beta * ratio
+
+        chosen_rewards = self.beta * chosen_logps
+        rejected_rewards = self.beta * rejected_logps
+
+        return losses, chosen_rewards, rejected_rewards, torch.mean(
+            ratio), torch.mean(log_odds)
+
+    @staticmethod
+    def _split_for_sequence_parallel(data):
+        # attention mask should not be split
+        ARGS_NEED_TO_SPLIT = ('input_ids', 'position_ids', 'labels',
+                              'chosen_rejected_tag')
+        sp_group = get_sequence_parallel_group()
+        for key in ARGS_NEED_TO_SPLIT:
+            val = data.get(key, None)
+            if val is not None:
+                # `dim` is 1 as the shape of tensor is (bs, seq_len, ...)
+                data[key] = split_for_sequence_parallel(
+                    val, dim=1, sp_group=sp_group)
+        return data
+
+    def compute_loss(self, data, data_samples=None):
+        # shift labels first and add a dummy label at the end, to support sequence parallel  # noqa
+        data['labels'] = torch.cat(
+            (data['labels'][:, 1:], torch.zeros_like(data['labels'][:, :1])),
+            dim=1)
+        tmp_label = data['labels'].clone()
+        tmp_label[tmp_label == 0] = -100
+        # loss mask of all tokens in all sp ranks
+        all_loss_mask = data['labels'] != -100
+
+        if self.use_varlen_attn:
+            # create a chosen rejected tag for varlen_attn ce loss
+            message_hub = MessageHub.get_instance('varlen_attn_args')
+            rank = dist.get_rank()
+            cu_seqlens = message_hub.get_info(f'cumulative_len_rank_{rank}')
+            seqlens = (cu_seqlens[1:] - cu_seqlens[:-1]).tolist()
+
+            chosen_rejected_tag = torch.ones_like(data['labels'])
+            unpacked_tag = list(
+                torch.split(chosen_rejected_tag, seqlens, dim=1))
+            for i in range(len(unpacked_tag) // 2):
+                unpacked_tag[2 * i + 1] *= 0
+            chosen_rejected_tag = torch.cat(unpacked_tag, dim=1)
+            data['chosen_rejected_tag'] = chosen_rejected_tag
+
+        if get_sequence_parallel_world_size() > 1:
+            data = self._split_for_sequence_parallel(data)
+        chosen_rejected_tag = data.pop('chosen_rejected_tag', None)
+        all_logits = self.llm(**data).logits
+
+        labels = data['labels'].clone()
+        labels[labels == -100] = 0
+        loss_mask = labels != 0  # loss mask in a single sp rank
+        all_logps = self._gather_masked_logits(all_logits, labels, loss_mask)
+        if get_sequence_parallel_world_size() > 1:
+            all_logps = gather_forward_split_backward(
+                all_logps,
+                dim=1,
+                sp_group=get_sequence_parallel_group(),
+                grad_scale='up')
+
+        if not self.use_varlen_attn:
+            chosen_nll_loss = self.cross_entropy_loss(all_logits[::2],
+                                                      data['labels'][::2])
+            chosen_logps, rejected_logps = self.get_logps(
+                all_logps, True, all_loss_mask)
+        else:
+            chosen_idxs = chosen_rejected_tag == 1
+            chosen_logits = all_logits[chosen_idxs]
+            chosen_labels = data['labels'][chosen_idxs]
+            chosen_nll_loss = self.cross_entropy_loss(chosen_logits,
+                                                      chosen_labels)
+
+            chosen_logps, rejected_logps = self.get_var_len_atten_logps(
+                all_logps, True, all_loss_mask, cu_seqlens,
+                data['attention_mask'])
+        (losses, chosen_rewards, rejected_rewards, log_odds_ratio,
+         log_odds_chosen) = self.odds_ratio_loss(chosen_logps, rejected_logps)
+        losses = losses.mean()
+        # skip nan loss
+        if torch.isnan(chosen_nll_loss):
+            chosen_nll_loss = all_logits.mean() * 0
+        if torch.isnan(losses):
+            losses = all_logits.mean() * 0
+        loss = chosen_nll_loss -
losses + + reward_acc = (chosen_rewards > rejected_rewards).float().mean() + + loss_dict = { + 'loss': loss, + 'chosen_rewards': chosen_rewards.mean(), + 'rejected_rewards': rejected_rewards.mean(), + 'reward_acc': reward_acc, + 'reward_margin': (chosen_rewards - rejected_rewards).mean(), + 'log_odds_ratio': log_odds_ratio, + 'log_odds_chosen': log_odds_chosen, + 'nll_loss': chosen_nll_loss.detach().mean() + } + return loss_dict diff --git a/data/xtuner/xtuner/model/reward.py b/data/xtuner/xtuner/model/reward.py new file mode 100644 index 0000000000000000000000000000000000000000..6bc203daa8ceb5d15be11ed6a37aa9676aa6d32d --- /dev/null +++ b/data/xtuner/xtuner/model/reward.py @@ -0,0 +1,490 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import json +import math +import os +import warnings +from collections import OrderedDict +from contextlib import nullcontext + +import torch +import torch.distributed as dist +from mmengine import print_log +from mmengine.config import Config, ConfigDict +from mmengine.model import BaseModel +from mmengine.runner import load_checkpoint +from peft import get_peft_model, prepare_model_for_kbit_training +from torch import nn +from transformers import (AutoConfig, AutoModelForSequenceClassification, + PreTrainedModel, PreTrainedTokenizer) +from transformers.dynamic_module_utils import get_class_from_dynamic_module +from transformers.integrations import is_deepspeed_zero3_enabled +from transformers.modeling_utils import no_init_weights + +from xtuner.parallel.sequence import (gather_forward_split_backward, + get_sequence_parallel_group, + get_sequence_parallel_world_size, + split_for_sequence_parallel) +from xtuner.registry import BUILDER +from .modules import dispatch_modules +from .modules.dispatch import SUPPORT_FLASH1, SUPPORT_FLASH2 +from .utils import (LoadWoInit, find_all_linear_names, + get_peft_model_state_dict, make_inputs_require_grad, + traverse_dict) + + +def reduce_mean(tensor): + """"Obtain the mean of tensor on different GPUs.""" + if not (dist.is_available() and dist.is_initialized()): + return tensor + tensor = tensor.clone() + dist.all_reduce(tensor.div_(dist.get_world_size()), op=dist.ReduceOp.SUM) + return tensor + + +def smart_tokenizer_and_embedding_resize( + tokenizer: PreTrainedTokenizer, + model: PreTrainedModel, +): + """Resize embedding.""" + if is_deepspeed_zero3_enabled(): + import deepspeed + + params = [model.get_input_embeddings().weight] + if model.get_output_embeddings( + ) is not None and not model.config.tie_word_embeddings: + params.append(model.get_output_embeddings().weight) + + context_maybe_zero3 = deepspeed.zero.GatheredParameters( + params, modifier_rank=0) + else: + context_maybe_zero3 = nullcontext() + + with context_maybe_zero3: + current_embedding_size = model.get_input_embeddings().weight.size(0) + + if len(tokenizer) > current_embedding_size: + assert isinstance(model.get_output_embeddings(), nn.Linear) + + model.resize_token_embeddings(len(tokenizer), pad_to_multiple_of=64) + with context_maybe_zero3: + num_new_tokens = len(tokenizer) - current_embedding_size + input_embeddings = model.get_input_embeddings().weight.data + output_embeddings = model.get_output_embeddings().weight.data + + input_embeddings_avg = input_embeddings[:-num_new_tokens].mean( + dim=0, keepdim=True) + output_embeddings_avg = output_embeddings[:-num_new_tokens].mean( + dim=0, keepdim=True) + + input_embeddings[-num_new_tokens:] = input_embeddings_avg + output_embeddings[-num_new_tokens:] = output_embeddings_avg + + print_log( + 
f'Resized token embeddings from {current_embedding_size} to ' + f'{len(tokenizer)}.', 'current') + + +class RewardModel(BaseModel): + + def __init__( + self, + llm, + lora=None, + peft_model=None, + use_activation_checkpointing=True, + use_varlen_attn=False, + tokenizer=None, + max_position_embeddings=None, + reward_token_id=None, + loss_type='ranking', + penalty_type='log_barrier', + penalty_weight=0.01, + ): + super().__init__() + with LoadWoInit(): + if isinstance(llm, dict): + llm = self._dispatch_lm_model_cfg(llm, max_position_embeddings) + self.llm = self._build_from_cfg_or_module(llm).model + self.v_head = nn.Linear(self.llm.config.hidden_size, 1, bias=False) + # zero init + self.v_head.weight.data.zero_() + + self.reward_token_id = reward_token_id + assert loss_type in ('ranking', + 'focal'), f'Unsupported loss type {loss_type}' + self.loss_type = loss_type + assert penalty_type in ( + 'log_barrier', 'L2', + 'none'), f'Unsupported penalty type {penalty_type}' + self.penalty_type = penalty_type + self.penalty_weight = penalty_weight + + if tokenizer is not None: + if isinstance(tokenizer, dict): + tokenizer = BUILDER.build(tokenizer) + smart_tokenizer_and_embedding_resize(tokenizer, self.llm) + + self.llm.config.use_cache = False + dispatch_modules(self.llm, use_varlen_attn=use_varlen_attn) + + if use_activation_checkpointing: + # For backward compatibility + if hasattr(self.llm, 'enable_input_require_grads'): + self.llm.enable_input_require_grads() + else: + self.llm.get_input_embeddings().register_forward_hook( + make_inputs_require_grad) + + # enable gradient checkpointing for memory efficiency + self.gradient_checkpointing_enable() + + if isinstance(lora, dict) or isinstance(lora, Config) or isinstance( + lora, ConfigDict): + self.lora = BUILDER.build(lora) + else: + self.lora = lora + self.peft_model = peft_model + self.use_lora = lora is not None + if self.use_lora: + self._prepare_for_lora(peft_model, use_activation_checkpointing) + + self._is_init = True + # Determines whether to calculate attention based on the + # seq_len dimension (use_varlen_attn = False) or the actual length of + # the sequence. 
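+        # E.g. with varlen attention, samples of lengths (3, 2, 4) are packed
+        # into one 9-token row and delimited by cumulative lengths
+        # [0, 3, 5, 9], so attention is computed per sub-sequence rather than
+        # over the padded seq_len dimension.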
+        self.use_varlen_attn = use_varlen_attn
+
+    def gradient_checkpointing_enable(self):
+        self.activation_checkpointing_enable()
+
+    def activation_checkpointing_enable(self):
+        self.llm.gradient_checkpointing_enable()
+
+    def gradient_checkpointing_disable(self):
+        self.activation_checkpointing_disable()
+
+    def activation_checkpointing_disable(self):
+        self.llm.gradient_checkpointing_disable()
+
+    def _prepare_for_lora(self,
+                          peft_model=None,
+                          use_activation_checkpointing=True):
+        self.llm = prepare_model_for_kbit_training(
+            self.llm, use_activation_checkpointing)
+        if self.lora.target_modules is None:
+            modules = find_all_linear_names(self.llm)
+            self.lora.target_modules = modules
+
+        self.llm = get_peft_model(self.llm, self.lora)
+        if peft_model is not None:
+            _ = load_checkpoint(self, peft_model)
+
+    def init_weights(self):
+        pass
+
+    @staticmethod
+    def _prepare_for_long_context_training(cfg, llm_cfg,
+                                           max_position_embeddings):
+        if not hasattr(llm_cfg, 'rope_scaling'):
+            print_log('Current model does not support RoPE scaling.',
+                      'current')
+            return
+
+        current_max_length = getattr(llm_cfg, 'max_position_embeddings', None)
+        if current_max_length and max_position_embeddings > current_max_length:
+            print_log(
+                f'Enlarge max model length from {current_max_length} '
+                f'to {max_position_embeddings}.', 'current')
+            scaling_factor = float(
+                math.ceil(max_position_embeddings / current_max_length))
+        else:
+            print_log(
+                'The input `max_position_embeddings` is smaller than the '
+                'original max length. Consider increasing the input length.',
+                'current')
+            scaling_factor = 1.0
+        cfg.rope_scaling = {'type': 'linear', 'factor': scaling_factor}
+
+        return cfg
+
+    @staticmethod
+    def _prepare_for_flash_attn(cfg, llm_cfg):
+        cls_name = type(llm_cfg).__name__
+        SUPPORT_SDPA_ATTN = ('LlamaConfig', 'GemmaConfig', 'MistralConfig',
+                             'MixtralConfig', 'Qwen2Config', 'Qwen2MoeConfig',
+                             'Starcoder2Config', 'Phi3Config')
+        SUPPORT_FLASH_ATTN2 = ('InternLM2Config', 'LlamaConfig', 'GemmaConfig',
+                               'MistralConfig', 'MixtralConfig', 'Qwen2Config',
+                               'Qwen2MoeConfig', 'Starcoder2Config',
+                               'Phi3Config')
+
+        torch_dtype = torch.bfloat16 if (
+            torch.cuda.is_available() and torch.cuda.is_bf16_supported()) \
+            else torch.float16
+
+        if getattr(cfg, 'attn_implementation', None) is not None:
+            # Flash Attention 2.0 only supports torch.float16 and
+            # torch.bfloat16 dtypes
+            if cfg.attn_implementation == 'flash_attention_2':
+                cfg.torch_dtype = torch_dtype
+        elif SUPPORT_FLASH2 and cls_name in SUPPORT_FLASH_ATTN2:
+            cfg.torch_dtype = torch_dtype
+            cfg.attn_implementation = 'flash_attention_2'
+        elif SUPPORT_FLASH1 and cls_name in SUPPORT_SDPA_ATTN:
+            cfg.attn_implementation = 'sdpa'
+
+        return cfg
+
+    @staticmethod
+    def _prepare_for_qlora_zero3(cfg):
+        if (not is_deepspeed_zero3_enabled()) or (not hasattr(
+                cfg, 'quantization_config')):
+            return cfg
+
+        torch_dtype = torch.bfloat16 if (
+            torch.cuda.is_available() and torch.cuda.is_bf16_supported()) \
+            else torch.float16
+
+        cfg.torch_dtype = torch_dtype
+        quantization_config = cfg.quantization_config
+        quantization_config.bnb_4bit_compute_dtype = torch_dtype
+        quantization_config.bnb_4bit_quant_storage = torch_dtype
+
+        return cfg
+
+    def _dispatch_lm_model_cfg(self, cfg, max_position_embeddings=None):
+        cfg = self._prepare_for_qlora_zero3(cfg)
+        pretrained_model_name_or_path = cfg.pretrained_model_name_or_path
+        llm_cfg = AutoConfig.from_pretrained(
+            pretrained_model_name_or_path, trust_remote_code=True)
+        cfg =
self._prepare_for_flash_attn(cfg, llm_cfg) + if max_position_embeddings is not None: + cfg = self._prepare_for_long_context_training( + cfg, llm_cfg, max_position_embeddings) + return cfg + + def _build_from_cfg_or_module(self, cfg_or_mod): + if isinstance(cfg_or_mod, nn.Module): + return cfg_or_mod + elif isinstance(cfg_or_mod, dict): + traverse_dict(cfg_or_mod) + return BUILDER.build(cfg_or_mod) + else: + raise NotImplementedError + + def forward(self, data, data_samples=None, mode='loss'): + labels = data.pop('labels', None) + if mode == 'loss': + return self.compute_loss(data, labels) + elif mode == 'predict': + return self.predict(data, data_samples) + elif mode == 'tensor': + return self._forward(data, data_samples) + else: + raise NotImplementedError + + def _forward(self, data, data_samples=None): + hidden_states = self.llm(**data)[0] + logits = self.v_head(hidden_states) + return logits + + def predict(self, data, data_samples=None): + hidden_states = self.llm(**data)[0] + logits = self.v_head(hidden_states) + logits_dict = [{'logits': log} for log in logits] + return logits_dict + + @staticmethod + def _split_for_sequence_parallel(data): + # attention mask should not be split + ARGS_NEED_TO_SPLIT = ('input_ids', 'position_ids') + sp_group = get_sequence_parallel_group() + for key in ARGS_NEED_TO_SPLIT: + val = data.get(key, None) + if val is not None: + # `dim` is 1 as the shape of tensor is (bs, seq_len, ...) + data[key] = split_for_sequence_parallel( + val, dim=1, sp_group=sp_group) + return data + + def compute_loss(self, data, labels=None): + if get_sequence_parallel_world_size() > 1: + data = self._split_for_sequence_parallel(data) + + hidden_states = self.llm(**data)[0] + logits = self.v_head(hidden_states) + + if get_sequence_parallel_world_size() > 1: + logits = gather_forward_split_backward( + logits, + dim=1, + sp_group=get_sequence_parallel_group(), + grad_scale='up') + + chosen_idx = torch.where(labels == 0) + rejected_idx = torch.where(labels == 1) + chosen_logits = logits[chosen_idx] + rejected_logits = logits[rejected_idx] + + num_samples = torch.tensor(len(chosen_logits)).float().to( + hidden_states.device) + avg_factor = 1.0 / num_samples + avg_factor = reduce_mean(avg_factor).to(hidden_states.device) + + chosen_mean = reduce_mean(chosen_logits.mean().detach()) + rejected_mean = reduce_mean(rejected_logits.mean().detach()) + acc = reduce_mean( + (chosen_logits > rejected_logits).sum() / num_samples).detach() + num_tokens = torch.tensor(labels.shape[1]).float() + + # ranking loss + if self.loss_type == 'ranking': + rank_loss = self.ranking_loss( + chosen_logits, rejected_logits, avg_factor=avg_factor) + elif self.loss_type == 'focal': + rank_loss = self.focal_loss( + chosen_logits, rejected_logits, avg_factor=avg_factor) + else: + raise NotImplementedError( + f'Unsupported loss type {self.loss_type}') + + # penalty loss + if self.penalty_type == 'log_barrier': + penalty = self.log_barrier_penalty( + torch.cat([chosen_logits, rejected_logits]), + lower_bound=-5, + upper_bound=5, + avg_factor=avg_factor) + elif self.penalty_type == 'L2': + penalty = self.l2_penalty( + torch.cat([chosen_logits, rejected_logits]), + avg_factor=avg_factor) + elif self.penalty_type == 'none': + penalty = 0 + else: + raise NotImplementedError( + f'Unsupported penalty type {self.penalty_type}') + + loss = rank_loss + self.penalty_weight * penalty + loss_dict = { + 'loss': loss, + 'acc': acc, + 'chosen_score_mean': chosen_mean, + 'rejected_score_mean': rejected_mean, + 'num_samples': 
num_samples, + 'num_tokens': num_tokens, + } + + return loss_dict + + def ranking_loss(self, chosen_logits, rejected_logits, avg_factor): + rank_loss = -nn.functional.logsigmoid(chosen_logits - rejected_logits) + return rank_loss.sum() * avg_factor + + def focal_loss(self, chosen_logits, rejected_logits, avg_factor): + # focal ranking loss from InternLM2 paper https://arxiv.org/abs/2403.17297 # noqa + rank_loss = -nn.functional.logsigmoid(chosen_logits - rejected_logits) + p_ij = torch.sigmoid(chosen_logits - rejected_logits) + p = 2 * torch.relu(p_ij - 0.5) + gamma = 2 + focal_loss = ((1 - p)**gamma) * rank_loss + return focal_loss.sum() * avg_factor + + def log_barrier_penalty(self, + logits, + lower_bound, + upper_bound, + epsilon=1e-3, + avg_factor=1): + # log barrier penalty from InternLM2 paper https://arxiv.org/abs/2403.17297 # noqa + logits_fp32 = logits.float() + logits_clamped = torch.clamp(logits_fp32, lower_bound + epsilon, + upper_bound - epsilon) + penalty = -torch.log(upper_bound - logits_clamped) - torch.log( + logits_clamped - lower_bound) + return penalty.sum() * avg_factor + + def l2_penalty(self, logits, avg_factor=1): + return (logits**2).sum() * avg_factor + + def state_dict(self, *args, **kwargs): + state_dict = super().state_dict(*args, **kwargs) + if not self.use_lora: + return state_dict + to_return = get_peft_model_state_dict(self.llm, state_dict=state_dict) + return OrderedDict(to_return) + + def __getattr__(self, name: str): + try: + return super().__getattr__(name) + except AttributeError: + return getattr(self.llm, name) + + def to_hf(self, + cfg, + save_dir, + fp32=False, + save_pretrained_kwargs={}, + **kwargs): + print(f'Saving LLM tokenizer to {save_dir}') + tokenizer = BUILDER.build(cfg.tokenizer) + tokenizer.save_pretrained(save_dir) + + if 'PeftModel' in self.llm.__class__.__name__: + # merge adapter + self.llm = self.llm.merge_and_unload() + if 'InternLM2' in self.llm.__class__.__name__: + from xtuner.tools.model_converters.modeling_internlm2_reward.modeling_internlm2 import \ + InternLM2ForRewardModel # noqa + print(f'Saving Reward Model to {save_dir}') + hf_cfg = self.llm.config + hf_cfg.reward_token_id = self.reward_token_id if \ + self.reward_token_id is not None else cfg.reward_token_id + if not fp32: + dtype = torch.float16 + else: + dtype = torch.float32 + with no_init_weights(): + reward_model = InternLM2ForRewardModel._from_config( + hf_cfg, torch_dtype=dtype) + reward_model.model.load_state_dict(self.llm.state_dict()) + reward_model.v_head.load_state_dict(self.v_head.state_dict()) + reward_model.save_pretrained(save_dir, **save_pretrained_kwargs) + # fix auto_map in config + with open(os.path.join(save_dir, 'config.json')) as fp: + config_dict = json.load(fp) + config_dict['auto_map'][ + 'AutoModel'] = 'modeling_internlm2.InternLM2ForRewardModel' + config_dict['auto_map'].pop('AutoModelForCausalLM', None) + with open(os.path.join(save_dir, 'config.json'), 'w') as fp: + json.dump(config_dict, fp, indent=2) + else: + warnings.warn( + f'The pretrained model type: {self.llm.__class__.__name__} ' + 'has no reward model class defined. Use ' + 'the SequenceClassification class instead.' 
+ 'You can refer to `xtuner/tools/model_converters/modeling_internlm2_reward` ' # noqa + 'to implement the reward model class.') + + hf_cfg = self.llm.config + hf_cfg.num_labels = 1 # set the output dim to 1 + try: + with no_init_weights(): + reward_model = \ + AutoModelForSequenceClassification.from_config(hf_cfg) + except Exception as e: + warnings.warn(f'Cannot find SequenceClassification class ' + f'from transformers: {e}, \n' + 'try to find it in the dynamic module.') + module_file, causal_model_name = hf_cfg.auto_map[ + 'AutoModelForCausalLM'].split('.') + seqcls_model_name = causal_model_name.split( + 'For')[0] + 'ForSequenceClassification' + seqcls_class = get_class_from_dynamic_module( + f'{module_file}.{seqcls_model_name}', hf_cfg._name_or_path) + with no_init_weights(): + reward_model = seqcls_class(hf_cfg) + reward_model.model.load_state_dict(self.llm.state_dict()) + reward_model.score.load_state_dict(self.v_head.state_dict()) + reward_model.save_pretrained(save_dir, **save_pretrained_kwargs) diff --git a/data/xtuner/xtuner/model/sft.py b/data/xtuner/xtuner/model/sft.py new file mode 100644 index 0000000000000000000000000000000000000000..5229504891b3d921286ef0106c84ebd1349e378e --- /dev/null +++ b/data/xtuner/xtuner/model/sft.py @@ -0,0 +1,336 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import math +from collections import OrderedDict +from contextlib import nullcontext + +import torch +from mmengine import print_log +from mmengine.config import Config, ConfigDict +from mmengine.model import BaseModel +from mmengine.runner import load_checkpoint +from peft import get_peft_model, prepare_model_for_kbit_training +from torch import nn +from transformers import AutoConfig, PreTrainedModel, PreTrainedTokenizer +from transformers.integrations import is_deepspeed_zero3_enabled + +from xtuner.parallel.sequence import (get_sequence_parallel_group, + get_sequence_parallel_world_size, + reduce_sequence_parallel_loss, + split_for_sequence_parallel) +from xtuner.registry import BUILDER +from .modules import dispatch_modules +from .modules.dispatch import SUPPORT_FLASH1, SUPPORT_FLASH2 +from .utils import (LoadWoInit, find_all_linear_names, + get_peft_model_state_dict, make_inputs_require_grad, + traverse_dict) + + +def smart_tokenizer_and_embedding_resize( + tokenizer: PreTrainedTokenizer, + model: PreTrainedModel, +): + """Resize embedding.""" + if is_deepspeed_zero3_enabled(): + import deepspeed + + params = [model.get_input_embeddings().weight] + if model.get_output_embeddings( + ) is not None and not model.config.tie_word_embeddings: + params.append(model.get_output_embeddings().weight) + + context_maybe_zero3 = deepspeed.zero.GatheredParameters( + params, modifier_rank=0) + else: + context_maybe_zero3 = nullcontext() + + with context_maybe_zero3: + current_embedding_size = model.get_input_embeddings().weight.size(0) + + if len(tokenizer) > current_embedding_size: + assert isinstance(model.get_output_embeddings(), nn.Linear) + + model.resize_token_embeddings(len(tokenizer), pad_to_multiple_of=64) + with context_maybe_zero3: + num_new_tokens = len(tokenizer) - current_embedding_size + input_embeddings = model.get_input_embeddings().weight.data + output_embeddings = model.get_output_embeddings().weight.data + + input_embeddings_avg = input_embeddings[:-num_new_tokens].mean( + dim=0, keepdim=True) + output_embeddings_avg = output_embeddings[:-num_new_tokens].mean( + dim=0, keepdim=True) + + input_embeddings[-num_new_tokens:] = input_embeddings_avg + 
output_embeddings[-num_new_tokens:] = output_embeddings_avg + + print_log( + f'Resized token embeddings from {current_embedding_size} to ' + f'{len(tokenizer)}.', 'current') + + +class SupervisedFinetune(BaseModel): + + def __init__(self, + llm, + lora=None, + peft_model=None, + use_activation_checkpointing=True, + use_varlen_attn=False, + tokenizer=None, + max_position_embeddings=None): + super().__init__() + + self.llm = self.build_llm_from_cfg(llm, use_varlen_attn, + max_position_embeddings) + + if tokenizer is not None: + if isinstance(tokenizer, dict): + tokenizer = BUILDER.build(tokenizer) + smart_tokenizer_and_embedding_resize(tokenizer, self.llm) + + self.llm.config.use_cache = False + if use_activation_checkpointing: + # For backward compatibility + if hasattr(self.llm, 'enable_input_require_grads'): + self.llm.enable_input_require_grads() + else: + self.llm.get_input_embeddings().register_forward_hook( + make_inputs_require_grad) + + # enable gradient checkpointing for memory efficiency + self.gradient_checkpointing_enable() + + if isinstance(lora, dict) or isinstance(lora, Config) or isinstance( + lora, ConfigDict): + self.lora = BUILDER.build(lora) + else: + self.lora = lora + self.peft_model = peft_model + self.use_lora = lora is not None + if self.use_lora: + self._prepare_for_lora(peft_model, use_activation_checkpointing) + + self._is_init = True + # Determines whether to calculate attention based on the + # seq_len dimension (use_varlen_attn = False) or the actual length of + # the sequence. + self.use_varlen_attn = use_varlen_attn + + def build_llm_from_cfg(self, llm_cfg, use_varlen_attn, + max_position_embeddings): + # For forward + with LoadWoInit(): + if isinstance(llm_cfg, dict): + llm = self._dispatch_lm_model_cfg(llm_cfg, + max_position_embeddings) + llm = self._build_from_cfg_or_module(llm) + + llm.config.use_cache = False + dispatch_modules(llm, use_varlen_attn=use_varlen_attn) + return llm + + def gradient_checkpointing_enable(self): + self.activation_checkpointing_enable() + + def activation_checkpointing_enable(self): + self.llm.gradient_checkpointing_enable() + + def gradient_checkpointing_disable(self): + self.activation_checkpointing_disable() + + def activation_checkpointing_disable(self): + self.llm.gradient_checkpointing_disable() + + def _prepare_for_lora(self, + peft_model=None, + use_activation_checkpointing=True): + self.llm = prepare_model_for_kbit_training( + self.llm, use_activation_checkpointing) + if self.lora.target_modules is None: + modules = find_all_linear_names(self.llm) + self.lora.target_modules = modules + + self.llm = get_peft_model(self.llm, self.lora) + if peft_model is not None: + _ = load_checkpoint(self, peft_model) + + def init_weights(self): + pass + + @staticmethod + def _prepare_for_long_context_training(cfg, llm_cfg, + max_position_embeddings): + if not hasattr(llm_cfg, 'rope_scaling'): + print_log('Current model does not support RoPE scaling.', + 'current') + return + + current_max_length = getattr(llm_cfg, 'max_position_embeddings', None) + if current_max_length and max_position_embeddings > current_max_length: + print_log( + f'Enlarge max model length from {current_max_length} ' + f'to {max_position_embeddings}.', 'current') + scaling_factor = float( + math.ceil(max_position_embeddings / current_max_length)) + else: + print_log( + 'The input `max_position_embeddings` is smaller than ' + 'origin max length. 
Consider increasing the input length.',
+                'current')
+            scaling_factor = 1.0
+        cfg.rope_scaling = {'type': 'linear', 'factor': scaling_factor}
+
+        return cfg
+
+    @staticmethod
+    def _prepare_for_flash_attn(cfg, llm_cfg):
+        cls_name = type(llm_cfg).__name__
+        SUPPORT_SDPA_ATTN = ('LlamaConfig', 'GemmaConfig', 'MistralConfig',
+                             'MixtralConfig', 'Qwen2Config', 'Qwen2MoeConfig',
+                             'Starcoder2Config', 'Phi3Config')
+        SUPPORT_FLASH_ATTN2 = ('InternLM2Config', 'LlamaConfig', 'GemmaConfig',
+                               'MistralConfig', 'MixtralConfig', 'Qwen2Config',
+                               'Qwen2MoeConfig', 'Starcoder2Config',
+                               'Phi3Config', 'DeepseekV2Config')
+
+        torch_dtype = torch.bfloat16 if (
+            torch.cuda.is_available() and torch.cuda.is_bf16_supported()) \
+            else torch.float16
+
+        if getattr(cfg, 'attn_implementation', None) is not None:
+            # Flash Attention 2.0 only supports torch.float16 and
+            # torch.bfloat16 dtypes
+            if cfg.attn_implementation == 'flash_attention_2':
+                cfg.torch_dtype = torch_dtype
+        elif SUPPORT_FLASH2 and cls_name in SUPPORT_FLASH_ATTN2:
+            cfg.torch_dtype = torch_dtype
+            cfg.attn_implementation = 'flash_attention_2'
+        elif SUPPORT_FLASH1 and cls_name in SUPPORT_SDPA_ATTN:
+            cfg.attn_implementation = 'sdpa'
+
+        return cfg
+
+    @staticmethod
+    def _prepare_for_qlora_zero3(cfg):
+        if (not is_deepspeed_zero3_enabled()) or (not hasattr(
+                cfg, 'quantization_config')):
+            return cfg
+
+        torch_dtype = torch.bfloat16 if (
+            torch.cuda.is_available() and torch.cuda.is_bf16_supported()) \
+            else torch.float16
+
+        cfg.torch_dtype = torch_dtype
+        quantization_config = cfg.quantization_config
+        quantization_config.bnb_4bit_compute_dtype = torch_dtype
+        quantization_config.bnb_4bit_quant_storage = torch_dtype
+
+        return cfg
+
+    def _dispatch_lm_model_cfg(self, cfg, max_position_embeddings=None):
+        cfg = self._prepare_for_qlora_zero3(cfg)
+        pretrained_model_name_or_path = cfg.pretrained_model_name_or_path
+        llm_cfg = AutoConfig.from_pretrained(
+            pretrained_model_name_or_path, trust_remote_code=True)
+        cfg = self._prepare_for_flash_attn(cfg, llm_cfg)
+        if max_position_embeddings is not None:
+            cfg = self._prepare_for_long_context_training(
+                cfg, llm_cfg, max_position_embeddings)
+        return cfg
+
+    def _build_from_cfg_or_module(self, cfg_or_mod):
+        if isinstance(cfg_or_mod, nn.Module):
+            return cfg_or_mod
+        elif isinstance(cfg_or_mod, dict):
+            traverse_dict(cfg_or_mod)
+            return BUILDER.build(cfg_or_mod)
+        else:
+            raise NotImplementedError
+
+    def forward(self, data, data_samples=None, mode='loss'):
+
+        if mode == 'loss':
+            return self.compute_loss(data, data_samples)
+        elif mode == 'predict':
+            return self.predict(data, data_samples)
+        elif mode == 'tensor':
+            return self._forward(data, data_samples)
+        else:
+            raise NotImplementedError
+
+    def _forward(self, data, data_samples=None):
+
+        outputs = self.llm(**data)
+
+        return outputs
+
+    def predict(self, data, data_samples=None):
+        outputs = self.llm(**data)
+        logits_dict = [{'logits': logits} for logits in outputs.logits]
+        return logits_dict
+
+    @staticmethod
+    def _split_for_sequence_parallel(data):
+        # attention mask should not be split
+        ARGS_NEED_TO_SPLIT = ('input_ids', 'labels', 'position_ids')
+        sp_group = get_sequence_parallel_group()
+        for key in ARGS_NEED_TO_SPLIT:
+            val = data.get(key, None)
+            if val is not None:
+                # `dim` is 1 as the shape of tensor is (bs, seq_len, ...)
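+                # e.g. with a sequence-parallel world size of 4, a (2, 8192)
+                # `input_ids` tensor becomes a (2, 2048) shard on each rank;
+                # the attention mask is deliberately left whole (see above)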
+ data[key] = split_for_sequence_parallel( + val, dim=1, sp_group=sp_group) + return data + + def _compute_sequence_parallel_loss(self, data): + data = self._split_for_sequence_parallel(data) + outputs = self.llm(**data) + labels = data['labels'] + num_tokens = (labels != -100).sum() + sp_group = get_sequence_parallel_group() + loss = reduce_sequence_parallel_loss(outputs.loss, num_tokens, + sp_group) + return {'loss': loss} + + def compute_loss(self, data, data_samples=None): + if get_sequence_parallel_world_size() > 1: + return self._compute_sequence_parallel_loss(data) + else: + outputs = self.llm(**data) + loss_dict = {'loss': outputs.loss} + return loss_dict + + def state_dict(self, *args, **kwargs): + state_dict = super().state_dict(*args, **kwargs) + if not self.use_lora: + return state_dict + to_return = get_peft_model_state_dict(self.llm, state_dict=state_dict) + return OrderedDict(to_return) + + def __getattr__(self, name: str): + try: + return super().__getattr__(name) + except AttributeError: + return getattr(self.llm, name) + + def to_hf(self, + cfg, + save_dir, + fp32=False, + save_pretrained_kwargs={}, + **kwargs): + self.llm.config.use_cache = True + if not fp32: + print_log('Convert LLM to float16', 'current') + self.llm.half() + if self.use_lora: + print_log(f'Saving adapter to {save_dir}', 'current') + else: + print_log(f'Saving LLM tokenizer to {save_dir}', 'current') + tokenizer = BUILDER.build(cfg.tokenizer) + tokenizer.save_pretrained(save_dir) + print_log(f'Saving LLM to {save_dir}', 'current') + self.llm.save_pretrained(save_dir, **save_pretrained_kwargs) + self.llm.config.use_cache = False diff --git a/data/xtuner/xtuner/model/transformers_models/__init__.py b/data/xtuner/xtuner/model/transformers_models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..71f7ea1d42e34a3fa6b4239b86a468c2e7727b14 --- /dev/null +++ b/data/xtuner/xtuner/model/transformers_models/__init__.py @@ -0,0 +1,8 @@ +from .deepseek_v2 import (DeepseekTokenizerFast, DeepseekV2Config, + DeepseekV2ForCausalLM, DeepseekV2Model) +from .mixtral import MixtralConfig, MixtralForCausalLM, MixtralModel + +__all__ = [ + 'DeepseekTokenizerFast', 'DeepseekV2Config', 'DeepseekV2ForCausalLM', + 'DeepseekV2Model', 'MixtralConfig', 'MixtralForCausalLM', 'MixtralModel' +] diff --git a/data/xtuner/xtuner/model/transformers_models/deepseek_v2/__init__.py b/data/xtuner/xtuner/model/transformers_models/deepseek_v2/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6a74b483ca374f0b50c9e3a5e536e54aa671cca4 --- /dev/null +++ b/data/xtuner/xtuner/model/transformers_models/deepseek_v2/__init__.py @@ -0,0 +1,8 @@ +from .configuration_deepseek import DeepseekV2Config +from .modeling_deepseek import DeepseekV2ForCausalLM, DeepseekV2Model +from .tokenization_deepseek_fast import DeepseekTokenizerFast + +__all__ = [ + 'DeepseekV2ForCausalLM', 'DeepseekV2Model', 'DeepseekV2Config', + 'DeepseekTokenizerFast' +] diff --git a/data/xtuner/xtuner/model/transformers_models/deepseek_v2/configuration_deepseek.py b/data/xtuner/xtuner/model/transformers_models/deepseek_v2/configuration_deepseek.py new file mode 100644 index 0000000000000000000000000000000000000000..daaddcf4922fcfe3617040da2717ee912a10f123 --- /dev/null +++ b/data/xtuner/xtuner/model/transformers_models/deepseek_v2/configuration_deepseek.py @@ -0,0 +1,219 @@ +from transformers.configuration_utils import PretrainedConfig +from transformers.utils import logging + +logger = logging.get_logger(__name__) + 
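+# Illustrative sketch (editor's addition, not in the upstream file): the two
+# fields introduced by this vendored copy are used roughly as follows; the
+# numbers are arbitrary and only demonstrate the divisibility constraint that
+# DeepseekV2MoEShard asserts:
+#
+#     cfg = DeepseekV2Config(n_routed_experts=64, num_experts_per_tok=6,
+#                            moe_implementation='shard',
+#                            expert_in_one_shard=8)
+#     assert cfg.n_routed_experts % cfg.expert_in_one_shard == 0
+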
+DEEPSEEK_PRETRAINED_CONFIG_ARCHIVE_MAP = {}
+
+
+# Compared to the original version, two parameters, `moe_implementation` and
+# `expert_in_one_shard`, have been added.
+class DeepseekV2Config(PretrainedConfig):
+    r"""
+    This is the configuration class to store the configuration of a [`DeepseekV2Model`]. It is used to instantiate a DeepSeek
+    model according to the specified arguments, defining the model architecture. Instantiating a configuration with the
+    defaults will yield a similar configuration to that of DeepSeek-V2.
+
+    Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the
+    documentation from [`PretrainedConfig`] for more information.
+
+
+    Args:
+        vocab_size (`int`, *optional*, defaults to 102400):
+            Vocabulary size of the DeepSeek model. Defines the number of different tokens that can be represented by the
+            `input_ids` passed when calling [`DeepseekV2Model`]
+        hidden_size (`int`, *optional*, defaults to 4096):
+            Dimension of the hidden representations.
+        intermediate_size (`int`, *optional*, defaults to 11008):
+            Dimension of the MLP representations.
+        moe_intermediate_size (`int`, *optional*, defaults to 1407):
+            Dimension of the MoE representations.
+        num_hidden_layers (`int`, *optional*, defaults to 30):
+            Number of hidden layers in the Transformer decoder.
+        num_attention_heads (`int`, *optional*, defaults to 32):
+            Number of attention heads for each attention layer in the Transformer decoder.
+        n_shared_experts (`int`, *optional*, defaults to None):
+            Number of shared experts, None means dense model.
+        n_routed_experts (`int`, *optional*, defaults to None):
+            Number of routed experts, None means dense model.
+        routed_scaling_factor (`float`, *optional*, defaults to 1.0):
+            Scaling factor for routed experts.
+        topk_method (`str`, *optional*, defaults to `gready`):
+            Topk method used in the routed gate.
+        n_group (`int`, *optional*, defaults to None):
+            Number of groups for routed experts.
+        topk_group (`int`, *optional*, defaults to None):
+            Number of selected groups for each token (for each token, ensuring the selected experts are only within `topk_group` groups).
+        num_experts_per_tok (`int`, *optional*, defaults to None):
+            Number of selected experts, None means dense model.
+        moe_layer_freq (`int`, *optional*, defaults to 1):
+            The frequency of the MoE layer: one expert layer for every `moe_layer_freq - 1` dense layers.
+        first_k_dense_replace (`int`, *optional*, defaults to 0):
+            Number of dense layers in shallow layers (embed->dense->dense->...->dense->moe->moe...->lm_head).
+                                                      \--k dense layers--/
+        norm_topk_prob (`bool`, *optional*, defaults to False):
+            Whether to normalize the weights of the routed experts.
+        scoring_func (`str`, *optional*, defaults to 'softmax'):
+            Method of computing expert weights.
+        aux_loss_alpha (`float`, *optional*, defaults to 0.001):
+            Auxiliary loss weight coefficient.
+        seq_aux (`bool`, *optional*, defaults to True):
+            Whether to compute the auxiliary loss for each individual sample.
+        num_key_value_heads (`int`, *optional*):
+            This is the number of key_value heads that should be used to implement Grouped Query Attention. If
+            `num_key_value_heads=num_attention_heads`, the model will use Multi Head Attention (MHA), if
+            `num_key_value_heads=1` the model will use Multi Query Attention (MQA), otherwise GQA is used. When
+            converting a multi-head checkpoint to a GQA checkpoint, each group key and value head should be constructed
+            by meanpooling all the original heads within that group.
For more details checkout [this + paper](https://arxiv.org/pdf/2305.13245.pdf). If it is not specified, will default to + `num_attention_heads`. + hidden_act (`str` or `function`, *optional*, defaults to `"silu"`): + The non-linear activation function (function or string) in the decoder. + max_position_embeddings (`int`, *optional*, defaults to 2048): + The maximum sequence length that this model might ever be used with. + initializer_range (`float`, *optional*, defaults to 0.02): + The standard deviation of the truncated_normal_initializer for initializing all weight matrices. + rms_norm_eps (`float`, *optional*, defaults to 1e-06): + The epsilon used by the rms normalization layers. + use_cache (`bool`, *optional*, defaults to `True`): + Whether or not the model should return the last key/values attentions (not used by all models). Only + relevant if `config.is_decoder=True`. + pad_token_id (`int`, *optional*): + Padding token id. + bos_token_id (`int`, *optional*, defaults to 1): + Beginning of stream token id. + eos_token_id (`int`, *optional*, defaults to 2): + End of stream token id. + pretraining_tp (`int`, *optional*, defaults to 1): + Experimental feature. Tensor parallelism rank used during pretraining. Please refer to [this + document](https://huggingface.co/docs/transformers/parallelism) to understand more about it. This value is + necessary to ensure exact reproducibility of the pretraining results. Please refer to [this + issue](https://github.com/pytorch/pytorch/issues/76232). + tie_word_embeddings (`bool`, *optional*, defaults to `False`): + Whether to tie weight embeddings + rope_theta (`float`, *optional*, defaults to 10000.0): + The base period of the RoPE embeddings. + rope_scaling (`Dict`, *optional*): + Dictionary containing the scaling configuration for the RoPE embeddings. Currently supports two scaling + strategies: linear and dynamic. Their scaling factor must be a float greater than 1. The expected format is + `{"type": strategy name, "factor": scaling factor}`. When using this flag, don't update + `max_position_embeddings` to the expected new maximum. + attention_bias (`bool`, defaults to `False`, *optional*, defaults to `False`): + Whether to use a bias in the query, key, value and output projection layers during self-attention. + attention_dropout (`float`, *optional*, defaults to 0.0): + The dropout ratio for the attention probabilities. + moe_implementation (`str`, *optional*, defaults to 'origin'): + The implementation of the moe blocks. 'origin' or 'shard'. + expert_in_one_shard (`int`, *optional*, defaults to None): + How many expert models are integrated into a shard. 
It is used only + when `moe_implementation` == 'shard' + + ```python + >>> from transformers import DeepseekV2Model, DeepseekV2Config + + >>> # Initializing a Deepseek-V2 style configuration + >>> configuration = DeepseekV2Config() + + >>> # Accessing the model configuration + >>> configuration = model.config + ```""" + + model_type = 'deepseek_v2' + keys_to_ignore_at_inference = ['past_key_values'] + + def __init__( + self, + vocab_size=102400, + hidden_size=4096, + intermediate_size=11008, + moe_intermediate_size=1407, + num_hidden_layers=30, + num_attention_heads=32, + num_key_value_heads=32, + n_shared_experts=None, + n_routed_experts=None, + ep_size=1, + routed_scaling_factor=1.0, + kv_lora_rank=512, + q_lora_rank=1536, + qk_rope_head_dim=64, + v_head_dim=128, + qk_nope_head_dim=128, + topk_method='gready', + n_group=None, + topk_group=None, + num_experts_per_tok=None, + moe_layer_freq=1, + first_k_dense_replace=0, + norm_topk_prob=False, + scoring_func='softmax', + aux_loss_alpha=0.001, + seq_aux=True, + hidden_act='silu', + max_position_embeddings=2048, + initializer_range=0.02, + rms_norm_eps=1e-6, + use_cache=True, + pad_token_id=None, + bos_token_id=100000, + eos_token_id=100001, + pretraining_tp=1, + tie_word_embeddings=False, + rope_theta=10000.0, + rope_scaling=None, + attention_bias=False, + attention_dropout=0.0, + moe_implementation='origin', + expert_in_one_shard=None, + **kwargs, + ): + self.vocab_size = vocab_size + self.max_position_embeddings = max_position_embeddings + self.hidden_size = hidden_size + self.intermediate_size = intermediate_size + self.moe_intermediate_size = moe_intermediate_size + self.num_hidden_layers = num_hidden_layers + self.num_attention_heads = num_attention_heads + self.n_shared_experts = n_shared_experts + self.n_routed_experts = n_routed_experts + self.ep_size = ep_size + self.routed_scaling_factor = routed_scaling_factor + self.kv_lora_rank = kv_lora_rank + self.q_lora_rank = q_lora_rank + self.qk_rope_head_dim = qk_rope_head_dim + self.v_head_dim = v_head_dim + self.qk_nope_head_dim = qk_nope_head_dim + self.topk_method = topk_method + self.n_group = n_group + self.topk_group = topk_group + self.num_experts_per_tok = num_experts_per_tok + self.moe_layer_freq = moe_layer_freq + self.first_k_dense_replace = first_k_dense_replace + self.norm_topk_prob = norm_topk_prob + self.scoring_func = scoring_func + self.aux_loss_alpha = aux_loss_alpha + self.seq_aux = seq_aux + # for backward compatibility + if num_key_value_heads is None: + num_key_value_heads = num_attention_heads + + self.num_key_value_heads = num_key_value_heads + self.hidden_act = hidden_act + self.initializer_range = initializer_range + self.rms_norm_eps = rms_norm_eps + self.pretraining_tp = pretraining_tp + self.use_cache = use_cache + self.rope_theta = rope_theta + self.rope_scaling = rope_scaling + self.attention_bias = attention_bias + self.attention_dropout = attention_dropout + self.moe_implementation = moe_implementation + self.expert_in_one_shard = expert_in_one_shard + + super().__init__( + pad_token_id=pad_token_id, + bos_token_id=bos_token_id, + eos_token_id=eos_token_id, + tie_word_embeddings=tie_word_embeddings, + **kwargs, + ) diff --git a/data/xtuner/xtuner/model/transformers_models/deepseek_v2/modeling_deepseek.py b/data/xtuner/xtuner/model/transformers_models/deepseek_v2/modeling_deepseek.py new file mode 100644 index 0000000000000000000000000000000000000000..f58dd466fa7a4b754df2b5e7b3da8911985d182d --- /dev/null +++ 
b/data/xtuner/xtuner/model/transformers_models/deepseek_v2/modeling_deepseek.py @@ -0,0 +1,2037 @@ +# Copyright 2023 DeepSeek-AI and The HuggingFace Inc. team. All rights reserved. +# +# This code is based on EleutherAI's GPT-NeoX library and the GPT-NeoX +# and OPT implementations in this library. It has been modified from its +# original forms to accommodate minor architectural differences compared +# to GPT-NeoX and OPT used by the Meta AI team that trained the model. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""PyTorch DeepSeek model.""" +import copy +import math +import os +import types +import warnings +from typing import List, Optional, Tuple, Union + +import numpy as np +import torch +import torch.distributed as dist +import torch.nn.functional as F +import torch.utils.checkpoint +from torch import nn +from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss +from transformers.activations import ACT2FN +from transformers.cache_utils import Cache, DynamicCache +from transformers.configuration_utils import PretrainedConfig +from transformers.modeling_attn_mask_utils import ( + AttentionMaskConverter, _prepare_4d_attention_mask, + _prepare_4d_causal_attention_mask, + _prepare_4d_causal_attention_mask_for_sdpa) +from transformers.modeling_outputs import (BaseModelOutputWithPast, + CausalLMOutputWithPast, + SequenceClassifierOutputWithPast) +from transformers.modeling_utils import PreTrainedModel +from transformers.pytorch_utils import (ALL_LAYERNORM_LAYERS, + is_torch_greater_or_equal_than_1_13) +from transformers.utils import (add_start_docstrings, + add_start_docstrings_to_model_forward, + is_flash_attn_2_available, + is_flash_attn_greater_or_equal_2_10, logging, + replace_return_docstrings) +from transformers.utils.import_utils import is_torch_fx_available + +from xtuner.utils import load_state_dict_into_model +from .configuration_deepseek import DeepseekV2Config + +if is_flash_attn_2_available(): + from flash_attn import flash_attn_func, flash_attn_varlen_func + from flash_attn.bert_padding import pad_input # noqa + from flash_attn.bert_padding import index_first_axis, unpad_input + +# This makes `_prepare_4d_causal_attention_mask` a leaf function in the FX graph. +# It means that the function will not be traced through and simply appear as a node in the graph. 
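+# (Editor's note: wrapping is needed because the mask helper branches on
+# tensor values, which torch.fx symbolic tracing cannot follow.)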
+if is_torch_fx_available(): + if not is_torch_greater_or_equal_than_1_13: + import torch.fx + + _prepare_4d_causal_attention_mask = torch.fx.wrap( + _prepare_4d_causal_attention_mask) + +logger = logging.get_logger(__name__) + +_CONFIG_FOR_DOC = 'DeepseekV2Config' + + +def _get_unpad_data(attention_mask): + seqlens_in_batch = attention_mask.sum(dim=-1, dtype=torch.int32) + indices = torch.nonzero(attention_mask.flatten(), as_tuple=False).flatten() + max_seqlen_in_batch = seqlens_in_batch.max().item() + cu_seqlens = F.pad( + torch.cumsum(seqlens_in_batch, dim=0, dtype=torch.torch.int32), (1, 0)) + return ( + indices, + cu_seqlens, + max_seqlen_in_batch, + ) + + +class DeepseekV2RMSNorm(nn.Module): + + def __init__(self, hidden_size, eps=1e-6): + """DeepseekV2RMSNorm is equivalent to T5LayerNorm.""" + super().__init__() + self.weight = nn.Parameter(torch.ones(hidden_size)) + self.variance_epsilon = eps + + def forward(self, hidden_states): + input_dtype = hidden_states.dtype + hidden_states = hidden_states.to(torch.float32) + variance = hidden_states.pow(2).mean(-1, keepdim=True) + hidden_states = hidden_states * torch.rsqrt(variance + + self.variance_epsilon) + return self.weight * hidden_states.to(input_dtype) + + +ALL_LAYERNORM_LAYERS.append(DeepseekV2RMSNorm) + + +class DeepseekV2RotaryEmbedding(nn.Module): + + def __init__(self, + dim, + max_position_embeddings=2048, + base=10000, + device=None): + super().__init__() + + self.dim = dim + self.max_position_embeddings = max_position_embeddings + self.base = base + inv_freq = 1.0 / ( + self.base + **(torch.arange(0, self.dim, 2).float().to(device) / self.dim)) + self.register_buffer('inv_freq', inv_freq, persistent=False) + + # Build here to make `torch.jit.trace` work. + self._set_cos_sin_cache( + seq_len=max_position_embeddings, + device=self.inv_freq.device, + dtype=torch.get_default_dtype(), + ) + self.max_seq_len_cached = None + + def _set_cos_sin_cache(self, seq_len, device, dtype): + self.max_seq_len_cached = seq_len + t = torch.arange( + self.max_seq_len_cached, device=device, dtype=self.inv_freq.dtype) + + freqs = torch.outer(t, self.inv_freq.to(t.device)) + # Different from paper, but it uses a different permutation in order to obtain the same calculation + emb = torch.cat((freqs, freqs), dim=-1) + self.register_buffer( + 'cos_cached', emb.cos().to(dtype), persistent=False) + self.register_buffer( + 'sin_cached', emb.sin().to(dtype), persistent=False) + + def forward(self, x, seq_len=None): + # x: [bs, num_attention_heads, seq_len, head_size] + if self.max_seq_len_cached is None or seq_len > self.max_seq_len_cached: + self._set_cos_sin_cache( + seq_len=seq_len, device=x.device, dtype=x.dtype) + + return ( + self.cos_cached[:seq_len].to(dtype=x.dtype), + self.sin_cached[:seq_len].to(dtype=x.dtype), + ) + + +# Copied from transformers.models.llama.modeling_llama.LlamaLinearScalingRotaryEmbedding with Llama->DeepseekV2 +class DeepseekV2LinearScalingRotaryEmbedding(DeepseekV2RotaryEmbedding): + """DeepseekV2RotaryEmbedding extended with linear scaling. 
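+
+    Position indices are divided by `scaling_factor` before the frequencies
+    are computed (``t = t / self.scaling_factor`` below), stretching the
+    usable context by that factor at the cost of positional resolution.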
+ + Credits to the Reddit user /u/kaiokendev + """ + + def __init__( + self, + dim, + max_position_embeddings=2048, + base=10000, + device=None, + scaling_factor=1.0, + ): + self.scaling_factor = scaling_factor + super().__init__(dim, max_position_embeddings, base, device) + + def _set_cos_sin_cache(self, seq_len, device, dtype): + self.max_seq_len_cached = seq_len + t = torch.arange( + self.max_seq_len_cached, device=device, dtype=self.inv_freq.dtype) + t = t / self.scaling_factor + + freqs = torch.outer(t, self.inv_freq) + # Different from paper, but it uses a different permutation in order to obtain the same calculation + emb = torch.cat((freqs, freqs), dim=-1) + self.register_buffer( + 'cos_cached', emb.cos().to(dtype), persistent=False) + self.register_buffer( + 'sin_cached', emb.sin().to(dtype), persistent=False) + + +# Copied from transformers.models.llama.modeling_llama.LlamaDynamicNTKScalingRotaryEmbedding with Llama->DeepseekV2 +class DeepseekV2DynamicNTKScalingRotaryEmbedding(DeepseekV2RotaryEmbedding): + """DeepseekV2RotaryEmbedding extended with Dynamic NTK scaling. + + Credits to the Reddit users /u/bloc97 and /u/emozilla + """ + + def __init__( + self, + dim, + max_position_embeddings=2048, + base=10000, + device=None, + scaling_factor=1.0, + ): + self.scaling_factor = scaling_factor + super().__init__(dim, max_position_embeddings, base, device) + + def _set_cos_sin_cache(self, seq_len, device, dtype): + self.max_seq_len_cached = seq_len + + if seq_len > self.max_position_embeddings: + base = self.base * ((self.scaling_factor * seq_len / + self.max_position_embeddings) - + (self.scaling_factor - 1))**( + self.dim / (self.dim - 2)) + inv_freq = 1.0 / ( + base + **(torch.arange(0, self.dim, 2).float().to(device) / self.dim)) + self.register_buffer('inv_freq', inv_freq, persistent=False) + + t = torch.arange( + self.max_seq_len_cached, device=device, dtype=self.inv_freq.dtype) + + freqs = torch.outer(t, self.inv_freq) + # Different from paper, but it uses a different permutation in order to obtain the same calculation + emb = torch.cat((freqs, freqs), dim=-1) + self.register_buffer( + 'cos_cached', emb.cos().to(dtype), persistent=False) + self.register_buffer( + 'sin_cached', emb.sin().to(dtype), persistent=False) + + +# Inverse dim formula to find dim based on number of rotations +def yarn_find_correction_dim(num_rotations, + dim, + base=10000, + max_position_embeddings=2048): + return (dim * math.log(max_position_embeddings / + (num_rotations * 2 * math.pi))) / (2 * + math.log(base)) + + +# Find dim range bounds based on rotations +def yarn_find_correction_range(low_rot, + high_rot, + dim, + base=10000, + max_position_embeddings=2048): + low = math.floor( + yarn_find_correction_dim(low_rot, dim, base, max_position_embeddings)) + high = math.ceil( + yarn_find_correction_dim(high_rot, dim, base, max_position_embeddings)) + return max(low, 0), min(high, dim - 1) # Clamp values just in case + + +def yarn_get_mscale(scale=1, mscale=1): + if scale <= 1: + return 1.0 + return 0.1 * mscale * math.log(scale) + 1.0 + + +def yarn_linear_ramp_mask(min, max, dim): + if min == max: + max += 0.001 # Prevent singularity + + linear_func = (torch.arange(dim, dtype=torch.float32) - min) / (max - min) + ramp_func = torch.clamp(linear_func, 0, 1) + return ramp_func + + +class DeepseekV2YarnRotaryEmbedding(DeepseekV2RotaryEmbedding): + + def __init__( + self, + dim, + max_position_embeddings=2048, + base=10000, + device=None, + scaling_factor=1.0, + original_max_position_embeddings=4096, + 
beta_fast=32, + beta_slow=1, + mscale=1, + mscale_all_dim=0, + ): + self.scaling_factor = scaling_factor + self.original_max_position_embeddings = original_max_position_embeddings + self.beta_fast = beta_fast + self.beta_slow = beta_slow + self.mscale = mscale + self.mscale_all_dim = mscale_all_dim + super().__init__(dim, max_position_embeddings, base, device) + + def _set_cos_sin_cache(self, seq_len, device, dtype): + self.max_seq_len_cached = seq_len + dim = self.dim + + freq_extra = 1.0 / ( + self.base**(torch.arange( + 0, dim, 2, dtype=torch.float32, device=device) / dim)) + freq_inter = 1.0 / ( + self.scaling_factor * self.base**(torch.arange( + 0, dim, 2, dtype=torch.float32, device=device) / dim)) + + low, high = yarn_find_correction_range( + self.beta_fast, + self.beta_slow, + dim, + self.base, + self.original_max_position_embeddings, + ) + inv_freq_mask = 1.0 - yarn_linear_ramp_mask(low, high, dim // 2).to( + device=device, dtype=torch.float32) + inv_freq = freq_inter * (1 - + inv_freq_mask) + freq_extra * inv_freq_mask + self.register_buffer('inv_freq', inv_freq, persistent=False) + + t = torch.arange(seq_len, device=device, dtype=torch.float32) + + freqs = torch.outer(t, inv_freq) + + _mscale = float( + yarn_get_mscale(self.scaling_factor, self.mscale) / + yarn_get_mscale(self.scaling_factor, self.mscale_all_dim)) + + emb = torch.cat((freqs, freqs), dim=-1) + self.register_buffer( + 'cos_cached', (emb.cos() * _mscale).to(dtype), persistent=False) + self.register_buffer( + 'sin_cached', (emb.sin() * _mscale).to(dtype), persistent=False) + + +# Copied from transformers.models.llama.modeling_llama.rotate_half +def rotate_half(x): + """Rotates half the hidden dims of the input.""" + x1 = x[..., :x.shape[-1] // 2] + x2 = x[..., x.shape[-1] // 2:] + return torch.cat((-x2, x1), dim=-1) + + +# Copied from transformers.models.llama.modeling_llama.apply_rotary_pos_emb +def apply_rotary_pos_emb(q, k, cos, sin, position_ids, unsqueeze_dim=1): + """Applies Rotary Position Embedding to the query and key tensors. + + Args: + q (`torch.Tensor`): The query tensor. + k (`torch.Tensor`): The key tensor. + cos (`torch.Tensor`): The cosine part of the rotary embedding. + sin (`torch.Tensor`): The sine part of the rotary embedding. + position_ids (`torch.Tensor`): + The position indices of the tokens corresponding to the query and key tensors. For example, this can be + used to pass offsetted position ids when working with a KV-cache. + unsqueeze_dim (`int`, *optional*, defaults to 1): + The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and + sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note + that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and + k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes + cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have + the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2. + Returns: + `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding. 
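+
+    Note (editor): unlike the stock Llama helper, this variant first
+    de-interleaves q and k (the ``view(b, h, s, d // 2, 2).transpose(4, 3)``
+    reshapes below) so that paired even/odd features match the half-split
+    layout expected by `rotate_half`.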
+ """ + cos = cos[position_ids].unsqueeze(unsqueeze_dim) + sin = sin[position_ids].unsqueeze(unsqueeze_dim) + + b, h, s, d = q.shape + q = q.view(b, h, s, d // 2, 2).transpose(4, 3).reshape(b, h, s, d) + + b, h, s, d = k.shape + k = k.view(b, h, s, d // 2, 2).transpose(4, 3).reshape(b, h, s, d) + + q_embed = (q * cos) + (rotate_half(q) * sin) + k_embed = (k * cos) + (rotate_half(k) * sin) + return q_embed, k_embed + + +class DeepseekV2MLP(nn.Module): + + def __init__(self, config, hidden_size=None, intermediate_size=None): + super().__init__() + self.config = config + self.hidden_size = config.hidden_size if hidden_size is None else hidden_size + self.intermediate_size = ( + config.intermediate_size + if intermediate_size is None else intermediate_size) + + self.gate_proj = nn.Linear( + self.hidden_size, self.intermediate_size, bias=False) + self.up_proj = nn.Linear( + self.hidden_size, self.intermediate_size, bias=False) + self.down_proj = nn.Linear( + self.intermediate_size, self.hidden_size, bias=False) + self.act_fn = ACT2FN[config.hidden_act] + + def forward(self, x): + down_proj = self.down_proj( + self.act_fn(self.gate_proj(x)) * self.up_proj(x)) + return down_proj + + +class MoEGate(nn.Module): + + def __init__(self, config): + super().__init__() + self.config = config + self.top_k = config.num_experts_per_tok + self.n_routed_experts = config.n_routed_experts + self.routed_scaling_factor = config.routed_scaling_factor + self.scoring_func = config.scoring_func + self.alpha = config.aux_loss_alpha + self.seq_aux = config.seq_aux + self.topk_method = config.topk_method + self.n_group = config.n_group + self.topk_group = config.topk_group + + # topk selection algorithm + self.norm_topk_prob = config.norm_topk_prob + self.gating_dim = config.hidden_size + self.weight = nn.Parameter( + torch.empty((self.n_routed_experts, self.gating_dim))) + self.reset_parameters() + + def reset_parameters(self) -> None: + import torch.nn.init as init + + init.kaiming_uniform_(self.weight, a=math.sqrt(5)) + + def forward(self, hidden_states): + bsz, seq_len, h = hidden_states.shape + ### compute gating score + hidden_states = hidden_states.view(-1, h) + logits = F.linear( + hidden_states.type(torch.float32), self.weight.type(torch.float32), + None) + if self.scoring_func == 'softmax': + scores = logits.softmax(dim=-1, dtype=torch.float32) + else: + raise NotImplementedError( + f'insupportable scoring function for MoE gating: {self.scoring_func}' + ) + + ### select top-k experts + # fix official typos + if self.topk_method in ('gready', 'greedy'): + topk_weight, topk_idx = torch.topk( + scores, k=self.top_k, dim=-1, sorted=False) + elif self.topk_method == 'group_limited_greedy': + group_scores = (scores.view(bsz * seq_len, self.n_group, + -1).max(dim=-1).values) # [n, n_group] + group_idx = torch.topk( + group_scores, k=self.topk_group, dim=-1, + sorted=False)[1] # [n, top_k_group] + group_mask = torch.zeros_like(group_scores) # [n, n_group] + group_mask.scatter_(1, group_idx, 1) # [n, n_group] + score_mask = (group_mask.unsqueeze(-1).expand( + bsz * seq_len, self.n_group, + self.n_routed_experts // self.n_group).reshape( + bsz * seq_len, -1)) # [n, e] + tmp_scores = scores.masked_fill(~score_mask.bool(), 0.0) # [n, e] + topk_weight, topk_idx = torch.topk( + tmp_scores, k=self.top_k, dim=-1, sorted=False) + + ### norm gate to sum 1 + if self.top_k > 1 and self.norm_topk_prob: + denominator = topk_weight.sum(dim=-1, keepdim=True) + 1e-20 + topk_weight = topk_weight / denominator + else: + topk_weight = 
topk_weight * self.routed_scaling_factor + ### expert-level computation auxiliary loss + if self.training and self.alpha > 0.0: + scores_for_aux = scores + aux_topk = self.top_k + # always compute aux loss based on the naive greedy topk method + topk_idx_for_aux_loss = topk_idx.view(bsz, -1) + if self.seq_aux: + scores_for_seq_aux = scores_for_aux.view(bsz, seq_len, -1) + ce = torch.zeros( + bsz, self.n_routed_experts, device=hidden_states.device) + ce.scatter_add_( + 1, + topk_idx_for_aux_loss, + torch.ones( + bsz, seq_len * aux_topk, device=hidden_states.device), + ).div_(seq_len * aux_topk / self.n_routed_experts) + aux_loss = (ce * scores_for_seq_aux.mean(dim=1)).sum( + dim=1).mean() * self.alpha + else: + mask_ce = F.one_hot( + topk_idx_for_aux_loss.view(-1), + num_classes=self.n_routed_experts) + ce = mask_ce.float().mean(0) + Pi = scores_for_aux.mean(0) + fi = ce * self.n_routed_experts + aux_loss = (Pi * fi).sum() * self.alpha + else: + aux_loss = None + return topk_idx, topk_weight, aux_loss + + +class AddAuxiliaryLoss(torch.autograd.Function): + """The trick function of adding auxiliary (aux) loss, which includes the + gradient of the aux loss during backpropagation.""" + + @staticmethod + def forward(ctx, x, loss): + assert loss.numel() == 1 + ctx.dtype = loss.dtype + ctx.required_aux_loss = loss.requires_grad + return x + + @staticmethod + def backward(ctx, grad_output): + grad_loss = None + if ctx.required_aux_loss: + grad_loss = torch.ones( + 1, dtype=ctx.dtype, device=grad_output.device) + return grad_output, grad_loss + + +class ExpertShard(nn.Module): + + def __init__(self, config, shard_idx, expert_in_one_shard=10): + super().__init__() + hidden_dim = config.hidden_size + ffn_dim = config.moe_intermediate_size + self.w1w3 = nn.Parameter( + torch.empty(expert_in_one_shard, ffn_dim * 2, hidden_dim)) + self.w2 = nn.Parameter( + torch.empty(expert_in_one_shard, hidden_dim, ffn_dim)) + + self.act = nn.SiLU() + self.expert_in_one_shard = expert_in_one_shard + self.shard_idx = shard_idx + + self.reset_parameters() + + def reset_parameters(self) -> None: + # Different from nn.Linear module, weights of self.w1w3 and self.w2 + # can not be initialized by DeepseekV2PreTrainedModel._init_weights method + self.w1w3.data.normal_(0, 0.02) + self.w2.data.normal_(0, 0.02) + + def expert_forward(self, current_state, expert_idx): + w1w3 = self.w1w3[expert_idx] + w2 = self.w2[expert_idx] + gate_up_out = torch.matmul(current_state, w1w3.T) + gate_out, up_out = gate_up_out.chunk(2, dim=-1) + gate_out = self.act(gate_out) + out = gate_out * up_out + out = torch.matmul(out, w2.T) + return out + + def forward(self, hidden_states, flat_topk_idx, y): + for i in range(self.expert_in_one_shard): + expert_idx = i + self.expert_in_one_shard * self.shard_idx + y[flat_topk_idx == expert_idx] = self.expert_forward( + hidden_states[flat_topk_idx == expert_idx], i) + return y + + +class DeepseekV2MoEShard(nn.Module): + """A mixed expert module containing shared experts.""" + + def __init__(self, config): + super().__init__() + self.config = config + self.num_experts_per_tok = config.num_experts_per_tok + + if hasattr(config, 'ep_size') and config.ep_size > 1: + raise NotImplementedError + else: + self.ep_size = 1 + self.experts_per_rank = config.n_routed_experts + self.ep_rank = 0 + self.n_routed_experts = config.n_routed_experts + + expert_in_one_shard = config.expert_in_one_shard + assert config.n_routed_experts % expert_in_one_shard == 0, \ + ('n_routed_experts should be divisible by expert_in_one_shard, 
but got ' + f'n_routed_experts = {config.n_routed_experts} and expert_in_one_shard = {expert_in_one_shard}') + + self.shard_num = config.n_routed_experts // expert_in_one_shard + self.expert_in_one_shard = expert_in_one_shard + self.experts = nn.ModuleList([ + ExpertShard(config, i, self.expert_in_one_shard) + for i in range(self.shard_num) + ]) + + self.gate = MoEGate(config) + if config.n_shared_experts is not None: + intermediate_size = config.moe_intermediate_size * config.n_shared_experts + self.shared_experts = DeepseekV2MLP( + config=config, intermediate_size=intermediate_size) + + def forward(self, hidden_states): + if not self.training: + raise NotImplementedError + + identity = hidden_states + orig_shape = hidden_states.shape + topk_idx, topk_weight, aux_loss = self.gate(hidden_states) + hidden_states = hidden_states.view(-1, hidden_states.shape[-1]) + flat_topk_idx = topk_idx.view(-1) + + hidden_states = hidden_states.repeat_interleave( + self.num_experts_per_tok, dim=0) + y = torch.empty_like(hidden_states) + y_dtype = y.dtype + for shard_index in range(self.shard_num): + y = self.experts[shard_index](hidden_states, flat_topk_idx, y) + y = ((y.view(*topk_weight.shape, -1) * + topk_weight.unsqueeze(-1)).sum(dim=1)).type(y_dtype) + y = y.view(*orig_shape) + y = AddAuxiliaryLoss.apply(y, aux_loss) + + if self.config.n_shared_experts is not None: + y = y + self.shared_experts(identity) + return y + + +class DeepseekV2MoE(nn.Module): + """A mixed expert module containing shared experts.""" + + def __init__(self, config): + super().__init__() + self.config = config + self.num_experts_per_tok = config.num_experts_per_tok + + if hasattr(config, 'ep_size') and config.ep_size > 1: + assert config.ep_size == dist.get_world_size() + self.ep_size = config.ep_size + self.experts_per_rank = config.n_routed_experts // config.ep_size + self.ep_rank = dist.get_rank() + self.experts = nn.ModuleList([ + (DeepseekV2MLP( + config, intermediate_size=config.moe_intermediate_size) + if i >= self.ep_rank * self.experts_per_rank and i < + (self.ep_rank + 1) * self.experts_per_rank else None) + for i in range(config.n_routed_experts) + ]) + else: + self.ep_size = 1 + self.experts_per_rank = config.n_routed_experts + self.ep_rank = 0 + self.experts = nn.ModuleList([ + DeepseekV2MLP( + config, intermediate_size=config.moe_intermediate_size) + for i in range(config.n_routed_experts) + ]) + self.gate = MoEGate(config) + if config.n_shared_experts is not None: + intermediate_size = config.moe_intermediate_size * config.n_shared_experts + self.shared_experts = DeepseekV2MLP( + config=config, intermediate_size=intermediate_size) + + def forward(self, hidden_states): + identity = hidden_states + orig_shape = hidden_states.shape + topk_idx, topk_weight, aux_loss = self.gate(hidden_states) + hidden_states = hidden_states.view(-1, hidden_states.shape[-1]) + flat_topk_idx = topk_idx.view(-1) + if self.training: + hidden_states = hidden_states.repeat_interleave( + self.num_experts_per_tok, dim=0) + y = torch.empty_like(hidden_states) + y_dtype = y.dtype + for i, expert in enumerate(self.experts): + y[flat_topk_idx == i] = expert( + hidden_states[flat_topk_idx == i]) + y = ((y.view(*topk_weight.shape, -1) * + topk_weight.unsqueeze(-1)).sum(dim=1)).type(y_dtype) + y = y.view(*orig_shape) + y = AddAuxiliaryLoss.apply(y, aux_loss) + else: + y = self.moe_infer(hidden_states, topk_idx, + topk_weight).view(*orig_shape) + if self.config.n_shared_experts is not None: + y = y + self.shared_experts(identity) + return y + + 
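+    # Editor's note on `moe_infer` below: tokens are argsorted by expert id so
+    # that each expert processes one contiguous slice, then outputs are
+    # scattered back and mixed with the router weights. Toy trace (2 tokens,
+    # top-2): topk_ids = [[0, 2], [1, 2]] -> tokens_per_expert = [1, 1, 2],
+    # so expert 2 sees both token copies in a single batch.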
@torch.no_grad() + def moe_infer(self, x, topk_ids, topk_weight): + cnts = topk_ids.new_zeros((topk_ids.shape[0], len(self.experts))) + cnts.scatter_(1, topk_ids, 1) + tokens_per_expert = cnts.sum(dim=0) + idxs = topk_ids.view(-1).argsort() + sorted_tokens = x[idxs // topk_ids.shape[1]] + sorted_tokens_shape = sorted_tokens.shape + if self.ep_size > 1: + tokens_per_ep_rank = tokens_per_expert.view(self.ep_size, + -1).sum(dim=1) + tokens_per_expert_group = tokens_per_expert.new_empty( + tokens_per_expert.shape[0]) + dist.all_to_all_single(tokens_per_expert_group, tokens_per_expert) + output_splits = ( + tokens_per_expert_group.view(self.ep_size, + -1).sum(1).cpu().numpy().tolist()) + gathered_tokens = sorted_tokens.new_empty( + tokens_per_expert_group.sum(dim=0).cpu().item(), + sorted_tokens.shape[1]) + input_split_sizes = tokens_per_ep_rank.cpu().numpy().tolist() + dist.all_to_all( + list(gathered_tokens.split(output_splits)), + list(sorted_tokens.split(input_split_sizes)), + ) + tokens_per_expert_post_gather = tokens_per_expert_group.view( + self.ep_size, self.experts_per_rank).sum(dim=0) + gatherd_idxs = np.zeros( + shape=(gathered_tokens.shape[0], ), dtype=np.int32) + s = 0 + for i, k in enumerate(tokens_per_expert_group.cpu().numpy()): + gatherd_idxs[s:s + k] = i % self.experts_per_rank + s += k + gatherd_idxs = gatherd_idxs.argsort() + sorted_tokens = gathered_tokens[gatherd_idxs] + tokens_per_expert = tokens_per_expert_post_gather + tokens_per_expert = tokens_per_expert.cpu().numpy() + + outputs = [] + start_idx = 0 + for i, num_tokens in enumerate(tokens_per_expert): + end_idx = start_idx + num_tokens + if num_tokens == 0: + continue + expert = self.experts[i + self.ep_rank * self.experts_per_rank] + tokens_for_this_expert = sorted_tokens[start_idx:end_idx] + expert_out = expert(tokens_for_this_expert) + outputs.append(expert_out) + start_idx = end_idx + + outs = torch.cat( + outputs, dim=0) if len(outputs) else sorted_tokens.new_empty(0) + if self.ep_size > 1: + new_x = torch.empty_like(outs) + new_x[gatherd_idxs] = outs + gathered_tokens = new_x.new_empty(*sorted_tokens_shape) + dist.all_to_all( + list(gathered_tokens.split(input_split_sizes)), + list(new_x.split(output_splits)), + ) + outs = gathered_tokens + + new_x = torch.empty_like(outs) + new_x[idxs] = outs + final_out = ( + new_x.view(*topk_ids.shape, -1).type(topk_weight.dtype).mul_( + topk_weight.unsqueeze(dim=-1)).sum(dim=1).type(new_x.dtype)) + return final_out + + +# Copied from transformers.models.llama.modeling_llama.repeat_kv +def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor: + """This is the equivalent of torch.repeat_interleave(x, dim=1, + repeats=n_rep). 
+
+    The hidden states go from (batch, num_key_value_heads, seqlen, head_dim) to
+    (batch, num_attention_heads, seqlen, head_dim)
+    """
+    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
+    if n_rep == 1:
+        return hidden_states
+    hidden_states = hidden_states[:, :,
+                                  None, :, :].expand(batch,
+                                                     num_key_value_heads,
+                                                     n_rep, slen, head_dim)
+    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen,
+                                 head_dim)
+
+
+# Copied from transformers.models.llama.modeling_llama.LlamaAttention with Llama->DeepseekV2
+class DeepseekV2Attention(nn.Module):
+    """Multi-headed attention from 'Attention Is All You Need' paper."""
+
+    def __init__(self,
+                 config: DeepseekV2Config,
+                 layer_idx: Optional[int] = None):
+        super().__init__()
+        self.config = config
+        self.layer_idx = layer_idx
+        if layer_idx is None:
+            logger.warning_once(
+                f'Instantiating {self.__class__.__name__} without passing `layer_idx` is not recommended and will lead '
+                'to errors during the forward call, if caching is used. Please make sure to provide a `layer_idx` '
+                'when creating this class.')
+
+        self.attention_dropout = config.attention_dropout
+        self.hidden_size = config.hidden_size
+        self.num_heads = config.num_attention_heads
+
+        self.max_position_embeddings = config.max_position_embeddings
+        self.rope_theta = config.rope_theta
+        self.q_lora_rank = config.q_lora_rank
+        self.qk_rope_head_dim = config.qk_rope_head_dim
+        self.kv_lora_rank = config.kv_lora_rank
+        self.v_head_dim = config.v_head_dim
+        self.qk_nope_head_dim = config.qk_nope_head_dim
+        self.q_head_dim = config.qk_nope_head_dim + config.qk_rope_head_dim
+
+        self.is_causal = True
+
+        if self.q_lora_rank is None:
+            self.q_proj = nn.Linear(
+                self.hidden_size, self.num_heads * self.q_head_dim, bias=False)
+        else:
+            self.q_a_proj = nn.Linear(
+                self.hidden_size,
+                config.q_lora_rank,
+                bias=config.attention_bias)
+            self.q_a_layernorm = DeepseekV2RMSNorm(config.q_lora_rank)
+            self.q_b_proj = nn.Linear(
+                config.q_lora_rank,
+                self.num_heads * self.q_head_dim,
+                bias=False)
+
+        self.kv_a_proj_with_mqa = nn.Linear(
+            self.hidden_size,
+            config.kv_lora_rank + config.qk_rope_head_dim,
+            bias=config.attention_bias,
+        )
+        self.kv_a_layernorm = DeepseekV2RMSNorm(config.kv_lora_rank)
+        self.kv_b_proj = nn.Linear(
+            config.kv_lora_rank,
+            self.num_heads *
+            (self.q_head_dim - self.qk_rope_head_dim + self.v_head_dim),
+            bias=False,
+        )
+
+        self.o_proj = nn.Linear(
+            self.num_heads * self.v_head_dim,
+            self.hidden_size,
+            bias=config.attention_bias,
+        )
+        self._init_rope()
+
+        self.softmax_scale = self.q_head_dim**(-0.5)
+        if self.config.rope_scaling is not None:
+            mscale_all_dim = self.config.rope_scaling.get('mscale_all_dim', 0)
+            scaling_factor = self.config.rope_scaling['factor']
+            if mscale_all_dim:
+                mscale = yarn_get_mscale(scaling_factor, mscale_all_dim)
+                self.softmax_scale = self.softmax_scale * mscale * mscale
+
+    def _init_rope(self):
+        if self.config.rope_scaling is None:
+            self.rotary_emb = DeepseekV2RotaryEmbedding(
+                self.qk_rope_head_dim,
+                max_position_embeddings=self.max_position_embeddings,
+                base=self.rope_theta,
+            )
+        else:
+            scaling_type = self.config.rope_scaling['type']
+            scaling_factor = self.config.rope_scaling['factor']
+            if scaling_type == 'linear':
+                self.rotary_emb = DeepseekV2LinearScalingRotaryEmbedding(
+                    self.qk_rope_head_dim,
+                    max_position_embeddings=self.max_position_embeddings,
+                    scaling_factor=scaling_factor,
+                    base=self.rope_theta,
+                )
+            elif scaling_type == 'dynamic':
+                self.rotary_emb = 
DeepseekV2DynamicNTKScalingRotaryEmbedding( + self.qk_rope_head_dim, + max_position_embeddings=self.max_position_embeddings, + scaling_factor=scaling_factor, + base=self.rope_theta, + ) + elif scaling_type == 'yarn': + kwargs = { + key: self.config.rope_scaling[key] + for key in [ + 'original_max_position_embeddings', + 'beta_fast', + 'beta_slow', + 'mscale', + 'mscale_all_dim', + ] if key in self.config.rope_scaling + } + self.rotary_emb = DeepseekV2YarnRotaryEmbedding( + self.qk_rope_head_dim, + max_position_embeddings=self.max_position_embeddings, + scaling_factor=scaling_factor, + base=self.rope_theta, + **kwargs, + ) + else: + raise ValueError(f'Unknown RoPE scaling type {scaling_type}') + + def _shape(self, tensor: torch.Tensor, seq_len: int, bsz: int): + return (tensor.view(bsz, seq_len, self.num_heads, + self.v_head_dim).transpose(1, 2).contiguous()) + + def forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Cache] = None, + output_attentions: bool = False, + use_cache: bool = False, + **kwargs, + ) -> Tuple[torch.Tensor, Optional[torch.Tensor], + Optional[Tuple[torch.Tensor]]]: + if 'padding_mask' in kwargs: + warnings.warn( + 'Passing `padding_mask` is deprecated and will be removed in v4.37. Please make sure use `attention_mask` instead.`' + ) + bsz, q_len, _ = hidden_states.size() + + if self.q_lora_rank is None: + q = self.q_proj(hidden_states) + else: + q = self.q_b_proj(self.q_a_layernorm(self.q_a_proj(hidden_states))) + q = q.view(bsz, q_len, self.num_heads, self.q_head_dim).transpose(1, 2) + q_nope, q_pe = torch.split( + q, [self.qk_nope_head_dim, self.qk_rope_head_dim], dim=-1) + + compressed_kv = self.kv_a_proj_with_mqa(hidden_states) + compressed_kv, k_pe = torch.split( + compressed_kv, [self.kv_lora_rank, self.qk_rope_head_dim], dim=-1) + k_pe = k_pe.view(bsz, q_len, 1, self.qk_rope_head_dim).transpose(1, 2) + kv = ( + self.kv_b_proj(self.kv_a_layernorm(compressed_kv)).view( + bsz, q_len, self.num_heads, + self.qk_nope_head_dim + self.v_head_dim).transpose(1, 2)) + + k_nope, value_states = torch.split( + kv, [self.qk_nope_head_dim, self.v_head_dim], dim=-1) + kv_seq_len = value_states.shape[-2] + if past_key_value is not None: + if self.layer_idx is None: + raise ValueError( + f'The cache structure has changed since version v4.36. 
If you are using {self.__class__.__name__} ' + 'for auto-regressive decoding with k/v caching, please make sure to initialize the attention class ' + 'with a layer index.') + kv_seq_len += past_key_value.get_usable_length( + kv_seq_len, self.layer_idx) + cos, sin = self.rotary_emb(value_states, seq_len=kv_seq_len) + + q_pe, k_pe = apply_rotary_pos_emb(q_pe, k_pe, cos, sin, position_ids) + + query_states = k_pe.new_empty(bsz, self.num_heads, q_len, + self.q_head_dim) + query_states[:, :, :, :self.qk_nope_head_dim] = q_nope + query_states[:, :, :, self.qk_nope_head_dim:] = q_pe + + key_states = k_pe.new_empty(bsz, self.num_heads, q_len, + self.q_head_dim) + key_states[:, :, :, :self.qk_nope_head_dim] = k_nope + key_states[:, :, :, self.qk_nope_head_dim:] = k_pe + if past_key_value is not None: + cache_kwargs = {'sin': sin, 'cos': cos} # Specific to RoPE models + key_states, value_states = past_key_value.update( + key_states, value_states, self.layer_idx, cache_kwargs) + + attn_weights = ( + torch.matmul(query_states, key_states.transpose(2, 3)) * + self.softmax_scale) + + if attn_weights.size() != (bsz, self.num_heads, q_len, kv_seq_len): + raise ValueError( + f'Attention weights should be of size {(bsz, self.num_heads, q_len, kv_seq_len)}, but is' + f' {attn_weights.size()}') + assert attention_mask is not None + if attention_mask is not None: + if attention_mask.size() != (bsz, 1, q_len, kv_seq_len): + raise ValueError( + f'Attention mask should be of size {(bsz, 1, q_len, kv_seq_len)}, but is {attention_mask.size()}' + ) + attn_weights = attn_weights + attention_mask + + # upcast attention to fp32 + attn_weights = nn.functional.softmax( + attn_weights, dim=-1, dtype=torch.float32).to(query_states.dtype) + attn_weights = nn.functional.dropout( + attn_weights, p=self.attention_dropout, training=self.training) + attn_output = torch.matmul(attn_weights, value_states) + + if attn_output.size() != (bsz, self.num_heads, q_len, self.v_head_dim): + raise ValueError( + f'`attn_output` should be of size {(bsz, self.num_heads, q_len, self.v_head_dim)}, but is' + f' {attn_output.size()}') + + attn_output = attn_output.transpose(1, 2).contiguous() + + attn_output = attn_output.reshape(bsz, q_len, + self.num_heads * self.v_head_dim) + + attn_output = self.o_proj(attn_output) + + if not output_attentions: + attn_weights = None + + return attn_output, attn_weights, past_key_value + + +# Copied from transformers.models.llama.modeling_llama.LlamaFlashAttention2 with Llama->DeepseekV2 +class DeepseekV2FlashAttention2(DeepseekV2Attention): + """DeepseekV2 flash attention module. + + This module inherits from `DeepseekV2Attention` as the weights of the + module stays untouched. The only required change would be on the forward + pass where it needs to correctly call the public API of flash attention and + deal with padding tokens in case the input contains any of them. + """ + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + # TODO: Should be removed once Flash Attention for RoCm is bumped to 2.1. + # flash_attn<2.1 generates top-left aligned causal mask, while what is needed here is bottom-right alignment, that was made default for flash_attn>=2.1. This attribute is used to handle this difference. Reference: https://github.com/Dao-AILab/flash-attention/releases/tag/v2.1.0. + # Beware that with flash_attn<2.1, using q_seqlen != k_seqlen (except for the case q_seqlen == 1) produces a wrong mask (top-left). 
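+        # (Editor's illustration: for q_len=2, kv_len=5, bottom-right
+        # alignment yields the mask [[1, 1, 1, 1, 0], [1, 1, 1, 1, 1]],
+        # whereas top-left alignment would give
+        # [[1, 0, 0, 0, 0], [1, 1, 0, 0, 0]].)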
+ self._flash_attn_uses_top_left_mask = not is_flash_attn_greater_or_equal_2_10( + ) + + def forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.LongTensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Cache] = None, + output_attentions: bool = False, + use_cache: bool = False, + **kwargs, + ) -> Tuple[torch.Tensor, Optional[torch.Tensor], + Optional[Tuple[torch.Tensor]]]: + # DeepseekV2FlashAttention2 attention does not support output_attentions + if 'padding_mask' in kwargs: + warnings.warn( + 'Passing `padding_mask` is deprecated and will be removed in v4.37. Please make sure use `attention_mask` instead.`' + ) + + # overwrite attention_mask with padding_mask + attention_mask = kwargs.pop('padding_mask') + + output_attentions = False + + bsz, q_len, _ = hidden_states.size() + + if self.q_lora_rank is None: + q = self.q_proj(hidden_states) + else: + q = self.q_b_proj(self.q_a_layernorm(self.q_a_proj(hidden_states))) + q = q.view(bsz, q_len, self.num_heads, self.q_head_dim).transpose(1, 2) + q_nope, q_pe = torch.split( + q, [self.qk_nope_head_dim, self.qk_rope_head_dim], dim=-1) + + # Flash attention requires the input to have the shape + # batch_size x seq_length x head_dim x hidden_dim + # therefore we just need to keep the original shape + compressed_kv = self.kv_a_proj_with_mqa(hidden_states) + compressed_kv, k_pe = torch.split( + compressed_kv, [self.kv_lora_rank, self.qk_rope_head_dim], dim=-1) + k_pe = k_pe.view(bsz, q_len, 1, self.qk_rope_head_dim).transpose(1, 2) + kv = ( + self.kv_b_proj(self.kv_a_layernorm(compressed_kv)).view( + bsz, q_len, self.num_heads, + self.qk_nope_head_dim + self.v_head_dim).transpose(1, 2)) + + k_nope, value_states = torch.split( + kv, [self.qk_nope_head_dim, self.v_head_dim], dim=-1) + kv_seq_len = value_states.shape[-2] + + kv_seq_len = value_states.shape[-2] + if past_key_value is not None: + kv_seq_len += past_key_value.get_usable_length( + kv_seq_len, self.layer_idx) + + cos, sin = self.rotary_emb(value_states, seq_len=kv_seq_len) + q_pe, k_pe = apply_rotary_pos_emb(q_pe, k_pe, cos, sin, position_ids) + + query_states = k_pe.new_empty(bsz, self.num_heads, q_len, + self.q_head_dim) + query_states[:, :, :, :self.qk_nope_head_dim] = q_nope + query_states[:, :, :, self.qk_nope_head_dim:] = q_pe + + key_states = k_pe.new_empty(bsz, self.num_heads, q_len, + self.q_head_dim) + key_states[:, :, :, :self.qk_nope_head_dim] = k_nope + key_states[:, :, :, self.qk_nope_head_dim:] = k_pe + + if self.q_head_dim != self.v_head_dim: + value_states = F.pad(value_states, + [0, self.q_head_dim - self.v_head_dim]) + + if past_key_value is not None: + cache_kwargs = {'sin': sin, 'cos': cos} # Specific to RoPE models + key_states, value_states = past_key_value.update( + key_states, value_states, self.layer_idx, cache_kwargs) + + # TODO: These transpose are quite inefficient but Flash Attention requires the layout [batch_size, sequence_length, num_heads, head_dim]. We would need to refactor the KV cache + # to be able to avoid many of these transpose/reshape/view. + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + + dropout_rate = self.attention_dropout if self.training else 0.0 + + # In PEFT, usually we cast the layer norms in float32 for training stability reasons + # therefore the input hidden states gets silently casted in float32. 
Hence, we need + # cast them back in the correct dtype just to be sure everything works as expected. + # This might slowdown training & inference so it is recommended to not cast the LayerNorms + # in fp32. (DeepseekV2RMSNorm handles it correctly) + + input_dtype = query_states.dtype + if input_dtype == torch.float32: + # Handle the case where the model is quantized + if hasattr(self.config, '_pre_quantization_dtype'): + target_dtype = self.config._pre_quantization_dtype + elif torch.is_autocast_enabled(): + target_dtype = torch.get_autocast_gpu_dtype() + else: + target_dtype = self.q_proj.weight.dtype if self.q_lora_rank is None else self.q_a_proj.weight.dtype + + logger.warning_once( + f'The input hidden states seems to be silently casted in float32, this might be related to' + f' the fact you have upcasted embedding or layer norm layers in float32. We will cast back the input in' + f' {target_dtype}.') + + query_states = query_states.to(target_dtype) + key_states = key_states.to(target_dtype) + value_states = value_states.to(target_dtype) + + attn_output = self._flash_attention_forward( + query_states, + key_states, + value_states, + attention_mask, + q_len, + dropout=dropout_rate, + softmax_scale=self.softmax_scale, + ) + if self.q_head_dim != self.v_head_dim: + attn_output = attn_output[:, :, :, :self.v_head_dim] + + attn_output = attn_output.reshape(bsz, q_len, self.num_heads * + self.v_head_dim).contiguous() + attn_output = self.o_proj(attn_output) + + if not output_attentions: + attn_weights = None + + return attn_output, attn_weights, past_key_value + + def _flash_attention_forward( + self, + query_states, + key_states, + value_states, + attention_mask, + query_length, + dropout=0.0, + softmax_scale=None, + ): + """ + Calls the forward method of Flash Attention - if the input hidden states contain at least one padding token + first unpad the input, then computes the attention scores and pad the final attention scores. + + Args: + query_states (`torch.Tensor`): + Input query states to be passed to Flash Attention API + key_states (`torch.Tensor`): + Input key states to be passed to Flash Attention API + value_states (`torch.Tensor`): + Input value states to be passed to Flash Attention API + attention_mask (`torch.Tensor`): + The padding mask - corresponds to a tensor of size `(batch_size, seq_len)` where 0 stands for the + position of padding tokens and 1 for the position of non-padding tokens. + dropout (`int`, *optional*): + Attention dropout + softmax_scale (`float`, *optional*): + The scaling of QK^T before applying softmax. Default to 1 / sqrt(head_dim) + """ + if not self._flash_attn_uses_top_left_mask: + causal = self.is_causal + else: + # TODO: Remove the `query_length != 1` check once Flash Attention for RoCm is bumped to 2.1. For details, please see the comment in DeepseekV2FlashAttention2 __init__. 
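+            # With the old top-left alignment, a correct bottom-right causal
+            # mask is only reproduced when q_seqlen == k_seqlen; a single
+            # decoding query may attend to the whole key sequence anyway, so
+            # causality is disabled for that case.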
+ causal = self.is_causal and query_length != 1 + + # Contains at least one padding token in the sequence + if attention_mask is not None: + batch_size = query_states.shape[0] + ( + query_states, + key_states, + value_states, + indices_q, + cu_seq_lens, + max_seq_lens, + ) = self._upad_input(query_states, key_states, value_states, + attention_mask, query_length) + + cu_seqlens_q, cu_seqlens_k = cu_seq_lens + max_seqlen_in_batch_q, max_seqlen_in_batch_k = max_seq_lens + + attn_output_unpad = flash_attn_varlen_func( + query_states, + key_states, + value_states, + cu_seqlens_q=cu_seqlens_q, + cu_seqlens_k=cu_seqlens_k, + max_seqlen_q=max_seqlen_in_batch_q, + max_seqlen_k=max_seqlen_in_batch_k, + dropout_p=dropout, + softmax_scale=softmax_scale, + causal=causal, + ) + + attn_output = pad_input(attn_output_unpad, indices_q, batch_size, + query_length) + else: + attn_output = flash_attn_func( + query_states, + key_states, + value_states, + dropout, + softmax_scale=softmax_scale, + causal=causal, + ) + + return attn_output + + def _upad_input(self, query_layer, key_layer, value_layer, attention_mask, + query_length): + indices_k, cu_seqlens_k, max_seqlen_in_batch_k = _get_unpad_data( + attention_mask) + batch_size, kv_seq_len, num_key_value_heads, head_dim = key_layer.shape + + key_layer = index_first_axis( + key_layer.reshape(batch_size * kv_seq_len, num_key_value_heads, + head_dim), + indices_k, + ) + value_layer = index_first_axis( + value_layer.reshape(batch_size * kv_seq_len, num_key_value_heads, + head_dim), + indices_k, + ) + if query_length == kv_seq_len: + query_layer = index_first_axis( + query_layer.reshape(batch_size * kv_seq_len, self.num_heads, + head_dim), + indices_k, + ) + cu_seqlens_q = cu_seqlens_k + max_seqlen_in_batch_q = max_seqlen_in_batch_k + indices_q = indices_k + elif query_length == 1: + max_seqlen_in_batch_q = 1 + cu_seqlens_q = torch.arange( + batch_size + 1, dtype=torch.int32, device=query_layer.device + ) # There is a memcpy here, that is very bad. + indices_q = cu_seqlens_q[:-1] + query_layer = query_layer.squeeze(1) + else: + # The -q_len: slice assumes left padding. 
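+            # Trim the mask to the last `query_length` columns so `unpad_input`
+            # sees exactly one mask column per query position.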
+ attention_mask = attention_mask[:, -query_length:] + query_layer, indices_q, cu_seqlens_q, max_seqlen_in_batch_q = unpad_input( + query_layer, attention_mask) + + return ( + query_layer, + key_layer, + value_layer, + indices_q, + (cu_seqlens_q, cu_seqlens_k), + (max_seqlen_in_batch_q, max_seqlen_in_batch_k), + ) + + +ATTENTION_CLASSES = { + 'eager': DeepseekV2Attention, + 'flash_attention_2': DeepseekV2FlashAttention2, +} + + +class DeepseekV2DecoderLayer(nn.Module): + + def __init__(self, config: DeepseekV2Config, layer_idx: int): + super().__init__() + self.hidden_size = config.hidden_size + + self.self_attn = ATTENTION_CLASSES[config._attn_implementation]( + config=config, layer_idx=layer_idx) + + moe_implementation = config.moe_implementation + if moe_implementation == 'origin': + block = DeepseekV2MoE + elif moe_implementation == 'shard': + block = DeepseekV2MoEShard + else: + raise NotImplementedError + + self.mlp = ( + block(config) if + (config.n_routed_experts is not None + and layer_idx >= config.first_k_dense_replace and layer_idx % + config.moe_layer_freq == 0) else DeepseekV2MLP(config)) + self.input_layernorm = DeepseekV2RMSNorm( + config.hidden_size, eps=config.rms_norm_eps) + self.post_attention_layernorm = DeepseekV2RMSNorm( + config.hidden_size, eps=config.rms_norm_eps) + + def forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Tuple[torch.Tensor]] = None, + output_attentions: Optional[bool] = False, + use_cache: Optional[bool] = False, + **kwargs, + ) -> Tuple[torch.FloatTensor, Optional[Tuple[torch.FloatTensor, + torch.FloatTensor]]]: + """ + Args: + hidden_states (`torch.FloatTensor`): input to the layer of shape `(batch, seq_len, embed_dim)` + attention_mask (`torch.FloatTensor`, *optional*): + attention mask of size `(batch_size, sequence_length)` if flash attention is used or `(batch_size, 1, + query_sequence_length, key_sequence_length)` if default attention is used. + output_attentions (`bool`, *optional*): + Whether or not to return the attentions tensors of all attention layers. See `attentions` under + returned tensors for more detail. + use_cache (`bool`, *optional*): + If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding + (see `past_key_values`). + past_key_value (`Tuple(torch.FloatTensor)`, *optional*): cached past key and value projection states + """ + if 'padding_mask' in kwargs: + warnings.warn( + 'Passing `padding_mask` is deprecated and will be removed in v4.37. 
Please make sure use `attention_mask` instead.`' + ) + residual = hidden_states + + hidden_states = self.input_layernorm(hidden_states) + + # Self Attention + hidden_states, self_attn_weights, present_key_value = self.self_attn( + hidden_states=hidden_states, + attention_mask=attention_mask, + position_ids=position_ids, + past_key_value=past_key_value, + output_attentions=output_attentions, + use_cache=use_cache, + **kwargs, + ) + hidden_states = residual + hidden_states + + # Fully Connected + residual = hidden_states + hidden_states = self.post_attention_layernorm(hidden_states) + hidden_states = self.mlp(hidden_states) + hidden_states = residual + hidden_states + + outputs = (hidden_states, ) + + if output_attentions: + outputs += (self_attn_weights, ) + + if use_cache: + outputs += (present_key_value, ) + + return outputs + + +def _load_pretrained_model( + cls, + model, + state_dict, + loaded_keys, + resolved_archive_file, + pretrained_model_name_or_path, + ignore_mismatched_sizes=False, + sharded_metadata=None, + _fast_init=True, + low_cpu_mem_usage=False, + device_map=None, + offload_folder=None, + offload_state_dict=None, + dtype=None, + hf_quantizer=None, + keep_in_fp32_modules=None, + gguf_path=None, +): + if ((state_dict is not None) or (resolved_archive_file is None) + or (low_cpu_mem_usage) or (device_map is not None) + or (offload_folder is not None) or + (not (offload_state_dict is None or offload_state_dict is False)) + or (hf_quantizer is not None) or + (keep_in_fp32_modules is not None and len(keep_in_fp32_modules) > 0) + or (gguf_path is not None)): + raise NotImplementedError + + folder = os.path.sep.join(resolved_archive_file[0].split(os.path.sep)[:-1]) + error_msgs = load_state_dict_into_model(model, folder) + return model, [], [], [], None, error_msgs + + +DeepseekV2_START_DOCSTRING = r""" + This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the + library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads + etc.) + + This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. + Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage + and behavior. + + Parameters: + config ([`DeepseekV2Config`]): + Model configuration class with all the parameters of the model. Initializing with a config file does not + load the weights associated with the model, only the configuration. Check out the + [`~PreTrainedModel.from_pretrained`] method to load the model weights. 
+""" + + +@add_start_docstrings( + 'The bare DeepseekV2 Model outputting raw hidden-states without any specific head on top.', + DeepseekV2_START_DOCSTRING, +) +class DeepseekV2PreTrainedModel(PreTrainedModel): + config_class = DeepseekV2Config + base_model_prefix = 'model' + supports_gradient_checkpointing = True + _no_split_modules = ['DeepseekV2DecoderLayer'] + _skip_keys_device_placement = 'past_key_values' + _supports_flash_attn_2 = True + _supports_sdpa = True + _supports_cache_class = True + + def _init_weights(self, module): + std = self.config.initializer_range + if isinstance(module, nn.Linear): + module.weight.data.normal_(mean=0.0, std=std) + if module.bias is not None: + module.bias.data.zero_() + elif isinstance(module, nn.Embedding): + module.weight.data.normal_(mean=0.0, std=std) + if module.padding_idx is not None: + module.weight.data[module.padding_idx].zero_() + + @classmethod + def from_pretrained(cls, pretrained_model_name_or_path, *args, **kwargs): + moe_implementation = kwargs.get('moe_implementation', 'origin') + if moe_implementation == 'origin': + return super().from_pretrained(pretrained_model_name_or_path, + *args, **kwargs) + + cls._load_pretrained_model = types.MethodType(_load_pretrained_model, + cls) + return super().from_pretrained(pretrained_model_name_or_path, *args, + **kwargs) + + +DeepseekV2_INPUTS_DOCSTRING = r""" + Args: + input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): + Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide + it. + + Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and + [`PreTrainedTokenizer.__call__`] for details. + + [What are input IDs?](../glossary#input-ids) + attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*): + Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: + + - 1 for tokens that are **not masked**, + - 0 for tokens that are **masked**. + + [What are attention masks?](../glossary#attention-mask) + + Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and + [`PreTrainedTokenizer.__call__`] for details. + + If `past_key_values` is used, optionally only the last `input_ids` have to be input (see + `past_key_values`). + + If you want to change padding behavior, you should read [`modeling_opt._prepare_decoder_attention_mask`] + and modify to your needs. See diagram 1 in [the paper](https://arxiv.org/abs/1910.13461) for more + information on the default strategy. + + - 1 indicates the head is **not masked**, + - 0 indicates the head is **masked**. + position_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): + Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0, + config.n_positions - 1]`. + + [What are position IDs?](../glossary#position-ids) + past_key_values (`Cache` or `tuple(tuple(torch.FloatTensor))`, *optional*): + Pre-computed hidden-states (key and values in the self-attention blocks and in the cross-attention + blocks) that can be used to speed up sequential decoding. This typically consists in the `past_key_values` + returned by the model at a previous stage of decoding, when `use_cache=True` or `config.use_cache=True`. 
+ + Two formats are allowed: + - a [`~cache_utils.Cache`] instance; + - Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of + shape `(batch_size, num_heads, sequence_length, embed_size_per_head)`). This is also known as the legacy + cache format. + + The model will output the same cache format that is fed as input. If no `past_key_values` are passed, the + legacy cache format will be returned. + + If `past_key_values` are used, the user can optionally input only the last `input_ids` (those that don't + have their past key value states given to this model) of shape `(batch_size, 1)` instead of all `input_ids` + of shape `(batch_size, sequence_length)`. + inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): + Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This + is useful if you want more control over how to convert `input_ids` indices into associated vectors than the + model's internal embedding lookup matrix. + use_cache (`bool`, *optional*): + If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see + `past_key_values`). + output_attentions (`bool`, *optional*): + Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned + tensors for more detail. + output_hidden_states (`bool`, *optional*): + Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for + more detail. + return_dict (`bool`, *optional*): + Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. +""" + + +@add_start_docstrings( + 'The bare DeepseekV2 Model outputting raw hidden-states without any specific head on top.', + DeepseekV2_START_DOCSTRING, +) +class DeepseekV2Model(DeepseekV2PreTrainedModel): + """Transformer decoder consisting of *config.num_hidden_layers* layers. 
+    Each layer is a [`DeepseekV2DecoderLayer`]
+
+    Args:
+        config: DeepseekV2Config
+    """
+
+    def __init__(self, config: DeepseekV2Config):
+        super().__init__(config)
+        self.padding_idx = config.pad_token_id
+        self.vocab_size = config.vocab_size
+
+        self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size,
+                                         self.padding_idx)
+        self.layers = nn.ModuleList([
+            DeepseekV2DecoderLayer(config, layer_idx)
+            for layer_idx in range(config.num_hidden_layers)
+        ])
+        self._use_sdpa = config._attn_implementation == 'sdpa'
+        self._use_flash_attention_2 = config._attn_implementation == 'flash_attention_2'
+        self.norm = DeepseekV2RMSNorm(
+            config.hidden_size, eps=config.rms_norm_eps)
+
+        self.gradient_checkpointing = False
+        # Initialize weights and apply final processing
+        self.post_init()
+
+    def get_input_embeddings(self):
+        return self.embed_tokens
+
+    def set_input_embeddings(self, value):
+        self.embed_tokens = value
+
+    @add_start_docstrings_to_model_forward(DeepseekV2_INPUTS_DOCSTRING)
+    def forward(
+        self,
+        input_ids: torch.LongTensor = None,
+        attention_mask: Optional[torch.Tensor] = None,
+        position_ids: Optional[torch.LongTensor] = None,
+        past_key_values: Optional[List[torch.FloatTensor]] = None,
+        inputs_embeds: Optional[torch.FloatTensor] = None,
+        use_cache: Optional[bool] = None,
+        output_attentions: Optional[bool] = None,
+        output_hidden_states: Optional[bool] = None,
+        return_dict: Optional[bool] = None,
+    ) -> Union[Tuple, BaseModelOutputWithPast]:
+        output_attentions = (
+            output_attentions if output_attentions is not None else
+            self.config.output_attentions)
+        output_hidden_states = (
+            output_hidden_states if output_hidden_states is not None else
+            self.config.output_hidden_states)
+        use_cache = use_cache if use_cache is not None else self.config.use_cache
+
+        return_dict = (
+            return_dict
+            if return_dict is not None else self.config.use_return_dict)
+
+        # retrieve input_ids and inputs_embeds
+        if input_ids is not None and inputs_embeds is not None:
+            raise ValueError(
+                'You cannot specify both input_ids and inputs_embeds at the same time'
+            )
+        elif input_ids is not None:
+            batch_size, seq_length = input_ids.shape[:2]
+        elif inputs_embeds is not None:
+            batch_size, seq_length = inputs_embeds.shape[:2]
+        else:
+            raise ValueError(
+                'You have to specify either input_ids or inputs_embeds')
+
+        if self.gradient_checkpointing and self.training:
+            if use_cache:
+                logger.warning_once(
+                    '`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`...'
+ ) + use_cache = False + + past_key_values_length = 0 + if use_cache: + use_legacy_cache = not isinstance(past_key_values, Cache) + if use_legacy_cache: + past_key_values = DynamicCache.from_legacy_cache( + past_key_values) + past_key_values_length = past_key_values.get_usable_length( + seq_length) + + if position_ids is None: + device = input_ids.device if input_ids is not None else inputs_embeds.device + position_ids = torch.arange( + past_key_values_length, + seq_length + past_key_values_length, + dtype=torch.long, + device=device, + ) + position_ids = position_ids.unsqueeze(0) + + if inputs_embeds is None: + inputs_embeds = self.embed_tokens(input_ids) + + if self._use_flash_attention_2: + # 2d mask is passed through the layers + attention_mask = ( + attention_mask if + (attention_mask is not None and 0 in attention_mask) else None) + elif self._use_sdpa and not output_attentions: + # output_attentions=True can not be supported when using SDPA, and we fall back on + # the manual implementation that requires a 4D causal mask in all cases. + attention_mask = _prepare_4d_causal_attention_mask_for_sdpa( + attention_mask, + (batch_size, seq_length), + inputs_embeds, + past_key_values_length, + ) + else: + # 4d mask is passed through the layers + attention_mask = _prepare_4d_causal_attention_mask( + attention_mask, + (batch_size, seq_length), + inputs_embeds, + past_key_values_length, + ) + + # embed positions + hidden_states = inputs_embeds + + # decoder layers + all_hidden_states = () if output_hidden_states else None + all_self_attns = () if output_attentions else None + next_decoder_cache = None + + for decoder_layer in self.layers: + if output_hidden_states: + all_hidden_states += (hidden_states, ) + + if self.gradient_checkpointing and self.training: + layer_outputs = self._gradient_checkpointing_func( + decoder_layer.__call__, + hidden_states, + attention_mask, + position_ids, + past_key_values, + output_attentions, + use_cache, + ) + else: + layer_outputs = decoder_layer( + hidden_states, + attention_mask=attention_mask, + position_ids=position_ids, + past_key_value=past_key_values, + output_attentions=output_attentions, + use_cache=use_cache, + ) + + hidden_states = layer_outputs[0] + + if use_cache: + next_decoder_cache = layer_outputs[ + 2 if output_attentions else 1] + + if output_attentions: + all_self_attns += (layer_outputs[1], ) + + hidden_states = self.norm(hidden_states) + + # add hidden states from the last decoder layer + if output_hidden_states: + all_hidden_states += (hidden_states, ) + + next_cache = None + if use_cache: + next_cache = ( + next_decoder_cache.to_legacy_cache() + if use_legacy_cache else next_decoder_cache) + if not return_dict: + return tuple( + v for v in + [hidden_states, next_cache, all_hidden_states, all_self_attns] + if v is not None) + return BaseModelOutputWithPast( + last_hidden_state=hidden_states, + past_key_values=next_cache, + hidden_states=all_hidden_states, + attentions=all_self_attns, + ) + + +class DeepseekV2ForCausalLM(DeepseekV2PreTrainedModel): + _tied_weights_keys = ['lm_head.weight'] + + def __init__(self, config): + super().__init__(config) + self.model = DeepseekV2Model(config) + self.vocab_size = config.vocab_size + self.lm_head = nn.Linear( + config.hidden_size, config.vocab_size, bias=False) + + # Initialize weights and apply final processing + self.post_init() + + def get_input_embeddings(self): + return self.model.embed_tokens + + def set_input_embeddings(self, value): + self.model.embed_tokens = value + + def 
get_output_embeddings(self):
+        return self.lm_head
+
+    def set_output_embeddings(self, new_embeddings):
+        self.lm_head = new_embeddings
+
+    def set_decoder(self, decoder):
+        self.model = decoder
+
+    def get_decoder(self):
+        return self.model
+
+    @add_start_docstrings_to_model_forward(DeepseekV2_INPUTS_DOCSTRING)
+    @replace_return_docstrings(
+        output_type=CausalLMOutputWithPast, config_class=_CONFIG_FOR_DOC)
+    def forward(
+        self,
+        input_ids: torch.LongTensor = None,
+        attention_mask: Optional[torch.Tensor] = None,
+        position_ids: Optional[torch.LongTensor] = None,
+        past_key_values: Optional[List[torch.FloatTensor]] = None,
+        inputs_embeds: Optional[torch.FloatTensor] = None,
+        labels: Optional[torch.LongTensor] = None,
+        use_cache: Optional[bool] = None,
+        output_attentions: Optional[bool] = None,
+        output_hidden_states: Optional[bool] = None,
+        return_dict: Optional[bool] = None,
+    ) -> Union[Tuple, CausalLMOutputWithPast]:
+        r"""
+        Args:
+            labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
+                Labels for computing the masked language modeling loss. Indices should either be in `[0, ...,
+                config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored
+                (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`.
+
+        Returns:
+
+        Example:
+
+        ```python
+        >>> from transformers import AutoTokenizer, DeepseekV2ForCausalLM
+
+        >>> model = DeepseekV2ForCausalLM.from_pretrained(PATH_TO_CONVERTED_WEIGHTS)
+        >>> tokenizer = AutoTokenizer.from_pretrained(PATH_TO_CONVERTED_TOKENIZER)
+
+        >>> prompt = "Hey, are you conscious? Can you talk to me?"
+        >>> inputs = tokenizer(prompt, return_tensors="pt")
+
+        >>> # Generate
+        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
+        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
+        "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you."
+ ```""" + output_attentions = ( + output_attentions if output_attentions is not None else + self.config.output_attentions) + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else + self.config.output_hidden_states) + return_dict = ( + return_dict + if return_dict is not None else self.config.use_return_dict) + + # decoder outputs consists of (dec_features, layer_state, dec_hidden, dec_attn) + outputs = self.model( + input_ids=input_ids, + attention_mask=attention_mask, + position_ids=position_ids, + past_key_values=past_key_values, + inputs_embeds=inputs_embeds, + use_cache=use_cache, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + + hidden_states = outputs[0] + logits = self.lm_head(hidden_states) + logits = logits.float() + + loss = None + if labels is not None: + # Shift so that tokens < n predict n + shift_logits = logits[..., :-1, :].contiguous() + shift_labels = labels[..., 1:].contiguous() + # Flatten the tokens + loss_fct = CrossEntropyLoss() + shift_logits = shift_logits.view(-1, self.config.vocab_size) + shift_labels = shift_labels.view(-1) + # Enable model parallelism + shift_labels = shift_labels.to(shift_logits.device) + loss = loss_fct(shift_logits, shift_labels) + + if not return_dict: + output = (logits, ) + outputs[1:] + return (loss, ) + output if loss is not None else output + + return CausalLMOutputWithPast( + loss=loss, + logits=logits, + past_key_values=outputs.past_key_values, + hidden_states=outputs.hidden_states, + attentions=outputs.attentions, + ) + + def prepare_inputs_for_generation( + self, + input_ids, + past_key_values=None, + attention_mask=None, + inputs_embeds=None, + **kwargs, + ): + if past_key_values is not None: + if isinstance(past_key_values, Cache): + cache_length = past_key_values.get_seq_length() + past_length = past_key_values.seen_tokens + max_cache_length = past_key_values.get_max_length() + else: + cache_length = past_length = past_key_values[0][0].shape[2] + max_cache_length = None + + # Keep only the unprocessed tokens: + # 1 - If the length of the attention_mask exceeds the length of input_ids, then we are in a setting where + # some of the inputs are exclusively passed as part of the cache (e.g. when passing input_embeds as + # input) + if (attention_mask is not None + and attention_mask.shape[1] > input_ids.shape[1]): + input_ids = input_ids[:, -(attention_mask.shape[1] - + past_length):] + # 2 - If the past_length is smaller than input_ids', then input_ids holds all input tokens. We can discard + # input_ids based on the past_length. + elif past_length < input_ids.shape[1]: + input_ids = input_ids[:, past_length:] + # 3 - Otherwise (past_length >= input_ids.shape[1]), let's assume input_ids only has unprocessed tokens. + + # If we are about to go beyond the maximum cache length, we need to crop the input attention mask. 
+            if (max_cache_length is not None and attention_mask is not None
+                    and cache_length + input_ids.shape[1] > max_cache_length):
+                attention_mask = attention_mask[:, -max_cache_length:]
+
+        position_ids = kwargs.get('position_ids', None)
+        if attention_mask is not None and position_ids is None:
+            # create position_ids on the fly for batch generation
+            position_ids = attention_mask.long().cumsum(-1) - 1
+            position_ids.masked_fill_(attention_mask == 0, 1)
+            if past_key_values:
+                position_ids = position_ids[:, -input_ids.shape[1]:]
+
+        # if `inputs_embeds` are passed, we only want to use them in the 1st generation step
+        if inputs_embeds is not None and past_key_values is None:
+            model_inputs = {'inputs_embeds': inputs_embeds}
+        else:
+            model_inputs = {'input_ids': input_ids}
+
+        model_inputs.update({
+            'position_ids': position_ids,
+            'past_key_values': past_key_values,
+            'use_cache': kwargs.get('use_cache'),
+            'attention_mask': attention_mask,
+        })
+        return model_inputs
+
+    @staticmethod
+    def _reorder_cache(past_key_values, beam_idx):
+        reordered_past = ()
+        for layer_past in past_key_values:
+            reordered_past += (tuple(
+                past_state.index_select(0, beam_idx.to(past_state.device))
+                for past_state in layer_past), )
+        return reordered_past
+
+
+@add_start_docstrings(
+    """
+    The DeepseekV2 Model transformer with a sequence classification head on top (linear layer).
+
+    [`DeepseekV2ForSequenceClassification`] uses the last token in order to do the classification, as other causal models
+    (e.g. GPT-2) do.
+
+    Since it does classification on the last token, it requires knowing the position of the last token. If a
+    `pad_token_id` is defined in the configuration, it finds the last token that is not a padding token in each row. If
+    no `pad_token_id` is defined, it simply takes the last value in each row of the batch. Since it cannot guess the
+    padding tokens when `inputs_embeds` are passed instead of `input_ids`, it does the same (takes the last value in
+    each row of the batch).
+    """,
+    DeepseekV2_START_DOCSTRING,
+)
+class DeepseekV2ForSequenceClassification(DeepseekV2PreTrainedModel):
+
+    def __init__(self, config):
+        super().__init__(config)
+        self.num_labels = config.num_labels
+        self.model = DeepseekV2Model(config)
+        self.score = nn.Linear(config.hidden_size, self.num_labels, bias=False)
+
+        # Initialize weights and apply final processing
+        self.post_init()
+
+    def get_input_embeddings(self):
+        return self.model.embed_tokens
+
+    def set_input_embeddings(self, value):
+        self.model.embed_tokens = value
+
+    @add_start_docstrings_to_model_forward(DeepseekV2_INPUTS_DOCSTRING)
+    def forward(
+        self,
+        input_ids: torch.LongTensor = None,
+        attention_mask: Optional[torch.Tensor] = None,
+        position_ids: Optional[torch.LongTensor] = None,
+        past_key_values: Optional[List[torch.FloatTensor]] = None,
+        inputs_embeds: Optional[torch.FloatTensor] = None,
+        labels: Optional[torch.LongTensor] = None,
+        use_cache: Optional[bool] = None,
+        output_attentions: Optional[bool] = None,
+        output_hidden_states: Optional[bool] = None,
+        return_dict: Optional[bool] = None,
+    ) -> Union[Tuple, SequenceClassifierOutputWithPast]:
+        r"""
+        labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
+            Labels for computing the sequence classification/regression loss. Indices should be in `[0, ...,
+            config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss); if
+            `config.num_labels > 1` a classification loss is computed (Cross-Entropy).
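+
+        Example (an illustrative sketch; `PATH_TO_CONVERTED_WEIGHTS` is a
+        placeholder, not a real checkpoint):
+
+        ```python
+        >>> model = DeepseekV2ForSequenceClassification.from_pretrained(PATH_TO_CONVERTED_WEIGHTS)
+        ```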
+ """ + return_dict = ( + return_dict + if return_dict is not None else self.config.use_return_dict) + + transformer_outputs = self.model( + input_ids, + attention_mask=attention_mask, + position_ids=position_ids, + past_key_values=past_key_values, + inputs_embeds=inputs_embeds, + use_cache=use_cache, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + hidden_states = transformer_outputs[0] + logits = self.score(hidden_states) + + if input_ids is not None: + batch_size = input_ids.shape[0] + else: + batch_size = inputs_embeds.shape[0] + + if self.config.pad_token_id is None and batch_size != 1: + raise ValueError( + 'Cannot handle batch sizes > 1 if no padding token is defined.' + ) + if self.config.pad_token_id is None: + sequence_lengths = -1 + else: + if input_ids is not None: + sequence_lengths = (torch.eq( + input_ids, self.config.pad_token_id).int().argmax(-1) - + 1).to(logits.device) + else: + sequence_lengths = -1 + + pooled_logits = logits[torch.arange(batch_size, device=logits.device), + sequence_lengths] + + loss = None + if labels is not None: + labels = labels.to(logits.device) + if self.config.problem_type is None: + if self.num_labels == 1: + self.config.problem_type = 'regression' + elif self.num_labels > 1 and (labels.dtype == torch.long + or labels.dtype == torch.int): + self.config.problem_type = 'single_label_classification' + else: + self.config.problem_type = 'multi_label_classification' + + if self.config.problem_type == 'regression': + loss_fct = MSELoss() + if self.num_labels == 1: + loss = loss_fct(pooled_logits.squeeze(), labels.squeeze()) + else: + loss = loss_fct(pooled_logits, labels) + elif self.config.problem_type == 'single_label_classification': + loss_fct = CrossEntropyLoss() + loss = loss_fct( + pooled_logits.view(-1, self.num_labels), labels.view(-1)) + elif self.config.problem_type == 'multi_label_classification': + loss_fct = BCEWithLogitsLoss() + loss = loss_fct(pooled_logits, labels) + if not return_dict: + output = (pooled_logits, ) + transformer_outputs[1:] + return ((loss, ) + output) if loss is not None else output + + return SequenceClassifierOutputWithPast( + loss=loss, + logits=pooled_logits, + past_key_values=transformer_outputs.past_key_values, + hidden_states=transformer_outputs.hidden_states, + attentions=transformer_outputs.attentions, + ) diff --git a/data/xtuner/xtuner/model/transformers_models/deepseek_v2/tokenization_deepseek_fast.py b/data/xtuner/xtuner/model/transformers_models/deepseek_v2/tokenization_deepseek_fast.py new file mode 100644 index 0000000000000000000000000000000000000000..89e3cbb50b61c357deeb3fd37b9eab1188018172 --- /dev/null +++ b/data/xtuner/xtuner/model/transformers_models/deepseek_v2/tokenization_deepseek_fast.py @@ -0,0 +1,37 @@ +from typing import List, Optional, Union + +from transformers.models.llama import LlamaTokenizerFast + + +class DeepseekTokenizerFast(LlamaTokenizerFast): + + def convert_ids_to_tokens( + self, + ids: Union[int, List[int]], + skip_special_tokens: bool = False) -> Union[str, List[str]]: + """Converts a single index or a sequence of indices in a token or a + sequence of tokens, using the vocabulary and added tokens. + + Args: + ids (`int` or `List[int]`): + The token id (or token ids) to convert to tokens. + skip_special_tokens (`bool`, *optional*, defaults to `False`): + Whether or not to remove special tokens in the decoding. + + Returns: + `str` or `List[str]`: The decoded token(s). 
+ """ + if isinstance(ids, int): + return self._convert_id_to_token(ids) + tokens = [] + for index in ids: + index = int(index) + if skip_special_tokens and index in self.all_special_ids: + continue + token = self._tokenizer.id_to_token(index) + tokens.append(token if token is not None else '') + return tokens + + def _convert_id_to_token(self, index: int) -> Optional[str]: + token = self._tokenizer.id_to_token(int(index)) + return token if token is not None else '' diff --git a/data/xtuner/xtuner/model/transformers_models/mixtral/__init__.py b/data/xtuner/xtuner/model/transformers_models/mixtral/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..aabfd89dbbd8cb1b7f3233ecf6f2bd384aaddd03 --- /dev/null +++ b/data/xtuner/xtuner/model/transformers_models/mixtral/__init__.py @@ -0,0 +1,4 @@ +from .configuration_mixtral import MixtralConfig +from .modeling_mixtral import MixtralForCausalLM, MixtralModel + +__all__ = ['MixtralForCausalLM', 'MixtralModel', 'MixtralConfig'] diff --git a/data/xtuner/xtuner/model/transformers_models/mixtral/configuration_mixtral.py b/data/xtuner/xtuner/model/transformers_models/mixtral/configuration_mixtral.py new file mode 100644 index 0000000000000000000000000000000000000000..457aefd479f4cae837e63b3af66c25de52d5ac96 --- /dev/null +++ b/data/xtuner/xtuner/model/transformers_models/mixtral/configuration_mixtral.py @@ -0,0 +1,178 @@ +# Copyright 2023 Mixtral AI and the HuggingFace Inc. team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Mixtral model configuration.""" + +from transformers.configuration_utils import PretrainedConfig +from transformers.utils import logging + +logger = logging.get_logger(__name__) + + +class MixtralConfig(PretrainedConfig): + r""" + This is the configuration class to store the configuration of a [`MixtralModel`]. It is used to instantiate an + Mixtral model according to the specified arguments, defining the model architecture. Instantiating a configuration + with the defaults will yield a similar configuration to that of the Mixtral-7B-v0.1 or Mixtral-7B-Instruct-v0.1. + + [mixtralai/Mixtral-8x7B](https://huggingface.co/mixtralai/Mixtral-8x7B) + [mixtralai/Mixtral-7B-Instruct-v0.1](https://huggingface.co/mixtralai/Mixtral-7B-Instruct-v0.1) + + Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the + documentation from [`PretrainedConfig`] for more information. + + + Args: + vocab_size (`int`, *optional*, defaults to 32000): + Vocabulary size of the Mixtral model. Defines the number of different tokens that can be represented by the + `inputs_ids` passed when calling [`MixtralModel`] + hidden_size (`int`, *optional*, defaults to 4096): + Dimension of the hidden representations. + intermediate_size (`int`, *optional*, defaults to 14336): + Dimension of the MLP representations. + num_hidden_layers (`int`, *optional*, defaults to 32): + Number of hidden layers in the Transformer encoder. 
+        num_attention_heads (`int`, *optional*, defaults to 32):
+            Number of attention heads for each attention layer in the Transformer encoder.
+        num_key_value_heads (`int`, *optional*, defaults to 8):
+            This is the number of key_value heads that should be used to implement Grouped Query Attention. If
+            `num_key_value_heads=num_attention_heads`, the model will use Multi Head Attention (MHA); if
+            `num_key_value_heads=1`, the model will use Multi Query Attention (MQA); otherwise GQA is used. When
+            converting a multi-head checkpoint to a GQA checkpoint, each group key and value head should be constructed
+            by meanpooling all the original heads within that group. For more details check out [this
+            paper](https://arxiv.org/pdf/2305.13245.pdf). If it is not specified, will default to `8`.
+        hidden_act (`str` or `function`, *optional*, defaults to `"silu"`):
+            The non-linear activation function (function or string) in the decoder.
+        max_position_embeddings (`int`, *optional*, defaults to `4096*32`):
+            The maximum sequence length that this model might ever be used with. Mixtral's sliding window attention
+            allows sequences of up to 4096*32 tokens.
+        initializer_range (`float`, *optional*, defaults to 0.02):
+            The standard deviation of the truncated_normal_initializer for initializing all weight matrices.
+        rms_norm_eps (`float`, *optional*, defaults to 1e-05):
+            The epsilon used by the rms normalization layers.
+        use_cache (`bool`, *optional*, defaults to `True`):
+            Whether or not the model should return the last key/values attentions (not used by all models). Only
+            relevant if `config.is_decoder=True`.
+        pad_token_id (`int`, *optional*):
+            The id of the padding token.
+        bos_token_id (`int`, *optional*, defaults to 1):
+            The id of the "beginning-of-sequence" token.
+        eos_token_id (`int`, *optional*, defaults to 2):
+            The id of the "end-of-sequence" token.
+        tie_word_embeddings (`bool`, *optional*, defaults to `False`):
+            Whether the model's input and output word embeddings should be tied.
+        rope_theta (`float`, *optional*, defaults to 1000000.0):
+            The base period of the RoPE embeddings.
+        sliding_window (`int`, *optional*):
+            Sliding window attention window size. If not specified, will default to `4096`.
+        attention_dropout (`float`, *optional*, defaults to 0.0):
+            The dropout ratio for the attention probabilities.
+        num_experts_per_tok (`int`, *optional*, defaults to 2):
+            The number of experts to route per token; this can also be interpreted as the `top_k` routing
+            parameter.
+        num_local_experts (`int`, *optional*, defaults to 8):
+            Number of experts per Sparse MLP layer.
+        output_router_logits (`bool`, *optional*, defaults to `False`):
+            Whether or not the router logits should be returned by the model. Enabling this will also
+            allow the model to output the auxiliary loss. See [here]() for more details
+        router_aux_loss_coef (`float`, *optional*, defaults to 0.001):
+            The aux loss factor for the total loss.
+        router_jitter_noise (`float`, *optional*, defaults to 0.0):
+            Amount of noise to add to the router.
+        moe_implementation (`str`, *optional*, defaults to 'origin'):
+            The implementation of the MoE blocks, either 'origin' or 'shard'.
+        expert_in_one_shard (`int`, *optional*, defaults to None):
+            How many expert models are integrated into one shard. It is used only
+            when `moe_implementation == 'shard'`.
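+
+    `moe_implementation` and `expert_in_one_shard` appear to be xtuner-specific
+    extensions rather than upstream `MixtralConfig` fields; a sharded variant
+    could be requested roughly as in this sketch:
+
+    ```python
+    >>> sharded_config = MixtralConfig(moe_implementation='shard', expert_in_one_shard=2)
+    ```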
+ + ```python + >>> from transformers import MixtralModel, MixtralConfig + + >>> # Initializing a Mixtral 7B style configuration + >>> configuration = MixtralConfig() + + >>> # Initializing a model from the Mixtral 7B style configuration + >>> model = MixtralModel(configuration) + + >>> # Accessing the model configuration + >>> configuration = model.config + ```""" + + model_type = 'mixtral' + keys_to_ignore_at_inference = ['past_key_values'] + + def __init__( + self, + vocab_size=32000, + hidden_size=4096, + intermediate_size=14336, + num_hidden_layers=32, + num_attention_heads=32, + num_key_value_heads=8, + hidden_act='silu', + max_position_embeddings=4096 * 32, + initializer_range=0.02, + rms_norm_eps=1e-5, + use_cache=True, + pad_token_id=None, + bos_token_id=1, + eos_token_id=2, + tie_word_embeddings=False, + rope_theta=1e6, + sliding_window=None, + attention_dropout=0.0, + num_experts_per_tok=2, + num_local_experts=8, + output_router_logits=False, + router_aux_loss_coef=0.001, + router_jitter_noise=0.0, + moe_implementation='origin', + expert_in_one_shard=None, + **kwargs, + ): + self.vocab_size = vocab_size + self.max_position_embeddings = max_position_embeddings + self.hidden_size = hidden_size + self.intermediate_size = intermediate_size + self.num_hidden_layers = num_hidden_layers + self.num_attention_heads = num_attention_heads + self.sliding_window = sliding_window + + # for backward compatibility + if num_key_value_heads is None: + num_key_value_heads = num_attention_heads + + self.num_key_value_heads = num_key_value_heads + self.hidden_act = hidden_act + self.initializer_range = initializer_range + self.rms_norm_eps = rms_norm_eps + self.use_cache = use_cache + self.rope_theta = rope_theta + self.attention_dropout = attention_dropout + + self.num_experts_per_tok = num_experts_per_tok + self.num_local_experts = num_local_experts + self.output_router_logits = output_router_logits + self.router_aux_loss_coef = router_aux_loss_coef + self.router_jitter_noise = router_jitter_noise + + self.moe_implementation = moe_implementation + self.expert_in_one_shard = expert_in_one_shard + + super().__init__( + pad_token_id=pad_token_id, + bos_token_id=bos_token_id, + eos_token_id=eos_token_id, + tie_word_embeddings=tie_word_embeddings, + **kwargs, + ) diff --git a/data/xtuner/xtuner/model/transformers_models/mixtral/modeling_mixtral.py b/data/xtuner/xtuner/model/transformers_models/mixtral/modeling_mixtral.py new file mode 100644 index 0000000000000000000000000000000000000000..94d048fe723cb2179a696fdeb4f698fb3fd870b3 --- /dev/null +++ b/data/xtuner/xtuner/model/transformers_models/mixtral/modeling_mixtral.py @@ -0,0 +1,1821 @@ +# Modified from https://github.com/huggingface/transformers/blob/v4.41.0/src/transformers/models/mixtral/modeling_mixtral.py +"""PyTorch Mixtral model.""" +import inspect +import math +import os +import types +from typing import List, Optional, Tuple, Union + +import torch +import torch.nn.functional as F +import torch.utils.checkpoint +from torch import nn +from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss +from transformers.activations import ACT2FN +from transformers.cache_utils import Cache, DynamicCache +from transformers.modeling_attn_mask_utils import ( + _prepare_4d_causal_attention_mask, + _prepare_4d_causal_attention_mask_for_sdpa) +from transformers.modeling_outputs import (MoeCausalLMOutputWithPast, + MoeModelOutputWithPast, + SequenceClassifierOutputWithPast) +from transformers.modeling_utils import PreTrainedModel +from 
transformers.pytorch_utils import is_torch_greater_or_equal_than_1_13 +from transformers.utils import (add_start_docstrings, + add_start_docstrings_to_model_forward, + is_flash_attn_2_available, + is_flash_attn_greater_or_equal_2_10, logging, + replace_return_docstrings) +from transformers.utils.import_utils import is_torch_fx_available + +from xtuner.utils import load_state_dict_into_model +from .configuration_mixtral import MixtralConfig + +if is_flash_attn_2_available(): + from flash_attn import flash_attn_func, flash_attn_varlen_func + from flash_attn.bert_padding import pad_input # noqa + from flash_attn.bert_padding import index_first_axis, unpad_input + + _flash_supports_window_size = 'window_size' in list( + inspect.signature(flash_attn_func).parameters) + +# This makes `_prepare_4d_causal_attention_mask` a leaf function in the FX graph. +# It means that the function will not be traced through and simply appear as a node in the graph. +if is_torch_fx_available(): + if not is_torch_greater_or_equal_than_1_13: + import torch.fx + + _prepare_4d_causal_attention_mask = torch.fx.wrap( + _prepare_4d_causal_attention_mask) + +logger = logging.get_logger(__name__) + +_CONFIG_FOR_DOC = 'MixtralConfig' + + +def load_balancing_loss_func( + gate_logits: torch.Tensor, + num_experts: torch.Tensor = None, + top_k=2, + attention_mask: Optional[torch.Tensor] = None) -> float: + r""" + Computes auxiliary load balancing loss as in Switch Transformer - implemented in Pytorch. + + See Switch Transformer (https://arxiv.org/abs/2101.03961) for more details. This function implements the loss + function presented in equations (4) - (6) of the paper. It aims at penalizing cases where the routing between + experts is too unbalanced. + + Args: + gate_logits (Union[`torch.Tensor`, Tuple[torch.Tensor]): + Logits from the `gate`, should be a tuple of model.config.num_hidden_layers tensors of + shape [batch_size X sequence_length, num_experts]. + attention_mask (`torch.Tensor`, None): + The attention_mask used in forward function + shape [batch_size X sequence_length] if not None. + num_experts (`int`, *optional*): + Number of experts + + Returns: + The auxiliary loss. 
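+
+    Example (an illustrative sketch with random logits; the layer/token/expert
+    counts are assumptions, not values taken from a real model):
+
+    ```python
+    >>> import torch
+    >>> gate_logits = tuple(torch.randn(8, 4) for _ in range(2))  # 2 layers, 8 tokens, 4 experts
+    >>> aux_loss = load_balancing_loss_func(gate_logits, num_experts=4, top_k=2)
+    ```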
+ """ + if gate_logits is None or not isinstance(gate_logits, tuple): + return 0 + + if isinstance(gate_logits, tuple): + compute_device = gate_logits[0].device + concatenated_gate_logits = torch.cat( + [layer_gate.to(compute_device) for layer_gate in gate_logits], + dim=0) + + routing_weights = torch.nn.functional.softmax( + concatenated_gate_logits, dim=-1) + + _, selected_experts = torch.topk(routing_weights, top_k, dim=-1) + + expert_mask = torch.nn.functional.one_hot(selected_experts, num_experts) + + if attention_mask is None: + # Compute the percentage of tokens routed to each experts + tokens_per_expert = torch.mean(expert_mask.float(), dim=0) + + # Compute the average probability of routing to these experts + router_prob_per_expert = torch.mean(routing_weights, dim=0) + else: + batch_size, sequence_length = attention_mask.shape + num_hidden_layers = concatenated_gate_logits.shape[0] // ( + batch_size * sequence_length) + + # Compute the mask that masks all padding tokens as 0 with the same shape of expert_mask + expert_attention_mask = ( + attention_mask[None, :, :, None, None].expand( + (num_hidden_layers, batch_size, sequence_length, top_k, + num_experts)).reshape(-1, top_k, + num_experts).to(compute_device)) + + # Compute the percentage of tokens routed to each experts + tokens_per_expert = torch.sum( + expert_mask.float() * expert_attention_mask, dim=0) / torch.sum( + expert_attention_mask, dim=0) + + # Compute the mask that masks all padding tokens as 0 with the same shape of tokens_per_expert + router_per_expert_attention_mask = ( + attention_mask[None, :, :, None].expand( + (num_hidden_layers, batch_size, sequence_length, + num_experts)).reshape(-1, num_experts).to(compute_device)) + + # Compute the average probability of routing to these experts + router_prob_per_expert = torch.sum( + routing_weights * router_per_expert_attention_mask, + dim=0) / torch.sum( + router_per_expert_attention_mask, dim=0) + + overall_loss = torch.sum(tokens_per_expert * + router_prob_per_expert.unsqueeze(0)) + return overall_loss * num_experts + + +# Copied from transformers.models.llama.modeling_llama._get_unpad_data +def _get_unpad_data(attention_mask): + seqlens_in_batch = attention_mask.sum(dim=-1, dtype=torch.int32) + indices = torch.nonzero(attention_mask.flatten(), as_tuple=False).flatten() + max_seqlen_in_batch = seqlens_in_batch.max().item() + cu_seqlens = F.pad( + torch.cumsum(seqlens_in_batch, dim=0, dtype=torch.int32), (1, 0)) + return ( + indices, + cu_seqlens, + max_seqlen_in_batch, + ) + + +# Copied from transformers.models.llama.modeling_llama.LlamaRMSNorm with Llama->Mixtral +class MixtralRMSNorm(nn.Module): + + def __init__(self, hidden_size, eps=1e-6): + """MixtralRMSNorm is equivalent to T5LayerNorm.""" + super().__init__() + self.weight = nn.Parameter(torch.ones(hidden_size)) + self.variance_epsilon = eps + + def forward(self, hidden_states): + input_dtype = hidden_states.dtype + hidden_states = hidden_states.to(torch.float32) + variance = hidden_states.pow(2).mean(-1, keepdim=True) + hidden_states = hidden_states * torch.rsqrt(variance + + self.variance_epsilon) + return self.weight * hidden_states.to(input_dtype) + + +# Copied from transformers.models.mistral.modeling_mistral.MistralRotaryEmbedding with Mistral->Mixtral +class MixtralRotaryEmbedding(nn.Module): + + def __init__(self, + dim, + max_position_embeddings=2048, + base=10000, + device=None): + super().__init__() + + self.dim = dim + self.max_position_embeddings = max_position_embeddings + self.base = base + 
inv_freq = 1.0 / ( + self.base + **(torch.arange(0, self.dim, 2, + dtype=torch.int64).float().to(device) / self.dim)) + self.register_buffer('inv_freq', inv_freq, persistent=False) + + # Build here to make `torch.jit.trace` work. + self._set_cos_sin_cache( + seq_len=max_position_embeddings, + device=self.inv_freq.device, + dtype=torch.get_default_dtype()) + + def _set_cos_sin_cache(self, seq_len, device, dtype): + self.max_seq_len_cached = seq_len + t = torch.arange( + self.max_seq_len_cached, device=device, + dtype=torch.int64).type_as(self.inv_freq) + + freqs = torch.outer(t, self.inv_freq) + # Different from paper, but it uses a different permutation in order to obtain the same calculation + emb = torch.cat((freqs, freqs), dim=-1) + self.register_buffer( + 'cos_cached', emb.cos().to(dtype), persistent=False) + self.register_buffer( + 'sin_cached', emb.sin().to(dtype), persistent=False) + + def forward(self, x, seq_len=None): + # x: [bs, num_attention_heads, seq_len, head_size] + if seq_len > self.max_seq_len_cached: + self._set_cos_sin_cache( + seq_len=seq_len, device=x.device, dtype=x.dtype) + + return ( + self.cos_cached[:seq_len].to(dtype=x.dtype), + self.sin_cached[:seq_len].to(dtype=x.dtype), + ) + + +# Copied from transformers.models.llama.modeling_llama.rotate_half +def rotate_half(x): + """Rotates half the hidden dims of the input.""" + x1 = x[..., :x.shape[-1] // 2] + x2 = x[..., x.shape[-1] // 2:] + return torch.cat((-x2, x1), dim=-1) + + +# Copied from transformers.models.mistral.modeling_mistral.apply_rotary_pos_emb +def apply_rotary_pos_emb(q, k, cos, sin, position_ids, unsqueeze_dim=1): + """Applies Rotary Position Embedding to the query and key tensors. + + Args: + q (`torch.Tensor`): The query tensor. + k (`torch.Tensor`): The key tensor. + cos (`torch.Tensor`): The cosine part of the rotary embedding. + sin (`torch.Tensor`): The sine part of the rotary embedding. + position_ids (`torch.Tensor`): + The position indices of the tokens corresponding to the query and key tensors. For example, this can be + used to pass offsetted position ids when working with a KV-cache. + unsqueeze_dim (`int`, *optional*, defaults to 1): + The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and + sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note + that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and + k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes + cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have + the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2. + Returns: + `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding. + """ + cos = cos[position_ids].unsqueeze(unsqueeze_dim) + sin = sin[position_ids].unsqueeze(unsqueeze_dim) + q_embed = (q * cos) + (rotate_half(q) * sin) + k_embed = (k * cos) + (rotate_half(k) * sin) + return q_embed, k_embed + + +# Copied from transformers.models.llama.modeling_llama.repeat_kv +def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor: + """This is the equivalent of torch.repeat_interleave(x, dim=1, + repeats=n_rep). 
+ + The hidden states go from (batch, num_key_value_heads, seqlen, head_dim) to + (batch, num_attention_heads, seqlen, head_dim) + """ + batch, num_key_value_heads, slen, head_dim = hidden_states.shape + if n_rep == 1: + return hidden_states + hidden_states = hidden_states[:, :, + None, :, :].expand(batch, + num_key_value_heads, + n_rep, slen, head_dim) + return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, + head_dim) + + +# Copied from transformers.models.mistral.modeling_mistral.MistralAttention with Mistral->Mixtral +class MixtralAttention(nn.Module): + """Multi-headed attention from 'Attention Is All You Need' paper. + + Modified to use sliding window attention: Longformer and "Generating Long + Sequences with Sparse Transformers". + """ + + def __init__(self, config: MixtralConfig, layer_idx: Optional[int] = None): + super().__init__() + self.config = config + self.layer_idx = layer_idx + if layer_idx is None: + logger.warning_once( + f'Instantiating {self.__class__.__name__} without passing a `layer_idx` is not recommended and will ' + 'lead to errors during the forward call if caching is used. Please make sure to provide a `layer_idx` ' + 'when creating this class.') + + self.hidden_size = config.hidden_size + self.num_heads = config.num_attention_heads + self.head_dim = self.hidden_size // self.num_heads + self.num_key_value_heads = config.num_key_value_heads + self.num_key_value_groups = self.num_heads // self.num_key_value_heads + self.max_position_embeddings = config.max_position_embeddings + self.rope_theta = config.rope_theta + self.is_causal = True + self.attention_dropout = config.attention_dropout + + if (self.head_dim * self.num_heads) != self.hidden_size: + raise ValueError( + f'hidden_size must be divisible by num_heads (got `hidden_size`: {self.hidden_size}' + f' and `num_heads`: {self.num_heads}).') + self.q_proj = nn.Linear( + self.hidden_size, self.num_heads * self.head_dim, bias=False) + self.k_proj = nn.Linear( + self.hidden_size, + self.num_key_value_heads * self.head_dim, + bias=False) + self.v_proj = nn.Linear( + self.hidden_size, + self.num_key_value_heads * self.head_dim, + bias=False) + self.o_proj = nn.Linear( + self.num_heads * self.head_dim, self.hidden_size, bias=False) + + self.rotary_emb = MixtralRotaryEmbedding( + self.head_dim, + max_position_embeddings=self.max_position_embeddings, + base=self.rope_theta, + ) + + def _shape(self, tensor: torch.Tensor, seq_len: int, bsz: int): + return tensor.view(bsz, seq_len, self.num_heads, + self.head_dim).transpose(1, 2).contiguous() + + def forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Cache] = None, + output_attentions: bool = False, + use_cache: bool = False, + ) -> Tuple[torch.Tensor, Optional[torch.Tensor], + Optional[Tuple[torch.Tensor]]]: + bsz, q_len, _ = hidden_states.size() + + query_states = self.q_proj(hidden_states) + key_states = self.k_proj(hidden_states) + value_states = self.v_proj(hidden_states) + + query_states = query_states.view(bsz, q_len, self.num_heads, + self.head_dim).transpose(1, 2) + key_states = key_states.view(bsz, q_len, self.num_key_value_heads, + self.head_dim).transpose(1, 2) + value_states = value_states.view(bsz, q_len, self.num_key_value_heads, + self.head_dim).transpose(1, 2) + + kv_seq_len = key_states.shape[-2] + if past_key_value is not None: + if self.layer_idx is None: + raise ValueError( + f'The cache structure has changed 
since version v4.36. If you are using {self.__class__.__name__} ' + 'for auto-regressive decoding with k/v caching, please make sure to initialize the attention class ' + 'with a layer index.') + kv_seq_len += past_key_value.get_usable_length( + kv_seq_len, self.layer_idx) + cos, sin = self.rotary_emb(value_states, seq_len=kv_seq_len) + query_states, key_states = apply_rotary_pos_emb( + query_states, key_states, cos, sin, position_ids) + + if past_key_value is not None: + cache_kwargs = {'sin': sin, 'cos': cos} # Specific to RoPE models + key_states, value_states = past_key_value.update( + key_states, value_states, self.layer_idx, cache_kwargs) + + # repeat k/v heads if n_kv_heads < n_heads + key_states = repeat_kv(key_states, self.num_key_value_groups) + value_states = repeat_kv(value_states, self.num_key_value_groups) + + attn_weights = torch.matmul(query_states, key_states.transpose( + 2, 3)) / math.sqrt(self.head_dim) + + if attn_weights.size() != (bsz, self.num_heads, q_len, kv_seq_len): + raise ValueError( + f'Attention weights should be of size {(bsz, self.num_heads, q_len, kv_seq_len)}, but is' + f' {attn_weights.size()}') + + if attention_mask is not None: + if attention_mask.size() != (bsz, 1, q_len, kv_seq_len): + raise ValueError( + f'Attention mask should be of size {(bsz, 1, q_len, kv_seq_len)}, but is {attention_mask.size()}' + ) + + attn_weights = attn_weights + attention_mask + + # upcast attention to fp32 + attn_weights = nn.functional.softmax( + attn_weights, dim=-1, dtype=torch.float32).to(query_states.dtype) + attn_weights = nn.functional.dropout( + attn_weights, p=self.attention_dropout, training=self.training) + attn_output = torch.matmul(attn_weights, value_states) + + if attn_output.size() != (bsz, self.num_heads, q_len, self.head_dim): + raise ValueError( + f'`attn_output` should be of size {(bsz, self.num_heads, q_len, self.head_dim)}, but is' + f' {attn_output.size()}') + + attn_output = attn_output.transpose(1, 2).contiguous() + attn_output = attn_output.reshape(bsz, q_len, self.hidden_size) + + attn_output = self.o_proj(attn_output) + + if not output_attentions: + attn_weights = None + + return attn_output, attn_weights, past_key_value + + +# Copied from transformers.models.mistral.modeling_mistral.MistralFlashAttention2 with Mistral->Mixtral +class MixtralFlashAttention2(MixtralAttention): + """Mixtral flash attention module. + + This module inherits from `MixtralAttention` as the weights of the module + stays untouched. The only required change would be on the forward pass + where it needs to correctly call the public API of flash attention and deal + with padding tokens in case the input contains any of them. + """ + + # Copied from transformers.models.llama.modeling_llama.LlamaFlashAttention2.__init__ + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + # TODO: Should be removed once Flash Attention for RoCm is bumped to 2.1. + # flash_attn<2.1 generates top-left aligned causal mask, while what is needed here is bottom-right alignment, that was made default for flash_attn>=2.1. This attribute is used to handle this difference. Reference: https://github.com/Dao-AILab/flash-attention/releases/tag/v2.1.0. + # Beware that with flash_attn<2.1, using q_seqlen != k_seqlen (except for the case q_seqlen == 1) produces a wrong mask (top-left). 
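+        # NOTE: as in the DeepseekV2 module above, `_flash_attention_forward`
+        # consults this flag; the Mixtral version additionally slices the KV
+        # cache once it grows past `config.sliding_window`.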
+ self._flash_attn_uses_top_left_mask = not is_flash_attn_greater_or_equal_2_10( + ) + + def forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Cache] = None, + output_attentions: bool = False, + use_cache: bool = False, + ): + bsz, q_len, _ = hidden_states.size() + + query_states = self.q_proj(hidden_states) + key_states = self.k_proj(hidden_states) + value_states = self.v_proj(hidden_states) + + query_states = query_states.view(bsz, q_len, self.num_heads, + self.head_dim).transpose(1, 2) + key_states = key_states.view(bsz, q_len, self.num_key_value_heads, + self.head_dim).transpose(1, 2) + value_states = value_states.view(bsz, q_len, self.num_key_value_heads, + self.head_dim).transpose(1, 2) + + kv_seq_len = key_states.shape[-2] + if past_key_value is not None: + if self.layer_idx is None: + raise ValueError( + f'The cache structure has changed since version v4.36. If you are using {self.__class__.__name__} ' + 'for auto-regressive decoding with k/v caching, please make sure to initialize the attention class ' + 'with a layer index.') + kv_seq_len += past_key_value.get_usable_length( + kv_seq_len, self.layer_idx) + + # Because the input can be padded, the absolute sequence length depends on the max position id. + rotary_seq_len = max(kv_seq_len, position_ids[:, -1].max().item()) + 1 + cos, sin = self.rotary_emb(value_states, seq_len=rotary_seq_len) + + query_states, key_states = apply_rotary_pos_emb( + query_states, key_states, cos, sin, position_ids) + + use_sliding_windows = ( + _flash_supports_window_size + and getattr(self.config, 'sliding_window', None) is not None + and kv_seq_len > self.config.sliding_window) + + if not _flash_supports_window_size: + logger.warning_once( + 'The current flash attention version does not support sliding window attention, for a more memory efficient implementation' + ' make sure to upgrade flash-attn library.') + + if past_key_value is not None: + # Activate slicing cache only if the config has a value `sliding_windows` attribute + cache_has_contents = past_key_value.get_seq_length( + self.layer_idx) > 0 + if (getattr(self.config, 'sliding_window', None) is not None + and kv_seq_len > self.config.sliding_window + and cache_has_contents): + slicing_tokens = 1 - self.config.sliding_window + + past_key = past_key_value[self.layer_idx][0] + past_value = past_key_value[self.layer_idx][1] + + past_key = past_key[:, :, slicing_tokens:, :].contiguous() + past_value = past_value[:, :, slicing_tokens:, :].contiguous() + + if past_key.shape[-2] != self.config.sliding_window - 1: + raise ValueError( + f'past key must have a shape of (`batch_size, num_heads, self.config.sliding_window-1, head_dim`), got' + f' {past_key.shape}') + + if attention_mask is not None: + attention_mask = attention_mask[:, slicing_tokens:] + attention_mask = torch.cat([ + attention_mask, + torch.ones_like(attention_mask[:, -1:]) + ], + dim=-1) + + cache_kwargs = {'sin': sin, 'cos': cos} # Specific to RoPE models + key_states, value_states = past_key_value.update( + key_states, value_states, self.layer_idx, cache_kwargs) + + # repeat k/v heads if n_kv_heads < n_heads + key_states = repeat_kv(key_states, self.num_key_value_groups) + value_states = repeat_kv(value_states, self.num_key_value_groups) + dropout_rate = 0.0 if not self.training else self.attention_dropout + + # In PEFT, usually we cast the layer norms in float32 for training stability reasons + # therefore 
the input hidden states get silently cast to float32. Hence, we need
+        # to cast them back to the correct dtype just to be sure everything works as expected.
+        input_dtype = query_states.dtype
+        if input_dtype == torch.float32:
+            if torch.is_autocast_enabled():
+                target_dtype = torch.get_autocast_gpu_dtype()
+            # Handle the case where the model is quantized
+            elif hasattr(self.config, '_pre_quantization_dtype'):
+                target_dtype = self.config._pre_quantization_dtype
+            else:
+                target_dtype = self.q_proj.weight.dtype
+
+            logger.warning_once(
+                'The input hidden states seem to have been silently cast to '
+                'float32; this is likely because you upcast the embedding or '
+                'layer norm layers to float32. The input will be cast back to '
+                f'{target_dtype}.')
+
+            query_states = query_states.to(target_dtype)
+            key_states = key_states.to(target_dtype)
+            value_states = value_states.to(target_dtype)
+
+        # Reshape to the expected shape for Flash Attention
+        query_states = query_states.transpose(1, 2)
+        key_states = key_states.transpose(1, 2)
+        value_states = value_states.transpose(1, 2)
+
+        attn_output = self._flash_attention_forward(
+            query_states,
+            key_states,
+            value_states,
+            attention_mask,
+            q_len,
+            dropout=dropout_rate,
+            use_sliding_windows=use_sliding_windows,
+        )
+
+        attn_output = attn_output.reshape(bsz, q_len,
+                                          self.hidden_size).contiguous()
+        attn_output = self.o_proj(attn_output)
+
+        # Flash attention does not expose attention probabilities.
+        attn_weights = None
+
+        return attn_output, attn_weights, past_key_value
+
+    def _flash_attention_forward(
+        self,
+        query_states,
+        key_states,
+        value_states,
+        attention_mask,
+        query_length,
+        dropout=0.0,
+        softmax_scale=None,
+        use_sliding_windows=False,
+    ):
+        """
+        Calls the forward method of Flash Attention. If the input hidden states contain at least one padding token,
+        the input is first unpadded, the attention scores are computed, and the final attention scores are padded back.
+
+        Args:
+            query_states (`torch.Tensor`):
+                Input query states to be passed to Flash Attention API
+            key_states (`torch.Tensor`):
+                Input key states to be passed to Flash Attention API
+            value_states (`torch.Tensor`):
+                Input value states to be passed to Flash Attention API
+            attention_mask (`torch.Tensor`):
+                The padding mask - corresponds to a tensor of size `(batch_size, seq_len)` where 0 stands for the
+                position of padding tokens and 1 for the position of non-padding tokens.
+            dropout (`float`):
+                Attention dropout
+            softmax_scale (`float`, *optional*):
+                The scaling of QK^T before applying softmax. Defaults to `1 / sqrt(head_dim)`.
+            use_sliding_windows (`bool`, *optional*):
+                Whether to activate sliding window attention.
+        """
+        if not self._flash_attn_uses_top_left_mask:
+            causal = self.is_causal
+        else:
+            # TODO: Remove the `query_length != 1` check once Flash Attention for RoCm is bumped to 2.1. For details, please see the comment in LlamaFlashAttention2 __init__. 
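+            # With a top-left aligned mask, `causal=True` at q_len == 1 would
+            # restrict the single decoding query to key 0 only. Disabling
+            # causality is equivalent here, since one query attending to all
+            # past keys is exactly what a bottom-right causal mask yields.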
+ causal = self.is_causal and query_length != 1 + + # Contains at least one padding token in the sequence + if attention_mask is not None: + batch_size = query_states.shape[0] + query_states, key_states, value_states, indices_q, cu_seq_lens, max_seq_lens = self._upad_input( + query_states, key_states, value_states, attention_mask, + query_length) + + cu_seqlens_q, cu_seqlens_k = cu_seq_lens + max_seqlen_in_batch_q, max_seqlen_in_batch_k = max_seq_lens + + if not use_sliding_windows: + attn_output_unpad = flash_attn_varlen_func( + query_states, + key_states, + value_states, + cu_seqlens_q=cu_seqlens_q, + cu_seqlens_k=cu_seqlens_k, + max_seqlen_q=max_seqlen_in_batch_q, + max_seqlen_k=max_seqlen_in_batch_k, + dropout_p=dropout, + softmax_scale=softmax_scale, + causal=causal, + ) + else: + attn_output_unpad = flash_attn_varlen_func( + query_states, + key_states, + value_states, + cu_seqlens_q=cu_seqlens_q, + cu_seqlens_k=cu_seqlens_k, + max_seqlen_q=max_seqlen_in_batch_q, + max_seqlen_k=max_seqlen_in_batch_k, + dropout_p=dropout, + softmax_scale=softmax_scale, + causal=causal, + window_size=(self.config.sliding_window, + self.config.sliding_window), + ) + + attn_output = pad_input(attn_output_unpad, indices_q, batch_size, + query_length) + else: + if not use_sliding_windows: + attn_output = flash_attn_func( + query_states, + key_states, + value_states, + dropout, + softmax_scale=softmax_scale, + causal=causal, + ) + else: + attn_output = flash_attn_func( + query_states, + key_states, + value_states, + dropout, + softmax_scale=softmax_scale, + causal=causal, + window_size=(self.config.sliding_window, + self.config.sliding_window), + ) + + return attn_output + + def _upad_input(self, query_layer, key_layer, value_layer, attention_mask, + query_length): + batch_size, kv_seq_len, num_heads, head_dim = key_layer.shape + + # On the first iteration we need to properly re-create the padding mask + # by slicing it on the proper place + if kv_seq_len != attention_mask.shape[-1]: + attention_mask_num_tokens = attention_mask.shape[-1] + attention_mask = attention_mask[:, attention_mask_num_tokens - + kv_seq_len:] + + indices_k, cu_seqlens_k, max_seqlen_in_batch_k = _get_unpad_data( + attention_mask) + + key_layer = index_first_axis( + key_layer.reshape(batch_size * kv_seq_len, num_heads, head_dim), + indices_k) + value_layer = index_first_axis( + value_layer.reshape(batch_size * kv_seq_len, num_heads, head_dim), + indices_k) + + if query_length == kv_seq_len: + query_layer = index_first_axis( + query_layer.reshape(batch_size * kv_seq_len, num_heads, + head_dim), indices_k) + cu_seqlens_q = cu_seqlens_k + max_seqlen_in_batch_q = max_seqlen_in_batch_k + indices_q = indices_k + elif query_length == 1: + max_seqlen_in_batch_q = 1 + cu_seqlens_q = torch.arange( + batch_size + 1, dtype=torch.int32, device=query_layer.device + ) # There is a memcpy here, that is very bad. + indices_q = cu_seqlens_q[:-1] + query_layer = query_layer.squeeze(1) + else: + # The -q_len: slice assumes left padding. 
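+            # For example, a left-padded mask [[0, 0, 1, 1, 1]] with
+            # query_length == 2 keeps [[1, 1]]: the trailing columns line up
+            # with the newest (query) positions, which right padding would
+            # not guarantee.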
+ attention_mask = attention_mask[:, -query_length:] + query_layer, indices_q, cu_seqlens_q, max_seqlen_in_batch_q = unpad_input( + query_layer, attention_mask) + + return ( + query_layer, + key_layer, + value_layer, + indices_q, + (cu_seqlens_q, cu_seqlens_k), + (max_seqlen_in_batch_q, max_seqlen_in_batch_k), + ) + + +# Copied from transformers.models.mistral.modeling_mistral.MistralSdpaAttention with Mistral->Mixtral +class MixtralSdpaAttention(MixtralAttention): + """Mixtral attention module using + torch.nn.functional.scaled_dot_product_attention. + + This module inherits from `MixtralAttention` as the weights of the module + stays untouched. The only changes are on the forward pass to adapt to SDPA + API. + """ + + # Adapted from MixtralAttention.forward + def forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Cache] = None, + output_attentions: bool = False, + use_cache: bool = False, + ) -> Tuple[torch.Tensor, Optional[torch.Tensor], + Optional[Tuple[torch.Tensor]]]: + if output_attentions: + # TODO: Improve this warning with e.g. `model.config.attn_implementation = "manual"` once this is implemented. + logger.warning_once( + 'MixtralModel is using MixtralSdpaAttention, but `torch.nn.functional.scaled_dot_product_attention` does not support `output_attentions=True`. Falling back to the manual attention implementation, ' + 'but specifying the manual implementation will be required from Transformers version v5.0.0 onwards. This warning can be removed using the argument `attn_implementation="eager"` when loading the model.' + ) + return super().forward( + hidden_states=hidden_states, + attention_mask=attention_mask, + position_ids=position_ids, + past_key_value=past_key_value, + output_attentions=output_attentions, + use_cache=use_cache, + ) + + bsz, q_len, _ = hidden_states.size() + + query_states = self.q_proj(hidden_states) + key_states = self.k_proj(hidden_states) + value_states = self.v_proj(hidden_states) + + query_states = query_states.view(bsz, q_len, self.num_heads, + self.head_dim).transpose(1, 2) + key_states = key_states.view(bsz, q_len, self.num_key_value_heads, + self.head_dim).transpose(1, 2) + value_states = value_states.view(bsz, q_len, self.num_key_value_heads, + self.head_dim).transpose(1, 2) + + kv_seq_len = key_states.shape[-2] + if past_key_value is not None: + kv_seq_len += past_key_value.get_usable_length( + kv_seq_len, self.layer_idx) + cos, sin = self.rotary_emb(value_states, seq_len=kv_seq_len) + + query_states, key_states = apply_rotary_pos_emb( + query_states, key_states, cos, sin, position_ids) + + if past_key_value is not None: + cache_kwargs = {'sin': sin, 'cos': cos} # Specific to RoPE models + key_states, value_states = past_key_value.update( + key_states, value_states, self.layer_idx, cache_kwargs) + + key_states = repeat_kv(key_states, self.num_key_value_groups) + value_states = repeat_kv(value_states, self.num_key_value_groups) + + if attention_mask is not None: + if attention_mask.size() != (bsz, 1, q_len, kv_seq_len): + raise ValueError( + f'Attention mask should be of size {(bsz, 1, q_len, kv_seq_len)}, but is {attention_mask.size()}' + ) + + # SDPA with memory-efficient backend is currently (torch==2.1.2) bugged with non-contiguous inputs with custom attn_mask, + # Reference: https://github.com/pytorch/pytorch/issues/112577. 
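+        # The earlier `.transpose(1, 2)` calls return non-contiguous views, so
+        # materialize the tensors before dispatching to SDPA on CUDA.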
+ if query_states.device.type == 'cuda' and attention_mask is not None: + query_states = query_states.contiguous() + key_states = key_states.contiguous() + value_states = value_states.contiguous() + + attn_output = torch.nn.functional.scaled_dot_product_attention( + query_states, + key_states, + value_states, + attn_mask=attention_mask, + dropout_p=self.attention_dropout if self.training else 0.0, + # The q_len > 1 is necessary to match with AttentionMaskConverter.to_causal_4d that does not create a causal mask in case q_len == 1. + is_causal=self.is_causal and attention_mask is None and q_len > 1, + ) + + attn_output = attn_output.transpose(1, 2).contiguous() + attn_output = attn_output.view(bsz, q_len, self.hidden_size) + + attn_output = self.o_proj(attn_output) + + return attn_output, None, past_key_value + + +MIXTRAL_ATTENTION_CLASSES = { + 'eager': MixtralAttention, + 'flash_attention_2': MixtralFlashAttention2, + 'sdpa': MixtralSdpaAttention, +} + + +class MixtralBlockSparseTop2MLP(nn.Module): + + def __init__(self, config: MixtralConfig): + super().__init__() + self.ffn_dim = config.intermediate_size + self.hidden_dim = config.hidden_size + + self.w1 = nn.Linear(self.hidden_dim, self.ffn_dim, bias=False) + self.w2 = nn.Linear(self.ffn_dim, self.hidden_dim, bias=False) + self.w3 = nn.Linear(self.hidden_dim, self.ffn_dim, bias=False) + + self.act_fn = ACT2FN[config.hidden_act] + + def forward(self, hidden_states): + current_hidden_states = self.act_fn( + self.w1(hidden_states)) * self.w3(hidden_states) + current_hidden_states = self.w2(current_hidden_states) + return current_hidden_states + + +class MixtralSparseMoeBlock(nn.Module): + """This implementation is strictly equivalent to standard MoE with full + capacity (no dropped tokens). + + It's faster since it formulates MoE operations in terms of block-sparse + operations to accommodate imbalanced assignments of tokens to experts, + whereas standard MoE either (1) drop tokens at the cost of reduced + performance or (2) set capacity factor to number of experts and thus waste + computation and memory on padding. 
+    """
+
+    def __init__(self, config):
+        super().__init__()
+        self.hidden_dim = config.hidden_size
+        self.ffn_dim = config.intermediate_size
+        self.num_experts = config.num_local_experts
+        self.top_k = config.num_experts_per_tok
+
+        # gating
+        self.gate = nn.Linear(self.hidden_dim, self.num_experts, bias=False)
+
+        self.experts = nn.ModuleList([
+            MixtralBlockSparseTop2MLP(config) for _ in range(self.num_experts)
+        ])
+
+        # Jitter parameters
+        self.jitter_noise = config.router_jitter_noise
+
+    def forward(self, hidden_states: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:
+        """Route every token to its `top_k` experts and combine the expert
+        outputs, weighted by the renormalized router probabilities."""
+        batch_size, sequence_length, hidden_dim = hidden_states.shape
+        if self.training and self.jitter_noise > 0:
+            hidden_states *= torch.empty_like(hidden_states).uniform_(
+                1.0 - self.jitter_noise, 1.0 + self.jitter_noise)
+        hidden_states = hidden_states.view(-1, hidden_dim)
+        # router_logits: (batch * sequence_length, n_experts)
+        router_logits = self.gate(hidden_states)
+
+        routing_weights = F.softmax(router_logits, dim=1, dtype=torch.float)
+        routing_weights, selected_experts = torch.topk(
+            routing_weights, self.top_k, dim=-1)
+        routing_weights /= routing_weights.sum(dim=-1, keepdim=True)
+        # we cast back to the input dtype
+        routing_weights = routing_weights.to(hidden_states.dtype)
+
+        final_hidden_states = torch.zeros(
+            (batch_size * sequence_length, hidden_dim),
+            dtype=hidden_states.dtype,
+            device=hidden_states.device)
+
+        # One-hot encode the selected experts to create an expert mask;
+        # this will be used to easily index which expert is going to be solicited
+        expert_mask = torch.nn.functional.one_hot(
+            selected_experts, num_classes=self.num_experts).permute(2, 1, 0)
+
+        # Loop over all available experts in the model and perform the computation on each expert
+        for expert_idx in range(self.num_experts):
+            expert_layer = self.experts[expert_idx]
+            idx, top_x = torch.where(expert_mask[expert_idx])
+
+            # Index the correct hidden states and compute the expert hidden state for
+            # the current expert. We need to make sure to multiply the output hidden
+            # states by `routing_weights` on the corresponding tokens (top-1 and top-2)
+            current_state = hidden_states[None, top_x].reshape(-1, hidden_dim)
+            current_hidden_states = expert_layer(
+                current_state) * routing_weights[top_x, idx, None]
+
+            # However, `index_add_` only supports torch tensors for indexing, so we'll use
+            # the `top_x` tensor here.
+            final_hidden_states.index_add_(
+                0, top_x, current_hidden_states.to(hidden_states.dtype))
+        final_hidden_states = final_hidden_states.reshape(
+            batch_size, sequence_length, hidden_dim)
+        return final_hidden_states, router_logits
+
+
+class ExpertShard(nn.Module):
+    """A group of `expert_in_one_shard` experts fused into two stacked
+    parameter tensors. `w1w3` stacks the gate (w1) and up (w3) projections of
+    each expert so that a single matmul produces both; `chunk(2)` then splits
+    them again."""
+
+    def __init__(self, config, expert_in_one_shard=1):
+        super().__init__()
+        self.w1w3 = nn.Parameter(
+            torch.empty(expert_in_one_shard, config.intermediate_size * 2,
+                        config.hidden_size))
+        self.w2 = nn.Parameter(
+            torch.empty(expert_in_one_shard, config.hidden_size,
+                        config.intermediate_size))
+        self.act = ACT2FN[config.hidden_act]
+        self.expert_in_one_shard = expert_in_one_shard
+
+    def forward(self, hidden_states, expert_mask, routing_weights,
+                final_hidden_states):
+        hidden_dim = hidden_states.shape[-1]
+        for expert_idx in range(self.expert_in_one_shard):
+            idx, top_x = torch.where(expert_mask[expert_idx])
+            current_state = hidden_states[None, top_x].reshape(-1, hidden_dim)
+
+            w1w3 = self.w1w3[expert_idx]
+            w2 = self.w2[expert_idx]
+            gate_up_out = torch.matmul(current_state, w1w3.T)
+            gate_out, up_out = gate_up_out.chunk(2, dim=-1)
+            gate_out = self.act(gate_out)
+            out = gate_out * up_out
+            out = torch.matmul(out, w2.T)
+
+            current_hidden_states = out * routing_weights[top_x, idx, None]
+            final_hidden_states.index_add_(
+                0, top_x, current_hidden_states.to(hidden_states.dtype))
+        return final_hidden_states
+
+
+class MixtralSparseShardMoeBlock(nn.Module):
+    """Variant of `MixtralSparseMoeBlock` whose experts are grouped into
+    shards of `config.expert_in_one_shard` experts each (see `ExpertShard`),
+    reducing the number of separate parameter tensors."""
+
+    def __init__(self, config):
+        super().__init__()
+        self.hidden_dim = config.hidden_size
+        self.ffn_dim = config.intermediate_size
+        self.num_experts = config.num_local_experts
+        self.top_k = config.num_experts_per_tok
+
+        # gating
+        self.gate = nn.Linear(self.hidden_dim, self.num_experts, bias=False)
+
+        expert_in_one_shard = config.expert_in_one_shard
+        assert config.num_local_experts % expert_in_one_shard == 0, \
+            ('num_local_experts should be divisible by expert_in_one_shard, but got '
+             f'num_local_experts = {config.num_local_experts} and expert_in_one_shard = {expert_in_one_shard}')
+        self.shard_num = config.num_local_experts // expert_in_one_shard
+        self.expert_in_one_shard = expert_in_one_shard
+        self.experts = nn.ModuleList([
+            ExpertShard(config, self.expert_in_one_shard)
+            for i in range(self.shard_num)
+        ])
+
+        # Jitter parameters
+        self.jitter_noise = config.router_jitter_noise
+
+    def forward(self, hidden_states: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:
+        """Same routing as `MixtralSparseMoeBlock.forward`, evaluated shard by
+        shard."""
+        batch_size, sequence_length, hidden_dim = hidden_states.shape
+        if self.training and self.jitter_noise > 0:
+            hidden_states *= torch.empty_like(hidden_states).uniform_(
+                1.0 - self.jitter_noise, 1.0 + self.jitter_noise)
+        hidden_states = hidden_states.view(-1, hidden_dim)
+        # router_logits: (batch * sequence_length, n_experts)
+        router_logits = self.gate(hidden_states)
+
+        routing_weights = F.softmax(router_logits, dim=1, dtype=torch.float)
+        routing_weights, selected_experts = torch.topk(
+            routing_weights, self.top_k, dim=-1)
+        routing_weights /= routing_weights.sum(dim=-1, keepdim=True)
+        # we cast back to the input dtype
+        routing_weights = routing_weights.to(hidden_states.dtype)
+
+        final_hidden_states = torch.zeros(
+            (batch_size * sequence_length, hidden_dim),
+            dtype=hidden_states.dtype,
+            device=hidden_states.device)
+
+        # One-hot encode the selected experts to create an expert mask;
+        # this will be used to easily index which expert is going to be solicited
+        expert_mask = torch.nn.functional.one_hot(
+            selected_experts, num_classes=self.num_experts).permute(2, 1, 0)
+
+        # Loop over 
all available experts in the model and perform the computation on each expert + for shard_index in range(self.shard_num): + mask = expert_mask[shard_index * + self.expert_in_one_shard:(shard_index + 1) * + self.expert_in_one_shard] + final_hidden_states = self.experts[shard_index]( + hidden_states, mask, routing_weights, final_hidden_states) + + final_hidden_states = final_hidden_states.reshape( + batch_size, sequence_length, hidden_dim) + return final_hidden_states, router_logits + + +class MixtralDecoderLayer(nn.Module): + + def __init__(self, config: MixtralConfig, layer_idx: int): + super().__init__() + self.hidden_size = config.hidden_size + + self.self_attn = MIXTRAL_ATTENTION_CLASSES[ + config._attn_implementation](config, layer_idx) + + moe_implementation = config.moe_implementation + if moe_implementation == 'origin': + block = MixtralSparseMoeBlock + elif moe_implementation == 'shard': + block = MixtralSparseShardMoeBlock + else: + raise NotImplementedError + self.block_sparse_moe = block(config) + + self.input_layernorm = MixtralRMSNorm( + config.hidden_size, eps=config.rms_norm_eps) + self.post_attention_layernorm = MixtralRMSNorm( + config.hidden_size, eps=config.rms_norm_eps) + + def forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Tuple[torch.Tensor]] = None, + output_attentions: Optional[bool] = False, + output_router_logits: Optional[bool] = False, + use_cache: Optional[bool] = False, + ) -> Tuple[torch.FloatTensor, Optional[Tuple[torch.FloatTensor, + torch.FloatTensor]]]: + """ + Args: + hidden_states (`torch.FloatTensor`): input to the layer of shape `(batch, seq_len, embed_dim)` + attention_mask (`torch.FloatTensor`, *optional*): attention mask of size + `(batch, sequence_length)` where padding elements are indicated by 0. + past_key_value (`Tuple(torch.FloatTensor)`, *optional*): cached past key and value projection states + output_attentions (`bool`, *optional*): + Whether or not to return the attentions tensors of all attention layers. See `attentions` under + returned tensors for more detail. + output_router_logits (`bool`, *optional*): + Whether or not to return the logits of all the routers. They are useful for computing the router loss, and + should not be returned during inference. + use_cache (`bool`, *optional*): + If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding + (see `past_key_values`). 
+ """ + + residual = hidden_states + + hidden_states = self.input_layernorm(hidden_states) + + # Self Attention + hidden_states, self_attn_weights, present_key_value = self.self_attn( + hidden_states=hidden_states, + attention_mask=attention_mask, + position_ids=position_ids, + past_key_value=past_key_value, + output_attentions=output_attentions, + use_cache=use_cache, + ) + hidden_states = residual + hidden_states + + # Fully Connected + residual = hidden_states + hidden_states = self.post_attention_layernorm(hidden_states) + hidden_states, router_logits = self.block_sparse_moe(hidden_states) + hidden_states = residual + hidden_states + + outputs = (hidden_states, ) + + if output_attentions: + outputs += (self_attn_weights, ) + + if use_cache: + outputs += (present_key_value, ) + + if output_router_logits: + outputs += (router_logits, ) + + return outputs + + +def _load_pretrained_model( + cls, + model, + state_dict, + loaded_keys, + resolved_archive_file, + pretrained_model_name_or_path, + ignore_mismatched_sizes=False, + sharded_metadata=None, + _fast_init=True, + low_cpu_mem_usage=False, + device_map=None, + offload_folder=None, + offload_state_dict=None, + dtype=None, + hf_quantizer=None, + keep_in_fp32_modules=None, + gguf_path=None, +): + if ((state_dict is not None) or (resolved_archive_file is None) + or (low_cpu_mem_usage) or (device_map is not None) + or (offload_folder is not None) or + (not (offload_state_dict is None or offload_state_dict is False)) + or (hf_quantizer is not None) or + (keep_in_fp32_modules is not None and len(keep_in_fp32_modules) > 0) + or (gguf_path is not None)): + raise NotImplementedError + + folder = os.path.sep.join(resolved_archive_file[0].split(os.path.sep)[:-1]) + error_msgs = load_state_dict_into_model(model, folder) + return model, [], [], [], None, error_msgs + + +MIXTRAL_START_DOCSTRING = r""" + This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the + library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads + etc.) + + This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. + Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage + and behavior. + + Parameters: + config ([`MixtralConfig`]): + Model configuration class with all the parameters of the model. Initializing with a config file does not + load the weights associated with the model, only the configuration. Check out the + [`~PreTrainedModel.from_pretrained`] method to load the model weights. 
+""" + + +@add_start_docstrings( + 'The bare Mixtral Model outputting raw hidden-states without any specific head on top.', + MIXTRAL_START_DOCSTRING, +) +# Copied from transformers.models.mistral.modeling_mistral.MistralPreTrainedModel with Mistral->Mixtral +class MixtralPreTrainedModel(PreTrainedModel): + config_class = MixtralConfig + base_model_prefix = 'model' + supports_gradient_checkpointing = True + _no_split_modules = ['MixtralDecoderLayer'] + _skip_keys_device_placement = 'past_key_values' + _supports_flash_attn_2 = True + _supports_sdpa = True + _supports_cache_class = True + + def _init_weights(self, module): + std = self.config.initializer_range + if isinstance(module, nn.Linear): + module.weight.data.normal_(mean=0.0, std=std) + if module.bias is not None: + module.bias.data.zero_() + elif isinstance(module, nn.Embedding): + module.weight.data.normal_(mean=0.0, std=std) + if module.padding_idx is not None: + module.weight.data[module.padding_idx].zero_() + + @classmethod + def from_pretrained(cls, pretrained_model_name_or_path, *args, **kwargs): + moe_implementation = kwargs.get('moe_implementation', 'origin') + if moe_implementation == 'origin': + return super().from_pretrained(pretrained_model_name_or_path, + *args, **kwargs) + + cls._load_pretrained_model = types.MethodType(_load_pretrained_model, + cls) + return super().from_pretrained(pretrained_model_name_or_path, *args, + **kwargs) + + +MIXTRAL_INPUTS_DOCSTRING = r""" + Args: + input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): + Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide + it. + + Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and + [`PreTrainedTokenizer.__call__`] for details. + + [What are input IDs?](../glossary#input-ids) + attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*): + Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: + + - 1 for tokens that are **not masked**, + - 0 for tokens that are **masked**. + + [What are attention masks?](../glossary#attention-mask) + + Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and + [`PreTrainedTokenizer.__call__`] for details. + + If `past_key_values` is used, optionally only the last `decoder_input_ids` have to be input (see + `past_key_values`). + + If you want to change padding behavior, you should read [`modeling_opt._prepare_decoder_attention_mask`] + and modify to your needs. See diagram 1 in [the paper](https://arxiv.org/abs/1910.13461) for more + information on the default strategy. + + - 1 indicates the head is **not masked**, + - 0 indicates the head is **masked**. + position_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): + Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0, + config.n_positions - 1]`. + + [What are position IDs?](../glossary#position-ids) + past_key_values (`tuple(tuple(torch.FloatTensor))`, *optional*, returned when `use_cache=True` is passed or when `config.use_cache=True`): + Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of shape + `(batch_size, num_heads, sequence_length, embed_size_per_head)`) and 2 additional tensors of shape + `(batch_size, num_heads, encoder_sequence_length, embed_size_per_head)`. 
+ + Contains pre-computed hidden-states (key and values in the self-attention blocks and in the cross-attention + blocks) that can be used (see `past_key_values` input) to speed up sequential decoding. + + If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those that + don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of all + `decoder_input_ids` of shape `(batch_size, sequence_length)`. + inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): + Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This + is useful if you want more control over how to convert `input_ids` indices into associated vectors than the + model's internal embedding lookup matrix. + use_cache (`bool`, *optional*): + If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see + `past_key_values`). + output_attentions (`bool`, *optional*): + Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned + tensors for more detail. + output_hidden_states (`bool`, *optional*): + Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for + more detail. + output_router_logits (`bool`, *optional*): + Whether or not to return the logits of all the routers. They are useful for computing the router loss, and + should not be returned during inference. + return_dict (`bool`, *optional*): + Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. +""" + + +@add_start_docstrings( + 'The bare Mixtral Model outputting raw hidden-states without any specific head on top.', + MIXTRAL_START_DOCSTRING, +) +# Copied from transformers.models.mistral.modeling_mistral.MistralModel with MISTRAL->MIXTRAL,Mistral->Mixtral +class MixtralModel(MixtralPreTrainedModel): + """Transformer decoder consisting of *config.num_hidden_layers* layers. 
+ Each layer is a [`MixtralDecoderLayer`] + + Args: + config: MixtralConfig + """ + + def __init__(self, config: MixtralConfig): + super().__init__(config) + self.padding_idx = config.pad_token_id + self.vocab_size = config.vocab_size + + self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, + self.padding_idx) + self.layers = nn.ModuleList([ + MixtralDecoderLayer(config, layer_idx) + for layer_idx in range(config.num_hidden_layers) + ]) + self._attn_implementation = config._attn_implementation + self.norm = MixtralRMSNorm(config.hidden_size, eps=config.rms_norm_eps) + + self.gradient_checkpointing = False + # Initialize weights and apply final processing + self.post_init() + + def get_input_embeddings(self): + return self.embed_tokens + + def set_input_embeddings(self, value): + self.embed_tokens = value + + # Ignore copy + @add_start_docstrings_to_model_forward(MIXTRAL_INPUTS_DOCSTRING) + def forward( + self, + input_ids: torch.LongTensor = None, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_values: Optional[List[torch.FloatTensor]] = None, + inputs_embeds: Optional[torch.FloatTensor] = None, + use_cache: Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + output_router_logits: Optional[bool] = None, + return_dict: Optional[bool] = None, + ) -> Union[Tuple, MoeModelOutputWithPast]: + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_router_logits = ( + output_router_logits if output_router_logits is not None else + self.config.output_router_logits) + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else + self.config.output_hidden_states) + use_cache = use_cache if use_cache is not None else self.config.use_cache + + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + # retrieve input_ids and inputs_embeds + if input_ids is not None and inputs_embeds is not None: + raise ValueError( + 'You cannot specify both decoder_input_ids and decoder_inputs_embeds at the same time' + ) + elif input_ids is not None: + batch_size, seq_length = input_ids.shape + elif inputs_embeds is not None: + batch_size, seq_length, _ = inputs_embeds.shape + else: + raise ValueError( + 'You have to specify either decoder_input_ids or decoder_inputs_embeds' + ) + + past_key_values_length = 0 + + if self.gradient_checkpointing and self.training: + if use_cache: + logger.warning_once( + '`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`...' 
+ ) + use_cache = False + + if use_cache: + use_legacy_cache = not isinstance(past_key_values, Cache) + if use_legacy_cache: + past_key_values = DynamicCache.from_legacy_cache( + past_key_values) + past_key_values_length = past_key_values.get_usable_length( + seq_length) + + if position_ids is None: + device = input_ids.device if input_ids is not None else inputs_embeds.device + position_ids = torch.arange( + past_key_values_length, + seq_length + past_key_values_length, + dtype=torch.long, + device=device) + position_ids = position_ids.unsqueeze(0).view(-1, seq_length) + else: + position_ids = position_ids.view(-1, seq_length).long() + + if inputs_embeds is None: + inputs_embeds = self.embed_tokens(input_ids) + + if attention_mask is not None and self._attn_implementation == 'flash_attention_2' and use_cache: + is_padding_right = attention_mask[:, -1].sum().item() != batch_size + if is_padding_right: + raise ValueError( + "You are attempting to perform batched generation with padding_side='right'" + ' this may lead to unexpected behaviour for Flash Attention version of Mixtral. Make sure to ' + " call `tokenizer.padding_side = 'left'` before tokenizing the input. " + ) + + if self._attn_implementation == 'flash_attention_2': + # 2d mask is passed through the layers + attention_mask = attention_mask if ( + attention_mask is not None and 0 in attention_mask) else None + elif self._attn_implementation == 'sdpa' and not output_attentions: + # output_attentions=True can not be supported when using SDPA, and we fall back on + # the manual implementation that requires a 4D causal mask in all cases. + attention_mask = _prepare_4d_causal_attention_mask_for_sdpa( + attention_mask, + (batch_size, seq_length), + inputs_embeds, + past_key_values_length, + sliding_window=self.config.sliding_window, + ) + else: + # 4d mask is passed through the layers + attention_mask = _prepare_4d_causal_attention_mask( + attention_mask, + (batch_size, seq_length), + inputs_embeds, + past_key_values_length, + sliding_window=self.config.sliding_window, + ) + + hidden_states = inputs_embeds + + # decoder layers + all_hidden_states = () if output_hidden_states else None + all_self_attns = () if output_attentions else None + all_router_logits = () if output_router_logits else None + next_decoder_cache = None + + for decoder_layer in self.layers: + if output_hidden_states: + all_hidden_states += (hidden_states, ) + + if self.gradient_checkpointing and self.training: + layer_outputs = self._gradient_checkpointing_func( + decoder_layer.__call__, + hidden_states, + attention_mask, + position_ids, + past_key_values, + output_attentions, + output_router_logits, + use_cache, + ) + else: + layer_outputs = decoder_layer( + hidden_states, + attention_mask=attention_mask, + position_ids=position_ids, + past_key_value=past_key_values, + output_attentions=output_attentions, + output_router_logits=output_router_logits, + use_cache=use_cache, + ) + + hidden_states = layer_outputs[0] + + if use_cache: + next_decoder_cache = layer_outputs[ + 2 if output_attentions else 1] + + if output_attentions: + all_self_attns += (layer_outputs[1], ) + + if output_router_logits: + all_router_logits += (layer_outputs[-1], ) + + hidden_states = self.norm(hidden_states) + + # add hidden states from the last decoder layer + if output_hidden_states: + all_hidden_states += (hidden_states, ) + + next_cache = None + if use_cache: + next_cache = next_decoder_cache.to_legacy_cache( + ) if use_legacy_cache else next_decoder_cache + + if not return_dict: + return 
tuple(v for v in [ + hidden_states, next_cache, all_hidden_states, all_self_attns, + all_router_logits + ] if v is not None) + return MoeModelOutputWithPast( + last_hidden_state=hidden_states, + past_key_values=next_cache, + hidden_states=all_hidden_states, + attentions=all_self_attns, + router_logits=all_router_logits, + ) + + +class MixtralForCausalLM(MixtralPreTrainedModel): + _tied_weights_keys = ['lm_head.weight'] + + def __init__(self, config): + super().__init__(config) + self.model = MixtralModel(config) + self.vocab_size = config.vocab_size + self.lm_head = nn.Linear( + config.hidden_size, config.vocab_size, bias=False) + self.router_aux_loss_coef = config.router_aux_loss_coef + self.num_experts = config.num_local_experts + self.num_experts_per_tok = config.num_experts_per_tok + # Initialize weights and apply final processing + self.post_init() + + def get_input_embeddings(self): + return self.model.embed_tokens + + def set_input_embeddings(self, value): + self.model.embed_tokens = value + + def get_output_embeddings(self): + return self.lm_head + + def set_output_embeddings(self, new_embeddings): + self.lm_head = new_embeddings + + def set_decoder(self, decoder): + self.model = decoder + + def get_decoder(self): + return self.model + + @add_start_docstrings_to_model_forward(MIXTRAL_INPUTS_DOCSTRING) + @replace_return_docstrings( + output_type=MoeCausalLMOutputWithPast, config_class=_CONFIG_FOR_DOC) + # Ignore copy + def forward( + self, + input_ids: torch.LongTensor = None, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_values: Optional[List[torch.FloatTensor]] = None, + inputs_embeds: Optional[torch.FloatTensor] = None, + labels: Optional[torch.LongTensor] = None, + use_cache: Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + output_router_logits: Optional[bool] = None, + return_dict: Optional[bool] = None, + ) -> Union[Tuple, MoeCausalLMOutputWithPast]: + r""" + Args: + labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): + Labels for computing the masked language modeling loss. Indices should either be in `[0, ..., + config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored + (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`. + + Returns: + + Example: + + ```python + >>> from transformers import AutoTokenizer, MixtralForCausalLM + + >>> model = MixtralForCausalLM.from_pretrained("mistralai/Mixtral-8x7B-v0.1") + >>> tokenizer = AutoTokenizer.from_pretrained("mistralai/Mixtral-8x7B-v0.1") + + >>> prompt = "Hey, are you conscious? Can you talk to me?" + >>> inputs = tokenizer(prompt, return_tensors="pt") + + >>> # Generate + >>> generate_ids = model.generate(inputs.input_ids, max_length=30) + >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0] + "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you." 
+ ```""" + + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_router_logits = ( + output_router_logits if output_router_logits is not None else + self.config.output_router_logits) + + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else + self.config.output_hidden_states) + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + # decoder outputs consists of (dec_features, layer_state, dec_hidden, dec_attn) + outputs = self.model( + input_ids=input_ids, + attention_mask=attention_mask, + position_ids=position_ids, + past_key_values=past_key_values, + inputs_embeds=inputs_embeds, + use_cache=use_cache, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + output_router_logits=output_router_logits, + return_dict=return_dict, + ) + + hidden_states = outputs[0] + logits = self.lm_head(hidden_states) + logits = logits.float() + + loss = None + if labels is not None: + # Shift so that tokens < n predict n + shift_logits = logits[..., :-1, :].contiguous() + shift_labels = labels[..., 1:].contiguous() + # Flatten the tokens + loss_fct = CrossEntropyLoss() + shift_logits = shift_logits.view(-1, self.config.vocab_size) + shift_labels = shift_labels.view(-1) + # Enable model parallelism + shift_labels = shift_labels.to(shift_logits.device) + loss = loss_fct(shift_logits, shift_labels) + + aux_loss = None + if output_router_logits: + aux_loss = load_balancing_loss_func( + outputs.router_logits if return_dict else outputs[-1], + self.num_experts, + self.num_experts_per_tok, + attention_mask, + ) + if labels is not None: + loss += self.router_aux_loss_coef * aux_loss.to( + loss.device) # make sure to reside in the same device + + if not return_dict: + output = (logits, ) + outputs[1:] + if output_router_logits: + output = (aux_loss, ) + output + return (loss, ) + output if loss is not None else output + + return MoeCausalLMOutputWithPast( + loss=loss, + aux_loss=aux_loss, + logits=logits, + past_key_values=outputs.past_key_values, + hidden_states=outputs.hidden_states, + attentions=outputs.attentions, + router_logits=outputs.router_logits, + ) + + def prepare_inputs_for_generation( + self, + input_ids, + past_key_values=None, + attention_mask=None, + inputs_embeds=None, + output_router_logits=False, + **kwargs, + ): + # Omit tokens covered by past_key_values + if past_key_values is not None: + if isinstance(past_key_values, Cache): + cache_length = past_key_values.get_seq_length() + past_length = past_key_values.seen_tokens + max_cache_length = past_key_values.get_max_length() + else: + cache_length = past_length = past_key_values[0][0].shape[2] + max_cache_length = None + + # Keep only the unprocessed tokens: + # 1 - If the length of the attention_mask exceeds the length of input_ids, then we are in a setting where + # some of the inputs are exclusively passed as part of the cache (e.g. when passing input_embeds as + # input) + if attention_mask is not None and attention_mask.shape[ + 1] > input_ids.shape[1]: + input_ids = input_ids[:, -(attention_mask.shape[1] - + past_length):] + # 2 - If the past_length is smaller than input_ids', then input_ids holds all input tokens. We can discard + # input_ids based on the past_length. + elif past_length < input_ids.shape[1]: + input_ids = input_ids[:, past_length:] + # 3 - Otherwise (past_length >= input_ids.shape[1]), let's assume input_ids only has unprocessed tokens. 
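+            # In ordinary decoding, `generate` appends one sampled token, so
+            # past_length == input_ids.shape[1] - 1 and case 2 keeps only
+            # input_ids[:, -1:], the single not-yet-processed token.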
+ + # If we are about to go beyond the maximum cache length, we need to crop the input attention mask. + if (max_cache_length is not None and attention_mask is not None + and cache_length + input_ids.shape[1] > max_cache_length): + attention_mask = attention_mask[:, -max_cache_length:] + + position_ids = kwargs.get('position_ids', None) + if attention_mask is not None and position_ids is None: + # create position_ids on the fly for batch generation + position_ids = attention_mask.long().cumsum(-1) - 1 + position_ids.masked_fill_(attention_mask == 0, 1) + if past_key_values: + position_ids = position_ids[:, -input_ids.shape[1]:] + + # if `inputs_embeds` are passed, we only want to use them in the 1st generation step + if inputs_embeds is not None and past_key_values is None: + model_inputs = {'inputs_embeds': inputs_embeds} + else: + model_inputs = {'input_ids': input_ids} + + model_inputs.update({ + 'position_ids': position_ids, + 'past_key_values': past_key_values, + 'use_cache': kwargs.get('use_cache'), + 'attention_mask': attention_mask, + 'output_router_logits': output_router_logits, + }) + return model_inputs + + @staticmethod + def _reorder_cache(past_key_values, beam_idx): + reordered_past = () + for layer_past in past_key_values: + reordered_past += (tuple( + past_state.index_select(0, beam_idx.to(past_state.device)) + for past_state in layer_past), ) + return reordered_past + + +@add_start_docstrings( + """ + The Mixtral Model transformer with a sequence classification head on top (linear layer). + + [`MixtralForSequenceClassification`] uses the last token in order to do the classification, as other causal models + (e.g. GPT-2) do. + + Since it does classification on the last token, it requires to know the position of the last token. If a + `pad_token_id` is defined in the configuration, it finds the last token that is not a padding token in each row. If + no `pad_token_id` is defined, it simply takes the last value in each row of the batch. Since it cannot guess the + padding tokens when `inputs_embeds` are passed instead of `input_ids`, it does the same (take the last value in + each row of the batch). 
+ """, + MIXTRAL_START_DOCSTRING, +) +# Copied from transformers.models.llama.modeling_llama.LlamaForSequenceClassification with Llama->Mixtral, LLAMA->MIXTRAL +class MixtralForSequenceClassification(MixtralPreTrainedModel): + + def __init__(self, config): + super().__init__(config) + self.num_labels = config.num_labels + self.model = MixtralModel(config) + self.score = nn.Linear(config.hidden_size, self.num_labels, bias=False) + + # Initialize weights and apply final processing + self.post_init() + + def get_input_embeddings(self): + return self.model.embed_tokens + + def set_input_embeddings(self, value): + self.model.embed_tokens = value + + @add_start_docstrings_to_model_forward(MIXTRAL_INPUTS_DOCSTRING) + def forward( + self, + input_ids: torch.LongTensor = None, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_values: Optional[Union[Cache, + List[torch.FloatTensor]]] = None, + inputs_embeds: Optional[torch.FloatTensor] = None, + labels: Optional[torch.LongTensor] = None, + use_cache: Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + ) -> Union[Tuple, SequenceClassifierOutputWithPast]: + r""" + labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*): + Labels for computing the sequence classification/regression loss. Indices should be in `[0, ..., + config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If + `config.num_labels > 1` a classification loss is computed (Cross-Entropy). + """ + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + transformer_outputs = self.model( + input_ids, + attention_mask=attention_mask, + position_ids=position_ids, + past_key_values=past_key_values, + inputs_embeds=inputs_embeds, + use_cache=use_cache, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + hidden_states = transformer_outputs[0] + logits = self.score(hidden_states) + + if input_ids is not None: + batch_size = input_ids.shape[0] + else: + batch_size = inputs_embeds.shape[0] + + if self.config.pad_token_id is None and batch_size != 1: + raise ValueError( + 'Cannot handle batch sizes > 1 if no padding token is defined.' 
+ ) + if self.config.pad_token_id is None: + sequence_lengths = -1 + else: + if input_ids is not None: + # if no pad token found, use modulo instead of reverse indexing for ONNX compatibility + sequence_lengths = torch.eq( + input_ids, self.config.pad_token_id).int().argmax(-1) - 1 + sequence_lengths = sequence_lengths % input_ids.shape[-1] + sequence_lengths = sequence_lengths.to(logits.device) + else: + sequence_lengths = -1 + + pooled_logits = logits[torch.arange(batch_size, device=logits.device), + sequence_lengths] + + loss = None + if labels is not None: + labels = labels.to(logits.device) + if self.config.problem_type is None: + if self.num_labels == 1: + self.config.problem_type = 'regression' + elif self.num_labels > 1 and (labels.dtype == torch.long + or labels.dtype == torch.int): + self.config.problem_type = 'single_label_classification' + else: + self.config.problem_type = 'multi_label_classification' + + if self.config.problem_type == 'regression': + loss_fct = MSELoss() + if self.num_labels == 1: + loss = loss_fct(pooled_logits.squeeze(), labels.squeeze()) + else: + loss = loss_fct(pooled_logits, labels) + elif self.config.problem_type == 'single_label_classification': + loss_fct = CrossEntropyLoss() + loss = loss_fct( + pooled_logits.view(-1, self.num_labels), labels.view(-1)) + elif self.config.problem_type == 'multi_label_classification': + loss_fct = BCEWithLogitsLoss() + loss = loss_fct(pooled_logits, labels) + if not return_dict: + output = (pooled_logits, ) + transformer_outputs[1:] + return ((loss, ) + output) if loss is not None else output + + return SequenceClassifierOutputWithPast( + loss=loss, + logits=pooled_logits, + past_key_values=transformer_outputs.past_key_values, + hidden_states=transformer_outputs.hidden_states, + attentions=transformer_outputs.attentions, + ) diff --git a/data/xtuner/xtuner/model/utils.py b/data/xtuner/xtuner/model/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..a8bbf294448f4930e150d490ddfaf0a0c4ce9fb2 --- /dev/null +++ b/data/xtuner/xtuner/model/utils.py @@ -0,0 +1,317 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
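+"""Helpers shared by the xtuner model wrappers: config traversal and building,
+LoRA state-dict extraction, multimodal input packing and checkpoint loading."""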
+import os.path as osp +from typing import List, Optional + +import torch +from mmengine.utils.misc import get_object_from_string +from peft import PeftType +from torch import nn +from transformers import PreTrainedModel + +from xtuner.utils import IGNORE_INDEX, IMAGE_TOKEN_INDEX + + +def set_obj_dtype(d): + for key, value in d.items(): + if value in ['torch.float16', 'torch.float32', 'torch.bfloat16']: + d[key] = getattr(torch, value.split('.')[-1]) + + +def try_build_module(cfg): + builder = cfg['type'] + if isinstance(builder, str): + builder = get_object_from_string(builder) + if builder is None: + # support handling cfg with key 'type' can not be built, such as + # {'rope_scaling': {'type': 'linear', 'factor': 2.0}} + return cfg + cfg.pop('type') + module_built = builder(**cfg) + return module_built + + +def traverse_dict(d): + if isinstance(d, dict): + set_obj_dtype(d) + for key, value in d.items(): + if isinstance(value, dict): + traverse_dict(value) + if 'type' in value: + module_built = try_build_module(value) + d[key] = module_built + elif isinstance(d, list): + for element in d: + traverse_dict(element) + + +def find_all_linear_names(model): + lora_module_names = set() + for name, module in model.named_modules(): + if isinstance(module, nn.Linear): + names = name.split('.') + lora_module_names.add(names[0] if len(names) == 1 else names[-1]) + + if 'lm_head' in lora_module_names: # needed for 16-bit + lora_module_names.remove('lm_head') + if 'output_layer' in lora_module_names: # needed for 16-bit + lora_module_names.remove('output_layer') + return list(lora_module_names) + + +class LoadWoInit: + """Context manager that disable parameter initialization.""" + + def __init__(self): + self.constant_ = torch.nn.init.constant_ + self.zeros_ = torch.nn.init.zeros_ + self.ones_ = torch.nn.init.ones_ + self.uniform_ = torch.nn.init.uniform_ + self.normal_ = torch.nn.init.normal_ + self.kaiming_uniform_ = torch.nn.init.kaiming_uniform_ + self.kaiming_normal_ = torch.nn.init.kaiming_normal_ + + def __enter__(self, *args, **kwargs): + torch.nn.init.constant_ = lambda *args, **kwargs: None + torch.nn.init.zeros_ = lambda *args, **kwargs: None + torch.nn.init.ones_ = lambda *args, **kwargs: None + torch.nn.init.uniform_ = lambda *args, **kwargs: None + torch.nn.init.normal_ = lambda *args, **kwargs: None + torch.nn.init.kaiming_uniform_ = lambda *args, **kwargs: None + torch.nn.init.kaiming_normal_ = lambda *args, **kwargs: None + + def __exit__(self, *args, **kwargs): + torch.nn.init.constant_ = self.constant_ + torch.nn.init.zeros_ = self.zeros_ + torch.nn.init.ones_ = self.ones_ + torch.nn.init.uniform_ = self.uniform_ + torch.nn.init.normal_ = self.normal_ + torch.nn.init.kaiming_uniform_ = self.kaiming_uniform_ + torch.nn.init.kaiming_normal_ = self.kaiming_normal_ + + +def get_peft_model_state_dict(model, state_dict=None, adapter_name='default'): + # Modified from `https://github.com/huggingface/peft/blob/main/src/peft/utils/save_and_load.py` # noqa: E501 + + config = model.peft_config[adapter_name] + if state_dict is None: + state_dict = model.state_dict() + if config.peft_type == PeftType.LORA: + # adapted from `https://github.com/microsoft/LoRA/blob/main/loralib/utils.py` # noqa: E501 + # to be used directly with the state dict which is necessary + # when using DeepSpeed or FSDP + bias = config.bias + if bias == 'none': + to_return = {k: state_dict[k] for k in state_dict if 'lora_' in k} + elif bias == 'all': + to_return = { + k: state_dict[k] + for k in state_dict if 'lora_' in k or 
'bias' in k + } + elif bias == 'lora_only': + to_return = {} + for k in state_dict: + if 'lora_' in k: + to_return[k] = state_dict[k] + bias_name = k.split('lora_')[0] + 'bias' + if bias_name in state_dict: + to_return[bias_name] = state_dict[bias_name] + else: + raise NotImplementedError + to_return = { + k: v + for k, v in to_return.items() + if (('lora_' in k and adapter_name in k) or ('bias' in k)) + } + else: + # Currently we only support lora + raise NotImplementedError + if model.modules_to_save is not None: + for key, value in state_dict.items(): + if any(f'{module_name}.modules_to_save.{adapter_name}' in key + for module_name in model.modules_to_save): + to_return[key] = value + + return to_return + + +# Modified from https://github.com/haotian-liu/LLaVA/blob/82fc5e0e5f4393a4c26851fa32c69ab37ea3b146/llava/model/llava_arch.py#L99 # noqa: E501 +def prepare_inputs_labels_for_multimodal( + llm: PreTrainedModel, + input_ids: torch.LongTensor = None, + position_ids: Optional[torch.LongTensor] = None, + attention_mask: Optional[torch.Tensor] = None, + past_key_values: Optional[List[torch.FloatTensor]] = None, + labels: Optional[torch.LongTensor] = None, + pixel_values: Optional[torch.FloatTensor] = None): + if pixel_values is None: + return { + 'input_ids': input_ids, + 'position_ids': position_ids, + 'attention_mask': attention_mask, + 'past_key_values': past_key_values, + 'inputs_embeds': None, + 'labels': labels + } + + _labels = labels + _position_ids = position_ids + _attention_mask = attention_mask + if attention_mask is None: + attention_mask = torch.ones_like(input_ids, dtype=torch.bool) + else: + attention_mask = attention_mask.bool() + if position_ids is None: + position_ids = torch.arange( + 0, input_ids.shape[1], dtype=torch.long, device=input_ids.device) + if labels is None: + labels = torch.full_like(input_ids, IGNORE_INDEX) + + # remove the padding using attention_mask -- TODO: double check + input_ids = [ + cur_input_ids[cur_attention_mask] + for cur_input_ids, cur_attention_mask in zip(input_ids, attention_mask) + ] + labels = [ + cur_labels[cur_attention_mask] + for cur_labels, cur_attention_mask in zip(labels, attention_mask) + ] + + new_inputs_embeds = [] + new_labels = [] + cur_image_idx = 0 + for batch_idx, cur_input_ids in enumerate(input_ids): + num_images = (cur_input_ids == IMAGE_TOKEN_INDEX).sum() + if num_images == 0: + cur_pixel_values = pixel_values[cur_image_idx] + cur_inputs_embeds_1 = llm.get_input_embeddings()(cur_input_ids) + cur_inputs_embeds = torch.cat( + [cur_inputs_embeds_1, cur_pixel_values[0:0]], dim=0) + new_inputs_embeds.append(cur_inputs_embeds) + new_labels.append(labels[batch_idx]) + cur_image_idx += 1 + continue + + image_token_indices = [-1] + torch.where( + cur_input_ids == IMAGE_TOKEN_INDEX)[0].tolist() + [ + cur_input_ids.shape[0] + ] + cur_input_ids_noim = [] + cur_labels = labels[batch_idx] + cur_labels_noim = [] + for i in range(len(image_token_indices) - 1): + cur_input_ids_noim.append(cur_input_ids[image_token_indices[i] + + 1:image_token_indices[i + + 1]]) + cur_labels_noim.append(cur_labels[image_token_indices[i] + + 1:image_token_indices[i + 1]]) + split_sizes = [x.shape[0] for x in cur_labels_noim] + cur_inputs_embeds = llm.get_input_embeddings()( + torch.cat(cur_input_ids_noim)) + cur_inputs_embeds_no_im = torch.split( + cur_inputs_embeds, split_sizes, dim=0) + cur_new_inputs_embeds = [] + cur_new_labels = [] + + for i in range(num_images + 1): + cur_new_inputs_embeds.append(cur_inputs_embeds_no_im[i]) + 
cur_new_labels.append(cur_labels_noim[i]) + if i < num_images: + cur_pixel_values = pixel_values[cur_image_idx] + cur_image_idx += 1 + cur_new_inputs_embeds.append(cur_pixel_values) + cur_new_labels.append( + torch.full((cur_pixel_values.shape[0], ), + IGNORE_INDEX, + device=cur_labels.device, + dtype=cur_labels.dtype)) + + cur_new_inputs_embeds = torch.cat(cur_new_inputs_embeds) + cur_new_labels = torch.cat(cur_new_labels) + + new_inputs_embeds.append(cur_new_inputs_embeds) + new_labels.append(cur_new_labels) + + # Combine them + max_len = max(x.shape[0] for x in new_inputs_embeds) + batch_size = len(new_inputs_embeds) + + new_inputs_embeds_padded = [] + new_labels_padded = torch.full((batch_size, max_len), + IGNORE_INDEX, + dtype=new_labels[0].dtype, + device=new_labels[0].device) + attention_mask = torch.zeros((batch_size, max_len), + dtype=attention_mask.dtype, + device=attention_mask.device) + position_ids = torch.zeros((batch_size, max_len), + dtype=position_ids.dtype, + device=position_ids.device) + + for i, (cur_new_embed, + cur_new_labels) in enumerate(zip(new_inputs_embeds, new_labels)): + cur_len = cur_new_embed.shape[0] + new_inputs_embeds_padded.append( + torch.cat((cur_new_embed, + torch.zeros((max_len - cur_len, cur_new_embed.shape[1]), + dtype=cur_new_embed.dtype, + device=cur_new_embed.device)), + dim=0)) + if cur_len > 0: + new_labels_padded[i, :cur_len] = cur_new_labels + attention_mask[i, :cur_len] = True + position_ids[i, :cur_len] = torch.arange( + 0, + cur_len, + dtype=position_ids.dtype, + device=position_ids.device) + + new_inputs_embeds = torch.stack(new_inputs_embeds_padded, dim=0) + + if _labels is None: + new_labels = None + else: + new_labels = new_labels_padded + + if _attention_mask is None: + attention_mask = None + else: + attention_mask = attention_mask.to(dtype=_attention_mask.dtype) + + if _position_ids is None: + position_ids = None + + return { + 'input_ids': None, + 'position_ids': position_ids, + 'attention_mask': attention_mask, + 'past_key_values': past_key_values, + 'inputs_embeds': new_inputs_embeds, + 'labels': new_labels + } + + +def make_inputs_require_grad(module, input, output): + output.requires_grad_(True) + + +def guess_load_checkpoint(pth_model): + if osp.isfile(pth_model): + state_dict = torch.load(pth_model, map_location='cpu') + if 'state_dict' in state_dict: + state_dict = state_dict['state_dict'] + elif osp.isdir(pth_model): + try: + from xtuner.utils.zero_to_any_dtype import \ + get_state_dict_from_zero_checkpoint + except ImportError: + raise ImportError( + 'The provided PTH model appears to be a DeepSpeed checkpoint. ' + 'However, DeepSpeed library is not detected in current ' + 'environment. This suggests that DeepSpeed may not be ' + 'installed or is incorrectly configured. Please verify your ' + 'setup.') + state_dict = get_state_dict_from_zero_checkpoint( + osp.dirname(pth_model), osp.basename(pth_model)) + else: + raise FileNotFoundError(f'Cannot find {pth_model}') + return state_dict diff --git a/data/xtuner/xtuner/parallel/__init__.py b/data/xtuner/xtuner/parallel/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8c726230c8b8e703359ea62ff1edab1fea420052 --- /dev/null +++ b/data/xtuner/xtuner/parallel/__init__.py @@ -0,0 +1,2 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
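`make_inputs_require_grad` above is a one-line forward hook with a non-obvious purpose: with a frozen LLM plus gradient checkpointing, no checkpointed block would otherwise see an input with `requires_grad=True`, and backward through adapter weights would fail. A sketch of the intended registration, assuming `llm` is an already-built transformers model:

```python
# Enable gradients at the embedding output so checkpointed blocks have
# something to differentiate, even while the embedding weights stay frozen.
embedding_layer = llm.get_input_embeddings()
embedding_layer.register_forward_hook(make_inputs_require_grad)
llm.gradient_checkpointing_enable()
```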
+from .sequence import * # noqa: F401, F403 diff --git a/data/xtuner/xtuner/parallel/sequence/__init__.py b/data/xtuner/xtuner/parallel/sequence/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6e2992f78aa84f860b4465860d891b67900276f7 --- /dev/null +++ b/data/xtuner/xtuner/parallel/sequence/__init__.py @@ -0,0 +1,41 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from mmengine.dist import init_dist + +from .attention import (post_process_for_sequence_parallel_attn, + pre_process_for_sequence_parallel_attn, + sequence_parallel_wrapper) +from .comm import (all_to_all, gather_for_sequence_parallel, + gather_forward_split_backward, split_for_sequence_parallel, + split_forward_gather_backward) +from .data_collate import (pad_cumulative_len_for_sequence_parallel, + pad_for_sequence_parallel) +from .reduce_loss import reduce_sequence_parallel_loss +from .sampler import SequenceParallelSampler +from .setup_distributed import (get_data_parallel_group, + get_data_parallel_rank, + get_data_parallel_world_size, + get_inner_sequence_parallel_group, + get_inner_sequence_parallel_rank, + get_inner_sequence_parallel_world_size, + get_sequence_parallel_group, + get_sequence_parallel_rank, + get_sequence_parallel_world_size, + init_inner_sequence_parallel, + init_sequence_parallel, + is_inner_sequence_parallel_initialized) + +__all__ = [ + 'sequence_parallel_wrapper', 'pre_process_for_sequence_parallel_attn', + 'post_process_for_sequence_parallel_attn', 'pad_for_sequence_parallel', + 'split_for_sequence_parallel', 'SequenceParallelSampler', + 'init_sequence_parallel', 'get_sequence_parallel_group', + 'get_sequence_parallel_world_size', 'get_sequence_parallel_rank', + 'get_data_parallel_group', 'get_data_parallel_world_size', + 'get_data_parallel_rank', 'reduce_sequence_parallel_loss', 'init_dist', + 'all_to_all', 'gather_for_sequence_parallel', + 'split_forward_gather_backward', 'gather_forward_split_backward', + 'get_inner_sequence_parallel_group', 'get_inner_sequence_parallel_rank', + 'get_inner_sequence_parallel_world_size', 'init_inner_sequence_parallel', + 'is_inner_sequence_parallel_initialized', + 'pad_cumulative_len_for_sequence_parallel' +] diff --git a/data/xtuner/xtuner/parallel/sequence/attention.py b/data/xtuner/xtuner/parallel/sequence/attention.py new file mode 100644 index 0000000000000000000000000000000000000000..e8bb1adaca8bd42123976c46431cfba10c21fe96 --- /dev/null +++ b/data/xtuner/xtuner/parallel/sequence/attention.py @@ -0,0 +1,151 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import math + +import torch.distributed as dist + +from .comm import (all_to_all, gather_forward_split_backward, + split_forward_gather_backward) +from .setup_distributed import (get_inner_sequence_parallel_group, + get_inner_sequence_parallel_world_size, + get_sequence_parallel_group, + get_sequence_parallel_world_size, + init_inner_sequence_parallel, + is_inner_sequence_parallel_initialized) + + +def pre_process_for_sequence_parallel_attn(query_states, + key_states, + value_states, + scatter_dim=2, + gather_dim=1): + b, s_div_sp, h, d = query_states.shape + sp = get_sequence_parallel_world_size() + + if not is_inner_sequence_parallel_initialized(): + insp = sp // math.gcd(h, sp) + init_inner_sequence_parallel(insp) + else: + insp = get_inner_sequence_parallel_world_size() + + def pre_process_for_inner_sp(q, k, v): + if scatter_dim != 2 and gather_dim != 1: + raise NotImplementedError( + 'Currently only `scatter_dim == 2` and `gather_dim == 1` ' + f'is supported. 
But got scatter_dim = {scatter_dim} and ' + f'gather_dim = {gather_dim}.') + + # (b, s_div_sp, h, d) -> + # (b, s_div_sp, sp/insp, h*insp/sp, insp, d/insp) -> + # (b, s_div_sp, sp/insp, insp, h*insp/sp, d/insp) -> + # (b, s_div_sp, insp*h, d/insp) + q = q.view(b, s_div_sp, sp // insp, h * insp // sp, insp, + d // insp).transpose(3, 4).flatten(2, 4) + k = k.view(b, s_div_sp, sp // insp, h * insp // sp, insp, + d // insp).transpose(3, 4).flatten(2, 4) + v = v.view(b, s_div_sp, sp // insp, h * insp // sp, insp, + d // insp).transpose(3, 4).flatten(2, 4) + + return q, k, v + + def post_process_for_inner_sp(q, k, v): + # (b, s, insp*h/sp, d/insp) -> (b, s, insp*h/sp, d) + q = gather_forward_split_backward(q, -1, + get_inner_sequence_parallel_group()) + k = gather_forward_split_backward(k, -1, + get_inner_sequence_parallel_group()) + v = gather_forward_split_backward(v, -1, + get_inner_sequence_parallel_group()) + + return q, k, v + + assert (h * insp) % sp == 0, \ + ('The number of attention heads should be divisible by ' + '(sequence_parallel_world_size // sequence_parallel_inner_world_size)' + f'. But got n_head = {h}, sequence_parallel_world_size = ' + f'{sp} and sequence_parallel_inner_world_size = {insp}.') + + if insp > 1: + query_states, key_states, value_states = pre_process_for_inner_sp( + query_states, key_states, value_states) + + # (b, s_div_sp, insp*h, d/insp) -> (b, s, insp*h/sp, d/insp) + sequence_parallel_group = get_sequence_parallel_group() + query_states = all_to_all( + query_states, + sequence_parallel_group, + scatter_dim=scatter_dim, + gather_dim=gather_dim) + key_states = all_to_all( + key_states, + sequence_parallel_group, + scatter_dim=scatter_dim, + gather_dim=gather_dim) + value_states = all_to_all( + value_states, + sequence_parallel_group, + scatter_dim=scatter_dim, + gather_dim=gather_dim) + + if insp > 1: + query_states, key_states, value_states = post_process_for_inner_sp( + query_states, key_states, value_states) + + return query_states, key_states, value_states + + +def post_process_for_sequence_parallel_attn(attn_output, + scatter_dim=1, + gather_dim=2): + sp = get_sequence_parallel_world_size() + insp = get_inner_sequence_parallel_world_size() + b, s, h_mul_insp_div_sp, d = attn_output.shape + h = h_mul_insp_div_sp * sp // insp + s_div_sp = s // sp + + if insp > 1: + # (b, s, insp*h/sp, d) -> (b, s, insp*h/sp, d/insp) + attn_output = split_forward_gather_backward( + attn_output, -1, get_inner_sequence_parallel_group()) + + # (b, s, insp*h/sp, d/insp) -> (b, s_div_sp, insp*h, d/insp) + sequence_parallel_group = get_sequence_parallel_group() + output = all_to_all( + attn_output, + sequence_parallel_group, + scatter_dim=scatter_dim, + gather_dim=gather_dim) + + if insp > 1: + # (b, s_div_sp, insp*h, d/insp) -> + # (b, s_div_sp, sp/insp, insp, h*insp/sp, d/insp) -> + # (b, s_div_sp, sp/insp, h*insp/sp, insp, d/insp) -> + # (b, s_div_sp, h, d) + output = output.view(b, s_div_sp, sp // insp, insp, h * insp // sp, + d // insp).transpose(3, 4).reshape( + b, s_div_sp, h, d) + + return output + + +def sequence_parallel_wrapper(local_attn): + + def sequence_parallel_attn(query_states, key_states, value_states, *args, + **kwargs): + training = kwargs.pop('training', True) + enable_sequence_parallel = ( + dist.is_initialized() and get_sequence_parallel_world_size() > 1 + and training) + if enable_sequence_parallel: + query_states, key_states, value_states = \ + pre_process_for_sequence_parallel_attn( + query_states, key_states, value_states) + + out = 
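The inner-SP head bookkeeping above is easiest to check with concrete numbers (values invented for illustration): with h = 2 attention heads and sequence-parallel size sp = 4, heads alone cannot be scattered over 4 ranks, so an inner group of size insp = sp // gcd(h, sp) additionally splits head_dim:

```python
import math

h, sp, d = 2, 4, 128                   # heads, sp world size, head_dim
insp = sp // math.gcd(h, sp)           # 4 // 2 = 2
assert (h * insp) % sp == 0            # 2 * 2 head slices split over 4 ranks
heads_per_slice = h * insp // sp       # 1
sub_head_dim = d // insp               # 64
print(insp, heads_per_slice, sub_head_dim)  # 2 1 64
```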
local_attn(query_states, key_states, value_states, *args, + **kwargs) + + if enable_sequence_parallel: + out = post_process_for_sequence_parallel_attn(out).contiguous() + + return out + + return sequence_parallel_attn diff --git a/data/xtuner/xtuner/parallel/sequence/comm.py b/data/xtuner/xtuner/parallel/sequence/comm.py new file mode 100644 index 0000000000000000000000000000000000000000..1ff78e68c138dbf68cbda363424e460eac614b19 --- /dev/null +++ b/data/xtuner/xtuner/parallel/sequence/comm.py @@ -0,0 +1,269 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Any, Tuple + +import torch +import torch.distributed as dist +from torch import Tensor + + +def _all_to_all( + input: Tensor, + world_size: int, + group: dist.ProcessGroup, + scatter_dim: int, + gather_dim: int, +): + input_list = [ + t.contiguous() + for t in torch.tensor_split(input, world_size, scatter_dim) + ] + output_list = [torch.empty_like(input_list[0]) for _ in range(world_size)] + dist.all_to_all(output_list, input_list, group=group) + return torch.cat(output_list, dim=gather_dim).contiguous() + + +class _AllToAll(torch.autograd.Function): + """All-to-all communication. + + Args: + input: Input tensor + sp_group: Sequence parallel process group + scatter_dim: Scatter dimension + gather_dim: Gather dimension + """ + + @staticmethod + def forward(ctx: Any, input: Tensor, sp_group: dist.ProcessGroup, + scatter_dim: int, gather_dim: int): + ctx.sp_group = sp_group + ctx.scatter_dim = scatter_dim + ctx.gather_dim = gather_dim + ctx.world_size = dist.get_world_size(sp_group) + output = _all_to_all(input, ctx.world_size, sp_group, scatter_dim, + gather_dim) + return output + + @staticmethod + def backward(ctx: Any, grad_output: Tensor) -> Tuple: + grad_output = _all_to_all( + grad_output, + ctx.world_size, + ctx.sp_group, + ctx.gather_dim, + ctx.scatter_dim, + ) + return ( + grad_output, + None, + None, + None, + ) + + +def all_to_all( + input: Tensor, + sp_group: dist.ProcessGroup, + scatter_dim: int = 2, + gather_dim: int = 1, +): + """Convenience function to apply the all-to-all operation with scatter and + gather dimensions. + + Notes: + We have wrapped the `torch.distributed.all_to_all` function to + enable automatic differentiation of the all-to-all operation. + + Args: + input: The input tensor for which all-to-all communication is performed + sp_group: The sequence parallel process group. + scatter_dim: The dimension along which the input tensor is scattered + (default: 2). + gather_dim: The dimension along which the output tensor is gathered + (default: 1). + + Returns: + The output tensor after the all-to-all communication. + """ + return _AllToAll.apply(input, sp_group, scatter_dim, gather_dim) + + +def split_for_sequence_parallel(input, dim: int, sp_group: dist.ProcessGroup): + """Splits the input tensor along a given dimension for sequence parallel. + + Args: + input: The input tensor to be split. + dim: The dimension along which the tensor should be split. + sp_group: The sequence parallel process group. + + Returns: + The split tensor corresponding to the current rank's chunk. 
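A local, single-process illustration of the scatter half of `_all_to_all` above; `dist.all_to_all` then exchanges these chunks across ranks, and concatenation along `gather_dim` yields the (b, s, h/sp, d) layout used by the attention wrapper. Shapes are invented:

```python
import torch

sp = 4                                        # pretend world size
q = torch.randn(2, 8, 4, 16)                  # (b, s/sp, h, d)
chunks = [
    t.contiguous() for t in torch.tensor_split(q, sp, 2)  # scatter_dim=2
]
print([tuple(c.shape) for c in chunks])       # 4 x (2, 8, 1, 16)
# After the exchange, each rank concatenates its sp received chunks along
# gather_dim=1: (b, s, h/sp, d) = (2, 32, 1, 16).
```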
+    """
+    world_size = dist.get_world_size(sp_group)
+    if world_size == 1:
+        return input
+
+    rank = dist.get_rank(sp_group)
+    dim_size = input.size(dim)
+    assert dim_size % world_size == 0, (
+        f'The dimension to split ({dim_size}) is not a multiple of '
+        f'world size ({world_size}), cannot split tensor evenly')
+
+    tensor_list = torch.split(input, dim_size // world_size, dim=dim)
+    output = tensor_list[rank].contiguous()
+
+    return output
+
+
+def gather_for_sequence_parallel(input, dim: int, sp_group: dist.ProcessGroup):
+    """Gathers the input tensor along a given dimension for sequence parallel.
+
+    Args:
+        input: The input tensor to be gathered.
+        dim: The dimension along which the tensor should be gathered.
+        sp_group: The sequence parallel process group.
+
+    Returns:
+        The gathered tensor concatenated along the specified dimension.
+    """
+    input = input.contiguous()
+    world_size = dist.get_world_size(sp_group)
+
+    if world_size == 1:
+        return input
+
+    tensor_list = [torch.empty_like(input) for _ in range(world_size)]
+    assert input.device.type == 'cuda'
+    dist.all_gather(tensor_list, input, group=sp_group)
+
+    output = torch.cat(tensor_list, dim=dim).contiguous()
+
+    return output
+
+
+class _GatherForwardSplitBackward(torch.autograd.Function):
+    """Gather the input during forward.
+
+    Scale and split the grad, keeping only the chunk corresponding to the
+    rank, during backward.
+    """
+
+    @staticmethod
+    def forward(ctx, input, dim, sp_group, grad_scale):
+        ctx.dim = dim
+        ctx.sp_group = sp_group
+        ctx.grad_scale = grad_scale
+        return gather_for_sequence_parallel(input, dim, sp_group)
+
+    @staticmethod
+    def backward(ctx, grad_output):
+        if ctx.grad_scale == 'up':
+            grad_output = grad_output * dist.get_world_size(ctx.sp_group)
+        elif ctx.grad_scale == 'down':
+            grad_output = grad_output / dist.get_world_size(ctx.sp_group)
+
+        return (split_for_sequence_parallel(grad_output, ctx.dim,
+                                            ctx.sp_group), None, None, None)
+
+
+class _SplitForwardGatherBackward(torch.autograd.Function):
+    """Split the input, keeping only the chunk corresponding to the rank,
+    during forward.
+
+    Scale and gather the grad during backward.
+    """
+
+    @staticmethod
+    def forward(ctx, input, dim, sp_group, grad_scale):
+        ctx.dim = dim
+        ctx.sp_group = sp_group
+        ctx.grad_scale = grad_scale
+        return split_for_sequence_parallel(input, dim, sp_group)
+
+    @staticmethod
+    def backward(ctx, grad_output):
+        if ctx.grad_scale == 'up':
+            grad_output = grad_output * dist.get_world_size(ctx.sp_group)
+        elif ctx.grad_scale == 'down':
+            grad_output = grad_output / dist.get_world_size(ctx.sp_group)
+        return (gather_for_sequence_parallel(grad_output, ctx.dim,
+                                             ctx.sp_group), None, None, None)
+
+
+def split_forward_gather_backward(input, dim, sp_group, grad_scale=None):
+    """Split tensors according to the sp rank during forward propagation and
+    gather the grad from the whole sp group during backward propagation.
+
+    1. When do we need this? input.requires_grad = True
+
+    2. Why do we need grad scale?
+
+    We have to scale down the grads as `gather_forward_split_backward` scales
+    up the grads.
+    """
+    return _SplitForwardGatherBackward.apply(input, dim, sp_group, grad_scale)
+
+
+def gather_forward_split_backward(input, dim, sp_group, grad_scale=None):
+    """Gather tensors from the whole sp group during forward propagation and
+    split the grad according to the sp rank during backward propagation.
+
+    1. When do we need this?
+
+    When sp is greater than 1, we need to slice the input `x` along the
+    sequence length dimension before it is passed into the model, obtaining
+    `sub_seq_x`. We then pass `sub_seq_x` into the model and get the output
+    `sub_seq_out`. If the loss calculation needs the complete output, we have
+    to gather `sub_seq_out` across all sp ranks during forward propagation
+    and split the grad during backward propagation.
+
+    2. Why do we need grad scale?
+    Here is a simple case.
+
+    -------- SP 1 -----------
+    Suppose we have a toy model with only one linear module
+    (in_features = 2, out_features = 1) and the input x has shape (2, 2).
+    Y = [[y1], = [[w11x11 + w21x12], = [[x11, x12], dot [[w11],
+         [y2]]    [w11x21 + w21x22]]    [x21, x22]]      [w21]]
+    z = mean(Y) = (y1 + y2) / 2
+    Here is the partial derivative of z with respect to w11:
+    ∂z / ∂w11 = ∂z / ∂y1 * ∂y1 / ∂w11 + ∂z / ∂y2 * ∂y2 / ∂w11
+              = 1/2 * x11 + 1/2 * x21 = (x11 + x21) / 2
+
+    -------- SP 2 -----------
+    When the sequence parallel world size is set to 2, we split the input x
+    and scatter the slices to the two ranks in the same sequence parallel
+    group.
+    ```Step 1
+    Y_rank0 = [[y1]] = [[w11x11 + w21x12]] = [[x11, x12]] dot [[w11, w21]]^T
+    Y_rank1 = [[y2]] = [[w11x21 + w21x22]] = [[x21, x22]] dot [[w11, w21]]^T
+    ```
+
+    Then, we have to gather them:
+    ```Step 2
+    Y_rank0 = [[y1],
+               detach([y2])]
+    Y_rank1 = [detach([y1]),
+               [y2]]
+    ```
+    Note that y2 in Y_rank0 does not have grad, and neither does y1 in
+    Y_rank1.
+
+    Similarly, we calculate the loss in each rank:
+    ```Step 3
+    z_rank0 = mean(Y_rank0) = (y1 + detach(y2)) / 2
+    z_rank1 = mean(Y_rank1) = (detach(y1) + y2) / 2
+    ```
+    So the partial derivative of z_rank0 with respect to w11:
+    ```∂z / ∂w11 = ∂z / ∂y1 * ∂y1 / ∂w11 = x11 / 2```
+    The same for rank1:
+    ```∂z / ∂w11 = ∂z / ∂y2 * ∂y2 / ∂w11 = x21 / 2```
+
+    Finally, we need to all_reduce them:
+    ```Step 4
+    In both ranks:
+    ∂z / ∂w11 = (x11 / 2 + x21 / 2) / 2 = (x11 + x21) / 4
+    ```
+
+    In SP2, the gradient of each param is only half of that in SP1.
+    So we should scale up the grad during the backward process in Step 2.
+    """  # noqa: E501
+    return _GatherForwardSplitBackward.apply(input, dim, sp_group, grad_scale)
diff --git a/data/xtuner/xtuner/parallel/sequence/data_collate.py b/data/xtuner/xtuner/parallel/sequence/data_collate.py
new file mode 100644
index 0000000000000000000000000000000000000000..048eaec103be1ab1108fcf817f5d4ed4d5ece9ab
--- /dev/null
+++ b/data/xtuner/xtuner/parallel/sequence/data_collate.py
@@ -0,0 +1,46 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import torch
+
+from .setup_distributed import get_sequence_parallel_world_size
+
+
+def pad_for_sequence_parallel(tensor, padding_value, dim=-1):
+    length = tensor.shape[dim]
+    seq_parallel_world_size = get_sequence_parallel_world_size()
+    if length % seq_parallel_world_size == 0:
+        return tensor
+
+    pad_num = seq_parallel_world_size - (length % seq_parallel_world_size)
+    pad_shape = (*tensor.shape[:dim], pad_num,
+                 *tensor.shape[dim + 1:]) if dim != -1 else (
+                     *tensor.shape[:dim], pad_num)
+    pad = torch.full(
+        pad_shape, padding_value, dtype=tensor.dtype, device=tensor.device)
+    tensor = torch.cat([tensor, pad], dim=dim)
+    return tensor
+
+
+# This function is only applicable when the following two conditions hold:
+# 1. use_varlen_attn = True
+# 2. 
pack_to_max_length = True and the lengths of each sequence are different +def pad_cumulative_len_for_sequence_parallel(cumulative_len): + assert len(cumulative_len) == 1 + seqlen = cumulative_len[0][-1] + seq_parallel_world_size = get_sequence_parallel_world_size() + if seqlen % seq_parallel_world_size == 0: + return cumulative_len, None + + bs = len(cumulative_len) + pad_len = seq_parallel_world_size - (seqlen % seq_parallel_world_size) + seqlen_new = seqlen + pad_len + attention_mask = torch.zeros( + bs, seqlen_new, dtype=torch.bool, device=cumulative_len[0].device) + attention_mask[:, :seqlen] = True + + for i, cu_len in enumerate(cumulative_len): + pad = torch.tensor([seqlen_new], + device=cu_len.device, + dtype=cu_len.dtype) + cumulative_len[i] = torch.cat([cu_len, pad], dim=0) + + return cumulative_len, attention_mask diff --git a/data/xtuner/xtuner/parallel/sequence/reduce_loss.py b/data/xtuner/xtuner/parallel/sequence/reduce_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..fb37242a33d814826e11d985924105064d131b79 --- /dev/null +++ b/data/xtuner/xtuner/parallel/sequence/reduce_loss.py @@ -0,0 +1,34 @@ +import torch +import torch.distributed as dist + +from .setup_distributed import get_sequence_parallel_group + + +class _ReduceLoss(torch.autograd.Function): + + @staticmethod + def forward(ctx, mean_loss, loss_scale, process_group): + ctx.mode = process_group + if loss_scale == 0: + # convert nan to 0 just for logging + mean_loss = torch.nan_to_num(mean_loss) + loss_sum = mean_loss * loss_scale + dist.all_reduce(loss_sum, group=process_group) + dist.all_reduce(loss_scale, group=process_group) + loss = loss_sum / loss_scale + return loss + + @staticmethod + def backward(ctx, grad_output): + return grad_output, None, None + + +def reduce_sequence_parallel_loss(mean_loss, + loss_scale, + sp_group: dist.ProcessGroup = None): + if dist.get_world_size(sp_group) == 1: + return mean_loss + if sp_group is None: + # avoid bc breaking + sp_group = get_sequence_parallel_group() + return _ReduceLoss.apply(mean_loss, loss_scale, sp_group) diff --git a/data/xtuner/xtuner/parallel/sequence/sampler.py b/data/xtuner/xtuner/parallel/sequence/sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..69adb7cc91c5e5603b47fbb5cd438165d522a79b --- /dev/null +++ b/data/xtuner/xtuner/parallel/sequence/sampler.py @@ -0,0 +1,38 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
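The `loss_scale` argument to `reduce_sequence_parallel_loss` above is the per-rank count of loss-carrying tokens; all-reducing `mean_loss * loss_scale` and `loss_scale` separately yields a token-weighted mean rather than a naive average of rank means. A sketch with invented numbers makes the difference visible:

```python
import torch

mean_loss = torch.tensor([2.0, 4.0])     # per-rank mean loss (sp = 2)
loss_scale = torch.tensor([3.0, 1.0])    # per-rank unmasked-token counts
weighted = (mean_loss * loss_scale).sum() / loss_scale.sum()
print(weighted)          # tensor(2.5000) -- true mean over all 4 tokens
print(mean_loss.mean())  # tensor(3.) -- the naive (wrong) average
```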
+import math +from typing import Optional, Sized + +from mmengine.dataset import DefaultSampler +from mmengine.dist import sync_random_seed + +from .setup_distributed import (get_data_parallel_rank, + get_data_parallel_world_size) + + +class SequenceParallelSampler(DefaultSampler): + + def __init__(self, + dataset: Sized, + shuffle: bool = True, + seed: Optional[int] = None, + round_up: bool = True) -> None: + rank = get_data_parallel_rank() + world_size = get_data_parallel_world_size() + self.rank = rank + self.world_size = world_size + + self.dataset = dataset + self.shuffle = shuffle + if seed is None: + seed = sync_random_seed() + self.seed = seed + self.epoch = 0 + self.round_up = round_up + + if self.round_up: + self.num_samples = math.ceil(len(self.dataset) / world_size) + self.total_size = self.num_samples * self.world_size + else: + self.num_samples = math.ceil( + (len(self.dataset) - rank) / world_size) + self.total_size = len(self.dataset) diff --git a/data/xtuner/xtuner/parallel/sequence/setup_distributed.py b/data/xtuner/xtuner/parallel/sequence/setup_distributed.py new file mode 100644 index 0000000000000000000000000000000000000000..473993a33f3f2e782e6f78594acc2bdcc120422b --- /dev/null +++ b/data/xtuner/xtuner/parallel/sequence/setup_distributed.py @@ -0,0 +1,174 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch.distributed as dist + +_SEQUENCE_PARALLEL_GROUP = None +_SEQUENCE_PARALLEL_WORLD_SIZE = None +_SEQUENCE_PARALLEL_RANK = None + +_INNER_SEQUENCE_PARALLEL_GROUP = None +_INNER_SEQUENCE_PARALLEL_WORLD_SIZE = None +_INNER_SEQUENCE_PARALLEL_RANK = None + +_DATA_PARALLEL_GROUP = None +_DATA_PARALLEL_WORLD_SIZE = None +_DATA_PARALLEL_RANK = None + + +def init_sequence_parallel(sequence_parallel_size: int = 1): + assert dist.is_initialized() + world_size: int = dist.get_world_size() + + # enable_ds_sequence_parallel = sequence_parallel_size > 1 + # if enable_ds_sequence_parallel: + if world_size % sequence_parallel_size != 0: + raise RuntimeError(f'world_size ({world_size}) is not divisible by ' + f'sequence_parallel_size {sequence_parallel_size}') + + num_sequence_parallel_groups: int = world_size // sequence_parallel_size + + rank = dist.get_rank() + + # Build the sequence parallel groups. + global _SEQUENCE_PARALLEL_GROUP + assert _SEQUENCE_PARALLEL_GROUP is None, \ + 'sequence parallel group is already initialized' + for i in range(num_sequence_parallel_groups): + ranks = range(i * sequence_parallel_size, + (i + 1) * sequence_parallel_size) + group = dist.new_group(ranks) + if rank in ranks: + _SEQUENCE_PARALLEL_GROUP = group + + global _DATA_PARALLEL_GROUP + assert _DATA_PARALLEL_GROUP is None, \ + 'data parallel group is already initialized' + all_data_parallel_group_ranks = [] + start_rank = 0 + end_rank = world_size + for j in range(sequence_parallel_size): + ranks = range(start_rank + j, end_rank, sequence_parallel_size) + all_data_parallel_group_ranks.append(list(ranks)) + group = dist.new_group(ranks) + if rank in ranks: + _DATA_PARALLEL_GROUP = group + + +def init_inner_sequence_parallel(inner_sequence_parallel_size: int = 1): + """Build the sequence parallel inner groups. + + They are helpful when sp size is not evenly divided by the number of attn + heads. 
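The rank bookkeeping in `init_sequence_parallel` above: contiguous blocks of `sequence_parallel_size` ranks form SP groups, and strided ranks form DP groups (which is what `SequenceParallelSampler` shards data over). A no-torch sketch for world_size = 8, sp = 4:

```python
world_size, sp = 8, 4
sp_groups = [
    list(range(i * sp, (i + 1) * sp)) for i in range(world_size // sp)
]
dp_groups = [list(range(j, world_size, sp)) for j in range(sp)]
print(sp_groups)  # [[0, 1, 2, 3], [4, 5, 6, 7]]
print(dp_groups)  # [[0, 4], [1, 5], [2, 6], [3, 7]]
```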
+ """ + assert _SEQUENCE_PARALLEL_GROUP is not None, \ + ('Please call `init_inner_sequence_parallel` after calling ' + '`init_sequence_parallel`.') + + rank = dist.get_rank() + world_size: int = dist.get_world_size() + + n_inner_group = world_size // inner_sequence_parallel_size + + global _INNER_SEQUENCE_PARALLEL_GROUP + assert _INNER_SEQUENCE_PARALLEL_GROUP is None + + for i in range(n_inner_group): + ranks = range(i * inner_sequence_parallel_size, + (i + 1) * inner_sequence_parallel_size) + group = dist.new_group(ranks) + if rank in ranks: + _INNER_SEQUENCE_PARALLEL_GROUP = group + + +def is_inner_sequence_parallel_initialized(): + return _INNER_SEQUENCE_PARALLEL_GROUP is not None + + +def get_inner_sequence_parallel_group(): + return _INNER_SEQUENCE_PARALLEL_GROUP + + +def get_inner_sequence_parallel_world_size(): + global _INNER_SEQUENCE_PARALLEL_WORLD_SIZE + if _INNER_SEQUENCE_PARALLEL_WORLD_SIZE is not None: + return _INNER_SEQUENCE_PARALLEL_WORLD_SIZE + if not dist.is_initialized() or (_INNER_SEQUENCE_PARALLEL_GROUP is None): + _INNER_SEQUENCE_PARALLEL_WORLD_SIZE = 1 + else: + _INNER_SEQUENCE_PARALLEL_WORLD_SIZE = dist.get_world_size( + group=get_inner_sequence_parallel_group()) + return _INNER_SEQUENCE_PARALLEL_WORLD_SIZE + + +def get_inner_sequence_parallel_rank(): + global _INNER_SEQUENCE_PARALLEL_RANK + if _INNER_SEQUENCE_PARALLEL_RANK is not None: + return _INNER_SEQUENCE_PARALLEL_RANK + if not dist.is_initialized() or (_INNER_SEQUENCE_PARALLEL_GROUP is None): + _INNER_SEQUENCE_PARALLEL_RANK = 0 + else: + _INNER_SEQUENCE_PARALLEL_RANK = dist.get_rank( + group=get_inner_sequence_parallel_group()) + return _INNER_SEQUENCE_PARALLEL_RANK + + +def get_sequence_parallel_group(): + """Get the sequence parallel group the caller rank belongs to.""" + return _SEQUENCE_PARALLEL_GROUP + + +def get_sequence_parallel_world_size(): + """Return world size for the sequence parallel group.""" + global _SEQUENCE_PARALLEL_WORLD_SIZE + if _SEQUENCE_PARALLEL_WORLD_SIZE is not None: + return _SEQUENCE_PARALLEL_WORLD_SIZE + if not dist.is_initialized() or (_SEQUENCE_PARALLEL_GROUP is None): + _SEQUENCE_PARALLEL_WORLD_SIZE = 1 + else: + _SEQUENCE_PARALLEL_WORLD_SIZE = dist.get_world_size( + group=get_sequence_parallel_group()) + return _SEQUENCE_PARALLEL_WORLD_SIZE + + +def get_sequence_parallel_rank(): + """Return my rank for the sequence parallel group.""" + global _SEQUENCE_PARALLEL_RANK + if _SEQUENCE_PARALLEL_RANK is not None: + return _SEQUENCE_PARALLEL_RANK + if not dist.is_initialized() or (_SEQUENCE_PARALLEL_GROUP is None): + _SEQUENCE_PARALLEL_RANK = 0 + else: + _SEQUENCE_PARALLEL_RANK = dist.get_rank( + group=get_sequence_parallel_group()) + return _SEQUENCE_PARALLEL_RANK + + +def get_data_parallel_group(): + """Get the data parallel group the caller rank belongs to.""" + assert _DATA_PARALLEL_GROUP is not None, \ + 'data parallel group is not initialized' + return _DATA_PARALLEL_GROUP + + +def get_data_parallel_world_size(): + """Return world size for the data parallel group.""" + global _DATA_PARALLEL_WORLD_SIZE + if _DATA_PARALLEL_WORLD_SIZE is not None: + return _DATA_PARALLEL_WORLD_SIZE + if not dist.is_initialized(): + _DATA_PARALLEL_WORLD_SIZE = 1 + else: + _DATA_PARALLEL_WORLD_SIZE = dist.get_world_size( + group=get_data_parallel_group()) + return _DATA_PARALLEL_WORLD_SIZE + + +def get_data_parallel_rank(): + """Return my rank for the data parallel group.""" + global _DATA_PARALLEL_RANK + if _DATA_PARALLEL_RANK is not None: + return _DATA_PARALLEL_RANK + if not 
dist.is_initialized(): + _DATA_PARALLEL_RANK = 0 + else: + _DATA_PARALLEL_RANK = dist.get_rank(group=get_data_parallel_group()) + return _DATA_PARALLEL_RANK diff --git a/data/xtuner/xtuner/registry.py b/data/xtuner/xtuner/registry.py new file mode 100644 index 0000000000000000000000000000000000000000..7c8907e0be44210849d029bc26c77494971220b0 --- /dev/null +++ b/data/xtuner/xtuner/registry.py @@ -0,0 +1,7 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from mmengine.registry import Registry + +__all__ = ['BUILDER', 'MAP_FUNC'] + +BUILDER = Registry('builder') +MAP_FUNC = Registry('map_fn') diff --git a/data/xtuner/xtuner/tools/chat.py b/data/xtuner/xtuner/tools/chat.py new file mode 100644 index 0000000000000000000000000000000000000000..3bddac52cdcca8c2e5ef7ac5e10ebcd444897e5f --- /dev/null +++ b/data/xtuner/xtuner/tools/chat.py @@ -0,0 +1,491 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import argparse +import os +import os.path as osp +import re +import sys + +import torch +from huggingface_hub import snapshot_download +from peft import PeftModel +from transformers import (AutoModel, AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig, CLIPImageProcessor, + CLIPVisionModel, GenerationConfig) +from transformers.generation.streamers import TextStreamer + +from xtuner.dataset.utils import expand2square, load_image +from xtuner.model.utils import prepare_inputs_labels_for_multimodal +from xtuner.tools.utils import get_stop_criteria +from xtuner.utils import (DEFAULT_IMAGE_TOKEN, IMAGE_TOKEN_INDEX, + PROMPT_TEMPLATE, SYSTEM_TEMPLATE) + +TORCH_DTYPE_MAP = dict( + fp16=torch.float16, bf16=torch.bfloat16, fp32=torch.float32, auto='auto') + + +def remove_prefix(state_dict, prefix): + new_state_dict = {} + for key, value in state_dict.items(): + if key.startswith(prefix): + new_key = key[len(prefix):] + new_state_dict[new_key] = value + else: + new_state_dict[key] = value + return new_state_dict + + +def parse_args(): + parser = argparse.ArgumentParser(description='Chat with a HF model') + parser.add_argument( + 'model_name_or_path', help='Hugging Face model name or path') + adapter_group = parser.add_mutually_exclusive_group() + adapter_group.add_argument( + '--adapter', default=None, help='adapter name or path') + adapter_group.add_argument( + '--llava', default=None, help='llava name or path') + parser.add_argument( + '--visual-encoder', default=None, help='visual encoder name or path') + parser.add_argument( + '--visual-select-layer', default=-2, help='visual select layer') + parser.add_argument('--image', default=None, help='image') + parser.add_argument( + '--torch-dtype', + default='fp16', + choices=TORCH_DTYPE_MAP.keys(), + help='Override the default `torch.dtype` and load the model under ' + 'a specific `dtype`.') + parser.add_argument( + '--prompt-template', + choices=PROMPT_TEMPLATE.keys(), + default=None, + help='Specify a prompt template') + system_group = parser.add_mutually_exclusive_group() + system_group.add_argument( + '--system', default=None, help='Specify the system text') + system_group.add_argument( + '--system-template', + choices=SYSTEM_TEMPLATE.keys(), + default=None, + help='Specify a system template') + parser.add_argument( + '--bits', + type=int, + choices=[4, 8, None], + default=None, + help='LLM bits') + parser.add_argument( + '--bot-name', type=str, default='BOT', help='Name for Bot') + parser.add_argument( + '--with-plugins', + nargs='+', + choices=['calculate', 'solve', 'search'], + help='Specify plugins to use') + parser.add_argument( + 
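`remove_prefix` above is a plain key-renaming helper for checkpoint state dicts. A toy call, with invented keys and integers standing in for tensors:

```python
state_dict = {
    'llm.model.layers.0.weight': 0,
    'projector.weight': 1,
}
print(remove_prefix(state_dict, 'llm.'))
# {'model.layers.0.weight': 0, 'projector.weight': 1}
```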
'--no-streamer', action='store_true', help='Whether to with streamer') + parser.add_argument( + '--lagent', action='store_true', help='Whether to use lagent') + parser.add_argument( + '--stop-words', nargs='+', type=str, default=[], help='Stop words') + parser.add_argument( + '--offload-folder', + default=None, + help='The folder in which to offload the model weights (or where the ' + 'model weights are already offloaded).') + parser.add_argument( + '--max-new-tokens', + type=int, + default=2048, + help='Maximum number of new tokens allowed in generated text') + parser.add_argument( + '--temperature', + type=float, + default=0.1, + help='The value used to modulate the next token probabilities.') + parser.add_argument( + '--top-k', + type=int, + default=40, + help='The number of highest probability vocabulary tokens to ' + 'keep for top-k-filtering.') + parser.add_argument( + '--top-p', + type=float, + default=0.75, + help='If set to float < 1, only the smallest set of most probable ' + 'tokens with probabilities that add up to top_p or higher are ' + 'kept for generation.') + parser.add_argument( + '--repetition-penalty', + type=float, + default=1.0, + help='The parameter for repetition penalty. 1.0 means no penalty.') + parser.add_argument( + '--seed', + type=int, + default=0, + help='Random seed for reproducible text generation') + args = parser.parse_args() + return args + + +def get_input(): + """Helper function for getting input from users.""" + sentinel = '' # ends when this string is seen + result = None + while result is None: + print(('\ndouble enter to end input (EXIT: exit chat, ' + 'RESET: reset history) >>> '), + end='') + try: + result = '\n'.join(iter(input, sentinel)) + except UnicodeDecodeError: + print('Invalid characters detected. 
Please enter again.') + return result + + +def main(): + args = parse_args() + torch.manual_seed(args.seed) + + # build llm + quantization_config = None + load_in_8bit = False + if args.bits == 4: + quantization_config = BitsAndBytesConfig( + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4') + elif args.bits == 8: + load_in_8bit = True + model_kwargs = { + 'quantization_config': quantization_config, + 'load_in_8bit': load_in_8bit, + 'device_map': 'auto', + 'offload_folder': args.offload_folder, + 'trust_remote_code': True, + 'torch_dtype': TORCH_DTYPE_MAP[args.torch_dtype] + } + if args.lagent: + from lagent.actions import ActionExecutor, GoogleSearch + from lagent.agents import (CALL_PROTOCOL_CN, FORCE_STOP_PROMPT_CN, + ReAct, ReActProtocol) + from lagent.llms import HFTransformerCasualLM + + try: + SERPER_API_KEY = os.environ['SERPER_API_KEY'] + except Exception: + print('Please obtain the `SERPER_API_KEY` from https://serper.dev ' + 'and set it using `export SERPER_API_KEY=xxx`.') + sys.exit(1) + + model_kwargs.pop('trust_remote_code') + llm = HFTransformerCasualLM( + args.model_name_or_path, model_kwargs=model_kwargs) + if args.adapter is not None: + print(f'Loading adapter from {args.adapter}...') + llm.model = PeftModel.from_pretrained( + llm.model, + args.adapter, + offload_folder=args.offload_folder, + trust_remote_code=True) + search_tool = GoogleSearch(api_key=SERPER_API_KEY) + chatbot = ReAct( + llm=llm, + action_executor=ActionExecutor(actions=[search_tool]), + protocol=ReActProtocol( + call_protocol=CALL_PROTOCOL_CN, + force_stop=FORCE_STOP_PROMPT_CN)) + while True: + text = get_input() + while text.strip() == 'RESET': + print('Log: History responses have been removed!') + chatbot._session_history = [] + inputs = '' + text = get_input() + if text.strip() == 'EXIT': + print('Log: Exit!') + exit(0) + response = chatbot.chat(text) + print(response.response) + else: + if args.with_plugins is None: + inner_thoughts_open = False + calculate_open = False + solve_open = False + search_open = False + else: + assert args.prompt_template == args.system_template == 'moss_sft' + from plugins import plugins_api + inner_thoughts_open = True + calculate_open = 'calculate' in args.with_plugins + solve_open = 'solve' in args.with_plugins + search_open = 'search' in args.with_plugins + # pre-import for api and model preparation + if calculate_open: + from plugins import calculate # noqa: F401 + if solve_open: + from plugins import solve # noqa: F401 + if search_open: + from plugins import search # noqa: F401 + # build llm + llm = AutoModelForCausalLM.from_pretrained(args.model_name_or_path, + **model_kwargs) + tokenizer = AutoTokenizer.from_pretrained( + args.model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True) + print(f'Load LLM from {args.model_name_or_path}') + if args.adapter is not None: + llm = PeftModel.from_pretrained( + llm, + args.adapter, + offload_folder=args.offload_folder, + trust_remote_code=True) + print(f'Load adapter from {args.adapter}') + if args.llava is not None: + llava_path = snapshot_download( + repo_id=args.llava) if not osp.isdir( + args.llava) else args.llava + + # build visual_encoder + if 'visual_encoder' in os.listdir(llava_path): + assert args.visual_encoder is None, ( + "Please don't specify the `--visual-encoder` since passed " + '`--llava` contains a visual encoder!') + 
visual_encoder_path = osp.join(llava_path, 'visual_encoder') + else: + assert args.visual_encoder is not None, ( + 'Please specify the `--visual-encoder`!') + visual_encoder_path = args.visual_encoder + visual_encoder = CLIPVisionModel.from_pretrained( + visual_encoder_path, + torch_dtype=TORCH_DTYPE_MAP[args.torch_dtype]) + image_processor = CLIPImageProcessor.from_pretrained( + visual_encoder_path) + print(f'Load visual_encoder from {visual_encoder_path}') + + # load adapter + if 'llm_adapter' in os.listdir(llava_path): + adapter_path = osp.join(llava_path, 'llm_adapter') + llm = PeftModel.from_pretrained( + llm, + adapter_path, + offload_folder=args.offload_folder, + trust_remote_code=True) + print(f'Load LLM adapter from {args.llava}') + if 'visual_encoder_adapter' in os.listdir(llava_path): + adapter_path = osp.join(llava_path, 'visual_encoder_adapter') + visual_encoder = PeftModel.from_pretrained( + visual_encoder, + adapter_path, + offload_folder=args.offload_folder) + print(f'Load visual_encoder adapter from {args.llava}') + + # build projector + projector_path = osp.join(llava_path, 'projector') + projector = AutoModel.from_pretrained( + projector_path, + torch_dtype=TORCH_DTYPE_MAP[args.torch_dtype], + trust_remote_code=True) + print(f'Load projector from {args.llava}') + + projector.cuda() + projector.eval() + visual_encoder.cuda() + visual_encoder.eval() + + llm.eval() + + if args.image is not None: + image = load_image(args.image) + image = expand2square( + image, tuple(int(x * 255) for x in image_processor.image_mean)) + image = image_processor.preprocess( + image, return_tensors='pt')['pixel_values'][0] + image = image.cuda().unsqueeze(0).to(visual_encoder.dtype) + visual_outputs = visual_encoder(image, output_hidden_states=True) + pixel_values = projector( + visual_outputs.hidden_states[args.visual_select_layer][:, 1:]) + + stop_words = args.stop_words + sep = '' + if args.prompt_template: + template = PROMPT_TEMPLATE[args.prompt_template] + stop_words += template.get('STOP_WORDS', []) + sep = template.get('SEP', '') + stop_criteria = get_stop_criteria( + tokenizer=tokenizer, stop_words=stop_words) + + if args.no_streamer: + streamer = None + else: + streamer = TextStreamer(tokenizer, skip_prompt=True) + + gen_config = GenerationConfig( + max_new_tokens=args.max_new_tokens, + do_sample=args.temperature > 0, + temperature=args.temperature, + top_p=args.top_p, + top_k=args.top_k, + repetition_penalty=args.repetition_penalty, + eos_token_id=tokenizer.eos_token_id, + pad_token_id=tokenizer.pad_token_id + if tokenizer.pad_token_id is not None else tokenizer.eos_token_id, + ) + + n_turn = 0 + inputs = '' + while True: + text = get_input() + while text.strip() == 'RESET': + print('Log: History responses have been removed!') + n_turn = 0 + inputs = '' + text = get_input() + if text.strip() == 'EXIT': + print('Log: Exit!') + exit(0) + + if args.image is not None and n_turn == 0: + text = DEFAULT_IMAGE_TOKEN + '\n' + text + + if args.prompt_template: + prompt_text = '' + template = PROMPT_TEMPLATE[args.prompt_template] + if 'SYSTEM' in template and n_turn == 0: + system_text = None + if args.system_template is not None: + system_text = SYSTEM_TEMPLATE[ + args.system_template].format( + round=n_turn + 1, bot_name=args.bot_name) + elif args.system is not None: + system_text = args.system + if system_text is not None: + prompt_text += template['SYSTEM'].format( + system=system_text, + round=n_turn + 1, + bot_name=args.bot_name) + prompt_text += template['INSTRUCTION'].format( + 
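The prompt assembly in the chat loop fills per-template SYSTEM and INSTRUCTION strings. A sketch with an invented two-field template; real `PROMPT_TEMPLATE` entries also take `{round}` and `{bot_name}`, and the exact markup varies per model family:

```python
template = {
    'SYSTEM': '<|System|>: {system}\n',
    'INSTRUCTION': '<|User|>: {input}\n<|Bot|>: ',
}
prompt_text = template['SYSTEM'].format(system='You are a helpful bot.')
prompt_text += template['INSTRUCTION'].format(input='Hi!')
print(prompt_text)
# <|System|>: You are a helpful bot.
# <|User|>: Hi!
# <|Bot|>: 
```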
input=text, round=n_turn + 1, bot_name=args.bot_name)
+                if args.prompt_template == args.system_template == 'moss_sft':
+                    # str.replace returns a new string, so the result must be
+                    # assigned back (the bare calls were no-ops).
+                    if not inner_thoughts_open:
+                        prompt_text = prompt_text.replace(
+                            '- Inner thoughts: enabled.',
+                            '- Inner thoughts: disabled.')
+                    if not calculate_open:
+                        prompt_text = prompt_text.replace(
+                            '- Calculator: enabled. API: '
+                            'Calculate(expression)',
+                            '- Calculator: disabled.')
+                    if not solve_open:
+                        prompt_text = prompt_text.replace(
+                            '- Equation solver: enabled. API: Solve(equation)',
+                            '- Equation solver: disabled.')
+                    if not search_open:
+                        prompt_text = prompt_text.replace(
+                            '- Web search: enabled. API: Search(query)',
+                            '- Web search: disabled.')
+            else:
+                prompt_text = text
+            inputs += prompt_text
+            if args.image is None:
+                if n_turn == 0:
+                    ids = tokenizer.encode(inputs, return_tensors='pt')
+                else:
+                    ids = tokenizer.encode(
+                        inputs, return_tensors='pt', add_special_tokens=False)
+
+                if args.with_plugins is not None:
+                    generate_output = llm.generate(
+                        inputs=ids.cuda(),
+                        generation_config=gen_config,
+                        streamer=streamer,
+                        stopping_criteria=stop_criteria).cpu()
+                    generate_output_text = tokenizer.decode(
+                        generate_output[0][len(ids[0]):])
+                    if streamer is None:
+                        end = '' if generate_output_text[-1] == '\n' else '\n'
+                        print(generate_output_text, end=end)
+                    pattern = r'<\|Commands\|>:(.*?)<eoc>'
+                    command_text = ', '.join(
+                        re.findall(pattern, generate_output_text))
+                    extent_text = plugins_api(
+                        command_text,
+                        calculate_open=calculate_open,
+                        solve_open=solve_open,
+                        search_open=search_open)
+                    end = '' if extent_text[-1] == '\n' else '\n'
+                    print(extent_text, end=end)
+                    extent_text_ids = tokenizer.encode(
+                        extent_text,
+                        return_tensors='pt',
+                        add_special_tokens=False)
+                    new_ids = torch.cat((generate_output, extent_text_ids),
+                                        dim=1)
+
+                    generate_output = llm.generate(
+                        inputs=new_ids.cuda(),
+                        generation_config=gen_config,
+                        streamer=streamer,
+                        stopping_criteria=stop_criteria)
+                    if streamer is None:
+                        output_text = tokenizer.decode(
+                            generate_output[0][len(new_ids[0]):])
+                        end = '' if output_text[-1] == '\n' else '\n'
+                        print(output_text, end=end)
+                else:
+                    generate_output = llm.generate(
+                        inputs=ids.cuda(),
+                        generation_config=gen_config,
+                        streamer=streamer,
+                        stopping_criteria=stop_criteria)
+                    if streamer is None:
+                        output_text = tokenizer.decode(
+                            generate_output[0][len(ids[0]):])
+                        end = '' if output_text[-1] == '\n' else '\n'
+                        print(output_text, end=end)
+                inputs = tokenizer.decode(generate_output[0])
+            else:
+                chunk_encode = []
+                for idx, chunk in enumerate(inputs.split(DEFAULT_IMAGE_TOKEN)):
+                    if idx == 0 and n_turn == 0:
+                        cur_encode = tokenizer.encode(chunk)
+                    else:
+                        cur_encode = tokenizer.encode(
+                            chunk, add_special_tokens=False)
+                    chunk_encode.append(cur_encode)
+                assert len(chunk_encode) == 2
+                ids = []
+                for idx, cur_chunk_encode in enumerate(chunk_encode):
+                    ids.extend(cur_chunk_encode)
+                    if idx != len(chunk_encode) - 1:
+                        ids.append(IMAGE_TOKEN_INDEX)
+                ids = torch.tensor(ids).cuda().unsqueeze(0)
+                mm_inputs = prepare_inputs_labels_for_multimodal(
+                    llm=llm, input_ids=ids, pixel_values=pixel_values)
+
+                generate_output = llm.generate(
+                    **mm_inputs,
+                    generation_config=gen_config,
+                    streamer=streamer,
+                    bos_token_id=tokenizer.bos_token_id,
+                    stopping_criteria=stop_criteria)
+                if streamer is None:
+                    output_text = tokenizer.decode(generate_output[0])
+                    end = '' if output_text[-1] == '\n' else '\n'
+                    print(output_text, end=end)
+                inputs += tokenizer.decode(generate_output[0])
+            n_turn += 1
+            inputs += sep
+            if len(generate_output[0]) >= args.max_new_tokens:
+                print(
+                    'Remove the memory of 
history responses, since ' + f'it exceeds the length limitation {args.max_new_tokens}.') + n_turn = 0 + inputs = '' + + +if __name__ == '__main__': + main() diff --git a/data/xtuner/xtuner/tools/check_custom_dataset.py b/data/xtuner/xtuner/tools/check_custom_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..d9d005fb5b6e9f7b3b0cf964d5dd45c4acdd5a4a --- /dev/null +++ b/data/xtuner/xtuner/tools/check_custom_dataset.py @@ -0,0 +1,157 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import argparse +from functools import partial + +import numpy as np +from datasets import DatasetDict +from mmengine.config import Config + +from xtuner.dataset.utils import Packer, encode_fn +from xtuner.registry import BUILDER + + +def parse_args(): + parser = argparse.ArgumentParser( + description='Verify the correctness of the config file for the ' + 'custom dataset.') + parser.add_argument('config', help='config file name or path.') + args = parser.parse_args() + return args + + +def is_standard_format(dataset): + example = next(iter(dataset)) + if 'conversation' not in example: + return False + conversation = example['conversation'] + if not isinstance(conversation, list): + return False + for item in conversation: + if (not isinstance(item, dict)) or ('input' + not in item) or ('output' + not in item): + return False + input, output = item['input'], item['output'] + if (not isinstance(input, str)) or (not isinstance(output, str)): + return False + return True + + +def main(): + args = parse_args() + + cfg = Config.fromfile(args.config) + + tokenizer = BUILDER.build(cfg.tokenizer) + if cfg.get('framework', 'mmengine').lower() == 'huggingface': + train_dataset = cfg.train_dataset + else: + train_dataset = cfg.train_dataloader.dataset + + dataset = train_dataset.dataset + max_length = train_dataset.max_length + dataset_map_fn = train_dataset.get('dataset_map_fn', None) + template_map_fn = train_dataset.get('template_map_fn', None) + max_dataset_length = train_dataset.get('max_dataset_length', 10) + split = train_dataset.get('split', 'train') + remove_unused_columns = train_dataset.get('remove_unused_columns', False) + rename_maps = train_dataset.get('rename_maps', []) + shuffle_before_pack = train_dataset.get('shuffle_before_pack', True) + pack_to_max_length = train_dataset.get('pack_to_max_length', True) + input_ids_with_output = train_dataset.get('input_ids_with_output', True) + + if dataset.get('path', '') != 'json': + raise ValueError( + 'You are using custom datasets for SFT. ' + 'The custom datasets should be in json format. To load your JSON ' + 'file, you can use the following code snippet: \n' + '"""\nfrom datasets import load_dataset \n' + 'dataset = dict(type=load_dataset, path=\'json\', ' + 'data_files=\'your_json_file.json\')\n"""\n' + 'For more details, please refer to Step 5 in the ' + '`Using Custom Datasets` section of the documentation found at' + ' docs/zh_cn/user_guides/single_turn_conversation.md.') + + try: + dataset = BUILDER.build(dataset) + except RuntimeError: + raise RuntimeError( + 'Unable to load the custom JSON file using ' + '`datasets.load_dataset`. Your data-related config is ' + f'{train_dataset}. 
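`is_standard_format` above accepts datasets whose records follow the XTuner-defined conversation schema. A minimal invented record that passes the check:

```python
example = {
    'conversation': [
        {'input': 'Translate "hello" to French.', 'output': 'Bonjour.'},
        {'input': 'And to German?', 'output': 'Hallo.'},
    ]
}
# Each turn is a dict with string 'input'/'output' fields; multi-turn
# data is simply a longer 'conversation' list.
```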
Please refer to the official documentation on' + ' `load_dataset` (https://huggingface.co/docs/datasets/loading) ' + 'for more details.') + + if isinstance(dataset, DatasetDict): + dataset = dataset[split] + + if not is_standard_format(dataset) and dataset_map_fn is None: + raise ValueError( + 'If the custom dataset is not in the XTuner-defined ' + 'format, please utilize `dataset_map_fn` to map the original data' + ' to the standard format. For more details, please refer to ' + 'Step 1 and Step 5 in the `Using Custom Datasets` section of the ' + 'documentation found at ' + '`docs/zh_cn/user_guides/single_turn_conversation.md`.') + + if is_standard_format(dataset) and dataset_map_fn is not None: + raise ValueError( + 'If the custom dataset is already in the XTuner-defined format, ' + 'please set `dataset_map_fn` to None.' + 'For more details, please refer to Step 1 and Step 5 in the ' + '`Using Custom Datasets` section of the documentation found at' + ' docs/zh_cn/user_guides/single_turn_conversation.md.') + + max_dataset_length = min(max_dataset_length, len(dataset)) + indices = np.random.choice(len(dataset), max_dataset_length, replace=False) + dataset = dataset.select(indices) + + if dataset_map_fn is not None: + dataset = dataset.map(dataset_map_fn) + + print('#' * 20 + ' dataset after `dataset_map_fn` ' + '#' * 20) + print(dataset[0]['conversation']) + + if template_map_fn is not None: + template_map_fn = BUILDER.build(template_map_fn) + dataset = dataset.map(template_map_fn) + + print('#' * 20 + ' dataset after adding templates ' + '#' * 20) + print(dataset[0]['conversation']) + + for old, new in rename_maps: + dataset = dataset.rename_column(old, new) + + if pack_to_max_length and (not remove_unused_columns): + raise ValueError('We have to remove unused columns if ' + '`pack_to_max_length` is set to True.') + + dataset = dataset.map( + partial( + encode_fn, + tokenizer=tokenizer, + max_length=max_length, + input_ids_with_output=input_ids_with_output), + remove_columns=list(dataset.column_names) + if remove_unused_columns else None) + + print('#' * 20 + ' encoded input_ids ' + '#' * 20) + print(dataset[0]['input_ids']) + print('#' * 20 + ' encoded labels ' + '#' * 20) + print(dataset[0]['labels']) + + if pack_to_max_length and split == 'train': + if shuffle_before_pack: + dataset = dataset.shuffle() + dataset = dataset.flatten_indices() + dataset = dataset.map(Packer(max_length), batched=True) + + print('#' * 20 + ' input_ids after packed to max_length ' + + '#' * 20) + print(dataset[0]['input_ids']) + print('#' * 20 + ' labels after packed to max_length ' + '#' * 20) + print(dataset[0]['labels']) + + +if __name__ == '__main__': + main() diff --git a/data/xtuner/xtuner/tools/copy_cfg.py b/data/xtuner/xtuner/tools/copy_cfg.py new file mode 100644 index 0000000000000000000000000000000000000000..9c3ff69c1271ae16fc3ad11d2f7ce184cca5dfea --- /dev/null +++ b/data/xtuner/xtuner/tools/copy_cfg.py @@ -0,0 +1,35 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
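What "pack to max_length" means in the script above: short tokenized samples are concatenated and re-chunked into fixed-length blocks so no compute is wasted on padding. A toy re-statement (not xtuner's actual `Packer`, which also carries labels and operates on batched columns):

```python
samples = [[1, 2, 3], [4, 5], [6, 7, 8, 9, 10]]
max_length = 8
flat = [tok for sample in samples for tok in sample]
packed = [flat[i:i + max_length] for i in range(0, len(flat), max_length)]
print(packed)  # [[1, 2, 3, 4, 5, 6, 7, 8], [9, 10]]
```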
+import argparse +import os.path as osp +import shutil + +from mmengine.utils import mkdir_or_exist + +from xtuner.configs import cfgs_name_path + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument('config_name', help='config name') + parser.add_argument('save_dir', help='save directory for copied config') + args = parser.parse_args() + return args + + +def add_copy_suffix(string): + file_name, ext = osp.splitext(string) + return f'{file_name}_copy{ext}' + + +def main(): + args = parse_args() + mkdir_or_exist(args.save_dir) + config_path = cfgs_name_path[args.config_name] + save_path = osp.join(args.save_dir, + add_copy_suffix(osp.basename(config_path))) + shutil.copyfile(config_path, save_path) + print(f'Copy to {save_path}') + + +if __name__ == '__main__': + main() diff --git a/data/xtuner/xtuner/tools/data_preprocess/arxiv.py b/data/xtuner/xtuner/tools/data_preprocess/arxiv.py new file mode 100644 index 0000000000000000000000000000000000000000..55c3004038971462142f1a4a3619edae4d775b34 --- /dev/null +++ b/data/xtuner/xtuner/tools/data_preprocess/arxiv.py @@ -0,0 +1,60 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import argparse +import json +from datetime import datetime + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument('src_file', help='source file path') + parser.add_argument('dst_file', help='destination file path') + parser.add_argument( + '--categories', + nargs='+', + default=['cs.AI', 'cs.CL', 'cs.CV'], + help='target categories') + parser.add_argument( + '--start-date', + default='2020-01-01', + help='start date (format: YYYY-MM-DD)') + + args = parser.parse_args() + return args + + +def has_intersection(list1, list2): + set1 = set(list1) + set2 = set(list2) + return len(set1.intersection(set2)) > 0 + + +def read_json_file(file_path): + data = [] + with open(file_path) as file: + for line in file: + try: + json_data = json.loads(line) + data.append(json_data) + except json.JSONDecodeError: + print(f'Failed to parse line: {line}') + return data + + +def main(): + args = parse_args() + json_data = read_json_file(args.src_file) + from_time = datetime.strptime(args.start_date, '%Y-%m-%d') + filtered_data = [ + item for item in json_data + if has_intersection(args.categories, item['categories'].split()) + and datetime.strptime(item['update_date'], '%Y-%m-%d') >= from_time + ] + + with open(args.dst_file, 'w') as file: + json.dump(filtered_data, file) + + print(f'Save to {args.dst_file}\n{len(filtered_data)} items') + + +if __name__ == '__main__': + main() diff --git a/data/xtuner/xtuner/tools/data_preprocess/convert_refcoco.py b/data/xtuner/xtuner/tools/data_preprocess/convert_refcoco.py new file mode 100644 index 0000000000000000000000000000000000000000..883e82a226414f9fbf49e27ed7144bd8e478cfef --- /dev/null +++ b/data/xtuner/xtuner/tools/data_preprocess/convert_refcoco.py @@ -0,0 +1,47 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
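`read_json_file` in the arXiv script above expects JSON Lines, one record per line, as in the public arXiv metadata dump; the fields below are abridged and the values invented:

```python
import json

line = ('{"id": "2001.00001", "categories": "cs.CL cs.AI", '
        '"update_date": "2021-06-01"}')
item = json.loads(line)
# has_intersection(['cs.AI', 'cs.CL', 'cs.CV'], item['categories'].split())
print('cs.CL' in item['categories'].split())  # True -> record is kept
```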
+import argparse
+import json
+
+from xtuner.dataset.refcoco_json import RefCOCOJsonDataset
+
+
+def parse_args():
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        '--ann-path',
+        default='data/refcoco_annotations',
+        help='RefCOCO annotation path',
+    )
+    parser.add_argument(
+        '--image-path',
+        default='data/llava_data/llava_images/coco/train2017',
+        help='COCO image path',
+    )
+    parser.add_argument(
+        '--save-path', default='./', help='The folder to save converted data')
+    args = parser.parse_args()
+    return args
+
+
+if __name__ == '__main__':
+    args = parse_args()
+
+    data_info = [
+        ('refcoco', 'unc'),
+        ('refcoco+', 'unc'),
+        ('refcocog', 'umd'),
+    ]
+    all_data = []
+    for dataset, split in data_info:
+        data = RefCOCOJsonDataset.get_data_json(
+            ann_path=args.ann_path,
+            image_path=args.image_path,
+            dataset=dataset,
+            splitBy=split,
+        )[0]
+        all_data.extend(data)
+    save_path = args.save_path + '/train.json'
+    with open(save_path, 'w') as f:
+        print(f'save to {save_path} with {len(all_data)} items.')
+        print(all_data[0])
+        json.dump(all_data, f, indent=4)
diff --git a/data/xtuner/xtuner/tools/eval_refcoco.py b/data/xtuner/xtuner/tools/eval_refcoco.py
new file mode 100644
index 0000000000000000000000000000000000000000..cbdc1bf6e9dda876440ffa61416f66247d1705db
--- /dev/null
+++ b/data/xtuner/xtuner/tools/eval_refcoco.py
@@ -0,0 +1,356 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import argparse
+import os
+import os.path as osp
+import re
+
+import torch
+import tqdm
+from huggingface_hub import snapshot_download
+from mmengine.dist import get_dist_info, init_dist, master_only
+from mmengine.utils.dl_utils import set_multi_processing
+from peft import PeftModel
+from torch import distributed as dist
+from torch.utils.data import DataLoader, DistributedSampler
+from transformers import (AutoModel, AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig, CLIPImageProcessor,
+                          CLIPVisionModel, GenerationConfig)
+
+from xtuner.dataset.map_fns import llava_map_fn, template_map_fn_factory
+from xtuner.dataset.refcoco_json import RefCOCOJsonEvalDataset
+from xtuner.model.utils import LoadWoInit, prepare_inputs_labels_for_multimodal
+from xtuner.tools.utils import get_stop_criteria
+from xtuner.utils import (DEFAULT_IMAGE_TOKEN, IMAGE_TOKEN_INDEX,
+                          PROMPT_TEMPLATE)
+
+TORCH_DTYPE_MAP = dict(
+    fp16=torch.float16, bf16=torch.bfloat16, fp32=torch.float32, auto='auto')
+
+
+def merge_outputs(outputs):
+    new_outputs = [None for _ in range(dist.get_world_size())]
+
+    assert dist.is_initialized()
+
+    dist.all_gather_object(new_outputs, outputs)
+    merged = []
+    for output in new_outputs:
+        merged.extend(output)
+    return merged
+
+
+@master_only
+def master_print(msg):
+    print(msg)
+
+
+def parse_args():
+    parser = argparse.ArgumentParser(description='RefCOCO evaluation')
+    parser.add_argument(
+        'model_name_or_path', help='Hugging Face model name or path')
+    parser.add_argument('--data-path', default=None, help='data path')
+    parser.add_argument('--work-dir', help='the dir to save results')
+    parser.add_argument('--llava', default=None, help='llava name or path')
+    parser.add_argument(
+        '--visual-encoder', default=None, help='visual encoder name or path')
+    parser.add_argument(
+        '--visual-select-layer', default=-2, help='visual select layer')
+    parser.add_argument(
+        '--prompt-template',
+        choices=PROMPT_TEMPLATE.keys(),
+        default=None,
+        help='Specify a prompt template',
+    )
+    parser.add_argument(
+        '--stop-words', nargs='+', 
type=str, default=[], help='Stop words') + parser.add_argument( + '--torch-dtype', + default='fp16', + choices=TORCH_DTYPE_MAP.keys(), + help='Override the default `torch.dtype` and load the model under ' + 'a specific `dtype`.', + ) + parser.add_argument( + '--bits', + type=int, + choices=[4, 8, None], + default=None, + help='LLM bits') + parser.add_argument( + '--bot-name', type=str, default='BOT', help='Name for Bot') + parser.add_argument( + '--offload-folder', + default=None, + help='The folder in which to offload the model weights (or where the ' + 'model weights are already offloaded).', + ) + parser.add_argument( + '--max-new-tokens', + type=int, + default=100, + help='Maximum number of new tokens allowed in generated text', + ) + parser.add_argument( + '--seed', + type=int, + default=0, + help='Random seed for reproducible text generation', + ) + parser.add_argument( + '--launcher', + choices=['none', 'pytorch', 'slurm', 'mpi'], + default='none', + help='job launcher', + ) + args = parser.parse_args() + return args + + +def eval_iou(answers): + + def computeIoU(bbox1, bbox2): + x1, y1, x2, y2 = bbox1 + x3, y3, x4, y4 = bbox2 + intersection_x1 = max(x1, x3) + intersection_y1 = max(y1, y3) + intersection_x2 = min(x2, x4) + intersection_y2 = min(y2, y4) + intersection_area = max(0, + intersection_x2 - intersection_x1 + 1) * max( + 0, intersection_y2 - intersection_y1 + 1) + bbox1_area = (x2 - x1 + 1) * (y2 - y1 + 1) + bbox2_area = (x4 - x3 + 1) * (y4 - y3 + 1) + union_area = bbox1_area + bbox2_area - intersection_area + iou = intersection_area / union_area + return iou + + right = 0 + for answer in answers: + bbox = answer['bbox'] + bbox = RefCOCOJsonEvalDataset.normalize_bbox(bbox, answer['height'], + answer['width']) + answer_bbox = [int(x) for x in re.findall(r'\d+', answer['ans'])] + if len(answer_bbox) == 4: + iou = computeIoU(answer_bbox, bbox) + if iou > 0.5: + right += 1 + else: + print('Error format sample: ', answer) + return right / len(answers) + + +def build_model(args): + rank, world_size = get_dist_info() + # build llm + quantization_config = None + load_in_8bit = False + if args.bits == 4: + quantization_config = BitsAndBytesConfig( + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4', + ) + elif args.bits == 8: + load_in_8bit = True + model_kwargs = { + 'quantization_config': quantization_config, + 'load_in_8bit': load_in_8bit, + 'device_map': rank if world_size > 1 else 'auto', + 'offload_folder': args.offload_folder, + 'trust_remote_code': True, + 'torch_dtype': TORCH_DTYPE_MAP[args.torch_dtype], + } + + # build llm + with LoadWoInit(): + llm = AutoModelForCausalLM.from_pretrained(args.model_name_or_path, + **model_kwargs) + tokenizer = AutoTokenizer.from_pretrained( + args.model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True) + master_print(f'Load LLM from {args.model_name_or_path}') + + llava_path = ( + snapshot_download( + repo_id=args.llava) if not osp.isdir(args.llava) else args.llava) + + # build visual_encoder + if 'visual_encoder' in os.listdir(llava_path): + assert args.visual_encoder is None, ( + "Please don't specify the `--visual-encoder` since passed " + '`--llava` contains a visual encoder!') + visual_encoder_path = osp.join(llava_path, 'visual_encoder') + else: + assert (args.visual_encoder is not None + ), 'Please specify the `--visual-encoder`!' 
# noqa: E501 + visual_encoder_path = args.visual_encoder + with LoadWoInit(): + visual_encoder = CLIPVisionModel.from_pretrained( + visual_encoder_path, torch_dtype=TORCH_DTYPE_MAP[args.torch_dtype]) + image_processor = CLIPImageProcessor.from_pretrained( + visual_encoder_path) + master_print(f'Load visual_encoder from {visual_encoder_path}') + + # load adapter + if 'llm_adapter' in os.listdir(llava_path): + adapter_path = osp.join(llava_path, 'llm_adapter') + + with LoadWoInit(): + llm = PeftModel.from_pretrained( + llm, adapter_path, offload_folder=args.offload_folder) + + master_print(f'Load LLM adapter from {args.llava}') + + if 'visual_encoder_adapter' in os.listdir(llava_path): + adapter_path = osp.join(llava_path, 'visual_encoder_adapter') + visual_encoder = PeftModel.from_pretrained( + visual_encoder, adapter_path, offload_folder=args.offload_folder) + master_print(f'Load visual_encoder adapter from {args.llava}') + + # build projector + projector_path = osp.join(llava_path, 'projector') + with LoadWoInit(): + projector = AutoModel.from_pretrained( + projector_path, torch_dtype=TORCH_DTYPE_MAP[args.torch_dtype]) + master_print(f'Load projector from {args.llava}') + + projector.cuda() + projector.eval() + + visual_encoder.cuda() + visual_encoder.eval() + + llm.eval() + return llm, visual_encoder, projector, tokenizer, image_processor + + +def generate( + llm, + visual_encoder, + projector, + tokenizer, + samples, + visual_select_layer, +): + gen_config = GenerationConfig( + max_new_tokens=100, + do_sample=False, + eos_token_id=tokenizer.eos_token_id, + pad_token_id=(tokenizer.pad_token_id if tokenizer.pad_token_id + is not None else tokenizer.eos_token_id), + ) + stop_criteria = get_stop_criteria(tokenizer=tokenizer, stop_words=['']) + + device = next(llm.parameters()).device + # prepare inputs + inputs = samples['conversation'][0]['input'][0] + chunk_encode = [] + for idx, chunk in enumerate(inputs.split(DEFAULT_IMAGE_TOKEN)): + if idx == 0: + cur_encode = tokenizer.encode(chunk) + else: + cur_encode = tokenizer.encode(chunk, add_special_tokens=False) + chunk_encode.append(cur_encode) + assert len(chunk_encode) == 2 + ids = [] + for idx, cur_chunk_encode in enumerate(chunk_encode): + ids.extend(cur_chunk_encode) + if idx != len(chunk_encode) - 1: + ids.append(IMAGE_TOKEN_INDEX) + ids = torch.tensor(ids).cuda().unsqueeze(0) + + visual_outputs = visual_encoder( + samples['pixel_values'].to(device), output_hidden_states=True) + pixel_values = projector( + visual_outputs.hidden_states[visual_select_layer][:, 1:]) + samples['pixel_values'] = pixel_values + samples['input_ids'] = ids + datax = prepare_inputs_labels_for_multimodal( + llm=llm.to(device), + input_ids=samples['input_ids'].to(device), + pixel_values=samples['pixel_values'].to(device), + ) + + # generation + generation = llm.generate( + **datax, + generation_config=gen_config, + streamer=None, + bos_token_id=tokenizer.bos_token_id, + stopping_criteria=stop_criteria, + ) + answer = tokenizer.decode(generation[0]) + return { + 'ans': answer, + 'id': samples['id'][0], + 'bbox': torch.tensor(samples['bbox']).tolist(), + 'height': samples['height'], + 'width': samples['width'], + } + + +@torch.no_grad() +def main(): + # init + args = parse_args() + if args.launcher != 'none': + set_multi_processing(distributed=True) + init_dist(args.launcher) + + rank, world_size = get_dist_info() + torch.cuda.set_device(rank) + else: + rank = 0 + world_size = 1 + print(f'Rank: {rank} / World size: {world_size}') + + # build_model + llm, 
visual_encoder, projector, tokenizer, image_processor = build_model( + args) + + # dataset + dataset = RefCOCOJsonEvalDataset( + data_path=args.data_path, + image_folder='data/llava_data/llava_images/', + tokenizer=tokenizer, + image_processor=image_processor, + max_dataset_length=None, + dataset_map_fn=llava_map_fn, + template_map_fn=dict( + type=template_map_fn_factory, template=PROMPT_TEMPLATE.vicuna), + max_length=2048, + pad_image_to_square=False, + ) + loader = DataLoader( + dataset, + batch_size=1, + shuffle=False, + sampler=DistributedSampler(dataset, shuffle=False, seed=0), + ) + loader.sampler.set_epoch(0) + + answers = [] + for i, data in tqdm.tqdm(enumerate(loader), desc=f'Rank {rank}'): + answer = generate( + llm, + visual_encoder, + projector, + tokenizer, + data, + args.visual_select_layer, + ) + answers.append(answer) + + merged_outputs = merge_outputs(answers) + acc = eval_iou(merged_outputs) + master_print(f'Acc: {acc}') + + +if __name__ == '__main__': + main() diff --git a/data/xtuner/xtuner/tools/get_data_order.py b/data/xtuner/xtuner/tools/get_data_order.py new file mode 100644 index 0000000000000000000000000000000000000000..30c23e84e7213fb518f798946da0befb1091b8c2 --- /dev/null +++ b/data/xtuner/xtuner/tools/get_data_order.py @@ -0,0 +1,41 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import argparse +import os + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument('--data-folder', help='Data folder') + parser.add_argument('--save-folder', help='The folder to save data order.') + parser.add_argument( + '--file-type', + default='.bin', + help='We want to get the order of the file in this type.') + args = parser.parse_args() + return args + + +def save_data_order(data_folder, save_folder, file_type='.bin'): + assert os.path.exists(data_folder), f'{data_folder} does not exist.' + triples = list(os.walk(data_folder, followlinks=True)) + data_order = [] + for root, dirs, files in triples: + dirs.sort() + print(f'Reading {root}...') + for fn in sorted(files): + if fn.endswith(file_type): + fp = os.path.join(root, fn) + # Using relative paths so that you can get the same result + # on different clusters + fp = fp.replace(data_folder, '')[1:] + data_order.append(fp) + + save_path = os.path.join(save_folder, 'data_order.txt') + with open(save_path, 'w') as f: + for fp in data_order: + f.write(fp + '\n') + + +if __name__ == '__main__': + args = parse_args() + save_data_order(args.data_folder, args.save_folder, args.file_type) diff --git a/data/xtuner/xtuner/tools/list_cfg.py b/data/xtuner/xtuner/tools/list_cfg.py new file mode 100644 index 0000000000000000000000000000000000000000..0062ade5714aa5b30467ab53809d245f8c142f66 --- /dev/null +++ b/data/xtuner/xtuner/tools/list_cfg.py @@ -0,0 +1,29 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
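+# Print every built-in config name registered in `cfgs_name_path`, optionally
+# filtered by a case-insensitive substring, e.g. (illustrative):
+#   python xtuner/tools/list_cfg.py -p internlm2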
+import argparse
+
+from xtuner.configs import cfgs_name_path
+
+
+def parse_args():
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        '-p', '--pattern', default=None, help='Pattern for fuzzy matching')
+    args = parser.parse_args()
+    return args
+
+
+def main():
+    args = parse_args()
+    configs_names = sorted(list(cfgs_name_path.keys()))
+    print('==========================CONFIGS===========================')
+    if args.pattern is not None:
+        print(f'PATTERN: {args.pattern}')
+        print('-------------------------------')
+    for name in configs_names:
+        if args.pattern is None or args.pattern.lower() in name.lower():
+            print(name)
+    print('=============================================================')
+
+
+if __name__ == '__main__':
+    main()
diff --git a/data/xtuner/xtuner/tools/list_dataset_format.py b/data/xtuner/xtuner/tools/list_dataset_format.py
new file mode 100644
index 0000000000000000000000000000000000000000..40d3a71f2539db6b0af2880d78c0e2710c296dfe
--- /dev/null
+++ b/data/xtuner/xtuner/tools/list_dataset_format.py
@@ -0,0 +1,14 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from xtuner.dataset.map_fns import DATASET_FORMAT_MAPPING
+
+
+def main():
+    dataset_format = DATASET_FORMAT_MAPPING.keys()
+    print('======================DATASET_FORMAT======================')
+    for fmt in dataset_format:
+        print(fmt)
+    print('==========================================================')
+
+
+if __name__ == '__main__':
+    main()
diff --git a/data/xtuner/xtuner/tools/log_dataset.py b/data/xtuner/xtuner/tools/log_dataset.py
new file mode 100644
index 0000000000000000000000000000000000000000..40b5e25feff74d90cff8ffeaa74fd6b103d649a9
--- /dev/null
+++ b/data/xtuner/xtuner/tools/log_dataset.py
@@ -0,0 +1,52 @@
+# Copyright (c) OpenMMLab. All rights reserved.
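+# Build the dataset described by a config and print its first processed sample
+# (decoded text, masked text, raw input_ids and labels), which is handy for
+# checking prompt templating and loss masking before a training run, e.g.
+# (illustrative):
+#   python xtuner/tools/log_dataset.py CONFIG --show all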
+import argparse
+
+from mmengine.config import Config
+
+from xtuner.registry import BUILDER
+
+
+def parse_args():
+    parser = argparse.ArgumentParser(description='Log processed dataset.')
+    parser.add_argument('config', help='config file name or path.')
+    # choose which kind of dataset style to show
+    parser.add_argument(
+        '--show',
+        default='text',
+        choices=['text', 'masked_text', 'input_ids', 'labels', 'all'],
+        help='which kind of dataset style to show')
+    args = parser.parse_args()
+    return args
+
+
+def main():
+    args = parse_args()
+
+    cfg = Config.fromfile(args.config)
+
+    tokenizer = BUILDER.build(cfg.tokenizer)
+    if cfg.get('framework', 'mmengine').lower() == 'huggingface':
+        train_dataset = BUILDER.build(cfg.train_dataset)
+    else:
+        train_dataset = BUILDER.build(cfg.train_dataloader.dataset)
+
+    if args.show == 'text' or args.show == 'all':
+        print('#' * 20 + ' text ' + '#' * 20)
+        print(tokenizer.decode(train_dataset[0]['input_ids']))
+    if args.show == 'masked_text' or args.show == 'all':
+        print('#' * 20 + ' text(masked) ' + '#' * 20)
+        # Labels equal to -100 are ignored by the loss; render each of them as
+        # a '[-100]' placeholder and decode only the supervised tokens.
+        masked_text = ' '.join(
+            ['[-100]' for i in train_dataset[0]['labels'] if i == -100])
+        unmasked_text = tokenizer.decode(
+            [i for i in train_dataset[0]['labels'] if i != -100])
+        print(masked_text + ' ' + unmasked_text)
+    if args.show == 'input_ids' or args.show == 'all':
+        print('#' * 20 + ' input_ids ' + '#' * 20)
+        print(train_dataset[0]['input_ids'])
+    if args.show == 'labels' or args.show == 'all':
+        print('#' * 20 + ' labels ' + '#' * 20)
+        print(train_dataset[0]['labels'])
+
+
+if __name__ == '__main__':
+    main()
diff --git a/data/xtuner/xtuner/tools/mmbench.py b/data/xtuner/xtuner/tools/mmbench.py
new file mode 100644
index 0000000000000000000000000000000000000000..24d3825bb2ded3be9b11aaee18f312e86342223e
--- /dev/null
+++ b/data/xtuner/xtuner/tools/mmbench.py
@@ -0,0 +1,513 @@
+# Copyright (c) OpenMMLab. All rights reserved.
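+# Evaluate a LLaVA-style XTuner model on MMBench: load the LLM, the CLIP
+# visual encoder and the projector (plus any LoRA adapters found under
+# `--llava`), answer each multiple-choice question with a single option
+# letter, then dump the predictions to xlsx and, on the dev split, the
+# per-category accuracies to json.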
+import argparse +import json +import math +import os +import os.path as osp +import re +import string +import time + +import numpy as np +import pandas as pd +import torch +import tqdm +from huggingface_hub import snapshot_download +from mmengine import mkdir_or_exist +from mmengine.dist import (collect_results, get_dist_info, get_rank, init_dist, + master_only) +from mmengine.utils.dl_utils import set_multi_processing +from peft import PeftModel +from rich.console import Console +from rich.table import Table +from torch.utils.data import Dataset +from transformers import (AutoModel, AutoModelForCausalLM, AutoTokenizer, + BitsAndBytesConfig, CLIPImageProcessor, + CLIPVisionModel, GenerationConfig) + +from xtuner.dataset.utils import decode_base64_to_image, expand2square +from xtuner.model.utils import LoadWoInit, prepare_inputs_labels_for_multimodal +from xtuner.tools.utils import get_stop_criteria, is_cn_string +from xtuner.utils import (DEFAULT_IMAGE_TOKEN, IMAGE_TOKEN_INDEX, + PROMPT_TEMPLATE) + +TORCH_DTYPE_MAP = dict( + fp16=torch.float16, bf16=torch.bfloat16, fp32=torch.float32, auto='auto') + + +def parse_args(): + parser = argparse.ArgumentParser(description='MMBench') + parser.add_argument( + 'model_name_or_path', help='Hugging Face model name or path') + parser.add_argument('--data-path', default=None, help='data path') + parser.add_argument('--work-dir', help='the dir to save results') + parser.add_argument('--llava', default=None, help='llava name or path') + parser.add_argument( + '--visual-encoder', default=None, help='visual encoder name or path') + parser.add_argument( + '--visual-select-layer', default=-2, help='visual select layer') + parser.add_argument( + '--prompt-template', + choices=PROMPT_TEMPLATE.keys(), + default=None, + help='Specify a prompt template') + parser.add_argument( + '--stop-words', nargs='+', type=str, default=[], help='Stop words') + parser.add_argument( + '--torch-dtype', + default='fp16', + choices=TORCH_DTYPE_MAP.keys(), + help='Override the default `torch.dtype` and load the model under ' + 'a specific `dtype`.') + parser.add_argument( + '--bits', + type=int, + choices=[4, 8, None], + default=None, + help='LLM bits') + parser.add_argument( + '--bot-name', type=str, default='BOT', help='Name for Bot') + parser.add_argument( + '--offload-folder', + default=None, + help='The folder in which to offload the model weights (or where the ' + 'model weights are already offloaded).') + parser.add_argument( + '--max-new-tokens', + type=int, + default=100, + help='Maximum number of new tokens allowed in generated text') + parser.add_argument( + '--seed', + type=int, + default=0, + help='Random seed for reproducible text generation') + parser.add_argument( + '--launcher', + choices=['none', 'pytorch', 'slurm', 'mpi'], + default='none', + help='job launcher') + args = parser.parse_args() + return args + + +@master_only +def master_print(msg): + print(msg) + + +class MMBenchDataset(Dataset): + ABBRS = { + 'coarse_perception': 'CP', + 'finegrained_perception (instance-level)': 'FP-S', + 'finegrained_perception (cross-instance)': 'FP-C', + 'logic_reasoning': 'LR', + 'relation_reasoning': 'RR', + 'attribute_reasoning': 'AR', + 'sketch_reasoning': 'Sketch Reasoning', + 'scenery_building': 'Scenery & Building', + 'food_clothes': 'Food & Clothes', + 'historical_figure': 'Historical Figure', + 'traditional_show': 'Traditional Show', + 'calligraphy_painting': 'Calligraphy Painting', + 'cultural_relic': 'Cultural Relic' + } + + def __init__(self, data_file): + 
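+        # MMBench ships as a single TSV whose rows carry an index, a question,
+        # options A-D, a base64-encoded image and, on the dev split only, the
+        # ground-truth `answer` column used below to tell the splits apart.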
self.data_file = data_file + self.df = pd.read_csv(data_file, sep='\t') + self.split = 'dev' if 'answer' in self.df.iloc[0].keys() else 'test' + self.has_l2_category = 'l2-category' in self.df.columns.to_list() + + def get_image(self, image): + while len(image) < 16: + image = self.df[self.df['index'] == int(image)]['image'].values + assert len(image) == 1 + image = image[0] + image = decode_base64_to_image(image) + return image + + def __len__(self): + return len(self.df) + + def __getitem__(self, idx): + index = self.df.iloc[idx]['index'] + image = self.df.iloc[idx]['image'] + image = self.get_image(image) + question = self.df.iloc[idx]['question'] + answer = self.df.iloc[idx]['answer'] if 'answer' in self.df.iloc[ + 0].keys() else None + category = self.df.iloc[idx]['category'] + + options = { + cand: self.load_from_df(idx, cand) + for cand in string.ascii_uppercase + if self.load_from_df(idx, cand) is not None + } + options_prompt = '' + for key, item in options.items(): + options_prompt += f'{key}. {item}\n' + + hint = self.load_from_df(idx, 'hint') + data = { + 'img': image, + 'question': question, + 'answer': answer, + 'options': options_prompt, + 'category': category, + 'options_dict': options, + 'index': index, + 'context': hint, + } + if self.has_l2_category: + data.update({'l2-category': self.df.iloc[idx]['l2-category']}) + return data + + def load_from_df(self, idx, key): + if key in self.df.iloc[idx] and not pd.isna(self.df.iloc[idx][key]): + return self.df.iloc[idx][key] + else: + return None + + @master_only + def eval_result(self, result_df, show=True): + + def calc_acc(df, group='category'): + assert group in ['overall', 'category', 'l2-category'] + if group == 'overall': + res = {'Average': np.mean(df['hit'])} + else: + res = {} + abilities = list(set(df[group])) + abilities.sort() + for ab in abilities: + sub_df = df[df[group] == ab] + ab = self.ABBRS[ab] if ab in self.ABBRS else ab + res[ab] = np.mean(sub_df['hit']) + return res + + def eval_sub_data(sub_data, answer_map): + lt = len(sub_data) + for i in range(lt): + item = sub_data.iloc[i] + match = re.search(r'([A-D]+)', item['prediction']) + pred = match.group(1) if match else '' + gt = answer_map[item['index']] + if gt != pred: + return 0 + return 1 + + def show_result(ret_json): + show_dict = ret_json.copy() + table = Table(title=f' MMBench ({self.data_file}) ') + console = Console() + table.add_column('Category', justify='left') + table.add_column('Accuracy (%)', justify='right') + average = show_dict.pop('Average') * 100 + table.add_row('Average', f'{average:.1f}') + table.add_section() + for cat_name, cat_acc in show_dict.items(): + table.add_row(cat_name, f'{cat_acc * 100:.1f}') + with console.capture() as capture: + console.print(table, end='') + print('\n' + capture.get()) + print('Note: Please be cautious if you use the results in papers, ' + "since we don't use ChatGPT as a helper for choice " + 'extraction') + + data = result_df.sort_values(by='index') + data['prediction'] = [str(x) for x in data['prediction']] + for k in data.keys(): + data[k.lower() if k not in 'ABCD' else k] = data.pop(k) + + data_main = data[data['index'] < int(1e6)] + cate_map = { + i: c + for i, c in zip(self.df['index'], self.df['category']) + } + if self.has_l2_category: + l2_cate_map = { + i: c + for i, c in zip(self.df['index'], self.df['l2-category']) + } + answer_map = { + i: c + for i, c in zip(self.df['index'], self.df['answer']) + } + + lt = len(data_main) + hit, tot = 0, 0 + result = {} + for i in range(lt): + item_main = 
data_main.iloc[i] + idx = item_main['index'] + assert idx not in result + sub_data = data[data['index'] % int(1e6) == idx] + ret = eval_sub_data(sub_data, answer_map) + result[idx] = ret + hit += ret + tot += 1 + + indices = data_main['index'] + data_main = data_main.copy() + data_main['hit'] = [result[i] for i in indices] + main_idx = data_main['index'] + data_main['category'] = [cate_map[i] for i in main_idx] + + ret_json = calc_acc(data_main, 'overall') + + if self.has_l2_category: + data_main['l2-category'] = [l2_cate_map[i] for i in main_idx] + l2 = calc_acc(data_main, 'l2-category') + ret_json.update(l2) + else: + leaf = calc_acc(data_main, 'category') + ret_json.update(leaf) + if show: + show_result(ret_json) + return ret_json + + +def main(): + args = parse_args() + + torch.manual_seed(args.seed) + + if args.launcher != 'none': + set_multi_processing(distributed=True) + init_dist(args.launcher) + + rank, world_size = get_dist_info() + torch.cuda.set_device(rank) + else: + rank = 0 + world_size = 1 + + # build llm + quantization_config = None + load_in_8bit = False + if args.bits == 4: + quantization_config = BitsAndBytesConfig( + load_in_4bit=True, + load_in_8bit=False, + llm_int8_threshold=6.0, + llm_int8_has_fp16_weight=False, + bnb_4bit_compute_dtype=torch.float16, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type='nf4') + elif args.bits == 8: + load_in_8bit = True + model_kwargs = { + 'quantization_config': quantization_config, + 'load_in_8bit': load_in_8bit, + 'device_map': rank if world_size > 1 else 'auto', + 'offload_folder': args.offload_folder, + 'trust_remote_code': True, + 'torch_dtype': TORCH_DTYPE_MAP[args.torch_dtype] + } + + # build llm + with LoadWoInit(): + llm = AutoModelForCausalLM.from_pretrained(args.model_name_or_path, + **model_kwargs) + tokenizer = AutoTokenizer.from_pretrained( + args.model_name_or_path, + trust_remote_code=True, + encode_special_tokens=True) + master_print(f'Load LLM from {args.model_name_or_path}') + + llava_path = snapshot_download( + repo_id=args.llava) if not osp.isdir(args.llava) else args.llava + + # build visual_encoder + if 'visual_encoder' in os.listdir(llava_path): + assert args.visual_encoder is None, ( + "Please don't specify the `--visual-encoder` since passed " + '`--llava` contains a visual encoder!') + visual_encoder_path = osp.join(llava_path, 'visual_encoder') + else: + assert args.visual_encoder is not None, ( + 'Please specify the `--visual-encoder`!') + visual_encoder_path = args.visual_encoder + with LoadWoInit(): + visual_encoder = CLIPVisionModel.from_pretrained( + visual_encoder_path, torch_dtype=TORCH_DTYPE_MAP[args.torch_dtype]) + image_processor = CLIPImageProcessor.from_pretrained( + visual_encoder_path) + master_print(f'Load visual_encoder from {visual_encoder_path}') + + # load adapter + if 'llm_adapter' in os.listdir(llava_path): + adapter_path = osp.join(llava_path, 'llm_adapter') + + with LoadWoInit(): + llm = PeftModel.from_pretrained( + llm, adapter_path, offload_folder=args.offload_folder) + + master_print(f'Load LLM adapter from {args.llava}') + + if 'visual_encoder_adapter' in os.listdir(llava_path): + adapter_path = osp.join(llava_path, 'visual_encoder_adapter') + visual_encoder = PeftModel.from_pretrained( + visual_encoder, adapter_path, offload_folder=args.offload_folder) + master_print(f'Load visual_encoder adapter from {args.llava}') + + # build projector + projector_path = osp.join(llava_path, 'projector') + with LoadWoInit(): + projector = AutoModel.from_pretrained( + projector_path, 
torch_dtype=TORCH_DTYPE_MAP[args.torch_dtype]) + master_print(f'Load projector from {args.llava}') + + projector.cuda() + projector.eval() + + visual_encoder.cuda() + visual_encoder.eval() + + llm.eval() + + stop_words = args.stop_words + if args.prompt_template: + template = PROMPT_TEMPLATE[args.prompt_template] + stop_words += template.get('STOP_WORDS', []) + stop_criteria = get_stop_criteria( + tokenizer=tokenizer, stop_words=stop_words) + + gen_config = GenerationConfig( + max_new_tokens=args.max_new_tokens, + do_sample=False, + eos_token_id=tokenizer.eos_token_id, + pad_token_id=tokenizer.pad_token_id + if tokenizer.pad_token_id is not None else tokenizer.eos_token_id, + ) + + # work_dir + if args.work_dir is not None: + # update configs according to CLI args if args.work_dir is not None + save_dir = args.work_dir + else: + # use config filename as default work_dir + save_dir = osp.join('./work_dirs', + osp.splitext(osp.basename(args.data_path))[0]) + timestamp = time.strftime('%Y%m%d_%H%M%S', time.localtime(time.time())) + save_dir = osp.join(save_dir, timestamp) + + if rank == 0: + mkdir_or_exist(osp.abspath(save_dir)) + print('=======================================================') + print(f'Dataset path: {osp.abspath(args.data_path)}\n' + f'Results will be saved to {osp.abspath(save_dir)}') + print('=======================================================') + + args_path = osp.join(save_dir, 'args.json') + with open(args_path, 'w', encoding='utf-8') as f: + json.dump(args.__dict__, f, indent=2) + + results_xlsx_path = osp.join(save_dir, 'mmbench_result.xlsx') + results_json_path = osp.join(save_dir, 'mmbench_result.json') + + dataset = MMBenchDataset(args.data_path) + + results = [] + n_samples = len(dataset) + per_rank_samples = math.ceil(n_samples / world_size) + + per_rank_ids = range(per_rank_samples * rank, + min(n_samples, per_rank_samples * (rank + 1))) + for i in tqdm.tqdm(per_rank_ids, desc=f'Rank {rank}'): + data_sample = dataset[i] + if data_sample['context'] is not None: + text = data_sample['context'] + '\n' + data_sample[ + 'question'] + '\n' + data_sample['options'] + else: + text = data_sample['question'] + '\n' + data_sample['options'] + + text = DEFAULT_IMAGE_TOKEN + '\n' + text + + if is_cn_string(text): + text = text + '请直接回答选项字母。' + else: + text = text + ("Answer with the option's letter from the " + 'given choices directly.') + + if args.prompt_template: + prompt_text = '' + template = PROMPT_TEMPLATE[args.prompt_template] + prompt_text += template['INSTRUCTION'].format( + input=text, round=1, bot_name=args.bot_name) + else: + prompt_text = text + inputs = prompt_text + + image = data_sample['img'].convert('RGB') + image = expand2square( + image, tuple(int(x * 255) for x in image_processor.image_mean)) + image = image_processor.preprocess( + image, return_tensors='pt')['pixel_values'][0] + image = image.cuda().unsqueeze(0).to(visual_encoder.dtype) + visual_outputs = visual_encoder(image, output_hidden_states=True) + pixel_values = projector( + visual_outputs.hidden_states[args.visual_select_layer][:, 1:]) + + chunk_encode = [] + for idx, chunk in enumerate(inputs.split(DEFAULT_IMAGE_TOKEN)): + if idx == 0: + cur_encode = tokenizer.encode(chunk) + else: + cur_encode = tokenizer.encode(chunk, add_special_tokens=False) + chunk_encode.append(cur_encode) + assert len(chunk_encode) == 2 + + # TODO: Auto-detect whether to prepend a bos_token_id at the beginning. 
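+        # Re-join the tokenized text chunks with the special IMAGE_TOKEN_INDEX
+        # placeholder spliced in between, so that
+        # prepare_inputs_labels_for_multimodal can later replace it with the
+        # projected visual features.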
+ ids = [] + + for idx, cur_chunk_encode in enumerate(chunk_encode): + ids.extend(cur_chunk_encode) + if idx != len(chunk_encode) - 1: + ids.append(IMAGE_TOKEN_INDEX) + ids = torch.tensor(ids).cuda().unsqueeze(0) + mm_inputs = prepare_inputs_labels_for_multimodal( + llm=llm, input_ids=ids, pixel_values=pixel_values) + + generate_output = llm.generate( + **mm_inputs, + generation_config=gen_config, + streamer=None, + bos_token_id=tokenizer.bos_token_id, + stopping_criteria=stop_criteria) + + predict = tokenizer.decode( + generate_output[0], skip_special_tokens=True).strip() + cur_result = {} + cur_result['question'] = data_sample.get('question') + cur_result.update(data_sample.get('options_dict')) + cur_result['prediction'] = predict + if data_sample.get('category') is not None: + cur_result['category'] = data_sample.get('category') + if data_sample.get('l2-category') is not None: + cur_result['l2-category'] = data_sample.get('l2-category') + cur_result['index'] = data_sample.get('index') + cur_result['split'] = data_sample.get('split') + cur_result['answer'] = data_sample.get('answer') + results.append(cur_result) + + results = collect_results(results, n_samples) + + if get_rank() == 0: + + results_df = pd.DataFrame(results) + with pd.ExcelWriter(results_xlsx_path, engine='openpyxl') as writer: + results_df.to_excel(writer, index=False) + + if dataset.split == 'dev': + results_dict = dataset.eval_result(results_df, show=True) + with open(results_json_path, 'w', encoding='utf-8') as f: + json.dump(results_dict, f, indent=2) + else: + print('All done!') + + +if __name__ == '__main__': + + main() diff --git a/data/xtuner/xtuner/tools/model_converters/merge.py b/data/xtuner/xtuner/tools/model_converters/merge.py new file mode 100644 index 0000000000000000000000000000000000000000..c7202a6633aa4f42e4082c81048a0053fd9e64c6 --- /dev/null +++ b/data/xtuner/xtuner/tools/model_converters/merge.py @@ -0,0 +1,77 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import argparse + +import torch +from peft import PeftModel +from transformers import (AutoModelForCausalLM, AutoTokenizer, + CLIPImageProcessor, CLIPVisionModel) + +from xtuner.model.utils import LoadWoInit + + +def parse_args(): + parser = argparse.ArgumentParser( + description='Merge a HuggingFace adapter to base model') + parser.add_argument('model_name_or_path', help='model name or path') + parser.add_argument('adapter_name_or_path', help='adapter name or path') + parser.add_argument( + 'save_dir', help='the directory to save the merged model') + parser.add_argument( + '--max-shard-size', + type=str, + default='2GB', + help='Only applicable for LLM. 
The maximum size for ' + 'each sharded checkpoint.') + parser.add_argument( + '--is-clip', + action='store_true', + help='Indicate if the model is a clip model') + parser.add_argument( + '--safe-serialization', + action='store_true', + help='Indicate if using `safe_serialization`') + parser.add_argument( + '--device', + default='cuda', + choices=('cuda', 'cpu', 'auto'), + help='Indicate the device') + + args = parser.parse_args() + return args + + +def main(): + args = parse_args() + if args.is_clip: + with LoadWoInit(): + model = CLIPVisionModel.from_pretrained( + args.model_name_or_path, device_map=args.device) + processor = CLIPImageProcessor.from_pretrained(args.model_name_or_path) + else: + with LoadWoInit(): + model = AutoModelForCausalLM.from_pretrained( + args.model_name_or_path, + torch_dtype=torch.float16, + low_cpu_mem_usage=True, + device_map=args.device, + trust_remote_code=True) + processor = AutoTokenizer.from_pretrained( + args.model_name_or_path, trust_remote_code=True) + model_unmerged = PeftModel.from_pretrained( + model, + args.adapter_name_or_path, + device_map=args.device, + is_trainable=False, + trust_remote_code=True) + model_merged = model_unmerged.merge_and_unload() + print(f'Saving to {args.save_dir}...') + model_merged.save_pretrained( + args.save_dir, + safe_serialization=args.safe_serialization, + max_shard_size=args.max_shard_size) + processor.save_pretrained(args.save_dir) + print('All done!') + + +if __name__ == '__main__': + main() diff --git a/data/xtuner/xtuner/tools/model_converters/modeling_internlm2_reward/__init__.py b/data/xtuner/xtuner/tools/model_converters/modeling_internlm2_reward/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/data/xtuner/xtuner/tools/model_converters/modeling_internlm2_reward/configuration_internlm2.py b/data/xtuner/xtuner/tools/model_converters/modeling_internlm2_reward/configuration_internlm2.py new file mode 100644 index 0000000000000000000000000000000000000000..12fdffe28ca875049873cfd010ac59ddf68af6c2 --- /dev/null +++ b/data/xtuner/xtuner/tools/model_converters/modeling_internlm2_reward/configuration_internlm2.py @@ -0,0 +1,154 @@ +# coding=utf-8 +# Copyright (c) The InternLM team and The HuggingFace Inc. team. All rights reserved. +# +# This code is based on transformers/src/transformers/models/llama/configuration_llama.py +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" InternLM2 model configuration""" + +from transformers.configuration_utils import PretrainedConfig +from transformers.utils import logging + +logger = logging.get_logger(__name__) + +INTERNLM2_PRETRAINED_CONFIG_ARCHIVE_MAP = {} + + +# Modified from transformers.model.llama.configuration_llama.LlamaConfig +class InternLM2Config(PretrainedConfig): + r""" + This is the configuration class to store the configuration of a [`InternLM2Model`]. It is used to instantiate + an InternLM2 model according to the specified arguments, defining the model architecture. 
Instantiating a
+    configuration with the defaults will yield a similar configuration to that of the InternLM2-7B.
+
+    Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the
+    documentation from [`PretrainedConfig`] for more information.
+
+
+    Args:
+        vocab_size (`int`, *optional*, defaults to 103168):
+            Vocabulary size of the InternLM2 model. Defines the number of different tokens that can be represented by the
+            `inputs_ids` passed when calling [`InternLM2Model`]
+        hidden_size (`int`, *optional*, defaults to 4096):
+            Dimension of the hidden representations.
+        intermediate_size (`int`, *optional*, defaults to 11008):
+            Dimension of the MLP representations.
+        num_hidden_layers (`int`, *optional*, defaults to 32):
+            Number of hidden layers in the Transformer encoder.
+        num_attention_heads (`int`, *optional*, defaults to 32):
+            Number of attention heads for each attention layer in the Transformer encoder.
+        num_key_value_heads (`int`, *optional*):
+            This is the number of key_value heads that should be used to implement Grouped Query Attention. If
+            `num_key_value_heads=num_attention_heads`, the model will use Multi Head Attention (MHA), if
+            `num_key_value_heads=1` the model will use Multi Query Attention (MQA), otherwise GQA is used. When
+            converting a multi-head checkpoint to a GQA checkpoint, each group key and value head should be constructed
+            by meanpooling all the original heads within that group. For more details check out [this
+            paper](https://arxiv.org/pdf/2305.13245.pdf). If it is not specified, will default to
+            `num_attention_heads`.
+        hidden_act (`str` or `function`, *optional*, defaults to `"silu"`):
+            The non-linear activation function (function or string) in the decoder.
+        max_position_embeddings (`int`, *optional*, defaults to 2048):
+            The maximum sequence length that this model might ever be used with. Typically set this to something large
+            just in case (e.g., 512 or 1024 or 2048).
+        initializer_range (`float`, *optional*, defaults to 0.02):
+            The standard deviation of the truncated_normal_initializer for initializing all weight matrices.
+        rms_norm_eps (`float`, *optional*, defaults to 1e-6):
+            The epsilon used by the rms normalization layers.
+        use_cache (`bool`, *optional*, defaults to `True`):
+            Whether or not the model should return the last key/values attentions (not used by all models). Only
+            relevant if `config.is_decoder=True`.
+        tie_word_embeddings(`bool`, *optional*, defaults to `False`):
+            Whether to tie weight embeddings
+    Example:
+
+    """
+    model_type = "internlm2"
+    _auto_class = "AutoConfig"
+
+    def __init__(  # pylint: disable=W0102
+        self,
+        vocab_size=103168,
+        hidden_size=4096,
+        intermediate_size=11008,
+        num_hidden_layers=32,
+        num_attention_heads=32,
+        num_key_value_heads=None,
+        hidden_act="silu",
+        max_position_embeddings=2048,
+        initializer_range=0.02,
+        rms_norm_eps=1e-6,
+        use_cache=True,
+        pad_token_id=0,
+        bos_token_id=1,
+        eos_token_id=2,
+        reward_token_id=92527,
+        tie_word_embeddings=False,
+        bias=True,
+        rope_theta=10000,
+        rope_scaling=None,
+        attn_implementation="eager",
+        **kwargs,
+    ):
+        self.vocab_size = vocab_size
+        self.max_position_embeddings = max_position_embeddings
+        self.hidden_size = hidden_size
+        self.intermediate_size = intermediate_size
+        self.num_hidden_layers = num_hidden_layers
+        self.num_attention_heads = num_attention_heads
+        self.bias = bias
+
+        if num_key_value_heads is None:
+            num_key_value_heads = num_attention_heads
+        self.num_key_value_heads = num_key_value_heads
+
+        self.hidden_act = hidden_act
+        self.initializer_range = initializer_range
+        self.rms_norm_eps = rms_norm_eps
+        self.use_cache = use_cache
+        self.rope_theta = rope_theta
+        self.rope_scaling = rope_scaling
+        self._rope_scaling_validation()
+
+        self.attn_implementation = attn_implementation
+        if self.attn_implementation is None:
+            self.attn_implementation = "eager"
+
+        self.reward_token_id = reward_token_id
+        super().__init__(
+            pad_token_id=pad_token_id,
+            bos_token_id=bos_token_id,
+            eos_token_id=eos_token_id,
+            tie_word_embeddings=tie_word_embeddings,
+            **kwargs,
+        )
+
+    def _rope_scaling_validation(self):
+        """
+        Validate the `rope_scaling` configuration.
+        """
+        if self.rope_scaling is None:
+            return
+
+        if not isinstance(self.rope_scaling, dict) or len(self.rope_scaling) != 2:
+            raise ValueError(
+                "`rope_scaling` must be a dictionary with two fields, `type` and `factor`, "
+                f"got {self.rope_scaling}"
+            )
+        rope_scaling_type = self.rope_scaling.get("type", None)
+        rope_scaling_factor = self.rope_scaling.get("factor", None)
+        if rope_scaling_type is None or rope_scaling_type not in ["linear", "dynamic"]:
+            raise ValueError(
+                f"`rope_scaling`'s type field must be one of ['linear', 'dynamic'], got {rope_scaling_type}"
+            )
+        if rope_scaling_factor is None or not isinstance(rope_scaling_factor, float) or rope_scaling_factor < 1.0:
+            raise ValueError(f"`rope_scaling`'s factor field must be a float >= 1, got {rope_scaling_factor}")
diff --git a/data/xtuner/xtuner/tools/model_converters/modeling_internlm2_reward/modeling_internlm2.py b/data/xtuner/xtuner/tools/model_converters/modeling_internlm2_reward/modeling_internlm2.py
new file mode 100644
index 0000000000000000000000000000000000000000..59cba84567a2c6871bdf45d12a0753a663ea87dc
--- /dev/null
+++ b/data/xtuner/xtuner/tools/model_converters/modeling_internlm2_reward/modeling_internlm2.py
@@ -0,0 +1,1578 @@
+# Copyright (c) The InternLM team and The HuggingFace Inc. team. All rights reserved.
+#
+# This code is based on transformers/src/transformers/models/llama/modeling_llama.py
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" PyTorch InternLM2 model.""" +import math +import queue +import threading +import warnings +from typing import List, Optional, Tuple, Union + +import torch +import torch.nn.functional as F +import torch.utils.checkpoint +from einops import rearrange +from torch import nn +from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss +from transformers.activations import ACT2FN +from transformers.modeling_outputs import ( + BaseModelOutputWithPast, + CausalLMOutputWithPast, + SequenceClassifierOutputWithPast, +) +from transformers.modeling_utils import PreTrainedModel +from transformers.utils import ( + add_start_docstrings, + add_start_docstrings_to_model_forward, + logging, + replace_return_docstrings, +) + +try: + from transformers.generation.streamers import BaseStreamer +except: # noqa # pylint: disable=bare-except + BaseStreamer = None + +from .configuration_internlm2 import InternLM2Config + +logger = logging.get_logger(__name__) + +_CONFIG_FOR_DOC = "InternLM2Config" + +flash_attn_func, flash_attn_varlen_func = None, None +pad_input, index_first_axis, unpad_input = None, None, None +def _import_flash_attn(): + global flash_attn_func, flash_attn_varlen_func + global pad_input, index_first_axis, unpad_input + try: + from flash_attn import flash_attn_func as _flash_attn_func, flash_attn_varlen_func as _flash_attn_varlen_func + from flash_attn.bert_padding import pad_input as _pad_input, index_first_axis as _index_first_axis, unpad_input as _unpad_input + flash_attn_func, flash_attn_varlen_func = _flash_attn_func, _flash_attn_varlen_func + pad_input, index_first_axis, unpad_input = _pad_input, _index_first_axis, _unpad_input + except ImportError: + raise ImportError("flash_attn is not installed.") + +# Copied from transformers.models.llama.modeling_llama._get_unpad_data +def _get_unpad_data(attention_mask): + seqlens_in_batch = attention_mask.sum(dim=-1, dtype=torch.int32) + indices = torch.nonzero(attention_mask.flatten(), as_tuple=False).flatten() + max_seqlen_in_batch = seqlens_in_batch.max().item() + cu_seqlens = F.pad(torch.cumsum(seqlens_in_batch, dim=0, dtype=torch.torch.int32), (1, 0)) + return ( + indices, + cu_seqlens, + max_seqlen_in_batch, + ) + + +# Copied from transformers.models.bart.modeling_bart._make_causal_mask +def _make_causal_mask( + input_ids_shape: torch.Size, dtype: torch.dtype, device: torch.device, past_key_values_length: int = 0 +): + """ + Make causal mask used for bi-directional self-attention. 
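+    Entries on and below the diagonal are 0 (attendable), everything above is
+    filled with the dtype's most negative value, and `past_key_values_length`
+    zero columns are prepended so cached tokens remain fully visible.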
+ """ + bsz, tgt_len = input_ids_shape + mask = torch.full((tgt_len, tgt_len), torch.tensor(torch.finfo(dtype).min, device=device), device=device) + mask_cond = torch.arange(mask.size(-1), device=device) + mask.masked_fill_(mask_cond < (mask_cond + 1).view(mask.size(-1), 1), 0) + mask = mask.to(dtype) + + if past_key_values_length > 0: + mask = torch.cat([torch.zeros(tgt_len, past_key_values_length, dtype=dtype, device=device), mask], dim=-1) + return mask[None, None, :, :].expand(bsz, 1, tgt_len, tgt_len + past_key_values_length) + + +# Copied from transformers.models.bart.modeling_bart._expand_mask +def _expand_mask(mask: torch.Tensor, dtype: torch.dtype, tgt_len: Optional[int] = None): + """ + Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`. + """ + bsz, src_len = mask.size() + tgt_len = tgt_len if tgt_len is not None else src_len + + expanded_mask = mask[:, None, None, :].expand(bsz, 1, tgt_len, src_len).to(dtype) + + inverted_mask = 1.0 - expanded_mask + + return inverted_mask.masked_fill(inverted_mask.to(torch.bool), torch.finfo(dtype).min) + + +# Copied from transformers.models.llama.modeling_llama.LlamaRMSNorm with Llama->InternLM2 +class InternLM2RMSNorm(nn.Module): + def __init__(self, hidden_size, eps=1e-6): + """ + InternLM2RMSNorm is equivalent to T5LayerNorm + """ + super().__init__() + self.weight = nn.Parameter(torch.ones(hidden_size)) + self.variance_epsilon = eps + + def forward(self, hidden_states): + input_dtype = hidden_states.dtype + hidden_states = hidden_states.to(torch.float32) + variance = hidden_states.pow(2).mean(-1, keepdim=True) + hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon) + return self.weight * hidden_states.to(input_dtype) + + +# Copied from transformers.model.llama.modeling_llama.LlamaRotaryEmbedding with Llama->InternLM2 +class InternLM2RotaryEmbedding(nn.Module): + def __init__(self, dim, max_position_embeddings=2048, base=10000, device=None): + super().__init__() + + self.dim = dim + self.max_position_embeddings = max_position_embeddings + self.base = base + inv_freq = 1.0 / (self.base ** (torch.arange(0, self.dim, 2).float().to(device) / self.dim)) + self.register_buffer("inv_freq", inv_freq, persistent=False) + + # Build here to make `torch.jit.trace` work. + self._set_cos_sin_cache( + seq_len=max_position_embeddings, device=self.inv_freq.device, dtype=torch.get_default_dtype() + ) + + def _set_cos_sin_cache(self, seq_len, device, dtype): + self.max_seq_len_cached = seq_len + t = torch.arange(self.max_seq_len_cached, device=device, dtype=self.inv_freq.dtype) + + freqs = torch.einsum("i,j->ij", t, self.inv_freq) + # Different from paper, but it uses a different permutation in order to obtain the same calculation + emb = torch.cat((freqs, freqs), dim=-1) + self.register_buffer("cos_cached", emb.cos().to(dtype), persistent=False) + self.register_buffer("sin_cached", emb.sin().to(dtype), persistent=False) + + def forward(self, x, seq_len=None): + # x: [bs, num_attention_heads, seq_len, head_size] + if seq_len > self.max_seq_len_cached: + self._set_cos_sin_cache(seq_len=seq_len, device=x.device, dtype=torch.float32) + + return ( + self.cos_cached[:seq_len].to(dtype=x.dtype), + self.sin_cached[:seq_len].to(dtype=x.dtype), + ) + + +# Copied from transformers.model.llama.modeling_llama.LlamaLinearScalingRotaryEmbedding with Llama->InternLM2 +class InternLM2LinearScalingRotaryEmbedding(InternLM2RotaryEmbedding): + """InternLM2RotaryEmbedding extended with linear scaling. 
Credits to the Reddit user /u/kaiokendev""" + + def __init__(self, dim, max_position_embeddings=2048, base=10000, device=None, scaling_factor=1.0): + self.scaling_factor = scaling_factor + super().__init__(dim, max_position_embeddings, base, device) + + def _set_cos_sin_cache(self, seq_len, device, dtype): + self.max_seq_len_cached = seq_len + t = torch.arange(self.max_seq_len_cached, device=device, dtype=self.inv_freq.dtype) + t = t / self.scaling_factor + + freqs = torch.einsum("i,j->ij", t, self.inv_freq) + # Different from paper, but it uses a different permutation in order to obtain the same calculation + emb = torch.cat((freqs, freqs), dim=-1) + self.register_buffer("cos_cached", emb.cos().to(dtype), persistent=False) + self.register_buffer("sin_cached", emb.sin().to(dtype), persistent=False) + + +# Copied from transformers.model.llama.modeling_llama.LlamaDynamicNTKScalingRotaryEmbedding with Llama->InternLM2 +class InternLM2DynamicNTKScalingRotaryEmbedding(InternLM2RotaryEmbedding): + """InternLM2RotaryEmbedding extended with Dynamic NTK scaling. + Credits to the Reddit users /u/bloc97 and /u/emozilla. + """ + + def __init__(self, dim, max_position_embeddings=2048, base=10000, device=None, scaling_factor=1.0): + self.scaling_factor = scaling_factor + super().__init__(dim, max_position_embeddings, base, device) + + def _set_cos_sin_cache(self, seq_len, device, dtype): + self.max_seq_len_cached = seq_len + + if seq_len > self.max_position_embeddings: + base = self.base * ( + (self.scaling_factor * seq_len / self.max_position_embeddings) - (self.scaling_factor - 1) + ) ** (self.dim / (self.dim - 2)) + inv_freq = 1.0 / (base ** (torch.arange(0, self.dim, 2).float().to(device) / self.dim)) + self.register_buffer("inv_freq", inv_freq, persistent=False) + + t = torch.arange(self.max_seq_len_cached, device=device, dtype=self.inv_freq.dtype) + + freqs = torch.einsum("i,j->ij", t, self.inv_freq) + # Different from paper, but it uses a different permutation in order to obtain the same calculation + emb = torch.cat((freqs, freqs), dim=-1) + self.register_buffer("cos_cached", emb.cos().to(dtype), persistent=False) + self.register_buffer("sin_cached", emb.sin().to(dtype), persistent=False) + + +# Copied from transformers.model.llama.modeling_llama.rotate_half +def rotate_half(x): + """Rotates half the hidden dims of the input.""" + x1 = x[..., : x.shape[-1] // 2] + x2 = x[..., x.shape[-1] // 2 :] + return torch.cat((-x2, x1), dim=-1) + + +# Copied from transformers.model.llama.modeling_llama.apply_rotary_pos_emb +def apply_rotary_pos_emb(q, k, cos, sin, position_ids, unsqueeze_dim=1): + """Applies Rotary Position Embedding to the query and key tensors.""" + cos = cos[position_ids].unsqueeze(unsqueeze_dim) + sin = sin[position_ids].unsqueeze(unsqueeze_dim) + q_embed = (q * cos) + (rotate_half(q) * sin) + k_embed = (k * cos) + (rotate_half(k) * sin) + return q_embed, k_embed + + +class InternLM2MLP(nn.Module): + def __init__(self, config): + super().__init__() + self.config = config + self.hidden_size = config.hidden_size + self.intermediate_size = config.intermediate_size + self.w1 = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) + self.w3 = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) + self.w2 = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) + self.act_fn = ACT2FN[config.hidden_act] + + def forward(self, x): + down_proj = self.w2(self.act_fn(self.w1(x)) * self.w3(x)) + + return down_proj + + +# Copied from 
transformers.model.llama.modeling_llama.repeat_kv +def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor: + """ + This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch, + num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim) + """ + batch, num_key_value_heads, slen, head_dim = hidden_states.shape + if n_rep == 1: + return hidden_states + hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim) + return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim) + + +# Modified from transformers.model.llama.modeling_llama.LlamaAttention +class InternLM2Attention(nn.Module): + """Multi-headed attention from 'Attention Is All You Need' paper""" + + def __init__(self, config: InternLM2Config): + super().__init__() + self.config = config + self.hidden_size = config.hidden_size + self.num_heads = config.num_attention_heads + self.head_dim = self.hidden_size // self.num_heads + self.num_key_value_heads = config.num_key_value_heads + self.num_key_value_groups = self.num_heads // self.num_key_value_heads + self.max_position_embeddings = config.max_position_embeddings + self.is_causal = True + + if (self.head_dim * self.num_heads) != self.hidden_size: + raise ValueError( + f"hidden_size must be divisible by num_heads (got `hidden_size`: {self.hidden_size}" + f" and `num_heads`: {self.num_heads})." + ) + + self.wqkv = nn.Linear( + self.hidden_size, + (self.num_heads + 2 * self.num_key_value_heads) * self.head_dim, + bias=config.bias, + ) + + self.wo = nn.Linear(self.num_heads * self.head_dim, self.hidden_size, bias=config.bias) + self._init_rope() + + def _init_rope(self): + if self.config.rope_scaling is None: + self.rotary_emb = InternLM2RotaryEmbedding( + self.head_dim, + max_position_embeddings=self.max_position_embeddings, + base=self.config.rope_theta, + ) + else: + scaling_type = self.config.rope_scaling["type"] + scaling_factor = self.config.rope_scaling["factor"] + if scaling_type == "dynamic": + self.rotary_emb = InternLM2DynamicNTKScalingRotaryEmbedding( + self.head_dim, + max_position_embeddings=self.max_position_embeddings, + base=self.config.rope_theta, + scaling_factor=scaling_factor, + ) + elif scaling_type == "linear": + self.rotary_emb = InternLM2LinearScalingRotaryEmbedding( + self.head_dim, + max_position_embeddings=self.max_position_embeddings, + base=self.config.rope_theta, + scaling_factor=scaling_factor, + ) + else: + raise ValueError("Currently we only support rotary embedding's type being 'dynamic' or 'linear'.") + return self.rotary_emb + + def _shape(self, tensor: torch.Tensor, seq_len: int, bsz: int): + return tensor.view(bsz, seq_len, self.num_heads, self.head_dim).transpose(1, 2).contiguous() + + def forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Tuple[torch.Tensor]] = None, + output_attentions: bool = False, + use_cache: bool = False, + **kwargs, + ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]: + if "padding_mask" in kwargs: + warnings.warn( + "Passing `padding_mask` is deprecated and will be removed in v4.37. 
" + "Please make sure use `attention_mask` instead.`" + ) + + bsz, q_len, _ = hidden_states.size() + + qkv_states = self.wqkv(hidden_states) + + qkv_states = rearrange( + qkv_states, + "b q (h gs d) -> b q h gs d", + gs=2 + self.num_key_value_groups, + d=self.head_dim, + ) + + query_states = qkv_states[..., : self.num_key_value_groups, :] + query_states = rearrange(query_states, "b q h gs d -> b q (h gs) d") + key_states = qkv_states[..., -2, :] + value_states = qkv_states[..., -1, :] + + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + + kv_seq_len = key_states.shape[-2] + if past_key_value is not None: + kv_seq_len += past_key_value[0].shape[-2] + cos, sin = self.rotary_emb(value_states, seq_len=kv_seq_len) + query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin, position_ids) + + if past_key_value is not None: + # reuse k, v, self_attention + key_states = torch.cat([past_key_value[0], key_states], dim=2) + value_states = torch.cat([past_key_value[1], value_states], dim=2) + + past_key_value = (key_states, value_states) if use_cache else None + + key_states = repeat_kv(key_states, self.num_key_value_groups) + value_states = repeat_kv(value_states, self.num_key_value_groups) + + attn_weights = torch.matmul(query_states, key_states.transpose(2, 3)) / math.sqrt(self.head_dim) + + if attn_weights.size() != (bsz, self.num_heads, q_len, kv_seq_len): + raise ValueError( + f"Attention weights should be of size {(bsz, self.num_heads, q_len, kv_seq_len)}, but is" + f" {attn_weights.size()}" + ) + + if attention_mask is not None: + if attention_mask.size() != (bsz, 1, q_len, kv_seq_len): + raise ValueError( + f"Attention mask should be of size {(bsz, 1, q_len, kv_seq_len)}, but is {attention_mask.size()}" + ) + attn_weights = attn_weights + attention_mask + + # upcast attention to fp32 + attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query_states.dtype) + attn_output = torch.matmul(attn_weights, value_states) + + if attn_output.size() != (bsz, self.num_heads, q_len, self.head_dim): + raise ValueError( + f"`attn_output` should be of size {(bsz, self.num_heads, q_len, self.head_dim)}, but is" + f" {attn_output.size()}" + ) + + attn_output = attn_output.transpose(1, 2).contiguous() + attn_output = attn_output.reshape(bsz, q_len, self.hidden_size) + + attn_output = self.wo(attn_output) + + if not output_attentions: + attn_weights = None + + return attn_output, attn_weights, past_key_value + + +# Modified from transformers.model.llama.modeling_llama.InternLM2FlashAttention2 +class InternLM2FlashAttention2(InternLM2Attention): + """ + InternLM2 flash attention module. This module inherits from `InternLM2Attention` as the weights of the module stays + untouched. The only required change would be on the forward pass where it needs to correctly call the public API of + flash attention and deal with padding tokens in case the input contains any of them. 
+ """ + + def forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.LongTensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Tuple[torch.Tensor]] = None, + output_attentions: bool = False, + use_cache: bool = False, + **kwargs, + ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]: + # InternLM2FlashAttention2 attention does not support output_attentions + if "padding_mask" in kwargs: + warnings.warn( + "Passing `padding_mask` is deprecated and will be removed in v4.37. " + "Please make sure use `attention_mask` instead.`" + ) + + # overwrite attention_mask with padding_mask + attention_mask = kwargs.pop("padding_mask") + + output_attentions = False + + bsz, q_len, _ = hidden_states.size() + + qkv_states = self.wqkv(hidden_states) + + qkv_states = rearrange( + qkv_states, + "b q (h gs d) -> b q h gs d", + gs=2 + self.num_key_value_groups, + d=self.head_dim, + ) + + query_states = qkv_states[..., : self.num_key_value_groups, :] + query_states = rearrange(query_states, "b q h gs d -> b q (h gs) d") + key_states = qkv_states[..., -2, :] + value_states = qkv_states[..., -1, :] + + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + + kv_seq_len = key_states.shape[-2] + if past_key_value is not None: + kv_seq_len += past_key_value[0].shape[-2] + + cos, sin = self.rotary_emb(value_states, seq_len=kv_seq_len) + + query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin, position_ids) + + if past_key_value is not None: + # reuse k, v, self_attention + key_states = torch.cat([past_key_value[0], key_states], dim=2) + value_states = torch.cat([past_key_value[1], value_states], dim=2) + + past_key_value = (key_states, value_states) if use_cache else None + + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + + attn_output = self._flash_attention_forward( + query_states, key_states, value_states, attention_mask, q_len + ) + attn_output = attn_output.reshape(bsz, q_len, self.hidden_size).contiguous() + attn_output = self.wo(attn_output) + + if not output_attentions: + attn_weights = None + + return attn_output, attn_weights, past_key_value + + def _flash_attention_forward( + self, query_states, key_states, value_states, attention_mask, query_length, dropout=0.0, softmax_scale=None + ): + """ + Calls the forward method of Flash Attention - if the input hidden states contain at least one padding token + first unpad the input, then computes the attention scores and pad the final attention scores. + + Args: + query_states (`torch.Tensor`): + Input query states to be passed to Flash Attention API + key_states (`torch.Tensor`): + Input key states to be passed to Flash Attention API + value_states (`torch.Tensor`): + Input value states to be passed to Flash Attention API + attention_mask (`torch.Tensor`): + The padding mask - corresponds to a tensor of size `(batch_size, seq_len)` where 0 stands for the + position of padding tokens and 1 for the position of non-padding tokens. + dropout (`int`, *optional*): + Attention dropout + softmax_scale (`float`, *optional*): + The scaling of QK^T before applying softmax. 
Default to 1 / sqrt(head_dim) + """ + # Contains at least one padding token in the sequence + causal = self.is_causal and query_length != 1 + if attention_mask is not None: + batch_size = query_states.shape[0] + query_states, key_states, value_states, indices_q, cu_seq_lens, max_seq_lens = self._unpad_input( + query_states, key_states, value_states, attention_mask, query_length + ) + + cu_seqlens_q, cu_seqlens_k = cu_seq_lens + max_seqlen_in_batch_q, max_seqlen_in_batch_k = max_seq_lens + + attn_output_unpad = flash_attn_varlen_func( + query_states, + key_states, + value_states, + cu_seqlens_q=cu_seqlens_q, + cu_seqlens_k=cu_seqlens_k, + max_seqlen_q=max_seqlen_in_batch_q, + max_seqlen_k=max_seqlen_in_batch_k, + dropout_p=dropout, + softmax_scale=softmax_scale, + causal=causal, + ) + + attn_output = pad_input(attn_output_unpad, indices_q, batch_size, query_length) + else: + attn_output = flash_attn_func( + query_states, key_states, value_states, dropout, softmax_scale=softmax_scale, causal=causal + ) + + return attn_output + + def _unpad_input(self, query_layer, key_layer, value_layer, attention_mask, query_length): + indices_k, cu_seqlens_k, max_seqlen_in_batch_k = _get_unpad_data(attention_mask) + batch_size, kv_seq_len, num_key_value_heads, head_dim = key_layer.shape + + key_layer = index_first_axis( + key_layer.reshape(batch_size * kv_seq_len, num_key_value_heads, head_dim), indices_k + ) + value_layer = index_first_axis( + value_layer.reshape(batch_size * kv_seq_len, num_key_value_heads, head_dim), indices_k + ) + + if query_length == kv_seq_len: + query_layer = index_first_axis( + query_layer.reshape(batch_size * kv_seq_len, self.num_heads, head_dim), indices_k + ) + cu_seqlens_q = cu_seqlens_k + max_seqlen_in_batch_q = max_seqlen_in_batch_k + indices_q = indices_k + elif query_length == 1: + max_seqlen_in_batch_q = 1 + cu_seqlens_q = torch.arange( + batch_size + 1, dtype=torch.int32, device=query_layer.device + ) # There is a memcpy here, that is very bad. + indices_q = cu_seqlens_q[:-1] + query_layer = query_layer.squeeze(1) + else: + # The -q_len: slice assumes left padding. 
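+            # (During batched generation, sequences are padded on the left so
+            # the newest tokens align at the right edge; the last
+            # `query_length` mask columns therefore correspond to the query.)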
+ attention_mask = attention_mask[:, -query_length:] + query_layer, indices_q, cu_seqlens_q, max_seqlen_in_batch_q = unpad_input(query_layer, attention_mask) + + return ( + query_layer, + key_layer, + value_layer, + indices_q.to(torch.int64), + (cu_seqlens_q, cu_seqlens_k), + (max_seqlen_in_batch_q, max_seqlen_in_batch_k), + ) + +INTERNLM2_ATTENTION_CLASSES = { + "eager": InternLM2Attention, + "flash_attention_2": InternLM2FlashAttention2, +} + +# Modified from transformers.model.llama.modeling_llama.LlamaDecoderLayer +class InternLM2DecoderLayer(nn.Module): + def __init__(self, config: InternLM2Config): + super().__init__() + self.hidden_size = config.hidden_size + + self.attention = INTERNLM2_ATTENTION_CLASSES[config.attn_implementation](config=config) + + self.feed_forward = InternLM2MLP(config) + self.attention_norm = InternLM2RMSNorm(config.hidden_size, eps=config.rms_norm_eps) + self.ffn_norm = InternLM2RMSNorm(config.hidden_size, eps=config.rms_norm_eps) + + def forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Tuple[torch.Tensor]] = None, + output_attentions: Optional[bool] = False, + use_cache: Optional[bool] = False, + **kwargs, + ) -> Tuple[torch.FloatTensor, Optional[Tuple[torch.FloatTensor, torch.FloatTensor]]]: + """ + Args: + hidden_states (`torch.FloatTensor`): input to the layer of shape `(batch, seq_len, embed_dim)` + attention_mask (`torch.FloatTensor`, *optional*): + attention mask of size `(batch_size, sequence_length)` if flash attention is used or `(batch_size, 1, + query_sequence_length, key_sequence_length)` if default attention is used. + output_attentions (`bool`, *optional*): + Whether or not to return the attentions tensors of all attention layers. See `attentions` under + returned tensors for more detail. + use_cache (`bool`, *optional*): + If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding + (see `past_key_values`). + past_key_value (`Tuple(torch.FloatTensor)`, *optional*): cached past key and value projection states + """ + if "padding_mask" in kwargs: + warnings.warn( + "Passing `padding_mask` is deprecated and will be removed in v4.37. " + "Please make sure use `attention_mask` instead.`" + ) + + residual = hidden_states + + hidden_states = self.attention_norm(hidden_states) + + # Self Attention + hidden_states, self_attn_weights, present_key_value = self.attention( + hidden_states=hidden_states, + attention_mask=attention_mask, + position_ids=position_ids, + past_key_value=past_key_value, + output_attentions=output_attentions, + use_cache=use_cache, + **kwargs, + ) + hidden_states = residual + hidden_states + + # Fully Connected + residual = hidden_states + hidden_states = self.ffn_norm(hidden_states) + hidden_states = self.feed_forward(hidden_states) + hidden_states = residual + hidden_states + + outputs = (hidden_states,) + + if output_attentions: + outputs += (self_attn_weights,) + + if use_cache: + outputs += (present_key_value,) + + return outputs + + +InternLM2_START_DOCSTRING = r""" + This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the + library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads + etc.) + + This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. 
+ Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage + and behavior. + + Parameters: + config ([`InternLM2Config`]): + Model configuration class with all the parameters of the model. Initializing with a config file does not + load the weights associated with the model, only the configuration. Check out the + [`~PreTrainedModel.from_pretrained`] method to load the model weights. +""" + + +# Copied from transformers.models.llama.modeling_llama.LlamaPreTrainedModel with Llama->InternLM2 +@add_start_docstrings( + "The bare InternLM2 Model outputting raw hidden-states without any specific head on top.", + InternLM2_START_DOCSTRING, +) +class InternLM2PreTrainedModel(PreTrainedModel): + config_class = InternLM2Config + base_model_prefix = "model" + supports_gradient_checkpointing = True + _no_split_modules = ["InternLM2DecoderLayer"] + _skip_keys_device_placement = "past_key_values" + + def _init_weights(self, module): + std = self.config.initializer_range + if isinstance(module, nn.Linear): + module.weight.data.normal_(mean=0.0, std=std) + if module.bias is not None: + module.bias.data.zero_() + elif isinstance(module, nn.Embedding): + module.weight.data.normal_(mean=0.0, std=std) + if module.padding_idx is not None: + module.weight.data[module.padding_idx].zero_() + + +InternLM2_INPUTS_DOCSTRING = r""" + Args: + input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): + Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide + it. + + Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and + [`PreTrainedTokenizer.__call__`] for details. + + [What are input IDs?](../glossary#input-ids) + attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*): + Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: + + - 1 for tokens that are **not masked**, + - 0 for tokens that are **masked**. + + [What are attention masks?](../glossary#attention-mask) + + Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and + [`PreTrainedTokenizer.__call__`] for details. + + If `past_key_values` is used, optionally only the last `input_ids` have to be input (see + `past_key_values`). + + If you want to change padding behavior, you should read [`modeling_opt._prepare_decoder_attention_mask`] + and modify to your needs. See diagram 1 in [the paper](https://arxiv.org/abs/1910.13461) for more + information on the default strategy. + + - 1 indicates the head is **not masked**, + - 0 indicates the head is **masked**. + position_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): + Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0, + config.n_positions - 1]`. + + [What are position IDs?](../glossary#position-ids) + past_key_values (`tuple(tuple(torch.FloatTensor))`, *optional*, returned when `use_cache=True` is passed or + when `config.use_cache=True`): + Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of shape + `(batch_size, num_heads, sequence_length, embed_size_per_head)`) and 2 additional tensors of shape + `(batch_size, num_heads, decoder_sequence_length, embed_size_per_head)`. 
+ + Contains pre-computed hidden-states (key and values in the self-attention blocks and in the cross-attention + blocks) that can be used (see `past_key_values` input) to speed up sequential decoding. + + If `past_key_values` are used, the user can optionally input only the last `input_ids` (those that don't + have their past key value states given to this model) of shape `(batch_size, 1)` instead of all `input_ids` + of shape `(batch_size, sequence_length)`. + inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): + Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This + is useful if you want more control over how to convert `input_ids` indices into associated vectors than the + model's internal embedding lookup matrix. + use_cache (`bool`, *optional*): + If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see + `past_key_values`). + output_attentions (`bool`, *optional*): + Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned + tensors for more detail. + output_hidden_states (`bool`, *optional*): + Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for + more detail. + return_dict (`bool`, *optional*): + Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. +""" + + +# Modified from transformers.model.llama.modeling_llama.LlamaModel +@add_start_docstrings( + "The bare InternLM2 Model outputting raw hidden-states without any specific head on top.", + InternLM2_START_DOCSTRING, +) +class InternLM2Model(InternLM2PreTrainedModel): + """ + Transformer decoder consisting of *config.num_hidden_layers* layers. 
Each layer is a [`InternLM2DecoderLayer`] + + Args: + config: InternLM2Config + """ + + _auto_class = "AutoModel" + + def __init__(self, config: InternLM2Config): + super().__init__(config) + self.padding_idx = config.pad_token_id + self.vocab_size = config.vocab_size + self.config = config + + self.tok_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx) + + self.layers = nn.ModuleList([InternLM2DecoderLayer(config) for _ in range(config.num_hidden_layers)]) + self.norm = InternLM2RMSNorm(config.hidden_size, eps=config.rms_norm_eps) + + self.gradient_checkpointing = False + # Initialize weights and apply final processing + self.post_init() + + def get_input_embeddings(self): + return self.tok_embeddings + + def set_input_embeddings(self, value): + self.tok_embeddings = value + + def _prepare_decoder_attention_mask(self, attention_mask, input_shape, inputs_embeds, past_key_values_length): + # create causal mask + # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len] + combined_attention_mask = None + if input_shape[-1] > 1: + combined_attention_mask = _make_causal_mask( + input_shape, + inputs_embeds.dtype, + device=inputs_embeds.device, + past_key_values_length=past_key_values_length, + ) + + if attention_mask is not None: + # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len] + expanded_attn_mask = _expand_mask(attention_mask, inputs_embeds.dtype, tgt_len=input_shape[-1]).to( + inputs_embeds.device + ) + combined_attention_mask = ( + expanded_attn_mask if combined_attention_mask is None else expanded_attn_mask + combined_attention_mask + ) + + return combined_attention_mask + + @add_start_docstrings_to_model_forward(InternLM2_INPUTS_DOCSTRING) + def forward( + self, + input_ids: torch.LongTensor = None, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_values: Optional[List[torch.FloatTensor]] = None, + inputs_embeds: Optional[torch.FloatTensor] = None, + use_cache: Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + ) -> Union[Tuple, BaseModelOutputWithPast]: + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + use_cache = use_cache if use_cache is not None else self.config.use_cache + + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + if self.config.attn_implementation == "flash_attention_2": + _import_flash_attn() + + # retrieve input_ids and inputs_embeds + if input_ids is not None and inputs_embeds is not None: + raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time") + elif input_ids is not None: + batch_size, seq_length = input_ids.shape[:2] + elif inputs_embeds is not None: + batch_size, seq_length = inputs_embeds.shape[:2] + else: + raise ValueError("You have to specify either input_ids or inputs_embeds") + + seq_length_with_past = seq_length + past_key_values_length = 0 + if past_key_values is not None: + past_key_values_length = past_key_values[0][0].shape[2] + seq_length_with_past = seq_length_with_past + past_key_values_length + + if position_ids is None: + device = input_ids.device if input_ids is not None else inputs_embeds.device + position_ids = torch.arange( + past_key_values_length, seq_length + past_key_values_length, 
dtype=torch.long, device=device + ) + position_ids = position_ids.unsqueeze(0) + + if inputs_embeds is None: + inputs_embeds = self.tok_embeddings(input_ids) + + if self.config.attn_implementation == "flash_attention_2": + # 2d mask is passed through the layers + attention_mask = attention_mask if (attention_mask is not None and 0 in attention_mask) else None + else: + if attention_mask is None: + attention_mask = torch.ones( + (batch_size, seq_length_with_past), dtype=torch.bool, device=inputs_embeds.device + ) + attention_mask = self._prepare_decoder_attention_mask( + attention_mask, (batch_size, seq_length), inputs_embeds, past_key_values_length + ) + + # embed positions + hidden_states = inputs_embeds + + if self.gradient_checkpointing and self.training: + if use_cache: + logger.warning_once( + "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..." + ) + use_cache = False + + # decoder layers + all_hidden_states = () if output_hidden_states else None + all_self_attns = () if output_attentions else None + next_decoder_cache = () if use_cache else None + + for idx, decoder_layer in enumerate(self.layers): + if output_hidden_states: + all_hidden_states += (hidden_states,) + + past_key_value = past_key_values[idx] if past_key_values is not None else None + + if self.gradient_checkpointing and self.training: + + def create_custom_forward(module): + def custom_forward(*inputs): + # None for past_key_value + return module(*inputs, output_attentions, None) + + return custom_forward + + layer_outputs = torch.utils.checkpoint.checkpoint( + create_custom_forward(decoder_layer), + hidden_states, + attention_mask, + position_ids, + None, + ) + else: + layer_outputs = decoder_layer( + hidden_states, + attention_mask=attention_mask, + position_ids=position_ids, + past_key_value=past_key_value, + output_attentions=output_attentions, + use_cache=use_cache, + ) + + hidden_states = layer_outputs[0] + + if use_cache: + next_decoder_cache += (layer_outputs[2 if output_attentions else 1],) + + if output_attentions: + all_self_attns += (layer_outputs[1],) + + hidden_states = self.norm(hidden_states) + + # add hidden states from the last decoder layer + if output_hidden_states: + all_hidden_states += (hidden_states,) + + next_cache = next_decoder_cache if use_cache else None + if not return_dict: + return tuple(v for v in [hidden_states, next_cache, all_hidden_states, all_self_attns] if v is not None) + return BaseModelOutputWithPast( + last_hidden_state=hidden_states, + past_key_values=next_cache, + hidden_states=all_hidden_states, + attentions=all_self_attns, + ) + + +# Modified from transformers.model.llama.modeling_llama.LlamaForCausalLM +class InternLM2ForCausalLM(InternLM2PreTrainedModel): + _auto_class = "AutoModelForCausalLM" + + _tied_weights_keys = ["output.weight"] + + def __init__(self, config): + super().__init__(config) + self.model = InternLM2Model(config) + self.vocab_size = config.vocab_size + self.output = nn.Linear(config.hidden_size, config.vocab_size, bias=False) + + # Initialize weights and apply final processing + self.post_init() + + def get_input_embeddings(self): + return self.model.tok_embeddings + + def set_input_embeddings(self, value): + self.model.tok_embeddings = value + + def get_output_embeddings(self): + return self.output + + def set_output_embeddings(self, new_embeddings): + self.output = new_embeddings + + def set_decoder(self, decoder): + self.model = decoder + + def get_decoder(self): + return self.model + + 
@add_start_docstrings_to_model_forward(InternLM2_INPUTS_DOCSTRING) + @replace_return_docstrings(output_type=CausalLMOutputWithPast, config_class=_CONFIG_FOR_DOC) + def forward( + self, + input_ids: torch.LongTensor = None, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_values: Optional[List[torch.FloatTensor]] = None, + inputs_embeds: Optional[torch.FloatTensor] = None, + labels: Optional[torch.LongTensor] = None, + use_cache: Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + ) -> Union[Tuple, CausalLMOutputWithPast]: + r""" + Args: + labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): + Labels for computing the masked language modeling loss. Indices should either be in `[0, ..., + config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored + (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`. + + Returns: + + Example: + + ```python + >>> from transformers import AutoTokenizer, InternLM2ForCausalLM + + >>> model = InternLM2ForCausalLM.from_pretrained(PATH_TO_CONVERTED_WEIGHTS) + >>> tokenizer = AutoTokenizer.from_pretrained(PATH_TO_CONVERTED_TOKENIZER) + + >>> prompt = "Hey, are you conscious? Can you talk to me?" + >>> inputs = tokenizer(prompt, return_tensors="pt") + + >>> # Generate + >>> generate_ids = model.generate(inputs.input_ids, max_length=30) + >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0] + "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you." + ```""" + + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + # decoder outputs consists of (dec_features, layer_state, dec_hidden, dec_attn) + outputs = self.model( + input_ids=input_ids, + attention_mask=attention_mask, + position_ids=position_ids, + past_key_values=past_key_values, + inputs_embeds=inputs_embeds, + use_cache=use_cache, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + + hidden_states = outputs[0] + logits = self.output(hidden_states) + logits = logits.float() + + loss = None + if labels is not None: + # Shift so that tokens < n predict n + shift_logits = logits[..., :-1, :].contiguous() + shift_labels = labels[..., 1:].contiguous() + # Flatten the tokens + loss_fct = CrossEntropyLoss() + shift_logits = shift_logits.view(-1, self.config.vocab_size) + shift_labels = shift_labels.view(-1) + # Enable model parallelism + shift_labels = shift_labels.to(shift_logits.device) + loss = loss_fct(shift_logits, shift_labels) + + if not return_dict: + output = (logits,) + outputs[1:] + return (loss,) + output if loss is not None else output + + return CausalLMOutputWithPast( + loss=loss, + logits=logits, + past_key_values=outputs.past_key_values, + hidden_states=outputs.hidden_states, + attentions=outputs.attentions, + ) + + def prepare_inputs_for_generation( + self, input_ids, past_key_values=None, attention_mask=None, inputs_embeds=None, **kwargs + ): + if past_key_values is not None: + past_length = 
past_key_values[0][0].shape[2] + + # Some generation methods already pass only the last input ID + if input_ids.shape[1] > past_length: + remove_prefix_length = past_length + else: + # Default to old behavior: keep only final ID + remove_prefix_length = input_ids.shape[1] - 1 + + input_ids = input_ids[:, remove_prefix_length:] + + position_ids = kwargs.get("position_ids", None) + if attention_mask is not None and position_ids is None: + # create position_ids on the fly for batch generation + position_ids = attention_mask.long().cumsum(-1) - 1 + position_ids.masked_fill_(attention_mask == 0, 1) + if past_key_values: + position_ids = position_ids[:, -input_ids.shape[1] :] + + # if `inputs_embeds` are passed, we only want to use them in the 1st generation step + if inputs_embeds is not None and past_key_values is None: + model_inputs = {"inputs_embeds": inputs_embeds} + else: + model_inputs = {"input_ids": input_ids} + + model_inputs.update( + { + "position_ids": position_ids, + "past_key_values": past_key_values, + "use_cache": kwargs.get("use_cache"), + "attention_mask": attention_mask, + } + ) + return model_inputs + + @staticmethod + def _reorder_cache(past_key_values, beam_idx): + reordered_past = () + for layer_past in past_key_values: + reordered_past += ( + tuple(past_state.index_select(0, beam_idx.to(past_state.device)) for past_state in layer_past), + ) + return reordered_past + + def build_inputs(self, tokenizer, query: str, history: List[Tuple[str, str]] = [], meta_instruction=""): + if tokenizer.add_bos_token: + prompt = "" + else: + prompt = tokenizer.bos_token + if meta_instruction: + prompt += f"""<|im_start|>system\n{meta_instruction}<|im_end|>\n""" + for record in history: + prompt += f"""<|im_start|>user\n{record[0]}<|im_end|>\n<|im_start|>assistant\n{record[1]}<|im_end|>\n""" + prompt += f"""<|im_start|>user\n{query}<|im_end|>\n<|im_start|>assistant\n""" + return tokenizer([prompt], return_tensors="pt") + + @torch.no_grad() + def chat( + self, + tokenizer, + query: str, + history: List[Tuple[str, str]] = [], + streamer: Optional[BaseStreamer] = None, + max_new_tokens: int = 1024, + do_sample: bool = True, + temperature: float = 0.8, + top_p: float = 0.8, + meta_instruction: str = "You are an AI assistant whose name is InternLM (书生·浦语).\n" + "- InternLM (书生·浦语) is a conversational language model that is developed by Shanghai AI Laboratory (上海人工智能实验室). 
It is designed to be helpful, honest, and harmless.\n" + "- InternLM (书生·浦语) can understand and communicate fluently in the language chosen by the user such as English and 中文.", + **kwargs, + ): + inputs = self.build_inputs(tokenizer, query, history, meta_instruction) + inputs = {k: v.to(self.device) for k, v in inputs.items() if torch.is_tensor(v)} + # also add end-of-assistant token in eos token id to avoid unnecessary generation + eos_token_id = [tokenizer.eos_token_id, tokenizer.convert_tokens_to_ids(["<|im_end|>"])[0]] + outputs = self.generate( + **inputs, + streamer=streamer, + max_new_tokens=max_new_tokens, + do_sample=do_sample, + temperature=temperature, + top_p=top_p, + eos_token_id=eos_token_id, + **kwargs, + ) + outputs = outputs[0].cpu().tolist()[len(inputs["input_ids"][0]) :] + response = tokenizer.decode(outputs, skip_special_tokens=True) + response = response.split("<|im_end|>")[0] + history = history + [(query, response)] + return response, history + + @torch.no_grad() + def stream_chat( + self, + tokenizer, + query: str, + history: List[Tuple[str, str]] = [], + max_new_tokens: int = 1024, + do_sample: bool = True, + temperature: float = 0.8, + top_p: float = 0.8, + **kwargs, + ): + """ + Return a generator in format: (response, history) + Eg. + ('你好,有什么可以帮助您的吗', [('你好', '你好,有什么可以帮助您的吗')]) + ('你好,有什么可以帮助您的吗?', [('你好', '你好,有什么可以帮助您的吗?')]) + """ + if BaseStreamer is None: + raise ModuleNotFoundError( + "The version of `transformers` is too low. Please make sure " + "that you have installed `transformers>=4.28.0`." + ) + + response_queue = queue.Queue(maxsize=20) + + class ChatStreamer(BaseStreamer): + def __init__(self, tokenizer) -> None: + super().__init__() + self.tokenizer = tokenizer + self.queue = response_queue + self.query = query + self.history = history + self.response = "" + self.cache = [] + self.received_inputs = False + self.queue.put((self.response, history + [(self.query, self.response)])) + + def put(self, value): + if len(value.shape) > 1 and value.shape[0] > 1: + raise ValueError("ChatStreamer only supports batch size 1") + elif len(value.shape) > 1: + value = value[0] + + if not self.received_inputs: + # The first received value is input_ids, ignore here + self.received_inputs = True + return + + self.cache.extend(value.tolist()) + token = self.tokenizer.decode(self.cache, skip_special_tokens=True) + if token.strip() != "<|im_end|>": + self.response = self.response + token + history = self.history + [(self.query, self.response)] + self.queue.put((self.response, history)) + self.cache = [] + else: + self.end() + + def end(self): + self.queue.put(None) + + def stream_producer(): + return self.chat( + tokenizer=tokenizer, + query=query, + streamer=ChatStreamer(tokenizer=tokenizer), + history=history, + max_new_tokens=max_new_tokens, + do_sample=do_sample, + temperature=temperature, + top_p=top_p, + **kwargs, + ) + + def consumer(): + producer = threading.Thread(target=stream_producer) + producer.start() + while True: + res = response_queue.get() + if res is None: + return + yield res + + return consumer() + +# Modified from transformers.model.llama.modeling_llama.LlamaForCausalLM +class InternLM2ForRewardModel(InternLM2PreTrainedModel): + + _auto_class = "AutoModel" + _tied_weights_keys = ["v_head.weight"] + + def __init__(self, config): + super().__init__(config) + self.model = InternLM2Model(config) + self.vocab_size = config.vocab_size + self.v_head = nn.Linear(config.hidden_size, 1, bias=False) + self.reward_token_id = config.reward_token_id + + # 
Initialize weights and apply final processing + self.post_init() + + def get_input_embeddings(self): + return self.model.tok_embeddings + + def set_input_embeddings(self, value): + self.model.tok_embeddings = value + + def get_output_embeddings(self): + return self.v_head + + def set_output_embeddings(self, new_embeddings): + self.v_head = new_embeddings + + def set_decoder(self, decoder): + self.model = decoder + + def get_decoder(self): + return self.model + + @add_start_docstrings_to_model_forward(InternLM2_INPUTS_DOCSTRING) + @replace_return_docstrings(output_type=SequenceClassifierOutputWithPast, config_class=_CONFIG_FOR_DOC) + def forward( + self, + input_ids: torch.LongTensor = None, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_values: Optional[List[torch.FloatTensor]] = None, + inputs_embeds: Optional[torch.FloatTensor] = None, + labels: Optional[torch.LongTensor] = None, + use_cache: Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + ) -> Union[Tuple, SequenceClassifierOutputWithPast]: + r""" + Args: + labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): + Labels for computing the masked language modeling loss. Indices should either be in `[0, ..., + config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored + (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`. + + Returns: + + Example: + + ```python + >>> from transformers import AutoTokenizer, InternLM2ForCausalLM + + >>> model = InternLM2ForCausalLM.from_pretrained(PATH_TO_CONVERTED_WEIGHTS) + >>> tokenizer = AutoTokenizer.from_pretrained(PATH_TO_CONVERTED_TOKENIZER) + + >>> prompt = "Hey, are you conscious? Can you talk to me?" + >>> inputs = tokenizer(prompt, return_tensors="pt") + + >>> # Generate + >>> generate_ids = model.generate(inputs.input_ids, max_length=30) + >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0] + "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you." 
+ ```""" + + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + # decoder outputs consists of (dec_features, layer_state, dec_hidden, dec_attn) + outputs = self.model( + input_ids=input_ids, + attention_mask=attention_mask, + position_ids=position_ids, + past_key_values=past_key_values, + inputs_embeds=inputs_embeds, + use_cache=use_cache, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + + hidden_states = outputs[0] + hidden_states = self.v_head(hidden_states) + # get end reward token's score + ends = attention_mask.cumsum(dim=1).argmax(dim=1).view(-1,1) + + reward_scores = torch.gather(hidden_states.squeeze(-1), 1, ends) + + loss = None + + if not return_dict: + output = (reward_scores,) + outputs[1:] + return (loss,) + output if loss is not None else output + + return SequenceClassifierOutputWithPast( + loss=loss, + logits=reward_scores, + past_key_values=outputs.past_key_values, + hidden_states=outputs.hidden_states, + attentions=outputs.attentions, + ) + + @torch.no_grad() + def get_score( + self, + tokenizer, + conversation: List[dict], + **kwargs, + ): + conversation_str = tokenizer.apply_chat_template(conversation, tokenize=False, add_generation_prompt=False) + input_ids = tokenizer.encode(conversation_str, return_tensors="pt", add_special_tokens=False) + # add reward score token at the end of the input_ids + input_ids = torch.cat([input_ids, torch.tensor([[self.reward_token_id]], dtype=torch.long)], dim=1).to(self.device) + attention_mask = torch.ones_like(input_ids, dtype=torch.bool).to(self.device) + + outputs = self.forward(input_ids=input_ids, attention_mask=attention_mask, **kwargs) + score = outputs[0].cpu().item() + return score + + @torch.no_grad() + def get_scores( + self, + tokenizer, + conversations: List[List[dict]], + **kwargs, + ): + conversation_strs = [tokenizer.apply_chat_template(conversation, tokenize=False, add_generation_prompt=False) for conversation in conversations] + batch_input_ids = [] + attention_masks = [] + + for conversation_str in conversation_strs: + input_ids = tokenizer.encode(conversation_str, return_tensors="pt", add_special_tokens=False) + input_ids = torch.cat([input_ids, torch.tensor([[self.reward_token_id]], dtype=torch.long)], dim=1).squeeze(0) + attention_mask = torch.ones(input_ids.shape, dtype=torch.bool) + batch_input_ids.append(input_ids) + attention_masks.append(attention_mask) + + r_pad_batch_input_ids = torch.nn.utils.rnn.pad_sequence(batch_input_ids, batch_first=True, padding_value=tokenizer.pad_token_id) + r_pad_attention_masks = torch.nn.utils.rnn.pad_sequence(attention_masks, batch_first=True, padding_value=False) + + outputs = self.forward(input_ids=r_pad_batch_input_ids.to(self.device), attention_mask=r_pad_attention_masks.to(self.device), **kwargs) + scores = outputs[0].cpu().tolist() + return scores + + @torch.no_grad() + def compare( + self, + tokenizer, + conversation1: List[dict], + conversation2: List[dict], + return_logits: bool = False, + **kwargs, + ): + score1 = self.get_score(tokenizer, conversation1, **kwargs) + score2 = self.get_score(tokenizer, conversation2, **kwargs) + if return_logits: + return score1, score2 + else: + return score1 > score2 + + @torch.no_grad() + def rank( 
+ self, + tokenizer, + conversations: List[List[dict]], + return_logits: bool = False, + **kwargs, + ): + scores = self.get_scores(tokenizer, conversations, **kwargs) + if return_logits: + return scores + else: + return sorted(range(len(scores)), key=lambda i: scores[i], reverse=True) + + +# Copied from transformers.model.llama.modeling_llama.LlamaForSequenceClassification with Llama->InternLM2 +@add_start_docstrings( + """ + The InternLM2 Model transformer with a sequence classification head on top (linear layer). + + [`InternLM2ForSequenceClassification`] uses the last token in order to do the classification, + as other causal models (e.g. GPT-2) do. + + Since it does classification on the last token, it requires to know the position of the last token. If a + `pad_token_id` is defined in the configuration, it finds the last token that is not a padding token in each row. If + no `pad_token_id` is defined, it simply takes the last value in each row of the batch. Since it cannot guess the + padding tokens when `inputs_embeds` are passed instead of `input_ids`, it does the same (take the last value in + each row of the batch). + """, + InternLM2_START_DOCSTRING, +) +class InternLM2ForSequenceClassification(InternLM2PreTrainedModel): + def __init__(self, config): + super().__init__(config) + self.num_labels = config.num_labels + self.model = InternLM2Model(config) + self.score = nn.Linear(config.hidden_size, self.num_labels, bias=False) + + # Initialize weights and apply final processing + self.post_init() + + def get_input_embeddings(self): + return self.model.tok_embeddings + + def set_input_embeddings(self, value): + self.model.tok_embeddings = value + + @add_start_docstrings_to_model_forward(InternLM2_INPUTS_DOCSTRING) + def forward( + self, + input_ids: torch.LongTensor = None, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_values: Optional[List[torch.FloatTensor]] = None, + inputs_embeds: Optional[torch.FloatTensor] = None, + labels: Optional[torch.LongTensor] = None, + use_cache: Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + ) -> Union[Tuple, SequenceClassifierOutputWithPast]: + r""" + labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*): + Labels for computing the sequence classification/regression loss. Indices should be in `[0, ..., + config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If + `config.num_labels > 1` a classification loss is computed (Cross-Entropy). 
+ """ + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + transformer_outputs = self.model( + input_ids, + attention_mask=attention_mask, + position_ids=position_ids, + past_key_values=past_key_values, + inputs_embeds=inputs_embeds, + use_cache=use_cache, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + hidden_states = transformer_outputs[0] + logits = self.score(hidden_states) + + if input_ids is not None: + batch_size = input_ids.shape[0] + else: + batch_size = inputs_embeds.shape[0] + + if self.config.pad_token_id is None and batch_size != 1: + raise ValueError("Cannot handle batch sizes > 1 if no padding token is defined.") + if self.config.pad_token_id is None: + sequence_lengths = -1 + else: + if input_ids is not None: + sequence_lengths = (torch.eq(input_ids, self.config.pad_token_id).int().argmax(-1) - 1).to( + logits.device + ) + else: + sequence_lengths = -1 + + pooled_logits = logits[torch.arange(batch_size, device=logits.device), sequence_lengths] + + loss = None + if labels is not None: + labels = labels.to(logits.device) + if self.config.problem_type is None: + if self.num_labels == 1: + self.config.problem_type = "regression" + elif self.num_labels > 1 and (labels.dtype == torch.long or labels.dtype == torch.int): + self.config.problem_type = "single_label_classification" + else: + self.config.problem_type = "multi_label_classification" + + if self.config.problem_type == "regression": + loss_fct = MSELoss() + if self.num_labels == 1: + loss = loss_fct(pooled_logits.squeeze(), labels.squeeze()) + else: + loss = loss_fct(pooled_logits, labels) + elif self.config.problem_type == "single_label_classification": + loss_fct = CrossEntropyLoss() + loss = loss_fct(pooled_logits.view(-1, self.num_labels), labels.view(-1)) + elif self.config.problem_type == "multi_label_classification": + loss_fct = BCEWithLogitsLoss() + loss = loss_fct(pooled_logits, labels) + if not return_dict: + output = (pooled_logits,) + transformer_outputs[1:] + return ((loss,) + output) if loss is not None else output + + return SequenceClassifierOutputWithPast( + loss=loss, + logits=pooled_logits, + past_key_values=transformer_outputs.past_key_values, + hidden_states=transformer_outputs.hidden_states, + attentions=transformer_outputs.attentions, + ) diff --git a/data/xtuner/xtuner/tools/model_converters/pth_to_hf.py b/data/xtuner/xtuner/tools/model_converters/pth_to_hf.py new file mode 100644 index 0000000000000000000000000000000000000000..2a4b28883281960a1cbda7193c0144e5b41d2e74 --- /dev/null +++ b/data/xtuner/xtuner/tools/model_converters/pth_to_hf.py @@ -0,0 +1,142 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+import argparse +import os.path as osp +import shutil +import warnings + +from accelerate import init_empty_weights +from accelerate.utils import set_module_tensor_to_device +from mmengine import print_log +from mmengine.config import Config, DictAction +from mmengine.fileio import PetrelBackend, get_file_backend +from mmengine.utils import mkdir_or_exist +from tqdm import tqdm + +from xtuner.configs import cfgs_name_path +from xtuner.model.utils import guess_load_checkpoint +from xtuner.registry import BUILDER + + +def parse_args(): + parser = argparse.ArgumentParser( + description='Convert the pth model to HuggingFace model') + parser.add_argument('config', help='config file name or path.') + parser.add_argument('pth_model', help='pth model file') + parser.add_argument( + 'save_dir', help='the directory to save HuggingFace model') + parser.add_argument( + '--fp32', + action='store_true', + help='Save LLM in fp32. If not set, fp16 will be used by default.') + parser.add_argument( + '--max-shard-size', + type=str, + default='2GB', + help='Only applicable for LLM. The maximum size for ' + 'each sharded checkpoint.') + parser.add_argument( + '--safe-serialization', + action='store_true', + help='Indicate if using `safe_serialization`') + parser.add_argument( + '--save-format', + default='xtuner', + choices=('xtuner', 'official', 'huggingface'), + help='Only applicable for LLaVAModel. Indicate the save format.') + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file. If the value to ' + 'be overwritten is a list, it should be like key="[a,b]" or key=a,b ' + 'It also allows nested list/tuple values, e.g. key="[(a,b),(c,d)]" ' + 'Note that the quotation marks are necessary and that no white space ' + 'is allowed.') + args = parser.parse_args() + return args + + +def main(): + args = parse_args() + + # parse config + if not osp.isfile(args.config): + try: + args.config = cfgs_name_path[args.config] + except KeyError: + raise FileNotFoundError(f'Cannot find {args.config}') + + # load config + cfg = Config.fromfile(args.config) + if args.cfg_options is not None: + cfg.merge_from_dict(args.cfg_options) + + model_name = cfg.model.type if isinstance(cfg.model.type, + str) else cfg.model.type.__name__ + use_meta_init = True + + if 'LLaVAModel' in model_name: + cfg.model.pretrained_pth = None + if args.save_format != 'xtuner': + use_meta_init = False + if 'Reward' in model_name: + use_meta_init = False + cfg.model.llm.pop('quantization_config', None) + if hasattr(cfg.model.llm, 'quantization_config'): + # Can not build a qlora model on meta device + use_meta_init = False + + if use_meta_init: + try: + # Initializing the model with meta-tensor can reduce unwanted + # memory usage. + with init_empty_weights(): + with warnings.catch_warnings(): + warnings.filterwarnings( + 'ignore', message='.*non-meta.*', category=UserWarning) + model = BUILDER.build(cfg.model) + except NotImplementedError as e: + # Cannot initialize the model with meta tensor if the model is + # quantized. 
+ if 'Cannot copy out of meta tensor' in str(e): + model = BUILDER.build(cfg.model) + else: + raise e + else: + model = BUILDER.build(cfg.model) + + backend = get_file_backend(args.pth_model) + if isinstance(backend, PetrelBackend): + from xtuner.utils.fileio import patch_fileio + with patch_fileio(): + state_dict = guess_load_checkpoint(args.pth_model) + else: + state_dict = guess_load_checkpoint(args.pth_model) + + for name, param in tqdm(state_dict.items(), desc='Load State Dict'): + set_module_tensor_to_device(model, name, 'cpu', param) + + model.llm.config.use_cache = True + + print_log(f'Load PTH model from {args.pth_model}', 'current') + + mkdir_or_exist(args.save_dir) + + save_pretrained_kwargs = { + 'max_shard_size': args.max_shard_size, + 'safe_serialization': args.safe_serialization + } + model.to_hf( + cfg=cfg, + save_dir=args.save_dir, + fp32=args.fp32, + save_pretrained_kwargs=save_pretrained_kwargs, + save_format=args.save_format) + + shutil.copyfile(args.config, osp.join(args.save_dir, 'xtuner_config.py')) + print_log('All done!', 'current') + + +if __name__ == '__main__': + main() diff --git a/data/xtuner/xtuner/tools/model_converters/split.py b/data/xtuner/xtuner/tools/model_converters/split.py new file mode 100644 index 0000000000000000000000000000000000000000..da0e4d7b765a135ed8437c68befdb070da4a265a --- /dev/null +++ b/data/xtuner/xtuner/tools/model_converters/split.py @@ -0,0 +1,64 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import argparse +import copy +import json +import os +import os.path as osp +import shutil + +import torch +from mmengine.utils import mkdir_or_exist + + +def parse_args(): + parser = argparse.ArgumentParser( + description='Split a HuggingFace model to the smallest sharded one') + parser.add_argument('src_dir', help='the directory of the model') + parser.add_argument('dst_dir', help='the directory to save the new model') + args = parser.parse_args() + return args + + +def main(): + args = parse_args() + mkdir_or_exist(args.dst_dir) + + all_files = os.listdir(args.src_dir) + for name in all_files: + if not name.startswith(('pytorch_model', '.')): + src_path = osp.join(args.src_dir, name) + dst_path = osp.join(args.dst_dir, name) + shutil.copy(src_path, dst_path) + + with open(osp.join(args.src_dir, 'pytorch_model.bin.index.json')) as f: + index = json.load(f) + + n_shard = len(index['weight_map']) + new_index = copy.deepcopy(index) + new_index['weight_map'] = {} + cnt = 1 + + checkpoints = set(index['weight_map'].values()) + for ckpt in checkpoints: + state_dict = torch.load( + osp.join(args.src_dir, ckpt), map_location='cuda') + keys = sorted(list(state_dict.keys())) + for k in keys: + new_state_dict_name = 'pytorch_model-{:05d}-of-{:05d}.bin'.format( + cnt, n_shard) + new_index['weight_map'][k] = new_state_dict_name + new_state_dict = {k: state_dict[k]} + torch.save(new_state_dict, + osp.join(args.dst_dir, new_state_dict_name)) + cnt += 1 + del state_dict + torch.cuda.empty_cache() + with open(osp.join(args.dst_dir, 'pytorch_model.bin.index.json'), + 'w') as f: + json.dump(new_index, f) + assert new_index['weight_map'].keys() == index['weight_map'].keys( + ), 'Mismatch on `weight_map`!' 
+
+
+if __name__ == '__main__':
+    main()
diff --git a/data/xtuner/xtuner/tools/plugins/__init__.py b/data/xtuner/xtuner/tools/plugins/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..b893bcac8976bed61f0526d57f22a118b6c6b848
--- /dev/null
+++ b/data/xtuner/xtuner/tools/plugins/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from .api import plugins_api
+
+__all__ = ['plugins_api']
diff --git a/data/xtuner/xtuner/tools/plugins/api.py b/data/xtuner/xtuner/tools/plugins/api.py
new file mode 100644
index 0000000000000000000000000000000000000000..7ac6579d6152564e4c7e5d885e06b39b8a03c65f
--- /dev/null
+++ b/data/xtuner/xtuner/tools/plugins/api.py
@@ -0,0 +1,43 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import re
+
+
+def plugins_api(input_str,
+                calculate_open=True,
+                solve_open=True,
+                search_open=True):
+
+    pattern = r'(Solve|solve|Solver|solver|Calculate|calculate|Calculator|calculator|Search)\("([^"]*)"\)'  # noqa: E501
+
+    matches = re.findall(pattern, input_str)
+
+    converted_str = '<|Results|>:\n'
+
+    for i in range(len(matches)):
+        if matches[i][0] in [
+                'Calculate', 'calculate', 'Calculator', 'calculator'
+        ]:
+            if calculate_open:
+                from .calculate import Calculate
+                result = Calculate(matches[i][1])
+            else:
+                result = None
+            converted_str += f"Calculate(\"{matches[i][1]}\") => {result}\n"
+        elif matches[i][0] in ['Solve', 'solve', 'Solver', 'solver']:
+            if solve_open:
+                from .solve import Solve
+                result = Solve(matches[i][1])
+            else:
+                result = None
+            converted_str += f"Solve(\"{matches[i][1]}\") =>\n{result}\n"
+        elif matches[i][0] == 'Search':
+            if search_open:
+                from .search import Search
+                result = Search(matches[i][1])
+            else:
+                result = None
+            converted_str += f"Search(\"{matches[i][1]}\") =>\n{result}"
+
+    converted_str += '\n'
+    return converted_str
diff --git a/data/xtuner/xtuner/tools/plugins/calculate.py b/data/xtuner/xtuner/tools/plugins/calculate.py
new file mode 100644
index 0000000000000000000000000000000000000000..48ed436cbeddd35de34fbb26d1f6f1e7d85fa810
--- /dev/null
+++ b/data/xtuner/xtuner/tools/plugins/calculate.py
@@ -0,0 +1,14 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from math import *  # noqa: F401, F403
+
+
+def Calculate(expression):
+    res = ''
+    for exp in expression.split(';'):
+        try:
+            res += '{:.2f};'.format(eval(exp.replace('^', '**')))
+        except Exception:
+            res += 'No result.'
+    if res[-1] == ';':
+        res = res[:-1]
+    return res
diff --git a/data/xtuner/xtuner/tools/plugins/search.py b/data/xtuner/xtuner/tools/plugins/search.py
new file mode 100644
index 0000000000000000000000000000000000000000..392bc86204fd43a7312bfd3ed13a30aef9fc4f42
--- /dev/null
+++ b/data/xtuner/xtuner/tools/plugins/search.py
@@ -0,0 +1,56 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import os
+import sys
+
+import requests
+
+try:
+    SERPER_API_KEY = os.environ['SERPER_API_KEY']
+except Exception:
+    print('Please obtain the `SERPER_API_KEY` from https://serper.dev and '
+          'set it using `export SERPER_API_KEY=xxx`.')
+    sys.exit(1)
+
+
+def parse_results(results, k=10):
+    snippets = []
+
+    for result in results['organic'][:k]:
+        if 'snippet' in result:
+            snippets.append(result['snippet'])
+        for attribute, value in result.get('attributes', {}).items():
+            snippets.append(f'{attribute}: {value}.')
+    return snippets
+
+
+def search(api_key, search_term, **kwargs):
+    headers = {
+        'X-API-KEY': api_key,
+        'Content-Type': 'application/json',
+    }
+    params = {
+        'q': search_term,
+        **{key: value
+           for key, value in kwargs.items() if value is not None},
+    }
+    try:
+        response = requests.post(
+            'https://google.serper.dev/search',
+            headers=headers,
+            params=params,
+            timeout=5)
+    except Exception as e:
+        return -1, str(e)
+    return response.status_code, response.json()
+
+
+def Search(q, k=10):
+    status_code, response = search(SERPER_API_KEY, q)
+    if status_code != 200:
+        ret = 'None\n'
+    else:
+        text = parse_results(response, k=k)
+        ret = ''
+        for idx, res in enumerate(text):
+            ret += f"<|{idx+1}|>: '{res}'\n"
+    return ret
diff --git a/data/xtuner/xtuner/tools/plugins/solve.py b/data/xtuner/xtuner/tools/plugins/solve.py
new file mode 100644
index 0000000000000000000000000000000000000000..20266a23f492cc5e7264d1a46398d64c94267579
--- /dev/null
+++ b/data/xtuner/xtuner/tools/plugins/solve.py
@@ -0,0 +1,61 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import math
+import re
+from math import *  # noqa: F401, F403
+
+from sympy import Eq, solve, symbols
+
+from .calculate import Calculate
+
+
+def Solve(equations_str):
+    try:
+        equations_str = equations_str.replace(' ', '')
+        equations_ori = re.split(r'[,;]+', equations_str)
+        equations_str = equations_str.replace('^', '**')
+        equations_str = re.sub(r'(\(.*\))([a-zA-Z])', r'\1 * \2',
+                               equations_str)
+        equations_str = re.sub(r'(\d+)([a-zA-Z])', r'\1 * \2', equations_str)
+        equations_str = equations_str.replace('pi', str(math.pi))
+        equations = re.split(r'[,;]+', equations_str)
+        vars_list = list(set(re.findall(r'[a-zA-Z]+', equations_str)))
+        vars = {var: symbols(var) for var in vars_list}
+
+        output = ''
+        eqs = []
+        for eq in equations:
+            if '=' in eq:
+                left, right = eq.split('=')
+                eqs.append(
+                    Eq(
+                        eval(left.strip(), {}, vars),
+                        eval(right.strip(), {}, vars)))
+        solutions = solve(eqs, vars, dict=True)
+
+        vars_values = {var: [] for var in vars_list}
+        if isinstance(solutions, list):
+            for idx, solution in enumerate(solutions):
+                for var, sol in solution.items():
+                    output += f'{var}_{idx} = {sol}\n'
+                    vars_values[str(var)].append(sol)
+        else:
+            for var, sol in solutions.items():
+                output += f'{var} = {sol}\n'
+                vars_values[str(var)].append(sol)
+        for eq, eq_o in zip(equations, equations_ori):
+            if '=' not in eq:
+                for var in vars_list:
+                    need_note = True if len(vars_values[var]) > 1 else False
+                    for idx, value in enumerate(vars_values[var]):
+                        eq_to_calc = eq.replace(var, str(value))
+                        calc_result = Calculate(eq_to_calc)
+                        if need_note:
+                            eq_name = eq_o.replace(var, f'{var}_{idx}')
+                        else:
+                            eq_name = eq_o
+                        if calc_result != 'No result.':
+                            output += f'{eq_name} = {calc_result}\n'
+
+        return output.strip()
+    except Exception:
+        return 'No result.'
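The three tools above are only ever reached through `plugins_api` in `plugins/api.py`, which regex-matches `Calculate("...")`, `Solve("...")` and `Search("...")` calls in a model response, executes the enabled ones, and returns a `<|Results|>:` block to feed back to the model. A minimal usage sketch, assuming `xtuner` is importable; the reply text is a made-up example:

```python
from xtuner.tools.plugins import plugins_api

# A model turn that embeds two tool calls (hypothetical text).
reply = 'Let me work that out: Calculate("2^10;3*7") then Solve("x+2=10, x^2")'

# Run Calculate and Solve, but keep Search disabled so no SERPER_API_KEY is
# needed; a disabled tool would simply be echoed with a None result.
print(plugins_api(reply, calculate_open=True, solve_open=True,
                  search_open=False))
# Prints "<|Results|>:" followed by one line/block per tool call.
```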
diff --git a/data/xtuner/xtuner/tools/process_untokenized_datasets.py b/data/xtuner/xtuner/tools/process_untokenized_datasets.py new file mode 100644 index 0000000000000000000000000000000000000000..c41905ee6daaebca1f9e546b5588c6d627baea39 --- /dev/null +++ b/data/xtuner/xtuner/tools/process_untokenized_datasets.py @@ -0,0 +1,75 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import argparse +import os +import warnings + +from mmengine import Config, ConfigDict +from mmengine.config.lazy import LazyObject + +from xtuner.registry import BUILDER + +# ignore FutureWarning in hf datasets +warnings.simplefilter(action='ignore', category=FutureWarning) + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument('config', help='config file name or path.') + parser.add_argument('--save-folder', help='The folder to save data order.') + args = parser.parse_args() + return args + + +def modify_config(config, dataset_save_folder): + dataset = ConfigDict( + type=LazyObject('datasets', 'load_from_disk'), + dataset_path=dataset_save_folder) + train_dataset = ConfigDict( + type=LazyObject('xtuner.dataset', 'process_hf_dataset'), + dataset=dataset, + do_dataset_tokenization=False, + tokenizer=None, + max_length=None, + dataset_map_fn=None, + template_map_fn=None, + max_dataset_length=None, + split=None, + remove_unused_columns=False, + rename_maps=[], + pack_to_max_length=False, + input_ids_with_output=False) + config.train_dataloader.dataset = train_dataset + return config + + +def process_untokenized_dataset(config): + dataset = BUILDER.build(config.train_dataloader.dataset) + return dataset + + +if __name__ == '__main__': + args = parse_args() + cfg = Config.fromfile(args.config) + + print('Start to process untokenized dataset...') + processed_dataset = process_untokenized_dataset(cfg) + print('Processing untokenized dataset finished.') + + processed_dataset_save_folder = args.save_folder + if not os.path.isabs(processed_dataset_save_folder): + processed_dataset_save_folder = os.path.join( + os.getcwd(), processed_dataset_save_folder) + modified_cfg = modify_config(cfg, processed_dataset_save_folder) + + print('Start to save processed dataset...') + processed_dataset.save_to_disk(processed_dataset_save_folder) + print( + f'Processed dataset has been saved to {processed_dataset_save_folder}') + + cfg_folder, cfg_file_name = os.path.split(args.config) + cfg_file_name = cfg_file_name.split('.')[0] + cfg_file_name = f'{cfg_file_name}_modified.py' + modified_cfg_save_path = os.path.join(cfg_folder, cfg_file_name) + modified_cfg.dump(modified_cfg_save_path) + print(f'Modified config has been saved to {modified_cfg_save_path}. ' + 'Please use this new config for the next training phase.') diff --git a/data/xtuner/xtuner/tools/process_untokenized_datasets_legacy.py b/data/xtuner/xtuner/tools/process_untokenized_datasets_legacy.py new file mode 100644 index 0000000000000000000000000000000000000000..8b4dd5a7de93e2966b2bb3d9c579a2e4669db034 --- /dev/null +++ b/data/xtuner/xtuner/tools/process_untokenized_datasets_legacy.py @@ -0,0 +1,184 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
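+# Legacy, flag-driven variant of process_untokenized_datasets.py; see the
+# srun usage examples in the docstring below.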
+import argparse +import ast +import multiprocessing +import os +import warnings +from functools import partial + +from datasets import Dataset, DatasetDict, load_dataset +from mmengine import ConfigDict +from transformers import AutoTokenizer + +from xtuner.dataset.huggingface import process +from xtuner.dataset.map_fns import (DATASET_FORMAT_MAPPING, + template_map_fn_factory) +from xtuner.utils import PROMPT_TEMPLATE + +# ignore FutureWarning in hf datasets +warnings.simplefilter(action='ignore', category=FutureWarning) +""" +ftdp dataset: +srun -p llm_razor --quotatype=auto --gres=gpu:1 --ntasks=1 \ + --ntasks-per-node=1 --cpus-per-task=5 --kill-on-bad-exit=1 \ + python xtuner/tools/process_untokenized_datasets.py \ + --data-folder /path/to/data/folder \ + --save-folder ./processed \ + --tokenizer-path pretrained_model_name_or_path \ + --prompt-template internlm2_chat \ + --dataset-format ftdp + +normal json dataset: +srun -p llm_razor --quotatype=auto --gres=gpu:1 --ntasks=1 \ + --ntasks-per-node=1 --cpus-per-task=5 --kill-on-bad-exit=1 \ + python xtuner/tools/process_untokenized_datasets.py \ + --data-folder /path/to/data/folder \ + --save-folder ./processed \ + --tokenizer-path pretrained_model_name_or_path \ + --prompt-template internlm2_chat +""" + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument('--data-folder', help='Data folder') + parser.add_argument('--save-folder', help='The folder to save data order.') + parser.add_argument( + '--tokenizer-path', help='The path to the hf tokenizer.') + parser.add_argument( + '--dataset-format', + choices=list(DATASET_FORMAT_MAPPING.keys()) + ['ftdp'], + default=None, + help='Which dataset format is this data. The available choices are ' + f"{list(DATASET_FORMAT_MAPPING.keys()) + ['ftdp']}. ") + parser.add_argument( + '--prompt-template', + choices=PROMPT_TEMPLATE.keys(), + help='Which prompt template need to be added to the dataset. ' + f'The available choices are {PROMPT_TEMPLATE.keys()}') + parser.add_argument( + '--max-length', default=32768, help='Max sequence length.') + parser.add_argument( + '--pack-to-max-length', + action='store_true', + help='Whether to pack the dataset to the `max_length `.') + parser.add_argument( + '--file-type', + default='.json', + help='We want to get the order of the file in this type.') + parser.add_argument( + '--data-order-path', + default=None, + help=('The path to a txt file which contains the a list of data path.' 
+ ' It can be obtain by xtuner/tools/get_data_order.py script.')) + args = parser.parse_args() + return args + + +def process_one(fp, + tokenizer, + max_length, + pack_to_max_length, + dataset_map_fn=None, + template_map_fn=None, + is_ftdp=False): + dataset = [] + if is_ftdp: + with open(fp) as file: + lines = file.readlines() + for line in lines: + line = ast.literal_eval(line) + dataset.append({'messages': line}) + dataset = Dataset.from_list(dataset) + else: + # load formal json data + dataset = load_dataset('json', data_files=fp) + dataset = dataset['train'] + dataset = process( + dataset, + tokenizer=tokenizer, + max_length=max_length, + dataset_map_fn=dataset_map_fn, + template_map_fn=template_map_fn, + remove_unused_columns=True, + pack_to_max_length=pack_to_max_length, + map_num_proc=32) + return fp, dataset + + +def process_untokenized_dataset(folder, + tokenizer, + max_length, + pack_to_max_length, + dataset_map_fn, + prompt_template, + data_order_path=None, + file_type='.json', + is_ftdp=False): + assert os.path.exists(folder), f'{folder} does not exist.' + datasets_dict = {} + + if data_order_path is not None: + data_order = load_dataset( + 'text', data_files=data_order_path, split='train')['text'] + for i, fp in enumerate(data_order): + data_order[i] = os.path.join(folder, fp) + else: + triples = list(os.walk(folder, followlinks=True)) + data_order = [] + for root, dirs, files in triples: + dirs.sort() + for fn in sorted(files): + if fn.endswith(file_type): + fp = os.path.join(root, fn) + data_order.append(fp) + print('All file path: ', data_order) + + pool = multiprocessing.Pool(processes=multiprocessing.cpu_count()) + template_map_fn = ConfigDict( + type=template_map_fn_factory, template=prompt_template) + process_single = partial( + process_one, + tokenizer=tokenizer, + max_length=max_length, + pack_to_max_length=pack_to_max_length, + dataset_map_fn=dataset_map_fn, + template_map_fn=template_map_fn, + is_ftdp=is_ftdp) + out = pool.map(process_single, data_order) + pool.close() + pool.join() + for idx, (key, dataset) in enumerate(out): + assert data_order[idx] == key + dataset = dataset.remove_columns('length') + datasets_dict[str(idx)] = dataset + datasets_dict = DatasetDict(datasets_dict) + return datasets_dict + + +if __name__ == '__main__': + args = parse_args() + tokenizer = ConfigDict( + type=AutoTokenizer.from_pretrained, + pretrained_model_name_or_path=args.tokenizer_path, + trust_remote_code=True, + padding_side='right') + + if args.dataset_format is None: + dataset_map_fn = None + elif args.dataset_format == 'ftdp': + dataset_map_fn = DATASET_FORMAT_MAPPING['openai'] + else: + dataset_map_fn = DATASET_FORMAT_MAPPING[args.dataset_format] + + datasets_dict = process_untokenized_dataset( + args.data_folder, + tokenizer, + args.max_length, + args.pack_to_max_length, + dataset_map_fn, + PROMPT_TEMPLATE[args.prompt_template], + data_order_path=args.data_order_path, + file_type=args.file_type, + is_ftdp=args.dataset_format == 'ftdp') + datasets_dict.save_to_disk(args.save_folder) diff --git a/data/xtuner/xtuner/tools/process_untokenized_llava_data.py b/data/xtuner/xtuner/tools/process_untokenized_llava_data.py new file mode 100644 index 0000000000000000000000000000000000000000..4d0c075855734835d3a72a2c98ee7be38b85bfac --- /dev/null +++ b/data/xtuner/xtuner/tools/process_untokenized_llava_data.py @@ -0,0 +1,33 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
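+# Build the LLaVA dataset defined by `config.train_dataloader.dataset` and
+# save its processed `text_data` split to `--save-folder`.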
+import argparse +import warnings + +from mmengine import Config + +from xtuner.registry import BUILDER + +# ignore FutureWarning in hf datasets +warnings.simplefilter(action='ignore', category=FutureWarning) + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument('config', help='config file name or path.') + parser.add_argument('--save-folder', help='The folder to save data order.') + args = parser.parse_args() + return args + + +def build_llava_dataset(config): + dataset = BUILDER.build(config.train_dataloader.dataset) + return dataset + + +if __name__ == '__main__': + args = parse_args() + cfg = Config.fromfile(args.config) + + llava_dataset = build_llava_dataset(cfg) + text_data = llava_dataset.text_data + + text_data.save_to_disk(args.save_folder) diff --git a/data/xtuner/xtuner/tools/test.py b/data/xtuner/xtuner/tools/test.py new file mode 100644 index 0000000000000000000000000000000000000000..5eb3f6d9d3099a54f561d8a3910168b0fc0a4fab --- /dev/null +++ b/data/xtuner/xtuner/tools/test.py @@ -0,0 +1,107 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import argparse +import os +import os.path as osp +from types import FunctionType + +from mmengine.config import Config, DictAction +from mmengine.registry import RUNNERS +from mmengine.runner import Runner + +from xtuner.configs import cfgs_name_path +from xtuner.model.utils import guess_load_checkpoint +from xtuner.registry import MAP_FUNC + + +def parse_args(): + parser = argparse.ArgumentParser(description='Test model') + parser.add_argument('config', help='config file name or path.') + parser.add_argument('--checkpoint', default=None, help='checkpoint file') + parser.add_argument( + '--work-dir', + help='the directory to save the file containing evaluation metrics') + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file. If the value to ' + 'be overwritten is a list, it should be like key="[a,b]" or key=a,b ' + 'It also allows nested list/tuple values, e.g. 
key="[(a,b),(c,d)]" ' + 'Note that the quotation marks are necessary and that no white space ' + 'is allowed.') + parser.add_argument( + '--launcher', + choices=['none', 'pytorch', 'slurm', 'mpi'], + default='none', + help='job launcher') + parser.add_argument('--local_rank', '--local-rank', type=int, default=0) + args = parser.parse_args() + if 'LOCAL_RANK' not in os.environ: + os.environ['LOCAL_RANK'] = str(args.local_rank) + return args + + +def register_function(cfg_dict): + if isinstance(cfg_dict, dict): + for key, value in dict.items(cfg_dict): + if isinstance(value, FunctionType): + value_str = str(value) + if value_str not in MAP_FUNC: + MAP_FUNC.register_module(module=value, name=value_str) + cfg_dict[key] = value_str + else: + register_function(value) + elif isinstance(cfg_dict, (list, tuple)): + for value in cfg_dict: + register_function(value) + + +def main(): + args = parse_args() + + # parse config + if not osp.isfile(args.config): + try: + args.config = cfgs_name_path[args.config] + except KeyError: + raise FileNotFoundError(f'Cannot find {args.config}') + + # load config + cfg = Config.fromfile(args.config) + cfg.launcher = args.launcher + if args.cfg_options is not None: + cfg.merge_from_dict(args.cfg_options) + + # register FunctionType object in cfg to `MAP_FUNC` Registry and + # change these FunctionType object to str + register_function(cfg._cfg_dict) + + # work_dir is determined in this priority: CLI > segment in file > filename + if args.work_dir is not None: + # update configs according to CLI args if args.work_dir is not None + cfg.work_dir = args.work_dir + elif cfg.get('work_dir', None) is None: + # use config filename as default work_dir if cfg.work_dir is None + cfg.work_dir = osp.join('./work_dirs', + osp.splitext(osp.basename(args.config))[0]) + + # build the runner from config + if 'runner_type' not in cfg: + # build the default runner + runner = Runner.from_cfg(cfg) + else: + # build customized runner from the registry + # if 'runner_type' is set in the cfg + runner = RUNNERS.build(cfg) + + state_dict = guess_load_checkpoint(args.checkpoint) + runner.model.load_state_dict(state_dict, strict=False) + runner.logger.info(f'Load checkpoint from {args.checkpoint}') + + # start testing + runner.test() + + +if __name__ == '__main__': + main() diff --git a/data/xtuner/xtuner/tools/tokenize_ftdp_datasets.py b/data/xtuner/xtuner/tools/tokenize_ftdp_datasets.py new file mode 100644 index 0000000000000000000000000000000000000000..9327a91fef9f79c48d4c3e933e7f039e0a11f191 --- /dev/null +++ b/data/xtuner/xtuner/tools/tokenize_ftdp_datasets.py @@ -0,0 +1,433 @@ +import argparse +import json +import os +import os.path as osp +from functools import partial +from pathlib import Path +from typing import Dict, List + +import numpy as np +from mmengine import list_dir_or_file, track_progress_rich +from transformers import AutoTokenizer + +SEPCIAL_TOKENS = [ + '<|plugin|>', '<|interpreter|>', '<|action_end|>', '<|action_start|>', + '<|im_end|>', '<|im_start|>' +] + +CHATML_LLAMAV13_32K_TOKEN_CFG = dict( + role_cfg=dict( + system=dict( + begin=dict( + with_name='<|im_start|>system name={name}\n', + without_name='<|im_start|>system\n', + name={ + 'interpreter': '<|interpreter|>', + 'plugin': '<|plugin|>', + }), + end='<|im_end|>\n', + loss=dict( + meta=False, + icl=False, + current=False, + prefix=False, + )), + user=dict( + begin=dict( + with_name='<|im_start|>user name={name}\n', + without_name='<|im_start|>user\n', + ), + end='<|im_end|>\n', + loss=dict( + icl=False, + 
current=False, + prefix=False, + )), + assistant=dict( + begin=dict( + with_name='<|im_start|>assistant name={name}\n', + without_name='<|im_start|>assistant\n', + name={ + 'interpreter': '<|interpreter|>', + 'plugin': '<|plugin|>', + }), + end='<|im_end|>\n', + loss=dict( + icl=True, + current=True, + prefix=False, + end=True, + )), + environment=dict( + begin=dict( + with_name='<|im_start|>environment name={name}\n', + without_name='<|im_start|>environment\n', + name={ + 'interpreter': '<|interpreter|>', + 'plugin': '<|plugin|>', + }), + end='<|im_end|>\n', + loss=dict( + icl=False, + current=False, + prefix=False, + )), + tool=dict( + begin=dict( + with_name='<|action_start|>{name}\n', + name={ + 'interpreter': '<|interpreter|>', + 'plugin': '<|plugin|>', + }), + end='<|action_end|>\n', + belong='assistant', + ), + thought=dict( + begin=dict(without_name=''), + end='', + belong='assistant', + ), + ), + max_len=32 * 1024, +) + + +def chatml_format( + processed_data, + tokenizer, + role_cfg, + max_len=2048, + encode_json=True, +): + """ + ```python + dict( + role='', + content='', + name='', -> Begin 扩增 + type='', + ) + ``` + ```python + dict( + system=dict( + begin=dict( + with_name='system name={name}\n', + without_name='system\n', + name={ + 'interpreter': '', + 'plugin': '', + }), + end='\n', + loss=dict( + meta=False, + icl=False, + current=False, + prefix=False, + )), + user=dict( + begin=dict( + with_name='user name={name}\n', + without_name='user\n', + ), + end='\n', + loss=dict( + icl=False, + current=False, + prefix=False, + )), + assistant=dict( + begin=dict( + with_name='assistant name={name}\n', + without_name='assistant\n', + name={ + 'interpreter': '', + 'plugin': '', + }), + end='\n', + loss=dict( + icl=True, + current=True, + prefix=False, + end=True, + )), + environment=dict( + begin=dict( + with_name='environment name={name}\n', + without_name='environment\n', + name={ + 'interpreter': '', + 'plugin': '', + }), + end='\n', + loss=dict( + icl=False, + current=False, + prefix=False, + )), + tool=dict( + begin=dict( + with_name='{name}\n', + name={ + 'interpreter': '', + 'plugin': '', + }), + end='\n', + belong='assistant', + ), + thought=dict( + begin='', + end='', + belong='assistant', + ), + ``` + """ + + def format_begin(role_cfg, message): + name = message.get('name', None) + if name is not None: + begin = role_cfg['begin'].get('with_name', '') + if name in role_cfg['begin'].get('name', {}): + begin = begin.format(name=role_cfg['begin']['name'][name]) + else: + begin = begin.format(name=name) + else: + begin = role_cfg['begin'].get('without_name', '') + return begin + + def format_sub_role(messages: List[Dict], roles_cfg) -> List[Dict]: + new_message = list() + for message in messages: + if message['role'] in [ + 'assistant', 'user', 'system', 'environment' + ]: + new_message.append(message) + continue + role_cfg = roles_cfg[message['role']] + begin = format_begin(role_cfg, message) + new_content = begin + message['content'] + role_cfg['end'] + if role_cfg.get('fallback_role'): + new_message.append( + dict(role=role_cfg['fallback_role'], content=new_content)) + elif role_cfg.get('belong'): + if new_message[-1]['role'] != role_cfg.get('belong'): + new_message.append( + dict(role=role_cfg.get('belong'), content=new_content)) + else: + new_message[-1]['content'] += new_content + else: + new_message.append( + dict(role=message['role'], content=new_content)) + + return new_message + + token_ids = [] + _processed_data = format_sub_role(processed_data, role_cfg) + + for 
dialog_item in _processed_data:
+        role = dialog_item['role']
+        content = dialog_item['content']
+        # TODO: is strip necessary? or use lstrip? (to avoid a leading '\n\n')
+        # content = content.lstrip()
+        begin = format_begin(role_cfg[role], dialog_item)
+        end = role_cfg[role]['end']
+        begin_token = tokenizer.encode(begin, add_special_tokens=False)
+        if not role_cfg[role]['loss'].get('begin', False):
+            begin_token = [-token_id for token_id in begin_token]
+        end_token = tokenizer.encode(
+            role_cfg[role]['end'], add_special_tokens=False)
+        if not role_cfg[role]['loss'].get('end', False):
+            end_token = [-token_id for token_id in end_token]
+
+        content_token = tokenizer.encode(
+            begin + content + end, add_special_tokens=False)
+        content_token = content_token[len(begin_token):-len(end_token)]
+
+        if dialog_item.get('loss', True):
+            loss_cfg = role_cfg[role]['loss']
+        else:
+            loss_cfg = dict(icl=False, current=False, meta=False)
+        if not loss_cfg[dialog_item.get('type', 'current')]:
+            content_token = [-token_id for token_id in content_token]
+
+        if begin == '':
+            tokens = content_token
+        else:
+            tokens = begin_token + content_token
+        if end != '':
+            tokens = tokens + end_token
+
+        token_ids += tokens
+
+    token_ids = [tokenizer.bos_token_id] + token_ids
+    token_ids = token_ids[:max_len]
+    if encode_json:
+        line = str.encode(json.dumps({'tokens': token_ids}) + '\n')
+        return line, len(token_ids)
+    return token_ids, len(token_ids)
+
+
+def write_bin_meta_bin(path, dataset_name, filename, samples):
+    train_path = osp.join(path, f'train/cn/{dataset_name}')
+    valid_path = osp.join(path, f'valid/cn/{dataset_name}')
+    train_dir = Path(train_path)
+    valid_dir = Path(valid_path)
+    train_dir.mkdir(exist_ok=True, parents=True)
+    valid_dir.mkdir(exist_ok=True, parents=True)
+    train_f = open(train_dir.joinpath(f'{filename}.bin'), 'wb')
+    valid_f_path = valid_dir.joinpath(f'{filename}.bin')
+    valid_f = open(valid_f_path, 'wb')
+    print(train_dir)
+    print(valid_dir)
+    train_tokens = 0
+    valid_tokens = 0
+    last_train_position = 0
+    last_valid_position = 0
+    train_samples = 0
+    valid_samples = 0
+    train_meta = []
+    valid_meta = []
+    for line, token_num in samples:
+        train_tokens += token_num
+        train_f.write(line)
+        train_meta.append((last_train_position, token_num))
+        last_train_position += len(line)
+        train_samples += 1
+        # every 100th sample is duplicated into the valid split
+        if train_samples % 100 == 0:
+            valid_tokens += token_num
+            valid_f.write(line)
+            valid_meta.append((last_valid_position, token_num))
+            last_valid_position += len(line)
+            valid_samples += 1
+    train_f.close()
+    valid_f.close()
+    np.save(open(train_dir.joinpath(f'{filename}.bin.meta'), 'wb'), train_meta)
+
+    # Remove the valid split if the number of `valid_samples` is too small.
+    # 500 is a magic number; you can change it to any number you want, but
+    # it must be bigger than the data parallel (DP) world size.
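+    # Illustrative layout (added note; the offsets below are made up): each
+    # `.bin` file holds one JSON line per sample, b'{"tokens": [1, 333, -42,
+    # ...]}\n', where negative ids mark tokens excluded from the loss, and
+    # the companion `.bin.meta` array holds (byte_offset, token_num) pairs
+    # such as [(0, 512), (2048, 301), ...] so a reader can seek directly to
+    # sample i.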
+ if valid_samples > 500: + np.save( + open(valid_dir.joinpath(f'{filename}.bin.meta'), 'wb'), valid_meta) + else: + print(f'{valid_f_path} is removed because the number of', + f'`valid_samples`({valid_samples}) is less than 500') + os.remove(valid_f_path) + return train_tokens, valid_tokens, train_samples, valid_samples + + +def tokenize_and_save(tokenizer, processed_dir, tokenized_dir): + tokenized_save_dir = osp.join(tokenized_dir, 'chatml_llamav13_32k') + data_dir = processed_dir + all_train_tokens = 0 + all_valid_tokens = 0 + all_train_samples = 0 + all_valid_samples = 0 + + for filename in list_dir_or_file(data_dir, recursive=True, list_dir=False): + file_path = os.path.join(data_dir, filename) + if '/processed/' not in file_path: + continue + assert '.jsonl' in filename + + # dataset name such as char_x10_chat_format + dataset_name = filename.split(os.sep)[0] + + # Hardcode here to skip tokenizing the file if it already exists + # (Refactor the `write_bin_meta_bin`!). + train_f = osp.join(tokenized_save_dir, 'train', 'cn', dataset_name, + f'{osp.splitext(osp.basename(filename))[0]}.bin') + if osp.isfile(train_f): + print(f'{train_f} already exists, skip it') + continue + + tokenize_fun = partial( + chatml_format, + tokenizer=tokenizer, + **CHATML_LLAMAV13_32K_TOKEN_CFG) + samples = [] + with open(file_path) as f: + dataset = f.readlines() + task_num = len(dataset) + dataset = map(lambda x: (json.loads(x), ), dataset) + + for sample in track_progress_rich( + tokenize_fun, + dataset, + nproc=32, + task_num=task_num, + chunksize=32, + description=f'{os.path.basename(file_path)}...'): + samples.append(sample) + + train_tokens, valid_tokens, train_samples, valid_samples = write_bin_meta_bin( # noqa E501 + path=tokenized_save_dir, + dataset_name=dataset_name, + samples=samples, + filename=osp.splitext(osp.basename(filename))[0]) + if train_tokens is None: + print(f'{osp.splitext(osp.basename(filename))[0]} already ' + 'exists, skip it') + continue + + print(f'train_tokens {train_tokens}', flush=True) + print(f'train_samples {train_samples}') + print(f'valid tokens {valid_tokens}') + print(f'valid_samples {valid_samples}') + all_train_tokens += train_tokens + all_valid_tokens += valid_tokens + all_train_samples += train_samples + all_valid_samples += valid_samples + + print(f'all train tokens {all_train_tokens}') + print(f'all train samples {all_train_samples}') + print(f'all valid tokens {all_valid_tokens}') + print(f'all valid samples {all_valid_samples}') + + +def tokenizer_add_special_tokens(tokenizer): + print(f'Before adding special tokens, Vocabulary Size: {len(tokenizer)}') + for special_token in SEPCIAL_TOKENS: + if special_token not in tokenizer.get_vocab(): + tokenizer.add_tokens([special_token], special_tokens=True) + print(f'After adding special tokens, Vocabulary Size: {len(tokenizer)}') + + +def save_new_tokenizer(tokenizer, save_dir): + tokenizer.save_pretrained(save_dir) + print(f'save new tokenizer to {save_dir}') + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument( + '--processed-dir', help='The folder to save untokenized data.') + parser.add_argument( + '--tokenized-dir', help='The folder to save tokenized data.') + parser.add_argument( + '--tokenizer-path', help='The path to the hf tokenizer.') + parser.add_argument( + '--tokenizer-w-special-tokens-save-dir', + default=None, + help='We have to add special tokens to the vocabulary of ' + 'the given tokenizer, and save the new tokenizer to this folder.') + args = parser.parse_args() + return 
args + + +def main(): + args = parse_args() + tokenizer = AutoTokenizer.from_pretrained( + args.tokenizer_path, trust_remote_code=True, padding_side='right') + + ori_vocab_size = len(tokenizer) + tokenizer_add_special_tokens(tokenizer) + if len(tokenizer) != ori_vocab_size: + save_new_tokenizer(tokenizer, args.tokenizer_w_special_tokens_save_dir) + + tokenize_and_save(tokenizer, args.processed_dir, args.tokenized_dir) + + +if __name__ == '__main__': + main() diff --git a/data/xtuner/xtuner/tools/train.py b/data/xtuner/xtuner/tools/train.py new file mode 100644 index 0000000000000000000000000000000000000000..29b5d539577e50c60ff2d88b3acf5c7160890e36 --- /dev/null +++ b/data/xtuner/xtuner/tools/train.py @@ -0,0 +1,364 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import argparse +import json +import logging +import os +import os.path as osp +from functools import partial +from types import FunctionType + +from mmengine.config import Config, DictAction +from mmengine.config.lazy import LazyObject +from mmengine.logging import print_log +from mmengine.registry import RUNNERS +from mmengine.runner import Runner +from mmengine.utils import digit_version +from peft import get_peft_model, prepare_model_for_kbit_training +from transformers import TrainingArguments + +from xtuner.configs import cfgs_name_path +from xtuner.dataset.collate_fns import default_collate_fn +from xtuner.model.modules import dispatch_modules +from xtuner.model.modules.dispatch import SUPPORT_FLASH2 +from xtuner.model.utils import LoadWoInit, find_all_linear_names, traverse_dict +from xtuner.registry import BUILDER, MAP_FUNC +from xtuner.tools.utils import (auto_dtype_of_deepspeed_config, + get_seed_from_checkpoint, set_model_resource) + + +def parse_args(): + parser = argparse.ArgumentParser(description='Train LLM') + parser.add_argument('config', help='config file name or path.') + parser.add_argument('--work-dir', help='the dir to save logs and models') + parser.add_argument( + '--deepspeed', + type=str, + default=None, + help='the path to the .json file for deepspeed') + parser.add_argument( + '--resume', + type=str, + default=None, + help='specify checkpoint path to be resumed from.') + parser.add_argument( + '--seed', type=int, default=None, help='Random seed for the training') + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file. If the value to ' + 'be overwritten is a list, it should be like key="[a,b]" or key=a,b ' + 'It also allows nested list/tuple values, e.g. 
key="[(a,b),(c,d)]" ' + 'Note that the quotation marks are necessary and that no white space ' + 'is allowed.') + parser.add_argument( + '--launcher', + choices=['none', 'pytorch', 'slurm', 'mpi'], + default='none', + help='job launcher') + parser.add_argument('--local_rank', '--local-rank', type=int, default=0) + args = parser.parse_args() + return args + + +def register_function(cfg_dict): + if isinstance(cfg_dict, dict): + for key, value in dict.items(cfg_dict): + if isinstance(value, FunctionType): + value_str = str(value) + if value_str not in MAP_FUNC: + MAP_FUNC.register_module(module=value, name=value_str) + cfg_dict[key] = value_str + else: + register_function(value) + elif isinstance(cfg_dict, (list, tuple)): + for value in cfg_dict: + register_function(value) + + +def check_cfg(cfg, args): + if getattr(cfg, 'use_varlen_attn', + False) and cfg.train_dataloader.batch_size > 1: + raise NotImplementedError( + f'If utilizing varlen attention, the batch size should be' + f' set to 1, but got {cfg.train_dataloader.batch_size}') + + if getattr(cfg, 'use_varlen_attn', False): + sequence_parallel = getattr(cfg, 'sequence_parallel', 1) + max_length = getattr(cfg.train_dataloader.dataset, 'max_length', None) + if max_length is not None: + assert max_length % sequence_parallel == 0, \ + ('When using varlen attention, `max_length` should be evenly ' + 'divided by sequence parallel world size, but got ' + f'max_length = {max_length} and sequence_parallel = ' + f'{sequence_parallel}') + + if getattr(cfg, 'sequence_parallel_size', 1) > 1: + assert SUPPORT_FLASH2, ('`flash_attn` is required if you want to use ' + 'sequence parallel.') + attn_implementation = getattr(cfg.model.llm, 'attn_implementation', + None) + assert (attn_implementation is None or + attn_implementation == 'flash_attention_2'), \ + ('If you want to use sequence parallel, please set ' + 'attn_implementation to `flash_attention_2` or do not ' + f'set this attribute. Got `{attn_implementation}` .') + + if getattr(cfg, 'use_varlen_attn', False): + assert SUPPORT_FLASH2, ('`flash_attn` is required if you set ' + '`use_varlen_attn` to True.') + attn_implementation = getattr(cfg.model.llm, 'attn_implementation', + None) + assert (attn_implementation is None or + attn_implementation == 'flash_attention_2'), \ + ('If you want to set `use_varlen_attn` to True, please set' + ' attn_implementation to `flash_attention_2` or do not ' + f'set this attribute. Got `{attn_implementation}` .') + + if args.deepspeed is None: + assert getattr(cfg, 'sequence_parallel_size', 1) == 1, \ + ('Sequence parallel training without DeepSpeed lacks validation.' 
+ 'Please use DeepSpeed to optimize the training phase by ' + '`--deepspeed deepspeed_zero1 (deepspeed_zero2 or ' + 'deepspeed_zero3)`.') + + + + + +def main(): + args = parse_args() + + # parse config + if not osp.isfile(args.config): + try: + args.config = cfgs_name_path[args.config] + except KeyError: + raise FileNotFoundError(f'Cannot find {args.config}') + + # load config + cfg = Config.fromfile(args.config) + set_model_resource(cfg) + + if args.cfg_options is not None: + cfg.merge_from_dict(args.cfg_options) + + # register FunctionType object in cfg to `MAP_FUNC` Registry and + # change these FunctionType object to str + register_function(cfg._cfg_dict) + + check_cfg(cfg, args) + + if cfg.get('framework', 'mmengine').lower() == 'huggingface': + # set default training_args + if cfg.get('training_args', None) is None: + cfg.training_args = dict(type=TrainingArguments) + if args.seed is not None: + cfg.training_args.seed = args.seed + # set work_dir + if args.work_dir is not None: + # update configs according to CLI args if args.work_dir is not None + cfg.training_args.output_dir = args.work_dir + elif cfg.training_args.get('output_dir', None) is None: + # use config filename as default work_dir if cfg.work_dir is None + cfg.training_args.output_dir = osp.join( + './work_dirs', + osp.splitext(osp.basename(args.config))[0]) + # enable deepspeed + if args.deepspeed: + if not osp.isfile(args.deepspeed): + try: + args.deepspeed = cfgs_name_path[args.deepspeed] + except KeyError: + raise FileNotFoundError(f'Cannot find {args.deepspeed}') + cfg.training_args.deepspeed = args.deepspeed + if cfg.training_args.get('deepspeed'): + device_map = None + else: + # Data Parallel + device_map = { + '': int(os.environ.get('LOCAL_RANK', args.local_rank)) + } + # build training_args + training_args = BUILDER.build(cfg.training_args) + # build model + with LoadWoInit(): + cfg.model.device_map = device_map + traverse_dict(cfg.model) + model = BUILDER.build(cfg.model) + model.config.use_cache = False + dispatch_modules(model) + if cfg.get('lora', None): + lora = BUILDER.build(cfg.lora) + model = prepare_model_for_kbit_training(model) + if lora.target_modules is None: + modules = find_all_linear_names(model) + lora.target_modules = modules + model = get_peft_model(model, lora) + + # build dataset + train_dataset = BUILDER.build(cfg.train_dataset) + data_collator = partial(default_collate_fn, return_hf_format=True) + # build trainer + trainer = cfg.trainer( + model=model, + args=training_args, + train_dataset=train_dataset, + data_collator=data_collator) + # training + trainer.train(resume_from_checkpoint=args.resume) + trainer.save_state() + trainer.save_model(output_dir=training_args.output_dir) + else: + if args.seed is not None and args.resume is None: + # Use args.seed + cfg.merge_from_dict(dict(randomness=dict(seed=args.seed))) + print_log( + f'Set the random seed to {args.seed}.', + logger='current', + level=logging.INFO) + elif args.resume is not None: + # Use resumed seed + from mmengine.fileio import PetrelBackend, get_file_backend + + from xtuner.utils.fileio import patch_fileio + backend = get_file_backend(args.resume) + if isinstance(backend, PetrelBackend): + with patch_fileio(): + resumed_seed = get_seed_from_checkpoint(args.resume) + else: + resumed_seed = get_seed_from_checkpoint(args.resume) + cfg.merge_from_dict(dict(randomness=dict(seed=resumed_seed))) + if args.seed is not None and args.seed != resumed_seed: + print_log( + (f'The value of random seed in resume checkpoint ' + 
f'"{args.resume}" is different from the value in ' + f'arguments. The resumed seed is {resumed_seed}, while ' + f'the input argument seed is {args.seed}. Using the ' + f'resumed seed {resumed_seed}.'), + logger='current', + level=logging.WARNING) + else: + print_log( + f'Set the random seed to {resumed_seed}.', + logger='current', + level=logging.INFO) + + if 'LOCAL_RANK' not in os.environ: + os.environ['LOCAL_RANK'] = str(args.local_rank) + cfg.launcher = args.launcher + # work_dir is determined in this priority: + # CLI > segment in file > filename + if args.work_dir is not None: + # update configs according to CLI args if args.work_dir is not None + cfg.work_dir = args.work_dir + elif cfg.get('work_dir', None) is None: + # use config filename as default work_dir if cfg.work_dir is None + cfg.work_dir = osp.join('./work_dirs', + osp.splitext(osp.basename(args.config))[0]) + + if args.deepspeed: + try: + import deepspeed + except ImportError: + raise ImportError( + 'deepspeed is not installed properly, please check.') + if digit_version(deepspeed.__version__) < digit_version('0.12.3'): + raise RuntimeError('Please upgrade your DeepSpeed version ' + 'by using the command pip install ' + '`deepspeed>=0.12.3`') + optim_wrapper = cfg.optim_wrapper.type + if optim_wrapper == 'DeepSpeedOptimWrapper': + print_log( + 'Deepspeed training is already enabled in your config.', + logger='current', + level=logging.WARNING) + else: + if not osp.isfile(args.deepspeed): + try: + args.deepspeed = cfgs_name_path[args.deepspeed] + except KeyError: + raise FileNotFoundError( + f'Cannot find {args.deepspeed}') + with open(args.deepspeed) as f: + ds_cfg = json.load(f) + + ds_grad_accum = ds_cfg.get('gradient_accumulation_steps', + 'auto') + mm_grad_accum = cfg.optim_wrapper.get('accumulative_counts', 1) + if ds_grad_accum != 'auto' and ds_grad_accum != mm_grad_accum: + print_log(('Mismatch on gradient_accumulation_steps: ' + f'MMEngine {mm_grad_accum}, ' + f'Deepspeed {ds_grad_accum}. ' + f'Set to {mm_grad_accum}'), + logger='current', + level=logging.WARNING) + grad_accum = mm_grad_accum + + ds_train_bs = ds_cfg.get('train_micro_batch_size_per_gpu', + 'auto') + mm_train_bs = cfg.train_dataloader.batch_size + if ds_train_bs != 'auto' and ds_train_bs != mm_train_bs: + print_log( + ('Mismatch on train_micro_batch_size_per_gpu: ' + f'MMEngine {mm_train_bs}, Deepspeed {ds_train_bs}. ' + f'Set to {mm_train_bs}'), + logger='current', + level=logging.WARNING) + train_bs = cfg.train_dataloader.batch_size + + ds_grad_clip = ds_cfg.get('gradient_clipping', 'auto') + clip_grad = cfg.optim_wrapper.get('clip_grad', None) + if clip_grad and clip_grad.get('max_norm', None) is not None: + mm_max_norm = cfg.optim_wrapper.clip_grad.max_norm + else: + mm_max_norm = 1.0 + if ds_grad_clip != 'auto' and ds_grad_clip != mm_max_norm: + print_log( + ('Mismatch on gradient_clipping: ' + f'MMEngine {mm_max_norm}, Deepspeed {ds_grad_clip}. 
' + f'Set to {mm_max_norm}'), + logger='current', + level=logging.WARNING) + grad_clip = mm_max_norm + ds_cfg = auto_dtype_of_deepspeed_config(ds_cfg) + exclude_frozen_parameters = True if digit_version( + deepspeed.__version__) >= digit_version('0.10.1') else None + strategy = dict( + type=LazyObject('xtuner.engine', 'DeepSpeedStrategy'), + config=ds_cfg, + gradient_accumulation_steps=grad_accum, + train_micro_batch_size_per_gpu=train_bs, + gradient_clipping=grad_clip, + exclude_frozen_parameters=exclude_frozen_parameters, + sequence_parallel_size=getattr(cfg, + 'sequence_parallel_size', + 1)) + cfg.__setitem__('strategy', strategy) + optim_wrapper = dict( + type='DeepSpeedOptimWrapper', + optimizer=cfg.optim_wrapper.optimizer) + cfg.__setitem__('optim_wrapper', optim_wrapper) + cfg.runner_type = 'FlexibleRunner' + + # resume is determined in this priority: resume from > auto_resume + if args.resume is not None: + cfg.resume = True + cfg.load_from = args.resume + + # build the runner from config + if 'runner_type' not in cfg: + # build the default runner + runner = Runner.from_cfg(cfg) + else: + # build customized runner from the registry + # if 'runner_type' is set in the cfg + runner = RUNNERS.build(cfg) + + # start training + runner.train() + + +if __name__ == '__main__': + main() diff --git a/data/xtuner/xtuner/tools/utils.py b/data/xtuner/xtuner/tools/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..1f08cc6317a3ce3b1d72af1e405ec9a610696357 --- /dev/null +++ b/data/xtuner/xtuner/tools/utils.py @@ -0,0 +1,193 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import os.path as osp +import re +import warnings + +import torch +from transformers import PreTrainedTokenizerFast, StoppingCriteriaList +from transformers.generation.streamers import BaseStreamer + +from xtuner.utils import StopWordStoppingCriteria + + +def get_base_model(model): + if hasattr(model, 'llm'): + model = model.llm + if 'PeftModel' in model.__class__.__name__: + model = model.base_model.model + return model + + +def get_streamer(model): + # TODO: deprecation, v0.3.0 + warnings.warn( + ('`get_streamer` is deprecated and will be removed in v0.3.0, ' + "use `transformers`'s `TextStreamer` instead."), DeprecationWarning) + if model.__class__.__name__ == 'InferenceEngine': + model = model.module + base_model = get_base_model(model) + base_model_name = base_model.__class__.__name__.lower() + is_internlm = 'internlm' in base_model_name + is_qwen = 'qwen' in base_model_name + is_baichuan = 'baichuan' in base_model_name + is_chatglm = 'chatglm' in base_model_name + no_space = is_internlm or is_qwen or is_baichuan or is_chatglm + if no_space: + return NoSpaceStreamer + else: + return DecodeOutputStreamer + +def set_model_resource(cfg): + if cfg.get("model_resource"): + fn = cfg["model_resource"].get("fn") + args = cfg["model_resource"].get("args", {}) + local_path = fn(cfg["pretrained_model_name_or_path"], **args) + s = [(cfg._cfg_dict, k, v) for k, v in cfg._cfg_dict.items()] + while s: + current_d, current_k, current_v = s.pop() + if current_k == "pretrained_model_name_or_path": + current_d[current_k] = local_path + + if isinstance(current_v, dict): + s.extend([(current_v, k, v) for k, v in current_v.items()]) + elif isinstance(current_v, list): + for i in current_v: + if isinstance(i, dict): + s.extend((i, k, v) for k, v in i.items()) + + +class DecodeOutputStreamer(BaseStreamer): + """Default streamer for HuggingFace models.""" + + def __init__(self, tokenizer, skip_prompt=True) -> None: + 
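+        # Added note: `BaseStreamer` subclasses implement the `put(value)`
+        # and `end()` callbacks that `model.generate(streamer=...)` invokes;
+        # `put` receives each batch of newly generated token ids and `end`
+        # fires once when generation finishes.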
super().__init__()
+        # TODO: deprecation, v0.3.0
+        warnings.warn(
+            '`DecodeOutputStreamer` is deprecated and will be '
+            'removed in v0.3.0.', DeprecationWarning)
+        self.tokenizer = tokenizer
+        self.skip_prompt = skip_prompt
+        self.gen_len = 0
+        if isinstance(tokenizer, PreTrainedTokenizerFast):
+            self.decode = self._decode_with_raw_id
+            self.hex_regex = re.compile(r'^<0x([0-9ABCDEF]+)>$')
+        else:
+            self.decode = self._decode_fallback
+
+    def _decode_with_raw_id(self, value):
+        """Convert token ids to tokens and decode."""
+
+        tok = self.tokenizer._convert_id_to_token(value)
+        if tok.startswith('▁'):  # sentencepiece
+            space = ' '
+            tok = tok[1:]
+        else:
+            space = ''
+        if res := self.hex_regex.match(tok):
+            tok = chr(int(res.group(1), 16))
+        if tok == '':
+            tok = '\n'
+        return space + tok
+
+    def _decode_fallback(self, value):
+        """Fallback decoder for non-fast tokenizer."""
+
+        tok = self.tokenizer.decode(
+            value,
+            skip_special_tokens=False,
+            clean_up_tokenization_spaces=False)
+        return tok + ' '
+
+    def put(self, value):
+        """Callback function to decode token and output to stdout."""
+
+        if self.gen_len == 0 and self.skip_prompt:
+            pass
+        else:
+            tok = self.decode(value[0])
+            print(tok, end='', flush=True)
+
+        self.gen_len += 1
+
+    def end(self):
+        """Callback function to finish generation."""
+
+        print('\n')
+
+
+class NoSpaceStreamer(DecodeOutputStreamer):
+
+    def __init__(self, tokenizer, skip_prompt=True) -> None:
+        # Call `BaseStreamer.__init__` directly to skip the deprecated
+        # parent initializer.
+        BaseStreamer.__init__(self)
+        # TODO: deprecation, v0.3.0
+        warnings.warn(
+            '`NoSpaceStreamer` is deprecated and will be '
+            'removed in v0.3.0.', DeprecationWarning)
+        self.tokenizer = tokenizer
+        self.skip_prompt = skip_prompt
+        self.gen_len = 0
+        self.hex_regex = re.compile(r'^<0x([0-9ABCDEF]+)>$')
+
+    def decode(self, value):
+        tok = self.tokenizer.decode(value)
+        if res := self.hex_regex.match(tok):
+            tok = chr(int(res.group(1), 16))
+        if tok == '' or tok == '\r':
+            tok = '\n'
+
+        return tok
+
+
+def get_stop_criteria(
+    tokenizer,
+    stop_words=[],
+):
+    stop_criteria = StoppingCriteriaList()
+    for word in stop_words:
+        stop_criteria.append(StopWordStoppingCriteria(tokenizer, word))
+    return stop_criteria
+
+
+def auto_dtype_of_deepspeed_config(ds_config):
+    if ds_config.get('fp16') and not ds_config.get('bf16'):
+        if ds_config.get('fp16').get('enabled') == 'auto':
+            ds_config['fp16']['enabled'] = torch.cuda.is_available()
+    elif not ds_config.get('fp16') and ds_config.get('bf16'):
+        if ds_config.get('bf16').get('enabled') == 'auto':
+            ds_config['bf16']['enabled'] = torch.cuda.is_bf16_supported()
+    elif ds_config.get('fp16') and ds_config.get('bf16'):
+        if ds_config.get('fp16').get('enabled') == 'auto':
+            ds_config['fp16']['enabled'] = torch.cuda.is_available()
+        if ds_config.get('bf16').get('enabled') == 'auto':
+            ds_config['bf16']['enabled'] = torch.cuda.is_bf16_supported()
+        if (ds_config['fp16']['enabled'] is True
+                and ds_config['bf16']['enabled'] is True):
+            ds_config['fp16']['enabled'] = False
+            ds_config['bf16']['enabled'] = True
+    return ds_config
+
+
+def is_cn_string(s):
+    if re.search('[\u4e00-\u9fff]', s):
+        return True
+    return False
+
+
+def get_seed_from_checkpoint(pth_model):
+    if osp.isfile(pth_model):
+        checkpoint = torch.load(pth_model, map_location='cpu')
+    elif osp.isdir(pth_model):
+        try:
+            from deepspeed.utils.zero_to_fp32 import get_model_state_files
+        except ImportError:
+            raise ImportError(
+                'The provided PTH model appears to be a DeepSpeed checkpoint. '
+                'However, DeepSpeed library is not detected in current ' 'environment. 
This suggests that DeepSpeed may not be ' + 'installed or is incorrectly configured. Please verify your ' + 'setup.') + filename = get_model_state_files(pth_model)[0] + checkpoint = torch.load(filename, map_location='cpu') + else: + raise FileNotFoundError(f'Cannot find {pth_model}') + return checkpoint['meta']['seed'] diff --git a/data/xtuner/xtuner/utils/__init__.py b/data/xtuner/xtuner/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6663b32253528a8d02b61e1dec07326116ba6130 --- /dev/null +++ b/data/xtuner/xtuner/utils/__init__.py @@ -0,0 +1,14 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .constants import (DEFAULT_IMAGE_TOKEN, DEFAULT_PAD_TOKEN_INDEX, + IGNORE_INDEX, IMAGE_TOKEN_INDEX) +from .handle_moe_load_and_save import (SUPPORT_MODELS, get_origin_state_dict, + load_state_dict_into_model) +from .stop_criteria import StopWordStoppingCriteria +from .templates import PROMPT_TEMPLATE, SYSTEM_TEMPLATE + +__all__ = [ + 'IGNORE_INDEX', 'DEFAULT_PAD_TOKEN_INDEX', 'PROMPT_TEMPLATE', + 'DEFAULT_IMAGE_TOKEN', 'SYSTEM_TEMPLATE', 'StopWordStoppingCriteria', + 'IMAGE_TOKEN_INDEX', 'load_state_dict_into_model', 'get_origin_state_dict', + 'SUPPORT_MODELS' +] diff --git a/data/xtuner/xtuner/utils/constants.py b/data/xtuner/xtuner/utils/constants.py new file mode 100644 index 0000000000000000000000000000000000000000..2862c8ab50bb3f811795f5b8aea0d991505d6a41 --- /dev/null +++ b/data/xtuner/xtuner/utils/constants.py @@ -0,0 +1,5 @@ +# Copyright (c) OpenMMLab. All rights reserved. +IGNORE_INDEX = -100 +DEFAULT_PAD_TOKEN_INDEX = 0 +IMAGE_TOKEN_INDEX = -200 +DEFAULT_IMAGE_TOKEN = '' diff --git a/data/xtuner/xtuner/utils/fileio.py b/data/xtuner/xtuner/utils/fileio.py new file mode 100644 index 0000000000000000000000000000000000000000..922146e584313f35b5cdcd76b3908ed0e4f7ce11 --- /dev/null +++ b/data/xtuner/xtuner/utils/fileio.py @@ -0,0 +1,345 @@ +import io +from contextlib import contextmanager + +import mmengine.fileio as fileio +from mmengine.fileio import LocalBackend, PetrelBackend, get_file_backend + + +def patch_func(module, fn_name_to_wrap): + backup = getattr(patch_func, '_backup', []) + fn_to_wrap = getattr(module, fn_name_to_wrap) + + def wrap(fn_new): + setattr(module, fn_name_to_wrap, fn_new) + backup.append((module, fn_name_to_wrap, fn_to_wrap)) + setattr(fn_new, '_fallback', fn_to_wrap) + setattr(patch_func, '_backup', backup) + return fn_new + + return wrap + + +@contextmanager +def patch_fileio(global_vars=None): + if getattr(patch_fileio, '_patched', False): + # Only patch once, avoid error caused by patch nestly. 
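+        # Added note: on re-entrant use, the patches installed by the outer
+        # `patch_fileio()` context are still active, so control is simply
+        # handed back without patching (or later restoring) anything again.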
+ yield + return + import builtins + + @patch_func(builtins, 'open') + def open(file, mode='r', *args, **kwargs): + backend = get_file_backend(file) + if isinstance(backend, LocalBackend): + return open._fallback(file, mode, *args, **kwargs) + if 'b' in mode: + return io.BytesIO(backend.get(file, *args, **kwargs)) + else: + return io.StringIO(backend.get_text(file, *args, **kwargs)) + + if global_vars is not None and 'open' in global_vars: + bak_open = global_vars['open'] + global_vars['open'] = builtins.open + + import os + + @patch_func(os.path, 'join') + def join(a, *paths): + backend = get_file_backend( + a.decode('utf-8') if isinstance(a, bytes) else a) + if isinstance(backend, LocalBackend): + return join._fallback(a, *paths) + paths = [item.lstrip('./') for item in paths if len(item) > 0] + return backend.join_path(a, *paths) + + @patch_func(os.path, 'isdir') + def isdir(path): + backend = get_file_backend(path) + if isinstance(backend, LocalBackend): + return isdir._fallback(path) + + return backend.isdir(path) + + @patch_func(os.path, 'isfile') + def isfile(path): + backend = get_file_backend(path) + if isinstance(backend, LocalBackend): + return isfile._fallback(path) + + return backend.isfile(path) + + @patch_func(os.path, 'exists') + def exists(path): + backend = get_file_backend(path) + if isinstance(backend, LocalBackend): + return exists._fallback(path) + return backend.exists(path) + + @patch_func(os, 'mkdir') + def mkdir(path, *args, **kwargs): + backend = get_file_backend(path) + if isinstance(backend, LocalBackend): + return mkdir._fallback(path, *args, **kwargs) + + @patch_func(os, 'makedirs') + def makedirs(path, *args, **kwargs): + backend = get_file_backend(path) + if isinstance(backend, LocalBackend): + return makedirs._fallback(path, *args, **kwargs) + + @patch_func(os, 'listdir') + def listdir(path): + backend = get_file_backend(path) + if isinstance(backend, LocalBackend): + return listdir._fallback(path) + return backend.list_dir_or_file(path) + + @patch_func(os, 'chmod') + def chmod(path, *args, **kwargs): + backend = get_file_backend(path) + if isinstance(backend, LocalBackend): + return chmod._fallback(path, *args, **kwargs) + + @patch_func(os, 'stat') + def stat(path, *args, **kwargs): + backend = get_file_backend(path) + if isinstance(backend, LocalBackend): + return stat._fallback(path, *args, **kwargs) + + import glob as glob_pkg + + @patch_func(glob_pkg, 'glob') + def glob(pathname, *, recursive=False): + backend = get_file_backend(pathname) + if isinstance(backend, LocalBackend): + return glob._fallback(pathname, recursive=recursive) + + if pathname.endswith('*_optim_states.pt'): + import os + pathname = os.path.split(pathname)[0] + files = backend.list_dir_or_file(pathname, recursive=recursive) + files = [ + os.path.join(pathname, f) for f in files + if f.endswith('_optim_states.pt') + ] + elif pathname.endswith('*_model_states.pt'): + import os + pathname = os.path.split(pathname)[0] + files = backend.list_dir_or_file(pathname, recursive=recursive) + files = [ + os.path.join(pathname, f) for f in files + if f.endswith('_model_states.pt') + ] + elif '*' in pathname: + raise NotImplementedError + else: + files = backend.list_dir_or_file(pathname, recursive=recursive) + + return files + + import filecmp + + @patch_func(filecmp, 'cmp') + def cmp(f1, f2, *args, **kwargs): + with fileio.get_local_path(f1) as f1, fileio.get_local_path(f2) as f2: + return cmp._fallback(f1, f2, *args, **kwargs) + + import shutil + + @patch_func(shutil, 'copy') + def copy(src, 
dst, **kwargs): + from pathlib import Path + + if isinstance(src, Path): + src = str(src).replace(':/', '://') + if isinstance(dst, Path): + dst = str(dst).replace(':/', '://') + + src_backend = get_file_backend(src) + dst_backend = get_file_backend(dst) + + if isinstance(src_backend, LocalBackend) and isinstance( + dst_backend, LocalBackend): + return copy._fallback(src, dst, **kwargs) + elif isinstance(src_backend, LocalBackend) and isinstance( + dst_backend, PetrelBackend): + return dst_backend.copyfile_from_local(str(src), str(dst)) + elif isinstance(src_backend, PetrelBackend) and isinstance( + dst_backend, LocalBackend): + return src_backend.copyfile_to_local(str(src), str(dst)) + + import torch + + @patch_func(torch, 'load') + def load(f, *args, **kwargs): + if isinstance(f, str): + f = io.BytesIO(fileio.get(f)) + return load._fallback(f, *args, **kwargs) + + @patch_func(torch, 'save') + def save(obj, f, *args, **kwargs): + backend = get_file_backend(f) + if isinstance(backend, LocalBackend): + return save._fallback(obj, f, *args, **kwargs) + + with io.BytesIO() as buffer: + save._fallback(obj, buffer, *args, **kwargs) + buffer.seek(0) + backend.put(buffer, f) + + # from tempfile import TemporaryDirectory + # import os + # with TemporaryDirectory(dir='/dev/shm') as tmpdir: + # suffix = os.path.split(f)[-1] + # tmppath = os.path.join._fallback(tmpdir, suffix) + # from mmengine import print_log + # print_log('write to tmp dir', logger='current') + # save._fallback(obj, tmppath, *args, **kwargs) + # print_log('write to ceph', logger='current') + + # with open(tmppath, 'rb') as buffer: + # backend.put(buffer, f) + + from sentencepiece import SentencePieceProcessor + + @patch_func(SentencePieceProcessor, 'LoadFromFile') + def LoadFromFile(cls, path): + if path: + backend = get_file_backend(path) + if isinstance(backend, LocalBackend): + return LoadFromFile._fallback(cls, path) + from tempfile import TemporaryDirectory + with TemporaryDirectory() as tmpdir: + local_path = backend.copyfile_to_local(path, tmpdir) + loaded_file = LoadFromFile._fallback(cls, local_path) + return loaded_file + else: + return LoadFromFile._fallback(cls, path) + + try: + setattr(patch_fileio, '_patched', True) + yield + finally: + for patched_fn in patch_func._backup: + (module, fn_name_to_wrap, fn_to_wrap) = patched_fn + setattr(module, fn_name_to_wrap, fn_to_wrap) + if global_vars is not None and 'open' in global_vars: + global_vars['open'] = bak_open + setattr(patch_fileio, '_patched', False) + + +def patch_hf_auto_from_pretrained(petrel_hub): + if hasattr(patch_hf_auto_from_pretrained, '_patched'): + return + + from peft import PeftModel + from transformers import (AutoConfig, AutoFeatureExtractor, + AutoImageProcessor, AutoModelForCausalLM, + AutoProcessor, AutoTokenizer, + ImageProcessingMixin, PreTrainedModel, + PreTrainedTokenizerBase, ProcessorMixin) + from transformers.models.auto.auto_factory import _BaseAutoModelClass + + target_cls = list(_BaseAutoModelClass.__subclasses__()) + target_cls.extend([AutoModelForCausalLM] + + AutoModelForCausalLM.__subclasses__()) + target_cls.extend([AutoConfig] + AutoConfig.__subclasses__()) + target_cls.extend([AutoTokenizer] + AutoTokenizer.__subclasses__()) + target_cls.extend([AutoImageProcessor] + + AutoImageProcessor.__subclasses__()) + target_cls.extend([AutoFeatureExtractor] + + AutoFeatureExtractor.__subclasses__()) + target_cls.extend([AutoProcessor] + AutoProcessor.__subclasses__()) + target_cls.extend([PreTrainedTokenizerBase] + + 
PreTrainedTokenizerBase.__subclasses__()) + target_cls.extend([ImageProcessingMixin] + + ImageProcessingMixin.__subclasses__()) + target_cls.extend([PreTrainedModel] + PreTrainedModel.__subclasses__()) + target_cls.extend([ProcessorMixin] + ProcessorMixin.__subclasses__()) + target_cls.extend([PeftModel] + PeftModel.__subclasses__()) + + import os + + @classmethod + def from_pretrained(cls, pretrained_model_name_or_path, *args, **kwargs): + with patch_fileio(): + model_path = pretrained_model_name_or_path + model_path = os.path.join(petrel_hub, model_path) + obj = cls._from_pretrained(model_path, *args, **kwargs) + return obj + + for cls in set(target_cls): + if not hasattr(cls, '_from_pretrained'): + cls._from_pretrained = cls.from_pretrained + cls.from_pretrained = from_pretrained + + patch_hf_auto_from_pretrained._patched = True + + +def patch_hf_save_pretrained(): + if hasattr(patch_hf_save_pretrained, '_patched'): + return + + import torch + from peft import PeftModel + from transformers import (AutoConfig, AutoTokenizer, PreTrainedModel, + PreTrainedTokenizerBase) + from transformers.models.auto.auto_factory import _BaseAutoModelClass + + target_cls = [] + target_cls.extend([AutoConfig] + AutoConfig.__subclasses__()) + target_cls.extend([AutoTokenizer] + AutoTokenizer.__subclasses__()) + target_cls.extend([PreTrainedTokenizerBase] + + PreTrainedTokenizerBase.__subclasses__()) + target_cls.extend([PreTrainedModel] + PreTrainedModel.__subclasses__()) + + target_cls.extend([_BaseAutoModelClass] + + _BaseAutoModelClass.__subclasses__()) + target_cls.extend([PeftModel] + PeftModel.__subclasses__()) + + def _patch_wrap(method): + + def wrapped_method(self, *args, **kwargs): + + with patch_fileio(): + kwargs['save_function'] = torch.save + kwargs['safe_serialization'] = False + + obj = method(self, *args, **kwargs) + return obj + + return wrapped_method + + for cls in set(target_cls): + if hasattr(cls, 'save_pretrained'): + cls.save_pretrained = _patch_wrap(cls.save_pretrained) + + patch_hf_save_pretrained._patched = True + + +def patch_deepspeed_engine(): + if hasattr(patch_deepspeed_engine, '_patched'): + return + + def _copy_recovery_script(self, save_path): + import os + from shutil import copyfile + + from deepspeed.utils import zero_to_fp32 + from mmengine import PetrelBackend, get_file_backend + script = 'zero_to_fp32.py' + + src = zero_to_fp32.__file__ + dst = os.path.join(save_path, script) + + backend = get_file_backend(save_path) + if isinstance(backend, PetrelBackend): + backend.copyfile_from_local(src, dst) + else: + copyfile(src, dst) + self._change_recovery_script_permissions(dst) + + from deepspeed.runtime.engine import DeepSpeedEngine + DeepSpeedEngine._copy_recovery_script = _copy_recovery_script + + patch_deepspeed_engine._patched = True diff --git a/data/xtuner/xtuner/utils/handle_moe_load_and_save.py b/data/xtuner/xtuner/utils/handle_moe_load_and_save.py new file mode 100644 index 0000000000000000000000000000000000000000..88a3936a84b8de7311e3a00d7e0661a2a3265736 --- /dev/null +++ b/data/xtuner/xtuner/utils/handle_moe_load_and_save.py @@ -0,0 +1,232 @@ +import json +import os +import re +from collections import OrderedDict + +import deepspeed +import torch +import torch.distributed as dist +import torch.nn as nn +from mmengine import print_log +from transformers.integrations import is_deepspeed_zero3_enabled +from transformers.modeling_utils import load_state_dict +from transformers.utils import (SAFE_WEIGHTS_INDEX_NAME, WEIGHTS_INDEX_NAME, + is_safetensors_available) + 
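+# Added layout note (inferred from the merge/split helpers below): for each
+# expert shard, the per-expert gate_proj/up_proj weights are stacked and
+# concatenated into a single `w1w3` tensor of shape
+# (experts_per_shard, 2 * ffn_dim, hidden_dim), while the down_proj weights
+# become one `w2` tensor of shape (experts_per_shard, hidden_dim, ffn_dim).
+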
+SUPPORT_MODELS = ( + 'DeepseekV2ForCausalLM', + 'MixtralForCausalLM', +) + +ORDER_MAPPING = dict( + DeepseekV2ForCausalLM=dict(down_proj=0, gate_proj=1, up_proj=2), + MixtralForCausalLM=dict(down_proj=1, gate_proj=0, up_proj=2), +) + +PARAM_NAME_MAPPING = dict( + DeepseekV2ForCausalLM=dict( + gate_proj='gate_proj', up_proj='up_proj', down_proj='down_proj'), + MixtralForCausalLM=dict(gate_proj='w1', up_proj='w3', down_proj='w2'), +) + + +def print_on_rank0(info): + if dist.get_rank() == 0: + print_log(info, 'current') + + +def get_expert_num_per_shard(model): + for module in model.modules(): + if hasattr(module, 'expert_in_one_shard'): + return module.expert_in_one_shard + + +def mix_sort(expert_name): + components = re.findall(r'(\D+|\d+)', expert_name) + out = [int(comp) if comp.isdigit() else comp for comp in components] + return tuple(out) + + +def _get_merged_param_name(origin_param_name, expert_num_per_shard): + split_name = origin_param_name.split('.experts.') + expert_idx = re.findall(r'\d+', split_name[1])[0] + expert_idx = int(expert_idx) + assert expert_idx % expert_num_per_shard == 0 + shard_idx = expert_idx // expert_num_per_shard + w1w3 = split_name[0] + f'.experts.{shard_idx}.w1w3' + w2 = split_name[0] + f'.experts.{shard_idx}.w2' + return w1w3, w2 + + +def _merge_experts_weight(state_dict, expert_num_per_shard, order_mapping): + experts_name = [key for key in state_dict.keys() if '.experts.' in key] + experts_name = sorted(experts_name, key=mix_sort) + linear_num_per_expert = 3 + linear_num_per_shard = expert_num_per_shard * linear_num_per_expert + expert_shard_num = len(experts_name) // linear_num_per_shard + for shard_idx in range(expert_shard_num): + begin, end = shard_idx * linear_num_per_shard, ( + shard_idx + 1) * linear_num_per_shard + experts_name_cur = experts_name[begin:end] + + down_proj_weight = [ + state_dict.pop(key) + for key in experts_name_cur[order_mapping['down_proj']::3] + ] + gate_proj_weight = [ + state_dict.pop(key) + for key in experts_name_cur[order_mapping['gate_proj']::3] + ] + up_proj_weight = [ + state_dict.pop(key) + for key in experts_name_cur[order_mapping['up_proj']::3] + ] + w1 = torch.stack(gate_proj_weight) + w3 = torch.stack(up_proj_weight) + w1w3 = torch.cat([w1, w3], dim=1) + assert w1w3.ndim == 3, w1w3.shape + w2 = torch.stack(down_proj_weight) + assert w2.ndim == 3, w2.shape + merged_key_w1w3, merged_key_w2 = _get_merged_param_name( + experts_name_cur[0], expert_num_per_shard) + print_on_rank0(f'merged key {merged_key_w1w3}') + state_dict[merged_key_w1w3] = w1w3 + print_on_rank0(f'merged key {merged_key_w2}') + state_dict[merged_key_w2] = w2 + + return + + +def load_state_dict_into_model(model_to_load, pretrained_model_path): + + model_name = type(model_to_load).__name__ + if model_name not in SUPPORT_MODELS: + raise RuntimeError( + f'Only models in {SUPPORT_MODELS} may need to load pretrained ' + f'weights via `load_state_dict_into_model`, but got {model_name}.') + order_mapping = ORDER_MAPPING[model_name] + + index_file = os.path.join(pretrained_model_path, WEIGHTS_INDEX_NAME) + safe_index_file = os.path.join(pretrained_model_path, + SAFE_WEIGHTS_INDEX_NAME) + index_present = os.path.isfile(index_file) + safe_index_present = os.path.isfile(safe_index_file) + assert index_present or (safe_index_present and is_safetensors_available()) + if safe_index_present and is_safetensors_available(): + load_index = safe_index_file + else: + load_index = index_file + with open(load_index, encoding='utf-8') as f: + index = json.load(f) + 
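+    # Added note: `weight_map` in a HF shard index maps each parameter name
+    # to the shard file that stores it, e.g.
+    #   {"model.layers.0.mlp.gate_proj.weight":
+    #    "pytorch_model-00001-of-00004.bin"}
+    # (illustrative entry, not taken from a real index file).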
weight_map = index['weight_map'] + unloaded_shard_files = list(set(weight_map.values())) + unloaded_shard_files.sort(reverse=True) + + expert_num_per_shard = get_expert_num_per_shard(model_to_load) + error_msgs = [] + + def load(module: nn.Module, state_dict, unloaded_shard_files, prefix=''): + params_to_gather = [] + param_names = [] + for name, param in module.named_parameters( + prefix=prefix[:-1], recurse=False): + while name not in state_dict: + assert len(unloaded_shard_files) > 0 + shard_file = unloaded_shard_files.pop() + shard_file = os.path.join(pretrained_model_path, shard_file) + print_on_rank0( + f'{name} not in state_dict, loading {shard_file}') + new_shard = load_state_dict(shard_file, is_quantized=False) + state_dict.update(new_shard) + _merge_experts_weight(state_dict, expert_num_per_shard, + order_mapping) + params_to_gather.append(param) + param_names.append(name) + if len(params_to_gather) > 0: + args = (state_dict, prefix, {}, True, [], [], error_msgs) + if is_deepspeed_zero3_enabled(): + with deepspeed.zero.GatheredParameters( + params_to_gather, modifier_rank=0): + if dist.get_rank() == 0: + module._load_from_state_dict(*args) + else: + module._load_from_state_dict(*args) + + for name in param_names: + print_on_rank0(f'state_dict pop {name}') + state_dict.pop(name) + + for name, child in module._modules.items(): + if child is not None: + load(child, state_dict, unloaded_shard_files, + prefix + name + '.') + + state_dict = OrderedDict() + load(model_to_load, state_dict, unloaded_shard_files, prefix='') + print_on_rank0(f'{state_dict.keys()}') + del state_dict + + return error_msgs + + +def _get_origin_param_name(merged_param_name, expert_num_per_shard, is_w1w3, + param_name_mapping): + split_name = merged_param_name.split('.experts.') + shard_idx = re.findall(r'\d+', split_name[1])[0] + shard_idx = int(shard_idx) + origin_param_names = [None] * (expert_num_per_shard * (1 + int(is_w1w3))) + expert_idx_begin = expert_num_per_shard * shard_idx + for i in range(expert_num_per_shard): + if is_w1w3: + gate_proj, up_proj = param_name_mapping[ + 'gate_proj'], param_name_mapping['up_proj'] + gate = split_name[ + 0] + f'.experts.{expert_idx_begin + i}.{gate_proj}.weight' + up = split_name[ + 0] + f'.experts.{expert_idx_begin + i}.{up_proj}.weight' + origin_param_names[i * 2] = gate + origin_param_names[i * 2 + 1] = up + else: + down_proj = param_name_mapping['down_proj'] + down = split_name[ + 0] + f'.experts.{expert_idx_begin + i}.{down_proj}.weight' + origin_param_names[i] = down + return origin_param_names + + +def _split_param(merged_param, is_w1w3): + if is_w1w3: + expert_num, _, hidden_dim = merged_param.shape + merged_param = merged_param.view(expert_num * 2, -1, hidden_dim) + return torch.unbind(merged_param, dim=0) + else: + # (e, hidden_dim, ffn_dim) + return torch.unbind(merged_param, dim=0) + + +def get_origin_state_dict(state_dict, model): + + model_name = type(model).__name__ + if model_name not in SUPPORT_MODELS: + raise RuntimeError( + f'Only models in {SUPPORT_MODELS} may need to convert state_dict ' + f'via `get_origin_state_dict` interface, but got {model_name}.') + param_name_mapping = PARAM_NAME_MAPPING[model_name] + + expert_num_per_shard = get_expert_num_per_shard(model) + experts_param_name = [ + name for name in state_dict.keys() if '.experts.' 
in name
+    ]
+    for expert_param_name in experts_param_name:
+        print_on_rank0(f'processing {expert_param_name} ...')
+        is_w1w3 = expert_param_name.split('.')[-1] == 'w1w3'
+        origin_param_names = _get_origin_param_name(expert_param_name,
+                                                    expert_num_per_shard,
+                                                    is_w1w3,
+                                                    param_name_mapping)
+        merged_param = state_dict.pop(expert_param_name)
+        origin_params = _split_param(merged_param, is_w1w3)
+        assert len(origin_param_names) == len(origin_params)
+        for name, param in zip(origin_param_names, origin_params):
+            state_dict[name] = param
+    return state_dict
diff --git a/data/xtuner/xtuner/utils/stop_criteria.py b/data/xtuner/xtuner/utils/stop_criteria.py
new file mode 100644
index 0000000000000000000000000000000000000000..954cc9d700af18f4951eab4fa881cc34d900f365
--- /dev/null
+++ b/data/xtuner/xtuner/utils/stop_criteria.py
@@ -0,0 +1,16 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from transformers import StoppingCriteria
+
+
+class StopWordStoppingCriteria(StoppingCriteria):
+    """StopWord stopping criteria."""
+
+    def __init__(self, tokenizer, stop_word):
+        self.tokenizer = tokenizer
+        self.stop_word = stop_word
+        self.length = len(self.stop_word)
+
+    def __call__(self, input_ids, *args, **kwargs) -> bool:
+        cur_text = self.tokenizer.decode(input_ids[0])
+        cur_text = cur_text.replace('\r', '').replace('\n', '')
+        return cur_text[-self.length:] == self.stop_word
diff --git a/data/xtuner/xtuner/utils/templates.py b/data/xtuner/xtuner/utils/templates.py
new file mode 100644
index 0000000000000000000000000000000000000000..0e5732a3ed3f7ebc08b6940c3f39c850d2f8c61f
--- /dev/null
+++ b/data/xtuner/xtuner/utils/templates.py
@@ -0,0 +1,201 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from mmengine.config import ConfigDict
+
+# - Turn 0: SYSTEM + INSTRUCTION, [output + SUFFIX], SEP
+# - Turn 1: INSTRUCTION, [output + SUFFIX], SEP
+# - Turn ...
+# Note: [] means having supervised loss during the fine-tuning
+PROMPT_TEMPLATE = ConfigDict(
+    default=dict(
+        SYSTEM='<|System|>:{system}\n',
+        INSTRUCTION='<|User|>:{input}\n<|Bot|>:',
+        SEP='\n'),
+    zephyr=dict(
+        SYSTEM='<|system|>\n{system}\n',
+        INSTRUCTION='<|user|>\n{input}\n<|assistant|>\n',
+        SEP='\n'),
+    internlm_chat=dict(
+        SYSTEM='<|System|>:{system}\n',
+        INSTRUCTION='<|User|>:{input}\n<|Bot|>:',
+        SUFFIX='<eoa>',
+        SUFFIX_AS_EOS=True,
+        SEP='\n',
+        STOP_WORDS=['<eoa>']),
+    internlm2_chat=dict(
+        SYSTEM='<|im_start|>system\n{system}<|im_end|>\n',
+        INSTRUCTION=('<|im_start|>user\n{input}<|im_end|>\n'
+                     '<|im_start|>assistant\n'),
+        SUFFIX='<|im_end|>',
+        SUFFIX_AS_EOS=True,
+        SEP='\n',
+        STOP_WORDS=['<|im_end|>']),
+    moss_sft=dict(
+        SYSTEM='{system}\n',
+        INSTRUCTION='<|Human|>: {input}<eoh>\n',
+        SEP='\n',
+        STOP_WORDS=['<eoc>', '<eom>']),
+    llama2_chat=dict(
+        SYSTEM=(
+            '[INST] <<SYS>>\n You are a helpful, respectful and honest '
+            'assistant. Always answer as helpfully as possible, while being '
+            'safe. Your answers should not include any harmful, unethical, '
+            'racist, sexist, toxic, dangerous, or illegal content. Please '
+            'ensure that your responses are socially unbiased and positive in '
+            'nature.\n{system}\n<</SYS>>\n [/INST] '),
+        INSTRUCTION='[INST] {input} [/INST]',
+        SEP='\n'),
+    code_llama_chat=dict(
+        SYSTEM='{system}\n', INSTRUCTION='[INST] {input} [/INST]'),
+    chatglm2=dict(
+        SYSTEM='{system}\n',
+        INSTRUCTION='[Round {round}]\n\n问:{input}\n\n答:',
+        SEP='\n\n'),
+    chatglm3=dict(
+        SYSTEM='<|system|>\n{system}',
+        INSTRUCTION='<|user|>\n{input}<|assistant|>\n',
+        SEP='\n'),
+    qwen_chat=dict(
+        SYSTEM=('<|im_start|>system\n{system}<|im_end|>\n'),
+        INSTRUCTION=('<|im_start|>user\n{input}<|im_end|>\n'
+                     '<|im_start|>assistant\n'),
+        SUFFIX='<|im_end|>',
+        SUFFIX_AS_EOS=True,
+        SEP='\n',
+        STOP_WORDS=['<|im_end|>', '<|endoftext|>']),
+    baichuan_chat=dict(
+        SYSTEM='{system}\n',
+        INSTRUCTION='<reserved_102>{input}<reserved_103>',
+        SEP='\n'),
+    baichuan2_chat=dict(
+        SYSTEM='{system}\n',
+        INSTRUCTION='<reserved_106>{input}<reserved_107>',
+        SEP='\n'),
+    wizardlm=dict(
+        SYSTEM=('A chat between a curious user and an artificial '
+                'intelligence assistant. The assistant gives '
+                'helpful, detailed, and polite answers to the '
+                'user\'s questions. {system}\n '),
+        INSTRUCTION=('USER: {input} ASSISTANT:'),
+        SEP='\n'),
+    wizardcoder=dict(
+        SYSTEM=(
+            'Below is an instruction that describes a task. '
+            'Write a response that appropriately completes the request.\n\n'
+            '{system}\n '),
+        INSTRUCTION=('### Instruction:\n{input}\n\n### Response:'),
+        SEP='\n\n'),
+    vicuna=dict(
+        SYSTEM=('A chat between a curious user and an artificial '
+                'intelligence assistant. The assistant gives '
+                'helpful, detailed, and polite answers to the '
+                'user\'s questions. {system}\n '),
+        INSTRUCTION=('USER: {input} ASSISTANT:'),
+        SEP='\n'),
+    deepseek_coder=dict(
+        SYSTEM=('You are an AI programming assistant, utilizing '
+                'the DeepSeek Coder model, developed by DeepSeek '
+                'Company, and you only answer questions related '
+                'to computer science. For politically sensitive '
+                'questions, security and privacy issues, and '
+                'other non-computer science questions, you will '
+                'refuse to answer. {system}\n'),
+        INSTRUCTION=('### Instruction:\n{input}\n### Response:\n'),
+        SEP='\n'),
+    # TODO: deprecation, v0.2.0
+    deepseekcoder=dict(
+        SYSTEM=('You are an AI programming assistant, utilizing '
+                'the DeepSeek Coder model, developed by DeepSeek '
+                'Company, and you only answer questions related '
+                'to computer science. For politically sensitive '
+                'questions, security and privacy issues, and '
+                'other non-computer science questions, you will '
+                'refuse to answer. {system}\n'),
+        INSTRUCTION=('### Instruction:\n{input}\n### Response:\n'),
+        SEP='\n'),
+    deepseek_moe=dict(
+        SYSTEM=('[INST] {system} [/INST]\n'),
+        INSTRUCTION=('[INST] {input} [/INST]'),
+        SEP='\n'),
+    deepseek_v2=dict(
+        SYSTEM='{system}\n\n',
+        INSTRUCTION='User: {input}\n\nAssistant: ',
+        SUFFIX='<|end▁of▁sentence|>',
+        SUFFIX_AS_EOS=True,
+        STOP_WORDS=['<|end▁of▁sentence|>']),
+    mistral=dict(
+        SYSTEM=('[INST] {system} [/INST]\n'),
+        INSTRUCTION=('[INST] {input} [/INST]'),
+        SEP='\n'),
+    mixtral=dict(
+        SYSTEM=('[INST] {system} [/INST]\n'),
+        INSTRUCTION=('[INST] {input} [/INST]'),
+        SEP='\n'),
+    minicpm=dict(INSTRUCTION=('<用户> {input} <AI>'), SEP='\n'),
+    minicpm3=dict(
+        SYSTEM=('<|im_start|>system\n{system}<|im_end|>\n'),
+        INSTRUCTION=('<|im_start|>user\n{input}<|im_end|>\n'
+                     '<|im_start|>assistant\n'),
+        SUFFIX='<|im_end|>',
+        SUFFIX_AS_EOS=True,
+        SEP='\n',
+        STOP_WORDS=['<|im_end|>', '<|endoftext|>']),
+    gemma=dict(
+        # `system` field is extended by xtuner
+        SYSTEM=('<start_of_turn>system\n{system}<end_of_turn>\n'),
+        INSTRUCTION=('<start_of_turn>user\n{input}<end_of_turn>\n'
+                     '<start_of_turn>model\n'),
+        SUFFIX='<end_of_turn>',
+        SUFFIX_AS_EOS=False,
+        SEP='\n',
+        STOP_WORDS=['<end_of_turn>']),
+    cohere_chat=dict(
+        SYSTEM=('<|START_OF_TURN_TOKEN|><|SYSTEM_TOKEN|>{system}'
+                '<|END_OF_TURN_TOKEN|>'),
+        INSTRUCTION=(
+            '<|START_OF_TURN_TOKEN|><|USER_TOKEN|>{input}<|END_OF_TURN_TOKEN|>'
+            '<|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>'),
+        SUFFIX='<|END_OF_TURN_TOKEN|>',
+        SUFFIX_AS_EOS=True,
+        STOP_WORDS=['<|END_OF_TURN_TOKEN|>']),
+    llama3_chat=dict(
+        SYSTEM=('<|start_header_id|>system<|end_header_id|>\n\n'
+                '{system}<|eot_id|>'),
+        INSTRUCTION=(
+            '<|start_header_id|>user<|end_header_id|>\n\n{input}<|eot_id|>'
+            '<|start_header_id|>assistant<|end_header_id|>\n\n'),
+        SUFFIX='<|eot_id|>',
+        SUFFIX_AS_EOS=True,
+        STOP_WORDS=['<|eot_id|>']),
+    phi3_chat=dict(
+        SYSTEM='<|system|>\n{system}<|end|>\n',
+        INSTRUCTION='<|user|>\n{input}<|end|>\n<|assistant|>\n',
+        SUFFIX='<|end|>',
+        SUFFIX_AS_EOS=True,
+        SEP='\n',
+        STOP_WORDS=['<|end|>']),
+)
+
+SYSTEM_TEMPLATE = ConfigDict(
+    moss_sft=('You are an AI assistant whose name is {bot_name}.\n'
+              'Capabilities and tools that {bot_name} can possess.\n'
+              '- Inner thoughts: enabled.\n'
+              '- Web search: enabled. API: Search(query)\n'
+              '- Calculator: enabled. API: Calculate(expression)\n'
+              '- Equation solver: enabled. API: Solve(equation)\n'
+              '- Text-to-image: disabled.\n'
+              '- Image edition: disabled.\n'
+              '- Text-to-speech: disabled.\n'),
+    alpaca=('Below is an instruction that describes a task. '
            'Write a response that appropriately completes the request.\n'),
+    arxiv_gentile=('If you are an expert in writing papers, please generate '
+                   "a good paper title for this paper based on other authors' "
+                   'descriptions of their abstracts.\n'),
+    colorist=('You are a professional color designer. Please provide the '
+              'corresponding colors based on the description of Human.\n'),
+    coder=('You are a professional programmer. 
Please provide the ' + 'corresponding code based on the description of Human.\n'), + lawyer='你现在是一名专业的中国律师,请根据用户的问题给出准确、有理有据的回复。\n', + medical='如果你是一名医生,请根据患者的描述回答医学问题。\n', + sql=('If you are an expert in SQL, please generate a good SQL Query ' + 'for Question based on the CREATE TABLE statement.\n'), +) diff --git a/data/xtuner/xtuner/utils/zero_to_any_dtype.py b/data/xtuner/xtuner/utils/zero_to_any_dtype.py new file mode 100644 index 0000000000000000000000000000000000000000..efe1fc0a12b3acee980a42c378a28b42532de256 --- /dev/null +++ b/data/xtuner/xtuner/utils/zero_to_any_dtype.py @@ -0,0 +1,696 @@ +#!/usr/bin/env python + +# Copyright (c) Microsoft Corporation. +# SPDX-License-Identifier: Apache-2.0 + +# DeepSpeed Team + +# This script extracts consolidated weights from a zero 1, 2 and 3 DeepSpeed +# checkpoints. It gets copied into the top level checkpoint dir, so the user +# can easily do the conversion at any point in the future. Once extracted, the +# weights don't require DeepSpeed and can be used in any application. +# +# example: python zero_to_any_dtype.py . pytorch_model.bin + +import argparse +import glob +import math +import os +import re +from collections import OrderedDict +from dataclasses import dataclass + +import torch +# yapf: disable +from deepspeed.checkpoint.constants import (BUFFER_NAMES, DS_VERSION, + FP32_FLAT_GROUPS, + FROZEN_PARAM_FRAGMENTS, + FROZEN_PARAM_SHAPES, + OPTIMIZER_STATE_DICT, PARAM_SHAPES, + PARTITION_COUNT, + SINGLE_PARTITION_OF_FP32_GROUPS, + ZERO_STAGE) +# while this script doesn't use deepspeed to recover data, since the +# checkpoints are pickled with DeepSpeed data structures it has to be +# available in the current python environment. +from deepspeed.utils import logger +from tqdm import tqdm + +# yapf: enable + + +@dataclass +class zero_model_state: + buffers: dict() + param_shapes: dict() + shared_params: list + ds_version: int + frozen_param_shapes: dict() + frozen_param_fragments: dict() + + +debug = 0 + +# load to cpu +device = torch.device('cpu') + +DEFAULT_DTYPE = torch.float16 + + +def atoi(text): + return int(text) if text.isdigit() else text + + +def natural_keys(text): + """alist.sort(key=natural_keys) sorts in human order + http://nedbatchelder.com/blog/200712/human_sorting.html (See Toothy's + implementation in the comments)""" + return [atoi(c) for c in re.split(r'(\d+)', text)] + + +def get_model_state_file(checkpoint_dir, zero_stage): + if not os.path.isdir(checkpoint_dir): + raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist") + + # there should be only one file + if zero_stage <= 2: + file = os.path.join(checkpoint_dir, 'mp_rank_00_model_states.pt') + elif zero_stage == 3: + file = os.path.join(checkpoint_dir, + 'zero_pp_rank_0_mp_rank_00_model_states.pt') + + if not os.path.exists(file): + raise FileNotFoundError(f"can't find model states file at '{file}'") + + return file + + +def get_checkpoint_files(checkpoint_dir, glob_pattern): + # XXX: need to test that this simple glob rule works for multi-node + # setup too + ckpt_files = sorted( + glob.glob(os.path.join(checkpoint_dir, glob_pattern)), + key=natural_keys) + + if len(ckpt_files) == 0: + raise FileNotFoundError( + f"can't find {glob_pattern} files in directory '{checkpoint_dir}'") + + return ckpt_files + + +def get_optim_files(checkpoint_dir): + return get_checkpoint_files(checkpoint_dir, '*_optim_states.pt') + + +def get_model_state_files(checkpoint_dir): + return get_checkpoint_files(checkpoint_dir, '*_model_states.pt') + + +def 
parse_model_states(files, dtype=DEFAULT_DTYPE): + zero_model_states = [] + for file in files: + state_dict = torch.load(file, map_location=device) + + if BUFFER_NAMES not in state_dict: + raise ValueError(f'{file} is not a model state checkpoint') + buffer_names = state_dict[BUFFER_NAMES] + if debug: + print('Found buffers:', buffer_names) + + buffers = { + k: v.to(dtype) + for k, v in state_dict['module'].items() if k in buffer_names + } + param_shapes = state_dict[PARAM_SHAPES] + + # collect parameters that are included in param_shapes + param_names = [] + for s in param_shapes: + for name in s.keys(): + param_names.append(name) + + # update with frozen parameters + frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None) + if frozen_param_shapes is not None: + if debug: + print(f'Found frozen_param_shapes: {frozen_param_shapes}') + param_names += list(frozen_param_shapes.keys()) + + # handle shared params + shared_params = [[k, v] + for k, v in state_dict['shared_params'].items()] + + ds_version = state_dict.get(DS_VERSION, None) + + frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None) + + z_model_state = zero_model_state( + buffers=buffers, + param_shapes=param_shapes, + shared_params=shared_params, + ds_version=ds_version, + frozen_param_shapes=frozen_param_shapes, + frozen_param_fragments=frozen_param_fragments) + zero_model_states.append(z_model_state) + + return zero_model_states + + +@torch.no_grad() +def parse_optim_states(files, ds_checkpoint_dir, dtype=DEFAULT_DTYPE): + + zero_stage = None + world_size = None + total_files = len(files) + flat_groups = [] + for f in tqdm(files, desc='Load Checkpoints'): + state_dict = torch.load(f, map_location=device) + if ZERO_STAGE not in state_dict[OPTIMIZER_STATE_DICT]: + raise ValueError(f'{f} is not a zero checkpoint') + + zero_stage = state_dict[OPTIMIZER_STATE_DICT][ZERO_STAGE] + world_size = state_dict[OPTIMIZER_STATE_DICT][PARTITION_COUNT] + + # the groups are named differently in each stage + if zero_stage <= 2: + fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS + elif zero_stage == 3: + fp32_groups_key = FP32_FLAT_GROUPS + else: + raise ValueError(f'unknown zero stage {zero_stage}') + + # immediately discard the potentially huge 2 optimizer states as we + # only care for fp32 master weights and also handle the case where it + # was already removed by another helper script + state_dict['optimizer_state_dict'].pop('optimizer_state_dict', None) + fp32_groups = state_dict['optimizer_state_dict'].pop(fp32_groups_key) + if zero_stage <= 2: + flat_groups.append([param.to(dtype) for param in fp32_groups]) + elif zero_stage == 3: + # if there is more than one param group, there will be multiple + # flattened tensors - one flattened tensor per group - for + # simplicity merge them into a single tensor + + # XXX: could make the script more memory efficient for when there + # are multiple groups - it will require matching the sub-lists of + # param_shapes for each param group flattened tensor + flat_groups.append(torch.cat(fp32_groups, 0).to(dtype)) + + # For ZeRO-2 each param group can have different partition_count as data + # parallelism for expert parameters can be different from data parallelism + # for non-expert parameters. So we can just use the max of the + # partition_count to get the dp world_size. 
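+    # Illustrative example (assumption): with 8 data-parallel ranks and
+    # two param groups, PARTITION_COUNT may deserialize as the list
+    # [8, 8]; max([8, 8]) == 8 then recovers the dp world_size that is
+    # validated against the number of '*_optim_states.pt' files below.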
+ if type(world_size) is list: + world_size = max(world_size) + + if world_size != total_files: + raise ValueError( + f"Expected {world_size} of '*_optim_states.pt' under " + f"'{ds_checkpoint_dir}' but found {total_files} files. " + 'Possibly due to an overwrite of an old checkpoint, ' + "or a checkpoint didn't get saved by one or more processes.") + + return zero_stage, world_size, flat_groups + + +def _get_state_dict_from_zero_checkpoint(ds_checkpoint_dir, + exclude_frozen_parameters, + dtype=DEFAULT_DTYPE): + """Returns state_dict reconstructed from ds checkpoint. + + Args: + - ``ds_checkpoint_dir``: path to the deepspeed checkpoint folder + (where the optimizer files are) + """ + print(f"Processing zero checkpoint '{ds_checkpoint_dir}'") + + optim_files = get_optim_files(ds_checkpoint_dir) + zero_stage, world_size, flat_groups = parse_optim_states( + optim_files, ds_checkpoint_dir, dtype) + print(f'Detected checkpoint of type zero stage {zero_stage}, ' + f'world_size: {world_size}') + + model_files = get_model_state_files(ds_checkpoint_dir) + + zero_model_states = parse_model_states(model_files) + print(f'Parsing checkpoint created by deepspeed==' + f'{zero_model_states[0].ds_version}') + + if zero_stage <= 2: + return _get_state_dict_from_zero2_checkpoint( + world_size, flat_groups, zero_model_states, + exclude_frozen_parameters) + elif zero_stage == 3: + return _get_state_dict_from_zero3_checkpoint( + world_size, flat_groups, zero_model_states, + exclude_frozen_parameters) + + +def _zero2_merge_frozen_params(state_dict, zero_model_states): + if zero_model_states[0].frozen_param_shapes is None or len( + zero_model_states[0].frozen_param_shapes) == 0: + return + + frozen_param_shapes = zero_model_states[0].frozen_param_shapes + frozen_param_fragments = zero_model_states[0].frozen_param_fragments + + if debug: + num_elem = sum(s.numel() for s in frozen_param_shapes.values()) + print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}') + + wanted_params = len(frozen_param_shapes) + wanted_numel = sum(s.numel() for s in frozen_param_shapes.values()) + avail_numel = sum([p.numel() for p in frozen_param_fragments.values()]) + print(f'Frozen params: Have {avail_numel} numels to process.') + print(f'Frozen params: Need {wanted_numel} numels in ' + f'{wanted_params} params') + + total_params = 0 + total_numel = 0 + for name, shape in frozen_param_shapes.items(): + total_params += 1 + unpartitioned_numel = shape.numel() + total_numel += unpartitioned_numel + + state_dict[name] = frozen_param_fragments[name] + + if debug: + print(f'{name} full shape: {shape} unpartitioned numel ' + f'{unpartitioned_numel} ') + + print(f'Reconstructed Frozen state dict with {total_params} params ' + f'{total_numel} elements') + + +def _has_callable(obj, fn): + attr = getattr(obj, fn, None) + return callable(attr) + + +def _zero2_merge_trainable_params(state_dict, world_size, flat_groups, + zero_model_states): + param_shapes = zero_model_states[0].param_shapes + + # Reconstruction protocol: + # + # XXX: document this + + if debug: + for i in range(world_size): + for j in range(len(flat_groups[0])): + print(f'flat_groups[{i}][{j}].shape={flat_groups[i][j].shape}') + + # XXX: memory usage doubles here (zero2) + num_param_groups = len(flat_groups[0]) + merged_single_partition_of_groups = [] + for i in range(num_param_groups): + merged_partitions = [sd[i] for sd in flat_groups] + full_single_vector = torch.cat(merged_partitions, 0) + merged_single_partition_of_groups.append(full_single_vector) + avail_numel = sum([ + 
full_single_vector.numel() + for full_single_vector in merged_single_partition_of_groups + ]) + + if debug: + wanted_params = sum([len(shapes) for shapes in param_shapes]) + wanted_numel = sum([ + sum(shape.numel() for shape in shapes.values()) + for shapes in param_shapes + ]) + # not asserting if there is a mismatch due to possible padding + print(f'Have {avail_numel} numels to process.') + print(f'Need {wanted_numel} numels in {wanted_params} params.') + + # params + # XXX: for huge models that can't fit into the host's RAM we will have to + # recode this to support out-of-core computing solution + total_numel = 0 + total_params = 0 + for shapes, full_single_vector in zip(param_shapes, + merged_single_partition_of_groups): + offset = 0 + avail_numel = full_single_vector.numel() + for name, shape in shapes.items(): + + unpartitioned_numel = shape.numel() if _has_callable( + shape, 'numel') else math.prod(shape) + total_numel += unpartitioned_numel + total_params += 1 + + if debug: + print(f'{name} full shape: {shape} unpartitioned numel ' + f'{unpartitioned_numel} ') + state_dict[name] = full_single_vector.narrow( + 0, offset, unpartitioned_numel).view(shape) + offset += unpartitioned_numel + + # Z2 started to align to 2*world_size to improve nccl performance. + # Therefore both offset and avail_numel can differ by anywhere between + # 0..2*world_size. Due to two unrelated complex paddings performed in + # the code it's almost impossible to predict the exact numbers w/o the + # live optimizer object, so we are checking that the numbers are + # within the right range + align_to = 2 * world_size + + def zero2_align(x): + return align_to * math.ceil(x / align_to) + + if debug: + print(f'original offset={offset}, avail_numel={avail_numel}') + + offset = zero2_align(offset) + avail_numel = zero2_align(avail_numel) + + if debug: + print(f'aligned offset={offset}, avail_numel={avail_numel}') + + # Sanity check + if offset != avail_numel: + raise ValueError(f'consumed {offset} numels out of {avail_numel} ' + '- something is wrong') + + print(f'Reconstructed state dict with {total_params} params ' + f'{total_numel} elements') + + +def _get_state_dict_from_zero2_checkpoint(world_size, flat_groups, + zero_model_states, + exclude_frozen_parameters): + state_dict = OrderedDict() + + # buffers + buffers = zero_model_states[0].buffers + state_dict.update(buffers) + if debug: + print(f'added {len(buffers)} buffers') + + if not exclude_frozen_parameters: + _zero2_merge_frozen_params(state_dict, zero_model_states) + + _zero2_merge_trainable_params(state_dict, world_size, flat_groups, + zero_model_states) + + # recover shared parameters + for pair in zero_model_states[0].shared_params: + if pair[1] in state_dict: + state_dict[pair[0]] = state_dict[pair[1]] + + return state_dict + + +def zero3_partitioned_param_info(unpartitioned_numel, world_size): + remainder = unpartitioned_numel % world_size + padding_numel = (world_size - remainder) if remainder else 0 + partitioned_numel = math.ceil(unpartitioned_numel / world_size) + return partitioned_numel, padding_numel + + +def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states): + if zero_model_states[0].frozen_param_shapes is None or len( + zero_model_states[0].frozen_param_shapes) == 0: + return + + if debug: + for i in range(world_size): + num_elem = sum( + s.numel() + for s in zero_model_states[i].frozen_param_fragments.values()) + print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}') + + frozen_param_shapes = 
zero_model_states[0].frozen_param_shapes + wanted_params = len(frozen_param_shapes) + wanted_numel = sum(s.numel() for s in frozen_param_shapes.values()) + avail_numel = sum([ + p.numel() + for p in zero_model_states[0].frozen_param_fragments.values() + ]) * world_size + print(f'Frozen params: Have {avail_numel} numels to process.') + print(f'Frozen params: Need {wanted_numel} numels in ' + f'{wanted_params} params') + + total_params = 0 + total_numel = 0 + for name, shape in zero_model_states[0].frozen_param_shapes.items(): + total_params += 1 + unpartitioned_numel = shape.numel() + total_numel += unpartitioned_numel + + param_frags = tuple(model_state.frozen_param_fragments[name] + for model_state in zero_model_states) + state_dict[name] = torch.cat(param_frags, 0).narrow( + 0, 0, unpartitioned_numel).view(shape) # noqa: E501 + + _partitioned = zero3_partitioned_param_info(unpartitioned_numel, + world_size) + partitioned_numel, partitioned_padding_numel = _partitioned + if debug: + print(f'Frozen params: {total_params} {name} full shape: {shape} ' + f'partition0 numel={partitioned_numel} ' + f'partitioned_padding_numel={partitioned_padding_numel}') + + print(f'Reconstructed Frozen state dict with {total_params} params ' + f'{total_numel} elements') + + +def _zero3_merge_trainable_params(state_dict, world_size, flat_groups, + zero_model_states): + param_shapes = zero_model_states[0].param_shapes + avail_numel = flat_groups[0].numel() * world_size + # Reconstruction protocol: For zero3 we need to zip the partitions + # together at boundary of each param, re-consolidating each param, while + # dealing with padding if any + + # merge list of dicts, preserving order + param_shapes = {k: v for d in param_shapes for k, v in d.items()} + + if debug: + for i in range(world_size): + print(f'flat_groups[{i}].shape={flat_groups[i].shape}') + + wanted_params = len(param_shapes) + wanted_numel = sum(shape.numel() for shape in param_shapes.values()) + # not asserting if there is a mismatch due to possible padding + avail_numel = flat_groups[0].numel() * world_size + print(f'Trainable params: Have {avail_numel} numels to process.') + print(f'Trainable params: Need {wanted_numel} numels in ' + f'{wanted_params} params.') + + offset = 0 + total_numel = 0 + total_params = 0 + partitioned_sizes = [] + for name, shape in param_shapes.items(): + + unpartitioned_numel = shape.numel() + total_numel += unpartitioned_numel + total_params += 1 + + _info = zero3_partitioned_param_info(unpartitioned_numel, world_size) + + partitioned_numel, partitioned_padding_numel = _info + partitioned_sizes.append(partitioned_numel) + if debug: + print( + f'Trainable params: {total_params} {name} full shape: {shape} ' + f'partition0 numel={partitioned_numel} ' + f'partitioned_padding_numel={partitioned_padding_numel}') + + offset += partitioned_numel + + offset *= world_size + + # Sanity check + if offset != avail_numel: + raise ValueError(f'consumed {offset} numels out of {avail_numel} ' + '- something is wrong') + + mat_chunks = [] + for rank in range(world_size): + rank_chunks = flat_groups.pop(0).split(partitioned_sizes) + rank_chunks = [tensor.clone() for tensor in rank_chunks] + mat_chunks.append(rank_chunks) + + for name, shape in tqdm( + param_shapes.items(), desc='Gather Sharded Weights'): + + pad_flat_param_chunks = [] + for rank in range(world_size): + pad_flat_param_chunks.append(mat_chunks[rank].pop(0)) + + pad_flat_param = torch.cat(pad_flat_param_chunks, dim=0) + + # Because pad_flat_param_chunks is a list, it is 
necessary to manually + # release the tensors in the list; Python will not automatically do so. + for rank in range(world_size): + pad_flat_param_chunks.pop() + + param = pad_flat_param[:shape.numel()].view(shape) + state_dict[name] = param + + print(f'Reconstructed Trainable state dict with {total_params} params ' + f'{total_numel} elements') + + +def _get_state_dict_from_zero3_checkpoint(world_size, flat_groups, + zero_model_states, + exclude_frozen_parameters): + state_dict = OrderedDict() + + # buffers + buffers = zero_model_states[0].buffers + state_dict.update(buffers) + if debug: + print(f'added {len(buffers)} buffers') + + if not exclude_frozen_parameters: + _zero3_merge_frozen_params(state_dict, world_size, zero_model_states) + + _zero3_merge_trainable_params(state_dict, world_size, flat_groups, + zero_model_states) + + # recover shared parameters + for pair in zero_model_states[0].shared_params: + if pair[1] in state_dict: + state_dict[pair[0]] = state_dict[pair[1]] + + return state_dict + + +def get_state_dict_from_zero_checkpoint(checkpoint_dir, + tag=None, + exclude_frozen_parameters=False, + dtype=DEFAULT_DTYPE): + # flake8: noqa + """Convert ZeRO 2 or 3 checkpoint into a single consolidated state_dict + that can be loaded with ``load_state_dict()`` and used for training without + DeepSpeed or shared with others, for example via a model hub. + + Args: + - ``checkpoint_dir``: path to the desired checkpoint folder + - ``tag``: checkpoint tag used as a unique identifier for checkpoint. + If not provided will attempt to load tag in 'latest' file. + e.g., ``global_step14`` + - ``exclude_frozen_parameters``: exclude frozen parameters + + Returns: + - pytorch ``state_dict`` + + Note: this approach may not work if your application doesn't have + sufficient free CPU memory and you may need to use the offline approach + using the ``zero_to_any_dtype.py`` script that is saved with the + checkpoint. + + A typical usage might be :: + + from xtuner.utils.zero_to_any_dtype import get_state_dict_from_zero_checkpoint + # do the training and checkpoint saving + state_dict = get_state_dict_from_zero_checkpoint(checkpoint_dir, dtype=torch.float16) # already on cpu + model = model.cpu() # move to cpu + model.load_state_dict(state_dict) + # submit to model hub or save the model to share with others + + In this example the ``model`` will no longer be usable in the deepspeed + context of the same application. i.e. you will need to re-initialize the + deepspeed engine, since ``model.load_state_dict(state_dict)`` will remove + all the deepspeed magic from it. + + If you want it all done for you, use + ``load_state_dict_from_zero_checkpoint`` instead. 
+ """ + # flake8: noqa + if tag is None: + latest_path = os.path.join(checkpoint_dir, 'latest') + if os.path.isfile(latest_path): + with open(latest_path) as fd: + tag = fd.read().strip() + else: + raise ValueError(f"Unable to find 'latest' file at {latest_path}") + + ds_checkpoint_dir = os.path.join(checkpoint_dir, tag) + + if not os.path.isdir(ds_checkpoint_dir): + raise FileNotFoundError( + f"Directory '{ds_checkpoint_dir}' doesn't exist") + + return _get_state_dict_from_zero_checkpoint(ds_checkpoint_dir, + exclude_frozen_parameters, + dtype) + + +def convert_zero_checkpoint_to_state_dict(checkpoint_dir, + output_file, + tag=None, + exclude_frozen_parameters=False, + dtype=DEFAULT_DTYPE): + """Convert ZeRO 2 or 3 checkpoint into a single consolidated ``state_dict`` + file that can be loaded with ``torch.load(file)`` + ``load_state_dict()`` + and used for training without DeepSpeed. + + Args: + - ``checkpoint_dir``: path to the desired checkpoint folder. + (one that contains the tag-folder, like ``global_step14``) + - ``output_file``: path to the pytorch state_dict output file + (e.g. path/pytorch_model.bin) + - ``tag``: checkpoint tag used as a unique identifier for checkpoint. + If not provided will attempt to load tag in the file named + ``latest`` in the checkpoint folder, e.g., ``global_step14`` + - ``exclude_frozen_parameters``: exclude frozen parameters + """ + + state_dict = get_state_dict_from_zero_checkpoint( + checkpoint_dir, tag, exclude_frozen_parameters, dtype) + print(f'Saving {dtype} state dict to {output_file}') + torch.save(state_dict, output_file) + + +def load_state_dict_from_zero_checkpoint(model, + checkpoint_dir, + tag=None, + dtype=DEFAULT_DTYPE): + + # flake8: noqa + """ + 1. Put the provided model to cpu + 2. Convert ZeRO 2 or 3 checkpoint into a single consolidated ``state_dict`` + 3. Load it into the provided model + + Args: + - ``model``: the model object to update + - ``checkpoint_dir``: path to the desired checkpoint folder. (one that + contains the tag-folder, like ``global_step14``) + - ``tag``: checkpoint tag used as a unique identifier for checkpoint. + If not provided will attempt to load tag in the file named + ``latest`` in the checkpoint folder, e.g., ``global_step14`` + + Returns: + - ``model`: modified model + + Make sure you have plenty of CPU memory available before you call this + function. If you don't have enough use the ``zero_to_any_dtype.py`` + utility to do the conversion. You will find it conveniently placed for you + in the checkpoint folder. + + A typical usage might be :: + + from xtuner.utils.zero_to_any_dtype import load_state_dict_from_zero_checkpoint + model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir, dtype=torch.float16) + # submit to model hub or save the model to share with others + + Note, that once this was run, the ``model`` will no longer be usable in + the deepspeed context of the same application. i.e. you will need to + re-initialize the deepspeed engine, since + ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic + from it. 
+ """ + # flake8: noqa + logger.info(f'Extracting {dtype} weights') + state_dict = get_state_dict_from_zero_checkpoint( + checkpoint_dir, tag, dtype=dtype) + + logger.info(f'Overwriting model with {dtype} weights') + model = model.cpu() + model.load_state_dict(state_dict, strict=False) + + return model diff --git a/data/xtuner/xtuner/version.py b/data/xtuner/xtuner/version.py new file mode 100644 index 0000000000000000000000000000000000000000..e4669c1880af551fc52eae2b826adfdd60e6a6d0 --- /dev/null +++ b/data/xtuner/xtuner/version.py @@ -0,0 +1,26 @@ +# Copyright (c) OpenMMLab. All rights reserved. +__version__ = '0.1.23' +short_version = __version__ + + +def parse_version_info(version_str): + """Parse a version string into a tuple. + + Args: + version_str (str): The version string. + Returns: + tuple[int or str]: The version info, e.g., "1.3.0" is parsed into + (1, 3, 0), and "2.0.0rc1" is parsed into (2, 0, 0, 'rc1'). + """ + version_info = [] + for x in version_str.split('.'): + if x.isdigit(): + version_info.append(int(x)) + elif x.find('rc') != -1: + patch_version = x.split('rc') + version_info.append(int(patch_version[0])) + version_info.append(f'rc{patch_version[1]}') + return tuple(version_info) + + +version_info = parse_version_info(__version__) diff --git a/model/sentence-transformer/1_Pooling/config.json b/model/sentence-transformer/1_Pooling/config.json new file mode 100644 index 0000000000000000000000000000000000000000..d1514c3162bbe87b343f565fadc62e6c06f04f03 --- /dev/null +++ b/model/sentence-transformer/1_Pooling/config.json @@ -0,0 +1,7 @@ +{ + "word_embedding_dimension": 384, + "pooling_mode_cls_token": false, + "pooling_mode_mean_tokens": true, + "pooling_mode_max_tokens": false, + "pooling_mode_mean_sqrt_len_tokens": false +} \ No newline at end of file diff --git a/model/sentence-transformer/README.md b/model/sentence-transformer/README.md new file mode 100644 index 0000000000000000000000000000000000000000..065b011b76fe98894d8975acfa28f028085fa35b --- /dev/null +++ b/model/sentence-transformer/README.md @@ -0,0 +1,164 @@ +--- +language: +- multilingual +- ar +- bg +- ca +- cs +- da +- de +- el +- en +- es +- et +- fa +- fi +- fr +- gl +- gu +- he +- hi +- hr +- hu +- hy +- id +- it +- ja +- ka +- ko +- ku +- lt +- lv +- mk +- mn +- mr +- ms +- my +- nb +- nl +- pl +- pt +- ro +- ru +- sk +- sl +- sq +- sr +- sv +- th +- tr +- uk +- ur +- vi +license: apache-2.0 +library_name: sentence-transformers +tags: +- sentence-transformers +- feature-extraction +- sentence-similarity +- transformers +language_bcp47: +- fr-ca +- pt-br +- zh-cn +- zh-tw +pipeline_tag: sentence-similarity +--- + +# sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2 + +This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 384 dimensional dense vector space and can be used for tasks like clustering or semantic search. 
+
+
+
+## Usage (Sentence-Transformers)
+
+Using this model becomes easy when you have [sentence-transformers](https://www.SBERT.net) installed:
+
+```
+pip install -U sentence-transformers
+```
+
+Then you can use the model like this:
+
+```python
+from sentence_transformers import SentenceTransformer
+sentences = ["This is an example sentence", "Each sentence is converted"]
+
+model = SentenceTransformer('sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2')
+embeddings = model.encode(sentences)
+print(embeddings)
+```
+
+
+
+## Usage (HuggingFace Transformers)
+Without [sentence-transformers](https://www.SBERT.net), you can use the model like this: First, you pass your input through the transformer model, then you apply the right pooling operation on top of the contextualized word embeddings.
+
+```python
+from transformers import AutoTokenizer, AutoModel
+import torch
+
+
+# Mean Pooling - Take attention mask into account for correct averaging
+def mean_pooling(model_output, attention_mask):
+    token_embeddings = model_output[0]  # First element of model_output contains all token embeddings
+    input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float()
+    return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9)
+
+
+# Sentences we want sentence embeddings for
+sentences = ['This is an example sentence', 'Each sentence is converted']
+
+# Load model from HuggingFace Hub
+tokenizer = AutoTokenizer.from_pretrained('sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2')
+model = AutoModel.from_pretrained('sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2')
+
+# Tokenize sentences
+encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
+
+# Compute token embeddings
+with torch.no_grad():
+    model_output = model(**encoded_input)
+
+# Perform pooling. In this case, mean pooling.
+sentence_embeddings = mean_pooling(model_output, encoded_input['attention_mask'])
+
+print("Sentence embeddings:")
+print(sentence_embeddings)
+```
+
+
+
+## Evaluation Results
+
+
+
+For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name=sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2)
+
+
+
+## Full Model Architecture
+```
+SentenceTransformer(
+  (0): Transformer({'max_seq_length': 128, 'do_lower_case': False}) with Transformer model: BertModel
+  (1): Pooling({'word_embedding_dimension': 384, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False})
+)
+```
+
+## Citing & Authors
+
+This model was trained by [sentence-transformers](https://www.sbert.net/). 
+ +If you find this model helpful, feel free to cite our publication [Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks](https://arxiv.org/abs/1908.10084): +```bibtex +@inproceedings{reimers-2019-sentence-bert, + title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks", + author = "Reimers, Nils and Gurevych, Iryna", + booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing", + month = "11", + year = "2019", + publisher = "Association for Computational Linguistics", + url = "http://arxiv.org/abs/1908.10084", +} +``` \ No newline at end of file diff --git a/model/sentence-transformer/config.json b/model/sentence-transformer/config.json new file mode 100644 index 0000000000000000000000000000000000000000..c06d5b49495f044e6380e68a60538be17a6bd5d1 --- /dev/null +++ b/model/sentence-transformer/config.json @@ -0,0 +1,24 @@ +{ + "_name_or_path": "old_models/paraphrase-multilingual-MiniLM-L12-v2/0_Transformer", + "architectures": [ + "BertModel" + ], + "attention_probs_dropout_prob": 0.1, + "gradient_checkpointing": false, + "hidden_act": "gelu", + "hidden_dropout_prob": 0.1, + "hidden_size": 384, + "initializer_range": 0.02, + "intermediate_size": 1536, + "layer_norm_eps": 1e-12, + "max_position_embeddings": 512, + "model_type": "bert", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "pad_token_id": 0, + "position_embedding_type": "absolute", + "transformers_version": "4.7.0", + "type_vocab_size": 2, + "use_cache": true, + "vocab_size": 250037 +} diff --git a/model/sentence-transformer/config_sentence_transformers.json b/model/sentence-transformer/config_sentence_transformers.json new file mode 100644 index 0000000000000000000000000000000000000000..b974b349cb2d419ada11181750a733ff82f291ad --- /dev/null +++ b/model/sentence-transformer/config_sentence_transformers.json @@ -0,0 +1,7 @@ +{ + "__version__": { + "sentence_transformers": "2.0.0", + "transformers": "4.7.0", + "pytorch": "1.9.0+cu102" + } +} \ No newline at end of file diff --git a/model/sentence-transformer/modules.json b/model/sentence-transformer/modules.json new file mode 100644 index 0000000000000000000000000000000000000000..f7640f94e81bb7f4f04daf1668850b38763a13d9 --- /dev/null +++ b/model/sentence-transformer/modules.json @@ -0,0 +1,14 @@ +[ + { + "idx": 0, + "name": "0", + "path": "", + "type": "sentence_transformers.models.Transformer" + }, + { + "idx": 1, + "name": "1", + "path": "1_Pooling", + "type": "sentence_transformers.models.Pooling" + } +] \ No newline at end of file diff --git a/model/sentence-transformer/pytorch_model.bin b/model/sentence-transformer/pytorch_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..8791e85e2bb19d096e294dece4a15c4308446f8c --- /dev/null +++ b/model/sentence-transformer/pytorch_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:16cc9e54df6e083272378abec2d75dc34d7a48b5276db3ccc050d18de672ac59 +size 470693617 diff --git a/model/sentence-transformer/sentence_bert_config.json b/model/sentence-transformer/sentence_bert_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5fd10429389515d3e5cccdeda08cae5fea1ae82e --- /dev/null +++ b/model/sentence-transformer/sentence_bert_config.json @@ -0,0 +1,4 @@ +{ + "max_seq_length": 128, + "do_lower_case": false +} \ No newline at end of file diff --git a/model/sentence-transformer/sentencepiece.bpe.model b/model/sentence-transformer/sentencepiece.bpe.model new file mode 100644 index 
0000000000000000000000000000000000000000..7a3f40a75f870bc1f21700cd414dc2acc431583c --- /dev/null +++ b/model/sentence-transformer/sentencepiece.bpe.model @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cfc8146abe2a0488e9e2a0c56de7952f7c11ab059eca145a0a727afce0db2865 +size 5069051 diff --git a/model/sentence-transformer/special_tokens_map.json b/model/sentence-transformer/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..2ea7ad0e45a9d1d1591782ba7e29a703d0758831 --- /dev/null +++ b/model/sentence-transformer/special_tokens_map.json @@ -0,0 +1 @@ +{"bos_token": "", "eos_token": "", "unk_token": "", "sep_token": "", "pad_token": "", "cls_token": "", "mask_token": {"content": "", "single_word": false, "lstrip": true, "rstrip": false, "normalized": false}} \ No newline at end of file diff --git a/model/sentence-transformer/tf_model.h5 b/model/sentence-transformer/tf_model.h5 new file mode 100644 index 0000000000000000000000000000000000000000..fedf87aa7fedfe2262ec90a7bba4ba3067372372 --- /dev/null +++ b/model/sentence-transformer/tf_model.h5 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:22150b6ba00e477c7f816f1988d028fff924e2b52e14540889690c72c5add40e +size 470899176 diff --git a/model/sentence-transformer/tokenizer.json b/model/sentence-transformer/tokenizer.json new file mode 100644 index 0000000000000000000000000000000000000000..4279db36dd166b0700071894530c745bb0a83131 --- /dev/null +++ b/model/sentence-transformer/tokenizer.json @@ -0,0 +1 @@ +{"version":"1.0","truncation":{"max_length":128,"strategy":"LongestFirst","stride":0},"padding":{"strategy":"BatchLongest","direction":"Right","pad_to_multiple_of":null,"pad_id":1,"pad_type_id":0,"pad_token":""},"added_tokens":[{"id":0,"special":true,"content":"","single_word":false,"lstrip":false,"rstrip":false,"normalized":false},{"id":1,"special":true,"content":"","single_word":false,"lstrip":false,"rstrip":false,"normalized":false},{"id":2,"special":true,"content":"","single_word":false,"lstrip":false,"rstrip":false,"normalized":false},{"id":3,"special":true,"content":"","single_word":false,"lstrip":false,"rstrip":false,"normalized":false},{"id":250001,"special":true,"content":"","single_word":false,"lstrip":true,"rstrip":false,"normalized":false}],"normalizer":{"type":"Precompiled","precompiled_charsmap":"ALQCAACEAAAAAACAAQAAgMz8AgC4BQAAhyIAgMzkAgC4PQAAeyIAgMzsAgC4BQAAiyIAgMw8AADNvAAAmwkAgJ4JAIChCQCAgx0AAIAZAACBGQAAPR0AgDUdAIBNHQCARR0AgIAxAACBMQAApAkAgIkxAAA9WAMAPEgDAEAKAIA+aAMAAYUAAIQBAQADjQAAAokAAAWVAAAEkQAAB50AAAaZAAAJqQAACKEAAAutAAAKpQAADbkAAAy9AAAPvQAADrkAABHFAAAQwQAAE80AABLJAAAV1QAAFNEAABfdAAAW2QAAGeUAABjhAAAb7QAAGukAAB31AAAc8QAAH/0AAB75AABhOAkAZR0AgGNADgBi8AgAZSgPAGSADgBn2A8AZvAPAGlwDABoMAwAa/AMAGrYDABtSA0AbBwNAG8QEgBubA0ARgoAgHAMEwBzqBMAcuwTAHUoEAB0TBAAd9ARAHYUEAB50BYAePQQAF0dAIB69BYAdR0AgG0dAIB/fQEAhgwAgEGAAgDeCwCAQxgAAELAAABFSAAARGAAAEeQBgBGhAEASSgGAEhsAQBLOAcASvAHAE1wBwBMRAcAT/AEAE7MBACnCQCAUCwFAFOgCgBSEAUAVQAKAFRQCgBX0AgAVhALAFlICABYuAgAhBEAAFo8CACA9QAAgZ0AANgLAIAtHQCAg2kCAIJFAgCBNQIAgDUCAIdtAwCGVQMAgTkAAIRlAgAXDACAigEEAInVAwCI7QMAjwkAAKgLAIApDACAjAkAAC8MAICJMQMAkQkAAMzYAABVHQCAfR0AgL0aAIBMCgCAgGUDAIENAwCGPQAAgx0DAMwQAgDNhAEAgikAAMx0AwCjgQYAxRoAgICxAgCBsQIAzRoAgIEpAAClwQAA1RoAgMzoAwDNYAIAUgoAgKjxAABYCgCAXgoAgGQKAIDdGgCAgWkAAMzcBACCEQEA5RoAgGoKAIDtGgCA/RoAgAUbAID1GgCAswkAgMygBADN3AQAzAgBALYJAIClHQCAhhEBAOEAKwDgfCcA44hIAuIMOAKdHQCAh5EBALUdAICtHQCAgNkBAIE1AADMxAIA6kRkApUdAIANGwCA72hkAoERBwCC8QEA8NCLAolVAACB5QEAFRsAgIfhAQCAbQAAgQ0AAIN5AAB2CgCAgXkAAICV
AQDMOAEAzRQBAIzBAQB8CgCAvAkAgKMVAQDDlBcAwpwUAMWEFwDEUBcAx+wXAMaAEgCNHQCAiAoAgMvQFgDK4BYAzRQWADUMAIDPvCAAzpwZANHMJADQ2CUA0+gkALFRAQA7DACAp90HAL0dAIDWvCQA2cgnANjUIgDb+CcALRsAgIftBwCCCgCAzPgEAB0bAIAlHQCAh8kGALAJAICR3QcAuQkAgCUbAIBwCgCANRsAgIUdAICMDACAjPkGAAsMAICA1QYAgcEGAMzEAgDNBAUAglEAAIN1BwCArQYAgbkGAIY1BwCHKQcAhEEAAI4KAICn7QAAPRsAgIjpBwCJzQcAlAoAgI/BBwCM3QcAmgoAgOoLAICnXQYAsJ0AAKAKAICmCgCAo0EGAEUbAIBVGwCAfQwAgE0bAIBdGwCArXEGAGUbAIC/CQCAzPgDAM0sAwDCCQCAo+UAAMUJAICMTQAAsgoAgKfxAAC4CgCAsT0GAIedAACGlQAAqB0HAISJAAC+CgCAgqkAAIHVAACtAQcAygoAgJE9AACCmQEAyAkAgM0MBQDMCAUAgT0AAIeFAQCIvQEAdRsAgMUdAICuCwCAjJEBAEEMAIBHDACAzR0AgID1AQCBhQEAgoEBAIOdAQCEiQEAxAoAgIapAQCHXQAAiG0AAIlNAABtGwCAzBACAIxdAACCDQAA0AoAgI9JAACw6QAAfRsAgPALAICjKQEAgCUBAIFVAQCFGwCApzUBAMykAQDNEAIA1goAgI0bAICBNQAA3AoAgK4JAQDoCgCAzOgBAM0oAgCVGwCAo/EAAIQFAACdGwCA4goAgK0bAICotQAApRsAgIFdAAC1GwCAzPwBAM3AAQC9GwCAxRsAgIGFAwARDACAgeUDAO4KAICH6QMAywkAgIylAwDNGwCA+goAgKoJAIDVGwCAgZkDAIHdAwCMvQMAzSQBAMwgAQDMEAIAzTACAIH5AACHUQAAgFUAAIFZAAD0CgCAg0kAAIxBAADlGwCA3RsAgM4JAICBfQAAgHEAAMwgAwDNsAMAo30DANEJAICjEQMA7R0AgIEtAQCx/QAApzEDAK1BAwDlHQCAo20DAP0dAID1HQCA7RsAgKdtAwCANQAAgR0AALFtAwCILQAAmAwAgKeVAACBcQAAgFkAAINxAACj9QAAgVEAAK2BAAD1GwCAsQkDAIldAACEPQAAzDgBAISdAQCBGQAAgAkAAIRlAAD9GwCAzNAHAMzwBwAFHACAkYkAAMxMBgDNBAYAzHAGAM10BgDMQAcAmy0PAMyoBwDNrAcAhg0AAIdVDwCEQQ8ACQsAgIIBDACDVQ8AgDUBAIHZAQCkDACAj+kAAIztAACSDACA3R0AgIv1AACIbQ8AiQ0AAA8LAIC0CwCAgiUAAE0MAICBQQAAUwwAgBUeAIANHgCAJR4AgB0eAIAtHgCABR4AgIApAACBKQAA/AsAgA0cAICEeQAAFRwAgIFNAQCAoQEAGAsAgKP9DwDMOAIAzUgDAB0cAICBWQAAzXwCAMykDQAkCwCAWQwAgKjJDwCHOQAA1wkAgImhDwADCwCAkREAAJ4MAIDaCQCAmQsAgF8MAICAuQ8AgbkPANUdAICDjQ8A9gsAgCUcAICEBQAALRwAgB4LAIA1HACAKgsAgIGdDwCHIQAAh7UPAMyoAgDN6AIAzLQMAM3cDACmzQAAp8UAAE0cAICPgQ8AjIkPAKPlAAAwCwCAPRwAgDwLAICxyQAAhwUAAFUcAIBFHACAhz0AAF0cAIBxDACANgsAgKMFDwCB+QAAzKgDAGUcAIBICwCAjEkAAKPxAABtHACAdwwAgEILAICnlQAAfRwAgHUcAIDMrAMAzcgAAN0JAICHaQAA4AkAgIG9AACCeQAA4wkAgIe5AQBOCwCAkaUAAIEdAACdHACAVAsAgIgFAAClHACAm5EAAFoLAIDmCQCAjJEBANILAIDGCwCAwAsAgMwLAICDRQAAgrkBAIG5AQCApQEAPR4AgIZxAABgCwCAhEkAAIsVAACKPQAAiTkAAIhFAACP+QAAZgsAgLoLAICMBQAAp1EBAKZJAQBlDACAsHkAAKNZAQCMqQAAgKkAAIGpAACBlQAAgJUAAK1xAQBrDACAogsAgISNAABNHgCARR4AgKMhAABdHgCAVR4AgGUeAICBbQAAgG0AALEFAQCkOQAANR4AgIUcAIBsCwCAqAUAAJUcAICNHACArQkAAMywAQCBvQMAgL0DAIPNAwCtHACAtRwAgL0cAIDMvAEAzYQBAInpAwDMHAEAgdkCAIDFAgDNOAEAzDwBAMxoAgDNRAIAg00AAMUcAICH2QAAhy0AAIBFAACBEQAAggUAAHILAIDVHACAzRwAgN0cAIDMOAIAiBUAAIjhAACAbQAAgTkAAMyEAgDNUAEAo0UDAIQ5AQDlHACA7RwAgMzcAwDNSAIAbR4AgOkJAIB4CwCAhR4AgKoMAICBbQAA9RwAgH4LAICj0QAAfR4AgHUeAIDMiAQAgXUAAIB1AACBCwCAo7UAAMwABADNVAIA/RwAgIcLAICETQEAjQsAgAUdAIANHQCAzNAOAMwsAQDMAAUAzVwFAOwJAIDvCQCAzJgOAIHBAADMzA8AzDwOAMwIAQDNnA4AzNQPAM14DwDMPA4AzTgOAIHlAQCA5QEAg+UBAILlAQDUCQCAhOUBAIfhAQBBHQCAiaUBAIjZAQCByQcAOR0AgFEdAIBJHQCAzDQBAPUJAICA3QAAgekAAEMKAICD/QAAgM0AAIH5AACBEQcAaR0AgGEdAICJ0QAAzCgBAHkdAIBxHQCA4QsAgMw0AQDbCwCAgF0AAIFlAACjAQEAg2EAAIFxAACASQAAMR0AgBoMAICrCwCAiVUAACwMAIAyDACAWR0AgIEdAIDBGgCATwoAgIIdAACDeQcAgBkHAIEZBwCGIQAAhykAAISRBwDyCQCAimkAALHZBgCIaQAAifUHAEkKAICP3QcAjNkHAIkMAID4CQCAKR0AgPsJAICRoQcAgEEHAIFBBwCHBQAAyRoAgIKRBwDRGgCA2RoAgKOVBgCGhQcAp+0AAMyQAgDN4AUAsekAAKPBAABVCgCAWwoAgGEKAIBnCgCA/gkAgKVlBwDhGgCAzLgDAKhVBwDpGgCAbQoAgPEaAIABGwCACRsAgPkaAIABCgCAo60AAAQKAICMJQYABwoAgIxNAACpHQCAgm0AAIE9BgCCAQYAgWUAAKEdAICHZQAAuR0AgIcRBgCHrQEAsR0AgMxQAgDNxAIAgeEBAIDJAQCD4QEAkYkAAID9AQCB1QEAmR0AgIydAQCJNQAAcwoAgIB1AACBXQAAhi0AAIc1AACEfQAAERsAgIKFAQCDfQAAgJ0BAIGRAQAZGwCAj+kAAIzhAAB5CgCAfwoAgAoKAICIDQAAifkAAKc5AQCRHQCAiwoAgDgMAICjJQEAPgwAgLBZAACJHQCAggUAAMEdAICtFQEAjwwAgDEbAICGBQAAhQoAgCEbAIApGwCAp2kAAIANAQCBAQEAhzEAAKNJAAC
xGQEAzBACADkbAIAODACAkQoAgK1RAADM1AEAzfgBAKhBAABBGwCAzTgBAMw8AQCB7QMAlwoAgJ0KAICMDQAA7QsAgKMKAICBxQMAzGgCAKkKAICCxQMASRsAgITJAwCHKQAAhjEAAFkbAICCbQAAgAwAgFEbAICHYQAAYRsAgGkbAIAVHQCAzKgDAM2sAgCB+QAAiC0AAA0KAIAQCgCAEwoAgIw1AAC1CgCAuwoAgLHVAADBCgCAeRsAgMkdAICxCwCAzDABAEQMAIBKDACA0R0AgMwEAQDHCgCAcRsAgKelAADTCgCAo40AAMwUAgCAuQAAgbkAAKeFAAAIDACAgmUAAIEbAICMNQAA8wsAgMzsHADN/AMAiRsAgK6tAADZCgCAkRsAgMzABgDN0AYAsL0BAMyQBwDfCgCAgckBAMwYHQDNIAIAhBEAAOsKAIDNuAYAzKwGAKEbAIDlCgCAgSkAALEbAICpGwCAo+0BAMxAHQDNEAIAuRsAgMEbAICBCQAAyRsAgMxAHQDN0AIAqNkBABQMAIDMkAcAzBwBAMxgBgDNZAYA8QoAgBwKAIDRGwCAkSkBAP0KAICBzR8A2RsAgPcKAIDpGwCA4RsAgMzEBgDNwAYAgTEAAIDZAAAfCgCAIgoAgIK5AQCDRQEAgLkBAIG5AQCGXQEA8R0AgIRdAQDpHQCAzcAAAMzwAACIARwAiXkBAAEeAICPVQEAjGEBAPkdAICB3R4AgRUfAJkbAICBXR8AjIEfAIdBHwDMGAMAzWgDAIBNHwCBpR8AJQoAgIOpHwCMFR8AjNEeACgKAICHtR8AgJUfAIGZHwCBEQAAg70fAICFHwCBiR8A8RsAgIQ9AACbDACAiZkfAPkbAICIBQAABgsAgAEcAICADQAAgf0AAAkcAICj2R8Ao3keAKOFAAAMCwCArTUfAKdhHgCnqR8AoQwAgIQNAACnDACAozUfACsKAICtiR8AhHEAAKchHwCxPR4AsYUfAJUMAIDhHQCAEgsAgLcLAIDMtBwAzbAcAFAMAICxQR8AVgwAgJwLAIAZHgCAER4AgCkeAIAhHgCAgLkeAIG5HgCCIQEAgzUBAIRhAQAxHgCAhokBAIe9AQCIkQEAiekBANkdAICL/QEAjOUBAIINAAAJHgCAj90BAIO5AQCRrQEAgb0BAIC9AQCAoQEAgaEBAPkLAID/CwCAhD0AABEcAICJlQEAm4EBAIHNHgCAzR4AzPwCAM3wAgCB5QAAGRwAgIHtAACjpQAAzJABAM1cAgCHHQAAGwsAgKj5AAAhHACAJwsAgFwMAIBiDACAKRwAgIQFAAAxHACAo9UAACELAIA5HACAgVEAAMz0AQDN0AEALQsAgIc9AABRHACAMwsAgEEcAIA/CwCAhwUAAFkcAIBJHACAh/EDAIHZAwCBmQMAgZEAAGEcAIB0DACAjPkDAMwkAQCHuQMAgfkDADkLAIDMZAIAgskDAIyZAwBpHACAh9EDAI+RAwCB3QYAkfUDAMwABADN7AMAh2UAABkdAIBLCwCAcRwAgHoMAIBFCwCAzBgBAIg5AACBHACAeRwAgMxcAwCMJQAALgoAgMwsAQCx/QAAozkDADEKAIA0CgCAoRwAgKdZAwDMdAMAiAkAAKNRAwCpHACAXQsAgINtDQCnnQAApq0AAKOdAACxDQMAzCgBANULAICntQAAprUAAMkLAIDMMAEAgdUHAMMLAIDMKAEAzwsAgEEeAIBjCwCArYkAAGkLAICAzQEAgd0BAMxEAQDNnB4AhPUBAL0LAIDMWAEAzUwBAIDtAQCB/QEAg7UAAGgMAICM3QEAbgwAgMwIHgCM8QYAzDgBAM08AQBRHgCAiREAAIEFBgBJHgCAYR4AgFkeAIBpHgCAgz0AAIAhAACBOQAAgDkAAIEhAAA5HgCAiRwAgMwoAQCB2QYAbwsAgIH9BgDMJAEAmRwAgJEcAICxHACAgCEBAIE1AQCjBQAAuRwAgMEcAIDJHACAzIwFAM1AAgC3HAMAdQsAgIfNBwDZHACA0RwAgB0dAIDNiAAAzJAAAIzdBQCjhQAAFgoAgMzgAgDhHACAiNUHAIFNAACATQAAUQsAgOkcAIBXCwCAkTkHADcKAICIxQcApQsAgIrJBwDxHACAmz0AAIflBwBxHgCAgYUHAICFBwA6CgCAgvkHAILVBgCDRQAAgMkGAIHdBgCG4QYAewsAgIRRAACJHgCAipUGAIuZBgCIeQAAiZ0GAK0MAICPWQcAjG0HAPkcAIDMgAMAzSQCALARBwA9CgCAgR4AgCEdAIB5HgCAhAsAgICNAACBnQAAzOwDAM3oBAABHQCAigsAgKNJBwCQCwCACR0AgKO9BwARHQCAGwAAgOcHAIALAACApKUHAOsEAICKBQCAAwAAgKhhBwDZDQCAZQAAgMgDAIAbCQCArWkHAIAtAQCBPQEAgl0BAINRAQCEYQEAuAQAgKwEAICHYQEAiK0BAIm1AQCKvQEAjykVALwFAIAdDACAzHgCAM3YBQCB3QEAgXEAAOQLAICC/QEAhBkAACMMAICH7QEAIAwAgMw0BADNMAQA5wsAgJ9pFQAmDACAjMkBAM34BADM8AIAsUkBACEHAICB1QAAoxUBAKCZFQBzCACARgcAgIT1AADMKAQAzSwEAMMIAICveQEAqH0BADENAICqaQEAUgkAgLQlAQC1KQEAowkBAAIMAIDqBgCA7gYAgLIFAQCzPQEAvPUAAL39AAC+2QAAOAgAgLgBAQC5AQEAugEBADwHAIBDBwCAhgwAALOdAwCyiQMAswgAgIC9AwBpBwCAbAcAgBIJAIDkBgCA5wYAgDUIAICJhQMAzOQHAL+hAwAFDACA1wwAgIxlAADN5AwAzCQMAIlBAACIVQAAi0UAAIpFAACFtQMAhLUDAIeVAwCGgQMAAQ0AgAQNAIAHDQCAmCwAABMAAICmyAAAzYwGAMyoBgCFaQAAFwAAgDEAAIBpAACAzPADAAcAAIA1AACA0QwAgLGVAAAlDQCAs5UAALKVAAA1DQCAOA0AgEANAIA7DQCALg0AgHUAAICmBgCAJQAAgJgJAIAdIQCAv1UDAEMNAIAZIQCAFSEAgGEgAIC4bAAAlGUNAJIAAgCcrQEAnaUBAJqJAQCbiQEAmJkBAJmJAQDMIAYAzQQGAMxABgDNXAYAzDwHAM04BwDMvAcAhXUAAIABDwCBDQ8AaSAAgLqZAQCFBQAAcSAAgFkgAIC+hQEAgSkPAIAlDwBlIACAgiEPAIUpAAC0pQEAhREAAG0gAICziQ8AsoUPALHJAQCwAQwAt4EPALbtAQC17QEAtO0BAIFlAQCAZQEAg2EBALi1DwDMPAsAhHkBAIDhDwCB3Q8AdSAAgF0gAIDMyAQAzbgEAIWtAACFFQAAISEAgDkhAIDM6BkAzbQZAKRdAQBGDQCAok0CAKPxDwCgVQEAod0PAH8IAIBuCQCAOwkAgO0eAIBsCQCA9R4AgHcJAIDxHgCAsQgAgJMNAACtHgCA+R4AgITVDACF6Q4AlGkAAIfdDgC1Hg
CAmbQCAL0eAIDFHgCAsR4AgD0hAIC5HgCAn3QBAMEeAICRGA0AgI0OAIGBDgCGhQ4AlYwDAISJDgCXRAIAghEAAKm4AACA0QAAge0AAMkeAIBJDQCA5R4AgIVZDwCDiQAAoTQNAIFFDgCASQ4A6R4AgKU0AQCFYQ8AzPAUAB0fAIC5xAUAzMgDAM3cAwCA3QAAgcEAACUfAIC/kAUAhREAALHsBwCA9QAAgcEAAKEgAIC1jAYALR8AgLdABgCA3Q4AgekOAMwoAgDNtAIAgM0OAIH5DgCFKQAAg4UBAIB1AQCBsQEAgPEBAIHVAQCpIACANR8AgIUFAACxIACAgJkBAIG9AQCCfQAAk9UBAJThAQCFDQAAmSAAgCEfAICACQAAgRkAACkfAICTrQEAlC0AAKUgAICFDQAAMR8AgIUFAACtIACAOR8AgIUpAACCGQAAhTUAAIDxAACB4QAAtSAAgJ0gAIBBIQCAhQUAAGEhAICDdQEAgO0BAIEpAQDM8AEAzbABAEwNAIBdIQCAWSEAgKMNAIBdHwCAZR8AgIA9AACBDQAAbR8AgHUfAICALQAAgR0AAIIVAABhHwCAzSwBAGkfAIBxHwCAeR8AgIjFAwClIQCAzJACAM28AgCE7QMATw0AgIb5AwCdHwCAgIEDAIH9AwCAPQAAgTUAAIFJAACAQQAAzdwBAIJBAAClHwCAoR8AgKkfAIDNMAEAlJ0DAI0hAIDN8AEAzAwBAIG5AwCAxQMAg6EDAJOlAwCArQAAgdUAAICdAACBqQAAiSEAgFINAICBwQAAgMkAAIC1AACBgQAAhSEAgINpBADMcAMAzbQDAIEhAIDNPAEApg0AgJMBBADNjAIAzPQCAIANAACBNQAAlNkGANEfAIDVHwCA2R8AgMwIAQDNHAEAgREAAIApAACpIQCAghkAAICRAQCBkQEAzWgFAMyUAgDMEAkAzSgWAMxYDgDNeA4AzBQNAM3YCgDMKAwAzYwNAMzgFwDM4AoAzDgLAM30CACFEQAAVQ0AgIBRBwCBUQcA4SAAgM2QDgCFBQAA6SAAgMzYDgDN7AEA8SAAgM0ADgCFGQAAzfAPAM08DgDNVA4AzGgBAM1sAQDZIACAYQgAgJSZBwDMwDsAgGEBAIHZAACFKQAAzWQOAMx4AQDNfAEAga0HAICtBwCFZQAAgp0HAIBRAQCBUQEAlOEHAM3AAACEeQEAk8UHAIZhAQDlIACAiCEBAIUNAADtIACAzRgBAMzYAADNtAAAgN0HAIHNBwCZHwCAhQkAAM0fAID1IACA/R8AgN0gAIAFIACADSAAgBUgAIAJIACAASAAgK0hAIARIACAGSAAgMy4AgDNHAMAgGUAAIF1AACCfQAAHSAAgIUJAACFQQAAASEAgKkNAICAmQYAgSEHAIUZAACDfQAACSEAgIVZAAD9IACA+SAAgIDNAACB2QAAjR4AgIURAACE6QAAlR4AgIblAABBIACAgDUAAIENAACdHgCAhR0AAEkgAIClHgCAhQUAAFEgAICAVQAAgW0AAIJ9AACTRQAAlA0AAIUNAAA5IACAkR4AgIAJAACBEQAAmR4AgIUdAABFIACAoR4AgIUFAABNIACAgOkBAIHxAQCCBQAAqR4AgIUJAACFCQAAVSAAgD0gAICAbQEAgXkBAIIZAACDpQEADSEAgIV1AACFBQAAESEAgAUhAIAhIACAzMgCAM3cAgCsDQCAzR4AgIA5AACBOQAA1R4AgN0eAIDRHgCA2R4AgIAdAACBDQAA4R4AgCUgAICAxQAAgdUAAM3AAADMJAIAgNUAAIHFAACFOQAAg8kAACUhAICvDQCAgNUAAIEJAACFBQAALSEAgP0eAICBIACAgAkAAIERAAAFHwCAk5kAAJS5AAANHwCAhWUAAIU9AACJIACAk10AABUfAICFEQAAzXAFAMx0BQCUATwAkSAAgHkgAIDNKAEAhSAAgI0gAICFGQAAlSAAgH0gAIA1IQCAKSEAgCkgAICFJQAAhTkAAMz4AgDNxAMAzTwBALINAICBlQMAgI0DAM3EAQCCpQMAhVEAAIVJAADMKAEAzSwBAM04AQDMPAEAgGk+AIFpPgBJIQCARSEAgM04PADMVDwAgdE8AJOdPgDMSAEAzcgCAM00AQBNIQCAlLk+AFgNAICAoT4AgaE+AIKhPgCIjTwAVSEAgIWtAACALQAAgSEAAIXVPwCVHwCAgO0AAIHxAACGpQAARR8AgISpAADNJAEAzSgBAE0fAICI+T4AhfE/AFUfAIBJHwCAhcU/AM0wAQDNEAEAzfQGAIDdAQCB6QEAzbwGAM1wBgDM4AYAzVwBAMxoBgDNkAYAzWQGAM14BgDMrAcAzagHAMzoBwDNyAcAgk0/AIP9AgCANQIAgekCAFEfAIBZHwCAgAU9AIV9AQBRIQCALSAAgM0UAQApDgCAge0BAIDhAQDNPAEAgs0BAM0sAQCCdQEAgW0BAIBZAQCAZQEAgcUAAIUfAIDNJAEAzTgBAILxAACB+QAAgFkBAIApAACBcQAAzBgBAM18AQDNLAEAjR8AgIEdAACAHQAAiR8AgJEfAIBxIQCAzSQBAMzkPQDNXA8AzegAAMwMAQCA1QEAgckBAIKZAACD5T8ACR8AgBEfAIAZHwCAMSEAgCMOAIB1IQCAPR8AgDEgAIBBHwCALA4AgIBNPwCBQT8AfR8AgGkhAICBHwCAZSEAgIAlPwCBKT8Ak5E/AIN9AAAmDgCAlEEAAMzYAgDNrAIAbSEAgJNVAACACQAAgR0AALUNAIB9IQCAlEEAAK0fAICAnQAAgaEAAIAdAACBEQAAhKUAALUfAICGpQAAvR8AgIjxAACC0QAAgdkAAIDNAACAJQAAgSkAAIIFAADFHwCAsR8AgLkfAIDBHwCAk7EAAJQRAADJHwCAgB0AAIEVAACAJQAAgS0AAII9AAB5IQCAgO0AAIHRAACCFQAAg4EAAIHQPQA1IACAzCACAM3cAQCFeAIAkSEAgC8OAICZIQCAiRgDAN0fAICALQAAgTUAAIAJAACBbQAA5R8AgMEgAICRsQAAkKkAAJPdOwCSAQQAlaUAAJSVOwDtHwCAlqEAAIUJAACTQQAAySAAgPUfAICFBQAA0SAAgJT1AAC5IACAgLkAAIHdAACC5QAA4R8AgOkfAICF6QAAgAkAAIE1AACFBQAAxSAAgPEfAICFHQAAzSAAgPkfAICFBQAA1SAAgLHBBQCwxQMAvSAAgLLFAwC12QUAtM0DAJ0hAICFOQAAuf0DAKEhAICVIQCAuw0AgM0NAIAXDgCAAR8AgAUOAIDTDQCAzIgCAAsOAIDN4D4AzZABAMwkAQBwDQCAjg0AgEEOAIB9DgCAgLEAAM3UPgDN5D4Agw4AgMy8PgDNuD4AgNEDAIHtAwCC/QMAhmkAAD4OAICFnQMAzTwBADgOAIDM6AIAzTw/AIjlAADNGAEAiQ4AgIhBAAA7DgCAdw4AgM0sAQCVDgCAgNUAAJsOAICG4QAAhukAAEcOAIDNJAEAoQ4AgM0QAQCI0QAAi
CkAAMz4AgBNDgCAzfgCAMwkAQCnDgCAhS0DAMygPgDNbD4AgNUDAIHNAwCCAQMAg/kDAMxkAwDNzAIARA4AgM0kAQDMDAIAzQgCAIERAADMnAMAzLA+AM20PgDMxD4AzcA+AMyAPgDNuD4ArQ4AgMyEAgDMmD8AzVA+AMwgPgDNoD4AzQw/AM0wPwDNeD8AzQQ/AIhZAAC/DgCAzfgBAMzEAQBKDgCAxQ4AgMsOAIDMFAIAzAgBAM3IAQCIBQAA0Q4AgNcOAIDMKAIAuQ4AgIgNAACG0QAAgB0BAITNAACI9QAAzDwCAIQ1AQDMRAIAhikBAIAOAICIZQEAhg4AgKdEBQBiDgCAi+0AAIjtAACBDQAAiCUAAIZlAADMcAIAzXQCAMwwAgDN2AUAXA4AgIwOAICAOQAAXw4AgMzgBQB6DgCAzCgBAM0UAQCGJQAAiFUAAAgOAICGhDAAxA0AgIDVBwCG/QcAmA4AgMwkAgCIPQAAng4AgGsOAICIPQAApA4AgMxIAgDNeAIAUA4AgKoOAICXwAUAlnAFAJUYBQCAaQAAk1gFAIE5AACIZQAAkPg8AIZZAACeqAUAhEUAAGgOAIDM1AIAmrQFAIBdAACYrAUAp+wEAIgRAADM2AIAzdwCAKO8BACwDgCAzGACAMIOAIBuDgCAyA4AgK0IBADODgCAq/QEAMwsAgCIBQAA1A4AgLfoAwC2HAQAtSgEAMwAAgCzKAQAi3kAAIh9AACwdAQAhkEAAL6kAwCEdQAAiB0AANoOAIC6TAMAzNwDALj8AwCDqAIAiA0AALwOAICIFQAAh5QCAMw4AgBlDgCAzAQCAIvcAgCPDQAAcQ4AgI8ZAADMIAIAdA4AgI3wAgCIdQAAmCADAJksAwCPDgCAlA0AgMxMAgCWcAMAzCQCAIg9AACSDgCAzCwCAIgFAACzDgCAzCQCAIgNAAC2DgCAh/UAAKjUAwCpxAMA3Q4AgNlgAgDSDwCA1Q8AgNsPAICUNQAAkzEAANloAgDYDwCA2UwCAJQFAADeDwCAlSEAAJQpAABQEACAdBYAgEMXAIDSFgCA2WACADcXAIC12AMAtPADAJQ1AADZWAIAWhcAgJQFAADZVAIAlA0AADEXAIDgdAEAisgAALwVAACIyAAA4IACAIcXAICBoAAApOwCAKTIAgCoXAAAvA0AAJkXAIDghAIAvAUAAJ0XAICk+AIA4PQCALDMAwCV0AAAXRcAgLPgAwCmyAIAp2ACAJLYAABkFwCAvsEAAGsXAICXwQAAchcAgHkXAICAFwCAzXg/AMy8PwC+gA0AixcAgLx4DAC9gA0AuvQMALtUDAC49AwAkhcAgLYXAIC3uAwAuhcAgLWMDACyoAMAs6AMAKEXAICxQAMArnACAK9kAwC4BQMArUgDAKgXAICvFwCAqEQDAKnYAwDaFwCAp9gDAKRoAgCliAMAtjUDALc9AwCSyAIAtT0DAJldAQCYTQEAm2UBAJppAQCdZQEAnGUBAJ+FAQCemQEAh5wCAL6tAACWpQAAl70AAMw0BQDNjDcAzLg4AM2sOACflQEAth0AAJ2ZAQCc9QEAs7EBAK54AgDhFwCAvhcAgJk9AADFFwCAmxkAAJoJAADMFwCA0xcAgOBIAgCeCQAArFwCAK30AgD6FwCA9hcAgP4XAIDoFwCAh2ADAO8XAICvVAIAvhEAAJcFAAACGACA4KwCAAYYAICG+AMAh+wDAOC0AgAOGACAr0gCAK6QAgDgPAIAvg0AAAoYAICXGQAA4NgCAIaEAwCWEQAAvwAMAJ1tAACcYQAAEhgAgLFMAgCzUAIAlQ0AABYYAICGnAMA4MgCALMEAgCCBQAAIhgAgLNQAgCVDQAAJhgAgBoYAIAeGACA4LQCAIaMAwCH3AMAvg0AAJVpAACWeQAAKhgAgLToAgC1UAIAlwUAADIYAIDg1AIAtPQCAL4ZAADgoAIALhgAgODUAgCZjAMAt9QCAIoFAAA2GACAOhgAgIoVAAC3NAIAjx0AAD4YAIBCGACAswUAAEYYAICzBQAAWxgAgJwJAACdCQAATRgAgFQYAICMBQAAYhgAgG0YAIB0GACAexgAgJ9JAACCGACAiRgAgGYYAICQGACAlxgAgNkYAIDPGACA6hgAgOAYAICeGACAg8kBAIH5AQCsGACAsxgAgLoYAIDBGACAyBgAgKUYAICAtAIApYgDAOEIAgCuHQAA8RgAgLwJAACN9QEA9RgAgOEAAgCSlQEA45QQAJNFAACXiQEAhRQAAId4AQCGAAQARjoAgEo6AIBOOgCAUjoAgFY6AICdeQAA74xoAJyhAQBaOgCAXjoAgKKZAABiOgCAZjoAgGo6AIBuOgCAp4kAAHI6AIB2OgCAqUkBAHo6AICsqQAAfjoAgII6AICGOgCAsyUBAIo6AICOOgCAkjoAgLchAQC2OQEAtTEBAJY6AICaOgCAufkAALkRAQC4GQEAnjoAgKI6AICmOgCAqjoAgICwAQCEiAIArjoAgIPIAQCEVAMAhFwEALI6AICEXAUAgN0DAIEtAACCMQAAvjwCALo6AIC+OgCAh4gDAIacBACzLQMAwjoAgMY6AIC+AAQAvhwFALbRAwC12QMAyjoAgLv5AwC68QMAmljTAYTgBwC/xQMAvtkDAL3dAwC83QMAvgAYAKUFAwCmDQMAzjoAgIQcGADSOgCA1joAgKPxAwCsAQMArQEDAK4FAwCvGQMArKQbAq3cGgKqLQMAqyUDAL5MGQC+SBoA2joAgL6AGwC04BoCtdQdArYwHgLvCAIA3joAgOGgAQC6OBoC4/gCALoAAAC9ZBwCvvQcAr8AEAKRBNMBkOT2AeBEAQCSCD4C4joAgOY6AIDqOgCA7joAgL6sHADyOgCA9joAgPo6AID+OgCAAjsAgAY7AIAKOwCAgbBtAICAAQCDHFIAgth3AIUgmgCEkL4AhwjPAIaM5gCJbDcBiOAsAYsYfgGK2BMBjeClAYzwWgGP/OsBjliPAbDVFwCxAWgAso1rALOdawC0SWsAtZVvAA47AIDgcAEAEjsAgBY7AIAaOwCAHjsAgIAZAACBGQAAggUAACI7AIAqOwCAoaUCAKJJBwCjQQcApEEGAKXVGwCm3RsAp8EaAKgBHACp4R8AqkkfAKsBEACs9RMAra0TAK4BFACv+RcAqDEGAKkxBgCqTQYAq0UGAKxNBgCtmQYAro0GAK+FBgCGgAMAhxgDAC47AIAyOwCANjsAgDo7AIA+OwCAQjsAgLhtBwC5dQcAun0HALt1BwC8bQcAvc0HAL75BwC/+QcAsKkGALGFBgCyeQcAs3kHALRpBwC1aQcAtl0HALdVBwC2OgCAs8EGAEY7AIAmOwCAth0GAEo7AIBOOwCAtcEGALppBgC7RQYAUjsAgFY7AIC+qQcAv6kHALypBwC9qQcAo4UGAFo7AIBeOwCAYjsAgGY7AICmWQYApYUGAGo7AICrAQYAqi0GAG47AIByOwCAr+0HAK7tBwCt7QcArO0HAKjBBgCpLQEAqiUBAKs9AQCsJQEArS0B
CsYQ4ArWEOAK5hDgCvYQ4A2noAgN56AIDiegCA5noAgOp6AIDuegCA8noAgPZ6AIC4TQ8AuVEPALpRDwC7UQ8AvHEPAL1xDwC+cQ8Av3EPALDBDwCxwQ8AssEPALPBDwC0wQ8AtcEPALbBDwC3wQ8As+kPAPp6AIC+gAEA/noAgJZ6AIC24Q8AtekPAAJ7AIC7BQ4AugUOAAp7AIAGewCAvwUOAL4FDgC9FQ4AvBUOAIFNAACAQQAA72gNAIJRAACG8AcAh9QBAA57AIASewCAFnsAgIRwAQAaewCAHnsAgOHgDgAiewCA40gNACZ7AICjaQ8AKnsAgC57AIAyewCANnsAgKZhDwClaQ8AOnsAgKuFDgCqhQ4APnsAgEJ7AICvhQ4AroUOAK2VDgCslQ4ARnsAgLMxDgBKewCATnsAgLbBAQBSewCAVnsAgLXRAQC6zQEAu6UBAFp7AIBeewCAvqUBAL+tAQC8sQEAvbEBAI/dJgCj8Q0AYnsAgGZ7AICmAQIAansAgG57AIClEQIAqg0CAKtlAgByewCAviAEAK5lAgCvbQIArHECAK1xAgCfoQwAnnkKAJ1pCgCc0QgAm7E2AJp1NgCZ0TQAmOEyAJdtMgCWZTIAlTU/AJRhPgCTcT4AkjU7AJFxOgCQeToAgJUAAIGdAACCoQAAensAgO9EAgDhdA8AfnsAgOMcDwDj1AEAgnsAgOHgAQDvXAEAo7UCAKJBAACh3Q4AoLkOALWpAwCGewCAhMAEALahAwCG8AUAh+QEALOFAwCKewCAvXEDALxpAwC/QQMAvnEDAI57AIC2eQCAu3EDALp5AwCC3ScAgwE7AL6EBwC+wAYAhhE/AIcZPwCEETsAhV06AIp9PgCLJTMAknsAgJZ7AICOuTUAjxU3AIw1MwCNgTMAkqE3AJPZCQC+xBkAmnsAgJaxDQCXUQ8AlHkLAJVhCwCaBQ8Am5EBAJ57AICiewCApnsAgN0AAACcfQMAqnsAgOFIDwCuewCA4xwOALJ7AIC2ewCAunsAgL57AIDCewCAsUEXALChFwCzqesBsgHoAbUB7AG0EesB74wOAMZ7AICpxR8AqAEcAKsBEACqkR8ArdkTAKzREwCv2RcArgUTAKHxAgDKewCAo8kHAKLBAgClARgApGUHAKehGwCm+RsAqCkFAKldBQCqVQUAq20FAKx5BQCteQUArm0FAK9hBQB2ewCAznsAgNJ7AIDWewCAgA0AAIGxAACCsQAA2nsAgLiJBQC5iQUAup0FALuVBQC8uQUAvbkFAL5RBgC/UQYAsOUFALHtBQCy5QUAs/0FALTtBQC13QUAttUFALe9BQCj3QUA3nsAgOJ7AICEDAAA5nsAgKb5BQCl8QUA6nsAgKspBQCqIQUAhpgAAIegAACvGQUArikFAK0pBQCsMQUA7nsAgLNhBgDyewCA9nsAgLYhBgD6ewCA/nsAgLUBBgC6rQcAu40HAAJ8AIAGfACAvo0HAL9xBwC8lQcAvY0HAL65BQC/uQUAvLkFAL25BQC6uQUAu7kFALi5BQC5uQUAtkkFALdJBQC0fQUAtXUFALJ5BQCzeQUAsBUFALF9BQCuXQUAr20FAKxFBQCtXQUAqqUKAKtdBQCovQoAqa0KAAp8AIAOfACAEnwAgBZ8AIAafACAHnwAgCJ8AIAmfACAqA0HAKkdBwCqLQcAq0kHAKxNBwCtZQcArrEGAK+xBgAqfACALnwAgDJ8AIA2fACAOnwAgD58AIBCfACARnwAgLhVBgC5XQYAulUGALtxBgC8NQYAvfEBAL7xAQC/8QEAsK0GALGNBgCyhQYAs50GALSNBgC1cQYAtnUGALdtBgCjpQQAgi0AAIEVAACAHQAASnwAgKblBAClxQQATnwAgKtJBQCqaQUAUnwAgFp8AICvtQUArkkFAK1JBQCsUQUAhmAcAIcIAwBefACAs4UCAGJ8AIC1gQIAtoECAGZ8AIBqfACAbnwAgLoJAwC7CQMAvBkDAL0ZAwC+CQMAvwkDAKxVAgCtXQIArmECAK9hAgCoDQIAqVUCAKpRAgCrUQIAhKwDAHJ8AIB2fACAenwAgIT8HQB+fACAgnwAgIZ8AIC8cQMAvXEDAL5xAwC/cQMAuHEDALlxAwC6cQMAu3EDALSRAwC1kQMAtpEDALeRAwCwkQMAsZEDALKRAwCzkQMAinwAgI58AICSfACAlnwAgJp8AIDhpAEAnnwAgOOAAQC+aBwAonwAgKZ8AIDv2AYAqnwAgK58AICyfACAtnwAgKOJAwCCLQAAgRUAAIAdAAC6fACApo0DAKWNAwC+fACAqwUCAKoFAgDCfACAynwAgK8FAgCuBQIArRUCAKwVAgCGIBwAh8QdAM58AIDSfACA1nwAgNp8AIDefACA72wGAOJ8AIDhbAcA5nwAgON0BwDqfACA7nwAgPJ8AID2fACAs5EBAPp8AID+fACAAn0AgAZ9AIC2sQEAtbkBAAp9AIC7VQEAukkBAA59AIASfQCAv/UAAL71AAC9RQEAvEUBAKNRHgDGfACAFn0AgBp9AIAefQCApnEeAKV5HgAifQCAq5UeAKqJHgAmfQCAKn0AgK81HwCuNR8ArYUeAKyFHgCAbQAAgRUAAIIdAADv/BkALn0AgDJ9AIA2fQCAOn0AgIbAAACHrAMAPn0AgEJ9AIBGfQCA4SwcAEp9AIDjzBwAqK0eAKnNHgCq2R4Aq9EeAKzxHgCt8R4Arj0eAK81HgCE7AAATn0AgFJ9AIBWfQCAWn0AgF59AIBifQCAZn0AgLjRHwC53R8Auu0fALvlHwC84R8AveEfAL7hHwC/4R8AsE0eALFRHgCyUR4As1EeALTxHwC18R8AtvEfALfxHwCobR4AqY0eAKqFHgCrnR4ArIUeAK2NHgCuuR4Ar7UeAGp9AIBufQCAcn0AgHZ9AIB6fQCAfn0AgIJ9AICGfQCAuJ0eALmtHgC6pR4Au0UBALxdAQC9RQEAvkUBAL91AQCw0R4AsdEeALLRHgCz0R4AtLUeALW9HgC2tR4At60eALMNHgCKfQCAjn0AgJJ9AICWfQCAtg0eALUNHgCafQCAuxUeALoVHgCefQCAon0AgL95HgC+cR4AvQUeALwFHgCCbQAAo0keAIBVAACBZQAApkkeAL6cAQCqfQCApUkeAKpRHgCrUR4Ah3wAAIZMAACuNR4Arz0eAKxBHgCtQR4AqF0CAKltAgCqZQIAq30CAKxpAgCtsQIArrECAK+xAgCE7AQArn0AgLJ9AIC2fQCAun0AgL59AIDCfQCAxn0AgLhxAwC5cQMAunEDALtxAwC81QMAvd0DAL7VAwC/zQMAsNECALHRAgCy0QIAs9ECALRRAwC1UQMAtlEDALdRAwCz7QIAyn0AgM59AIC+gAQA0n0AgLYxAgC14QIA1n0AgLsVAgC6FQIA2n0AgN59AIC/lQMAvpUDAL0FAgC8BQIA4n0AgKOpAgDmfQCA6n0AgKZ1AgDufQCA8n0AgKWlAgCqUQIAq1ECAPZ9AID6fQCArtEDAK/RAwCsQ
QIArUECAKjZAgCpIQEAqiEBAKshAQCsIQEArSEBAK4hAQCvIQEA/n0AgAJ+AIAGfgCAviAEAAp+AIAOfgCAEn4AgBp+AIC4jQEAuZEBALqRAQC7pQEAvL0BAL11AAC+fQAAv3UAALDlAQCx7QEAsvkBALPxAQC02QEAtdkBALa5AQC3tQEA4RgeAB5+AIDjKB8AIn4AgIGlAACApQAAJn4AgIKlAACGAAQAh/QFACp+AIAufgCAMn4AgDZ+AIDvYB4AOn4AgD5+AIBCfgCAhfD0AUZ+AIBKfgCA42QBAE5+AIDhpAEAUn4AgO/IAABWfgCAWn4AgFZ8AICE/AUAXn4AgGJ+AICzKQYAFn4AgGZ+AIBqfgCAbn4AgLYhBgC1KQYAcn4AgLupBgC6oQYAdn4AgHp+AIC/nQYAvp0GAL2lBgC8rQYA4bQHAH5+AIDjeAQAgn4AgIB9AACBEQAAghUAAIZ+AICGwAAAh1gDAIp+AICOfgCAkn4AgJZ+AIDvDAQAmn4AgKOpBgCefgCAon4AgKZ+AICqfgCApqEGAKWpBgCufgCAqykGAKohBgCyfgCAtn4AgK8dBgCuHQYArSUGAKwtBgC6fgCAs0kHAL5+AIDCfgCAtn0HAMZ+AIDKfgCAtXUHALpdBwC7JQcAzn4AgNJ+AIC+IQcAvy0HALw9BwC9MQcAqD0GAKmBBgCqhQYAq5UGAKy5BgCtuQYArqkGAK+pBgDWfgCA2n4AgN5+AIDifgCA5n4AgIK5AACBsQAAgLkAALitBgC5vQYAurUGALtFAQC8XQEAvUUBAL5FAQC/dQEAsN0GALGlBgCyrQYAs6EGALShBgC1rQYAtpkGALeVBgCjDQYA6n4AgO5+AIDyfgCAhJgCAKY5BgClMQYAvpwBAKthBgCqGQYAhggAAId8AQCvaQYArmUGAK11BgCseQYA+n4AgLO1AQD+fgCAAn8AgLZVAQAGfwCACn8AgLWhAQC6cQEAu3kBAA5/AIASfwCAvjEBAL89AQC8UQEAvVEBAKhpAgCpaQIAqnkCAKt5AgCsbQIArZECAK6RAgCvkQIAFn8AgBp/AIAefwCAIn8AgCZ/AIAqfwCALn8AgDJ/AIC4mQIAua0CALqlAgC7bQMAvHUDAL19AwC+dQMAv20DALDxAgCx+QIAssECALPBAgC0sQIAtb0CALa1AgC3qQIANn8AgDp/AIA+fwCAo/0CAEJ/AICl6QIAph0CAEZ/AIBKfwCATn8AgKo5AgCrMQIArBkCAK0ZAgCueQIAr3UCAFJ/AIBWfwCAWn8AgIQADACAGQAAgQkAAII5AABefwCAYn8AgGp/AIBufwCAvuAMAHJ/AIB2fwCAhlgNAIcMAwCowQIAqc0CAKrFAgCr2QIArMkCAK39AgCu9QIArz0BAHp/AIB+fwCAgn8AgIZ/AICKfwCAjn8AgJJ/AIC+MAwAuMUBALnNAQC62QEAu9EBALzxAQC98QEAvpkBAL+ZAQCwRQEAsU0BALJFAQCzXQEAtEUBALVNAQC2RQEAt/0BAOE4BgCWfwCA42wGAJp/AICefwCAon8AgKZ/AICqfwCAhKgNAK5/AICyfwCAtn8AgL6wDwC6fwCA72wGAL5/AIDCfwCApn0AgMZ/AIDKfwCA41AAAM5/AIDhoAEA0n8AgO+EAADafwCAhyANAIZMDwCAPQAAgSEAAIIlAADefwCAs80NAGZ/AIDWfwCA4n8AgOZ/AIC2/Q0AtcENAOp/AIC7CQ4AugEOAO5/AIDyfwCAvwkOAL4BDgC9CQ4AvBEOAPZ/AIDjmAwA+n8AgOH8DwD+fwCAAoAAgAaAAIAKgACADoAAgBKAAIAWgACAGoAAgB6AAIDvYAwAIoAAgCaAAICjTQ0AKoAAgC6AAIAygACANoAAgKZ9DQClQQ0AOoAAgKuJDgCqgQ4APoAAgEKAAICviQ4AroEOAK2JDgCskQ4Agm0AALM1DgCAVQAAgWUAALb1DwCE3AMARoAAgLX9DwC60Q8Au9EPAIYABACH3AAAvn0PAL9lDwC8wQ8AvXkPAKjlDwCp7Q8AqvkPAKv5DwCsMQ4ArTEOAK4xDgCvMQ4ASoAAgE6AAIBSgACAVoAAgFqAAIBegACAYoAAgGaAAIC43Q4AueEOALrhDgC74Q4AvOUOAL3pDgC+mQ4Av5UOALBRDgCxUQ4AslEOALPpDgC0/Q4AteUOALbtDgC35Q4Ao3EPAGqAAIBugACAcoAAgHaAAICmsQ4ApbkOAHqAAICrlQ4AqpUOAH6AAICCgACAryEOAK45DgCtPQ4ArIUOAIaAAICzyQEAioAAgI6AAIC2+QEAkoAAgJaAAIC1wQEAuqkBALu1AQCagACAnoAAgL6tAQC/lQEAvK0BAL2lAQCo5Q0AqfkNAKoFAgCrHQIArA0CAK09AgCuNQIAr10CAKKAAICmgACAqoAAgK6AAICAGQAAgRkAAIIFAACygACAuC0CALk1AgC6MQIAuzECALzVAgC93QIAvtUCAL/NAgCwKQIAsTUCALI9AgCzNQIAtC0CALUVAgC2HQIAtxUCALqAAICEnAIAvoAAgKOBAgDCgACApYkCAKaxAgDGgACAhiAEAIfUAwCq4QIAq/0CAKzlAgCt7QIAruUCAK/dAgC29QMAvkQDAIWM/QG1/QMAyoAAgLP9AwDOgACA0oAAgL59AwC/TQMAvGUDAL19AwC6dQMAu30DANaAAIDagACA3oAAgOKAAICEBAIAoyUCAOaAAIClJQIApi0CAOqAAIDugACA8oAAgKqtAgCrpQIArL0CAK2lAgCupQIAr5UCAPaAAID6gACA/oAAgAKBAIAGgQCA48ADAAqBAIDhrAEADoEAgO9YAwASgQCAFoEAgIANAACB5QAAgu0AABqBAIDhYA8A40ABAOM4DgDheA4AHoEAgCKBAIC+lAUAKoEAgIYABACHZAUALoEAgDKBAIA2gQCA7/wOAO98DgA6gQCAs1EBAD6BAID2fgCAQoEAgEaBAIC2DQEAtQkBAEqBAIC74QAAuhkBAE6BAIBSgQCAv9EAAL7pAAC96QAAvPkAALaAAIAmgQCAVoEAgFqBAIBegQCAYoEAgGaBAIBqgQCAqKEGAKmtBgCquQYAq7EGAKzhBgCt7QYAruUGAK/FBgCwvQYAsUUHALJNBwCzXQcAtE0HALV1BwC2fQcAtx0HALglBwC5LQcAuiUHALs9BwC8KQcAvRUHAL4RBwC/EQcAoxEGAG6BAIBygQCAdoEAgHqBAICmTQYApUkGAH6BAICroQcAqlkGAIKBAICGgQCAr5EHAK6pBwCtqQcArLkHAIANAACBFQAAgh0AAIqBAICOgQCAkoEAgISUAwC+lAMAloEAgJqBAICGyAAAh4wAAJ6BAICigQCApoEAgKqBAIConQYAqa0GAKqlBgCrvQYArK0GAK3RBgCu1QYAr80GAK6BAICygQCAtoEAgLqBAIC+gQCAwoEAgMaBAIDKgQCAuF0BALnBAQC6wQEAu8EBALzBAQC9yQEAvvEBAL/xAQCwvQYA
sY0GALKFBgCzZQEAtH0BALVlAQC2bQEAt2UBALMtBgDOgQCA0oEAgNaBAIDagQCAtlEGALUlBgDegQCAu0kGALp5BgDigQCA5oEAgL+hAQC+uQEAvbEBALxRBgDqgQCAo2kGAO6BAIDygQCAphUGAPaBAID6gQCApWEGAKo9BgCrDQYA/oEAgAKCAICu/QEAr+UBAKwVBgCt9QEAutUHALvdBwC4wQcAucEHAL4xBAC/MQQAvPEHAL3xBwCyrQcAs7UHALCtBwCxpQcAtp0HALf1BwC0pQcAtZUHAKppBwCraQcAqGkHAKlpBwCuaQcAr2kHAKxpBwCtaQcAgLkDAIGNAwCChQMAhKgDAIZQ/AGHCAMAvjQDAAqCAICoZQIAqXUCAKp9AgCrdQIArG0CAK21AwCuvQMAr7UDAA6CAIASggCAFoIAgBqCAIAeggCAIoIAgCaCAIAqggCAuFEDALlZAwC6YQMAu2EDALwRAwC9HQMAvhUDAL8JAwCwzQMAsdUDALLdAwCz1QMAtM0DALVxAwC2cQMAt3EDAC6CAIAyggCAs/0DADaCAIC17QMAOoIAgD6CAIC2PQIAQoIAgEaCAIC7GQIAugECAL0JAgC8AQIAv70CAL4BAgBKggCAToIAgITE/QG+wPwBUoIAgFaCAIBaggCA79wDAF6CAIDhlAEAYoIAgOMQAwBmggCAgu0AAIHtAACA7QAA4TgGAOE8BwDjQAEA45QGAGqCAIBuggCAcoIAgHqCAICGgPwBh+j9AX6CAICCggCAhoIAgIqCAIDvnAEA79wGAKM1AwCOggCAkoIAgJaCAICaggCApvUCAKUlAwCeggCAq9ECAKrJAgCiggCApoIAgK91AgCuyQIArcECAKzJAgB2ggCAqoIAgK6CAICyggCA76T9AbaCAIC6ggCAvoIAgON4/QHCggCA4UD8AcaCAIDKggCAzoIAgNKCAIDWggCAs+X+AYItAACBFQAAgB0AANqCAIC25f4BtfX+Ad6CAIC7Yf8Butn+AeKCAICE5AMAv2n/Ab5h/wG9df8BvHn/Aaj9/gGpJf4Bqi3+Aasl/gGsPf4BrSX+Aa4t/gGvJf4BviwAAOaCAICGiAAAh+wAAOqCAIDuggCA8oIAgPaCAIC4gf8BuYH/AbqZ/wG7mf8BvIn/Ab21/wG+sf8Bv63/AbBd/gGx5f8Bsu3/AbPh/wG05f8Bte3/AbbZ/wG32f8Bo6X/AfqCAID+ggCAAoMAgAaDAICmpf8BpbX/AQqDAICrIf4Bqpn/AQ6DAIASgwCAryn+Aa4h/gGtNf4BrDn+ARaDAICz6f4BGoMAgB6DAIC2lf4BIoMAgCaDAIC16f4BurH+Abu5/gEqgwCALoMAgL51AQC/fQEAvJH+Ab2R/gGoHf4BqS3+Aaol/gGrPf4BrCX+Aa1R/gGuUf4Br1H+ATKDAIA2gwCAOoMAgD6DAIBCgwCARoMAgEqDAIBOgwCAuNkBALnZAQC67QEAu+EBALzhAQC94QEAvuEBAL/hAQCwMf4BsTn+AbIB/gGzAf4BtPUBALX9AQC29QEAt+kBAKOt/QFSgwCAvkwDAFqDAIBegwCAptH9AaWt/QFigwCAq/39Aar1/QFmgwCAaoMAgK85AgCuMQIArdX9AazV/QGA+QMAgfkDAIJNAACFdCAAboMAgITYAwCE1AQAcoMAgIZABACHVAMAdoMAgHqDAIB+gwCAgoMAgIaDAIC+8AUAqDECAKkxAgCqMQIAqzECAKyVAwCtnQMArpUDAK+NAwCKgwCAjoMAgJKDAICWgwCAhHwHAJqDAICegwCAooMAgLipAwC5qQMAumkDALtpAwC8eQMAvXkDAL5pAwC/aQMAsP0DALHNAwCyxQMAs60DALS5AwC1uQMAtq0DALelAwCmgwCAqoMAgK6DAICygwCAtoMAgLqDAIDv6AMAvoMAgOGQAQDCgwCA42wDAMqDAICAJQAAgSkAAIIdAADOgwCAs/kDANKDAICGaAcAh1wFANaDAIC2XQIAtV0CANqDAIC7SQIAunkCAN6DAIDigwCAvz0CAL49AgC9OQIAvFECAOaDAIDhPP4BvkAGAOPwAQDqgwCA7oMAgPKDAID2gwCA+oMAgP6DAIAChACABoIAgAaEAIAKhACADoQAgO/kAQAShACAFoQAgKNxAwAahACApdUCAB6EAIAihACAptUCACaEAIAqhACAq8ECAKrxAgCtsQIArNkCAK+1AgCutQIA4dz8AcaDAIDjUAQA74gEAID1BwCBCQAAgj0AAC6EAICEJAEAMoQAgDaEAIA6hACAPoQAgOFMBADv5BwA43QEALNdBgBChACAhgAMAIfgAwBGhACAtgUGALV1BgBKhACAuxEGALoJBgBOhACAUoQAgL/VBgC+1QYAvQEGALwJBgCojQYAqZUGAKqVBgCrpQYArL0GAK3FBgCuxQYAr/UGAFaEAIBahACAXoQAgGKEAIBmhACAaoQAgG6EAIByhACAuHUGALl9BgC6dQYAu80HALzVBwC93QcAvtUHAL/NBwCwjQYAsZUGALKdBgCzlQYAtFEGALVRBgC2UQYAt1EGAKMdBwCPFewBdoQAgHqEAIB+hACApkUHAKU1BwCChACAq1EHAKpJBwCGhACAioQAgK+VBwCulQcArUEHAKxJBwCeRfkBn6X5AZyR/QGdTfkBmlX9AZtd/QGYBfEBmZX+AZal8gGXYfEBlG31AZU19QGS4ekBk4X2AZBV7AGRXekBsbEdALClHQCziRkAskEcALUBJAC09RkAjoQAgJKEAICWhACAgqkDAIGhAwCAaQAAohUFAKMFAgCgFQYAob0FAKHFAQCahACAo80NAKLlAQClAQgApN0NAKfRCQCm2QkAqQEUAKilCACrxRQAqs0VAK3REQCsARAArwEcAK51EQCCEe8BgynvAZ6EAICihACAhuH1AYcR9gGEOeoBhY3qAYp59gGL4fEBvqQMAKqEAICO+f0BjzH+AYw98gGNYfIBkkn+AZOd/gGHCAwAhmwMAJax+gGX+QUAlFn6AZVZ+gGaYQYAm8EGAK6EAICyhACAtoQAgLqEAICcyQEAvoQAgKitBQCpuQUAqs0FAKvdBQCszQUArf0FAK71BQCvHQUAwoQAgMaEAIDKhACAzoQAgNKEAIDWhACA2oQAgN6EAIC4dQUAuX0FALoJBQC7CQUAvB0FAL0BBQC+AQUAvz0FALBxBQCxcQUAsnEFALNxBQC0UQUAtVEFALZRBQC3TQUAs0UEAOKEAIDmhACA6oQAgO6EAIC2fQQAtUUEAPKEAIC7tQQAurUEAPaEAID6hACAv5UEAL6VBAC9pQQAvKUEAP6EAICjAQQAAoUAgAaFAICmOQQACoUAgA6FAIClAQQAqvEEAKvxBAAShQCAhOwNAK7RBACv0QQArOEEAK3hBADh0AYAhAwMAOMoBwC+AAwAGoUAgO9EAwCGuAwAhywNAB6FAIDjlAEAIoUAgOH8AQBWgwCAJoUAgO/IBgAqhQCALoUAgDKFAICzjQMANoU
AgLWNAwA6hQCAPoUAgLa1AwBChQCARoUAgLtBAwC6SQMAvUEDALxZAwC/QQMAvkkDAKNFDACmhACAFoUAgEqFAIBOhQCApn0MAKVFDABShQCAq4kMAKqBDABWhQCAWoUAgK+JDACugQwArYkMAKyRDACAFQ8AgR0PAIIhDwCzIQ4AXoUAgLUhDgC2JQ4AYoUAgGaFAIBqhQCAusEOALvBDgC8wQ4AvcEOAL7BDgC/wQ4AqK0OAKntDgCq5Q4Aq/0OAKzlDgCt6Q4ArjkOAK85DgBuhQCAcoUAgHaFAIB6hQCAgB0AAIEJAACCvQEAfoUAgLjNDwC51Q8AutUPALvlDwC8/Q8AvZUPAL6RDwC/kQ8AsEkOALFJDgCyWQ4As1kOALRJDgC1SQ4Atv0PALf1DwCjbQ8AgoUAgL6EAQCKhQCAjoUAgKZpDwClbQ8AkoUAgKuNDwCqjQ8AhogAAIdsAQCvjQ8Aro0PAK2NDwCsjQ8AloUAgLPtDgCahQCAnoUAgLaRDgCihQCApoUAgLXhDgC6tQ4Au70OAKqFAICuhQCAvn0BAL9lAQC8mQ4AvZkOAKgRDgCpJQ4AqiEOAKs5DgCsLQ4ArVUOAK5dDgCvUQ4AhKgAALKFAIC2hQCAuoUAgL6FAIDChQCAxoUAgMqFAIC47QEAuZUBALqVAQC7rQEAvLUBAL11AQC+fQEAv3UBALA1DgCxPQ4AsgkOALMJDgC0/QEAteUBALblAQC31QEAo6kNAM6FAIDShQCA1oUAgNqFAICm1Q0ApaUNAN6FAICr+Q0AqvENAOKFAIDmhQCAryECAK45AgCt3Q0ArN0NAIANAACBFQAAgh0AAOqFAIDuhQCA8oUAgIeQAwCGfAQAvuwEAPqFAID+hQCAAoYAgAaGAIAKhgCADoYAgBKGAICyLQ4AszUOALAtDgCxJQ4Ati0OALedDwC0LQ4AtSUOALq9DwC7jQ8AuKUPALm9DwC+LQ8AvxUPALyVDwC9JQ8AFoYAgBqGAIAehgCAIoYAgCaGAIAqhgCALoYAgDKGAICqpQ4Aq7UOAKjFDgCp3Q4Arp0OAK9VDgCspQ4ArZUOAKgNAgCpFQIAqhUCAKtNAgCsWQIArVkCAK5NAgCvRQIAhKgFADaGAIA6hgCAPoYAgIS4BABChgCARoYAgEqGAIC4/QIAuUEBALpBAQC7QQEAvEEBAL1JAQC+cQEAv3EBALAJAgCxCQIAss0CALPFAgC03QIAtcUCALbNAgC3xQIA4dQPAOMQDgDj9A4A4QwOAE6GAIBShgCAVoYAgFqGAIBehgCAYoYAgL4kBABqhgCA7AAAAO9EAADvzA4AboYAgIJlAACz2QIAgFUAAIFtAAC2nQIAcoYAgHaGAIC1lQIAuokCALuJAgCGqAQAh+AEAL5dAgC/RQIAvF0CAL1VAgCjHQUA9oUAgGaGAIB6hgCAfoYAgKZZBQClUQUAgoYAgKtNBQCqTQUAhoYAgIqGAICvgQUArpkFAK2RBQCsmQUAjoYAgLMpBgCShgCAloYAgLYpBgCahgCAnoYAgLUpBgC6pQYAu60GAKKGAICmhgCAvqUGAL+tBgC8tQYAva0GAKjlBgCp7QYAquUGAKv9BgCs5QYAre0GAK7lBgCvXQYAqoYAgK6GAICyhgCAtoYAgLqGAIC+hgCAwoYAgMaGAIC46QcAuekHALr9BwC79QcAvO0HAL1FBwC+TQcAv0UHALAlBgCxLQYAsiUGALM9BgC0JQYAtS0GALYlBgC32QcAo20HAIItAACBFQAAgB0AAMqGAICmbQcApW0HAM6GAICr6QcAquEHANKGAIC+oAEAr+kHAK7hBwCt6QcArPEHANaGAICzkQYAhugAAIcsAQC2QQEA2oYAgN6GAIC1UQEAuk0BALslAQDihgCA5oYAgL4lAQC/LQEAvDEBAL0xAQCwrQEAscUBALLBAQCzwQEAtMUBALXNAQC28QEAt/EBALgBAQC5AQEAugEBALsBAQC8AQEAvQEBAL4BAQC/AQEA6oYAgO6GAIDyhgCA9oYAgIaFAID6hgCA/oYAgAKHAICoTQYAqVkGAKo9BgCrNQYArP0BAK3lAQCu5QEAr9UBAKPVBQAGhwCACocAgA6HAIAShwCApgUCAKUVAgAWhwCAq2ECAKoJAgAahwCAHocAgK9pAgCuYQIArXUCAKx1AgAihwCAJocAgCqHAIAuhwCAMocAgOFkBQA2hwCA4+wFAIARAACBEQAAghEAAO/0BgA6hwCAPocAgEKHAIC+MAMAhMQCAEqHAICz4QMAhMAcALVRAwBOhwCAUocAgLZZAwBWhwCAWocAgLtxAwC6eQMAvbUAALxpAwC/tQAAvrUAAF6HAIDhlAEAYocAgONcAgCGcBwAh0QDAGaHAIBqhwCAbocAgHKHAIB2hwCAeocAgH6HAICChwCAhocAgO94AgCoVQIAqV0CAKphAgCrYQIArNECAK3RAgCu0QIAr9ECAIqHAICOhwCAkocAgJaHAICahwCAnocAgKKHAICmhwCAuGkBALlpAQC6CQEAuwkBALwZAQC9GQEAvgkBAL8FAQCwtQIAsb0CALK1AgCzaQEAtHkBALV5AQC2aQEAt2EBAOHEBwDjpAYA47gGAOF8BgCADQAAgTUAAII9AACqhwCArocAgLKHAIC+4B0AuocAgL6HAIDvYAAA7+gGAMKHAICjqQIAxocAgMqHAIDOhwCA0ocAgKYRAgClGQIA1ocAgKs5AgCqMQIAhkgcAIfMHACv/QEArv0BAK39AQCsIQIAqIUeAKmRHgCqkR4Aq60eAKy1HgCt1R4ArtEeAK/FHgC2hwCA2ocAgN6HAIDihwCA5ocAgOqHAIDuhwCA8ocAgLhhHwC5YR8AumEfALthHwC8YR8AvWEfAL5hHwC/YR8AsL0eALGFHgCyjR4As4UeALSdHgC1hR4Ato0eALeFHgCzGR4A9ocAgPqHAID+hwCAAogAgLZVHgC1PR4ABogAgLtBHgC6eR4ACogAgA6IAIC/QR4AvlkeAL1RHgC8WR4AEogAgKNdHgAWiACAGogAgKYRHgAeiACAIogAgKV5HgCqPR4AqwUeAISkAwC+qAMArh0eAK8FHgCsHR4ArRUeAKitHgCptR4AqrUeAKvJHgCs2R4ArdkeAK7JHgCvwR4AgO0BAIHxAQCC8QEAJogAgIaQAACHdAEAKogAgC6IAIC4yQEAuckBALrZAQC70QEAvPkBAL35AQC+mQEAv5UBALBFAQCxTQEAskUBALNdAQC0RQEAtU0BALZFAQC3+QEAsz0eADKIAIA2iACAOogAgD6IAIC2WR4AtVEeAEKIAIC7iQEAuoEBAEaIAIBKiACAv4kBAL6BAQC9iQEAvJEBAE6IAIBSiACAo3UeAFaIAIClGR4AWogAgF6IAICmER4ARocAgGKIAICrwQEAqskBAK3BAQCs2QEAr8EBAK7JAQBmiACAaogAgG6IAIByiACAdogAgIQYAgB6iACAfogAgIKIAICGiACAiogAgI6IAICSiACAmogAgJ
6IAIC+cAMAgGkAAIFpAACCeQAAhAAEAIbwBACHdAMAoogAgO8MHwCmiACA4aweAKqIAIDj8B4ArogAgLKIAIC2iACAuogAgL6IAIDCiACAxogAgMqIAIDvVAIAzogAgNKIAIDWiACA46QCANqIAIDhgAEA3ogAgOKIAIDmiACA6ogAgO6IAICzRQMA8ogAgPaIAID6iACA/ogAgLZFAwC1VQMAAokAgLshAwC6SQMAvqAEAAqJAIC/KQMAviEDAL01AwC8OQMAqDkCAKk5AgCqjQIAq4UCAKydAgCthQIAroUCAK+1AgCA7QEAgfUBAIL1AQAOiQCAhpAEAIcEBQASiQCAFokAgLhFAQC5TQEAukUBALtdAQC8SQEAvUkBAL55AQC/eQEAsM0CALGlAgCyrQIAs6ECALSlAgC1rQIAtp0CALd9AQAaiQCAHokAgCKJAIAmiQCAKokAgC6JAIAyiQCA74gBAITsBADhVB4ANokAgONUAQA6iQCAPokAgEKJAIBGiQCAo0UCAEqJAIBOiQCAUokAgFaJAICmRQIApVUCAFqJAICrIQIAqkkCAF6JAIBiiQCArykCAK4hAgCtNQIArDkCAKg1BgCpPQYAqlEGAKttBgCseQYArWUGAK5tBgCvZQYABokAgGaJAIBqiQCAbokAgIAZAACBGQAAggUAAHKJAIC45QYAuekGALr5BgC7+QYAvOkGAL3pBgC+nQYAv5UGALAdBgCx5QYAsu0GALPlBgC0/QYAteEGALbhBgC34QYAs9kGAL7QAwB2iQCAeokAgH6JAIC25QYAtfEGAIKJAIC7IQYAutkGAIaYAACHeAMAvyUGAL45BgC9MQYAvDkGAIaJAICjnQYAiokAgI6JAICmoQYAkokAgJaJAICltQYAqp0GAKtlBgCaiQCAnokAgK59BgCvYQYArH0GAK11BgCo7QcAqSkGAKoxBgCrMQYArJEGAK2RBgCukQYAr5EGAKKJAICmiQCAqokAgK6JAICyiQCAtokAgLqJAIC+iQCAuIUGALmNBgC6hQYAu50GALyNBgC9vQYAvrUGAL95AQCw8QYAsfEGALLxBgCzxQYAtMEGALXBBgC2wQYAt8EGALO5BgDCiQCAxokAgMqJAIDOiQCAthEGALUZBgDSiQCAuzUGALo1BgDWiQCA2okAgL8FBgC+BQYAvREGALwlBgClQQYA3okAgOKJAICmSQYAgRUAAIB5AACj4QYAghUAAK1JBgCsfQYAr10GAK5dBgCENAEAlogAgKttBgCqbQYAvswDAOqJAICzlQIA7okAgLXZAgDyiQCA9okAgLbRAgCGgAwAhzgDALvFAgC6xQIAvRUDALwVAwC/FQMAvhUDAPqJAID+iQCA71gGAIRAAwACigCABooAgAqKAIAOigCAEooAgBaKAIAaigCAHooAgOE4BgAiigCA4yQGAL5wDACsSQIArUkCAK5dAgCvVQIAqB0CAKkFAgCqBQIAq10CAISoDAAmigCAKooAgC6KAIC+vA0AMooAgDaKAIA6igCAvE0DAL1VAwC+VQMAv2UDALjpAwC56QMAul0DALtVAwC0yQMAtckDALbZAwC32QMAsBkCALEZAgCy2QMAs9kDAD6KAIDj5AAAQooAgOG8AQBGigCAgj0AAIE9AACAPQAASooAgE6KAIBSigCAWooAgF6KAIDvzAMAYooAgGaKAICj3QMAaooAgIboDACHYA0AbooAgKaZAwClkQMAcooAgKuNAwCqjQMAdooAgHqKAICvXQIArl0CAK1dAgCsXQIAfooAgIKKAICGigCAiooAgI6KAICSigCAlooAgO/gAQCEvAwA4YwGAJqKAIDjHAYAnooAgKKKAICmigCAqooAgLPVAQCuigCAsooAgLaKAIC6igCAtpEBALWZAQC+igCAu70BALq9AQDCigCAyooAgL+dAQC+nQEAvZ0BALydAQCoBQ4AqQkOAKodDgCrFQ4ArFEOAK1RDgCuSQ4Ar0kOAFaKAICCzQ8AgfUPAID9DwDGigCAzooAgIYcAACHsAMAuOkOALnpDgC6/Q4Au/UOALztDgC9VQ8AvlEPAL9NDwCwOQ4AsTkOALIJDgCzCQ4AtBkOALUZDgC2DQ4At9kOAKOVDgDSigCA1ooAgNqKAIDeigCAptEOAKXZDgDiigCAq/0OAKr9DgDmigCA6ooAgK/dDgCu3Q4Ard0OAKzdDgDuigCAs/0PAPKKAID2igCAtoEPAPqKAID+igCAtZkPALqNDwC7ZQ8AAosAgAaLAIC+fQ8Av2UPALx9DwC9dQ8AqC0OAKk1DgCqMQ4AqzEOAKxVDgCtRQ4ArkUOAK91DgAKiwCADosAgBKLAIAWiwCAGosAgB6LAIAiiwCAJosAgLjpDgC59Q4Auv0OALv1DgC87Q4AvZEOAL6RDgC/kQ4AsA0OALHlDgCy7Q4As+UOALT9DgC15Q4Atu0OALflDgCjuQ4Agi0AAIEVAACAHQAAKosAgKbFDgCl3Q4ALosAgKshDgCqyQ4AMosAgL4sAQCvIQ4ArjkOAK0xDgCsOQ4AOosAgLZVAQC1RQEANosAgLNVAQA+iwCAhngAAIdcAAC/OQEAvjEBAL0lAQC8JQEAuzEBALpZAQDmiQCAQosAgEaLAIBKiwCAhAQDAKOJAgBOiwCApZkCAKaJAgBSiwCAvyg5AFaLAICqhQIAq+0CAKz5AgCt+QIAru0CAK/lAgDjWAIA78AOAOGIAQBaiwCAXosAgGKLAIBmiwCAaosAgG6LAIByiwCAdosAgHqLAIDvKAIA4ygOAH6LAIDhRA4AqbUCAKhpDQCrAQIAqgkCAK0BAgCsGQIArzECAK4BAgC+AAQAgosAgIaLAICKiwCAjosAgJKLAICWiwCAmosAgLnlAwC45QMAu+UDALrlAwC95QMAvOUDAL/lAwC+5QMAsSECALBJAgCzJQIAsiUCALUpAgC0IQIAtxUCALYVAgCowQIAqdECAKr1AgCrDQEArBUBAK0FAQCuBQEArzkBAJ6LAICiiwCAqosAgK6LAICyiwCAtosAgLqLAIC+iwCAuC0BALk9AQC67QEAu+UBALz9AQC95QEAvu0BAL/lAQCwLQEAsTUBALI9AQCzNQEAtC0BALUVAQC2HQEAtxUBAIA9AQCBpQAAgq0AAO/YAACGsAUAh9gFAMKLAIDv1A8AhGwEAOH0DgDGiwCA4xwPAMqLAIDhlAEAzosAgOMMDgCzPQIA0osAgNaLAIDaiwCA3osAgLbFAQC13QEA4osAgLuxAQC6qQEA5osAgOqLAIC/kQEAvqkBAL2hAQC8qQEAposAgO6LAICqRQYAq10GAKxFBgCtTQYArkUGAK99BgDyiwCA9osAgPqLAICj0QUA/osAgKUxBgCmKQYAAowAgAaMAICCHQAAgR0AAIAdAAAKjACADowAgBKMAIC+lAMAFowAgBqMAICGSAMAh8wDAB6MAIAijACAJowAgCqMAICoqQcAqakHAKq5BwCruQcArKkHAK2pBwCuAQcArzUHAC6MA
IAyjACANowAgDqMAIA+jACAQowAgEaMAIBKjACAuC0HALnBAAC66QAAu+kAALz5AAC95QAAvuUAAL+dAACwUQcAsV0HALItBwCzJQcAtD0HALUlBwC2JQcAtxUHALMxBgBOjACAUowAgFaMAIBajACAtikGALUhBgBejACAu5kGALqVBgBijACAZowAgL/hBgC++QYAvfEGALz5BgBqjACAo3UGAG6MAIByjACApm0GAHaMAIB6jACApWUGAKrRBgCr3QYAfowAgIKMAICuvQYAr6UGAKy9BgCttQYAqOUBAKn1AQCq/QEAq/UBAKztAQCtNQEArj0BAK81AQCA+QAAgc0AAILFAACEYAEAvngBAIqMAICHrAAAhpABALjRAAC52QAAuuEAALvhAAC8kQAAvZ0AAL6VAAC/iQAAsE0BALFVAQCyXQEAs1UBALRNAQC18QAAtvEAALfxAACzdQIAjowAgJKMAICWjACAmowAgLa1AgC1ZQIAnowAgLuRAgC6iQIAoowAgKaMAIC/NQMAvokCAL2BAgC8iQIAqowAgKMxAgCujACAhMADAKbxAgCyjACAtowAgKUhAgCqzQIAq9UCALqMAIC+jACArs0CAK9xAwCszQIArcUCAKuNAACqjQAAqY0AAKg5AwCvvQAArr0AAK2FAACsjQAAqgAAAKsAAADCjACAxowAgMqMAIDOjACA0owAgNaMAIC7fQAAun0AALl9AAC4fQAAv90BAL7dAQC93QEAvN0BALO5AACysQAAsaEAALCtAAC3XQAAtl0AALWVAAC0lQAA2owAgN6MAIDijACA5owAgIE1AACADQAA6owAgII1AAC+rD0A7owAgPKMAICFaD0A+owAgP6MAICGODwAh8ACALNJAQACjQCA0AAAAAaNAIAKjQCAtkkBALVJAQAOjQCAuykBALolAQASjQCAFo0AgL8dAQC+HQEAvSEBALwpAQDjNDYA4QwGAOGwAgDjPAYAGo0AgB6NAIAijQCAJo0AgIQsPwC+oD8AKo0AgC6NAIDvfDcAMo0AgDaNAIDvGAEAOo0AgD6NAICGaD4Ah8w/AEKNAIBGjQCASo0AgO+UAABOjQCA4ZQBAFKNAIDjUAAAVo0AgILpPwCB6T8AgPE/AKMJPgCPASQA9owAgFqNAIBejQCApgk+AKUJPgBijQCAq2k+AKplPgBmjQCAao0AgK9dPgCuXT4ArWE+AKxpPgCeYTgAn3U4AJzBNACdtTkAmqU1AJt1NACYeTAAmXExAJYhLQCXhTEAlG0sAJVlLACSeSgAk6UtAJBRJACReSgAsQ0UALAFFACzARgAslUUALV5GAC0tRgAbo0AgHKNAIB2jQCAeo0AgH6NAICCjQCAotE8AKMlAQCgdTkAob08AKHJAACGjQCAowEEAKLlAAClHQQApPUEAKf5CACmAQgAqQEMAKhtCACrzQwAqs0MAK3REACsARAAr9URAK7ZEACCBSUAgy0lAIqNAICOjQCAhsEsAIcRLQCEHSkAhRUpAIopLQCLZSwAko0AgJaNAICOHTAAj8E0AIzZMACNHTEAkmE1AJPNNQCajQCAno0AgJZhOQCXmTgAlKE4AJV9OQCaYT0AmwU9AKKNAICmjQCAqo0AgK6NAICc6QAAso0AgLaNAIC6jQCAvo0AgMKNAICGjACAxo0AgMqNAIDOjQCAqJE+AKmRPgCq7T4Aq+E+AKzhPgCt6T4ArtE+AK/RPgCwUT4AsVE+ALJRPgCzUT4AtHk+ALV5PgC2bT4At2U+ALghPgC5IT4Aujk+ALs5PgC8KT4AvRU+AL4RPgC/DT4AgJkDAIGZAwCCBQAA0o0AgL5UAwDhsD0A2o0AgONAPgCEOAIA3o0AgOKNAIDv9D8A5o0AgOqNAICGmAQAhxwDALMFPQCECAQA7o0AgPKNAID2jQCAtgk9ALUJPQD6jQCAu/U9ALr1PQD+jQCAAo4AgL/dPQC+3T0AveU9ALzlPQAGjgCACo4AgKPNPQC+xAQApcE9AA6OAIASjgCApsE9ABaOAIAajgCAqz09AKo9PQCtLT0ArC09AK8VPQCuFT0AtmkCAB6OAIAijgCAtWkCACaOAICzSQIAKo4AgC6OAIC+qQMAv6kDALzBAwC9wQMAuvkDALv5AwAyjgCANo4AgKgtAwCpnQMAqpUDAKutAwCstQMArb0DAK61AwCv2QMAgA0AAIEVAACCHQAAOo4AgD6OAIBCjgCAh7QFAIacBAC4MQIAuTECALo1AgC7zQIAvNUCAL3dAgC+1QIAv8kCALBpAgCxaQIAskECALNBAgC0OQIAtTkCALYRAgC3EQIASo4AgOM0PgBOjgCA4aw+AFKOAIDvfAMAVo4AgFqOAIBejgCA45QDAGKOAIDhfD4AZo4AgO/oPgBqjgCAbo4AgHKOAIB2jgCAo1UDAHqOAICldQMAfo4AgIKOAICmdQMAho4AgIqOAICr5QIAquUCAK3dAgCs3QIAr7UCAK61AgCoGQYAqSEGAKohBgCrPQYArCUGAK1dBgCuVQYAr00GAEaOAICOjgCAko4AgJaOAICajgCAno4AgKKOAICmjgCAuOUGALmBBgC6gQYAu50GALyJBgC9iQYAvqEGAL+hBgCwPQYAsQ0GALIFBgCz7QYAtPUGALXhBgC24QYAt90GALOpBgCCLQAAgRUAAIAdAACqjgCAtt0GALWtBgCujgCAu8kGALr5BgCyjgCAhOADAL8lBgC+MQYAvTkGALzRBgC+iAMAo+0GANaNAIC2jgCAppkGALqOAIC+jgCApekGAKq9BgCrjQYAhkgAAIdsAACudQYAr2EGAKyVBgCtfQYAqIEGAKmNBgCqmQYAq5UGAKyNBgCttQYArrEGAK+tBgDCjgCAxo4AgMqOAIDOjgCA0o4AgNaOAIDajgCA3o4AgLilBgC5YQEAumEBALthAQC8YQEAvWEBAL5hAQC/YQEAsNkGALHZBgCyqQYAs6kGALS9BgC1oQYAtqEGALedBgCzEQYA4o4AgOaOAIDqjgCA7o4AgLY1BgC1BQYA8o4AgLsdBgC6HQYA9o4AgPqOAIC/ZQYAvnkGAL19BgC8fQYA/o4AgKNVBgACjwCABo8AgKZxBgAKjwCADo8AgKVBBgCqWQYAq1kGABKPAIAWjwCArj0GAK8hBgCsOQYArTkGAKjVAgCp3QIAqikDAKspAwCsOQMArTkDAK4pAwCvKQMAGo8AgB6PAIAijwCAKo8AgC6PAIAyjwCAvrgDADaPAIC47QMAuYUDALqBAwC7gQMAvIUDAL2NAwC+sQMAv7EDALBZAwCxWQMAsu0DALPlAwC0/QMAteUDALblAwC31QMAgKEAAIGhAACCoQAAvoAMADqPAICEmAIAPo8AgEKPAICGAAwAh/QDAEaPAIBKjwCATo8AgFKPAIBWjwCAhLADALPhAwBajwCAXo8AgGKPAIBmjwCAtvkDALXxAwBqjwCAu90DALrdAwBujwCAco8AgL9hAwC+eQMAvXEDALx5AwB2jwCAeo8AgH6PAICj
LQIAgo8AgKU9AgCmNQIAho8AgIqPAICOjwCAqhECAKsRAgCstQIArb0CAK61AgCvrQIA48QDAOMQBwDhuAEA4WwHAIBxAACBcQAAggUAAJKPAICGwAwAh1QNAJqPAICejwCA77ADAO8ABwCijwCApo8AgKqPAICujwCAso8AgLaPAIC6jwCAvo8AgMKPAIDvpAEAhKANAOGABgDGjwCA4xABAMqPAIDOjwCA0o8AgNaPAICz9QEA2o8AgN6PAIDijwCA5o8AgLZNAQC1SQEA6o8AgLtRAQC6SQEA7o8AgPKPAIC/OQEAvjEBAL1BAQC8SQEAqC0OAKk1DgCqPQ4AqzEOAKyBDgCtjQ4AroUOAK+1DgCWjwCA9o8AgPqPAID+jwCAgBkAAIEZAACCBQAAApAAgLidDgC5rQ4AuqUOALtNDwC8VQ8AvV0PAL5JDwC/QQ8AsM0OALHVDgCy3Q4As9UOALS1DgC1vQ4AtrUOALetDgCjtQ4AvogDAAaQAIAKkACADpAAgKYNDgClCQ4AEpAAgKsRDgCqCQ4AhggAAIdsAwCveQ4ArnEOAK0BDgCsCQ4AFpAAgBqQAIAekACAs7UPACKQAIC1VQ8Atl0PACaPAIAmkACAKpAAgLp5DwC7eQ8AvGkPAL1dDwC+SQ8Av0kPAKhpDgCpaQ4AqnEOAKtxDgCskQ4ArZEOAK6RDgCvkQ4ALpAAgDKQAIA2kACAOpAAgD6QAIBCkACARpAAgEqQAIC4hQ4AuY0OALqFDgC7nQ4AvI0OAL29DgC+tQ4Av3kBALDxDgCx8Q4AsvEOALPFDgC0wQ4AtcEOALbBDgC3wQ4Ao/kOAE6QAIBSkACAVpAAgFqQAICmEQ4ApRkOAF6QAICrNQ4AqjUOAGKQAIBmkACArwUOAK4FDgCtEQ4ArCUOAIANAACBFQAAgh0AAGqQAIBukACAcpAAgISUAQC+lAEAhkAHAIf0AAB6kACAfpAAgIKQAICGkACAipAAgI6QAICojQIAqZUCAKqVAgCrzQIArNUCAK3dAgCuyQIAr/0CAJKQAICWkACAmpAAgJ6QAIC/ABQAopAAgKaQAICqkACAuH0DALnBAwC6wQMAu8EDALzBAwC9yQMAvvEDAL/xAwCwhQIAsUUDALJNAwCzRQMAtF0DALVFAwC2TQMAt0UDALMdAgCukACAspAAgLaQAIC6kACAtl0CALVdAgC+kACAu4EDALpBAgDCkACAxpAAgL+BAwC+mQMAvZEDALyZAwDKkACAo1kCAM6QAIDSkACAphkCANaQAIDakACApRkCAKoFAgCrxQMA3pAAgOKQAICu3QMAr8UDAKzdAwCt1QMA6pAAgOPMAACEBAIA4bwBAIDJAQCB/QEAgvUBAL4QBQDukACAvigEAPKQAID2kACA+pAAgO8QAAD+kACAApEAgIbgBACH9AIABpEAgAqRAIDj/A8ADpEAgOHgDwASkQCA7xQPABaRAIAakQCAHpEAgCKRAIAmkQCAKpEAgC6RAIAykQCANpEAgDqRAIA+kQCAQpEAgEaRAIBKkQCA7+ABAIUEEgDh3A4ATpEAgOMcDgCAKQAAgR0AAIIFAABSkQCAszECAFqRAICEzAUAXpEAgGKRAIC2KQIAtSECAGaRAIC7zQEAus0BAGqRAIBukQCAv3UBAL7JAQC9wQEAvMkBAKjpBQCp6QUAqvkFAKv5BQCs6QUArekFAK45BgCvOQYA5pAAgFaRAICGiAAAhwADAHKRAIB2kQCAepEAgH6RAIC40QYAudkGALrhBgC74QYAvJEGAL2dBgC+lQYAv4kGALBJBgCxSQYAsl0GALNVBgC0TQYAtfEGALbxBgC38QYAo3EFAIKRAICGkQCAipEAgI6RAICmaQUApWEFAJKRAICrjQYAqo0GAJaRAICakQCArzUGAK6JBgCtgQYArIkGAJ6RAICikQCAs+EHAKaRAIC14QcAqpEAgK6RAIC25QcAdpAAgLKRAIC7vQcAuqEHAL2VBwC8qQcAv5UHAL6VBwCoAQYAqSUGAKohBgCrIQYArCEGAK0tBgCuJQYAr1UGALaRAICCHQAAgR0AAIAdAAC6kQCAvpEAgMKRAIC+MAEAuDkGALk5BgC6yQYAu8kGALzZBgC92QYAvskGAL/JBgCwLQYAsTEGALI1BgCzCQYAtBkGALUZBgC2CQYAtwkGAKOpBgCEjAIAhigfAIdEAQDKkQCApq0GAKWpBgDOkQCAq/UGAKrpBgDSkQCA1pEAgK/dBgCu3QYArd0GAKzhBgDakQCAsxUGAN6RAIDikQCAtj0GAOaRAIDqkQCAtTUGALrZAQC72QEA7pEAgPKRAIC+fQEAv2UBALx9AQC9dQEAqMUFAKnJBQCq2QUAq9EFAKz5BQCt+QUArikCAK8pAgD2kQCA+pEAgP6RAIACkgCAjAAAAAaSAIAKkgCADpIAgLjtAgC5hQIAuo0CALuBAgC8hQIAvY0CAL69AgC/fQMAsFkCALFZAgCy7QIAs+UCALT9AgC15QIAtuUCALfVAgCjUQUAEpIAgBaSAIAakgCAHpIAgKZ5BQClcQUAIpIAgKudAgCqnQIAJpIAgCqSAICvIQIArjkCAK0xAgCsOQIAghEAAC6SAICAZQAAgQkAADKSAIC+mAMAOpIAgD6SAICEJAMAQpIAgIdoAwCGjBwARpIAgEqSAIBOkgCAUpIAgFaSAIBakgCAs6ECAITAHAC10QIAXpIAgGKSAIC21QIAZpIAgGqSAIC7wQIAuvUCAL0RAQC82QIAvxEBAL4ZAQBukgCAcpIAgHaSAIB6kgCAfpIAgIKSAICGkgCA77gGAIqSAIDhnAQAjpIAgON0BgCSkgCAlpIAgJqSAICekgCAgPkAAIH5AACCBQAAopIAgL5YHACEWB8A71wAAO9ABgDhkAEA4fwGAOM8AADjdAYAqpIAgK6SAICGmBwAh/QcAKNpAgC+DB8AspIAgLaSAIC6kgCAph0CAKUZAgC+kgCAqwkCAKo9AgDCkgCAxpIAgK/ZAQCu0QEArdkBAKwRAgCokR0AqZkdAKqhHQCroR0ArNEdAK3dHQCu1R0Ar8kdADaSAICmkgCAypIAgM6SAIDSkgCA1pIAgNqSAIDekgCAuHkeALl5HgC6zR4Au8UeALzdHgC9xR4AvsUeAL/1HgCwuR0AsY0dALKFHQCzTR4AtFUeALVdHgC2VR4At0keALjNHwC51R8Aut0fALvVHwC88R8Avf0fAL7pHwC/6R8AsKUfALGxHwCysR8As40fALSVHwC19R8Atv0fALf1HwCoGR4AqRkeAKotHgCrPR4ArCUeAK0tHgCuJR4Ar90fAOKSAIDmkgCA6pIAgO6SAIDykgCAxpEAgPaSAID6kgCAs+UfAP6SAIACkwCABpMAgAqTAIC27R8Ate0fAA6TAIC7NR4AuiEeABKTAIAWkwCAv3EeAL4RHgC9GR4AvCUeAIJpAACjoR8AgFkAAIFRAACmqR8AGpMAgB6TAIClqR8AqmUeAKtxHgCGAAQAh+wBAK5VHgCvNR4
ArGEeAK1dHgCoMR4AqTEeAKpBHgCrQR4ArEEeAK1JHgCucR4Ar3EeACKTAIAmkwCAKpMAgC6TAIAykwCANpMAgDqTAIA+kwCAuCkBALkpAQC6OQEAuzUBALwtAQC90QAAvtEAAL/RAACwyQEAsckBALLZAQCz2QEAtMkBALXJAQC2GQEAtxkBALPJHQBCkwCARpMAgEqTAIBOkwCAtskdALXJHQBSkwCAuw0CALoNAgBWkwCAWpMAgL8NAgC+DQIAvQ0CALwNAgBekwCAo40dAGKTAIBmkwCApo0dAGqTAIBukwCApY0dAKpJAgCrSQIAcpMAgHaTAICuSQIAr0kCAKxJAgCtSQIAgA0AAIERAACCEQAAepMAgO/MAgB+kwCAgpMAgISQAgDjLAIAvigDAOHYAQCKkwCAhhAEAIfUAwCOkwCAkpMAgLNhAwCWkwCAmpMAgJ6TAICikwCAtnkDALVxAwCmkwCAu10DALpdAwCqkwCArpMAgL/hAAC++QAAvfEAALz5AACjoQIAspMAgLaTAIC6kwCAvpMAgKa5AgClsQIAwpMAgKudAgCqnQIAxpMAgMqTAICvIQEArjkBAK0xAQCsOQEAzpMAgNKTAIDvZB8A1pMAgNqTAIDekwCA4pMAgOaTAICADQAAgREAAIIVAADqkwCA4eAcAO6TAIDjiB8A8pMAgISAAgC+jAUAh0gFAIYsBAD6kwCA/pMAgO+kHgDv9B4A4QAeAOFQHwDjLB4A47AeAAKUAIAGlACACpQAgA6UAIASlACAFpQAgISEBACzcQEAGpQAgLUdAQC2FQEAHpQAgCKUAIAmlACAugEBALsBAQC89QAAvf0AAL71AAC/7QAAqK0GAKm9BgCqtQYAq8kGAKzZBgCt2QYArskGAK/BBgAqlACALpQAgDKUAIA2lACAOpQAgD6UAIBClACARpQAgLhtBwC5BQcAug0HALsBBwC8AQcAvQEHAL4BBwC/AQcAsIkGALGJBgCybQcAs2UHALR9BwC1ZQcAtmUHALdVBwCGkwCAozkGAEqUAID2kwCApl0GAE6UAIBSlACApVUGAKpJBgCrSQYAVpQAgFqUAICuvQcAr6UHAKy9BwCttQcAgG0AAIEJAACCGQAAXpQAgGKUAIC+nAMAZpQAgGqUAICGQAAAh2AAAG6UAIBylACAdpQAgHqUAIB+lACAgpQAgKiRBgCpkQYAqrkGAKu5BgCsqQYArakGAK7ZBgCv2QYAhpQAgIqUAICOlACAkpQAgJaUAICalACAnpQAgKKUAIC4cQEAuXEBALpxAQC7cQEAvNkBAL3BAQC+wQEAv/UBALCxBgCxuQYAsokGALOJBgC0UQEAtVEBALZRAQC3UQEAszEGAKaUAICqlACArpQAgLKUAIC2KQYAtSEGALaUAIC7fQYAunUGALqUAIC+lACAv5UBAL6VAQC9XQYAvF0GAMKUAICjdQYAxpQAgMqUAICmbQYAzpQAgNKUAIClZQYAqjEGAKs5BgCErAEAvqABAK7RAQCv0QEArBkGAK0ZBgCo3QIAqe0CAKrlAgCr/QIArOUCAK3tAgCu5QIArz0DANqUAIDelACA4pQAgL5kDADmlACA6pQAgO6UAIDylACAuMkDALnJAwC62QMAu9EDALz5AwC9+QMAvpkDAL+VAwCwRQMAsU0DALJFAwCzXQMAtEUDALVNAwC2RQMAt/kDAIFVAwCASQMAs2UCAIJVAwC1ZQIA9pQAgPqUAIC2ZQIAhgAMAIfkAwC7gQMAuokDAL2BAwC8mQMAv4EDAL6JAwCjLQIA/pQAgAKVAIAGlQCACpUAgKYtAgClLQIADpUAgKvJAwCqwQMAEpUAgBaVAICvyQMArsEDAK3JAwCs0QMA49gGAOGsBwDhnAYA45wGABqVAICEWA0AHpUAgCKVAIAmlQCAKpUAgC6VAIAylQCA7xwBADaVAIA6lQCA70AGAIB5AACBFQAAghEAAIQADAA+lQCA46wAAEKVAIDhpAEASpUAgO9wAACGyAwAh6QNAE6VAIBSlQCAVpUAgFqVAIC6yQUAu8kFALilBQC5zQUAvvkFAL/5BQC8zQUAvcUFALKlBQCzrQUAsBEGALERBgC2rQUAt50FALS1BQC1rQUAqmEGAKthBgConQYAqZUGAK5hBgCvYQYArHEGAK1xBgBelQCAYpUAgGaVAIBqlQCAbpUAgHKVAIC+sAwAdpUAgKghDgCpIQ4AqiEOAKs9DgCsJQ4ArS0OAK4lDgCviQ4ARpUAgHqVAIB+lQCAgpUAgIaVAICKlQCAjpUAgJKVAIC4UQ8AuV0PALpVDwC7bQ8AvHUPAL19DwC+dQ8Av2kPALD5DgCxoQ4AsqEOALOhDgC0oQ4AtakOALaRDgC3kQ4As6kOAJaVAIDWlACAmpUAgJ6VAIC2rQ4Ata0OAKKVAIC7ZQ4Auj0OAKaVAICqlQCAv20OAL5lDgC9dQ4AvHUOAIIZAACj7Q4AgGUAAIEZAACm6Q4ArpUAgLKVAICl6Q4AqnkOAKshDgC2lQCAupUAgK4hDgCvKQ4ArDEOAK0xDgCoYQ4AqXUOAKp9DgCrdQ4ArG0OAK31DgCu/Q4Ar/UOAIaAAQCHpAEAvpUAgMKVAIDGlQCAypUAgM6VAIDSlQCAuHUBALl9AQC6dQEAu8kBALzdAQC9xQEAvsUBAL/1AQCwjQ4AsZUOALKdDgCzkQ4AtFUBALVdAQC2VQEAt00BALP1DgDWlQCA2pUAgN6VAIDilQCAtnUOALXlDgDmlQCAu1EOALpJDgDqlQCA7pUAgL+ZAQC+kQEAvUUOALxJDgDylQCAo7EOAPaVAID6lQCApjEOAP6VAIAClgCApaEOAKoNDgCrFQ4ABpYAgAqWAICu1QEAr90BAKwNDgCtAQ4AqO0CAKktAwCqJQMAqz0DAKwlAwCtLQMAriUDAK+ZAwAOlgCAEpYAgBaWAIAalgCAHpYAgCKWAIC+dAIAKpYAgLiNAwC5kQMAupEDALulAwC8vQMAvXUAAL59AAC/dQAAsOkDALHpAwCy+QMAs/EDALTZAwC12QMAtrkDALe1AwCArQAAgbUAAIK9AACzoQMALpYAgLWhAwC2oQMAMpYAgITgAgA2lgCAuiEDALshAwC8IQMAvSkDAL4RAwC/EQMAo+0DAIXABACFtG8AOpYAgD6WAICm7QMApe0DAEKWAICrbQMAqm0DAIZIBQCHbAMAr10DAK5dAwCtZQMArG0DAEaWAIDjAA4A71hsAOG0DwBKlgCATpYAgFKWAIBWlgCAoakDAKD9DwCjwQMAog0DAOHgAwDv4A8A4+QDAFqWAIBelgCAYpYAgIQEBAC+BAQAZpYAgO+UAwBqlgCAbpYAgHKWAIDj1AMAdpYAgOFUAAB6lgCAfpYAgIKWAICGlgCAgA0AAIEVAACCHQAAipYAgI6WAICSlgCAj5EbAO+cDgCE4AcA4dQOAJqWAIDj8A4AnpYAgKKWAICGGAcAh5AEAJnlFwCY5RcAm+kLAJo5CwCd/QoAnPELAJ9VDwCeXQ8AkS
kfAJDNGwCTJR8Aks0fAJXREwCUKRMAlxkXAJZ1EwCM4RAAjSUQAI4tEACP+QwAJpYAgJaWAICKORQAi5UUAITpGACFBRgAhuUYAIfxFACmlgCAqpYAgIIxHACDFRwAnKkEAK6WAICylgCAtpYAgLqWAIC+lgCAmtEEAJt9BACUTQ0AleUIAJblCACXtQgAwpYAgMaWAICSWQwAk1kMAKGRAADKlgCAowF8AKKZAACluXwApJF8AKeZeACm4X0AqYF5AKiheACriXQAqgF0AK0BcACsWXQAr4VwAK6dcACx4WwAsAFsALMBaACyHWwAtfVoALT1aADOlgCA0pYAgNaWAIDalgCA3pYAgOKWAIDmlgCA6pYAgO6WAIDylgCAqD0HAKmVBwCqlQcAq6kHAKzdBwCtxQcArsUHAK8dBgD2lgCAgh0AAIEdAACAHQAA+pYAgP6WAIAClwCAvmABALgZBgC5GQYAuikGALslBgC8IQYAvSEGAL4hBgC/IQYAsHEGALFxBgCycQYAs3EGALRNBgC1NQYAtj0GALctBgCzHQcACpcAgIYoAACHqAAADpcAgLZFBwC1VQcAEpcAgLu1BgC6tQYAFpcAgBqXAIC/8QYAvokGAL2lBgC8pQYAHpcAgKNZBwAilwCAJpcAgKYBBwAqlwCALpcAgKURBwCq8QYAq/EGADKXAIA2lwCArs0GAK+1BgCs4QYAreEGAKipBQCptQUAqr0FAKs9AgCsJQIArVECAK5RAgCvUQIAOpcAgD6XAIBClwCARpcAgIQ8AwBKlwCATpcAgFKXAIC4pQIAua0CALqlAgC7vQIAvKUCAL2tAgC+pQIAv30DALAxAgCxMQIAshkCALMZAgC09QIAta0CALalAgC3nQIAVpcAgFqXAIBelwCAszkFAGKXAIC1oQIAtt0CAGaXAIBqlwCAbpcAgLr5AgC7+QIAvMECAL3BAgC+PQIAv2UCAHKXAICmgQIApf0CAHqXAICjZQUAvlh8AIbYfACHnHwArzkCAK5hAgCtnQIArJ0CAKulAgCqpQIAfpcAgIKXAICohQIAqZUCAKqVAgCrpQIArL0CAK3VAgCu0QIAr9ECAIGFAQCAhQEAhpcAgILtAQCKlwCAjpcAgJKXAICWlwCAuHUBALl9AQC6dQEAu80BALzVAQC93QEAvskBAL/BAQCwtQIAsb0CALKBAgCzgQIAtFEBALVRAQC2UQEAt1EBAJqXAICelwCAopcAgKaXAIDhMAYA4WQHAOMoBgDjxAYAhCB9AKqXAIDvbAAA7xgGAK6XAICylwCAtpcAgLqXAICzXQIAvkh8AL6XAIDClwCAxpcAgLYVAgC1dQIAypcAgLs5AgC6MQIAzpcAgNKXAIC/1QEAvtUBAL0VAgC8FQIAo519AHaXAIDWlwCA2pcAgN6XAICm1X0ApbV9AOKXAICr+X0AqvF9AOaXAIDqlwCArxV+AK4VfgCt1X0ArNV9AIBNAACBVQAAglUAALOxfgDulwCAtWV/ALZtfwDylwCAhkADAIcEAwC66X8Au+l/ALz5fwC9+X8Avt1/AL/NfwD2lwCA+pcAgAaXAID+lwCAApgAgAaYAIAKmACADpgAgKhtfgCpXX4AqlV+AKuFfwCsgX8ArYF/AK6BfwCvgX8AsEF/ALFBfwCyQX8As0F/ALR1fwC1ZX8Atm1/ALdlfwC4XX8AuS1/ALolfwC7PX8AvC1/AL0dfwC+FX8Av/UAAKP9fwASmACAFpgAgBqYAIAemACApiF+AKUpfgAimACAq6V+AKqlfgAmmACAKpgAgK+BfgCukX4ArbV+AKy1fgAumACAMpgAgDaYAIA6mACAPpgAgEKYAIBGmACASpgAgIA9AACBCQAAghkAAE6YAIBSmACAhLgBAL6wAQBWmACAqK0BAKnVAQCq1QEAqw0BAKwVAQCtGQEArgkBAK8JAQCGAAQAhwQBAFqYAIBemACAYpgAgGaYAIBqmACAbpgAgLjtAAC5hQAAuo0AALuFAAC8nQAAvYUAAL6NAAC/hQAAsHkBALF5AQCy7QAAs+UAALT9AAC15QAAtuUAALfVAACzXQIAcpgAgHaYAIB6mACAfpgAgLaZAgC1nQIAgpgAgLu9AgC6vQIAhpgAgIqYAIC/IQMAvjkDAL0xAwC8OQMAvigDAKMZAgCOmACAkpgAgKbdAgCWmACAmpgAgKXZAgCq+QIAq/kCAJ6YAICimACArn0DAK9lAwCsfQMArXUDAL7IBACmmACAqpgAgL7EBQCumACAspgAgLaYAIC6mACAgD0AAIEJAACCGQAAvpgAgMKYAICEOAMAypgAgM6YAIDveAIA0pgAgIZIBACHVAMA1pgAgNqYAIDemACA4pgAgOaYAIDqmACA7pgAgPKYAIDjVAIA9pgAgOFAAQD6mACA/pgAgOMkfwACmQCA4Zx8AAaZAIAKmQCADpkAgBKZAICEbAUAFpkAgBqZAIAemQCAIpkAgO8YfwAmmQCAKpkAgLPxAgAumQCAMpkAgDqZAIA+mQCAtukCALXhAgBCmQCAu3EBALppAQCHoAUAhswEAL85AQC+WQEAvVEBALxhAQDhQH8ARpkAgOM4fgCEwAQAgtkAAO8UAACApQAAgdkAAEqZAIDjwAAATpkAgOHUAQBSmQCAVpkAgO+EfgBamQCAqs0BAKvVAQBemQCAYpkAgK79AQCvnQEArMUBAK31AQBmmQCAo1UCAGqZAIBumQCApk0CAHKZAIB2mQCApUUCAMaYAIA2mQCAepkAgH6ZAICCmQCAhpkAgIqZAICOmQCAqJkGAKmZBgCq7QYAq/0GAKzlBgCt7QYAruUGAK/dBgCwpQYAsa0GALKlBgCzuQYAtK0GALVVBwC2UQcAt00HALh1BwC5fQcAunUHALtJBwC8WQcAvVkHAL5JBwC/RQcAs0UGAJKZAICWmQCAmpkAgJ6ZAIC2TQYAtU0GAKKZAIC7SQYAukEGAIYIAACHjAAAv7EHAL5JBgC9TQYAvFEGAIJdAACjAQYAgEUAAIFdAACmCQYAqpkAgK6ZAIClCQYAqgUGAKsNBgCymQCAtpkAgK4NBgCv9QcArBUGAK0JBgCoTQYAqVUGAKpVBgCriQYArLEGAK29BgCuqQYAr6kGAKaZAIC6mQCAvpkAgMKZAIDGmQCAypkAgM6ZAIDSmQCAuEkBALlJAQC6WQEAu1kBALxJAQC9SQEAvt0BAL/VAQCw3QYAsa0GALKlBgCzjQYAtJkGALWZBgC2jQYAt4UGALPdBgDWmQCA2pkAgN6ZAIDimQCAtj0GALU5BgDmmQCAu2kGALoZBgDqmQCA7pkAgL9dBgC+XQYAvVkGALxxBgDymQCAo5kGAPaZAID6mQCApnkGAP6ZAIACmgCApX0GAKpdBgCrLQYABpoAgAqaAICuGQYArxkGAKw1BgCtHQYAqNUCAKndAgCq4QIAq+ECAKw1AwCtPQMArjUDAK8tAwCAzQMAgQkAAIIZAAAOmgCAEpoAgIQYAgC+dAMAGpoAgLjpAwC56QMAuokDA
LuFAwC8nQMAvYEDAL6BAwC/tQMAsFUDALFdAwCyVQMAs+kDALT5AwC1+QMAtukDALfhAwCGIAwAhxADAB6aAIAimgCAJpoAgCqaAIAumgCA71wCADKaAIDhFAAANpoAgOOIAgC++AwAOpoAgD6aAIBCmgCAu/kDALrxAwC+gA0ARpoAgL9dAwC+XQMAvV0DALzhAwCzCQIASpoAgE6aAIBSmgCAVpoAgLbdAwC13QMAWpoAgKipBgCpqQYAqrkGAKu5BgCsqQYArakGAK4dBQCvFQUAXpoAgGKaAIBmmgCAapoAgG6aAIBymgCAdpoAgHqaAIC4GQUAuS0FALolBQC7yQUAvNkFAL3FBQC+zQUAv8UFALBtBQCxdQUAsnUFALNFBQC0XQUAtT0FALY1BQC3KQUA4fQGAOFUBwDjFAYA47wGAIEJAACAqQAAfpoAgII5AACE7A0AgpoAgIeIDACGDAwAipoAgI6aAIDvzAcA78QHAKMpAwCSmgCAlpoAgJqaAICemgCApv0CAKX9AgCimgCAq9kCAKrRAgCmmgCAqpoAgK99AgCufQIArX0CAKzBAgCoPQ4AqY0OAKqFDgCrnQ4ArIUOAK2NDgCuuQ4Ar7UOAIaaAICumgCAspoAgLaaAIC6mgCAvpoAgMKaAIDGmgCAuL0OALllDwC6bQ8Au2UPALx9DwC9ZQ8Avm0PAL9lDwCw1Q4Asd0OALLVDgCzoQ4AtJUOALWdDgC2lQ4At40OALMNDgDKmgCAzpoAgNKaAIDWmgCAtg0OALUNDgDamgCAuxkOALoRDgDemgCAFpoAgL9ZDgC+UQ4AvXUOALwBDgDimgCAo0kOAOaaAIDqmgCApkkOAO6aAIDymgCApUkOAKpVDgCrXQ4AhKQDAPaaAICuFQ4Arx0OAKxFDgCtMQ4AqLEOAKmxDgCqzQ4Aq8UOAKzdDgCtxQ4ArsUOAK/1DgCA7QEAgfEBAILxAQD6mgCAhpABAIe0AQD+mgCAApsAgLjFAQC5zQEAusUBALvdAQC8zQEAvf0BAL6ZAQC/lQEAsI0OALFBAQCyQQEAs0EBALRBAQC1QQEAtkEBALdBAQCzRQ4ABpsAgAqbAIAOmwCAEpsAgLZFDgC1VQ4AFpsAgLuFAQC6SQ4AGpsAgB6bAIC/hQEAvoUBAL2VAQC8lQEAIpsAgKMBDgAmmwCAKpsAgKYBDgAumwCAMpsAgKURDgCqDQ4Aq8EBADabAIA6mwCArsEBAK/BAQCs0QEArdEBAKgtAwCpPQMAqjUDAKuJAwCsmQMArZkDAK6JAwCvgQMAPpsAgEKbAIBGmwCASpsAgE6bAIBSmwCAVpsAgFqbAIC4rQMAuWUAALptAAC7ZQAAvH0AAL1lAAC+bQAAv2UAALDJAwCxyQMAsqkDALOlAwC0vQMAtaEDALahAwC3lQMAgL0AAIEJAACCGQAAXpsAgGKbAIC+2AMAapsAgG6bAICErAIAcpsAgIfoAwCGDAQAdpsAgHqbAIB+mwCAgpsAgLP9AwCGmwCAipsAgI6bAICSmwCAtlkDALVRAwCWmwCAu00DALpNAwCamwCAnpsAgL8lAwC+OQMAvTEDALw9AwCimwCAppsAgKqbAICumwCA71gPALKbAIC2mwCAupsAgOOQDgC+mwCA4bAPAMKbAIDGmwCAypsAgM6bAIDSmwCAgHUAAIF9AACCdQAAhBgFAO88AwDamwCAvhQFAN6bAIDj0AMA4psAgOFAAADmmwCAhtAEAIdYBQDqmwCA7psAgPKbAID2mwCA+psAgP6bAIACnACABpwAgAqcAIDvrA8AhOwEAOEQDgAOnACA41QBABKcAIAWnACAGpwAgB6cAICj/QIAIpwAgCacAIAqnACALpwAgKZZAgClUQIAMpwAgKtNAgCqTQIANpwAgDqcAICvJQIArjkCAK0xAgCsPQIAqJkGAKmZBgCqrQYAq70GAKylBgCtrQYArqUGAK/ZBgDWmwCAghEAAIEZAACAwQcAPpwAgEKcAIC+cAMARpwAgLhJBwC5SQcAul0HALtVBwC8TQcAvXEHAL51BwC/bQcAsKkGALGpBgCyuQYAs7EGALSZBgC1mQYAtnkHALd5BwC1NQYASpwAgE6cAIC2NQYAhjAAAIdcAwCzPQYAUpwAgL19BgC8dQYAv0UGAL5FBgBmmwCAVpwAgLt1BgC6dQYAo2UGAFqcAIBenACAYpwAgGacAICmbQYApW0GAGqcAICrLQYAqi0GAG6cAIBynACArx0GAK4dBgCtJQYArC0GAKhVBgCpWQYAqm0GAKthBgCsaQYArWkGAK6ZBgCvmQYAdpwAgHqcAIB+nACAgpwAgIacAICKnACAjpwAgJKcAIC4+QYAufkGALqNBgC7hQYAvJ0GAL2FBgC+hQYAv7UGALDpBgCx6QYAsvkGALP5BgC06QYAtd0GALbJBgC3yQYAs+UGAJacAICanACAnpwAgKKcAIC26QYAteEGAKacAIC7LQYAui0GAKqcAICunACAvxkGAL4tBgC9LQYAvC0GAIIVAACjoQYAgGEAAIFhAACmrQYAspwAgL6QAQClpQYAqmkGAKtpBgCEpAEAupwAgK5pBgCvXQYArGkGAK1pBgCohQIAqY0CAKqVAgCruQIArNUCAK3dAgCu1QIAr80CAIaAHACHZAMAvpwAgL5gAwDCnACAxpwAgMqcAIDOnACAuHUDALl9AwC6dQMAu8kDALzZAwC92QMAvskDAL/BAwCwvQIAsY0CALKFAgCzTQMAtFUDALVdAwC2VQMAt00DALMdAgDSnACAhAgDANacAIDanACAtl0CALVdAgDenACAu0kCALp5AgDinACA5pwAgL+ZAwC+kQMAvZkDALxRAgCwAAAAo1kCAOqcAIDunACAphkCAPKcAID2nACApRkCAKo9AgCrDQIA+pwAgP6cAICu1QMAr90DAKwVAgCt3QMAAp0AgAadAIAKnQCA76wGAA6dAIASnQCAFp0AgBqdAIC+6BwAHp0AgCKdAIAqnQCALp0AgOGABwAynQCA42AGAIBdAACBYQAAgmEAALN9AQA2nQCAtW0BALZlAQA6nQCAhiAdAIdYHQC6+QEAu/EBALzZAQC92QEAvrEBAL+xAQDvoAAAPp0AgEKdAIBGnQCASp0AgE6dAIBSnQCA71wBAIRsHADhzAYAVp0AgOMcBgDjSAAAWp0AgOEwAQBenQCAo/EBAGKdAICFABQAZp0AgGqdAICm6QEApeEBAG6dAICrfQEAqnUBAHKdAIB2nQCArz0BAK49AQCtVQEArFUBAKjtHQCpLR4AqjkeAKs5HgCsKR4ArSkeAK6dHgCvkR4AJp0AgHqdAIB+nQCAgp0AgIadAICC+QAAgfEAAID9AAC4qR4AuakeALpJHwC7SR8AvFkfAL1FHwC+TR8Av0UfALDxHgCx+R4AssEeALPBHgC0uR4AtbkeALatHgC3pR4AsBEfALERHwCyER8AsyUfALQlHwC1KR8Atl0fALdRHwC4cR8AuXkfALpBHwC7QR8AvJUAAL2d
AAC+lQAAv40AAIqdAIC2nACAjp0AgJKdAICWnQCAmp0AgIb4AwCH0AAAqM0fAKnVHwCq0R8Aq70fAKytHwCtcR8ArnEfAK9xHwCzOR4Anp0AgKKdAICmnQCAqp0AgLaRHgC1RR4Arp0AgLu1HgC6tR4Asp0AgLadAIC/jR4AvoEeAL2RHgC8pR4Aup0AgKN9HgC+nQCAwp0AgKbVHgDGnQCAyp0AgKUBHgCq8R4Aq/EeAM6dAIDSnQCArsUeAK/JHgCs4R4ArdUeAKhVAQCpgQAAqoEAAKuBAACsgQAArYkAAK6xAACvsQAA1p0AgNqdAIDenQCA4p0AgOadAIDqnQCA7p0AgPKdAIC4ZQAAuW0AALplAAC7fQAAvGUAAL1tAAC+ZQAAv90DALChAACxrQAAsqUAALO5AAC0qQAAtZ0AALaVAAC3XQAA9p0AgIIdAACBHQAAgB0AAPqdAID+nQCAAp4AgL4UAgAKngCAhKgCAA6eAIASngCAFp4AgBqeAIAengCAjwAAALNJAwAingCAhugEAIesAgAmngCAtkkDALVJAwAqngCAuykDALolAwAungCAMp4AgL8ZAwC+LQMAvS0DALwxAwA2ngCAo40DADqeAIA+ngCApo0DAEKeAIBGngCApY0DAKrhAwCr7QMASp4AgE6eAICu6QMAr90DAKz1AwCt6QMAvoQDAFKeAIBWngCAWp4AgF6eAIBingCAZp4AgGqeAICAPQAAgQkAAIIZAABungCAcp4AgHqeAICENAMAfp4AgLMtAQCCngCAh8wCAIZMBQCGngCAti0BALUtAQCKngCAu0kBALp5AQCOngCAkp4AgL+9AQC+vQEAvbkBALxRAQDheB8Alp4AgOPQHwCangCAnp4AgOGUAQCingCA42gDAKaeAICqngCArp4AgO+IAwCyngCAtp4AgO+sHwC6ngCAvp4AgMKeAIDGngCAyp4AgM6eAIDSngCA1p4AgO9EHgDangCA4dweAN6eAIDjHB4A4p4AgOqeAIDungCA8p4AgIFpAACAZQAAo+UBAIJ9AACl5QEA9p4AgIQUBACm5QEAvigEAPqeAICrgQEAqrEBAK1xAQCsmQEAr3UBAK51AQCoIQYAqS0GAKolBgCrPQYArCUGAK0tBgCuXQYAr00GAHaeAIDmngCAhggDAIeMAwD+ngCAAp8AgAafAIAKnwCAuOkGALnpBgC6jQYAu4UGALydBgC9hQYAvo0GAL+FBgCwPQYAsQ0GALIFBgCz7QYAtPkGALX5BgC27QYAt+UGALDNBwCx1QcAstEHALPtBwC09QcAtf0HALbpBwC36QcAuN0HALklBwC6LQcAuyUHALw9BwC9JQcAvi0HAL8lBwAOnwCAEp8AgAaeAIAWnwCAGp8AgB6fAIAinwCAJp8AgKgVBgCpGQYAqu0HAKv9BwCs7QcArd0HAK7VBwCvuQcAswUGACqfAIAunwCAMp8AgDafAIC2PQYAtQUGADqfAIC7cQYAumkGAD6fAIBCnwCAv1kGAL5RBgC9WQYAvGUGAEafAICjQQYASp8AgE6fAICmeQYAUp8AgIS0AQClQQYAqi0GAKs1BgC+gAEAWp8AgK4VBgCvHQYArCEGAK0dBgCoNQYAqT0GAKo1BgCrWQYArHUGAK2lAQCurQEAr6UBAIDpAACB6QAAgv0AAL8kAQCGMA8Ah+QAAF6fAIBinwCAuMUAALnNAAC6xQAAu90AALzNAAC9/QAAvvUAAL+dAACw3QEAsSUBALItAQCzIQEAtCEBALUhAQC2IQEAtyEBALvBAgC6OQIAZp8AgGqfAIC/xQIAvsUCAL3VAgC82QIAs50FAG6fAIBynwCAdp8AgIwAAAC2BQIAtd0FAHqfAICqfQIAq4UCAH6fAICCnwCAroECAK+BAgCsnQIArZECAIafAICj2QUAip8AgI6fAICmQQIAkp8AgJafAIClmQUAgpFqAIORagCanwCAnp8AgIa5FgCH6RcAhBEWAIWZFgCKoRIAi6ESAKKfAICmnwCAjpEeAI9ZHgCMmRMAjREeAJJxGgCT5RoAqp8AgO/oJACW8QYAlwUGAJTlGgCVGQYAmikCAJvFAgCunwCAsp8AgLafAIDhKBsAnN0CAOMgDwCfIQcAnsEHAJ01GwCcLRsAm6EbAJr5HwCZOR8AmLEfAJcBEgCWIRMAlSkTAJRRFgCTGRcAkjEXAJGxFwCQKWsAj1FrAOOsBwCEBA0A4RwHAIANAACBNQAAgj0AALqfAIC+nwCAwp8AgL4gDQDKnwCAzp8AgO9MBwCGWAwAh2ANANKfAIDWnwCA2p8AgN6fAICEXA8A4p8AgO8IAADvhAYA4ZABAOGwBgDj4AAA42QGAOafAIDqnwCA7p8AgPKfAID2nwCA+p8AgL4ADwCEQA4A/p8AgAKgAIAGoACACqAAgA6gAIASoACAFqAAgBqgAICj1QMAotUDAKExAwCgLQcAVp8AgMafAIAeoACAIqAAgCagAICCmQAAgZEAAICZAACoTQ0AqZ0NAKqVDQCrJQ4ArD0OAK0RDgCuEQ4ArxEOALB9DgCxDQ4AsgUOALMtDgC0OQ4AtTkOALYtDgC3JQ4AuOkOALnpDgC6wQ4Au8EOALy5DgC9nQ4AvpUOAL+NDgCzPQ0AKqAAgC6gAIAyoACANqAAgLaxDgC1lQ4AOqAAgLvpDgC6mQ4AhogAAIfkAAC/3Q4Avt0OAL3ZDgC88Q4APqAAgKN5DQC+hAEAhIAGAKb1DgBCoACARqAAgKXRDgCq3Q4Aq60OAEqgAIBOoACArpkOAK+ZDgCstQ4ArZ0OALIFNQCzGTQAsG0wALENNQBSoACAVqAAgLQBKAC1PSkAWqAAgF6gAIBioACAZqAAgGqgAIBuoACAcqAAgHagAICiRQEAo9UBAHqgAIChTQEAps0FAKcBOACkAQQApX0FAKoBPACrRT0AqEk5AKnlOQCudTEAr30xAKxdPQCtATAAqO0OAKn1DgCqCQ4AqwkOAKwZDgCtGQ4Arg0OAK8tDgB+oACAgqAAgIagAICKoACAjqAAgJKgAICWoACAmqAAgLgdDgC5JQ4Aui0OALslDgC8PQ4Avd0BAL7VAQC/zQEAsFUOALFdDgCyVQ4Asy0OALQ1DgC1JQ4Ati0OALclDgCzgQ0AnqAAgKKgAICqoACArqAAgLaZDQC1kQ0AvlQEALuZDQC6kQ0AhogEAIe8AwC/4Q0AvvENAL35DQC8gQ0AgkkAAKPFDQCA9QMAgUkAAKbdDQCyoACAtqAAgKXVDQCq1Q0Aq90NALqgAIC+oACArrUNAK+lDQCsxQ0Arb0NAKgdAgCpRQIAql0CAKtVAgCseQIArXkCAK6JAwCviQMAwqAAgMagAIDKoACAzqAAgIT8BQDSoACA1qAAgNqgAIC4iQMAuWUDALptAwC7ZQMAvH0DAL1lAwC+bQMAv2UDALDBAwCxwQMAssEDALPBAwC0wQMAtcEDALbBAwC3wQMA3qAAgOKgAIDmoACA6qAAgO6gAIDhpAEA8qAAgOPADgC
+aAQA9qAAgPqgAIDvHAEA/qAAgAKhAIAGoQCACqEAgLOVAwAOoQCAEqEAgBqhAIAeoQCAtrkDALWxAwAioQCAu0UCALpFAgCGqAQAh6QFAL9FAgC+RQIAvVUCALxVAgDh4A4A4SwMAOMIDgDj1A4AgK0AAIHRAACC0QAAJqEAgCqhAIAuoQCAMqEAgDahAIA6oQCAPqEAgO+IDgDvLA4AoxUDAEKhAICFxCsARqEAgEqhAICmOQMApTEDAE6hAICrxQIAqsUCAFKhAIBWoQCAr8UCAK7FAgCt1QIArNUCAKgNBgCpFQYAql0GAKtVBgCseQYArXkGAK65BgCvuQYAFqEAgFqhAIBeoQCAYqEAgGahAIBqoQCAbqEAgHKhAIC4TQcAuVUHALpRBwC7aQcAvHkHAL1lBwC+bQcAv2UHALDJBgCxyQYAst0GALPVBgC0zQYAtXUHALZ9BwC3dQcAs9UGAHahAIB6oQCAfqEAgIKhAIC2+QYAtfEGAIahAIC7DQYAug0GAIYIAACHLAAAv7EHAL4JBgC9AQYAvAkGAIJRAACjkQYAgEEAAIFBAACmvQYAiqEAgI6hAICltQYAqkkGAKtJBgCSoQCAlqEAgK5NBgCv9QcArE0GAK1FBgCwsQYAsbEGALLNBgCzwQYAtMEGALXJBgC28QYAt/EGALgFAQC5DQEAugUBALsdAQC8BQEAvQ0BAL4FAQC/uQEAmqEAgJ6hAICioQCApqEAgKqhAICuoQCApqAAgLKhAICoLQYAqTUGAKo1BgCr8QYArNEGAK3RBgCu0QYAr9EGALPdBgC2oQCAuqEAgL6hAIDCoQCAtjEGALU5BgDGoQCAuxUGALoVBgDKoQCAzqEAgL9tBgC+ZQYAvXUGALx5BgDSoQCAo5kGANahAIDaoQCApnUGAN6hAIDioQCApX0GAKpRBgCrUQYA5qEAgOqhAICuIQYArykGAKw9BgCtMQYAqNUCAKndAgCq4QIAq+ECAKxRAwCtUQMArlEDAK9RAwDuoQCA8qEAgL7sAwD6oQCA/qEAgAKiAIAGogCACqIAgLjpAwC56QMAuokDALuFAwC8nQMAvYEDAL6BAwC/tQMAsDEDALExAwCyNQMAs+kDALT5AwC1+QMAtukDALfhAwCAbQMAgaUAAIKtAACzZQIADqIAgLXVAwC23QMAEqIAgITgAgAWogCAuvkDALv5AwC87QMAvTEDAL4xAwC/MQMAh+wDAIZkPACyAAAAGqIAgB6iAIDjCAQAIqIAgOHsBgAmogCA7wAGACqiAIAuogCAMqIAgDaiAIA6ogCAPqIAgEKiAIBGogCASqIAgE6iAIDjoAMAUqIAgOGoAQBWogCA7/ADAIIdAACBHQAAgB0AAFqiAIBeogCAYqIAgGqiAIC+TD0AbqIAgKOhAwC+QDwApRECAHKiAIB2ogCAphkCAIRsAgB6ogCAqz0CAKo9AgCt9QIArCkCAK/1AgCu9QIAhkA8AIe0PQB+ogCAgqIAgIaiAICKogCAjqIAgO9EBgCSogCA4dQGAJaiAIDjDAcAmqIAgJ6iAICiogCApqIAgLP1AQCqogCArqIAgLKiAIC2ogCAtkUBALXlAQC6ogCAuzEBALopAQC+ogCAwqIAgL8dAQC+HQEAvRkBALwlAQCoLT4AqTU+AKo9PgCrNT4ArC0+AK2FPgCuhT4Ar7k+AGaiAIDGogCAyqIAgM6iAICAGQAAgRkAAIIFAADSogCAuLk+ALm5PgC6ST8Au0k/ALxZPwC9WT8Avk0/AL9BPwCwrT4AsbU+ALKxPgCzjT4AtJk+ALWZPgC2iT4At4k+AKO1PgCEjAIA1qIAgNqiAIDeogCApgU+AKWlPgDiogCAq3E+AKppPgCGCAAAh2gDAK9dPgCuXT4ArVk+AKxlPgDmogCAs5E/AOqiAIDuogCAtlk/APKiAID2ogCAtbk/ALp1PwC7fT8A+qIAgP6iAIC+QT8Av0E/ALxZPwC9VT8AsJU+ALGdPgCyqT4As6U+ALShPgC1oT4AtqE+ALehPgC45T4Aue0+ALrlPgC7/T4AvO0+AL3dPgC+1T4AvxkBAAKjAIAGowCACqMAgA6jAIASowCA9qEAgBajAIAaowCAqF0+AKkhPgCqPT4AqzU+AKwVPgCt/T4ArvU+AK/tPgCj1T4AHqMAgCKjAIAmowCAKqMAgKYdPgCl/T4ALqMAgKs5PgCqMT4AMqMAgDajAICvBT4ArgU+AK0RPgCsHT4AgREAAIANAAA6owCAghkAAD6jAIBCowCAhJQBAL4QAACGQAcAhwABAEqjAIBOowCAUqMAgFajAIBaowCAXqMAgKiNAgCplQIAqpUCAKvNAgCs2QIArdkCAK7NAgCvxQIAYqMAgGajAIBqowCAbqMAgIwAAAByowCAdqMAgHqjAIC4HQMAucEDALrBAwC7wQMAvMEDAL3JAwC+8QMAv/EDALCJAgCxiQIAsikDALMpAwC0OQMAtTkDALYpAwC3JQMAsx0CAH6jAICCowCAhqMAgIqjAIC2WQIAtVECAI6jAIC7TQIAuk0CAJKjAICWowCAv/0DAL79AwC9/QMAvP0DAJqjAICeowCAoqMAgKajAIDhDD4AqqMAgOOoPwCuowCAgT0AAIAxAADvUD8Agh0AALKjAIC++AQAhhgFAIdMAwCEDAIA48wAALqjAIDhvAEAvqMAgMKjAIDGowCAyqMAgM6jAICELAUA0qMAgNajAIDaowCA7xAAAN6jAIDiowCAo90DAOajAIDqowCA7qMAgPKjAICmmQMApZEDAPajAICrjQMAqo0DAPqjAID+owCArz0CAK49AgCtPQIArD0CAAKkAIAGpACACqQAgA6kAIASpACAFqQAgBqkAIDvKD4AHqQAgOE8PgAipACA4zgBAIApAACBFQAAghEAACqkAICzMQIAvsgEAITABAAupACAMqQAgLYpAgC1IQIANqQAgLvNAQC6zQEAOqQAgD6kAIC/dQEAvskBAL3BAQC8yQEAqOkFAKnpBQCq+QUAq/kFAKzpBQCt6QUArjkGAK85BgC2owCAJqQAgIaIAACHQAMAQqQAgEakAIBKpACATqQAgLjRBgC52QYAuuEGALvhBgC8kQYAvZEGAL6RBgC/kQYAsEkGALFJBgCyXQYAs1UGALRNBgC18QYAtvEGALfxBgCjcQUAUqQAgFakAIBapACAXqQAgKZpBQClYQUAYqQAgKuNBgCqjQYAZqQAgGqkAICvNQYArokGAK2BBgCsiQYAbqQAgLPRBwBypACAdqQAgLbxBwB6pACAfqQAgLXBBwC60QcAu90HAIKkAICGpACAvrkHAL+5BwC8xQcAvbkHALhpBgC5aQYAuokGALuJBgC8mQYAvZkGAL6JBgC/iQYAsBEGALEdBgCyFQYAs2kGALR5BgC1eQYAtmkGALdhBgCoSQYAqVUGAKpdBgCrVQYArE0GAK11BgCucQYAr3EGAEajAICCHQAAgR0AAIAdAACKpACAjqQAgJKkAIC+cAEAo5UGAJqkAICGKA
AAh0gBAJ6kAICmtQYApYUGAKKkAICrmQYAqpUGAKakAICqpACAr/0GAK79BgCt/QYArIEGAK6kAICzFQYAsqQAgLakAIC2PQYAuqQAgL6kAIC1NQYAutkBALvZAQDCpACAxqQAgL59AQC/ZQEAvH0BAL11AQCovQUAqckFAKrZBQCr0QUArPkFAK35BQCuKQIArykCAMqkAIDOpACA0qQAgNakAICMAAAA2qQAgN6kAIDipACAuO0CALmFAgC6gQIAu4ECALyFAgC9jQIAvrECAL+xAgCwWQIAsVkCALLtAgCz5QIAtP0CALXlAgC25QIAt9UCAKNRBQDmpACA6qQAgO6kAIDypACApnkFAKVxBQD2pACAq50CAKqdAgD6pACA/qQAgK8hAgCuOQIArTECAKw5AgCBbQAAgG0AAAKlAICCBQAAvlwMAAqlAIAOpQCA79AGAITsAwDhHAUAEqUAgOP8BwAWpQCAGqUAgIbYDACHvAwAqIUCAKmVAgCqlQIAq6UCAKy9AgCt1QIArtECAK/RAgAepQCAIqUAgCalAIAqpQCALqUAgDKlAIA2pQCAOqUAgLh1AQC5fQEAunUBALvJAQC82QEAvdkBAL7JAQC/wQEAsLUCALG9AgCygQIAs4ECALRRAQC1UQEAtlEBALdRAQA+pQCAhAQNAEKlAIBGpQCAvhwMAEqlAIDvHAAA76AGAOGQAQDhRAcA43AGAOOYBgBOpQCAUqUAgFalAIBapQCAs10CAF6lAIBipQCAZqUAgGqlAIC2FQIAtXUCAG6lAIC7OQIAujECAHKlAIB6pQCAv9UBAL7VAQC9FQIAvBUCAKOdDQAGpQCAdqUAgH6lAICCpQCAptUNAKW1DQCGpQCAq/kNAKrxDQCGCAMAh2ADAK8VDgCuFQ4ArdUNAKzVDQCAkQ8AgZkPAIKhDwCzpQ4AiqUAgLWhDgC2eQ8AjqUAgJKlAICWpQCAukUPALtdDwC8RQ8AvU0PAL5FDwC//Q8AqFUOAKldDgCqYQ4Aq30OAKxlDgCttQ8Arr0PAK+1DwCapQCAnqUAgKKlAICmpQCAqqUAgK6lAICypQCAtqUAgLhVDwC5dQ8Aun0PALt1DwC8bQ8AvREPAL4RDwC/EQ8AsM0PALHVDwCy3Q8As9UPALTNDwC1dQ8AtnEPALdxDwCj6Q8AuqUAgL6lAIDCpQCAxqUAgKY1DgCl7Q8AyqUAgKsRDgCqCQ4AzqUAgNKlAICvsQ4ArgkOAK0BDgCsCQ4A1qUAgIIdAACBHQAAgB0AANqlAIDepQCA4qUAgL6UAQCErAEA5qUAgIfgAQCGzAAA6qUAgO6lAIDypQCAlqQAgKhtDgCpiQEAqpkBAKuRAQCswQEArckBAK75AQCv+QEAhKAAAPalAID6pQCA/qUAgAKmAIAGpgCACqYAgA6mAIC4xQAAuc0AALrFAAC73QAAvM0AAL39AAC+9QAAv50AALBBAQCxQQEAskEBALNBAQC0QQEAtUEBALZBAQC3QQEAsxECABKmAIAWpgCAGqYAgB6mAIC2SQIAtUkCACKmAIC7hQIAuoUCACamAIAqpgCAv4UCAL6FAgC9lQIAvJUCAIU8GgCjVQIALqYAgDKmAICmDQIANqYAgDqmAIClDQIAqsECAKvBAgA+pgCAQqYAgK7BAgCvwQIArNECAK3RAgCCGQAARqYAgIAZAACBGQAASqYAgE6mAIBSpgCAWqYAgL4ABABepgCAYqYAgGamAIBqpgCAbqYAgHKmAIB2pgCA7+gOAHqmAICG6AQAh1ADAH6mAICCpgCA74ACAIamAIDhlAEAiqYAgONYAQCOpgCA4wAOAJKmAIDhaA0AlqYAgKhxAgCpcQIAqnECAKupAgCsuQIArbkCAK6pAgCvqQIAhKwFAJqmAICepgCAoqYAgKamAICqpgCArqYAgLKmAIC4bQEAuQ0BALoFAQC7GQEAvAkBAL09AQC+NQEAv9kBALDZAgCx2QIAsm0BALNlAQC0fQEAtWUBALZlAQC3VQEA4WAPAOP0AADjHA4A4bwBALamAICCOQAAgTEAAIA9AAC6pgCAvigEAL6mAIDCpgCAvjwHAO8QAADv0A4AyqYAgIbgBACHyAQAzqYAgLO1AgDSpgCAtX0CALZ1AgDWpgCA2qYAgN6mAIC6UQIAu1ECALz1AQC9/QEAvvUBAL/tAQBWpgCAxqYAgKqxBQCrsQUArBUGAK0dBgCuFQYArw0GAOKmAIDmpgCA6qYAgKNVBQDupgCApZ0FAKaVBQDypgCAs+kGAPamAID6pgCA/qYAgAKnAIC24QYAtekGAAanAIC7sQYAuqEGAAqnAIAOpwCAv50GAL6RBgC9pQYAvKkGAKgdBgCpIQYAqiEGAKshBgCsIQYArSEGAK4hBgCvIQYAEqcAgBanAIAapwCAHqcAgCKnAIAmpwCAKqcAgC6nAIC45QcAue0HALrlBwC7/QcAvOUHAL3tBwC+5QcAv00HALAlBgCxNQYAsj0GALMxBgC0FQYAtRkGALYNBgC3AQYAo6kHAIIVAACBtQEAgLUBADKnAICmoQcApakHADanAICr8QcAquEHAISgAgA6pwCAr90HAK7RBwCt5QcArOkHAD6nAICzlQYAhugAAIcYAQC2tQYAQqcAgEanAIC1vQYAukkBALtVAQBKpwCATqcAgL45AQC/OQEAvEUBAL05AQCoPQYAqU0GAKpZBgCrUQYArHEGAK1xBgCuuQEAr7kBAISsAQBSpwCAVqcAgFqnAIBepwCAYqcAgGanAIBqpwCAuKkBALmpAQC6aQEAu2kBALx5AQC9eQEAvmkBAL9pAQCwyQEAsdUBALLVAQCzqQEAtLkBALW5AQC2qQEAt6EBAKPRBQBupwCAcqcAgHanAIB6pwCApvEFAKX5BQB+pwCAqxECAKoNAgCCpwCAhqcAgK99AgCufQIArX0CAKwBAgCKpwCAjqcAgJKnAICWpwCAgTEAAIANAACapwCAgjkAAJ6nAICipwCAviQDAKqnAICupwCAsqcAgIbYHACHTAMAtqcAgLqnAIC+pwCAhMAcAOMgAQDCpwCA4cgBAManAIDvMAIAyqcAgM6nAIDSpwCA1qcAgNqnAIDepwCA4qcAgLOVAwDmpwCA6qcAgO6nAIDypwCAtrkDALWxAwD2pwCAu1EDALpJAwD6pwCA/qcAgL/1AAC+SQMAvUEDALxJAwCoLQIAqUUCAKpdAgCrVQIArHkCAK15AgCuvQIAr7UCAL5oHQACqACABqgAgAqoAICAHQAAgQkAAIKpAAAOqACAuFEBALlZAQC6YQEAu2EBALwRAQC9EQEAvhEBAL8RAQCwzQIAsdUCALLdAgCz1QIAtM0CALVxAQC2cQEAt3EBAOFYBgDhVAcA47AAAOO8BgASqACAGqgAgIYYHACHVB0AHqgAgCKoAIAmqACAKqgAgL74HAAuqACA7/AGAO/gBgCjlQIAMqgAgDaoAIA6qACAPqgAgKa5AgClsQIAQqgAgKtRAgCqSQIARqgAgEqoAICv9QEAr
kkCAK1BAgCsSQIAqG0eAKl1HgCqfR4Aq40eAKyVHgCtnR4Aro0eAK+BHgAWqACATqgAgFKoAIBWqACAWqgAgF6oAIBiqACAZqgAgLiJHgC5iR4AupkeALuRHgC8uR4AvbkeAL59HwC/dR8AsMUeALHNHgCyxR4As90eALTFHgC1zR4AtsUeALe5HgCz9R4AaqgAgG6oAIByqACAdqgAgLYdHgC1HR4AeqgAgLsJHgC6AR4AfqgAgIKoAIC/CR4AvgEeAL0JHgC8ER4Agm0AAKOxHgCAVQAAgWUAAKZZHgCEmAMAv9ABAKVZHgCqRR4Aq00eAIYABACHmAEArkUeAK9NHgCsVR4ArU0eAIqoAICOqACAhCQAAJKoAICWqACAmqgAgKanAICGqACAqLUeAKmFHgCqjR4Aq4UeAKydHgCtgR4Arv0eAK/1HgCwjR4AsZUeALKVHgCzpR4AtL0eALVxAQC2cQEAt3EBALhRAQC5UQEAulEBALtRAQC89QEAvf0BAL71AQC/7QEAsyUeAL4IBwCeqACAoqgAgKaoAIC2IR4AtTUeAKqoAIC7cR4AumkeAK6oAICyqACAv5UBAL5ZHgC9UR4AvGEeALaoAICjYR4AuqgAgL6oAICmZR4AwqgAgMaoAIClcR4Aqi0eAKs1HgDKqACAzqgAgK4dHgCv0QEArCUeAK0VHgDhVBoA0qgAgONcCgDWqACA2qgAgN6oAIDiqACA5qgAgOqoAIC+qAUA7qgAgPKoAICPMSoA+qgAgO/E+wD+qACAk2EuAJIdLwCR2SoAkEkqAJfZEgCWdRIAlQ0TAJTBLgCbHRsAmkEWAJlJFgCYDRcAn3EeAJ4RGwCdcRoAnHkaAKOhAgCinQMAoZUfAKCJHgDjiAEA4wgeAOFoAADh/B4A79wBAO98HwC1if4AtAH8ALMB+gCylfoAsQH4ALAR9gCv4fYArgH0AK0l8gCs7fIAqwHwAKrpDwCp1Q4AqN0OAKcBDACmyQoApe0KAKQBCACj4QYAovEGAKHlAwACqQCAggErAIMBKwAGqQCACqkAgIYxLwCHiS8AhIkrAIVFLgCKdRIAiwUTAIYIBQCHbAUAjhEXAI8RFwCMsRMAjV0WAJI9GgCTQRsAhMgFAIQABwCWUR8Al1EfAJRRGwCVORoAmn0eAJt9AgAOqQCAEqkAgIFZAQCAVQEAnFkDAIJRAQC+yAcAFqkAgBqpAIAeqQCAIqkAgCapAIAqqQCA79QeAC6pAIDhJB4AMqkAgONoAQA2qQCAOqkAgD6pAIBCqQCAu2kCALpZAgBGqQCASqkAgL8dAgC+HQIAvRkCALxxAgCz7QIATqkAgFKpAIBWqQCAWqkAgLZ9AgC17QIAXqkAgKMNBQD2qACAYqkAgGqpAIBmqQCApp0FAKUNBQBuqQCAq4kFAKq5BQCGCAMAh3wDAK/9BQCu/QUArfkFAKyRBQCAsQcAgbkHAIJBAACzsQYAcqkAgLVZBwC2MQcAdqkAgHqpAIB+qQCAuuEHALvhBwC84QcAveEHAL7hBwC/3QcAqLUGAKm5BgCqdQYAq4UHAKydBwCt/QcArvUHAK8ZBwCCqQCAhqkAgIqpAICOqQCAkqkAgJapAICaqQCAnqkAgLh1BwC5fQcAunUHALsFBwC8HQcAvTEHAL4xBwC/MQcAsGkHALFpBwCyeQcAs3kHALRpBwC1VQcAtlEHALdNBwCj/QcAoqkAgKapAICqqQCArqkAgKZ9BgClFQYAsqkAgKutBgCqrQYAtqkAgLqpAICvkQYArq0GAK2tBgCsrQYAvqkAgMKpAIDGqQCAyqkAgIAdAACBCQAAgjkAAM6pAIDSqQCA2qkAgIbIAACHpAEA3qkAgOKpAIDmqQCA6qkAgKiNAQCpmQEAqtkBAKvRAQCs8QEArfEBAK45AQCvOQEAhKAAAO6pAIDyqQCA9qkAgPqpAID+qQCAAqoAgAaqAIC4zQAAudUAALrVAAC75QAAvP0AAL2VAAC+nQAAv5UAALBJAQCxSQEAslkBALNZAQC0SQEAtUkBALb9AAC39QAAugUEALsJBAC44QcAueEHAL4JBAC/CQQAvAkEAL0JBACyjQcAs+UHALC1BwCxhQcAtuUHALftBwC08QcAtfEHAKpNBwCrVQcAqEkHAKlJBwCu3QcAr8UHAKxNBwCt1QcACqoAgA6qAIASqgCAFqoAgBqqAIAeqgCAIqoAgCaqAICz0QIAKqoAgC6qAIC+AAwAMqoAgLbxAgC1+QIANqoAgLsNAgC6DQIAOqoAgD6qAIC/DQIAvg0CAL0NAgC8DQIAghUAAKOVAgCAYQAAgWEAAKa1AgBCqgCASqoAgKW9AgCqSQIAq0kCAIbIDACHrAwArkkCAK9JAgCsSQIArUkCAKhlAgCpdQIAqn0CAKt1AgCsbQIArbECAK6xAgCvsQIAhKANAE6qAIBSqgCAVqoAgFqqAIBeqgCAYqoAgGaqAIC4MQEAuTEBALoxAQC7MQEAvNUBAL3dAQC+yQEAv8EBALDRAgCx0QIAstECALPRAgC0EQEAtREBALYRAQC3EQEA4bAGAGqqAIDj0AYAhEAPAG6qAIDhpAEAcqoAgOPABgB2qgCAeqoAgH6qAIDv1AYA7AAAAIKqAIDvZAcAhqoAgIqqAICOqgCAkqoAgLO5AgCWqgCAtakCALZ9AgCaqgCAnqoAgKKqAIC6WQIAu1kCALxJAgC9SQIAvpkBAL+ZAQCjdQ0ARqoAgKaqAICqqgCArqoAgKaxDQClZQ0AsqoAgKuVDQCqlQ0AvqQDALaqAICvVQ4ArlUOAK2FDQCshQ0AgE0AAIFVAACCVQAAs2UPALqqAIC1ZQ8Atm0PAL6qAICGQAMAhxQDALrtDwC7/Q8AvOkPAL3VDwC+3Q8Av9UPAKhZDgCpoQ8AqqEPAKuhDwCsoQ8AraEPAK6hDwCvoQ8AwqoAgMaqAIDKqgCAzqoAgNKqAIDWqgCA2qoAgN6qAIC4AQ8AuQEPALoBDwC7HQ8AvA0PAL01DwC+PQ8Av9UAALBlDwCxdQ8AsnEPALNNDwC0VQ8AtV0PALZNDwC3QQ8AoykOAOKqAIDmqgCA6qoAgO6qAICmIQ4ApSkOAPKqAICrsQ4AqqEOAPaqAID6qgCAr5kOAK6RDgCtmQ4ArKUOAP6qAIACqwCABqsAgAqrAIDvJA0ADqsAgBKrAIAWqwCA49AOABqrAIDhGA4AHqsAgIAVAACBGQAAggUAACKrAICo0QEAqdkBAKopAQCrKQEArDkBAK05AQCuKQEArykBAL5oAQAqqwCAhsgBAIesAAAuqwCAMqsAgDarAIA6qwCAuO0AALmFAAC6jQAAu4UAALydAAC9gQAAvoEAAL+BAACwWQEAsVkBALLtAACz5QAAtP0AALXlAAC25QAAt9UAALOhAgA+qwCAQqsAgEarAIBKqwCAtrkCALWxAgBOqwCAu50CALqdAgBSqwCAVqsAgL8hAwC+OQMAvTEDALw5AwCF+PUAo+UCAFqrAIBeqwCApv0CAGKrAIBmqwCApfUC
[base64-encoded binary payload omitted — no human-readable content]
DJAACADwAAgLihBgC6BgCAtwYAgMwAAIDOIQCAtQMAgN0FAIAYBgCAugUCALvVAgC46QUAuf0FAL7JAgC/5RcCvA0CAL0BAgCy4QUAs+EFALCNBQCxnQUAtuUFALfpBQC09QUAte0FAKo9BQCrwQUAqD0FAKk1BQCuzQUAr/UFAKzNBQCtxQUAoj0FAKMFBQCg1QIAoTkFAKYdBQCnBQUApB0FAKUVBQC/BgCAm8EFAD4GAIBVBgCAnt0FAJ8xBACcUQIAndUFAHIGAICJBgCApAMAgDAiAIDbAACAoAMAgI8HAIDuBwCA8gcAgJAJAIACCACABggAgJYLAICUCQCArwoAgG8HAICLBwCAlwcAgKIHAICqBwCAqgkAgPsOAIASDwCAHw8AgMwEMwLNsDACzCAzAs3gMALMEDACzGgwAsxYMALNjDACzGgxAs0UMQLM1DECzRQ2AsxwIALN0CcCzDA2AswkMQLMDDwCzWg/AswYPwLNND8CzBg9As3AMgLMRDwCzBg5Asw4MgLNqDICzIgyAs34MwLMfDMCzUAzAswoMwLNCDMCzMghAs0kJgLMrCYCzEA4AsyYJQLNyDoCzBwkAs0QJALMhDsCzag7AsysJQLNvDoCzKw4Asz4JwLM4DgCzXQ4AicPAID2BgCAYQ0AgIgNAIDNICoCzBwrAqoGAIAsIgCAzKQgAs2gJwLMOCYCygQAgMw4OgLNPDsCzBA5As1gPgLMoAMAvj0NAL3tLALWBACAu1UjAgQJAIC5PSICzwYAgNkHAIClBACAoA0AgLIEAIBvBQCA9AYAgL4EAIB1BQCAr70MAK6ZLgKtpQwAwgUAgKvFIgIDBgCAxAQAgCMGAIDQBACAyAUAgCkGAIBdBgCAowEYAqAEAIAaBwCAHQcAgJ9dDACeUQwAnUUMACcHAICbWSECrwcAgLEHAIC0BwCAuAcAgCoHAIDOBwCA0AcAgJMtJgLTBwCAbAgAgG8IAICPBQwAjnEMAI1lDAB5CACAi0UgAmAJAICJNS8CYwkAgGcJAIB8CACAcAkAgHMJAIC9AwCAACIAgIFdDACAYQwAgAABAIEYAACCAAQABCIAgIQQBwCFFAYAhuQIAIc8AgCILAUAiaQFAIoAeAAIIgCAjCQAAAwiAIAUIgCAECIAgLgRAACRxHsAkkh6AJNMeQAcIgCAzOgCAJbwCQC4OQAAkMAJACQiAICS8AkAzPgCAJS0CQC4DQAAKCIAgMwcAgC4BQAANCIAgMzkAgC4HQAAOCIAgDwiAIBDIgCAWiIAgKiMCACp5HsAYSIAgKvUBgDM5AIAuA0AAGsiAIDMlAIAbyIAgLGAewC4CQAAuBUAAMz8AgC15AgAcyIAgMzYAgB3IgCAuAUAALqcBQC7XAUAvAB8AL30fwC++H0Av/xyAIAJOgKBDToCggE6AoMFOgKEGToChR06AoYROgKHFToCiCk6AoktOgKKIToCiyU6Aow5OgKNPToCjjE6Ao81OgLM8AIAkekPAIMiAIDMzAIAuBkAAH8iAIDM3AIAl+UPALg1AAC4DQAAjyIAgMz8AgC4BQAAkyIAgMwwAgCXIgCAzNACAJsiAICfIgCAzIgCAKQtDwClVQ8Apl0PAMyUAgCoqToCqa06ArjVAACjIgCAuDUAAKciAIDMUAMAr7U6AswsAwCrIgCAzBgDALMFDwC0HQ8AzyIAgLYJDwC3CQ8Avmh9ALhtAAC4RQAAzDgDALwpDwDTIgCAviUPAMxYAwCH5Q4AzOg6Ari9AQC4yQEAzPA1As2kMwLMgCICzXwlAs2UNgLMBCkCzew7AsxkOgK45QEAuMEBAInVDgCI1Q4Al7EOALgNAACvIgCAsyIAgLciAIC4GQAAuyIAgNciAICfaTsC2yIAgL8iAIC4PQAAzMQCAMz4AgDDIgCAxyIAgLjZAADLIgCA3yIAgLjRAADjIgCAuPEAAMzMMwLnIgCAuMkAAMzoMwLrIgCAuNUAAKllAAC4yQAAzNgCAKq5BgC3TQ0Atk0NALU1DgC0NQ4AuFUAABUjAICxGQ8AsCkOAL/1AwC+UQ0AvVkNALw1DAC7XQ0Aul0NALldDQC4XQ0AgL0KAIHFCgCCFQQAg8kKAMx8BQCF3QoAhtUKAIfNCgDMVAUAifEKAIq5CACLDQgAjBEIAI0VCACOtScCj+UKAJBpCACRbQgAknEIAJNtJALMEAUAlR0IAJaFCgDMEAUAzDQFAJk9CACaiQoAmw0IAJwRCACdFQgAzEgFAMwQAgCgZQoAoW0KAKJlCgC4BQcApLEEAMzoAgCmsQQAuA0HAKiBBADM/AIAqpkIAKtdCgCsuQgArakEALglBwCvNQgAsNEIALHxBADMwAIAs40IALQpKAK1IQoAtiEKALchCgC4IQsAuSUIALhBBwC7KQsAvA0dAr3dDwC+MQsAvzELAIDdCgAZIwCAnKF9ANADAIDpAwCAhRkJAIaZCQCHlQkAiOEJAIklJQICBACAGwQAgC4EAIBBBACAVAQAgGcEAICQrQoAkUkFAJJtBQCTYQUAlGEFAJVtBQCWZQUAlxEFAJg1BQCZPQUAmjUFAJsNBQCcFQUAnR0FAJ4VBQCfCQUAoKkJAKH9BQCi9QUAowEFAKQFBQClDQUApgUFAKc9BQCoBQUAqQ0FAKoFBQCrGQUArIkJAK2pBQCutQkAr/0JALABCQCxfQUAsnUFALMBBQC0aQkAtQEFALYFBQC3PQUAuAUFALnhJQK6AQUAuwEFALzRJQK9PQkAvnkJAL9dCQCDMAUAoXgHAJ+xfgB6BACApHgHAKVIBwCNBACA8wQAgIt8BADdAACAEwEAgIhIBAAcAQCAIAEAgCQBAIAoAQCALAEAgDABAICyAAcAs/wHADQBAIDhAACAtuQHALfwBwDmAACA6wAAgLrgBwC7nAcAvIgHAL2oBwDwAACAs8F+AKPMBAD1AACA+gAAgIMABAD/AACAhXQEAKUgBAAEAQCAiEwEAAkBAIAOAQCAFwEAgK8tBwCNxAcArSEHAKwpBwDNAwCA8AQAgI8FAICwZQcA4gUAgB0GAIBDBgCAWgYAgHcGAICOBgCA0wMAgOwDAIAFBACAHgQAgDEEAIC8fAQAgt0rAoPlKwKA/QoAgfkrAoaZCQCHmQkAhOEKAIXhCgCKiQkAi4kJAIiJCQCJiQkAjoUJAEQEAICM4QgAjY0JAJK5KwKTQScCkJkrApHFCwCWyQsAl3UnApTFDQCV0SQCmskLAJvZKgKYyQsAmXkHAFcEAIBqBACAnP0LAH0EAICQBACA9gQAgKABAICkAQCAqAEAgONkAgCsAQCAsAEAgLQBAIDvvAcAqBEJALgBAIC8AQCAwAEAgMQBAIDIAQCAzAEAgNABAIDUAQCA2AEAgNwBAIDgAQCA5AEAgOgBAIDsAQCA8AEAgPQBAID4AQCA/AEAgAACAICCnH4ABAIAgKD1VAKh2VQCoulUAqP1dQCk7XUApZ12AKaVdgCnvXYAqIV2AKkpfQCqOX0AqwV9AKwdfQCtBX0Arg19AK8FfQCwf
X0AsUl+ALJRfgCzUX4AtHV+ALV9fgC2aX4At2l+ALhZfgC5WX4Auil+ALspfgC8IX4AvSF+AL4ZfgC/GX4AkgcAgDkJAIDXBwCATSIAgLQNAAC1NQAAtj0AAKIGAICsBgCArwYAgAMjAIAJIwCAvSV4ALy1WALGMQCALjoAgJkqAIC9KgCAySoAgNkqAIDhKgCA7SoAgPUqAID9KgCACSsAgF0rAIB1KwCAhSsAgJUrAIClKwCAtSsAgNUrAICAeX8AgYF/AIKBfwCDnX8AhI1/AIWxfwCGsX8Ah7F/AIjhfwCJ4X8AiuF/AIv9fwCM5X8Aje1/AI7lfwCP3X8AkKV/AJGtfwCSpX8Ak71/AJSlfwCVrX8Alm1+AJctfgCYFX4AmRl+AJrpfgCb6X4AnPl+AJ35fgCe6X4An+V+AKAdfgChJX4AoiV+AKM9fgCkJX4ApS1+AKYlfgCnXX4AqGV+AKltfgCqZX4Aq31+AKxlfgCtbX4ArmV+AK9dfgCwJX4AsS1+ALIlfgCzPX4AtCV+ALUpfgC2WXcAt9V1ALj9eQC56XUAuvl1ALvZeQC86XUAvdV1AL7RdQC/2XUAgDF2AIE9dgCCSXYAg0V2AIRBdgCFTXYAhvl0AId9dgCIoQIAiU12AIpZdgCLuXoAjEl2AI2degCOsQIAjx16AJCRVgKRKXYAkoF2AJPNdgCU2XYAlel2AJbJdgCX0VkCmKF2AJllWgKa8XYAm01aApzRdgCdYXoAnoFWAp/VdgCgBQIAoY1aAqI1VwKjCXYApCF2AKUtdgCmiVoCp5laAqi5WgKpdXYAql13ANkrAIDdKwCAESwAgDksAIBJLACAUSwAgFUsAIBhLACAfSwAgIEsAICZLACAnSwAgKUsAIC1LACAUS0AgGUtAIClLQCAuS0AgMEtAIDFLQCA1S0AgJl1CgD4LQCAJC4AgDAuAIBQLgCAXC4AgGAuAIBkLgCAgux6AINkewB8LgCAgC4AgIZ0ewCHvHsArC4AgLguAIDALgCAyC4AgNguAIDnLgCA7y4AgBsvAIAfLwCAJy8AgJJwfAArLwCAMy8AgJFMfAA7LwCASy8AgGcvAIDfLwCA8y8AgKvMfACo5HwAqdx8APcvAIB3MACAezAAgI8wAICiwHwAkzAAgJswAICjMACAzEBJAs0ASQLM/EoCzWhLAqswAIC3MACA7TAAgP0wAIARMQCAjjEAgJoxAICqMQCAsqx8ALNAfAC2MQCAwjEAgMoxAIDOMQCAtGx8ALUEfACAlQcAgZ0HAIKVBwCDqQcAhLkHAIW5BwCG2QcAh9kHAIjpBwCJ6QcAivkHAIv5BwCM6QcAjekHAI7RBwCP0QcAkLEHAJGxBwCSSQEAk0kBAJRZAQCVWQEAlkkBAJdJAQCYeQEAmXkBAJpJAQCbSQEAnFkBAJ1ZAQCeSQEAn0kBAKC5AQChuQEAoskBAKPJAQCk2QEApdkBAKbJAQCnyQEAqPkBAKn5AQCqyQEAq8kBAKzZAQCt2QEArskBAK/JAQCwuQEAsbkBALJJAQCzSQEAtFkBALVZAQC2SQEAt0kBALh5AQC5eQEAukkBALtJAQC8WQEAvVkBAL5JAQC/SQEA0jEAgNYxAIDaMQCAkjIAgNoyAIDmMgCA6jIAgO4yAIDyMgCA+jIAgP4yAIASMwCALjMAgDYzAIB2MwCAejMAgIIzAICGMwCAjjMAgJIzAIC2MwCAujMAgNYzAIDaMwCA3jMAgOIzAID2MwCAGjQAgB40AIAiNACARjQAgIY0AICKNACAqjQAgLo0AIDCNACA4jQAgAY1AIBKNQCAUjUAgGY1AIByNQCAejUAgII1AICGNQCAijUAgKI1AICmNQCAwjUAgMo1AIDSNQCA1jUAgOI1AIDqNQCA7jUAgPI1AID6NQCA/jUAgJ42AICyNgCAnoUMAOY2AIDqNgCA8jYAgIC5AwCBuQMAgskDAIPJAwCE2QMAhdkDAIbJAwCHyQMAiPkDAIn5AwCKyQMAi8kDAIzZAwCN2QMAjs0DAI/FAwCQvQMAkQEMAJJJDgCTSQ4AlFkOAJVZDgCWSQ4Al0kOAJh5DgCZeQ4AmkkOAJtJDgCcWQ4AnVkOAJ5JDgCfSQ4AoLkOAKG5DgCiyQ4Ao8kOAKTZDgCl2Q4ApskOAKfJDgCo+Q4AqfkOAKrJDgCryQ4ArNkOAK3ZDgCuyQ4Ar8kOALC5DgCxuQ4AskkOALNJDgC0WQ4AtVkOALZJDgC3SQ4AuHkOALl5DgC6SQ4Au0kOALxZDgC9WQ4AvkkOAL9JDgC8eQQAvXkEAL6JBAC/nQQAuHUEALl9BAC6aQQAu2kEALRxBAC1cQQAtnEEALdxBACwcQQAsXEEALJxBACzcQQArGkEAK1pBACucQQAr3EEAKhBBACpQQQAqkEEAKtBBACknQUApWEEAKZhBACnYQQAoJ0FAKGFBQCijQUAo4UFAJxdBQCdZQUAnm0FAJ9lBQCYXQUAmUUFAJpNBQCbRQUAlB0FAJVlBQCWbQUAl2UFAJAdBQCRBQUAkg0FAJMFBQCMMQcAjTEHAI4xBwCPMQcAiDEHAIkxBwCKMQcAizEHAIQxBwCFMQcAhjEHAIcxBwCAMQcAgTEHAIIxBwCDMQcAJjcAgC43AIA2NwCAcjcAgHY3AIB+NwCAgjcAgIY3AICyNwCAtjcAgL43AIDSNwCA1jcAgPI3AID6NwCA/jcAgCI4AIBCOACAUjgAgFY4AIBeOACAijgAgI44AICeOACAwjgAgM44AIDeOACA9jgAgP44AIACOQCABjkAgAo5AIAWOQCAGjkAgCI5AIA+OQCAQjkAgEY5AIBeOQCAYjkAgGo5AIB+OQCAgjkAgIY5AICOOQCAkjkAgJY5AICaOQCAnjkAgK45AIDGOQCAyjkAgNY5AIDaOQCA3jkAgOI5AIDqOQCA7jkAgPI5AID+OQCABjoAgA46AIASOgCAGjoAgIC5AQCBuQEAgskBAIPJAQCE2QEAhdkBAIbJAQCHyQEAiPkBAIn5AQCKyQEAi8kBAIzZAQCN2QEAjskBAI/JAQCQuQEAkbkBAJIRAACTEQAAlDEAAJUxAAAeOgCAIjoAgCo6AIAyOgCAPSMAgGUsAIBpLACAJSQAgIJgAgCZ4QAAgIAAAIGYAACC5AYAg4gEAITUGwCFlBoAhhgfALMjAICIxB4AiQAQAIqoEwCLrBEAjAAoAI20KwCOuCoAj7wpAOOwAgC+dAIAnlUAAOMUAgCCbAIAtyMAgJkNAAC+RAIAnjUAAIJoAgCZBQAAuyMAgO/MAgC+oAAAgoQAAO/YAgDj7AEA4/QBAL8jAIDjCAMAwyMAgOM4AwDHIwCA44gDAMsjAIDv4AMAzyMAgO+IAwDvPAEA78QDANMjAIDv1AMA4+wDAB43AIDXIwCA4+wDAOPsAwDj5AMA2yMAgOO4AwDvXAMA70wDAN8jAIDvSAMA7/QDAOMjAIDnIwCA7zQDAON8AwDjlAQA6yMAgO8jAIDzIwCA47QEAPcjAID7IwCA/yMAgO9sBAADJACA
ByQAgO9YBADvUAQACyQAgBYkAIAaJACAvQAAgOP4BADCAACAMSQAgB4kAIBtKQCA45wEAAglAIBrJQCAriUAgO9QBADaJQCABCYAgO88BAApJgCAgAlLAoYcdwC+RAIAgnQCAL5QAgA+JgCAmREBAJkNAQCPrAIAggQCAI1oAQCewQIAi3wBAJ49AQCeKQEAvggCAJfQAgCZXQEAldACAJ5VAQCT0AIAmXUBAJHQAgC+SAIAn7gCAEYmAICdtAIAnk0BAJuwAgCZXQEAmbQCAL6EAgCeqQEApowCAGImAICkgAIAmakBAGomAIChSAIAgqwCAK/kAgCCtAIAglwCAJnlAQC+CAIAgnwCAIIABACopAIAnvkBAL5wAgC1HAQAnoUBAL6oBQCyhAIAtrECAL6sBQC4KQkAuYkCALqZAgCCjAUAu+gEAIKcBQByJgCAuPAEAJ5ZBgCZbQYAnmEGAJl5BgC+fAIAnmEGAIJcAgC+QAIAmVkGAJ5dBgCCYAIAmaUGAL58AgCevQYAghwCAL4UAgCZzQYAvkwCAIJMAgCa3QYAnt0GAJ/FBgDjDAIAgrwCAJn5BgC+ZAIA7/QCAJrxBgCe6QYAn+kGAJ7ZBgCf1QYA4wQCAJklBgCaIQYAgngCAJk9BgDjBAIAgkQCAJolBgC+cAIA75wCAJ4FBgCfFQYA7+gCAJp1BgCZBQYAggQCAL5wAgDjcAIAnnUGAJ8NBgCeAQYAvnwCAOM0AgCZDQYAvmACAIJsAgDv8AIAmTUGAIKQAwDv2AIAniEGAIQmAICbxQcAmeUHAL58AgCe7QcAn8UHAOPsAwCdUAIAnNEHAIJsAgDv1AIAmc0HAIJ8AgC+cAIAmd0HAJ7dBwC+AAIA42gCAJ6tBwCZuQcA42gCAIJ8AgDjDAIAvkgCAJmpBwCCWAIA78QCAJ6ZBwC+bAIA77gCAIKUAgCejQcA77gCALsAAACZeQcAuQwAAJ5xBwC/AAAAglQCAL0EAAC+aAIAs9QDAJmxBgCxcAMAggQCALc4AACeoQYAtTQAAL5wAgCrWAMAnqEGAO9cAgCZqQYArxADAIJQAgCtFAMAmYUHAJlpBgC+WAIAnmEGAL58AgCCaAIApqACAOOQAgCZaQYA43wBAOOYAQDjrAEA49ABAOPoAQC+dAIAno0FAOMwAgDvzAIAgmgCAJnRBQDvlAIA71QBAO9wAQDvJAEA7ygBAL58AgCevQUA4wwCAIJ4AgCZrQIAvnQCAJ6lAgDjNAIAgmACAJkZAAC+YAIA7/wCAJ4NAACClAIA79QCAJAmAIDj/AIAmQkAAL5gAgCYJgCAnh0AAOMAAgCwJSoAglgCAJkNAADv9AIAvmQCAK4mAIDvwAIAnhkAAIIYAgCCOAIA43ACAJkRAACaNQAAmSkBAL50AgDsJgCAnyUAAJ4JAACZ6QEAvrQDAL7gAwCazQEA79gCAJ4RAQCC2AMA/SYAgIHEAgDjsAMAHycAgOP8AwC+/AIAhMQCAIIoAgCGEAIAKicAgIg8AgCeIQAAnw0AAHonAIDvKAMAj3QCAO8sAwCCiAIAmXUAAJoVAACSxAMAldADAJktAACa0QAAjicAgL7IAgCYaAMAm3wDAILEAwCeQQAAnykAALAnAICChAIA45ACAL4IAwC+JwCABigAgJ8ZAACe7QAA49ACAJlxAACaFQAAvhQCAO8wAgCZIQAA71gCABQoAICv7AMAggQCALFMHACwABwAniUAALJMHACeXQAAn2EAAOO8AgCZIQAA+QAAAHEpAIDvlAIAdSkAgL08HACCgB0Av8EfAHkpAIDjtB0AvnQCAJ71HwDj8B0AmQUAAH0pAIC+fAIAngkAAIJgAgCZDQAAiSkAgL5gAgDvzAIAnh0AAOklAIDv3AIA42gCAPkYAIDjPB0AIRoAgP0YAIABGQCAJRoAgCkaAIAtGgCAMRoAgDUaAIA5GgCA76QCAD0aAIDvJB0AQRoAgLHFAAAFGQCAs8UAALLdAAC1yQAAtMEAALcdAAC2wQAAuWUAALhlAAC7zQAAus0AAL3dAAC83QAAv8UAAL7JAAAJGQCADRkAgE0ZAIBhGQCAERkAgBUZAIDvFHgD7wBIA+HYTQPhOKgC41x5A+O0UAOtGQCAsRkAgLUZAIC5GQCAgMkBAIHVAQCC3QEAg20CAITdAQCFcQIAhgEEAIcdBQCIJQUAiTUFAIo9BQCLbQUAjHUFAI1lBQCObQUAj80BAJC1AQCRvQEAkrUBAJNNAwCUVQMAlV0DAJZVAwCXTQMAmHUDAJl9AwCadQMAm00DAJxVAwCdWQMAnkkDAJ9JAwCguQMAobkDAKLBAwCj3QMApMUDAKXNAwCmxQMAp/0DAKjJAwCpyQMAqtEDAKvRAwCsMQMArTEDAK4xAwCvMQMAsFEDALFRAwCyUQMAs1EDALRxAwC1cQMAtnEDALdxAwC4UQMAuVEDALpRAwC7UQMAvDEDAL0xAwC+MQMAvzEDAL0ZAIDBGQCAxRkAgMkZAIDNGQCA0RkAgNUZAIDZGQCA3RkAgOEZAIDwIAIA5RkAgOkZAIDtGQCA8RkAgPUZAICc9TYAnf02APkZAICRkAIA/RkAgKkZAIBFGQCASRkAgEUaAIC6adgASRoAgE0aAIC4sTYAubE2AFEaAIBVGgCAWRoAgF0aAIBRGQCAYRoAgGUaAIBVGQCAWRkAgF0ZAIBlGQCAaRkAgG0ZAIBxGQCAdRkAgHkZAIB9GQCAgRkAgIUZAICJGQCAjRkAgJEZAICVGQCAglgCAJkZAIBpGgCA8FgCAG0aAICdGQCAoRkAgKUZAIABGgCABRoAgJF0AwDhtDsCCRoAgOPYIgINGgCAERoAgBUaAIAZGgCAHRoAgKUqAIBVLQCAqSoAgMEqAICtKgCAljMAgO/IPwK1KgCA4ZTzAuGY0gLjlPcC4xDGAuGUtgLhkJ0C44SiAuMIhwIZGQCAHRkAgO+4swLvOIsCnSoAgOAtAIDvIJcC7+DgAoLkAgBpLQCACAIAgLrF2QAOAgCAFAIAgBoCAIAgAgCAJgIAgCwCAIAyAgCAOAIAgD4CAIBEAgCASgIAgFACAIDhgHgC8OQGAOMUagKCgAgA4aAPAuEIEwLjhA4C4xgeAlYCAIA0AwCA7zQ7Au8wHwI6AwCAQAMAgO8MEgJGAwCAJRkAgCkZAIBMAwCAUgMAgC0ZAIAxGQCAWAMAgF4DAIB2AwCAggMAgIgDAICOAwCAlAMAgJoDAIB8AwCAZAMAgDUZAIA5GQCAbQMAgFwCAIA9GQCAQRkAgHQCAIBoAgCAvAIAgHoCAICYAgCAYgIAgJICAIBuAgCApAIAgNQCAICAUQYAgV0GAIJVBgCDaQYAhHkGAIV5BgCGaQYAh2kGAIhZBgCJoQcAiqUHAIu9BwCMpQcAja0HAI6lBwDyAgCA7AIAgOACAICSCRQAkxUUAJTxBwCV8QcAlvEHAJfxBwCY0QcAmdEHAJo5FACb0QcAnIEHAJ2BBwCefQcAnx0UAJktAQCYLQEAmz0BAJo9AQCdLQEAnC0
BACEZAICeVQEAkd0GAJDRBgCTJQEAkiUBAJUtAQCULQEAlx0BAJYdAQCJ8QYAiOkGAIvxBgCK+QYAjbEGAIzpBgCPqQYAjrkGAIHxBgCA7QYAg/EGAIL5BgCF0QYAhOkGAIfRBgCG2QYAua0DALitAwC7vQMAur0DAL2tAwC8rQMAv90DAL7dAwCxrQMAsK0DALO9AwCyvQMAta0DALStAwC3nQMAtp0DAKm5AQCosQEAq3UBAKqxAQCtFQEArBUBAK/dAwCu3QMAobkBAKCpAQCjiQEAorEBAKWZAQCkkQEAp4kBAKaRAQAuAwCAwgIAgM4CAIDmAgCA2gIAgAQDAICwAgCA+AIAgCIDAIAKAwCAngIAgIACAIC2AgCAyAIAgP4CAICGAgCAKAMAgKoCAIAQAwCAjAIAgBYDAIAcAwCACS0AgOsuAIDKNACAhAcAgAYFAIAVBQCAJAUAgDMFAIBCBQCASwUAgPAsOABUBQCAXQUAgGYFAICSBQCA40huA5sFAIDhTG4DpAUAgO/0AQOnBQCAqgUAgK0FAIBGOgCApkwAgNZVAIA2aACAZnEAgJZ6AID2jACAVp8AgIaoAIDtugCAJMQAgFTNAICE1gCAtN8AgDG7AIA6rgCABqUAgPkqAICJKwCAoSoAgOUqAIBBMQCAATEAgE40AIDVLACABjMAgIo3AIBiNACAHSwAgJI0AICeMwCAEjgAgFkrAICFLACA+jEAgCY5AIAdKwCArSsAgJ4xAIC8LgCAySwAgFksAIA4LgCALC4AgJGgBgDuMwCAGSsAgJ43AIB1LACAzS0AgLAFAIDh1D8D4VgaA+PcLwPjUA4D4RTyA+FA0wPjQOoD40DDA7MFAIC2BQCA73jrA+9c8gO5BQCA5QUAgO9E3gPvmCUD4bSLA+E8lwPjfKID45iLA+EwQQDhUKwD4xx/AOOIRgDoBQCA6wUAgO84ewDv4EEA7gUAgPEFAIDvzIoD7yCHA4DBGACB3RgAgikLAIMpCwCE6Q4AhekOAIYZDwCH8RgAiCUPAIntGgCK5RsAiyEdAIw5HQCN5RsAjmkQAI/VGgCQhRsAkU0PAJJFDwCTXQ8AlEUPAJVNDwCWRQ8Al30PAJhFDwCZTQ8AmkUPAJtpGwCcQQ8AnUEPAJ5BDwCfQQ8AoMEPAKHBDwCiwQ8Ao8EPAKS5CwCluQsApqkLAKfNDwCo9Q8Aqf0PAKr1DwCrzQ8ArNkPAK3ZDwCuyQ8Ar8kPALC5DwCxuQ8AsmkPALNpDwC0YQ8AtWEPALY5DwC3OQ8AuBEPALkRDwC66QEAu+kBALz5AQC9+QEAvukBAL/pAQD0BQCA9wUAgPoFAID9BQCAAAYAgCAGAIDhBACAgAUAgNMFAIAOBgCANAYAgEsGAIBoBgCAfwYAgJYGAIDdAwCA9gMAgA8EAIASBwCAQQgAgD4IAIA/BwCAOSQAgHIkAICjJACAyCQAgLkmAIDEJgCAyCYAgMwmAIDQJgCALygAgG4oAICWKACAmigAgL8oAIDHKACA4ygAgPUoAID5KACA/SgAgLrp0wAVKQCAMCkAgEspAIA9JACASiQAgFckAIBkJACAdiQAgIMkAICVJACApyQAgLckAIDMJACA1iQAgOQkAIDuJACA+yQAgAwlAIAWJQCAbyUAgHYlAIAkJQCAgBkDAIEZAwCCKQMAgykDAIQ5AwCFOQMAhikDAIcpAwCIGQMAiRkDAIppAwCLaQMAjHkDAI15AwCOaQMAj2kDAJAZAwCRGQMAkgEEAJMtAwCUNQMAlVUGAJZdBgCXVQYAmG0GAJl1BgCafQYAm3UGAJxtBgCdNQYAnj0GAJ81BgCgzQYAodUGAKLdBgCj1QYApPkDAKX5AwCm6QMAp+kDAKjZAwCp+QYAqikGAKspBgCsOQYArTkGAK7FAwCvPQMAsEUDALFNAwCyRQMAs10DALRFAwC1TQMAtkUDALd9AwC4SQMAuUkDALpZAwC7fQYAvGUGAL1tBgC+ZQYAgCUAgKkVDwCoAQ8Aq00PAKpNDwCtRQ8ArEUPAK+hDQCuqQ0AoXULAKBhCwCj7QsAoqkLAKXlCwCk5QsApzkPAKZZCAC5oQ0AuJkNALuhDQC6qQ0AvaENALy5DQAxJQCAvqkNALGhDQCw2Q0As6ENALKpDQC1oQ0AtLkNALehDQC2qQ0AOCUAgEglAIBbJQCAsiUAgLwlAICRJQCAoSUAgNAlAICB7Q0AgO0NAIP9DQCC/Q0Ahe0NAITtDQCH2Q0AhiEYAJlNDQCYTQ0Am1ENAJpdDQCdeQ0AnHUNAJ9pDQCecQ0AkYkNAJCBDQCTmQ0AkoENAJWJDQCUgQ0Al30NAJaBDQDgJACAICUAgI0lAIDMJQCA3iUAgAgmAIAtJgCAQiYAgPAlAID6JQCADCYAgBkmAIAxJgCATiYAgFgmAIB2JgCASiYAgGYmAIBuJgCAgCYAgIwmAICUJgCAoyYAgN4mAICcJgCAsiYAgKcmAIC9JgCA1CYAgOImAIABJwCAEScAgBsnAIBPJwCAkicAgOcnAIBPKQCAXSkAgGEpAIBlKQCA8CYAgC4nAIA+JwCASCcAgCMnAIBTJwCAYycAgH4nAIBwJwCAlicAgMInAIDJJwCApicAgNMnAIDdJwCAtCcAgBgoAIAKKACA6ycAgCUoAIDyJwCA/CcAgDMoAIBAKACASigAgFQoAIBeKACAcigAgH8oAICGKACAnigAgKUoAICyKACAyygAgNUoAIDnKACAASkAgA4pAIAZKQCAIykAgDQpAIA7KQCAUykAgMMDAIDmBACAhQUAgNgFAIATBgCAOQYAgFAGAIBtBgCAhAYAgJsGAIDjAwCA/AMAgBUEAIAoBACAOwQAgE4EAIBhBACAdAQAgIcEAICaBACAAAUAgA8FAIAeBQCALQUAgDwFAIBjCACAJAgAgMEGAID8BwCAHQkAgOMoEwAzCQCAKggAgC0IAIAxCACAJAcAgNwuAIDKMACA2S0AgLswAIBFMQCAJwkAgO/sEwAGCQCA3A0AgM8IAICDCACAMQcAgEwHAID8BgCACggAgJQIAIAqCQCACQkAgOANAIDsDQCA2wgAgJkIAIAVBwCAhggAgFUHAID/BgCApgcAgJEkAIDwDQCA4ggAgCcIAICcCACAWAgAgBUJAID0DQCA5QgAgBQIAICfCACA6AgAgBcIAIDJCACAoggAgOwIAIAbCACAzAgAgKYIAID3CACA/QgAgIgHAICKCACAWQcAgAMHAIA9CQCAQQkAgEkJAIA2CQCAGAkAgPgNAID0CACALQkAgAwJAIDkDQCA0ggAgI4IAIBdBwCAMAkAgA8JAIDoDQCA1QgAgJEIAIBgBwCArQgAgGMHAIDjSBIA4xQSAOP4EwDjuBMA4+wSAOOgEgDjbBIA43gSAO/ADQDv2A0A73QSAO9QEgDvqBIA79wSAO8oEwDvIBMA6QcAgMwGAIAOCACAEQgAgNgGAIDUBgCAIQgAgAcHAIBnCACADAcAgHYIAIA0BwCANwcAgKoIAIC2CACAuQgAgO
PYEADjoBAA46AQAON0EQDjNBAA4wgQAOPkEADj9BAA77wQAO/gEADvzBAA7zgQAO8QEADvcBAA73AQAO9MEADjhBMA4+gTAOMwEADjEBAA42ATAONAEwDjpBMA47QTAO/IEwDvtBMA75gTAO98EwDvXBMA70wTAO8UEwDv6BAAgO08AIH1PACC/TwAg/U8AITtPACFFT0Ahh09AIcVPQCILT0AiTU9AIo9PQCLNT0AjC09AI0VPQCOHT0AjxU9AJBtPQCRdT0Akn09AJN1PQCUbT0AlRU9AJYdPQCXFT0AmC09AJk1PQCaPT0AmzU9AJwtPQCdFT0Anh09AJ8VPQCg7T0AofU9AKL9PQCj9T0ApO09AKUVPQCmHT0ApxU9AKgtPQCpNT0Aqj09AKs1PQCsLT0ArRU9AK4dPQCvFT0AsG09ALF1PQCyfT0As3U9ALRtPQC1FT0AthE9ALcRPQC4MT0AuTE9ALoxPQC7MT0AvBE9AL0RPQC+ET0AvxE9AIDxPACB/TwAgvU8AIMNPwCEFT8AhR0/AIYVPwCHDT8AiDU/AIk9PwCKNT8Aiw0/AIwVPwCNHT8AjhU/AI8NPwCQdT8AkX0/AJJ1PwCTDT8AlBU/AJUZPwCWCT8Alwk/AJg5PwCZOT8Amgk/AJsJPwCcGT8AnRk/AJ4JPwCfCT8AoPk/AKH5PwCiCT8Aowk/AKQZPwClGT8Apgk/AKcJPwCoOT8AqTk/AKoJPwCrCT8ArBk/AK0ZPwCuCT8Arwk/ALB5PwCxeT8Asgk/ALMJPwC0GT8AtRk/ALYJPwC3CT8AuDk/ALk5PwC6CT8Auwk/ALwZPwC9GT8Avgk/AL8JPwCA+TwAgfk8AIJJPQCDST0AhFk9AIVZPQCGST0Ah0k9AIh5PQCJeT0Aikk9AItJPQCMWT0AjVk9AI5JPQCPST0AkDk9AJE5PQCSAQQAk00GAJRVBgCVXQYAllUGAJdNBgCYdQYAmX0GAJp1BgCbTQYAnFUGAJ1dBgCeVQYAn00GAKC1BgChvQYAorUGAKPNBgCk1QYApd0GAKbVBgCnzQYAqPUGAKn9BgCq9QYAq80GAKzVBgCt3QYArtUGAK/NBgCwtQYAsb0GALK1BgCzTQYAtFUGALVdBgC2VQYAt00GALh1BgC5fQYAunUGALtNBgC8VQYAvV0GAL5VBgC/TQYArH0/AK2lPwCurT8Ar6U/AKh9PwCpZT8Aqm0/AKtlPwCkHT8ApUU/AKZNPwCnRT8AoB0/AKEFPwCiDT8AowU/ALydPwC9pT8Avq0/AL+lPwC4nT8AuYU/ALqNPwC7hT8AtN0/ALWlPwC2rT8At6U/ALDdPwCxxT8Ass0/ALPFPwCMZToAjW06AI5lOgCPfToAiEU6AIlNOgCKRToAi306AIRlOgCFbToAhmU6AId9OgCABToAgQ06AIIFOgCDfToAnF04AJ3lPwCe7T8An+U/AJhdOACZRTgAmk04AJtFOACUuTgAlWU4AJZtOACXZTgAkAU6AJENOgCSBToAkwE5AMAIAIDYCACA3ggAgPAIAIB2BwCAIgkAgHkHAICBBwCAVAkAgJ0HAIDLBwCAvQcAgMQGAIDcBACAewUAgM4FAIAJBgCALwYAgEYGAIBjBgCAegYAgJEGAIDXAwCA8AMAgAkEAIAiBACANQQAgEgEAIBbBACAbgQAgIEEAICUBACA+gQAgAkFAIAYBQCAJwUAgDYFAIBFBQCATgUAgFcFAIBgBQCAaQUAgJUFAICeBQCAXQgAgFYOAIBZDgCAOjoAgKwKAIAVCwCANjoAgD46AICcGQAAnRkAAJ45AACfOQAA4wwAgEI6AIB6NwCA8TAAgKI3AIBaMgCAxSoAgLksAICaMDUA7C0AgB0tAIDoLQCA1y8AgJ+ENQDSMwCAnUQpAGI1AICaNgCA1jYAgAo3AIAeOACAdjEAgAIyAICuMgCARjMAgGI2AIBGOACAcjkAgOkqAICNLACAijEAgNIyAICWNgCAwjkAgJQuAIB6MgCAhjYAgBo3AIALMACAvjUAgLSAGgC1hBkAtojmALeM5ACwABwAsZQeALIAGACznBsAvADsAL2k7wC+qO4Av6TtALgA4AC5tOMAurjiALu84QCkwAAApQAMAKbIDgCnAAgA4jYAgAcvAIAFMQCArXwDAKwAEACt5BMArugSAK9gEQCo8AoAqRwJAKr4FgCr/BQAGjIAgB4zAIAqOACAKSsAgMErAIAtLACAczAAgIIxAIDOMgCA8jMAgI42AICmNgCAyjcAgO44AICiOQCAvjkAgC40AIBuNACAvAgAgCY1AIBGNgCAejgAgE43AIChLQCAIy8AgN40AICeNQCAAjMAgDY0AICaNwCA5jgAgJ0tAIBwLgCAejEAgC4yAIBiMgCAFjUAgD41AICmOACAKSwAgJwAAACqNQCAzSsAgMkrAICaNACAKjUAgF42AICuOACAajcAgA8wAIBaNwCA0SoAgEQuAIB7LwCAMjMAgLIzAIBNLACAPjQAgDkrAIBfLwCAsSoAgO4xAICLMACAEjUAgIDpAwCB6QMAgjkvAIP9AwCE5QMAhe0DAIblAwCHfS4AiEEuAIkhAgCKeS8AiyUCAIw9AgCNJQIAjiECAI8dAgCQZQIAkW0CAJJlAgCTfQIAlGUCAJVtAgCWZQIAlx0CAJglAgCZLQIAmiUCAJs9AgCcJQIAnS0CAJ4lAgCfHQIAoOUCAKHtAgCi5QIAo/0CAKTlAgCl7QIApuUCAKdNAgCodQIAqX0CAKqpAQCrqQEArLkBAK25AQCuqQEAr6kBALDZAQCx2QEAsukBALPpAQC0eSIAtf0BALb1AQC37QEAuNUBALndAQC61QEAu60BALy1AQC9uQEAvqkBAL+pAQChLACAjS0AgP4zAIBmNgCAPjcAgLoxAIDmMQCAHzAAgB42AIA/MACArjMAgAUrAICBKwCAxSsAgFYxAID+NACA9jUAgEo3AIBaOACANSwAgOksAIAXLwCApzAAgH4yAIBCNACAljgAgHo5AIDOOQCA5jkAgOkwAICmMQCA7jcAgOMuAIC/LwCA2y8AgGswAIBuMgCAujIAgGozAICONACAMjUAgJY1AIDeNwCAbjYAgAY4AIB+OACA6SsAgBUsAID9LACAqjIAgPY2AIADLwCAcy8AgDcwAICyMQCA2jQAgCYzAIAVKwCAWS0AgKguAIB/LwCAQjMAgF4zAIBuNQCAgFEBAIEBKgCCXQEAg1UBAIRNAQCFdQEAhn0BAId1AQCITQEAiVUBAIqdKwCLWQEAjEkBAI1JAQCOuQEAj7kBAJDJAQCRyQEAktkBAJPZAQCUyQEAlckBAJb5AQCX+QEAmMkBAJnJAQCa2QEAm9kBAJzJAQCdyQEAnrkBAJ+5AQCgSQEAoZUBAKJFAQCjXQEApEUBAKVNAQCmRQEAp30BAKhFAQCpTQEAqnkPAKtBAQCsQQEArUEBAK5BAQCvQQEAsMEDALHBAwCywQMAs8EDALTBAwC1wQMAtsEDALfBAwC4wQMAucEDALrBA
wC7wQMAvMEDAL3BAwC+wQMAv8kMAI41AIBiOACA4jgAgPI4AIAuOQCALSsAgII0AIBOOACAyjgAgJcvAIDxKgCAUSsAgEguAIBoLgCAlzAAgMYyAIDOMwCAejYAgBo4AIDZMACAojgAgA0sAIAlMQCAMTEAgBIyAIBKMgCATjMAgKozAIAqNACADjUAgDo5AIDrLwCAsjgAgEErAICMLgCAMjIAgOI3AIBPLwCAny8AgDkxAIC6OACA8SsAgNksAIB4LgCAwjAAgBUxAIBiMQCA9jEAgEozAIC+MwCAWjUAgPo2AIAGNwCA1jgAgF0sAIBOMgCA3SwAgMoyAIBuMwCAijYAgL44AICqOQCA0jkAgC0xAICxOSMAsBEDALMVAwCyFQMAtTUDALQ1AwC3NQMAtjUDALkVAwC4FQMAuxUDALoVAwC9dQMAvHUDAL91AwC+dQMAoZkNAKCRDQCjqQ0AopENAKW5DQCksQ0Ap6kNAKaxDQCpmQ0AqJENAKtpAwCqkQ0ArXkDAKxxAwCvaQMArnEDAJEZDQCQEQ0Aky0NAJIRDQCVPQ0AlD0NAJctDQCWLQ0AmR0NAJgdDQCbbQ0Amm0NAJ15DQCcgQ4An2kNAJ5xDQCBmQ0AgAkjAIOpDQCCkQ0AhbkNAISxDQCHqQ0AhrENAImZDQCIkQ0Ai2kNAIqRDQCNeQ0AjHENAI9pDQCOcQ0AKjIAgMY1AIDGNACA6jQAgBozAICiMgCAZjcAgA0rAIAuNgCA9SsAgOUrAIDzLgCAEzAAgPY0AIA0LgCABjIAgOUwAIDqNwCAqjgAgA8vAIBhKwCANS0AgIktAIDVMACA0SsAgCIzAIDmMwCASjQAgGY0AIBqNACAfjQAgPo4AIDuNACAkjYAgFY3AIAKOACANjgAgE45AIBSOQCAVjkAgLo5AIAuOACAxjgAgDErAIBVKwCAaSsAgCUsAIAxLACAcSwAgCUtAIBBLQCASS0AgIUtAICRLQCAdC4AgIsvAICzLwCAuy8AgJH4EADTLwCAfzAAgK8wAIDdMACAWjEAgIApAQCBKQEAgjkBAIM5AQCEKQEAhSkBAIZZAQCHWQEAiNkoAIltAQCKKSUAi2EBAIxhAQCNYQEAHjIAgDoyAICQGQEAajIAgJIVAQC+MgCA3jIAgJU1AQCWPQEAlzUBAJgNAQCZFQEAmh0BAJsVAQCcDQEAnfUBAJ7dKABSMwCAoAUBADI0AICiAQEAVjQAgFI0AIClGQEApgkBAFo0AIBeNACAdjQAgKo9AQCrNQEArC0BAK0VAQCuHQEArxUBALBtAQCxdQEAsn0BALN1AQC0bQEAtRUBALYdAQC3FQEAuC0BALk1AQC6PQEAuzUBALzZLgC9KQEAvhkBAL8ZAQC6eR4Au3keALjNAgC5eR4AvpUeAL+dHgC8QQIAvZ0eALJ9HgCzRR4AsH0eALF1HgC2XR4At0UeALRdHgC1VR4AqgUeAKsNHgCodR4AqQ0eAHo0AICeNACArBUeAK0NHgCiSR4Ao0keAKBJHgChSR4ApkkeAKf5AgCkSR4ApUkeAJqNHgCblR4AmI0eAJmFHgCeiR4An4keAJyNHgCdhR4AkgUDAJP1AACQCQMAkY05AJaxHgCXFQYAlO0AAJUBHACKvQMAi0EDAIiFAwCJnQMAjkEDAI9JAwCMyTkAjVEDAIIVAgCDHQIAgAUCAIEdAgCGzQMAh7EDAIQFAgCFxQMAs/kFALLxBQCx+QUAsOEFALeZKgC2EQMAtRkDALThBQC7NQMAujUDALklAwC4JQMAvxUDAL4VAwC9JQMAvCUDAKP9BQCi/QUAof0FAKD9BQCnnQUApp0FAKWdBQCknQUAq7kFAKqxBQCpJScAqL0FAK+ZBQCukQUArZkFAKyhBQCTAQUAkvkFAJF1OQCQ9QUAlwEFAJYZBQCVEQUAlBkFAJt5CQCaOQUAmTEFAJg5BQCfHQUAnh0FAJ0dBQCcHQUAg4kFAIKBBQCBiQUAgPEFAIeFBQCGhQUAhZUFAISBJgCLhQUAioUFAIm1BQCItQUAj4UFAI6FBQCNlQUAjJUFAM40AIA6NQCAQjUAgFY1AIB+NQCAzjUAgAI2AIBqNgCAEjcAgCo3AIBeNwCAYjcAgKY3AICqNwCAAjgAgNo4AIAeOQCANjkAgIMvAICQ6gCA5jUAgLkqAIC9KwCAfSsAgCUrAIBlKwCAkSsAgCEsAIA9LACAES0AgCEtAIA9LQCAmS0AgOQtAIDwLQCADC4AgBwuAIALLwCAEy8AgEMvAIBjLwCAky8AgKsvAICbLwCAry8AgO8vAIBHMACAUzAAgFswAICDMACACTEAgB0xAIBeMgCAVjIAgIYyAIAWNACA4jIAgBYzAIBiMwCAfjMAgKIzAIDGMwCAyjMAgOozAICAjQEAgZUBAIKdAQCDlQEAhI0BAIW1AQCGvQEAh7UBAIiNAQCJwR0AipkBAIvBHQCMhQEAjY0BAI6FAQCP/QEAkIUBAJEZHQCSkRQAk4UBAJSdAQCViTIAlk0ZAJc9GwCYsQEAmbEBAJotHACbtQEAnD0cAJ2pAQCemQEAn5kBAKDlHQChbQEAomUBAKN9AQCkZQEApW0BAKbxHQCnYQEAqKEDAKmhAwCqoQMAq6EDAKyhAwCttQEArq0DAK+lAwCwYRkAsdkDALLZAQCz7QMAtPUDALX9AwC29QMAt+0DALjFAQC50QMAumEdALvVAwC82QEAvT0XAL7FAwC/0QEA+jMAgA40AIAKNACAOjQAgLY0AIDmNACAHjUAgE41AIAyNgCAWjYAgM42AIAWNwCAIjcAgEI3AIBGNwCAUjcAgG43AIDmNwCAFjgAgEo4AIBqOACAtjgAgA45AIAqOQCAijkAgCfqAIAi6gCAVOoAgOEpAIAJKgCADSoAgNbqAIAD6wCAe+sAgBY6AIAmOgCARwgAgFIIAIBVCACASggAgE4IAIBXCQCA8Q4AgOIOAIDnDgCA9g4AgOwOAICyNACASw8AgMoPAICBDwCALw8AgFoPAIBnDwCAbw8AgJ0PAIDCDwCAuA8AgL0PAICqDwCAsQ8AgP4OAIADDwCACA8AgIBBAQCBMQMAgk0BAINFAQCEXQEAhUUBAIZNAQCHIQMAiF0fAIl9AQCKaQMAi3EBAIx1AwCNVQEAjlk6AI9ZAQCQKQEAkSkBAJI5AQCTOQEAlCkBAJUpAQCW2QEAl9kBAJjpAQCZ6QEAFQ8AgCIPAIAqDwCAMg8AgDwPAIBBDwCARg8AgFAPAIBVDwCAXQ8AgGoPAIByDwCAdw8AgHwPAICEDwCAiQ8AgJMPAICYDwCAoA8AgKUPAIDFDwCANw8AgBoPAIBiDwCAjg8AgA0PAIDdFgCA5hYAgOkWAIDvFgCA4xYAgOwWAIDgFgCAExcAgBYXAID1FgCA8hYAgPgWAICAmQcAgZkHAPsWAICDrQcAhLUHAAQXAICGsQcAh7EHAIiRBwCJkQcAipEHAIuRBwCM8QcAjfEHAI7xBwCP8QcAkJEHAJGVBwCSnQcAk5kHAJSFBwCV
gQcAloEHAJeFBwCYuQcAmb0HAJq1BwCbsQcAnK0HAJ2pBwCemQcAn50HAKBhBwChZQcAom0HAKNpBwCkdQcApXEHAKZxBwCndQcAqEkHAKlNBwCqRQcAq0EHAKxdBwCtWQcArkkHAK9NBwCwMQcAsTUHALI9BwCzOQcAtCUHALUhBwC2IQcAtyUHALgZBwC5HQcAuhUHALsRBwC8DQcAvQkHAL7xAAC/9QAAgAkBAIENAQCCHQEAgxkBAITZAACF3QAAhtUAAIfRAACI8QAAifUAAIr9AACL+QAAjOkAAI3tAACO5QAAj+EAAJCdAACRmQAAkq0AAJOpAACUtQAAlbEAAJaxAACXtQAAmIkAAJmNAACahQAAm4EAAJydAACdmQAAnokAAJ+NAACgdQAAoXEAAKJ9AACjeQAApGlQAqVtUAKmYQAAp2UAAKhZAACpXQAAqlUAAKtRAACsTQAArUkAAK49AwCvOQMAsClQArEtUAIBFwCABxcAgP4WAIANFwCAChcAgBkXAIDZXFICHxcAgCUXAIAiFwCAKBcAgCsXAIA0FwCALhcAgKOhAACipQAAoZEAAKCVAACntQAAprEAAKW9AACkuQAAq40AAKqJAACpgQAAqIUAAK+FAACugQAArYkAAKyNAACz/QAAsvkAALHxAACw9QAAt5kAALadAAC1nQAAtJkAALutAAC6qQAAuaUAALilAAC/ZQEAvmEBAL1tAQC8aQEAHBcAgFcXAIBAFwCAPRcAgEgXAIBOFwCAOhcAgNksUQJLFwCAVBcAgHkWAIDhDwCAMRAAgA4QAIAiEACAHRAAgJNBAAAnEACALBAAgBMQAICXWQAAllUAAJVZAACUXQAAm3EAAJppAACZZQAAmGUAAJ9lAACeYQAAnTFTApxtAAC4gQQAuYEEALqBBAC7gQQAvIEEAFEXAIC+jQQA5g8AgLDdBQCxTQQAskUEALNdBAC0RQQAtU0EALZFBADrDwCAqKEFAKntQQCqrQUAq6UFAKy9BQCtpQUArq0FAK+lBQCgqQUAoZFBAKKpQACjoQUApKEFAKWhBQCmoQUAp6EFAP8PAIAYEACAWBAAgF0QAIBpEACAnVUFAH8QAICfWQUAjhAAgJMQAICeEACAkwUFAJQdBQCVBQUAlg0FAJcFBQC4EACAyxAAgO8QAIAhEQCAJhEAgC4RAIA9EQCATBEAgIBxBQCBcQUAgnEFAINxBQCEUQUAhVEFAIZdBQBREQCAWREAgHwRAICjEQCArxEAgM8RAIDUEQCA2REAgBMSAIAmEgCAMhIAgEoSAIDEEgCAGhMAgDMTAIA4EwCASxMAgFwTAIBuEwCAcxMAgJoTAICiEwCAtxMAgN4TAIDjEwCAPRQAgEIUAIBHFACAUxQAgF8UAIBkFACAbBQAgHgUAICSFACAlxQAgJ8UAICkFACAqRQAgK4UAICzFACAuBQAgMsUAIDQFACA7BQAgAYVAIAgFQCALBUAgEQVAIBJFQCAVhUAgHcVAICaFQCAtBUAgMAVAIDFFQCAzRUAgO4VAIAIFgCAFxYAgDQWAIA5FgCAQRYAgEYWAIBZFgCAXhYAgICtAQCBtQEAgr0BAIO1AQCErQEAhdUBAIbdAQCH1QEAiO0BAIn1AQCK/QEAi/UBAIztAQCN1QEAjt0BAI/VAQCQrQEAkbUBAJK9AQCTtQEAlK0BAJVVAwCWXQMAl1UDAJhtAwCZdQMAmn0DAJt1AwCcbQMAnVUDAJ5dAwCfVQMAoK0DAKG1AwCivQMAo7UDAKStAwCl1QMAphkOAKfZAwCobQ8AqSEOAKrhAwCr4QMArCkOAK3lAwCuGQ4ArxkOALCVAwCxnQMAsgEOALORAwC0HQ4AtQUOALa5AwC3uQMAuDkOALmNAwC6NQ4AuxEOALyBAQC9gQEAvnkBAL95AQCEFgCAkBYAgJwWAICrFgCAyBYAgM0WAIDuEQCA/xEAgHwWAICBAACAiwAAgJUAAICfAACAqQAAgLMAAID1DwCA+g8AgAQQAIB1EACAehAAgIQQAIDlEACA6hAAgBcRAIAzEQCAOBEAgEIRAIBRFQCADRYAgBIWAIAqFgCAoRYAgKYWAIC+FgCA8A8AgAkQAICJEACAHBEAgNcSAIA/FQCALxYAgGMWAIDDFgCARxEAgGQSAICfEgCAshIAgBEUAIAdFACAKRQAgI0TAICSEwCA0RMAgNYTAID9EwCAAhQAgGkSAIBuEgCAtxIAgLwSAIDCEQCAxxEAgJYRAICbEQCApD0DAKVFAwCmTQMAp0UDAKA9AwChJQMAoi0DAKMlAwCsfQMArUUDAK5NAwCvRQMAqH0DAKllAwCqbQMAq2UDALQ9AwC1xQMAts0DALfFAwCwPQMAsSUDALItAwCzJQMAvP0DAL3FAwC+zQMAv8UDALj9AwC55QMAuu0DALvlAwCEBQwAhQ0MAIYFDACHHQwAgI0MAIGpDACCGQwAg1ENAIxhDACNYQwAjmEMAI9hDACIKQwAiRUMAIodDACLFQwAlD0MAJXFAwCWzQMAl8UDAJABDACRAQwAkgEMAJMBDACc/QMAncUDAJ7NAwCfxQMAmP0DAJnlAwCa7QMAm+UDAIBpBACBaQQAgnEEAINxBACEnQQAhYUEAIaNBACHhQQAiL0EAImNBACKhQQAi50EAIyFBACNqQYAjvkEAI/5BACQiQQAkYkEAJKRBACTkQQAlLEEAJWxBACW+QYAl60EAJiVBACZwQYAmmkGAJtpBgCceQYAnXkGAJ7RBgCf/QsAoA0GAKEdCwCiGQYAo0ULAKQFBgClTQsApjUGAKe1BACoEQYAqREGAKoRBgCrNQQArC0EAK0BBACuXQQArx0GALDNBgCxbQYAsnUGALMNBgC0FQYAtR0GALYVBgC3DQYAuDUGALk9BgC6NQYAuw0GALwVBgC9HQYAvhUGAL8NBgCA9QcAgf0HAIL1BwCD9QAAhO0AAIURAwCGEQMAhxEDAIgxAwCJMQMAijEDAIsxAwCMhQcAjRUDAI4dAwCPFQMAkG0DAJGNBwCShQcAk50HAJSFBwCVjQcAloUHAJe9BwCYhQcAmY0HAJqFBwCbnQcAnIUHAJ2NBwCehQcAn4UAAKB9AAChgQMAooEDAKOBAwCkgQMApYEDAKaBAwCngQMAqBUHAKmFAwCqjQMAq4UDAKydAwCtoQMArqEDAK+hAwCwdQcAsXUHALJxBwCzhQUAtM0FALX1BQC2/QUAt8kDALj5AwC5+QMAuqEFALuhBQC8wQMAvcUDAN4RAIDjEQCAhJz7ACYTAIArEwCAYRMAgGYTAIB2EgCAghIAgJUSAICaEgCARRIAgNwSAIBXEwCASxAAgKMQAIC9EACAxBAAgJB1AACRfQAAknEAAJNxAACUAfwAlVX+AJZd/gCXVf4AmG3+AJlp/gCaef4Am3n+AJxp/gCdaf4Anln+AJ9Z/gCgpf4Aoa3+AKKl/gCjof4ApKH+AKWl/gCmrf4Ap6X+AKiZ/gCpmf4Aqun+AKvt/gCs9f4ArfH+AK7x/gCv8f4
AsI3+ALGV/gCymf4As5n+ALSJ/gC1if4Atrn+ALe9/gC4hf4AuY3+ALqF/gC7nf4AvIX+AL2B/gC+gf4Av4H+AKbZCACnBQcApMEIAKWZBQCi0QgAo9EIAKCJBQChtQgArgEHAK8BBwCsMQcArTEHAKo9BwCrJQcAqD0HAKk1BwC2fQcAtwUHALR9BwC1dQcAsskFALNlBwCwcQcAsXEHAL4BBwC/AQcAvDEHAL0xBwC6IQcAuyEHALg9BwC5MQcAhjkHAIc5BwCELQcAhTkHAIINBwCDNQcAgBEHAIEFBwCOSQcAj0kHAIxNBwCN1QUAisEFAIvBBQCI1QUAiXEHAJbVBQCX2QgAlE0FAJXdBQCSUQUAk9kFAJD5BQCRoQUAnnEIAJ99CACcYQgAnWEIAJpxCACbeQUAmMUIAJl1BQD0EACA+xAAgAIRAICBEQCAuxEAgLQRAIArEgCAGBIAgB8SAIBWEgCATxIAgF0SAIDJEgCAHxMAgIcSAIB7EgCApBIAgKsSAIA9EwCAUBMAgHgTAIB/EwCAhhMAgKcTAIC8EwCAwxMAgOgTAID2EwCA7xMAgEwUAIB9FACAhBQAgAsVAIAZFQCAEhUAgPEUAIAlFQCAMRUAgHwVAICDFQCAkxUAgFsVAIBpFQCAnxUAgKYVAIBiFQCASxYAgFIWAIDzFQCA+hUAgNkVAIDgFQCAIxYAgBwWAICwFgCAbhAAgLEQAICqEACA3hAAgNcQAIAQEQCACREAgI8RAIBeEQCAgIEBAIGBAQCCgQEAg4EBAISdAQCFhQEAhokBAIeJAQCItQEAib0BAIq1AQCLjQEAjJUBAI2dAQCOlQEAj40BAIgRAIA3EgCAkv0BAJP1AQCU7QEAlZUBAJadAQCXlQEAmKkBAJmpAQCauQEAm7kBAJypAQCdrQEAnqUBAJ+dAQCgZQEAoW0BAKJlAQCjfQEApGUBAKVtAQCmZQEAp90AAKjlAACppQMAqq0DAKulAwCsvQMAraUDAK6tAwCvpQMAsN0DALHlAwCy7QMAs+UDALSpAQC1VQEAtvUDALftAwC41QMAud0DALrVAwC7rQMAvM0DAL3BAwC+vQMAv7UDANASAICOEgCARBMAgP8UAIA4FQCAlRYAgIkWAIC3FgCAuRUAgIsUAIABFgCAyhMAgMQUAIDSFQCArRUAgPgUAIC9FACAZREAgKgRAIBwFQCA0BAAgFgUAIBiEACAPhIAgOcVAIATEwCAcRQAgEIQAIA5EACAihUAgOESAID2EQCArhMAgGsWAIDqEgCA8RIAgGwRAIAEEgCApgMAgA0jAIARIwCAoAYAgMcAAIC1BgCAqyMAgK8jAIC5IQCAtSEAgOMHAIB7CQCAfwkAgEEjAICnIwCANSMAgDkjAIAdIwCAISMAgCUjAIApIwCALSMAgDEjAIDbBwCA3wcAgNEAAICATQEAgVEBAIJRAQCDTQEAhE0DAIUhAwCGRQEAh30BANcAAICiAwCAqAMAgN0HAIDTAACA1QAAgL0GAIB5AACABxQAgH0AAICHAACAkQAAgAwUAICbAACAGBQAgKUAAIAkFACArwAAgDAUAIC5AACANRQAgM8PAIBVEACAmBAAgJsQAIArEQCAVhEAgKARAIDMEQCA6BEAgOsRAIDzEQCADRIAgBASAIBzEgCAwRIAgDATAIBrEwCAlxMAgJ8TAICwpQEAsa0BALKlAQCzvQEAtKUBALWtAQC2pQEAt10BALhlAQC5bQEAumUBALt9AQC8ZQEA2xMAgDoUAIBpFACAgAW5AIHhBgCC4QYAg+EGAIThBgCoBgCAswYAgIfpBgCI2QYAifmxAIr1sQCL8bEAjO2xAI31BgCO+QYAj/0GAJDZBgCR2QYAkvWxAJwUAICUiZIClfEGAJb1BgCX9QYAmNkGAJnVsgCa3bIAm6kGAJy5BgCduQYAnqkGAJ+BBgCgoQcAoaEHAKIhsgCjpQcApIUAAKWNAACmQbMA1RQAgKiNBwCplQcAqp0HAKuVBwBOFQCAyhUAgDYQAIA+FgCAsP0HALGFBwCyjQcAaBYAgLSZBwCBFgCAtpUHALeNBwC4tQcAub0HALq1BwC7jQcAvJUHAL2dBwC+lQcAv40HAIB1BgCBlaACgpmgAoOZoAKEhaAChb2gAoaxoAKHhaACiLmgAomRoAKKnaACi5mgAoyFoAKNjQEAjoEBAI9FBgCQOQYAkT0GAJIxBgCTMQYAlC0GAJXVBgCW2QYAl90GAJjhBgCZ4QYAmu0GAJvpBgCc9QYAnf0GAJ7xBgCf9QYAoAkGAKEJBgCiBQYAowEGAKQdBgClBQYApgkGAKcNBgCoMQYAqTEGAKo9BgCrNQYArCkGAK0pBgCuJQYArx0GALBhBgCxYQYAsm0GALNpBgC0dQYAtX0GALZxBgC3dQYAuEkGALlJBgC6RQYAu0EGALxdBgC9RQYAvkkGAL9NBgCAsQUAgbEFAIK9BQCDuQUAhKUFAIWtBQCGoQUAh6UFAIiZBQCJmQUAipUFAIuRBQCMjQUAjcEFAI7NBQCPyQUAkLUFAJG9BQCSsQUAk7UFAJSpBQCVqQUAlqUFAJehBQCYnQUAmSkCAJolAgCbIQIAnD0CAJ3pAgCe5QIAn+ECAKAdAgChNQIAojkCAKM9AgCkIQIApSECAKYtAgCnKQIAqBUCAKkZAgCqFQIAqxECAKwNAgCteQIArnUCAK8V8ACwafAAsRECALIdAgCzGQIAtAUCALUhAAC2LQAAtyUAALgZAAC54QEAuu0BALvlAQC8+QEA2BQAgN0UAIC/9YYCp2kNAOIUAIDnFACAzwAAgNkAAICzAwCA4QcAgH0JAID7IgCAzNSFAszghQL/IgCAgSkAgDUkAIBuJACAjSQAgLyZBQC9mQUAvqkFAL+ZvAC4mQUAuZkFALqJBQC7iQUAtKEFALXVsQC23bEAt6kFALCxsgCxzQUAssUFALO9BQCfJACAxCQAgMMoAIDfKACA8SgAgIgmAICFKQCAaSkAgCkkAIAtJACA2WSgAoEJAIDZUKAChAkAgI0JAICKCQCAhwkAgOwhAIDvIgCA9CEAgJhlBQCZEbIA/CEAgNkwoAKUOZEClU0FAJZFBQCXXQUAkGkFAJFpBQCSWQUAk1kFAID9vACB1ZwCgmW8AIPFvACEkbwAhZ28AIalvACHjbwAiK2TAonlvACKKZACi7W8AIwRkAKNlbwAji2wAI/FnAKQ6bwAkcHIAJJBkAKT8Z0ClNW8AJXlvACW4bwAl02QAphlkAKZfZACmrm8AJupCgCcbQ8Anb0KAPMiAICfXQ8AoK0PAKElCgCibQoAo2UKAKQNCgClpQ8ApgXUAKepDwComQ8AqZkPAKopDwCrKQ8ArDkPAK05DwCuKQ8ArykPALBZDwCxndEAspXRALOF1gC0sdEAtbHRALbZ1AC32dQAuOnUALnp1AC6+dQAu/nUALzp1AC96dQAvrnUAL+51ACASdUAgUnVAIJZ1QCDWdUAhEnVAIV90ACGddAAh23QAIhV0ACJXdAAin
XVAIut1QCMtdUAjb3VAI611QCPQdAAkMHQAJHB0ACSwdAAk8HQAJTB0ACVwdAAlsHQAJfB0ACYwdAAmc3QAJrF0ACb3dAAnOHVAJ3pDgCe2Q4An9kOAKDV2wChwdkAotnZAKPB2QCkxdkApc3ZAKbF2QCnGdkAqGHZAKlh2QCqydkAq8nZAKzZ2QCt2dkArs3ZAK/B2QCwCdkAsRXZALId2QCzrdoAtB3ZALWx2gC2wdwAt93dALjl3QC59d0Auv3dALut3QC8td0AvaXdAL6t3QDwIQCAgvHaAIPx2gD3IgCA5OgAgIYR2ACHEdgAhOHaAIXh2gCKKdgAiynYAK9AEwClKNoAjinYAI8p2ACMKdgAjSnYAJJh2ACTYdgA6egAgO7oAICWZdgAl23YAJR12ACVbdgAml3YAJst2ADz6ACA8FwCALEw3wCR8AIAnCnYALLQAwCiOQ0Ao1GeAqAlDQChOQ0AplUNAIS8AgCkJQ0ApV0NAKptDQCrAQQAqGENAKlRAwCuuQAAp3UAAKxhDQCtxQIA+OgAgIfMAwDwVAIAzFC6AJHYBACb9NsAkRgCAJk02wCddAQAvh0AAJ9gBQCejAUAjOwCAI2sBAD96ACAvfWKAqghvwCpLb8Aqi2/AKs9vwCsKb8ArVW/AK5RvwCvTb8AoBkIAKGlvQCiIb8AozGzAKQ9vwClJb8Apg2zAKclvwC46bMAuc3LALppswC7uQkAvH0IAL2tCQC+QQwAv50JALA5vwCxhb0Asgm/ALPtywC0Gb8AtQW/ALbtswC3Bb8AiDG9AIkxvQCKrQgAiyW9AIwJCQCNvQgAjiW+AI+JDAAC6QCAgQ0JAIKlDACDUQkAhIEIAIWBCACGmQgAh60MAJhhvQCZYb0Amm0JAJsVnQKcxQ8AnQ28AJ7BDwCfcQkAkBW+AJERnwKSNZ8Ckw2fApQJvgCVCb4AlnG9AJdxvQCCuAQAl6UHALnEAwDwWAIAkUwCAJLIAgCErAQAsD0AAAzpAIAH6QCAvQUAABHpAIDwTAIAuhEAAJEkAgCN5AQAkqwCAJasAgC4uAMAudADAJb4AgCvDQAAFukAgPB4AgCRXAIAlrACAK8FAAAb6QCAIOkAgCnpAIAy6QCAP+kAgIX4AwBM6QCAh4ADAIbAAgBZ6QCAZukAgHPpAICW6QCAuzkAAHzpAICf6QCAiekAgL8dAAC+HQAAvR0AALwhAACVwB0AlMQfAJfIGgCWABgAkSAAAJDUAQCT2B4AkgAcAJ3gEgCcABAAn+gRAJ7sEwCZ8BkAmPQbAJv4FwCaABQAnnEBAJ9xAQCABQAArOkAgM0KAICwDACAXg0AgGQNAIBqDQCAdg0AgHkNAIB8DQCAfw0AgIINAICRDQCAlw0AgJoNAICdDQCAICIAgMcNAIDWDQCA/A0AgP8NAIAODgCAEQ4AgB0OAIAYIgCAMg4AgDUOAIDXFgCAEBcAgNoWAIC4ACwAuYwvALqILgC6AwCAhpwXAMx4vACEmC0AhVwXALcDAIDKAwCAiAAoAIksFADtBACAjAUAgN8FAIAaBgCAQAYAgFcGAIB0BgCAiwYAgDgBAIA8AQCAQAEAgEQBAIBIAQCATAEAgKR9AQBQAQCAonUBAKNlAQCggQEAoYEBALxxugC9kbYAvnG6AL+ltgC48bgAuXW6ALqZzgC7dboAtGG6ALVtugC2eboAt3W6ALAZugCxEboAsgm6ALMFugCsUboArXG2AK5RugCvbboAqNG4AKldugCqRbYAq1G6AKRxlgKlYZYCpnGWAqe9ugCgzZsCofG6AKLJugCjxboAnHmaAp0tugCeDc4An4WWApgJugCZtZYCmjm6AJuJtgCUMboA+CEAgJZpugCXrZYCkHm6AJE1ugCSMboAkwG6AIxJzgCN5bYAjhmaAo+hugCIoboAiUG2AIqhugCLdbYAhAG4AIWFugCGac4Ah4W6AICxugCBvboAgqm6AIOlugCAgbkAgQ27AIIVtwCDAbsAhAG7AIUhtwCGAbsAhz27AIgJuwCJAbsAihm7AIsVuwCMcbsAjX27AI5puwCPZbsAkKG5AJEluwCSyc8AkyW7AJQhuwCVwbcAliG7AJf1twCY6c8AmUW3AJq5mwKbAbsAnLm7AJ31uwCe8bsAn8G7AKARuwChCZQCokm7AKONlwKkCbsApbWXAqY5uwCnibcAqFmbAqkNuwCqLc8Aq6WXAqwNmgKtMbsArgm7AK8FuwCw0ZcCscGXArLRlwKzHbsAtFG5ALXduwC2xbcAt9G7ALjxuwC50bcAuvG7ALvNuwC82bsAvdG7AL7JuwC/xbsAgJmkAIEliAKCqaQAgxmoAFsNAICFvaQAhp3QAIcViAKInYUCiaGkAIqZpACLlaQAjCGIAo0xiAKOIYgCj+2kAJDBpgCRTaQAklWoAJNBpACUQaQAlWGoAJZBpACXfaQAmEmkAJlBpACaWaQAm1WkAJwxpACdPaQAnimkAJ8lpACgYaYAoeWkAKIJ0ACj5aQApOGkAKUBqACm4aQApzWoAKgp0ACphagAqnmEAqvBpACseaQArTWkAK4xpACvAaQAsFGkALFJiwKyCaQAs82IArRJpAC19YgCtnmkALfJqAC4GYQCuU2kALpt0AC75YgCvE2FAr1xpAC+SaQAv0WkAIARiQKBAYkCghGJAoPdpQCEkacAhR2lAFQBAICHEaUAiDGlAIkRqQCKMaUAWAEAgFwBAICNEaUAjgmlAI8FpQCQAaUAkQ2lAJIZpQCTFaUAlLGnAGABAICW2dEAlzWlAJgRpQCZ8akAmhGlAJvFqQCc+dEAZAEAgJ6phQKfEaUAoEmlAKEFpQCiAaUAozGlAKQBpQClGYoCplmlAKediQKoOaUAqYWJAqoJpQCruakArEmFAq0dpQCuPdEAr7WJArB9hAKxQaUAsnmlALN1pQC0wYkCtdGJArbBiQK3DaUAuGGnALntpQBoAQCAu+GlALzhpQC9wakAvuGlAGwBAIC3baYAttWGArUpqgC0hdIAs7mqALJtpgCxjaoAsG2mAL8higK+5aYAvaWJAnABAIC7jaYAdAEAgLm5pgC49aYAeAEAgKZ1pgClbaYAfAEAgIABAICiTaYAhAEAgIgBAICvCaYAruXSAIwBAICsjaQAqymmAKolpgCpMaYAkAEAgJc5pgCWNaYAlQ2mAJQxhwKTmYoCkhHSAJExpgCQZYYCn62mAJ65qgCUAQCAnC2kAJthpgCarYoCmb2KApitigKHfaYAhk2mAIVJpgCEBaYAg72mAIIFhgKB+aoAgFXSAI/1qgCORaYAjcmKAox1pgCL8YoCijWmAIl1iQKIbaYAgCmnAIEhpwCCOacAgzWnAIRRpwCYAQCAhkmnAJwBAIDMSIkCzYiJAoqp0wCLRacAjEGnAI2hqwCOQacAj5WrAJDJ0wBFIwCAkpmHApMhpwCUmacAldWnAJbRpwCX4acAmPGnAJnpiAKaqacAm22LApzppwCdVYsCntmnAJ9pqwCgeYcCoS2nAKIN0wCjhYsCpC2GA
qURpwCmKacApyWnAKixiwKpoYsCqrGLAqt9pwCsMaUArb2nAK6lqwCvsacAsNGnALHxqwCy0acAs+2nALT5pwC18acAtumnALflpwC4oacAua2nALq5pwC7tacAvBGlAL2VpwC+edMAv5WnAICRoACBiY8CgsmgAIMNjAKEiaAAhTWMAoa5oACHCawAiNmAAomNoACKrdQAiyWMAoyNgQKNsaAAjomgAI+FoACQUYwCkUGMApJRjAKTnaAAlNGiAJVdoACWRawAl1GgAJhxoACZUawAmnGgAJtNoACcWaAAnVGgAJ5JoACfRaAAoMGgAKHNoACi2aAAo9WgAKRxogCl9aAAphnUAKf1oACo0aAAqTGsAKrRoACrBawArDnUAK2VrACuaYACr9GgALAJoACxRaAAskGgALNxoAC0QaAAtVmPArYZoAC33YwCuHmgALnFjAK6SaAAu/msALwJgAK9XaAAvn3UAL/1jAKAvYACgYGhAIK5oQCDtaEAhAGNAoURjQKGAY0Ch82hAIihowCJLaEAijWtAIshoQCMIaEAjQGtAI4hoQCPHaEAkGmhAJFhoQCSeaEAk3WhAJQRoQCVHaEAlgmhAJcFoQCYgaMAmQWhAJrp1QCbBaEAnAGhAJ3hrQCeAaEAn9WtAKAJ1QChpa0AolmBAqPhoQCkWaEApRWhAKYRoQCnIaEAqDGhAKkpjgKqaaEAq62NAqwpoQCtlY0CrhmhAK+prQCwOYECsW2hALJN1QCzxY0CtG2AArVRoQC2aaEAt2WhALjxjQK54Y0CuvGNArs9oQC8caMAvf2hAL7lrQC/8aEAs2miALKF1gCxaaIAsO2gALe5rgC2baIAtY2uALRtogC7TaIAuvWCArkJrgC4pdYAv42iAL69ogC9uaIAvPWiAKNNogCiWa4AoUGiAKDNoACncaIApk2iAKVtrgCkTaIAq1miAKpVogCpTaIAqEWiAK8pogCuJaIArTGiAKw9ogCTla4AkiWiAJGpjgKQFaIAl5mOApYR1gCVMaIAlGWCApsZogCaFaIAmS2iAJgRgwKfYaIAnq2OAp29jgKcrY4Cg2muAIK9ogCBXa4AgL2iAIe9ogCGBYIChfmuAIRV1gCLXaIAim2iAIlpogCIJaIAj/GOAo41ogCNdY0CjG2iAIARowCBMa8AghGjAIMtowCEOaMAhTGjAIYpowCHJaMAiGGjAIltowCKeaMAi3WjAIzRoQCNVaMAjrnXAI9VowCQMaMAkdGvAJIxowCT5a8AlNnXAJV1rwCWiYMClzGjAJipowCZ5aMAmuGjAJvRowCc4aMAnfmMAp65owCffY8CoBmjAKGljwKiKaMAo5mvAKRpgwKlPaMAph3XAKeVjwKoHYICqSGjAKoZowCrFaMArKGPAq2xjwKuoY8Cr22jALBBoQCxzaMAstWvALPBowC0waMAteGvALbBowC3/aMAuMmjALnBowC62aMAu9WjALyxowC9vaMAvqmjAL+lowBnDQCA0QYAgG0NAIDIBwCAcw0AgA8HAICFDQCAlAcAgIsNAICaBwCAuA0AgH0HAIDKDQCAxQcAgAIOAIBPBwCAFA4AgFIHAIAgDgCAkB0AAOEGAIAPJACA4iUAgCguAICtLACAyS0AgKpVAACrKQAAMjcAgAErAIDGMACAsjIAgAEsAIBTLwCAmSsAgJ8wAIDtKwCAGjUAgI43AICtLQCA5SwAgGYyAIADMACALzAAgA44AIAjMACA+y8AgHI0AICAIa4AgaWsAIJJ2ACDpawAhKGsAIVBoACGoawAh3WgAIhp2ACJxaAAiv0AAIsxxgCM7QAAjdEAAI7VAACPyQAAgCmhAIFNFACCIQEAg+G4AoQ5qgCFOaoAhhG9AodRFACIEQEAidW4AorNrQCLLbsCjGEUAI3ZjQKObRQAj2UUAJB5AQCRubgCkkm9ApNFuwKUDRQAlTUUAJYZAQCXqbgCmF2qAJkBFACaIQEAmwUUAJx5vQKdhbgCnnm7Ap+JuAKggb0CoXm4AqKZCQCjlRQApFmuAKWJFACmmQEAp70UAKipAQCpvbsCqrkBAKuJFACsmRQArZkUAK6JFACviRQAsNkBALEJrgCy6QEAs9W7ArTNuwK17RQAtpW8ArfhFAC4oRQAuaEUALrBoQC7pRQAvNkBAL0ZuAK+0aoAv9GqAL9FFwC+RRcAvTUXALxBvwK7KRcAugm4ArkBuAK4PQIAt+2tALY9AgC1HRcAtB0XALMdFwCyHRcAsR0XALAtAgCvWbgCrk0CAK1pFwCsTQIAq00XAKqdrQCpQRcAqE0KAK40AIDRLACApX0XAKR9FwCjoa4Aom2CAqF9ggKgbYICnzmuAJ41rgCdDa4AnDGPApuZggKaEdoAmTGuAJhljgKXtaIAlgWuAJWJggKUNa4Ak7GCApJ1rgCRNYECkC2uAI99rgCOTa4AjUmuAIwFrgCLva4AigWOAon5ogCIVdoAh0miAIadrgCFfaIAhJ2uAIOZrgCCddoAgZmuAIAdrADMqIQCzUyGAswguQLNTLkCzECOAkYyAIDMmIUCzTyEAswQgwLNUIMCzKCDAs2MgwLMMIACzSSAAswYgALNhIACmjMAgAUsAIAxLQCAiSMAgE0jAIBXIwCAayMAgJMjAIB1IwCAnSMAgGEjAIB/IwCAzPC5As2EuQLMULgCzay7AoDNAACB1QAAgt0AAIPVAACEzQAAhfUAAIb9AACH9QAAiM0AAFcvAIDBLACA1SoAgM0qAIDdKgCAuekAgCErAICQZQAAkW0AAKiIKgA1KwCAPSsAgEUrAIBJKwCATSsAgKIAMACjzDMAoOg9AKHsPACm8DYAp/QoAKQANACl/DUAgFERAIHpiAKCXREAg1URAIQpBACF6b0Chhm4AocVvgKIfREAiUURAIppBACL2b0CjA2vAI1REQCOcQQAj1URAJBJuAKRtb0Ckkm+ApO5vQKUUbgClam9ApZJDACXRREAmKmrAJl5EQCaaQQAm00RAJx5BACdbb4CnmkEAJ9ZEQCgqREAoakRAKK5EQCjuREApIkEAKVZqwCmuQQAp4W+Aqi9vgKpnREAquW5AquREQCs8REArfERAK6RpACv9REAsOkEALEpvQKy4a8As+GvALTZuAK1mREAtukEALctvQK4BagAueW+Arq5EQC7AYgCvKURAL2tEQC+wQQAvwG9AoABuQKBDb8CglUQAINtEACEUQUAheG8AoYlrgCHeRAAiGkFAIlNEACKIbkCi928AowxvwKNwbwCjjm5Ao/BvAKQUQ0AkV0QAJKBqgCTURAAlFEFAJV1EACWUQUAl0W/AphxBQCZQRAAmkEQAJtBEACcQRAAnUEQAJ5hBQCfsaoAoKEFAKGdvwKilb8Co7UQAKTduAKlqRAAptkQAKfZEACoiaUAqe0QAKqBBQCrQbwCrJmuAK2ZrgCusbkCr/EQALDxBQCxNbwCsi2pALPNvwK0gRAAtTmJAraNEAC3hRAAuNkFALkZvAK66bkCu+W/ArytEAC9lRAAvrkFAL8J
vAK5La0AuC2tALtFEwC6BboCveG/ArwlBgC/GbwCvvmqALEdEwCwabsCs20TALJtEwC1eRMAtB2mALfVvwK2FQYAqXUTAKh1EwCrhakAqlUGAK1JvAKsdQYAr2ETAK5BvAKhQRMAoGUGAKNxvAKiZQYApVUTAKRlBgCnVRMAplUTAJl1vwKYhbwCm3W/ApqNugKdiRMAnIUOAJ+FEwCeVakAkVW/ApDlBgCTzRMAkpGtAJXZEwCU/QYAl0m/Apa1ugKJmRMAiJETAIs1vwKK9QYAjdm8AozVugKPuRMAjoETAIGtEwCA7boCgxm/AoLdBgCF8bwChBGqAIcVigKGrRMAgD2sAIFhEgCCQQcAg2USAIQZuwKF5b4Chhm9AofpvgKIIbsCidm+AopFEgCLXRIAjSkAgM3pAICOzaoAj8mLApCdiwKRpYsCkrGqAJOxqgCU2akAldmpAJb5qQCX+akAmJWqAJmRiwKatYsCm42LApyJqgCdiaoAnvGpAJ/xqQCgIakAoSGpAKJ9qgCjeYsCpE2LAqV1iwKmYaoAp2GqAKgpqQCpKakAqgmpAKsJqQCsRaoArUGLAq5liwKvXYsCsDmqALE5qgCyQakAs0GpALRxqQC1cakAti2qALcpiwK4PYsCuQWLAroRqgC7EaoAvHmpAL15qQC+WakAv1mpAIKJIwBtKwCAcSsAgI0rAIC+6QCAh5kjAJEpAIB5KwCAyOkAgIu5JACpKwCAifkkAI6VIwCPiSMAsSsAgI2JJACSvSMAESsAgLkrAICR4SMAo+sAgJfFIwCU8SMA4SsAgJkpAICbkSMA+SsAgJndIwD9KwCAnwktAAksAICdjdUAogkjAJ0pAIBBLACAofUjAEUsAICnGSMApCUkAG0sAICq7SQAeSwAgKgdIwCpeSQArhUjAK8JIwCsCSQArQkkALI9IwCJLACAsDEjALFhIwC2VSMAt0UjALRxIwC1XSMAulkjALsRIwCRLACAuV0jAL6JLQCVLACAvI0tANzpAICAuSUAgX0iAIKBIgCDmSIAhK0lAIXZJQCGuSIAh5EiAIiVIgCJ8SUAljIAgIuxJQCMgSUAjYElAI6dIgCPgSIAkLkiAJHpIgCStSIAk9EiAJT5IgCV1SIAlt0iAJfNIgCY+SIAmdUiAJrRIgCbmSIAqSwAgLEsAIDh6QCAvSwAgGUAAACh/SIAogEiAKMZIgDFLACApVklAKY5IgCnESIAqBUiAKlxJQDNLACAqzElAKwBJQCtASUArh0iAK8BIgCwOSIAsWkiALI1IgCzUSIAtHkiALVVIgC2XSIAt00iALh5IgC5VSIAulEiALsZIgD1LACA4SwAgO0sAIDxLACAgI0vAIGlLwCCrS8Ag70vAISlLwCFrS8AhqUvAIfdLwCI5S8Aie0vAIrlLwD5LACAAS0AgAUtAIANLQCAFS0AgJCRLwCRkS8AkpEvAJORLwCUsS8AlbEvAJa1LwCXRTMAmE0zAJlVMwCaPTMAmxkzAJyZMwCdiTMAnlUwAJ9JMACgwTAAockwAKLZMACj1TAApM0wAKX9MACm5TAApzUwAKi1MQCpuTEAqu0xAKuxmgCs0ZYArbE6AK61OgAZLQCAsEGUALHNlgCy1ZoAs8GWALTBlgC14ZoAtsGWALf9lgC4yZYAucGWALrZlgC71ZYAvLGWAL29lgC+qZYAv6WWAMUAAAChfSAAooEgACktAICkrScALS0AgDktAICnkSAAXS0AgKnxJwCqZScAq7EnAKyBJwCtgScArp0gAK+BIACwuSAAsekgALK1IABhLQCAtPkgALXVIAC23SAAt80gAEUtAIC51SAATS0AgLuZIACpLQCAcS0AgHUtAIB5LQCAgDknAIH9IACCASAAgxkgAG0tAICFWScAhjkgAIcRIACIFSAAiXEnAIrlJwCLMScAjAEnAI0BJwCOHSAAjwEgAJA5IACRaSAAkjUgAJNRIACUeSAAlVUgAJZdIACXTSAAmHkgAJlVIACaUSAAmxkgAJyFLgCdBdYAnoEuAJ+BLgCArT8AgbU/AIK9PwCDtT8AhK0/AIW5yACG1T8Ah80/AIj1PwCJ/T8AipnIAIvxPwCMATsAjQE7AI6NyACPOQQAkEkEAJFJBACSWQQAk1UEAJRNBACV3TwAlnkEAJd1BACYWQQAmSEEAJohBACbNdQAnCEEAJ3Z5gCeJQQAnx0EAKDpBACh9QQAos0/AKP1BACkFQQApfnUAKYhyACnIcgAqNHUAKktBACqOQQAq03CAKwtBACtdcgArh0EAK95BACwKQQAsTEEALI9BACzOQQAtC0EALX9BQC2qQUAt6kFALiZBQC5mQUAunkFALtFBQC8AQUAvQEFAL4BBQC/AQUAgC0HAIE1BwCCPQcAgzUHAIQtBwCFqQcAhqUHAIdl1QCILQYAiTEGAIoxBgCLDQYAjPnJAI15BgCOWQYAj1UGAJBpyQCRNQYAkj0GAJM1BgCULQYAlcUGAJZdAwCXVQMAmG0DAJl1AwCafQMAm3UDAJxtAwCdET0AnlkDAJ9ZAwCgqQMAoakDAKK5AwCjuQMApKkDAKWpAwCm2QMAp9kDAKjpAwCp6QMAqvkDAKv9AwCs5QMAre0DAK7lAwCvbcMAsKEDALGhAwCyoQMAs6EDALShAwC1zeYAtq0DALelAwC4yeYAuZkDALppAwC7aQMAvHkDAL15AwC+aQMAv2kDAIAAAACBLQCAfS0AgJUtAIDm6QCAsS0AgLUtAIC9LQCA0S0AgPQtAIDr6QCA8OkAgAAuAIAELgCACC4AgPwtAIAQLgCAoSkAgKUpAIAYLgCAIC4AgPXpAIA8LgCAQC4AgEwuAID66QCAVC4AgFguAIA3LwCAqSkAgGwuAICILgCAhC4AgATqAICQLgCACeoAgJwuAICYLgCAoC4AgLAuAIC0LgCArSkAgMQuAIDMLgCA0C4AgNQuAICxKQCADuoAgLUpAID3LgCA+y4AgP8uAIDV6wCAGOoAgNo1AIAvLwCAuSkAgDvqAIAN6wCAPy8AgEcvAIC9KQCAWy8AgGsvAICqIfQAq7U/AKilPwCpzecArkXwAK+hPwCsSfAArTH0AKJl4gCjvT8AoLk/AKG5PwCmlT8Ap50/AKSlPwClnT8Augk8AG8vAIC4CTwAuQk8AHcvAICHLwCAxSkAgMEpAICy3T8AswU9ALBN7wCx1T8Atn3wALe55AC0HT0AtWk8AB3qAICPLwCAoy8AgKcvAIC3LwCAyy8AgMMvAIDHLwCAgrX7AM8vAICA/T8AgfU/AOMvAIDnLwCA/y8AgAcwAICavT8Am/3NAJi9PwCZtT8Anlk/AJ9ZPwCcWT8AnVk/AJKBPwCTaekAkHnkAJGxPwCWgT8Al4H0AJQh5wCVmT8AFzAAgCswAIAs6gCAJzAAgBswAIAzMACAOzAAgE8wAIAx6gCAVzAAgEoAAABLMACAQzAAgMkpAIBfMACAZzAAgG8wAIBjMACAzSkAgIcwAIA26gCAszAAgPUwAIDRMACA2SkAgNUpAID
RKQCAnSsAgKErAID5MACA4TAAgK41AIA9KgCADTEAgCExAIAZMQCAT+oAgN0pAIA1MQCAKTEAgFIxAIBZ6gCAXjEAgD0xAIBmMQCAajEAgG4xAIByMQCAfjEAgF7qAICGMQCA5SkAgJIxAIBj6gCAljEAgOkpAICiMQCArjEAgL4xAIBo6gCA/+kAgG3qAIDeMQCAcuoAgLgJAQC5CQEAuhkBALsZAQC8CQEAvQkBAL45AQC/OQEAsM3FALE1zACymQ4As5kOALSJDgC1iQ4AtjkBALc5AQCo6dkAqckOAKrZDgCrqcUArMUOAK3NDgCuxQ4Ar/kOAKA1DgChPQ4AojUOAKOxxQCk8Q4ApfEOAKbxDgCn8Q4AmGkPAJlpDwCaeQ8Am3kPAJxpDwCdaQ8Ant0OAJ/NDgCQ+eoAkXEPAJJ9DwCTdQ8AlG0PAJVpDwCWWQ8Al1kPAIh5DwCJeQ8AigkPAIsJDwCMGQ8AjRkPAI4NzACPDQ8AgHkPAIF5DwCCSQ8Ag0kPAIRZDwCFWQ8AhkkPAIdJDwCKUQIAi1ECAIj5xgCJQQIAjnECAI/txgCMQQIAjUECAIIVAgCDHQIAgAUCAIEdAgCGdQIAh30CAIQFAgCFfQIAmsUCAJvNAgCYkc8AmYXaAJ7FAgCfzQIAnNUCAJ3NAgCSDQIAkxUCAJANAgCRBQIAlg0CAJf1AgCUDQIAlQUCAKo9AgCrRQIAqD0CAKk1AgCuXQIAr0UCAKxdAgCtVQIAol3GAKMBAgCgNQIAoQ0CAKYBAgCnxdgApBECAKURAgC6OQIAuzkCALg5AgC5OQIAvtkBAL/ZAQC82QEAvdkBALI9AgCzBQIAsD0CALE1AgC2GQIAtxkCALQdAgC16cIA6jEAgPIxAIDiMQCA/jEAgA4yAIAWMgCAIjIAgCYyAIB36gCACjIAgD4yAIBCMgCA7SkAgFIyAIB86gCANjIAgHIyAICB6gCAhuoAgHYyAICKMgCAgjIAgPEpAICOMgCAnjIAgJoyAICmMgCAw+kAgLYyAICL6gCAwjIAgJXqAIDWMgCA9jIAgJrqAIAKMwCADjMAgJ/qAICk6gCAKjMAgDozAID1KQCAPjMAgPkpAIBWMwCAWjMAgGYzAIByMwCA/SkAgIozAICp6gCApjMAgK7qAIAT6gCAwjMAgLPqAIC4AAAAuOoAgL3qAIABKgCABSoAgMfqAIDC6gCAzOoAgIAB3gCB8QcAgvEHAIPxBwCEFQIAhR0CAIYVAgCHEQIAiCXeAIld3gCKOQIAizkCAIwpAgCNKQIAjhkCAI99ygCQTd4AkWECAJJhAgCT7cEAlH0CAJVlAgCWIcAAl2kCAJhZAgCZMcIAmlUCAJstAgCcNQIAnT0CAJ4xAgCfMQIAoNECAKHRAgCi0QIAo9ECAKTxAgCl8QIApvECAKfxAgCo0QIAqdECAKrRAgCr0QIArDECAK0xAgCuMQIArzECALBRAgCxUQIAslECALNRAgC0cQIAtXECALZxAgC3cQIAuFECALlRAgC6+dwAu1UCALxNAgC9NQIAvj0CAL81AgC+7QYAv/UGALztBgC95QYAuskGALvJBgC4xcsAuckGALbtBgC39QYAtO0GALXlBgCyjQYAs/UGALDR3QCxhQYArvEGAK/xBgCs5QYAreEGAKr1BgCr/QYAqMUGAKn9BgCm9QYAp/0GAKTlBgCl/QYAovUGAKP9BgCg+QYAoZ3dAJ75BgCf+QYAnPkGAJ35BgCa+QYAm/kGAJj5BgCZ+QYAlvkGAJf5BgCUcd0AlfkGAJL9BgCT5QYAkP0GAJH1BgCO/QYAj4UGAIz9BgCN9QYAiuEGAIsB3QCI8QYAifEGAIbBBgCHwQYAhPEGAIXxBgCCkccAg+EGAIDpBgCBxcAAgAAAANHqAIACNACABjQAgBI0AIARKgCAFSoAgNvqAIAmNACAGSoAgODqAIDl6gCA6uoAgJY0AIAdKgCAojQAgKY0AIDv6gCA9OoAgL40AIAhKgCA+eoAgNI0AIDWNACAJSoAgP7qAIDyNACAKSoAgAI1AID6NACACjUAgAjrAIAiNQCALSoAgC41AIA2NQCARjUAgDEqAIAS6wCAF+sAgDUqAIAc6wCAXjUAgCHrAIBqNQCAdjUAgCbrAIAr6wCAkjUAgDDrAICaNQCAQOoAgDkqAICyNQCAtjUAgEEqAIC6NQCAFC4AgDXrAIA66wCAReoAgErqAIDeNQCA9jcAgIDNAQCB1QEAgt0BAIPVAQCEzQEAhfUBAIb9AQCH9QEAiM0BAInVAQCK3QEAi/UJAIzJAQCNyQEAjgEcAI89HwCQRR8AkU0fAJJFHwCTXR8AlEUfAJVNHwCWRR8Al30fAJhBxwCZQR8AmkEfAJtBHwCcQR8AnUEfAJ5BHwCfYd8AoL0fAKHFHwCizR8Ao8UfAKTdHwClxR8Aps0fAKfFHwCo/R8AqcUfAKrNHwCrxR8ArN0fAK3FHwCuzR8Ar8UfALC9HwCxRR8Ask0fALNFHwC0/ckAtVkfALZJHwC3SR8AuHkfALl5HwC6SR8Au8XdALxVHwC9XR8AvlUfAL9NHwAKNgCABjYAgA42AIAZLACAEjYAgBY2AIAaNgCAIjYAgD/rAIAmNgCAOjYAgD42AIAqNgCAQjYAgFY2AIA2NgCASjYAgE42AIBSNgCAROsAgE7rAIBJ6wCASSoAgHI2AIB2NgCAfjYAgGLrAICCNgCAU+sAgE0qAIBRKgCAWOsAgF3rAIBVKgCAojYAgKo2AICuNgCAujYAgLY2AIDCNgCAvjYAgMY2AIDKNgCA0jYAgFkqAIDaNgCA3jYAgF0qAIDuNgCAZ+sAgP42AIACNwCAYSoAgA43AICVKQCAbOsAgHHrAIBlKgCAaSoAgDo3AIB26wCAkjcAgJY3AICuNwCAgLUBAIG9AQCCtQEAg80BAITt9ACF0QEAhtEBAIfRAQCI8QEAifEBAIrxAQCL8QEAjNEBAI3RAQCO0QEAj9EBAJB9wwCRBcMAkl35AJO9AQCUpQEAla0BAJalAQCXXQMAmGUDAJltAwCaZQMAm30DAJxlAwCdbQMAnmUDAJ85wwCgoQMAoaEDAKKhAwCjoQMApKEDAKWhAwCmoQMAp6EDAKjhAwCp4QMAquEDAKvhAwCs4QMAreEDAK7hAwCv4QMAsKEDALGhAwCyoQMAs6EDALShAwC1oQMAtqEDALehAwC4YQMAuWEDALphAwC7YQMAvGEDAL1hAwC+pcMAv6HDALo3AICA6wCA0ukAgMY3AIDCNwCAzjcAgNfpAIDaNwCAhesAgIrrAIAmOACAMjgAgDo4AICP6wCAPjgAgGY4AIByOACAdjgAgG44AICCOACAhjgAgJTrAICSOACAbSoAgJo4AICZ6wCAcSoAgNI4AICkLgCA6jgAgJ7rAICo6wCAdSoAgHkqAIASOQCAresAgH0qAICy6wCAMjkAgLfrAIBKOQCAgSoAgFo5AIBmOQCAbjkAgHY5AICFKgCAvOsAgKY5AICyOQCAiSoAgI0qAIC2OQ
CAwesAgJEqAIDG6wCAy+sAgNDrAICVKgCA9jkAgPo5AIACOgCACjoAgNrrAICQ1QEAkd0BAJLVAQCT7QEAlPUBAJXB+wCW8QEAl/n7AJjNAQCZ1QEAmt0BAJvVAQCcyfsAnckBAEUqAICPAAAAgNkBAIHZAQCC6QEAg+kBAIT5AQCF+QEAhukBAIfpAQCI2QEAidkBAIoJwQCLrQEAjLUBAI29AQCOtQEAj60BAKAAAAChAAAAogAAAKMAAACkAAAApQAAAKYAAACnAAAAqAAAAKkAAACqAAAAqwAAAKwAAACtAAAArgAAAK8AAACwAAAAsQAAALIAAACzAAAAtAAAALUAAAC2AAAAtwAAALgAAAC5AAAAugAAALsAAAC8AAAAvQAAAL4AAAC/AAAAACAAIMyBACDMgwAgzIQAIMyFACDMhgAgzIcAIMyIACDMiMyAACDMiMyBACDMiM2CACDMigAgzIsAIMyTACDMk8yAACDMk8yBACDMk82CACDMlAAgzJTMgAAgzJTMgQAgzJTNggAgzKcAIMyoACDMswAgzYIAIM2FACDZiwAg2YwAINmM2ZEAINmNACDZjdmRACDZjgAg2Y7ZkQAg2Y8AINmP2ZEAINmQACDZkNmRACDZkQAg2ZHZsAAg2ZIAIOOCmQAg44KaACEAISEAIT8AIgAjACQAJQAmACcAKAAoMSkAKDEwKQAoMTEpACgxMikAKDEzKQAoMTQpACgxNSkAKDE2KQAoMTcpACgxOCkAKDE5KQAoMikAKDIwKQAoMykAKDQpACg1KQAoNikAKDcpACg4KQAoOSkAKEEpAChCKQAoQykAKEQpAChFKQAoRikAKEcpAChIKQAoSSkAKEopAChLKQAoTCkAKE0pAChOKQAoTykAKFApAChRKQAoUikAKFMpAChUKQAoVSkAKFYpAChXKQAoWCkAKFkpAChaKQAoYSkAKGIpAChjKQAoZCkAKGUpAChmKQAoZykAKGgpAChpKQAoaikAKGspAChsKQAobSkAKG4pAChvKQAocCkAKHEpAChyKQAocykAKHQpACh1KQAodikAKHcpACh4KQAoeSkAKHopACjhhIApACjhhIIpACjhhIMpACjhhIUpACjhhIYpACjhhIcpACjhhIkpACjhhIspACjhhIwpACjhhI4pACjhhI8pACjhhJApACjhhJEpACjhhJIpACjkuIApACjkuIMpACjkuIkpACjkuZ0pACjkuowpACjkupQpACjku6MpACjkvIEpACjkvJEpACjlhaspACjlha0pACjlirQpACjljYEpACjljZQpACjlkI0pACjlkbwpACjlm5spACjlnJ8pACjlraYpACjml6UpACjmnIgpACjmnIkpACjmnKgpACjmoKopACjmsLQpACjngaspACjnibkpACjnm6MpACjnpL4pACjnpZ0pACjnpa0pACjoh6opACjoh7MpACjosqEpACjos4cpACjph5EpACjqsIApACjrgpgpACjri6QpACjrnbwpACjrp4gpACjrsJQpACjsgqwpACjslYQpACjsmKTsoIQpACjsmKTtm4QpACjsnpApACjso7wpACjssKgpACjsubQpACjtg4ApACjtjIwpACjtlZgpACkAKgArACwALQAuAC4uAC4uLgAvADAAMCwAMC4AMOKBhDMAMOeCuQAxADEsADEuADEwADEwLgAxMOaXpQAxMOaciAAxMOeCuQAxMQAxMS4AMTHml6UAMTHmnIgAMTHngrkAMTIAMTIuADEy5pelADEy5pyIADEy54K5ADEzADEzLgAxM+aXpQAxM+eCuQAxNAAxNC4AMTTml6UAMTTngrkAMTUAMTUuADE15pelADE154K5ADE2ADE2LgAxNuaXpQAxNueCuQAxNwAxNy4AMTfml6UAMTfngrkAMTgAMTguADE45pelADE454K5ADE5ADE5LgAxOeaXpQAxOeeCuQAx4oGEADHigYQxMAAx4oGEMgAx4oGEMwAx4oGENAAx4oGENQAx4oGENgAx4oGENwAx4oGEOAAx4oGEOQAx5pelADHmnIgAMeeCuQAyADIsADIuADIwADIwLgAyMOaXpQAyMOeCuQAyMQAyMeaXpQAyMeeCuQAyMgAyMuaXpQAyMueCuQAyMwAyM+aXpQAyM+eCuQAyNAAyNOaXpQAyNOeCuQAyNQAyNeaXpQAyNgAyNuaXpQAyNwAyN+aXpQAyOAAyOOaXpQAyOQAyOeaXpQAy4oGEMwAy4oGENQAy5pelADLmnIgAMueCuQAzADMsADMuADMwADMw5pelADMxADMx5pelADMyADMzADM0ADM1ADM2ADM3ADM4ADM5ADPigYQ0ADPigYQ1ADPigYQ4ADPml6UAM+aciAAz54K5ADQANCwANC4ANDAANDEANDIANDMANDQANDUANDYANDcANDgANDkANOKBhDUANOaXpQA05pyIADTngrkANQA1LAA1LgA1MAA14oGENgA14oGEOAA15pelADXmnIgANeeCuQA2ADYsADYuADbml6UANuaciAA254K5ADcANywANy4AN+KBhDgAN+aXpQA35pyIADfngrkAOAA4LAA4LgA45pelADjmnIgAOOeCuQA5ADksADkuADnml6UAOeaciAA554K5ADoAOjo9ADsAPAA9AD09AD09PQA+AD8APyEAPz8AQABBAEFVAEHiiJVtAEIAQnEAQwBDRABDby4AQ+KIlWtnAEQAREoARFoARHoARMW9AETFvgBFAEYARkFYAEcAR0IAR0h6AEdQYQBHeQBIAEhQAEhWAEhnAEh6AEkASUkASUlJAElKAElVAElWAElYAEoASwBLQgBLSwBLTQBMAExKAExURABMagBMwrcATQBNQgBNQwBNRABNSHoATVBhAE1WAE1XAE3OqQBOAE5KAE5qAE5vAE8AUABQSABQUE0AUFBWAFBSAFBURQBQYQBRAFIAUnMAUwBTRABTTQBTUwBTdgBUAFRFTABUSHoAVE0AVQBWAFZJAFZJSQBWSUlJAFbiiJVtAFcAV0MAV1oAV2IAWABYSQBYSUkAWQBaAFsAXABdAF4AXwBgAGEAYS5tLgBhL2MAYS9zAGHKvgBiAGJhcgBjAGMvbwBjL3UAY2FsAGNjAGNkAGNtAGNtMgBjbTMAZABkQgBkYQBkbABkbQBkbTIAZG0zAGR6AGTFvgBlAGVWAGVyZwBmAGZmAGZmaQBmZmwAZmkAZmwAZm0AZwBnYWwAaABoUGEAaGEAaQBpaQBpaWkAaWoAaW4AaXYAaXgAagBrAGtBAGtIegBrUGEAa1YAa1cAa2NhbABrZwBrbABrbQBrbTIAa20zAGt0AGvOqQBsAGxqAGxtAGxuAGxvZwBseABswrcAbQBtMgBtMwBtQQBtVgBtVwBtYgBtZwBtaWwAbWwAbW0AbW0yAG1tMwBtb2wAbXMAbeKIlXMAbeKIlXMyAG4AbkEAbkYAblYAblcAbmoAbm0AbnMAbwBvVgBwAHAubS4AcEEAcEYAcFYAcFcAcGMAcHMAcQByA
HJhZAByYWTiiJVzAHJhZOKIlXMyAHMAc3IAc3QAdAB1AHYAdmkAdmlpAHZpaWkAdwB4AHhpAHhpaQB5AHoAewB8AH0AwqIAwqMAwqUAwqYAwqwAwrBDAMKwRgDCtwDDgADDgQDDggDDgwDDhADDhQDDhgDDhwDDiADDiQDDigDDiwDDjADDjQDDjgDDjwDDkQDDkgDDkwDDlADDlQDDlgDDmQDDmgDDmwDDnADDnQDDoADDoQDDogDDowDDpADDpQDDpwDDqADDqQDDqgDDqwDDrADDrQDDrgDDrwDDsADDsQDDsgDDswDDtADDtQDDtgDDuQDDugDDuwDDvADDvQDDvwDEgADEgQDEggDEgwDEhADEhQDEhgDEhwDEiADEiQDEigDEiwDEjADEjQDEjgDEjwDEkgDEkwDElADElQDElgDElwDEmADEmQDEmgDEmwDEnADEnQDEngDEnwDEoADEoQDEogDEowDEpADEpQDEpgDEpwDEqADEqQDEqgDEqwDErADErQDErgDErwDEsADEsQDEtADEtQDEtgDEtwDEuQDEugDEuwDEvADEvQDEvgDFgwDFhADFhQDFhgDFhwDFiADFiwDFjADFjQDFjgDFjwDFkADFkQDFkwDFlADFlQDFlgDFlwDFmADFmQDFmgDFmwDFnADFnQDFngDFnwDFoADFoQDFogDFowDFpADFpQDFqADFqQDFqgDFqwDFrADFrQDFrgDFrwDFsADFsQDFsgDFswDFtADFtQDFtgDFtwDFuADFuQDFugDFuwDFvADFvQDFvgDGjgDGkADGoADGoQDGqwDGrwDGsADHjQDHjgDHjwDHkADHkQDHkgDHkwDHlADHlQDHlgDHlwDHmADHmQDHmgDHmwDHnADHngDHnwDHoADHoQDHogDHowDHpgDHpwDHqADHqQDHqgDHqwDHrADHrQDHrgDHrwDHsADHtADHtQDHuADHuQDHugDHuwDHvADHvQDHvgDHvwDIgADIgQDIggDIgwDIhADIhQDIhgDIhwDIiADIiQDIigDIiwDIjADIjQDIjgDIjwDIkADIkQDIkgDIkwDIlADIlQDIlgDIlwDImADImQDImgDImwDIngDInwDIogDIpgDIpwDIqADIqQDIqgDIqwDIrADIrQDIrgDIrwDIsADIsQDIsgDIswDItwDJkADJkQDJkgDJlADJlQDJmQDJmwDJnADJnwDJoQDJowDJpQDJpgDJqADJqQDJqgDJqwDJrQDJrwDJsADJsQDJsgDJswDJtADJtQDJuADJuQDJuwDKgQDKggDKgwDKiQDKigDKiwDKjADKkADKkQDKkgDKlQDKnQDKnwDKuQDKvG4AzIAAzIEAzIjMgQDMkwDOhgDOiADOiQDOigDOjADOjgDOjwDOkADOkQDOkgDOkwDOlADOlQDOlgDOlwDOmADOmQDOmgDOmwDOnADOnQDOngDOnwDOoADOoQDOowDOpADOpQDOpgDOpwDOqADOqQDOqgDOqwDOrADOrQDOrgDOrwDOsADOsQDOsgDOswDOtADOtQDOtgDOtwDOuADOuQDOugDOuwDOvADOvEEAzrxGAM68VgDOvFcAzrxnAM68bADOvG0AzrxzAM69AM6+AM6/AM+AAM+BAM+CAM+DAM+EAM+FAM+GAM+HAM+IAM+JAM+KAM+LAM+MAM+NAM+OAM+cAM+dANCAANCBANCDANCHANCMANCNANCOANCZANC5ANC9ANGKANGMANGQANGRANGTANGXANGcANGdANGeANG2ANG3ANOBANOCANOQANORANOSANOTANOWANOXANOaANObANOcANOdANOeANOfANOiANOjANOkANOlANOmANOnANOqANOrANOsANOtANOuANOvANOwANOxANOyANOzANO0ANO1ANO4ANO5ANWl1oIA1bTVpQDVtNWrANW01a0A1bTVtgDVvtW2ANeQANeQ1rcA15DWuADXkNa8ANeQ15wA15EA15HWvADXkda/ANeSANeS1rwA15MA15PWvADXlADXlNa8ANeV1rkA15XWvADXlta8ANeY1rwA15nWtADXmda8ANea1rwA15sA15vWvADXm9a/ANecANec1rwA150A157WvADXoNa8ANeh1rwA16IA16PWvADXpNa8ANek1r8A16bWvADXp9a8ANeoANeo1rwA16nWvADXqda814EA16nWvNeCANep14EA16nXggDXqgDXqta8ANey1rcA2KEA2KIA2KMA2KQA2KUA2KYA2KbYpwDYptisANim2K0A2KbYrgDYptixANim2LIA2KbZhQDYptmGANim2YcA2KbZiADYptmJANim2YoA2KbbhgDYptuHANim24gA2KbbkADYptuVANinANin2YPYqNixANin2YTZhNmHANin2YsA2KfZtADYqADYqNisANio2K0A2KjYrdmKANio2K4A2KjYrtmKANio2LEA2KjYsgDYqNmFANio2YYA2KjZhwDYqNmJANio2YoA2KkA2KoA2KrYrADYqtis2YUA2KrYrNmJANiq2KzZigDYqtitANiq2K3YrADYqtit2YUA2KrYrgDYqtiu2YUA2KrYrtmJANiq2K7ZigDYqtixANiq2LIA2KrZhQDYqtmF2KwA2KrZhditANiq2YXYrgDYqtmF2YkA2KrZhdmKANiq2YYA2KrZhwDYqtmJANiq2YoA2KsA2KvYrADYq9ixANir2LIA2KvZhQDYq9mGANir2YcA2KvZiQDYq9mKANisANis2K0A2KzYrdmJANis2K3ZigDYrNmEINis2YTYp9mE2YcA2KzZhQDYrNmF2K0A2KzZhdmJANis2YXZigDYrNmJANis2YoA2K0A2K3YrADYrdis2YoA2K3ZhQDYrdmF2YkA2K3ZhdmKANit2YkA2K3ZigDYrgDYrtisANiu2K0A2K7ZhQDYrtmJANiu2YoA2K8A2LAA2LDZsADYsQDYsdiz2YjZhADYsdmwANix24zYp9mEANiyANizANiz2KwA2LPYrNitANiz2KzZiQDYs9itANiz2K3YrADYs9iuANiz2K7ZiQDYs9iu2YoA2LPYsQDYs9mFANiz2YXYrADYs9mF2K0A2LPZhdmFANiz2YcA2LPZiQDYs9mKANi0ANi02KwA2LTYrNmKANi02K0A2LTYrdmFANi02K3ZigDYtNiuANi02LEA2LTZhQDYtNmF2K4A2LTZhdmFANi02YcA2LTZiQDYtNmKANi1ANi12K0A2LXYrditANi12K3ZigDYtdiuANi12LEA2LXZhNi52YUA2LXZhNmJANi12YTZiSDYp9mE2YTZhyDYudmE2YrZhyDZiNiz2YTZhQDYtdmE25IA2LXZhQDYtdmF2YUA2LXZiQDYtdmKANi2ANi22KwA2LbYrQDYttit2YkA2LbYrdmKANi22K4A2LbYrtmFANi22LEA2LbZhQDYttmJANi22YoA2LcA2LfYrQDYt9mFANi32YXYrQDYt9mF2YUA2LfZhdmKANi32YkA2LfZigDYuADYuNmFANi5ANi52KwA
[... base64-encoded binary payload omitted: apparently the serialized normalization charsmap/vocabulary of the bundled sentence-transformer tokenizer (cf. model/sentence-transformer/unigram.json tracked via Git LFS in .gitattributes); not human-readable ...]
DthLMA7YS0AO2EtQDthLYA7YS3AO2EuADthLkA7YS6AO2EuwDthLwA7YS9AO2EvgDthL8A7YWAAO2FgQDthYIA7YWDAO2FhADthYUA7YWGAO2FhwDthYgA7YWJAO2FigDthYsA7YWMAO2FjQDthY4A7YWPAO2FkADthZEA7YWSAO2FkwDthZQA7YWVAO2FlgDthZcA7YWYAO2FmQDthZoA7YWbAO2FnADthZ0A7YWeAO2FnwDthaAA7YWhAO2FogDthaMA7YWkAO2FpQDthaYA7YWnAO2FqADthakA7YWqAO2FqwDthawA7YWtAO2FrgDtha8A7YWwAO2FsQDthbIA7YWzAO2FtADthbUA7YW2AO2FtwDthbgA7YW5AO2FugDthbsA7YW8AO2FvQDthb4A7YW/AO2GgADthoEA7YaCAO2GgwDthoQA7YaFAO2GhgDthocA7YaIAO2GiQDthooA7YaLAO2GjADtho0A7YaOAO2GjwDthpAA7YaRAO2GkgDthpMA7YaUAO2GlQDthpYA7YaXAO2GmADthpkA7YaaAO2GmwDthpwA7YadAO2GngDthp8A7YagAO2GoQDthqIA7YajAO2GpADthqUA7YamAO2GpwDthqgA7YapAO2GqgDthqsA7YasAO2GrQDthq4A7YavAO2GsADthrEA7YayAO2GswDthrQA7Ya1AO2GtgDthrcA7Ya4AO2GuQDthroA7Ya7AO2GvADthr0A7Ya+AO2GvwDth4AA7YeBAO2HggDth4MA7YeEAO2HhQDth4YA7YeHAO2HiADth4kA7YeKAO2HiwDth4wA7YeNAO2HjgDth48A7YeQAO2HkQDth5IA7YeTAO2HlADth5UA7YeWAO2HlwDth5gA7YeZAO2HmgDth5sA7YecAO2HnQDth54A7YefAO2HoADth6EA7YeiAO2HowDth6QA7YelAO2HpgDth6cA7YeoAO2HqQDth6oA7YerAO2HrADth60A7YeuAO2HrwDth7AA7YexAO2HsgDth7MA7Ye0AO2HtQDth7YA7Ye3AO2HuADth7kA7Ye6AO2HuwDth7wA7Ye9AO2HvgDth78A7YiAAO2IgQDtiIIA7YiDAO2IhADtiIUA7YiGAO2IhwDtiIgA7YiJAO2IigDtiIsA7YiMAO2IjQDtiI4A7YiPAO2IkADtiJEA7YiSAO2IkwDtiJQA7YiVAO2IlgDtiJcA7YiYAO2ImQDtiJoA7YibAO2InADtiJ0A7YieAO2InwDtiKAA7YihAO2IogDtiKMA7YikAO2IpQDtiKYA7YinAO2IqADtiKkA7YiqAO2IqwDtiKwA7YitAO2IrgDtiK8A7YiwAO2IsQDtiLIA7YizAO2ItADtiLUA7Yi2AO2ItwDtiLgA7Yi5AO2IugDtiLsA7Yi8AO2IvQDtiL4A7Yi/AO2JgADtiYEA7YmCAO2JgwDtiYQA7YmFAO2JhgDtiYcA7YmIAO2JiQDtiYoA7YmLAO2JjADtiY0A7YmOAO2JjwDtiZAA7YmRAO2JkgDtiZMA7YmUAO2JlQDtiZYA7YmXAO2JmADtiZkA7YmaAO2JmwDtiZwA7YmdAO2JngDtiZ8A7YmgAO2JoQDtiaIA7YmjAO2JpADtiaUA7YmmAO2JpwDtiagA7YmpAO2JqgDtiasA7YmsAO2JrQDtia4A7YmvAO2JsADtibEA7YmyAO2JswDtibQA7Ym1AO2JtgDtibcA7Ym4AO2JuQDtiboA7Ym7AO2JvADtib0A7Ym+AO2JvwDtioAA7YqBAO2KggDtioMA7YqEAO2KhQDtioYA7YqHAO2KiADtiokA7YqKAO2KiwDtiowA7YqNAO2KjgDtio8A7YqQAO2KkQDtipIA7YqTAO2KlADtipUA7YqWAO2KlwDtipgA7YqZAO2KmgDtipsA7YqcAO2KnQDtip4A7YqfAO2KoADtiqEA7YqiAO2KowDtiqQA7YqlAO2KpgDtiqcA7YqoAO2KqQDtiqoA7YqrAO2KrADtiq0A7YquAO2KrwDtirAA7YqxAO2KsgDtirMA7Yq0AO2KtQDtirYA7Yq3AO2KuADtirkA7Yq6AO2KuwDtirwA7Yq9AO2KvgDtir8A7YuAAO2LgQDti4IA7YuDAO2LhADti4UA7YuGAO2LhwDti4gA7YuJAO2LigDti4sA7YuMAO2LjQDti44A7YuPAO2LkADti5EA7YuSAO2LkwDti5QA7YuVAO2LlgDti5cA7YuYAO2LmQDti5oA7YubAO2LnADti50A7YueAO2LnwDti6AA7YuhAO2LogDti6MA7YukAO2LpQDti6YA7YunAO2LqADti6kA7YuqAO2LqwDti6wA7YutAO2LrgDti68A7YuwAO2LsQDti7IA7YuzAO2LtADti7UA7Yu2AO2LtwDti7gA7Yu5AO2LugDti7sA7Yu8AO2LvQDti74A7Yu/AO2MgADtjIEA7YyCAO2MgwDtjIQA7YyFAO2MhgDtjIcA7YyIAO2MiQDtjIoA7YyLAO2MjADtjI0A7YyOAO2MjwDtjJAA7YyRAO2MkgDtjJMA7YyUAO2MlQDtjJYA7YyXAO2MmADtjJkA7YyaAO2MmwDtjJwA7YydAO2MngDtjJ8A7YygAO2MoQDtjKIA7YyjAO2MpADtjKUA7YymAO2MpwDtjKgA7YypAO2MqgDtjKsA7YysAO2MrQDtjK4A7YyvAO2MsADtjLEA7YyyAO2MswDtjLQA7Yy1AO2MtgDtjLcA7Yy4AO2MuQDtjLoA7Yy7AO2MvADtjL0A7Yy+AO2MvwDtjYAA7Y2BAO2NggDtjYMA7Y2EAO2NhQDtjYYA7Y2HAO2NiADtjYkA7Y2KAO2NiwDtjYwA7Y2NAO2NjgDtjY8A7Y2QAO2NkQDtjZIA7Y2TAO2NlADtjZUA7Y2WAO2NlwDtjZgA7Y2ZAO2NmgDtjZsA7Y2cAO2NnQDtjZ4A7Y2fAO2NoADtjaEA7Y2iAO2NowDtjaQA7Y2lAO2NpgDtjacA7Y2oAO2NqQDtjaoA7Y2rAO2NrADtja0A7Y2uAO2NrwDtjbAA7Y2xAO2NsgDtjbMA7Y20AO2NtQDtjbYA7Y23AO2NuADtjbkA7Y26AO2NuwDtjbwA7Y29AO2NvgDtjb8A7Y6AAO2OgQDtjoIA7Y6DAO2OhADtjoUA7Y6GAO2OhwDtjogA7Y6JAO2OigDtjosA7Y6MAO2OjQDtjo4A7Y6PAO2OkADtjpEA7Y6SAO2OkwDtjpQA7Y6VAO2OlgDtjpcA7Y6YAO2OmQDtjpoA7Y6bAO2OnADtjp0A7Y6eAO2OnwDtjqAA7Y6hAO2OogDtjqMA7Y6kAO2OpQDtjqYA7Y6nAO2OqADtjqkA7Y6qAO2OqwDtjqwA7Y6tAO2OrgDtjq8A7Y6wAO2OsQDtjrIA7Y6zAO2OtADtjrUA7Y62AO2OtwDtjrgA7Y65AO2OugDtjrsA7Y68AO2OvQDtjr4A7Y6/AO2PgADtj4EA7Y+CAO2PgwDtj4QA7Y+FAO2PhgDtj4cA7Y+IAO2PiQDtj4oA7Y+LAO2PjADtj
40A7Y+OAO2PjwDtj5AA7Y+RAO2PkgDtj5MA7Y+UAO2PlQDtj5YA7Y+XAO2PmADtj5kA7Y+aAO2PmwDtj5wA7Y+dAO2PngDtj58A7Y+gAO2PoQDtj6IA7Y+jAO2PpADtj6UA7Y+mAO2PpwDtj6gA7Y+pAO2PqgDtj6sA7Y+sAO2PrQDtj64A7Y+vAO2PsADtj7EA7Y+yAO2PswDtj7QA7Y+1AO2PtgDtj7cA7Y+4AO2PuQDtj7oA7Y+7AO2PvADtj70A7Y++AO2PvwDtkIAA7ZCBAO2QggDtkIMA7ZCEAO2QhQDtkIYA7ZCHAO2QiADtkIkA7ZCKAO2QiwDtkIwA7ZCNAO2QjgDtkI8A7ZCQAO2QkQDtkJIA7ZCTAO2QlADtkJUA7ZCWAO2QlwDtkJgA7ZCZAO2QmgDtkJsA7ZCcAO2QnQDtkJ4A7ZCfAO2QoADtkKEA7ZCiAO2QowDtkKQA7ZClAO2QpgDtkKcA7ZCoAO2QqQDtkKoA7ZCrAO2QrADtkK0A7ZCuAO2QrwDtkLAA7ZCxAO2QsgDtkLMA7ZC0AO2QtQDtkLYA7ZC3AO2QuADtkLkA7ZC6AO2QuwDtkLwA7ZC9AO2QvgDtkL8A7ZGAAO2RgQDtkYIA7ZGDAO2RhADtkYUA7ZGGAO2RhwDtkYgA7ZGJAO2RigDtkYsA7ZGMAO2RjQDtkY4A7ZGPAO2RkADtkZEA7ZGSAO2RkwDtkZQA7ZGVAO2RlgDtkZcA7ZGYAO2RmQDtkZoA7ZGbAO2RnADtkZ0A7ZGeAO2RnwDtkaAA7ZGhAO2RogDtkaMA7ZGkAO2RpQDtkaYA7ZGnAO2RqADtkakA7ZGqAO2RqwDtkawA7ZGtAO2RrgDtka8A7ZGwAO2RsQDtkbIA7ZGzAO2RtADtkbUA7ZG2AO2RtwDtkbgA7ZG5AO2RugDtkbsA7ZG8AO2RvQDtkb4A7ZG/AO2SgADtkoEA7ZKCAO2SgwDtkoQA7ZKFAO2ShgDtkocA7ZKIAO2SiQDtkooA7ZKLAO2SjADtko0A7ZKOAO2SjwDtkpAA7ZKRAO2SkgDtkpMA7ZKUAO2SlQDtkpYA7ZKXAO2SmADtkpkA7ZKaAO2SmwDtkpwA7ZKdAO2SngDtkp8A7ZKgAO2SoQDtkqIA7ZKjAO2SpADtkqUA7ZKmAO2SpwDtkqgA7ZKpAO2SqgDtkqsA7ZKsAO2SrQDtkq4A7ZKvAO2SsADtkrEA7ZKyAO2SswDtkrQA7ZK1AO2StgDtkrcA7ZK4AO2SuQDtkroA7ZK7AO2SvADtkr0A7ZK+AO2SvwDtk4AA7ZOBAO2TggDtk4MA7ZOEAO2ThQDtk4YA7ZOHAO2TiADtk4kA7ZOKAO2TiwDtk4wA7ZONAO2TjgDtk48A7ZOQAO2TkQDtk5IA7ZOTAO2TlADtk5UA7ZOWAO2TlwDtk5gA7ZOZAO2TmgDtk5sA7ZOcAO2TnQDtk54A7ZOfAO2ToADtk6EA7ZOiAO2TowDtk6QA7ZOlAO2TpgDtk6cA7ZOoAO2TqQDtk6oA7ZOrAO2TrADtk60A7ZOuAO2TrwDtk7AA7ZOxAO2TsgDtk7MA7ZO0AO2TtQDtk7YA7ZO3AO2TuADtk7kA7ZO6AO2TuwDtk7wA7ZO9AO2TvgDtk78A7ZSAAO2UgQDtlIIA7ZSDAO2UhADtlIUA7ZSGAO2UhwDtlIgA7ZSJAO2UigDtlIsA7ZSMAO2UjQDtlI4A7ZSPAO2UkADtlJEA7ZSSAO2UkwDtlJQA7ZSVAO2UlgDtlJcA7ZSYAO2UmQDtlJoA7ZSbAO2UnADtlJ0A7ZSeAO2UnwDtlKAA7ZShAO2UogDtlKMA7ZSkAO2UpQDtlKYA7ZSnAO2UqADtlKkA7ZSqAO2UqwDtlKwA7ZStAO2UrgDtlK8A7ZSwAO2UsQDtlLIA7ZSzAO2UtADtlLUA7ZS2AO2UtwDtlLgA7ZS5AO2UugDtlLsA7ZS8AO2UvQDtlL4A7ZS/AO2VgADtlYEA7ZWCAO2VgwDtlYQA7ZWFAO2VhgDtlYcA7ZWIAO2ViQDtlYoA7ZWLAO2VjADtlY0A7ZWOAO2VjwDtlZAA7ZWRAO2VkgDtlZMA7ZWUAO2VlQDtlZYA7ZWXAO2VmADtlZkA7ZWaAO2VmwDtlZwA7ZWdAO2VngDtlZ8A7ZWgAO2VoQDtlaIA7ZWjAO2VpADtlaUA7ZWmAO2VpwDtlagA7ZWpAO2VqgDtlasA7ZWsAO2VrQDtla4A7ZWvAO2VsADtlbEA7ZWyAO2VswDtlbQA7ZW1AO2VtgDtlbcA7ZW4AO2VuQDtlboA7ZW7AO2VvADtlb0A7ZW+AO2VvwDtloAA7ZaBAO2WggDtloMA7ZaEAO2WhQDtloYA7ZaHAO2WiADtlokA7ZaKAO2WiwDtlowA7ZaNAO2WjgDtlo8A7ZaQAO2WkQDtlpIA7ZaTAO2WlADtlpUA7ZaWAO2WlwDtlpgA7ZaZAO2WmgDtlpsA7ZacAO2WnQDtlp4A7ZafAO2WoADtlqEA7ZaiAO2WowDtlqQA7ZalAO2WpgDtlqcA7ZaoAO2WqQDtlqoA7ZarAO2WrADtlq0A7ZauAO2WrwDtlrAA7ZaxAO2WsgDtlrMA7Za0AO2WtQDtlrYA7Za3AO2WuADtlrkA7Za6AO2WuwDtlrwA7Za9AO2WvgDtlr8A7ZeAAO2XgQDtl4IA7ZeDAO2XhADtl4UA7ZeGAO2XhwDtl4gA7ZeJAO2XigDtl4sA7ZeMAO2XjQDtl44A7ZePAO2XkADtl5EA7ZeSAO2XkwDtl5QA7ZeVAO2XlgDtl5cA7ZeYAO2XmQDtl5oA7ZebAO2XnADtl50A7ZeeAO2XnwDtl6AA7ZehAO2XogDtl6MA7ZekAO2XpQDtl6YA7ZenAO2XqADtl6kA7ZeqAO2XqwDtl6wA7ZetAO2XrgDtl68A7ZewAO2XsQDtl7IA7ZezAO2XtADtl7UA7Ze2AO2XtwDtl7gA7Ze5AO2XugDtl7sA7Ze8AO2XvQDtl74A7Ze/AO2YgADtmIEA7ZiCAO2YgwDtmIQA7ZiFAO2YhgDtmIcA7ZiIAO2YiQDtmIoA7ZiLAO2YjADtmI0A7ZiOAO2YjwDtmJAA7ZiRAO2YkgDtmJMA7ZiUAO2YlQDtmJYA7ZiXAO2YmADtmJkA7ZiaAO2YmwDtmJwA7ZidAO2YngDtmJ8A7ZigAO2YoQDtmKIA7ZijAO2YpADtmKUA7ZimAO2YpwDtmKgA7ZipAO2YqgDtmKsA7ZisAO2YrQDtmK4A7ZivAO2YsADtmLEA7ZiyAO2YswDtmLQA7Zi1AO2YtgDtmLcA7Zi4AO2YuQDtmLoA7Zi7AO2YvADtmL0A7Zi+AO2YvwDtmYAA7ZmBAO2ZggDtmYMA7ZmEAO2ZhQDtmYYA7ZmHAO2ZiADtmYkA7ZmKAO2ZiwDtmYwA7ZmNAO2ZjgDtmY8A7ZmQAO2ZkQDtmZIA7ZmTAO2ZlADtmZUA7ZmWAO2ZlwDtmZgA7ZmZAO2ZmgDtmZsA7ZmcAO2ZnQDtmZ4A7ZmfAO2ZoADtmaEA7ZmiAO2ZowDtmaQA7ZmlAO2ZpgDtmacA
7ZmoAO2ZqQDtmaoA7ZmrAO2ZrADtma0A7ZmuAO2ZrwDtmbAA7ZmxAO2ZsgDtmbMA7Zm0AO2ZtQDtmbYA7Zm3AO2ZuADtmbkA7Zm6AO2ZuwDtmbwA7Zm9AO2ZvgDtmb8A7ZqAAO2agQDtmoIA7ZqDAO2ahADtmoUA7ZqGAO2ahwDtmogA7ZqJAO2aigDtmosA7ZqMAO2ajQDtmo4A7ZqPAO2akADtmpEA7ZqSAO2akwDtmpQA7ZqVAO2algDtmpcA7ZqYAO2amQDtmpoA7ZqbAO2anADtmp0A7ZqeAO2anwDtmqAA7ZqhAO2aogDtmqMA7ZqkAO2apQDtmqYA7ZqnAO2aqADtmqkA7ZqqAO2aqwDtmqwA7ZqtAO2argDtmq8A7ZqwAO2asQDtmrIA7ZqzAO2atADtmrUA7Zq2AO2atwDtmrgA7Zq5AO2augDtmrsA7Zq8AO2avQDtmr4A7Zq/AO2bgADtm4EA7ZuCAO2bgwDtm4QA7ZuFAO2bhgDtm4cA7ZuIAO2biQDtm4oA7ZuLAO2bjADtm40A7ZuOAO2bjwDtm5AA7ZuRAO2bkgDtm5MA7ZuUAO2blQDtm5YA7ZuXAO2bmADtm5kA7ZuaAO2bmwDtm5wA7ZudAO2bngDtm58A7ZugAO2boQDtm6IA7ZujAO2bpADtm6UA7ZumAO2bpwDtm6gA7ZupAO2bqgDtm6sA7ZusAO2brQDtm64A7ZuvAO2bsADtm7EA7ZuyAO2bswDtm7QA7Zu1AO2btgDtm7cA7Zu4AO2buQDtm7oA7Zu7AO2bvADtm70A7Zu+AO2bvwDtnIAA7ZyBAO2cggDtnIMA7ZyEAO2chQDtnIYA7ZyHAO2ciADtnIkA7ZyKAO2ciwDtnIwA7ZyNAO2cjgDtnI8A7ZyQAO2ckQDtnJIA7ZyTAO2clADtnJUA7ZyWAO2clwDtnJgA7ZyZAO2cmgDtnJsA7ZycAO2cnQDtnJ4A7ZyfAO2coADtnKEA7ZyiAO2cowDtnKQA7ZylAO2cpgDtnKcA7ZyoAO2cqQDtnKoA7ZyrAO2crADtnK0A7ZyuAO2crwDtnLAA7ZyxAO2csgDtnLMA7Zy0AO2ctQDtnLYA7Zy3AO2cuADtnLkA7Zy6AO2cuwDtnLwA7Zy9AO2cvgDtnL8A7Z2AAO2dgQDtnYIA7Z2DAO2dhADtnYUA7Z2GAO2dhwDtnYgA7Z2JAO2digDtnYsA7Z2MAO2djQDtnY4A7Z2PAO2dkADtnZEA7Z2SAO2dkwDtnZQA7Z2VAO2dlgDtnZcA7Z2YAO2dmQDtnZoA7Z2bAO2dnADtnZ0A7Z2eAO2dnwDtnaAA7Z2hAO2dogDtnaMA7Z2kAO2dpQDtnaYA7Z2nAO2dqADtnakA7Z2qAO2dqwDtnawA7Z2tAO2drgDtna8A7Z2wAO2dsQDtnbIA7Z2zAO2dtADtnbUA7Z22AO2dtwDtnbgA7Z25AO2dugDtnbsA7Z28AO2dvQDtnb4A7Z2/AO2egADtnoEA7Z6CAO2egwDtnoQA7Z6FAO2ehgDtnocA7Z6IAO2eiQDtnooA7Z6LAO2ejADtno0A7Z6OAO2ejwDtnpAA7Z6RAO2ekgDtnpMA7Z6UAO2elQDtnpYA7Z6XAO2emADtnpkA7Z6aAO2emwDtnpwA7Z6dAO2engDtnp8A7Z6gAO2eoQDtnqIA7Z6jAPCRgpoA8JGCnADwkYKrAPCRhK4A8JGErwDwkY2LAPCRjYwA8JGSuwDwkZK8APCRkr4A8JGWugDwkZa7APCdhZfwnYWlAPCdhZjwnYWlAPCdhZjwnYWl8J2FrgDwnYWY8J2FpfCdha8A8J2FmPCdhaXwnYWwAPCdhZjwnYWl8J2FsQDwnYWY8J2FpfCdhbIA8J2GufCdhaUA8J2GufCdhaXwnYWuAPCdhrnwnYWl8J2FrwDwnYa68J2FpQDwnYa68J2FpfCdha4A8J2GuvCdhaXwnYWvAPCghKIA8KCUnADwoJSlAPCglYsA8KCYugDwoKCEAPCgo54A8KCorADwoK2jAPChk6QA8KGaqADwoZuqAPChp4gA8KGsmADwobSLAPCht6QA8KG3pgDwooaDAPCihp8A8KKMsQDwopuUAPCioYQA8KKhigDwoqyMAPCir7EA8KOAigDwo4q4APCjjZ8A8KOOkwDwo46cAPCjj4MA8KOPlQDwo5GtAPCjmqMA8KOipwDwo6qNAPCjq7oA8KOyvADwo7SeAPCju5EA8KO9ngDwo76OAPCkiaMA8KSLrgDwpI6rAPCkmIgA8KSctQDwpKCUAPCksLYA8KSykgDwpL6hAPCkvrgA8KWBhADwpYOyAPClg7MA8KWEmQDwpYSzAPCliYkA8KWQnQDwpZimAPClmpoA8KWbhQDwpaW8APClqqcA8KWuqwDwpbKAAPCls5AA8KW+hgDwpoeaAPCmiKgA8KaJhwDwpouZAPCmjL4A8KaTmgDwppSjAPCmlqgA8KaepwDwpp61APCmrLwA8KawtgDwprOVAPCmtasA8Ka8rADwpr6xAPCng5IA8KePigDwp5mnAPCnoq4A8KelpgDwp7KoAPCnu5MA8Ke8rwDwqJeSAPCol60A8KicrgDwqK+6APCotbcA8KmFhQDwqYefAPCpiJoA8KmQigDwqZKWAPCplrYA8KmssADwqoOOAPCqhIUA8KqIjgDwqoqRAPCqjpIA8KqYgAA="},"pre_tokenizer":{"type":"Sequence","pretokenizers":[{"type":"WhitespaceSplit"},{"type":"Metaspace","replacement":"▁","add_prefix_space":true}]},"post_processor":{"type":"TemplateProcessing","single":[{"SpecialToken":{"id":"<s>","type_id":0}},{"Sequence":{"id":"A","type_id":0}},{"SpecialToken":{"id":"</s>","type_id":0}}],"pair":[{"SpecialToken":{"id":"<s>","type_id":0}},{"Sequence":{"id":"A","type_id":0}},{"SpecialToken":{"id":"</s>","type_id":0}},{"SpecialToken":{"id":"</s>","type_id":0}},{"Sequence":{"id":"B","type_id":0}},{"SpecialToken":{"id":"</s>","type_id":0}}],"special_tokens":{"</s>":{"id":"</s>","ids":[2],"tokens":["</s>"]},"<s>":{"id":"<s>","ids":[0],"tokens":["<s>"]}}},"decoder":{"type":"Metaspace","replacement":"▁","add_prefix_space":true},"model":{"type":"Unigram","unk_id":3,"vocab":[["<s>",0.0],["<pad>",0.0],["</s>",0.0],["<unk>",0.0],[",",-3.4635426998138428],[".",-3.6256427764892
58],["▁",-3.9299705028533936],["s",-5.072621822357178],["▁de",-5.306643009185791],["-",-5.404437065124512],["▁a",-5.530364990234375],["a",-5.5477118492126465],[":",-5.629745960235596],["e",-5.701941967010498],["i",-5.785372257232666],["▁(",-5.926211357116699],[")",-5.9974517822265625],["▁i",-6.0461626052856445],["t",-6.071900844573975],["n",-6.093497276306152],["▁-",-6.1764984130859375],["▁la",-6.233835697174072],["▁en",-6.31805419921875],["▁in",-6.3201680183410645],["▁na",-6.327768802642822],["'",-6.345553398132324],["’",-6.379403591156006],["...",-6.38959264755249],["▁e",-6.3929057121276855],["▁на",-6.396481990814209],["。",-6.411019802093506],["o",-6.417782306671143],["?",-6.460666179656982],["en",-6.465692520141602],["u",-6.467804431915283],["▁и",-6.580890655517578],["▁o",-6.582267761230469],["、",-6.610896110534668],["!",-6.61658239364624],["m",-6.647110939025879],["▁se",-6.716159820556641],["▁que",-6.732594966888428],["r",-6.7604851722717285],["的",-6.779519081115723],["▁\"",-6.814488887786865],["▁di",-6.818948268890381],["▁–",-6.821335315704346],["▁to",-6.913968563079834],["▁da",-6.9197258949279785],["▁в",-6.935658931732178],["،",-6.938859939575195],["▁un",-6.960879325866699],["▁“",-6.971805572509766],["y",-6.976414203643799],["▁do",-6.97949743270874],["▁je",-6.986958980560303],["er",-7.031227111816406],["▁sa",-7.0481696128845215],["\"",-7.0502214431762695],["а",-7.05538272857666],["▁og",-7.068160057067871],["▁за",-7.087216377258301],["▁A",-7.08988618850708],["”",-7.112045764923096],["/",-7.158257007598877],["▁و",-7.174049377441406],["an",-7.205831050872803],["te",-7.230983257293701],["▁die",-7.24462890625],["▁да",-7.283091068267822],["▁the",-7.283115386962891],["d",-7.293672561645508],["▁er",-7.295158863067627],["in",-7.297125816345215],[";",-7.303779125213623],["▁u",-7.319528579711914],["na",-7.320949554443359],["▁не",-7.336023330688477],["▁si",-7.355116367340088],["▁ja",-7.370460510253906],["▁za",-7.37307596206665],["▁v",-7.385393142700195],["▁et",-7.38629150390625],["▁is",-7.402246475219727],["▁у",-7.408124923706055],["da",-7.412682056427002],["ne",-7.417489528656006],["▁I",-7.424222946166992],["▁el",-7.439484119415283],["и",-7.44006872177124],["es",-7.441272735595703],["▁s",-7.452380657196045],["k",-7.471577644348144],["ni",-7.472222328186035],["▁«",-7.474700927734375],["▁le",-7.475855827331543],["▁l",-7.482025623321533],["▁z",-7.490857124328613],["▁on",-7.491168022155762],["▁at",-7.494873046875],["▁for",-7.498734951019287],["▁_",-7.502312660217285],["ta",-7.511834144592285],["е",-7.520388603210449],["▁d",-7.537978649139404],["у",-7.540219783782959],["▁1",-7.551031589508057],["re",-7.563330173492432],["▁ne",-7.563854217529297],["▁på",-7.566867351531982],["▁no",-7.56878137588501],["▁of",-7.573562145233154],["de",-7.589335918426514],["▁y",-7.591180801391602],["▁се",-7.5921502113342285],["▁du",-7.593316555023193],["▁2",-7.619575500488281],["▁per",-7.621859550476074],["ti",-7.62448787689209],["▁yang",-7.629697322845459],["▁te",-7.653661727905273],["▁para",-7.666834831237793],["▁der",-7.676575660705566],["▁dan",-7.680234432220459],["▁som",-7.688714027404785],["।",-7.690441608428955],["et",-7.697822093963623],["h",-7.701241970062256],["▁med",-7.701714992523193],["▁по",-7.702396392822266],["м",-7.716213226318359],["▁van",-7.720690250396728],["(",-7.722129821777344],["le",-7.727717399597168],["▁të",-7.72967004776001],["▁с",-7.740950584411621],["▁and",-7.75172758102417],["▁„",-7.758167743682861],["▁3",-7.759578704833984],["▁til",-7.760351657867432],["ی",-7.761864185333252],["l",-7.762022495
269775],["▁an",-7.762918949127197],["la",-7.770702362060547],["▁al",-7.77080488204956],["ja",-7.7740159034729],["▁del",-7.777421951293945],["ar",-7.794132709503174],["▁w",-7.80467700958252],["▁det",-7.811462879180908],["li",-7.818939208984375],["▁ya",-7.824791431427002],["▁:",-7.829031944274902],["▁...",-7.829613208770752],["の",-7.835560321807861],["“",-7.84205961227417],["▁ka",-7.852991580963135],["no",-7.8542914390563965],["▁con",-7.861650943756104],["▁S",-7.863271236419678],["▁po",-7.864959716796875],["ka",-7.871254920959473],["as",-7.871861934661865],["▁me",-7.873553276062012],["is",-7.876260757446289],["▁und",-7.879409313201904],["▁su",-7.880402088165283],["»",-7.886897087097168],["▁den",-7.895546913146973],["z",-7.898513317108154],["j",-7.900866031646728],["▁om",-7.909025192260742],["si",-7.915489196777344],["▁ve",-7.916622638702393],["▁och",-7.928811073303223],["▁در",-7.929356098175049],["ه",-7.929381370544434],["g",-7.932966709136963],["▁به",-7.942804336547852],["ي",-7.946563243865967],["▁O",-7.966879844665527],["▁د",-7.969033718109131],["▁har",-7.9711456298828125],["▁от",-7.985174179077148],["se",-7.985258102416992],["▁av",-7.985346794128418],["▁be",-7.990510940551758],["▁ar",-7.993113040924072],["to",-8.000646591186523],["▁і",-8.000714302062988],["▁5",-8.026392936706543],["on",-8.028497695922852],["ma",-8.039029121398926],["▁ir",-8.040148735046387],[").",-8.046093940734863],["em",-8.047385215759277],["▁por",-8.057129859924316],["о",-8.059514999389648],["▁es",-8.061369895935059],["▁les",-8.06153392791748],["▁ki",-8.06203842163086],["▁4",-8.062232971191406],["ul",-8.066244125366211],["▁kan",-8.068714141845703],["▁‘",-8.07385540008545],["та",-8.074616432189941],["ch",-8.075675010681152],["▁।",-8.07983112335205],["ga",-8.08101749420166],["▁10",-8.092198371887207],["▁з",-8.093571662902832],["▁il",-8.094630241394043],["на",-8.10124397277832],["在",-8.106016159057617],["ing",-8.10777473449707],["▁και",-8.108624458312988],["▁کے",-8.111339569091797],["it",-8.12078857421875],["▁е",-8.125797271728516],["ra",-8.134736061096191],["▁una",-8.137834548950195],["▁so",-8.140031814575195],["т",-8.144621849060059],["us",-8.146340370178223],["▁des",-8.153715133666992],["▁het",-8.153762817382812],["те",-8.156417846679688],["ы",-8.157510757446289],["▁ku",-8.164762496948242],["▁od",-8.165127754211426],["▁من",-8.165318489074707],["­",-8.169711112976074],["os",-8.170713424682617],["▁op",-8.17135238647461],["▁ng",-8.175311088562012],["ς",-8.179069519042969],["je",-8.179301261901855],["▁as",-8.189149856567383],["c",-8.199555397033691],["▁La",-8.199591636657715],["▁في",-8.200462341308594],["▁E",-8.202266693115234],["▁'",-8.20328426361084],["▁att",-8.205853462219238],["х",-8.211251258850098],["я",-8.213692665100098],["do",-8.21650505065918],["),",-8.222518920898438],["▁/",-8.223021507263184],["▁pa",-8.223164558410645],["ة",-8.223382949829102],["を",-8.22523021697998],["▁а",-8.227853775024414],["▁à",-8.228625297546387],["p",-8.232597351074219],["▁до",-8.241254806518555],["▁ha",-8.246929168701172],["at",-8.248958587646484],["ا",-8.25012493133545],["▁wa",-8.251487731933594],["і",-8.255227088928223],["▁af",-8.259132385253906],["▁De",-8.262863159179688],["▁bir",-8.264545440673828],["和",-8.265955924987793],["ko",-8.266639709472656],["mi",-8.26934814453125],["▁2018",-8.270668983459473],["]",-8.270896911621094],["▁nie",-8.27227783203125],["▁از",-8.276718139648438],["st",-8.2816801071167],["ve",-8.281817436218262],["nya",-8.28469181060791],["了",-8.288339614868164],["b",-8.289443016052246],["▁M",-8.293008804321289],["com"
,-8.293252944946289],["َ",-8.294340133666992],["▁vi",-8.29475212097168],["▁pe",-8.298158645629883],["が",-8.302067756652832],["me",-8.302555084228516],["▁és",-8.304712295532227],["A",-8.304994583129883],["▁var",-8.316362380981445],["▁um",-8.317639350891113],["▁के",-8.318626403808594],["▁کی",-8.321968078613281],["al",-8.325163841247559],["▁che",-8.325228691101074],["▁ma",-8.327710151672363],["▁—",-8.329010009765625],["▁een",-8.33410930633545],["S",-8.335785865783691],["▁az",-8.341935157775879],["▁је",-8.350981712341309],["ed",-8.355191230773926],["▁ko",-8.355914115905762],["ny",-8.357503890991211],["▁ni",-8.3582181930542],["ki",-8.359819412231445],["am",-8.36097240447998],["н",-8.368477821350098],["2",-8.369061470031738],["▁6",-8.376090049743652],["om",-8.37712574005127],["▁va",-8.377157211303711],["▁ta",-8.378125190734863],["un",-8.381165504455566],["▁V",-8.387639045715332],["▁ke",-8.390829086303711],["й",-8.396209716796875],["▁C",-8.396952629089355],["▁cu",-8.398150444030762],["▁nu",-8.400979042053223],["um",-8.403388023376465],["▁میں",-8.404854774475098],["ci",-8.413408279418945],["ku",-8.41537094116211],["lar",-8.417649269104004],["▁din",-8.420601844787598],["▁në",-8.421332359313965],["ن",-8.42164134979248],["▁mi",-8.426046371459961],["▁და",-8.429140090942383],["▁во",-8.433842658996582],["に",-8.438796997070312],["то",-8.448441505432129],["ce",-8.450657844543457],["va",-8.4537353515625],["kan",-8.457025527954102],["▁man",-8.457426071166992],["▁bi",-8.467313766479492],["v",-8.47355842590332],["▁B",-8.477783203125],["не",-8.478170394897461],["▁bo",-8.481490135192871],["▁од",-8.483658790588379],["▁L",-8.484365463256836],["▁»",-8.484634399414062],["▁K",-8.490921020507812],["は",-8.493261337280273],["ek",-8.49502944946289],["ak",-8.496010780334473],["▁U",-8.497254371643066],["▁în",-8.497541427612305],["▁m",-8.4979887008667],["▁ang",-8.501138687133789],["▁է",-8.504491806030273],["ke",-8.504961013793945],["▁non",-8.50499439239502],["▁em",-8.507031440734863],["▁Na",-8.510491371154785],["是",-8.511689186096191],["ს",-8.512619018554688],["को",-8.513169288635254],["▁En",-8.513978004455566],["ни",-8.517156600952148],["▁ال",-8.52457332611084],["▁In",-8.524736404418945],["▁7",-8.525569915771484],["▁os",-8.5321683883667],["3",-8.533885955810547],["▁ה",-8.536396026611328],["lo",-8.545232772827148],["▁par",-8.546632766723633],["의",-8.548405647277832],["ت",-8.54870319366455],["▁är",-8.55067253112793],["▁tu",-8.552896499633789],["‘",-8.555432319641113],["be",-8.558698654174805],["▁bu",-8.560547828674316],["▁το",-8.56318187713623],["▁com",-8.566923141479492],["م",-8.566967010498047],["▁ca",-8.569270133972168],["▁[",-8.569624900817871],["il",-8.571856498718262],["но",-8.575328826904297],["▁das",-8.575770378112793],["▁8",-8.576967239379883],["▁می",-8.577592849731445],["▁T",-8.577641487121582],["▁του",-8.578095436096191],["▁να",-8.58119010925293],["▁20",-8.58205795288086],["▁los",-8.583404541015625],["▁að",-8.584245681762695],["▁by",-8.587043762207031],["▁D",-8.591439247131348],["▁á",-8.592487335205078],["▁é",-8.59322738647461],["▁för",-8.593844413757324],["ya",-8.594854354858398],["ات",-8.608479499816895],["D",-8.609557151794434],["▁you",-8.610285758972168],["ia",-8.611927032470703],["▁li",-8.615797996520996],["ի",-8.617545127868652],["ba",-8.622004508972168],["▁که",-8.622882843017578],["▁zu",-8.624557495117188],["▁ce",-8.626120567321777],["▁را",-8.630060195922852],["▁о",-8.630776405334473],["ca",-8.631218910217285],["▁mai",-8.63427448272705],["▁się",-8.636299133300781],["मा",-8.637207984924316],["▁با",-8.6415224
07531738],["을",-8.642354965209961],["▁что",-8.643779754638672],["ка",-8.64509105682373],["ri",-8.647806167602539],["▁В",-8.648494720458984],["1",-8.649255752563477],["ом",-8.64979362487793],["f",-8.649986267089844],["▁में",-8.650482177734375],["▁yn",-8.651301383972168],["▁15",-8.653078079223633],["▁из",-8.655006408691406],["x",-8.660321235656738],["▁å",-8.661585807800293],["▁12",-8.663192749023438],["di",-8.6665678024292],["ge",-8.66818904876709],["í",-8.669124603271484],["و",-8.675025939941406],["mo",-8.675875663757324],["sa",-8.679758071899414],["w",-8.682831764221191],["▁met",-8.684473037719727],["▁P",-8.68532657623291],["▁est",-8.685562133789062],["▁No",-8.689908981323242],["▁í",-8.69051742553711],["▁при",-8.699014663696289],["C",-8.699597358703613],["▁it",-8.702272415161133],["▁ist",-8.705501556396484],["▁am",-8.705870628356934],["▁iz",-8.706581115722656],["é",-8.712347030639648],["▁У",-8.714998245239258],["ים",-8.718018531799316],["ng",-8.720099449157715],["▁that",-8.72273063659668],["ă",-8.724922180175781],["▁pas",-8.725055694580078],["▁men",-8.725703239440918],["_",-8.72813892364502],["ը",-8.729147911071777],["▁re",-8.729783058166504],["▁са",-8.734212875366211],["tu",-8.734413146972656],["▁lo",-8.736749649047852],["▁है",-8.737104415893555],["ju",-8.737150192260742],["▁dengan",-8.737261772155762],["▁της",-8.739017486572266],["im",-8.739598274230957],["有",-8.740579605102539],["▁tak",-8.740862846374512],["▁dhe",-8.741000175476074],["▁#",-8.741698265075684],["이",-8.746895790100098],["年",-8.748784065246582],["▁की",-8.749091148376465],["▁k",-8.752243041992188],["▁este",-8.754573822021484],["ur",-8.754871368408203],["▁ei",-8.756780624389648],["г",-8.759315490722656],["▁است",-8.760712623596191],["ie",-8.761056900024414],["▁pre",-8.761504173278809],["에",-8.765233993530273],["ir",-8.765268325805664],["▁untuk",-8.76549243927002],["▁9",-8.765725135803223],["▁sem",-8.769726753234863],["▁ini",-8.773675918579102],["▁ut",-8.774187088012695],["人",-8.774442672729492],["ِ",-8.774731636047363],["▁та",-8.777359008789062],["▁اور",-8.778549194335938],["▁mit",-8.778573036193848],["▁جي",-8.77881145477295],["▁ver",-8.781251907348633],["▁pri",-8.787092208862305],["▁så",-8.78979778289795],["▁30",-8.791316032409668],["ho",-8.792523384094238],["▁این",-8.795047760009766],["▁کا",-8.796040534973145],["▁په",-8.796231269836426],["▁c",-8.796775817871094],["▁pro",-8.79685115814209],["▁Se",-8.802419662475586],["▁سے",-8.803434371948242],["▁2017",-8.80356216430664],["ה",-8.804070472717285],["で",-8.805150985717773],["ai",-8.807412147521973],["▁was",-8.810138702392578],["ten",-8.811378479003906],["▁την",-8.811773300170898],["от",-8.81633186340332],["jo",-8.817042350769043],["中",-8.817706108093262],["ov",-8.817838668823242],["ro",-8.818936347961426],["▁care",-8.820284843444824],["▁для",-8.82458209991455],["go",-8.825934410095215],["▁ser",-8.827760696411133],["▁për",-8.827917098999023],["▁eta",-8.8289213180542],["▁پر",-8.830430030822754],["▁có",-8.830436706542969],["est",-8.8350191116333],["д",-8.840970039367676],["▁G",-8.841058731079102],["ha",-8.841383934020996],["▁со",-8.852415084838867],["▁və",-8.859338760375977],["▁au",-8.866000175476074],["id",-8.866331100463867],["▁como",-8.868606567382812],["▁11",-8.870092391967773],["ले",-8.871129989624023],["▁А",-8.871901512145996],["▁ay",-8.872276306152344],["ly",-8.873795509338379],["nu",-8.873832702636719],["▁El",-8.877116203308105],["▁N",-8.878573417663574],["▁von",-8.87989330291748],["▁18",-8.880353927612305],["▁và",-8.881560325622559],["▁ikke",-8.884970664978027],["ли",-8.8944
5686340332],["л",-8.894556045532227],["ን",-8.896896362304688],["dan",-8.898950576782227],["▁của",-8.900094985961914],["ла",-8.90051555633545],["▁co",-8.900552749633789],["▁او",-8.902569770812988],["▁کو",-8.90641975402832],["den",-8.907776832580566],["▁على",-8.907960891723633],["ndo",-8.908272743225098],["▁με",-8.909878730773926],["ов",-8.913491249084473],["」",-8.913808822631836],["mu",-8.91528034210205],["不",-8.915413856506348],["▁F",-8.916865348815918],["▁ב",-8.917595863342285],["▁è",-8.918875694274902],["▁im",-8.919073104858398],["▁Z",-8.91927433013916],["I",-8.919625282287598],["▁qui",-8.919754981994629],["▁till",-8.922435760498047],["B",-8.923480033874512],["▁H",-8.923832893371582],["大",-8.925945281982422],["ം",-8.929733276367188],["上",-8.931924819946289],["▁las",-8.93349552154541],["ُ",-8.934319496154785],["▁pour",-8.935561180114746],["▁Det",-8.936155319213867],["▁là",-8.942756652832031],["▁The",-8.943010330200195],["▁Vi",-8.943692207336426],["el",-8.945587158203125],["▁های",-8.948911666870117],["「",-8.94989013671875],["▁ў",-8.950296401977539],["co",-8.9508695602417],["▁fra",-8.951292991638184],["▁С",-8.956583023071289],["sta",-8.956795692443848],["▁про",-8.962263107299805],["ning",-8.962279319763184],["ل",-8.963071823120117],["M",-8.964536666870117],["ą",-8.966904640197754],["za",-8.9705810546875],["lik",-8.972264289855957],["▁že",-8.97653865814209],["▁ein",-8.978093147277832],["▁ny",-8.981180191040039],["▁W",-8.98157024383545],["▁Si",-8.983951568603516],["ler",-8.98431396484375],["▁her",-8.984784126281738],["K",-8.985796928405762],["▁ad",-8.98613166809082],["▁dat",-8.988334655761719],["ها",-8.988945007324219],["ת",-8.990631103515625],["と",-8.994420051574707],["▁16",-8.994881629943848],["▁pentru",-8.99555778503418],["▁sur",-8.995820045471191],["▁για",-8.9977388381958],["▁1.",-8.99776840209961],["▁14",-8.998332977294922],["4",-8.99893856048584],["T",-9.00064754486084],["▁&",-9.002508163452148],["▁של",-9.004742622375488],["▁are",-9.010615348815918],["▁Die",-9.012301445007324],["▁የ",-9.013846397399902],["▁voor",-9.0150785446167],["ი",-9.017183303833008],["ა",-9.018770217895508],["▁R",-9.0201997756958],["▁ou",-9.020796775817873],["▁को",-9.021939277648926],["月",-9.02311897277832],["我",-9.027488708496094],["▁mga",-9.028017044067385],["”,",-9.0287504196167],["wa",-9.028922080993652],["日",-9.031612396240234],["▁Le",-9.03233242034912],["▁или",-9.032631874084473],["▁dalam",-9.03400421142578],["▁ہے",-9.034796714782717],["ти",-9.036293983459473],["▁का",-9.03656005859375],["▁we",-9.039949417114258],["▁für",-9.040390968322754],["▁auf",-9.040831565856934],["▁over",-9.040912628173828],["▁से",-9.042977333068848],["E",-9.044665336608888],["ية",-9.046391487121582],["▁су",-9.04786777496338],["с",-9.049890518188477],["▁που",-9.051790237426758],["в",-9.052420616149902],["▁n",-9.052701950073242],["▁ich",-9.053545951843262],["▁dos",-9.05600643157959],["का",-9.057720184326172],["▁ל",-9.059584617614746],["ji",-9.062299728393556],["ी",-9.066560745239258],["nt",-9.067607879638672],["▁*",-9.068071365356444],["▁ale",-9.068229675292969],["▁Po",-9.069149017333984],["▁aan",-9.069208145141602],["▁م",-9.07016372680664],["▁છે",-9.070534706115724],["▁Bu",-9.072623252868652],["▁jeg",-9.0731201171875],["man",-9.074151039123535],["O",-9.076007843017578],["▁eller",-9.08041763305664],["▁На",-9.084749221801758],["▁ra",-9.087812423706056],["ment",-9.088258743286133],["▁што",-9.088852882385254],["▁ب",-9.089756965637209],["да",-9.090639114379885],["▁with",-9.09117603302002],["▁από",-9.095274925231934],["га",-9.096118927001951],["
▁cho",-9.096586227416992],["▁nem",-9.097980499267578],["P",-9.098166465759276],["一",-9.102598190307615],["ok",-9.102860450744627],["vi",-9.103118896484377],["▁không",-9.105111122131348],["를",-9.106209754943848],["▁את",-9.108163833618164],["▁то",-9.108492851257324],["ê",-9.108893394470217],["ն",-9.109390258789062],["du",-9.113390922546388],["nin",-9.1139554977417],["го",-9.114588737487791],["://",-9.114823341369627],["은",-9.116799354553224],["к",-9.116905212402344],["ที่",-9.124282836914062],["▁ge",-9.124996185302734],["nie",-9.125067710876465],["▁13",-9.125503540039062],["ً",-9.12863826751709],["▁ter",-9.12943172454834],["▁?",-9.12965202331543],["▁g",-9.131820678710938],["▁or",-9.134698867797852],["zi",-9.136881828308104],["á",-9.136889457702637],["י",-9.137608528137209],["▁!",-9.138727188110352],["ad",-9.13996124267578],["가",-9.140023231506348],["▁25",-9.140275955200195],["▁جو",-9.14141845703125],["▁ان",-9.142242431640623],["▁To",-9.144102096557615],["▁к",-9.14424991607666],["▁kaj",-9.14524269104004],["ter",-9.146991729736328],["▁meg",-9.147032737731934],["ට",-9.147146224975586],["ма",-9.14719009399414],["G",-9.1488676071167],["▁،",-9.150372505187988],["▁şi",-9.150694847106934],["▁Sie",-9.153362274169922],["▁በ",-9.154901504516602],["▁17",-9.155641555786133],["▁hogy",-9.157687187194824],["ze",-9.15980339050293],["▁mais",-9.159883499145508],["né",-9.161134719848633],["▁dari",-9.161781311035156],["gi",-9.16303253173828],["ш",-9.16396141052246],["▁als",-9.164063453674316],["▁go",-9.165409088134766],["▁ho",-9.165645599365234],["\".",-9.167020797729492],["▁jo",-9.167539596557615],["ите",-9.167799949645996],["ю",-9.16933250427246],["▁24",-9.170135498046877],["▁dem",-9.170515060424805],["▁ت",-9.171648025512695],["▁alle",-9.17231273651123],["or",-9.172828674316406],["▁nicht",-9.173312187194824],["▁û",-9.173539161682127],["ки",-9.17357063293457],["▁itu",-9.175378799438477],["▁па",-9.175925254821776],["▁اس",-9.176112174987791],["ين",-9.176533699035645],["▁all",-9.17773723602295],["▁0",-9.179390907287598],["5",-9.180099487304688],["▁my",-9.18289566040039],["▁kwa",-9.183601379394531],["ban",-9.18553352355957],["▁tidak",-9.185559272766112],["pa",-9.186735153198242],["▁he",-9.18873119354248],["▁have",-9.18918228149414],["ում",-9.189682006835938],["▁ي",-9.191478729248049],["▁•",-9.193609237670898],["는",-9.197030067443848],["▁ber",-9.197957038879396],["po",-9.199155807495115],["▁să",-9.199546813964844],["▁une",-9.200051307678224],["▁र",-9.204625129699709],["▁son",-9.204952239990234],["ben",-9.205275535583496],["▁han",-9.20656394958496],["▁نے",-9.208064079284668],["▁но",-9.210156440734863],["▁2016",-9.210256576538086],["▁η",-9.214226722717283],["▁ته",-9.214642524719238],["ik",-9.217286109924316],["▁я",-9.21914291381836],["ان",-9.22323226928711],["▁Du",-9.224126815795898],["▁2.",-9.226000785827637],["▁uma",-9.227346420288086],["到",-9.227911949157717],["ів",-9.228084564208984],["▁եւ",-9.229443550109863],["”.",-9.230523109436035],["▁wat",-9.23345947265625],["š",-9.237140655517578],["▁ل",-9.23928451538086],["ou",-9.239537239074709],["ní",-9.240830421447754],["▁(1)",-9.24154567718506],["▁Ne",-9.243203163146973],["р",-9.243230819702148],["▁си",-9.244105339050291],["▁۾",-9.24562168121338],["▁Di",-9.24885082244873],["▁И",-9.249923706054688],["▁100",-9.25047206878662],["▁oo",-9.252707481384276],["▁dans",-9.252708435058594],["▁t",-9.253575325012209],["▁fi",-9.255378723144531],["▁jag",-9.255592346191406],["▁вы",-9.25631618499756],["ен",-9.25738525390625],["ын",-9.257719039916992],["ver",-9.257781982421877],["▁që",-9.2
57967948913574],["ም",-9.258596420288086],["▁nga",-9.259050369262695],["ot",-9.267127990722656],["▁как",-9.26819896697998],["der",-9.268291473388672],["▁J",-9.268522262573242],["lu",-9.268874168395996],["▁Ja",-9.2692232131958],["ste",-9.270514488220217],["▁Za",-9.271903038024902],["د",-9.272168159484863],["ми",-9.273432731628418],["▁și",-9.273597717285156],["▁За",-9.274200439453123],["\",",-9.274276733398438],["▁can",-9.274348258972168],["▁della",-9.275382041931152],["▁sich",-9.278862953186035],["▁jest",-9.27920913696289],["ر",-9.2794771194458],["▁50",-9.279532432556152],["bo",-9.282983779907228],["ं",-9.287854194641112],["N",-9.288337707519531],["mos",-9.288402557373049],["H",-9.289240837097168],["▁mu",-9.289596557617188],["▁pada",-9.289867401123049],["等",-9.289950370788574],["▁Je",-9.29055881500244],["io",-9.292072296142578],["്",-9.29219913482666],["▁छ",-9.29325008392334],["ó",-9.293282508850098],["ות",-9.295504570007324],["ის",-9.295584678649902],["他",-9.299172401428224],["▁Re",-9.299670219421388],["▁dit",-9.30209255218506],["▁pod",-9.303495407104492],["V",-9.304312705993652],["も",-9.31307315826416],["▁sau",-9.313690185546877],["▁agus",-9.314043998718262],["▁τα",-9.318511009216309],["ж",-9.31925106048584],["▁أن",-9.32010269165039],["ų",-9.32418727874756],["q",-9.32458209991455],["ów",-9.32472038269043],["L",-9.324870109558104],["tar",-9.32560920715332],["ରେ",-9.32565975189209],["ž",-9.32642936706543],["ش",-9.32898998260498],["▁और",-9.329050064086914],["ig",-9.330278396606444],["▁Be",-9.330318450927734],["▁מ",-9.3310546875],["kin",-9.331557273864746],["▁b",-9.331740379333496],["▁ما",-9.332493782043455],["▁ا",-9.33310604095459],["▁dia",-9.333159446716309],["▁sin",-9.333536148071287],["▁min",-9.333722114562988],["ru",-9.334487915039062],["▁۽",-9.335156440734863],["▁Al",-9.335393905639648],["ina",-9.335424423217772],["也",-9.335541725158691],["就",-9.3360013961792],["ме",-9.33670711517334],["▁một",-9.33787727355957],["▁ji",-9.337931632995604],["▁Il",-9.33911418914795],["▁inte",-9.339412689208984],["▁An",-9.341317176818848],["ні",-9.342265129089355],["▁על",-9.345603942871094],["ের",-9.34603500366211],["oj",-9.346899032592772],["▁një",-9.347655296325684],["▁що",-9.347856521606444],["ون",-9.349527359008787],["▁По",-9.349780082702637],["ى",-9.352302551269531],["▁this",-9.35264015197754],["وں",-9.353793144226074],["ו",-9.355067253112791],["▁الم",-9.355377197265623],["ہ",-9.358236312866213],["▁Per",-9.358357429504396],["▁eine",-9.358375549316406],["6",-9.360912322998049],["▁Ma",-9.36208724975586],["▁được",-9.36415958404541],["ita",-9.364544868469238],["▁ga",-9.364801406860352],["▁p",-9.365259170532228],["au",-9.367286682128906],["▁ها",-9.367757797241213],["▁2015",-9.368926048278809],["F",-9.36932373046875],["▁amb",-9.36976432800293],["▁auch",-9.371761322021484],["▁ako",-9.375099182128906],["▁ali",-9.375162124633787],["▁کہ",-9.379314422607422],["▁các",-9.382975578308104],["س",-9.38442325592041],["th",-9.384464263916016],["ку",-9.387293815612791],["ol",-9.387616157531738],["▁Da",-9.387701034545898],["▁mo",-9.38770866394043],["▁ze",-9.390381813049316],["ts",-9.390750885009766],["ach",-9.39092254638672],["▁your",-9.391132354736328],["dir",-9.391524314880373],["ska",-9.391619682312012],["ம்",-9.393388748168944],["ty",-9.393775939941406],["▁ao",-9.395411491394045],["ului",-9.39805507659912],["ක්",-9.39828109741211],["ini",-9.399343490600586],["que",-9.400616645812988],["▁akan",-9.401762008666992],["ና",-9.401796340942385],["▁Sa",-9.402860641479492],["▁го",-9.403124809265137],["▁kun",-9.40345287322998],["▁©
",-9.406279563903809],["▁nos",-9.410724639892578],["▁21",-9.411081314086914],["▁19",-9.411993026733398],["▁sy",-9.412508964538574],["地",-9.41384220123291],["ে",-9.415071487426758],["ну",-9.416175842285156],["▁uz",-9.416645050048828],["▁not",-9.417351722717283],["▁mal",-9.417628288269045],["▁ba",-9.417988777160645],["▁σε",-9.420607566833496],["10",-9.42248821258545],["bi",-9.423229217529297],["vo",-9.424642562866213],["7",-9.427630424499512],["▁एक",-9.429312705993652],["▁पर",-9.430109977722168],["▁под",-9.430294036865234],["▁”",-9.430448532104492],["ú",-9.43130111694336],["ك",-9.431324005126951],["ä",-9.432011604309082],[".”",-9.433137893676758],["▁че",-9.435275077819824],["े",-9.435395240783691],["▁>",-9.435447692871094],["bu",-9.437965393066406],["hi",-9.438912391662598],["ları",-9.440841674804688],["@",-9.441329956054688],["ren",-9.441865921020508],["ий",-9.443041801452637],["▁Do",-9.444350242614746],["▁των",-9.445845603942873],["▁alla",-9.445929527282717],["tur",-9.44681167602539],["des",-9.447047233581545],["ся",-9.447165489196776],["▁Y",-9.448863983154297],["so",-9.451133728027344],["▁Un",-9.452092170715332],["한",-9.45376968383789],["你",-9.454510688781738],["▁ob",-9.457472801208496],["त",-9.457737922668455],["▁+",-9.460565567016602],["न",-9.46140956878662],["র",-9.461655616760254],["▁trong",-9.462051391601562],["▁plus",-9.463733673095703],["ky",-9.46375560760498],["ut",-9.46432113647461],["▁Er",-9.464439392089844],["▁más",-9.465105056762695],["ны",-9.467531204223633],["▁στο",-9.467885971069336],["▁người",-9.468488693237305],["их",-9.468530654907228],["cu",-9.470626831054688],["▁dar",-9.470805168151855],["▁برای",-9.471752166748049],["▁tai",-9.47419548034668],["▁egy",-9.474421501159668],["sen",-9.47447681427002],["▁کې",-9.47485065460205],["▁vir",-9.475507736206056],["ele",-9.478610038757324],["8",-9.47879123687744],["▁수",-9.47888469696045],["fa",-9.479153633117676],["▁x",-9.481934547424316],["▁vous",-9.482863426208496],["ದ",-9.48438549041748],["ë",-9.486821174621582],["ा",-9.488632202148438],["▁não",-9.488740921020508],["▁sobre",-9.489019393920898],["fi",-9.48967456817627],["▁ac",-9.490610122680664],["▁3.",-9.490775108337402],["ć",-9.490818977355955],["ye",-9.492547988891602],["以",-9.49267864227295],["ista",-9.493133544921877],["对",-9.493570327758787],["ของ",-9.494378089904783],["▁ο",-9.495956420898438],["▁22",-9.496496200561523],["出",-9.496801376342772],["ь",-9.497625350952148],["sk",-9.498204231262209],["ih",-9.499438285827637],["▁ett",-9.499934196472168],["▁ви",-9.503026008605955],["are",-9.50424575805664],["ат",-9.50430679321289],["도",-9.505677223205566],["▁2014",-9.506145477294922],["要",-9.50741958618164],["▁είναι",-9.50775909423828],["R",-9.508118629455566],["▁ti",-9.509035110473633],["▁ו",-9.509771347045898],["men",-9.511724472045898],["▁zijn",-9.51454734802246],["ı",-9.514688491821287],["ම",-9.514738082885742],["▁ci",-9.51894187927246],["ْ",-9.519185066223145],["▁So",-9.51940631866455],["U",-9.51954174041748],["▁vil",-9.520012855529783],["为",-9.52134609222412],["[",-9.522266387939451],["ର",-9.523975372314451],["nde",-9.524624824523926],["ч",-9.52487564086914],["نا",-9.525500297546388],["▁г",-9.52572250366211],["▁fa",-9.527026176452637],["▁kad",-9.52813720703125],["tas",-9.528417587280272],["માં",-9.528919219970703],["▁ਦੇ",-9.531161308288574],["tan",-9.531752586364746],["고",-9.533198356628418],["п",-9.53378200531006],["▁bei",-9.53514003753662],["▁раз",-9.535477638244627],["pe",-9.535565376281738],["ого",-9.536376953125],["로",-9.536968231201172],["sti",-9.539387702941896],["ды",-9.
53950023651123],["▁об",-9.540864944458008],["▁Не",-9.54086685180664],["▁że",-9.545248031616213],["▁О",-9.54537296295166],["ada",-9.54591178894043],["▁ще",-9.547650337219238],["ం",-9.547985076904297],["▁без",-9.54813003540039],["▁:)",-9.548583030700684],["ში",-9.551457405090332],["▁h",-9.553081512451172],["▁это",-9.553460121154783],["ken",-9.554195404052734],["▁için",-9.55470085144043],["▁sex",-9.555216789245604],["▁وال",-9.55543613433838],["及",-9.5563383102417],["▁qu",-9.556811332702637],["–",-9.557047843933104],["▁23",-9.557348251342772],["he",-9.557723999023438],["而",-9.559625625610352],["▁mar",-9.56204891204834],["與",-9.56478214263916],["ın",-9.565210342407228],["▁Men",-9.565606117248535],["▁40",-9.566628456115724],["ná",-9.567344665527344],["ия",-9.567633628845217],["▁els",-9.567638397216797],["▁với",-9.568115234375],["ть",-9.568150520324709],["▁online",-9.568780899047852],["▁ex",-9.569488525390623],["ва",-9.56980800628662],["han",-9.569931983947754],["aj",-9.57040023803711],["▁uit",-9.572211265563965],["das",-9.572348594665527],["де",-9.57300853729248],["9",-9.574579238891602],["nda",-9.576127052307127],["小",-9.576431274414062],["▁ju",-9.57646369934082],["下",-9.57734203338623],["ය",-9.577730178833008],["hu",-9.57785987854004],["▁کي",-9.57985496520996],["▁skal",-9.581615447998049],["`",-9.581768035888672],["▁Ka",-9.582079887390137],["▁له",-9.582528114318848],["ado",-9.582815170288086],["ના",-9.584280967712402],["dos",-9.58546543121338],["nak",-9.586254119873049],["▁ने",-9.586669921875],["▁mer",-9.587167739868164],["▁sig",-9.587881088256836],["▁так",-9.588897705078123],["!!",-9.589669227600098],["会",-9.590448379516602],["▁entre",-9.590816497802734],["▁هم",-9.593616485595703],["ción",-9.594247817993164],["ene",-9.59463596343994],["▁tra",-9.594686508178713],["kar",-9.596979141235352],["▁vy",-9.598516464233398],["▁Az",-9.598691940307615],["ção",-9.599252700805664],["▁לא",-9.599287033081056],["zo",-9.599873542785645],["su",-9.600014686584473],["▁အ",-9.601140022277832],["▁kas",-9.601357460021973],["uri",-9.601694107055664],["▁mas",-9.602076530456545],["▁į",-9.602667808532717],["lla",-9.603671073913574],["▁atau",-9.606419563293455],["ની",-9.60670566558838],["ّ",-9.606856346130373],["▁seu",-9.606916427612305],["ů",-9.609798431396484],["▁už",-9.610058784484863],["▁Ar",-9.611007690429688],["用",-9.611517906188965],["nia",-9.61328125],["▁eu",-9.614297866821287],["my",-9.614360809326172],["we",-9.614883422851562],["ው",-9.615296363830566],["net",-9.616982460021973],["에서",-9.61751651763916],["ll",-9.62045955657959],["▁від",-9.62077808380127],["mente",-9.62083339691162],["▁Es",-9.621520042419434],["▁kar",-9.623283386230469],["▁tot",-9.62344455718994],["ने",-9.624403953552246],["ේ",-9.62476921081543],["与",-9.625773429870604],["지",-9.625944137573242],["ami",-9.62611198425293],["ssa",-9.626348495483398],["▁X",-9.626352310180664],["▁oli",-9.626826286315918],["ع",-9.628833770751951],["ных",-9.629215240478516],["30",-9.629667282104492],["都",-9.630285263061523],["▁jak",-9.63039779663086],["ле",-9.631242752075195],["ใน",-9.63174819946289],["▁video",-9.634387969970703],["▁все",-9.634922981262209],["▁Ko",-9.635401725769045],["ske",-9.635494232177734],["ά",-9.635782241821287],["▁fan",-9.635873794555664],["▁Mi",-9.636358261108398],["▁bez",-9.636474609375],["▁2013",-9.63759994506836],["ן",-9.641027450561523],["ine",-9.642523765563965],["และ",-9.642602920532228],["ах",-9.643065452575684],["▁Me",-9.64306640625],["▁saya",-9.643272399902344],["や",-9.643321990966797],["▁Ta",-9.644025802612305],["ло",-9.644115447998049],
["▁juga",-9.644882202148438],["▁will",-9.650824546813965],["▁θα",-9.651020050048828],["क",-9.651144981384276],["ks",-9.65139865875244],["▁wie",-9.651451110839844],["▁ved",-9.652276992797852],["▁στην",-9.652603149414062],["▁آن",-9.65263843536377],["lle",-9.653215408325195],["&",-9.653817176818848],["ę",-9.65501880645752],["▁ook",-9.655452728271484],["▁են",-9.655712127685549],["gu",-9.655814170837402],["▁parte",-9.655977249145508],["ant",-9.657833099365234],["▁لل",-9.658316612243652],["▁f",-9.658580780029297],["▁mis",-9.659417152404783],["▁ez",-9.659984588623049],["▁تو",-9.660466194152832],[".\"",-9.66055393218994],["▁ada",-9.660932540893556],["▁τον",-9.660982131958008],["؟",-9.661115646362305],["▁fel",-9.662142753601074],["▁چې",-9.662410736083984],["▁vor",-9.66317081451416],["min",-9.663958549499512],["▁Pro",-9.664514541625977],["».",-9.668113708496094],["▁може",-9.668119430541992],["▁हो",-9.6690092086792],["▁iš",-9.670089721679688],["ez",-9.670428276062012],["▁hat",-9.670469284057615],["▁up",-9.670775413513184],["ам",-9.67147731781006],["▁kita",-9.67276668548584],["▁ons",-9.672881126403809],["▁Li",-9.673724174499512],["▁کر",-9.674080848693848],["▁ah",-9.674179077148438],["per",-9.67483139038086],["▁آهي",-9.675137519836426],["▁он",-9.676328659057615],["ին",-9.676382064819336],["▁sunt",-9.676490783691406],["ке",-9.677204132080078],["ного",-9.677955627441406],["▁iyo",-9.678144454956056],["by",-9.67880153656006],["但",-9.67886734008789],["▁hi",-9.678874969482422],["▁·",-9.679158210754396],["▁sind",-9.679346084594728],["lt",-9.68024730682373],["性",-9.68087100982666],["ό",-9.681459426879885],["·",-9.681750297546388],["ами",-9.682035446166992],["ال",-9.682504653930664],["rea",-9.682531356811523],["▁but",-9.682559967041016],["▁די",-9.682680130004885],["▁more",-9.683137893676758],["▁نه",-9.68356990814209],["多",-9.683948516845703],["▁sam",-9.684335708618164],["tor",-9.685091018676758],["과",-9.685598373413086],["ā",-9.688302040100098],["▁हैं",-9.69004726409912],["ry",-9.69105052947998],["▁from",-9.69173812866211],["లో",-9.693593978881836],["ber",-9.693892478942873],["à",-9.695076942443848],["bar",-9.698172569274902],["▁ai",-9.69838523864746],["▁As",-9.698895454406738],["ों",-9.699655532836914],["nek",-9.699708938598633],["lı",-9.70193862915039],["▁post",-9.702012062072754],["nih",-9.702229499816896],["▁aj",-9.703210830688477],["な",-9.70376205444336],["から",-9.703826904296877],["▁Der",-9.704350471496582],["▁Co",-9.705989837646484],["▁aus",-9.706113815307615],["ή",-9.70683479309082],["ers",-9.706972122192385],["них",-9.707056045532228],["з",-9.707621574401855],["被",-9.708003044128418],["為",-9.708331108093262],["▁më",-9.70896816253662],["თ",-9.70912742614746],["ව",-9.710101127624512],["好",-9.712218284606934],["за",-9.712322235107422],["▁2012",-9.712812423706056],["ب",-9.714176177978516],["▁For",-9.714216232299805],["▁لا",-9.714284896850586],["+",-9.714576721191406],["▁sie",-9.714922904968262],["sz",-9.714985847473145],["▁werden",-9.71537971496582],["▁нь",-9.716836929321287],["▁أ",-9.717007637023926],["ej",-9.718137741088867],["▁إلى",-9.71821117401123],["ski",-9.71823787689209],["▁hai",-9.719112396240234],["▁τους",-9.719320297241213],["»,",-9.719687461853027],["ile",-9.71994400024414],["leri",-9.720264434814451],["▁Pa",-9.721074104309082],["ят",-9.722315788269045],["子",-9.72360134124756],["▁Ke",-9.72396469116211],["▁film",-9.724055290222168],["▁vai",-9.72485065460205],["▁niet",-9.725491523742676],["ել",-9.72595500946045],["▁ile",-9.726445198059082],["ു",-9.727113723754885],["ima",-9.727120399475098],["ven",
-9.729483604431152],["ə",-9.730582237243652],["▁же",-9.732608795166016],["च",-9.733362197875977],["ere",-9.733800888061523],["▁những",-9.734172821044922],["▁بر",-9.734641075134276],["▁tre",-9.73566436767578],["లు",-9.736577033996582],["▁că",-9.738061904907228],["tion",-9.73834228515625],["အ",-9.739540100097656],["▁عن",-9.739678382873535],["ah",-9.740649223327637],["▁oder",-9.74561595916748],["▁år",-9.745771408081056],["=",-9.746337890625],["▁ਦੀ",-9.74655055999756],["▁Tu",-9.747414588928224],["▁28",-9.748703956604004],["し",-9.750066757202148],["▁@",-9.750085830688477],["J",-9.751221656799316],["▁ik",-9.751489639282228],["▁ag",-9.751761436462402],["新",-9.7530517578125],["▁under",-9.754140853881836],["▁come",-9.755123138427734],["▁26",-9.756399154663086],["▁К",-9.757333755493164],["▁आहे",-9.757973670959473],["▁adalah",-9.758195877075195],["लाई",-9.758574485778809],["ත්",-9.758707046508787],["ийн",-9.75882625579834],["▁)",-9.758934020996094],["▁sono",-9.75942325592041],["jen",-9.759969711303713],["▁Ha",-9.76028823852539],["▁2011",-9.760924339294434],["र",-9.763418197631836],["ний",-9.764917373657228],["高",-9.765386581420898],["▁është",-9.76952838897705],["▁estas",-9.769808769226074],["▁4.",-9.770756721496582],["ei",-9.77113914489746],["▁само",-9.771306991577148],["▁We",-9.774569511413574],["nın",-9.774700164794922],["可",-9.77493381500244],["ला",-9.775364875793455],["val",-9.775945663452148],["▁ਹੈ",-9.77600383758545],["▁upp",-9.778010368347168],["▁đã",-9.778624534606934],["gen",-9.779281616210938],["yo",-9.780431747436523],["uje",-9.78044891357422],["ње",-9.781253814697266],["▁Te",-9.782221794128418],["lan",-9.782549858093262],["▁blog",-9.78260326385498],["▁nas",-9.783257484436035],["ко",-9.78347873687744],["iz",-9.783759117126465],["ist",-9.78423023223877],["之",-9.784327507019045],["▁dass",-9.785402297973633],["ната",-9.78572940826416],["▁الله",-9.786359786987305],["аў",-9.78769588470459],["▁olan",-9.787880897521973],["▁www",-9.788339614868164],["▁ri",-9.788453102111816],["sel",-9.788525581359863],["▁Nu",-9.788947105407717],["che",-9.789331436157228],["▁Facebook",-9.78965950012207],["▁kepada",-9.7900390625],["家",-9.790146827697754],["pi",-9.791221618652344],["▁М",-9.792184829711914],["cha",-9.79363250732422],["ት",-9.79507827758789],["▁27",-9.7957181930542],["▁wir",-9.79737663269043],["▁også",-9.797423362731934],["可以",-9.79934787750244],["▁dag",-9.799413681030272],["▁як",-9.799580574035645],["nes",-9.799863815307615],["α",-9.800280570983888],["▁Ver",-9.800763130187988],["б",-9.801151275634766],["het",-9.801833152770996],["▁[...]",-9.803078651428224],["▁ми",-9.804325103759766],["▁thể",-9.804839134216309],["▁cum",-9.805012702941896],["です",-9.805391311645508],["▁અને",-9.806537628173828],["мо",-9.806841850280762],["W",-9.80726718902588],["ο",-9.808526039123535],["։",-9.80895709991455],["dy",-9.810847282409668],["▁Pe",-9.812826156616213],["▁Ku",-9.812969207763672],["san",-9.813743591308594],["ang",-9.814218521118164],["▁ከ",-9.81425666809082],["▁τη",-9.814400672912598],["▁dig",-9.816262245178224],["▁web",-9.816493034362791],["▁..",-9.817217826843262],["ز",-9.817319869995115],["▁א",-9.81776237487793],["ल",-9.818056106567385],["स",-9.818811416625977],["ι",-9.819283485412598],["cie",-9.820160865783691],["▁આ",-9.821587562561035],["▁maar",-9.821744918823242],["də",-9.821747779846191],["org",-9.821844100952148],["nte",-9.82237720489502],["ता",-9.82297420501709],["▁tam",-9.823495864868164],["▁orang",-9.82473087310791],["▁forma",-9.825316429138184],["lor",-9.82544231414795],["▁bisa",-9.825761795043944],["ite"
,-9.825878143310549],["《",-9.825922966003418],["мен",-9.826092720031738],["က",-9.826197624206545],["▁sen",-9.82648468017578],["tik",-9.82721996307373],["▁ten",-9.827960014343262],["所",-9.82800006866455],["▁และ",-9.828064918518066],["sh",-9.830385208129885],["▁60",-9.83043384552002],["▁við",-9.830649375915527],["▁để",-9.830670356750488],["▁ili",-9.83073902130127],["ana",-9.830772399902344],["▁және",-9.83199977874756],["▁unha",-9.83259391784668],["기",-9.832951545715332],["▁이",-9.833052635192873],["ger",-9.83632469177246],["dar",-9.837039947509766],["ton",-9.837080955505373],["ಯ",-9.8377046585083],["▁Я",-9.838178634643556],["mas",-9.839102745056152],["Z",-9.84099006652832],["▁että",-9.841038703918455],["ې",-9.841146469116213],["▁DE",-9.841904640197754],["ster",-9.84419059753418],["本",-9.844562530517578],["tra",-9.845602989196776],["▁vel",-9.84571933746338],["▁Ki",-9.845885276794434],["▁Ni",-9.846221923828123],["ге",-9.847725868225098],["▁З",-9.848114013671877],["ните",-9.848400115966797],["▁mereka",-9.849702835083008],["፣",-9.8505220413208],["▁più",-9.850547790527344],["ကို",-9.851463317871094],["▁program",-9.851987838745115],["▁He",-9.85229778289795],["12",-9.852426528930664],["▁որ",-9.85244369506836],["▁2010",-9.85273551940918],["▁بعد",-9.852779388427734],["▁nuk",-9.853519439697266],["我们",-9.854394912719728],["лі",-9.854496002197266],["▁Б",-9.854631423950195],["ció",-9.855167388916016],["ala",-9.855179786682127],["▁pot",-9.85542106628418],["▁ایک",-9.855673789978027],["X",-9.855915069580078],["▁may",-9.85609245300293],["asi",-9.856667518615724],["時",-9.856690406799316],["▁مع",-9.858101844787598],["ได้",-9.858123779296877],["者",-9.85965633392334],["20",-9.859763145446776],["▁jy",-9.86050510406494],["ем",-9.860770225524902],["▁sal",-9.861080169677734],["水",-9.861298561096191],["ný",-9.861462593078612],["î",-9.861719131469728],["▁has",-9.86198902130127],["會",-9.862529754638672],["ական",-9.863951683044434],["nje",-9.864765167236328],["С",-9.864896774291992],["▁nach",-9.865097999572754],["着",-9.865845680236816],["▁ab",-9.866167068481444],["!!!",-9.866480827331545],["▁anda",-9.867228507995604],["lah",-9.8689603805542],["▁ਨੂੰ",-9.869314193725586],["▁بھی",-9.869390487670898],["ને",-9.869439125061035],["▁está",-9.87023639678955],["인",-9.870577812194824],["▁Man",-9.87081241607666],["ii",-9.872536659240724],["www",-9.872698783874512],["▁Den",-9.872907638549805],["▁تا",-9.873370170593262],["les",-9.87338161468506],["▁ہیں",-9.87491512298584],["sko",-9.875309944152832],["能",-9.876049995422363],["▁ska",-9.876131057739258],["▁bij",-9.876956939697266],["分",-9.877098083496094],["ты",-9.8779878616333],["▁ben",-9.878954887390137],["၊",-9.87895965576172],["▁få",-9.8790864944458],["》",-9.879623413085938],["来",-9.880608558654783],["前",-9.88063144683838],["▁या",-9.88158130645752],["▁fer",-9.882009506225586],["ः",-9.884220123291016],["▁ilə",-9.884374618530272],["им",-9.88473892211914],["sin",-9.885518074035645],["ے",-9.886985778808594],["ν",-9.88729476928711],["일",-9.887825012207031],["▁bet",-9.887919425964355],["▁super",-9.887984275817873],["▁zo",-9.888474464416504],["ran",-9.88912582397461],["▁δεν",-9.88963794708252],["rum",-9.89022159576416],["ais",-9.890250205993652],["▁خود",-9.89084243774414],["▁cha",-9.89179229736328],["▁avec",-9.892372131347656],["ға",-9.893202781677246],["▁нас",-9.89345932006836],["说",-9.895004272460938],["▁dei",-9.8951416015625],["▁sub",-9.896316528320312],["▁era",-9.897008895874023],["▁یک",-9.897271156311035],["▁này",-9.897465705871582],["▁न",-9.898076057434082],["ung",-9.898401260375977],[
"▁bra",-9.898963928222656],["▁http",-9.899395942687988],["▁katika",-9.89944076538086],["▁би",-9.899600982666016],["ների",-9.899686813354492],["ше",-9.90247631072998],["ന്",-9.903742790222168],["▁ek",-9.904006004333496],["▁oleh",-9.904341697692873],["கள்",-9.904460906982422],["▁kom",-9.905263900756836],["▁war",-9.905593872070312],["▁one",-9.90587043762207],["▁ہے۔",-9.907909393310549],["手",-9.908116340637209],["或",-9.908873558044434],["tes",-9.909648895263672],["▁ප්",-9.910318374633787],["▁هو",-9.910398483276367],["*",-9.910673141479492],["▁dal",-9.911412239074709],["▁như",-9.911436080932615],["▁esta",-9.912199020385742],["yan",-9.91275691986084],["ל",-9.913475036621094],["▁pra",-9.913619995117188],["▁sua",-9.91419506072998],["▁j",-9.914478302001951],["nja",-9.91512966156006],["▁nur",-9.915382385253906],["▁It",-9.915989875793455],["ria",-9.91664695739746],["自己",-9.9168062210083],["▁pred",-9.916930198669434],["으로",-9.91943645477295],["▁რომ",-9.919962882995604],["▁ли",-9.920930862426758],["▁Con",-9.921161651611328],["▁chi",-9.92136573791504],["▁über",-9.922694206237791],["▁just",-9.923492431640623],["▁sit",-9.92365550994873],["11",-9.924934387207031],["og",-9.924964904785156],["ј",-9.925384521484377],["án",-9.925968170166016],["യ",-9.926758766174316],["zu",-9.927132606506348],["▁οι",-9.927188873291016],["ът",-9.927404403686523],["▁ги",-9.927617073059082],["ები",-9.927796363830566],["▁about",-9.929567337036133],["ý",-9.929929733276367],["ud",-9.929936408996582],["▁არ",-9.930120468139648],["▁koji",-9.930209159851074],["去",-9.930509567260742],["▁ќе",-9.930649757385254],["ner",-9.93186855316162],["rá",-9.93200969696045],["son",-9.932287216186523],["▁कि",-9.932945251464844],["ida",-9.933101654052734],["ła",-9.93359661102295],["她",-9.934749603271484],["ari",-9.934962272644045],["nom",-9.935545921325684],["▁ни",-9.935769081115724],["▁یا",-9.937220573425291],["▁r",-9.938166618347168],["して",-9.938878059387209],["هم",-9.939101219177246],["▁mig",-9.939498901367188],["▁kur",-9.940613746643066],["▁ය",-9.941390991210938],["▁dell",-9.941688537597656],["▁mag",-9.942012786865234],["▁има",-9.942353248596191],["át",-9.942367553710938],["▁Η",-9.94288444519043],["ně",-9.943405151367188],["▁29",-9.9439115524292],["lari",-9.944229125976562],["era",-9.944847106933594],["▁بال",-9.945484161376951],["▁ن",-9.9458646774292],["ски",-9.947222709655762],["tel",-9.947428703308104],["А",-9.94784927368164],["▁شده",-9.94789218902588],["ска",-9.948237419128418],["▁være",-9.948610305786133],["▁τις",-9.949838638305664],["▁пре",-9.950440406799316],["▁foi",-9.951414108276367],["kon",-9.95155429840088],["ł",-9.951809883117676],["▁nel",-9.952574729919434],["му",-9.95265769958496],["▁Para",-9.95346450805664],["▁Да",-9.953920364379885],["ზე",-9.954145431518556],["Y",-9.954444885253906],["ata",-9.954557418823242],["▁их",-9.954825401306152],["将",-9.955137252807615],["mar",-9.956571578979492],["ker",-9.956841468811035],["▁Д",-9.958809852600098],["ет",-9.959213256835938],["▁Jeg",-9.959678649902344],["ên",-9.959990501403809],["▁time",-9.960152626037598],["▁Les",-9.960408210754396],["▁San",-9.960893630981444],["▁sed",-9.961004257202148],["▁(2)",-9.961138725280762],["▁fost",-9.961211204528809],["▁его",-9.962385177612305],["▁dating",-9.962738990783691],["▁пере",-9.963849067687988],["2018",-9.964048385620115],["del",-9.964594841003418],["aren",-9.964899063110352],["▁%",-9.96491813659668],["lig",-9.965150833129885],["▁ap",-9.965554237365724],["ना",-9.965630531311035],["▁му",-9.966466903686523],["nej",-9.966482162475586],["▁ש",-9.966862678527832],["
ди",-9.966891288757324],["▁Anda",-9.966980934143066],["ով",-9.967509269714355],["ens",-9.967814445495604],["▁gan",-9.968795776367188],["▁де",-9.968825340270996],["iga",-9.96905517578125],["ş",-9.969118118286133],["land",-9.969223022460938],["▁التي",-9.969587326049805],["ੀ",-9.969589233398438],["▁ഒരു",-9.969820976257324],["▁site",-9.970717430114746],["▁ای",-9.972996711730955],["pu",-9.973203659057615],["▁ఈ",-9.975555419921877],["ой",-9.975729942321776],["後",-9.97607421875],["▁като",-9.976680755615234],["ic",-9.977563858032228],["kom",-9.97810173034668],["▁Dr",-9.978767395019531],["▁plan",-9.980316162109377],["▁Et",-9.98088550567627],["▁bạn",-9.981314659118652],["▁bat",-9.981350898742676],["وا",-9.981459617614746],["ns",-9.98166847229004],["▁भी",-9.98183536529541],["▁200",-9.983458518981934],["zione",-9.98390769958496],["vu",-9.984488487243652],["▁Mar",-9.985547065734863],["▁km",-9.985692024230955],["する",-9.985987663269045],["ند",-9.986000061035156],["▁pero",-9.987058639526367],["ని",-9.987085342407228],["▁tem",-9.987920761108398],["▁hier",-9.988622665405272],["▁пра",-9.989030838012695],["▁wy",-9.990187644958496],["че",-9.990199089050291],["名",-9.99026870727539],["▁lebih",-9.992229461669922],["υ",-9.993206024169922],["▁gratis",-9.99336051940918],["ali",-9.993936538696287],["最",-9.99415683746338],["心",-9.994964599609377],["▁dapat",-9.99508571624756],["tal",-9.995355606079102],["▁ee",-9.996748924255373],["เป็น",-9.996828079223633],["▁bien",-9.997424125671388],["▁سال",-9.998866081237791],["။",-9.999348640441896],["▁auto",-9.999699592590332],["▁out",-9.999876976013184],["une",-10.000410079956056],["▁लिए",-10.00245189666748],["▁При",-10.00346851348877],["tis",-10.003619194030762],["▁Google",-10.003649711608888],["▁මේ",-10.00365161895752],["▁sus",-10.00368881225586],["▁Por",-10.003738403320312],["18",-10.004104614257812],["▁internet",-10.004253387451172],["▁us",-10.005047798156738],["▁dla",-10.005452156066896],["▁kommer",-10.005468368530272],["vis",-10.005738258361816],["▁telah",-10.00576114654541],["后",-10.006170272827148],["▁cm",-10.006657600402832],["az",-10.00705623626709],["▁via",-10.007356643676758],["ion",-10.007582664489746],["▁air",-10.007615089416504],["▁Su",-10.0081205368042],["▁News",-10.00886058807373],["การ",-10.008912086486816],["ци",-10.00900650024414],["▁they",-10.009111404418944],["15",-10.009882926940918],["▁noch",-10.010698318481444],["▁làm",-10.011035919189451],["▁Her",-10.013629913330078],["▁ад",-10.01488208770752],["这",-10.014904022216797],["▁Bi",-10.014976501464844],["得",-10.015206336975098],["▁Ce",-10.015544891357422],["dor",-10.017091751098633],["▁sama",-10.017449378967283],["ส",-10.017537117004396],["▁amin",-10.017988204956056],["▁nog",-10.018686294555664],["▁Но",-10.020744323730469],["ја",-10.020971298217772],["▁Ba",-10.021137237548828],["?”",-10.021236419677734],["نى",-10.021561622619627],["▁daha",-10.021620750427246],["ной",-10.02175235748291],["▁Das",-10.022146224975586],["ද",-10.02228832244873],["一个",-10.02231788635254],["chi",-10.022672653198242],["ха",-10.023207664489746],["cz",-10.023832321166992],["▁delle",-10.024453163146973],["tak",-10.024555206298828],["▁ani",-10.02527141571045],["ої",-10.026371002197266],["▁från",-10.027546882629396],["▁lui",-10.028626441955566],["▁hal",-10.029568672180176],["▁công",-10.029674530029297],["▁ud",-10.029744148254396],["▁får",-10.029852867126465],["ные",-10.030242919921877],["다",-10.031167030334473],["▁kann",-10.03140354156494],["▁2009",-10.03256607055664],["ți",-10.032822608947754],["工作",-10.033163070678713],["▁wird",-10.0346479
41589355],["nik",-10.0358247756958],["ту",-10.03604793548584],["cia",-10.036248207092283],["▁like",-10.03654670715332],["▁đến",-10.036843299865724],["స్",-10.037308692932127],["lər",-10.03803539276123],["ati",-10.04028606414795],["▁boleh",-10.041252136230469],["iti",-10.041521072387695],["чи",-10.041521072387695],["▁5.",-10.041637420654297],["▁về",-10.042037010192873],["か",-10.04223346710205],["▁će",-10.04372215270996],["▁कर",-10.043868064880373],["▁més",-10.044816970825195],["时",-10.04490089416504],["მა",-10.045514106750488],["ron",-10.045605659484863],["ک",-10.045624732971191],["▁had",-10.045928955078123],["公司",-10.046028137207031],["▁мен",-10.046231269836426],["ության",-10.04627513885498],["天",-10.046700477600098],["▁khi",-10.04730987548828],["ді",-10.047443389892578],["▁bar",-10.047928810119627],["▁TV",-10.049120903015137],["▁door",-10.04969310760498],["ата",-10.050006866455078],["▁At",-10.050277709960938],["▁Pre",-10.05184555053711],["▁með",-10.05189609527588],["ting",-10.051998138427734],["▁lại",-10.052078247070312],["▁einen",-10.052157402038574],["▁his",-10.053574562072754],["म",-10.053759574890137],["▁tudi",-10.053915977478027],["▁kuwa",-10.05410099029541],["▁نہیں",-10.05649185180664],["▁sta",-10.05655574798584],["・",-10.057239532470703],["▁hun",-10.057872772216797],["▁tri",-10.057950019836426],["met",-10.05826187133789],["ð",-10.058795928955078],["中国",-10.060060501098633],["▁seg",-10.060256958007812],["▁nad",-10.06166648864746],["ق",-10.06198787689209],["è",-10.062427520751951],["ale",-10.062884330749512],["▁31",-10.064325332641602],["▁lang",-10.064451217651367],["▁یہ",-10.06460666656494],["▁kon",-10.065000534057615],["▁inter",-10.065033912658691],["ра",-10.065340995788574],["▁ole",-10.06605052947998],["▁cũng",-10.06611442565918],["ల",-10.066997528076172],["▁►",-10.06723976135254],["una",-10.069412231445312],["▁Het",-10.069844245910645],["že",-10.07060718536377],["గా",-10.070664405822754],["▁Har",-10.070981979370115],["ня",-10.072809219360352],["tos",-10.073068618774414],["▁efter",-10.073628425598145],["▁raz",-10.073638916015623],["更",-10.073692321777344],["nim",-10.074004173278809],["▁see",-10.074136734008787],["ರ",-10.074745178222656],["▁kr",-10.074968338012695],["▁те",-10.076272010803224],["var",-10.076531410217283],["க்",-10.07656478881836],["해",-10.077872276306152],["व",-10.079258918762209],["从",-10.079513549804688],["ło",-10.079678535461426],["▁przy",-10.079792022705078],["▁anche",-10.080223083496094],["ების",-10.081948280334473],["gar",-10.08201789855957],["三",-10.08286190032959],["gan",-10.0829496383667],["▁todo",-10.083439826965332],["let",-10.084388732910156],["▁kot",-10.084399223327637],["ц",-10.085404396057127],["len",-10.085999488830566],["▁gli",-10.086030006408691],["▁kui",-10.086820602416992],["дан",-10.087727546691896],["ť",-10.088337898254396],["ב",-10.088537216186523],["▁много",-10.088632583618164],["▁::",-10.089898109436035],["▁како",-10.090442657470703],["▁Т",-10.090789794921877],["▁ја",-10.09135913848877],["化",-10.09167766571045],["ે",-10.09262466430664],["▁पनि",-10.092832565307615],["mer",-10.093677520751951],["▁pun",-10.095030784606934],["ии",-10.09580421447754],["▁شود",-10.096189498901367],["▁II",-10.098589897155762],["ба",-10.098859786987305],["η",-10.099128723144531],["外",-10.099468231201172],["▁کیا",-10.100441932678224],["▁أو",-10.100496292114258],["پ",-10.101223945617676],["對",-10.101308822631836],["行",-10.10196304321289],["“,",-10.102327346801758],["▁Home",-10.102584838867188],["金",-10.10448169708252],["▁מה",-10.104567527770996],["里",-10.10524082183838]
,["යි",-10.106109619140623],["ር",-10.106537818908691],["▁foto",-10.106544494628906],["▁ਦਾ",-10.10678482055664],["▁saat",-10.106802940368652],["▁који",-10.106995582580566],["ര്",-10.107054710388184],["▁Go",-10.108052253723145],["力",-10.10842514038086],["▁roz",-10.108458518981934],["▁ແລະ",-10.10860538482666],["와",-10.108624458312988],["▁2008",-10.108726501464844],["▁Am",-10.11007308959961],["▁fyrir",-10.111675262451172],["č",-10.112030982971191],["ப்",-10.11219310760498],["生",-10.113627433776855],["▁Os",-10.11382293701172],["▁haben",-10.114025115966797],["ක",-10.11488437652588],["น",-10.116049766540527],["ف",-10.116292953491213],["▁най",-10.11648654937744],["ля",-10.116928100585938],["▁naj",-10.117053031921388],["ให้",-10.117462158203123],["କୁ",-10.117698669433594],["up",-10.117767333984377],["▁дека",-10.11812973022461],["想",-10.118605613708496],["▁жана",-10.118722915649414],["▁Ca",-10.118749618530272],["по",-10.119121551513672],["▁net",-10.12051486968994],["ـ",-10.12060260772705],["▁sont",-10.120989799499512],["▁get",-10.121017456054688],["▁جا",-10.121893882751465],["ج",-10.123574256896973],["▁تي",-10.12393283843994],["ם",-10.12433910369873],["▁uu",-10.12532901763916],["ча",-10.12547206878662],["▁data",-10.126009941101074],["▁hay",-10.128010749816896],["ab",-10.128340721130373],["▁आणि",-10.129305839538574],["▁ham",-10.12962532043457],["再",-10.13009548187256],["▁ở",-10.130666732788086],["▁far",-10.131063461303713],["tie",-10.132451057434082],["▁ro",-10.132527351379396],["ica",-10.132535934448242],["▁nous",-10.132699966430664],["mail",-10.133148193359377],["ει",-10.13366985321045],["▁کار",-10.134190559387209],["ју",-10.13485336303711],["ling",-10.136800765991213],["▁pag",-10.137310981750488],["▁Is",-10.13754940032959],["▁kako",-10.13797664642334],["▁तो",-10.13819980621338],["ನ",-10.138598442077637],["る",-10.139140129089355],["▁Ber",-10.139314651489258],["):",-10.139434814453123],["ను",-10.139455795288086],["▁Ci",-10.139514923095703],["▁mot",-10.13982391357422],["▁samo",-10.140141487121582],["vin",-10.140460014343262],["▁inn",-10.141190528869627],["▁менен",-10.14126968383789],["리",-10.141365051269531],["▁olarak",-10.141621589660645],["кі",-10.142203330993652],["χ",-10.142663955688477],["న",-10.143365859985352],["▁სა",-10.143719673156738],["▁Lo",-10.144014358520508],["ી",-10.144140243530272],["▁ही",-10.145303726196287],["ಿ",-10.145376205444336],["ስ",-10.14547061920166],["▁Nam",-10.145750999450684],["ния",-10.147595405578612],["ją",-10.147822380065918],["사",-10.148526191711426],["又",-10.148683547973633],["▁500",-10.149158477783203],["сы",-10.149516105651855],["▁ف",-10.149760246276855],["ili",-10.149961471557615],["ња",-10.150938987731934],["కు",-10.150958061218262],["vat",-10.15144157409668],["▁Ο",-10.152249336242676],["res",-10.15234088897705],["向",-10.152971267700195],["い",-10.153070449829102],["看",-10.153470039367676],["vad",-10.15419101715088],["▁ও",-10.155773162841797],["՝",-10.155891418457031],["▁má",-10.156039237976074],["ത്",-10.156084060668944],["elle",-10.156606674194336],["▁hy",-10.156662940979004],["▁было",-10.156805038452148],["▁aku",-10.15689182281494],["自",-10.157424926757812],["하는",-10.157720565795898],["▁cada",-10.157994270324709],["▁kung",-10.15821361541748],["▁tik",-10.158401489257812],["ు",-10.158489227294922],["市",-10.158578872680664],["▁sẽ",-10.160768508911133],["▁tur",-10.16172695159912],["มา",-10.162999153137209],["▁Ali",-10.163493156433104],["▁॥",-10.164254188537598],["▁satu",-10.164278030395508],["▁ότι",-10.164898872375488],["ો",-10.165328025817873],["▁ඒ",-10.16576671600
3418],["प",-10.16615867614746],["▁अ",-10.166707992553713],["је",-10.16684627532959],["മ",-10.167121887207031],["tek",-10.167279243469238],["tus",-10.167495727539062],["▁І",-10.167570114135742],["ন",-10.167640686035156],["op",-10.16852569580078],["▁pi",-10.169116020202637],["න්",-10.16966724395752],["ire",-10.169726371765137],["法",-10.169986724853516],["ным",-10.171371459960938],["ţi",-10.171446800231934],["▁są",-10.171615600585938],["rik",-10.17229461669922],["ан",-10.172318458557127],["▁това",-10.173538208007812],["▁це",-10.17403793334961],["▁pie",-10.174092292785645],["نىڭ",-10.174494743347168],["▁dy",-10.17457675933838],["▁On",-10.174945831298828],["▁çok",-10.175436973571776],["ਾਂ",-10.176222801208496],["來",-10.17626953125],["很",-10.17644214630127],["시",-10.17652702331543],["▁tempo",-10.17693328857422],["▁sé",-10.177938461303713],["▁байна",-10.178293228149414],["▁xwe",-10.17856216430664],["스",-10.17860984802246],["ном",-10.179258346557615],["ών",-10.179864883422852],["▁if",-10.180391311645508],["rat",-10.180675506591797],["▁мы",-10.181035995483398],["▁Jo",-10.181137084960938],["▁nam",-10.181392669677734],["mis",-10.1820650100708],["ша",-10.182333946228027],["▁עם",-10.182575225830078],["ate",-10.182609558105469],["事",-10.18264389038086],["▁beste",-10.183098793029783],["▁تر",-10.183866500854492],["ille",-10.183968544006348],["van",-10.184060096740724],["це",-10.184333801269531],["ser",-10.184370994567873],["▁או",-10.184426307678224],["▁नहीं",-10.185298919677734],["ный",-10.185860633850098],["▁bil",-10.18669891357422],["▁todos",-10.186707496643066],["▁ఆ",-10.186772346496582],["မ",-10.18685817718506],["▁tan",-10.187416076660156],["ram",-10.18814754486084],["od",-10.188192367553713],["▁און",-10.188521385192873],["း",-10.18860912322998],["ል",-10.188889503479004],["▁=",-10.18993854522705],["▁Azərbaycan",-10.19175910949707],["▁אין",-10.191765785217283],["▁Ge",-10.192121505737305],["▁Allah",-10.192307472229004],["nd",-10.192818641662598],["▁iki",-10.192964553833008],["▁അ",-10.194012641906738],["어",-10.194202423095703],["提供",-10.194330215454102],["做",-10.19460391998291],["▁lagi",-10.194988250732422],["▁kami",-10.19521141052246],["▁də",-10.196165084838867],["inen",-10.196702003479004],["▁स",-10.196903228759766],["▁jou",-10.197039604187012],["▁आ",-10.19740104675293],["ներ",-10.197504997253418],["▁જ",-10.197917938232422],["▁ed",-10.198224067687988],["▁bila",-10.200002670288086],["ст",-10.20116901397705],["▁oraz",-10.20141887664795],["mat",-10.201783180236816],["▁કે",-10.201810836791992],["使用",-10.202255249023438],["inn",-10.203165054321287],["▁وي",-10.203167915344238],["▁mà",-10.20323371887207],["ная",-10.203649520874023],["▁uns",-10.203656196594238],["▁einer",-10.20379638671875],["▁година",-10.2039213180542],["▁ለ",-10.204327583312988],["ların",-10.206302642822266],["მ",-10.20677661895752],["uar",-10.208243370056152],["▁apa",-10.208284378051758],["fe",-10.20844841003418],["▁aber",-10.208765029907228],["▁vo",-10.209157943725586],["nos",-10.20959758758545],["és",-10.209969520568848],["uk",-10.21029281616211],["▁80",-10.21129322052002],["▁vào",-10.211941719055176],["ið",-10.21242332458496],["▁tại",-10.212557792663574],["就是",-10.21257209777832],["▁menjadi",-10.212688446044922],["▁دو",-10.213047981262209],["▁هر",-10.21345329284668],["line",-10.213820457458496],["са",-10.213927268981934],["▁car",-10.214239120483398],["▁tôi",-10.214473724365234],["▁soo",-10.214488983154297],["▁quan",-10.214818000793455],["▁пред",-10.215929985046388],["ac",-10.216479301452637],["ret",-10.216818809509276],["▁social",-10.21754
3601989746],["های",-10.219086647033691],["▁yra",-10.219722747802734],["자",-10.220314025878906],["▁ላይ",-10.2203950881958],["▁fin",-10.220762252807615],["con",-10.220809936523438],["山",-10.221181869506836],["▁35",-10.221728324890137],["▁μας",-10.221884727478027],["▁đó",-10.222025871276855],["mm",-10.222129821777344],["ม",-10.222335815429688],["pen",-10.22235107421875],["ën",-10.222803115844728],["수",-10.22321605682373],["▁ty",-10.223657608032228],["ത",-10.223766326904297],["入",-10.22387981414795],["ो",-10.224656105041504],["ende",-10.224910736083984],["ก",-10.225725173950195],["▁kā",-10.225732803344728],["▁ste",-10.226017951965332],["▁6.",-10.2262544631958],["▁Dan",-10.227883338928224],["▁Ho",-10.22808837890625],["▁voi",-10.228206634521484],["서",-10.228710174560549],["▁бы",-10.228808403015137],["▁što",-10.228925704956056],["▁mir",-10.229572296142578],["ок",-10.229917526245115],["illa",-10.230172157287598],["ները",-10.230875968933104],["či",-10.23092555999756],["▁don",-10.23169231414795],["▁እና",-10.231730461120604],["би",-10.232038497924805],["▁contra",-10.232131958007812],["ét",-10.232461929321287],["~",-10.232641220092772],["▁sebagai",-10.23275661468506],["于",-10.23301124572754],["▁oma",-10.235540390014648],["tta",-10.237249374389648],["tin",-10.238265991210938],["té",-10.238330841064451],["▁naar",-10.238677024841309],["▁aby",-10.239548683166504],["▁einem",-10.239616394042969],["ು",-10.239731788635254],["sal",-10.23981761932373],["О",-10.240120887756348],["▁Pri",-10.24030303955078],["ation",-10.240924835205078],["ಗಳು",-10.24173355102539],["nar",-10.24178409576416],["▁dels",-10.242469787597656],["iya",-10.242663383483888],["во",-10.242982864379885],["▁които",-10.243266105651855],["mak",-10.244142532348633],["▁hvor",-10.244271278381348],["ից",-10.244566917419434],["▁які",-10.246417045593262],["▁Op",-10.246492385864258],["將",-10.246612548828123],["▁شد",-10.246665000915527],["▁phải",-10.246679306030272],["lat",-10.246804237365724],["▁під",-10.248366355895996],["kat",-10.24858856201172],["▁sempre",-10.249338150024414],["▁זה",-10.249550819396973],["▁pat",-10.249645233154297],["▁meer",-10.25059986114502],["ક",-10.250821113586426],["▁opp",-10.251258850097656],["nych",-10.251280784606934],["▁blir",-10.252643585205078],["把",-10.252755165100098],["the",-10.252835273742676],["▁bli",-10.253229141235352],["▁casa",-10.253265380859377],["▁አ",-10.253313064575195],["ца",-10.25356674194336],["▁Hotel",-10.254399299621582],["ě",-10.254416465759276],["В",-10.254683494567873],["▁god",-10.255109786987305],["▁New",-10.255802154541016],["24",-10.25615119934082],["▁70",-10.256158828735352],["。”",-10.257157325744627],["▁hem",-10.257986068725586],["ның",-10.25799560546875],["▁së",-10.258001327514648],["▁their",-10.258460998535156],["М",-10.258774757385254],["▁Od",-10.260255813598633],["▁vez",-10.260557174682615],["▁what",-10.260702133178713],["▁từ",-10.260783195495604],["▁Ya",-10.261181831359863],["▁መ",-10.262310981750488],["▁Ti",-10.262781143188477],["мі",-10.2628755569458],["▁ano",-10.263166427612305],["па",-10.264010429382324],["ою",-10.2644681930542],["▁כל",-10.26455783843994],["▁Jag",-10.264787673950195],["▁sve",-10.264939308166504],["۔",-10.265408515930176],["до",-10.265698432922363],["ാ",-10.266816139221191],["하고",-10.266958236694336],["▁Om",-10.26795768737793],["▁לה",-10.26899528503418],["▁üçün",-10.269309997558594],["มี",-10.269579887390137],["yi",-10.270160675048828],["▁zum",-10.270479202270508],["0",-10.270647048950195],["▁ч",-10.270926475524902],["点",-10.271102905273438],["▁Par",-10.27198600769043],["▁Ang",-
10.272435188293455],["▁bin",-10.27268123626709],["ду",-10.272793769836426],["▁Q",-10.273056030273438],["lin",-10.273252487182615],["!”",-10.273452758789062],["▁និង",-10.273761749267578],["los",-10.273771286010742],["ї",-10.273789405822754],["▁še",-10.273933410644531],["▁arī",-10.273975372314451],["▁lahko",-10.274685859680176],["▁tar",-10.275010108947754],["▁бар",-10.275126457214355],["af",-10.27530002593994],["cy",-10.275564193725586],["▁mes",-10.275888442993164],["ज",-10.276573181152344],["ové",-10.278491973876951],["▁she",-10.279151916503906],["其",-10.279279708862305],["imo",-10.279314041137695],["nen",-10.280912399291992],["ली",-10.281566619873049],["นี้",-10.281667709350586],["دا",-10.281878471374512],["した",-10.281940460205078],["sha",-10.282450675964355],["ので",-10.283096313476562],["▁ye",-10.28310203552246],["▁roku",-10.283246994018556],["pl",-10.2833890914917],["▁sú",-10.283533096313477],["К",-10.283920288085938],["た",-10.283937454223633],["▁kao",-10.285103797912598],["▁П",-10.285151481628418],["▁nav",-10.285490036010742],["▁им",-10.286972045898438],["▁selv",-10.287105560302734],["▁هذا",-10.287479400634766],["kor",-10.28748893737793],["ड",-10.2877197265625],["▁vida",-10.288533210754396],["▁ہو",-10.288777351379396],["管理",-10.289185523986816],["ila",-10.289816856384276],["ված",-10.29061794281006],["▁bagi",-10.290685653686523],["ר",-10.290753364562988],["▁Про",-10.2908296585083],["▁سان",-10.290907859802246],["▁000",-10.290959358215332],["▁our",-10.2923002243042],["▁lain",-10.292325973510742],["▁prin",-10.292329788208008],["ties",-10.292548179626465],["▁media",-10.293183326721191],["—",-10.293254852294922],["wi",-10.294123649597168],["▁qo",-10.294146537780762],["ться",-10.294734954833984],["▁nhà",-10.2948637008667],["▁ó",-10.294994354248049],["тар",-10.295083999633787],["道",-10.29531478881836],["▁xa",-10.295499801635742],["▁Bo",-10.296242713928224],["할",-10.296403884887695],["ไป",-10.296697616577148],["▁tym",-10.29701328277588],["▁bis",-10.297282218933104],["dal",-10.298501968383787],["▁või",-10.298701286315918],["▁đi",-10.298723220825195],["▁przez",-10.298872947692873],["ว่า",-10.299677848815918],["kas",-10.2997465133667],["▁Ak",-10.300379753112791],["for",-10.300384521484377],["ਚ",-10.30062198638916],["ರು",-10.300689697265623],["ці",-10.300853729248049],["全",-10.301103591918944],["ona",-10.301302909851074],["▁tin",-10.301894187927246],["▁trên",-10.302811622619627],["ft",-10.302947998046877],["ity",-10.30307960510254],["▁tako",-10.303281784057615],["ման",-10.30344295501709],["▁Ich",-10.3036527633667],["16",-10.30447769165039],["▁tersebut",-10.304821014404297],["▁vara",-10.304854393005373],["ў",-10.30487060546875],["17",-10.305571556091309],["回",-10.305636405944824],["▁sudah",-10.306325912475586],["▁máis",-10.306572914123535],["ним",-10.306700706481934],["▁đầu",-10.306955337524414],["▁ekki",-10.307242393493652],["▁här",-10.307272911071776],["▁gu",-10.307915687561035],["▁ಈ",-10.30850315093994],["▁any",-10.308510780334473],["par",-10.308599472045898],["▁Mo",-10.30864143371582],["వ",-10.308712005615234],["ύ",-10.309557914733888],["▁bị",-10.310250282287598],["▁€",-10.31057357788086],["ide",-10.31063747406006],["eg",-10.310992240905762],["▁hàng",-10.311631202697754],["сти",-10.311715126037598],["▁90",-10.311766624450684],["▁над",-10.31203269958496],["las",-10.312142372131348],["▁України",-10.313157081604004],["ara",-10.313234329223633],["▁lan",-10.313328742980955],["ò",-10.313414573669434],["ent",-10.313552856445312],["於",-10.313652992248535],["ей",-10.31373405456543],["▁ayaa",-10.313814163208
008],["▁време",-10.313987731933594],["▁pak",-10.314249992370604],["▁ایران",-10.314749717712402],["▁chỉ",-10.314913749694824],["50",-10.315403938293455],["▁دی",-10.316397666931152],["▁ama",-10.316840171813965],["▁andre",-10.316853523254396],["ert",-10.316868782043455],["▁только",-10.317252159118652],["▁пры",-10.31749439239502],["።",-10.317533493041992],["▁tout",-10.318799018859863],["▁इस",-10.318937301635742],["ात",-10.319297790527344],["▁helt",-10.319589614868164],["tam",-10.319788932800291],["ి",-10.319917678833008],["▁prze",-10.320982933044434],["sı",-10.321332931518556],["▁Dar",-10.321393966674805],["ства",-10.322279930114746],["ค",-10.322586059570312],["dır",-10.323149681091309],["ಗೆ",-10.32321071624756],["▁học",-10.323234558105469],["▁بی",-10.323305130004885],["▁Han",-10.323670387268066],["ник",-10.324140548706056],["▁sự",-10.324231147766112],["......",-10.32432460784912],["▁Ra",-10.32447624206543],["ийг",-10.324687004089355],["打",-10.324806213378906],["tum",-10.324960708618164],["▁können",-10.325058937072754],["ин",-10.325106620788574],["▁nhiều",-10.32513427734375],["ും",-10.32522964477539],["ო",-10.325681686401367],["▁Tak",-10.326319694519045],["дар",-10.326604843139648],["▁mere",-10.327587127685549],["文",-10.328255653381348],["▁word",-10.328483581542969],["ib",-10.32899284362793],["▁بن",-10.32934284210205],["▁וה",-10.330668449401855],["▁ആ",-10.332711219787598],["▁tahun",-10.33332347869873],["▁ਨੇ",-10.333337783813477],["ė",-10.3336820602417],["という",-10.33398723602295],["▁того",-10.333995819091797],["表示",-10.334110260009766],["და",-10.33590316772461],["▁face",-10.3360013961792],["▁Е",-10.336628913879396],["▁thì",-10.33687686920166],["▁نظر",-10.337203025817873],["sten",-10.337320327758787],["ден",-10.337339401245115],["▁You",-10.338011741638184],["కి",-10.338071823120115],["▁seperti",-10.338494300842283],["▁ble",-10.338528633117676],["тай",-10.339089393615724],["25",-10.33994483947754],["▁mat",-10.340240478515623],["kel",-10.340811729431152],[".....",-10.340837478637695],["14",-10.341371536254885],["▁nak",-10.341371536254885],["ck",-10.341519355773926],["kal",-10.341656684875488],["▁jako",-10.34174919128418],["enie",-10.342806816101074],["▁وہ",-10.343199729919434],["സ്",-10.343242645263672],["owe",-10.343954086303713],["vel",-10.344741821289062],["▁व",-10.344931602478027],["花",-10.346095085144045],["sku",-10.348271369934082],["imi",-10.34861183166504],["num",-10.349081993103027],["จาก",-10.349431037902832],["▁edhe",-10.349469184875488],["▁Ve",-10.349566459655762],["▁worden",-10.350092887878418],["mento",-10.350177764892578],["▁پاکستان",-10.350655555725098],["▁động",-10.350712776184082],["▁smo",-10.351078033447266],["比",-10.351239204406738],["▁dette",-10.35169506072998],["ते",-10.352066040039062],["▁نام",-10.353020668029783],["sse",-10.353355407714844],["▁ع",-10.353537559509276],["度",-10.353941917419434],["tă",-10.35408878326416],["กับ",-10.35438060760498],["ici",-10.355048179626465],["▁그",-10.356576919555664],["▁bara",-10.357001304626465],["▁የሚ",-10.357388496398926],["ani",-10.357921600341797],["म्",-10.35842514038086],["Д",-10.35846996307373],["ap",-10.35852336883545],["Б",-10.358799934387209],["▁let",-10.359158515930176],["ira",-10.359292984008787],["క",-10.359417915344238],["お",-10.35943603515625],["▁ж",-10.360528945922852],["▁pu",-10.360706329345703],["▁solo",-10.36081886291504],["說",-10.361101150512695],["▁alt",-10.361186981201172],["▁sonra",-10.361265182495115],["če",-10.361592292785645],["▁tema",-10.361846923828123],["ర్",-10.36206340789795],["▁My",-10.362174987792969],["mus",-1
0.362568855285645],["ד",-10.362591743469238],["部",-10.36338996887207],["க",-10.363798141479492],["▁hari",-10.3644437789917],["▁pan",-10.364526748657228],["ट",-10.364582061767578],["▁Το",-10.365726470947266],["美",-10.366249084472656],["jem",-10.366548538208008],["特",-10.366652488708496],["nas",-10.36697769165039],["▁vagy",-10.367103576660156],["▁том",-10.36776351928711],["ble",-10.36789608001709],["નો",-10.368057250976562],["▁top",-10.368497848510742],["▁भएको",-10.36911392211914],["▁Internet",-10.36961555480957],["▁maka",-10.36974811553955],["▁mon",-10.370040893554688],["▁300",-10.37183952331543],["▁denne",-10.372269630432127],["那",-10.372777938842772],["▁nhất",-10.372973442077637],["并",-10.373148918151855],["▁Hi",-10.37380313873291],["▁fram",-10.373805046081545],["▁გა",-10.375194549560549],["mp",-10.375411033630373],["rin",-10.37561321258545],["▁45",-10.37584114074707],["gel",-10.376181602478027],["년",-10.376415252685549],["13",-10.376900672912598],["iem",-10.376952171325684],["▁når",-10.37712574005127],["▁most",-10.37738037109375],["▁there",-10.377476692199709],["។",-10.377963066101074],["се",-10.378053665161133],["주",-10.378337860107422],["اء",-10.37876033782959],["▁அ",-10.37883186340332],["▁2007",-10.378997802734377],["▁Video",-10.37961483001709],["▁Г",-10.379616737365724],["り",-10.379737854003906],["ా",-10.379801750183104],["▁если",-10.379837036132812],["ков",-10.380582809448242],["方",-10.38075065612793],["▁года",-10.381685256958008],["nog",-10.381863594055176],["ras",-10.382649421691896],["ס",-10.38300323486328],["ag",-10.384281158447266],["▁final",-10.38483428955078],["無",-10.385379791259766],["nis",-10.38579273223877],["▁час",-10.385811805725098],["ές",-10.386651992797852],["▁deze",-10.387197494506836],["▁లో",-10.387435913085938],["▁volt",-10.388036727905272],["▁הוא",-10.388176918029783],["▁oss",-10.388225555419922],["▁euro",-10.388361930847168],["ие",-10.388704299926758],["ль",-10.38882827758789],["▁mil",-10.388858795166016],["▁До",-10.389412879943848],["由",-10.389474868774414],["▁mata",-10.389899253845217],["bil",-10.389962196350098],["▁deg",-10.390121459960938],["▁isang",-10.39012622833252],["▁роз",-10.390233039855955],["▁nei",-10.39095687866211],["isi",-10.391180038452148],["ین",-10.391376495361328],["】",-10.39195442199707],["La",-10.392297744750977],["ই",-10.39230728149414],["tzen",-10.39240837097168],["▁она",-10.392420768737791],["ante",-10.392709732055664],["▁Kan",-10.392728805541992],["▁việc",-10.393152236938477],["▁کرد",-10.393208503723145],["Q",-10.393885612487791],["ç",-10.393969535827637],["▁Х",-10.394183158874512],[">",-10.39428424835205],["▁vol",-10.39458465576172],["ရ",-10.394736289978027],["ве",-10.396187782287598],["▁т",-10.396240234375],["需要",-10.396501541137695],["én",-10.397308349609377],["▁banyak",-10.397714614868164],["▁karena",-10.398309707641602],["mal",-10.398321151733398],["▁who",-10.398574829101562],["▁Ek",-10.398956298828123],["▁ind",-10.399481773376465],["▁سره",-10.399648666381836],["ית",-10.399799346923828],["aan",-10.40042781829834],["▁ning",-10.400471687316896],["тер",-10.400811195373535],["▁Mu",-10.401588439941406],["jas",-10.40187644958496],["▁amet",-10.40220832824707],["▁ու",-10.402382850646973],["▁när",-10.402398109436035],["ল",-10.402457237243652],["ր",-10.402555465698242],["ega",-10.402983665466309],["▁પણ",-10.403118133544922],["海",-10.403155326843262],["▁شما",-10.403318405151367],["▁као",-10.404129028320312],["▁деп",-10.404152870178224],["▁votre",-10.404757499694824],["▁mehr",-10.404879570007324],["▁real",-10.404899597167969],["▁ການ",-10.4050493
24035645],["▁Em",-10.405195236206056],["▁tal",-10.405531883239746],["▁Ro",-10.406256675720217],["ω",-10.406402587890623],["μ",-10.4064302444458],["▁ur",-10.406620979309082],["▁thành",-10.406638145446776],["ய",-10.407601356506348],["rit",-10.407776832580566],["ক",-10.407914161682127],["sto",-10.408238410949709],["▁dụng",-10.408251762390137],["enia",-10.408557891845703],["▁(3)",-10.408852577209473],["▁32",-10.408927917480469],["▁него",-10.409493446350098],["▁ко",-10.410086631774902],["▁они",-10.410955429077148],["▁Um",-10.41153621673584],["▁کان",-10.412057876586914],["▁Post",-10.412211418151855],["يا",-10.412357330322266],["起",-10.412541389465332],["க்க",-10.412827491760254],["▁qua",-10.41291618347168],["ій",-10.41318416595459],["য়",-10.413481712341309],["து",-10.413655281066896],["成",-10.414212226867676],["▁said",-10.414311408996582],["ക",-10.414579391479492],["▁would",-10.414769172668455],["ന",-10.414878845214844],["א",-10.41490650177002],["▁been",-10.414976119995115],["kov",-10.415026664733888],["▁mod",-10.41550636291504],["▁hiện",-10.415790557861328],["zioni",-10.415963172912598],["▁gaan",-10.416055679321287],["रा",-10.416900634765623],["stra",-10.416911125183104],["મ",-10.417552947998049],["我們",-10.417845726013184],["▁;",-10.418042182922363],["▁kes",-10.418070793151855],["ži",-10.418384552001951],["red",-10.41903591156006],["▁Online",-10.419229507446287],["ง",-10.419256210327148],["光",-10.419490814208984],["tir",-10.419569969177246],["ви",-10.420165061950684],["leg",-10.420540809631348],["▁prima",-10.420731544494627],["重",-10.421844482421877],["▁part",-10.42199420928955],["ato",-10.422419548034668],["▁بود",-10.42271900177002],["ಕ",-10.423157691955566],["▁cara",-10.423661231994627],["ਨ",-10.42394733428955],["▁dis",-10.424065589904783],["▁ਅਤੇ",-10.424256324768066],["40",-10.425334930419922],["▁’",-10.426040649414062],["nem",-10.426673889160156],["▁Во",-10.426695823669434],["▁also",-10.426990509033203],["bra",-10.427148818969728],["ര",-10.427223205566406],["長",-10.427331924438477],["▁bude",-10.427852630615234],["bat",-10.427998542785645],["▁бол",-10.42807388305664],["▁س",-10.42892360687256],["ด",-10.42946720123291],["ring",-10.430377006530762],["لا",-10.430670738220217],["dik",-10.43069076538086],["▁gran",-10.431148529052734],["▁them",-10.43118667602539],["▁Ви",-10.431404113769531],["▁(1",-10.431602478027344],["ล",-10.432281494140623],["▁jau",-10.43266487121582],["▁7.",-10.432825088500977],["ته",-10.433000564575195],["▁→",-10.43319606781006],["jan",-10.433771133422852],["מ",-10.433965682983398],["▁mm",-10.434611320495604],["▁або",-10.43475341796875],["▁права",-10.434781074523926],["च्या",-10.435188293457031],["gal",-10.435396194458008],["су",-10.43564510345459],["▁nói",-10.436110496520996],["ade",-10.436386108398438],["▁anti",-10.43658447265625],["fer",-10.436775207519531],["ා",-10.436800003051758],["▁која",-10.436954498291016],["▁parti",-10.43742847442627],["面",-10.4381685256958],["には",-10.43830108642578],["π",-10.438506126403809],["ט",-10.439042091369627],["ર",-10.439061164855955],["ון",-10.439263343811035],["ල",-10.439640045166016],["able",-10.440007209777832],["ama",-10.440314292907717],["▁Med",-10.4407377243042],["ما",-10.441020011901855],["න",-10.441137313842772],["▁út",-10.44175148010254],["▁vid",-10.441835403442385],["чу",-10.44201946258545],["ర",-10.44204044342041],["nosti",-10.442049026489258],["▁sport",-10.442052841186523],["َّ",-10.442155838012695],["▁στη",-10.44221019744873],["؛",-10.443130493164062],["▁سر",-10.443224906921388],["▁있는",-10.443526268005373],["▁Ga",-10.4438619613
64746],["വ",-10.444059371948242],["▁បាន",-10.4452486038208],["정",-10.445311546325684],["▁sul",-10.445382118225098],["▁St",-10.445844650268556],["ದಲ್ಲಿ",-10.445847511291504],["“.",-10.446001052856444],["▁მო",-10.446046829223633],["▁pode",-10.446359634399414],["ர்",-10.447246551513672],["ன்",-10.447263717651367],["ova",-10.447529792785645],["ných",-10.447555541992188],["▁már",-10.448555946350098],["▁б",-10.448741912841797],["▁mange",-10.449143409729004],["這",-10.44927978515625],["斯",-10.449560165405272],["tre",-10.45032787322998],["▁च",-10.450410842895508],["▁hele",-10.450772285461426],["▁telefon",-10.450900077819824],["ස්",-10.451286315917969],["▁motor",-10.451329231262209],["▁cel",-10.45139980316162],["く",-10.451481819152832],["ید",-10.452103614807127],["ებს",-10.45249080657959],["▁định",-10.452536582946776],["ג",-10.45311164855957],["▁năm",-10.45339584350586],["▁ይ",-10.453407287597656],["这个",-10.453999519348145],["▁tid",-10.454355239868164],["কে",-10.454445838928224],["▁yr",-10.454608917236328],["▁המ",-10.45490550994873],["and",-10.45492172241211],["▁caso",-10.456127166748049],["▁Nie",-10.456595420837402],["hat",-10.456612586975098],["▁लागि",-10.45684814453125],["▁Mae",-10.457321166992188],["ова",-10.457609176635742],["19",-10.457674980163574],["▁myös",-10.457966804504396],["▁Ny",-10.458199501037598],["▁wel",-10.45830535888672],["▁seus",-10.458853721618652],["ң",-10.459561347961426],["?\"",-10.459692001342772],["元",-10.459815979003906],["tro",-10.460393905639648],["▁तर",-10.460538864135742],["▁zur",-10.460793495178224],["ых",-10.460814476013184],["ika",-10.461380004882812],["ન",-10.461466789245604],["以及",-10.461631774902344],["mine",-10.46170139312744],["他们",-10.46209716796875],["▁wenn",-10.462282180786133],["▁best",-10.462435722351074],["▁czy",-10.462512969970703],["▁problem",-10.462763786315918],["ça",-10.46278190612793],["身",-10.4629487991333],["▁aux",-10.463884353637695],["kur",-10.46400260925293],["ës",-10.464062690734863],["▁faire",-10.464492797851562],["ల్",-10.464512825012209],["▁Ri",-10.46459674835205],["ଙ୍କ",-10.46484088897705],["▁محمد",-10.465039253234863],["ál",-10.465270042419434],["▁ଓ",-10.46550464630127],["ice",-10.465516090393066],["▁ተ",-10.46613883972168],["tem",-10.466638565063477],["ő",-10.467381477355955],["▁можно",-10.467409133911133],["生活",-10.467677116394045],["ito",-10.467925071716309],["▁při",-10.468008995056152],["▁sein",-10.468202590942385],["▁ამ",-10.468652725219728],["થી",-10.468860626220703],["▁Ein",-10.46894359588623],["▁Bir",-10.46925163269043],["▁mor",-10.469510078430176],["▁Ter",-10.469528198242188],["って",-10.469775199890137],["先",-10.469902992248535],["企业",-10.46993923187256],["▁vad",-10.47041130065918],["▁համար",-10.47042465209961],["▁Van",-10.471633911132812],["ად",-10.471853256225586],["▁mutta",-10.47209644317627],["▁Cu",-10.47216796875],["▁info",-10.472328186035156],["▁yo",-10.4723482131958],["▁jsem",-10.47309112548828],["си",-10.473932266235352],["ለ",-10.474207878112791],["車",-10.474225997924805],["▁Mas",-10.474679946899414],["▁Dit",-10.474691390991213],["të",-10.474884033203123],["장",-10.475115776062012],["て",-10.475837707519531],["▁دي",-10.476078033447266],["▁gi",-10.476272583007812],["▁bor",-10.476567268371582],["▁hotel",-10.477051734924316],["mme",-10.477601051330566],["dur",-10.47782039642334],["▁etc",-10.477835655212402],["ಸ್",-10.478389739990234],["▁ഈ",-10.47861099243164],["国",-10.478854179382324],["tele",-10.479180335998535],["ային",-10.479498863220217],["▁questo",-10.479846000671388],["▁ал",-10.479857444763184],["没有",-10.479923248291016],["▁
số",-10.480365753173828],["issa",-10.48043155670166],["대",-10.480751037597656],["vy",-10.480792045593262],["▁test",-10.481038093566896],["din",-10.481141090393066],["▁Fa",-10.482000350952148],["▁daripada",-10.48204517364502],["hy",-10.48223114013672],["▁دا",-10.48223114013672],["กัน",-10.482378959655762],["dra",-10.482638359069824],["▁nước",-10.482754707336426],["ksi",-10.484230041503906],["द",-10.484370231628418],["ै",-10.484569549560549],["ša",-10.484649658203123],["▁كل",-10.484674453735352],["▁það",-10.484750747680664],["న్",-10.484792709350586],["▁É",-10.48540210723877],["ləri",-10.48580837249756],["▁dolor",-10.48634910583496],["▁ነው",-10.486664772033691],["ही",-10.48707103729248],["tter",-10.487408638000488],["▁איז",-10.488099098205566],["ല്",-10.48812484741211],["ار",-10.4881591796875],["▁bare",-10.488274574279783],["▁some",-10.488542556762695],["▁thế",-10.489048957824709],["tt",-10.489277839660645],["▁ngày",-10.489498138427734],["▁tek",-10.489604949951172],["▁Pi",-10.490036010742188],["lang",-10.490046501159668],["تا",-10.4900541305542],["▁및",-10.491065979003906],["▁צו",-10.49110221862793],["▁هذه",-10.491169929504396],["మ",-10.491235733032228],["യും",-10.49143886566162],["idade",-10.491617202758787],["过",-10.491830825805664],["ise",-10.49184513092041],["▁NA",-10.491982460021973],["▁чтобы",-10.49201774597168],["ৰ",-10.492340087890623],["▁ένα",-10.492517471313477],["▁hos",-10.492593765258787],["ego",-10.492769241333008],["▁stor",-10.493446350097656],["▁bilan",-10.493677139282228],["▁csak",-10.495383262634276],["▁رو",-10.495586395263672],["▁ба",-10.495811462402344],["▁mình",-10.495851516723633],["ء",-10.495951652526855],["cho",-10.495996475219728],["▁nhân",-10.49610424041748],["ც",-10.496186256408691],["ص",-10.496241569519045],["த்",-10.496326446533203],["著",-10.496464729309082],["▁moment",-10.49656867980957],["▁jî",-10.496569633483888],["वा",-10.496707916259766],["▁len",-10.496922492980955],["▁کند",-10.497217178344728],["▁lub",-10.497325897216797],["τε",-10.497413635253906],["▁hanya",-10.49742603302002],["▁한",-10.497684478759766],["vor",-10.497817039489746],["▁باشد",-10.497930526733398],["▁alebo",-10.497989654541016],["или",-10.498215675354004],["▁لم",-10.498984336853027],["▁cui",-10.499229431152344],["nost",-10.499372482299805],["נו",-10.499560356140137],["ige",-10.499937057495115],["▁já",-10.500354766845703],["▁ru",-10.500371932983398],["▁sol",-10.500431060791016],["ua",-10.50047206878662],["21",-10.500844955444336],["▁छन्",-10.500948905944824],["ismo",-10.501720428466797],["▁פון",-10.502167701721191],["zen",-10.502351760864258],["Г",-10.50314712524414],["已",-10.50317096710205],["ś",-10.503241539001465],["fu",-10.503440856933594],["▁link",-10.504199981689451],["କ",-10.504220962524414],["▁sistema",-10.504342079162598],["▁which",-10.50479507446289],["कर",-10.505008697509766],["ש",-10.50506591796875],["ves",-10.50530242919922],["▁kat",-10.505346298217772],["eve",-10.505517959594728],["tte",-10.505537986755373],["路",-10.505640983581545],["bit",-10.505728721618652],["▁وا",-10.505772590637209],["rà",-10.505805015563965],["॥",-10.505807876586914],["lim",-10.50583553314209],[");",-10.506364822387695],["▁🙂",-10.506540298461914],["▁anos",-10.507036209106444],["▁Ad",-10.508672714233398],["▁три",-10.508866310119627],["▁кои",-10.509098052978516],["还",-10.50922393798828],["ח",-10.509325981140137],["▁может",-10.51007843017578],["▁сам",-10.51070499420166],["▁yeni",-10.511641502380373],["க்கு",-10.511667251586914],["บ",-10.51201343536377],["dən",-10.512091636657717],["게",-10.512680053710938],["▁Ab",-1
0.512757301330566],["፤",-10.513229370117188],["У",-10.513245582580566],["යක්",-10.513684272766112],["nika",-10.513893127441406],["▁있다",-10.514934539794922],["▁ele",-10.51504135131836],["▁All",-10.515074729919434],["nou",-10.51527976989746],["pas",-10.516013145446776],["Т",-10.516030311584473],["ura",-10.516220092773438],["同",-10.516242027282717],["▁موضوع",-10.516246795654297],["▁pela",-10.516318321228027],["至",-10.517054557800291],["▁form",-10.51724338531494],["▁ڪري",-10.517338752746582],["ções",-10.517722129821776],["▁veel",-10.518092155456545],["▁anak",-10.51811981201172],["▁chính",-10.518502235412598],["▁ná",-10.51877212524414],["pan",-10.519183158874512],["qui",-10.519267082214355],["内",-10.519302368164062],["▁Og",-10.519450187683104],["만",-10.51952838897705],["ż",-10.519769668579102],["▁ეს",-10.520298957824709],["nud",-10.520511627197266],["▁heeft",-10.520575523376465],["▁grande",-10.521044731140137],["if",-10.521430015563965],["ін",-10.521514892578123],["▁hindi",-10.521595001220703],["▁fait",-10.52186393737793],["ਲ",-10.5220365524292],["二",-10.522644996643066],["erne",-10.522711753845217],["▁gibi",-10.52291488647461],["rt",-10.523581504821776],["ε",-10.52397346496582],["▁etter",-10.524140357971191],["▁ти",-10.524431228637695],["▁الت",-10.524463653564451],["ա",-10.524551391601562],["▁м",-10.524551391601562],["▁nuo",-10.52469253540039],["wan",-10.525075912475586],["▁юм",-10.52509307861328],["në",-10.525158882141112],["▁mund",-10.525519371032717],["▁کرنے",-10.5259370803833],["所以",-10.52594757080078],["▁إ",-10.526481628417969],["▁kell",-10.526490211486816],["iu",-10.526814460754396],["ás",-10.526918411254885],["▁уже",-10.5272216796875],["▁8.",-10.528030395507812],["kk",-10.528328895568848],["▁ஒரு",-10.528525352478027],["hen",-10.528558731079102],["世界",-10.528743743896484],["아",-10.52936840057373],["τα",-10.529584884643556],["▁Este",-10.52967929840088],["ಕ್ಕೆ",-10.52991771697998],["ру",-10.52998161315918],["ే",-10.530012130737305],["ыг",-10.53034496307373],["▁when",-10.53049373626709],["その",-10.530744552612305],["ví",-10.531098365783691],["sch",-10.531314849853516],["▁sangat",-10.531408309936523],["▁الذي",-10.5316801071167],["نگ",-10.531815528869627],["ድ",-10.532782554626465],["፡፡",-10.53300952911377],["tha",-10.533196449279783],["ئ",-10.533735275268556],["ec",-10.533754348754885],["ग",-10.533771514892578],["റ",-10.53383445739746],["▁či",-10.53456974029541],["▁ça",-10.534764289855955],["▁chat",-10.535185813903809],["့",-10.535655975341797],["ح",-10.535843849182127],["बाट",-10.535893440246582],["▁make",-10.53598976135254],["ി",-10.53599739074707],["▁tas",-10.536897659301758],["▁150",-10.537199020385742],["▁Bar",-10.537405967712402],["േ",-10.5375337600708],["დ",-10.537830352783203],["▁Ik",-10.53823184967041],["lega",-10.538661003112791],["▁след",-10.53898811340332],["2017",-10.539077758789062],["▁habe",-10.539746284484863],["▁oor",-10.540016174316406],["▁masa",-10.540053367614746],["tet",-10.540800094604492],["▁आज",-10.541069984436035],["ย",-10.541543960571287],["inin",-10.542055130004885],["br",-10.542113304138184],["▁mobil",-10.542285919189451],["▁uno",-10.54232120513916],["作",-10.542333602905272],["▁skulle",-10.542972564697266],["量",-10.543007850646973],["▁є",-10.543742179870604],["▁ಮತ್ತು",-10.54380989074707],["台",-10.543880462646484],["성",-10.54422664642334],["ब",-10.544526100158691],["▁går",-10.544873237609863],["▁માટે",-10.544873237609863],["ство",-10.545217514038086],["ги",-10.54534912109375],["ित",-10.54590129852295],["▁ont",-10.545998573303224],["car",-10.54606819152832],["zy",-10.5
46131134033203],["แล้ว",-10.546197891235352],["▁desde",-10.54627513885498],["▁Com",-10.546387672424316],["త",-10.546436309814451],["ía",-10.546480178833008],["▁Ji",-10.54658031463623],["▁있습니다",-10.546741485595703],["▁This",-10.546879768371582],["个",-10.54692840576172],["▁2006",-10.547160148620604],["nica",-10.547222137451172],["▁sme",-10.54733180999756],["ар",-10.547737121582031],["▁model",-10.5479736328125],["拉",-10.548376083374023],["▁kadar",-10.548441886901855],["正",-10.548644065856934],["但是",-10.548784255981444],["▁koje",-10.549439430236816],["▁ing",-10.550089836120604],["sam",-10.550225257873535],["▁mundo",-10.550227165222168],["▁siis",-10.550235748291016],["▁́",-10.550413131713867],["では",-10.551331520080566],["▁By",-10.552094459533691],["จะ",-10.552456855773926],["▁काम",-10.55247402191162],["▁Ju",-10.552715301513672],["ри",-10.552727699279783],["▁Wa",-10.552825927734377],["ове",-10.552916526794434],["▁будет",-10.553267478942873],["▁tha",-10.55436897277832],["▁YA",-10.554576873779297],["ць",-10.554705619812012],["如",-10.555220603942873],["▁Var",-10.555230140686035],["物",-10.55605697631836],["▁jaar",-10.55632209777832],["uz",-10.556424140930176],["进行",-10.556740760803224],["▁hor",-10.556992530822754],["▁súa",-10.55721664428711],["▁quod",-10.557241439819336],["nega",-10.557400703430176],["▁res",-10.55780029296875],["▁ће",-10.55859661102295],["her",-10.558793067932127],["ර",-10.5589017868042],["▁Wir",-10.558913230895996],["ay",-10.558917045593262],["ക്ക",-10.558996200561523],["▁එක",-10.5591402053833],["▁email",-10.559274673461914],["خ",-10.55936336517334],["▁Jan",-10.559374809265137],["▁στα",-10.559616088867188],["发展",-10.560020446777344],["▁ការ",-10.560182571411133],["य",-10.56020736694336],["▁Or",-10.56029224395752],["ታ",-10.560484886169434],["▁ಅ",-10.56086540222168],["▁Lu",-10.561297416687012],["▁روز",-10.56153964996338],["都是",-10.561568260192873],["▁μια",-10.562411308288574],["ๆ",-10.562474250793455],["ex",-10.562864303588867],["ต",-10.562874794006348],["▁personal",-10.563122749328612],["▁LA",-10.563298225402832],["ල්",-10.563507080078123],["चे",-10.563859939575195],["▁ovat",-10.56402587890625],["▁Sam",-10.564115524291992],["▁الأ",-10.564518928527832],["യുടെ",-10.564559936523438],["▁col",-10.564800262451172],["rak",-10.56480884552002],["服务",-10.5651273727417],["än",-10.565133094787598],["lí",-10.565503120422363],["▁Im",-10.565521240234377],["yn",-10.565646171569824],["教育",-10.565655708312988],["▁ס",-10.565958976745604],["▁Та",-10.566132545471191],["▁នៅ",-10.566603660583496],["ረ",-10.566801071166992],["▁એક",-10.567191123962402],["ian",-10.567450523376465],["ไม่",-10.56796646118164],["ท",-10.568387031555176],["店",-10.56849193572998],["主",-10.56858730316162],["▁sistem",-10.568596839904783],["▁qe",-10.568646430969238],["ス",-10.56867790222168],["ті",-10.56870937347412],["ies",-10.568906784057615],["▁över",-10.569366455078123],["તા",-10.56937026977539],["dia",-10.569451332092283],["▁durante",-10.569783210754396],["▁قرار",-10.569905281066896],["▁cette",-10.57004737854004],["상",-10.57007884979248],["▁people",-10.570144653320312],["▁म",-10.570181846618652],["ত",-10.570423126220703],["▁ann",-10.570634841918944],["ły",-10.570659637451172],["jos",-10.5707426071167],["▁ses",-10.570916175842283],["▁dé",-10.57116413116455],["jar",-10.571813583374023],["▁ev",-10.571887016296388],["æ",-10.571956634521484],["▁quando",-10.572142601013184],["ев",-10.57221508026123],["ure",-10.572360038757324],["▁అ",-10.572509765625],["ить",-10.572928428649902],["лар",-10.573081970214844],["▁pla",-10.57325267791748],["▁01
",-10.573391914367676],["▁pasi",-10.57357692718506],["ಗಳ",-10.573847770690918],["▁če",-10.573994636535645],["▁შე",-10.574376106262209],["▁त",-10.574493408203123],["▁Ben",-10.574737548828123],["ία",-10.574782371520996],["క్",-10.57501983642578],["bor",-10.575140953063965],["▁Kar",-10.575204849243164],["▁bal",-10.575311660766602],["▁því",-10.57565975189209],["▁وت",-10.575865745544434],["▁você",-10.575922012329102],["▁tro",-10.57623291015625],["ре",-10.576340675354004],["▁God",-10.576351165771484],["θ",-10.57659912109375],["gy",-10.57680892944336],["▁рас",-10.57712459564209],["▁må",-10.577275276184082],["െ",-10.577590942382812],["如果",-10.577637672424316],["▁semua",-10.578158378601074],["▁ned",-10.578304290771484],["並",-10.578398704528809],["라",-10.578653335571287],["▁kata",-10.578818321228027],["ip",-10.579090118408203],["cer",-10.579151153564451],["▁want",-10.579343795776367],["▁person",-10.579697608947754],["▁የተ",-10.579999923706056],["▁id",-10.580293655395508],["ando",-10.580307960510254],["ü",-10.581048011779783],["▁niin",-10.581121444702148],["時間",-10.58145523071289],["老",-10.581537246704102],["▁Va",-10.581751823425291],["pro",-10.58215618133545],["ым",-10.582183837890623],["▁اگر",-10.582293510437012],["▁вас",-10.58239459991455],["স",-10.582533836364746],["via",-10.582634925842283],["ವ",-10.582708358764648],["ಸ",-10.582718849182127],["▁પર",-10.582901000976562],["ർ",-10.583044052124023],["area",-10.583158493041992],["ੇ",-10.583566665649414],["nice",-10.584016799926758],["har",-10.5840425491333],["ਕ",-10.58493709564209],["走",-10.585149765014648],["▁й",-10.585222244262695],["▁durch",-10.585244178771973],["▁bak",-10.585254669189451],["ket",-10.58580493927002],["▁sto",-10.585855484008787],["ија",-10.585932731628418],["▁moet",-10.58606243133545],["▁kontakt",-10.586248397827148],["sia",-10.58627700805664],["您",-10.586313247680664],["▁lu",-10.586337089538574],["▁۔",-10.586923599243164],["თა",-10.586994171142578],["ną",-10.587648391723633],["▁الا",-10.587677001953123],["▁وقت",-10.588051795959473],["▁hon",-10.588253021240234],["▁pelo",-10.588274002075195],["▁bol",-10.588335037231444],["ši",-10.588367462158203],["加",-10.588505744934082],["▁blev",-10.5885591506958],["tä",-10.588871002197266],["▁And",-10.588947296142578],["ити",-10.589126586914062],["তে",-10.589213371276855],["▁כ",-10.589378356933594],["나",-10.590023040771484],["▁ہیں۔",-10.590344429016112],["▁Вы",-10.590747833251951],["ού",-10.590987205505373],["▁than",-10.591230392456056],["▁val",-10.591564178466797],["ست",-10.591570854187012],["▁गर्न",-10.591585159301758],["▁быть",-10.591651916503906],["tud",-10.59178352355957],["લ",-10.591785430908203],["德",-10.591992378234863],["set",-10.59201717376709],["▁baru",-10.592430114746094],["▁तथा",-10.592578887939451],["▁Па",-10.592853546142578],["▁mana",-10.593135833740234],["▁gre",-10.593607902526855],["白",-10.593918800354004],["же",-10.595430374145508],["वि",-10.595863342285156],["▁سي",-10.596038818359377],["▁Iz",-10.59607982635498],["తో",-10.596153259277344],["cio",-10.596689224243164],["ob",-10.59674072265625],["▁נ",-10.597192764282228],["口",-10.597216606140137],["▁new",-10.597224235534668],["ста",-10.597542762756348],["taan",-10.597554206848145],["▁ak",-10.59757137298584],["▁gia",-10.597639083862305],["▁bio",-10.59774112701416],["▁còn",-10.59810733795166],["има",-10.59917163848877],["▁esse",-10.599272727966309],["ການ",-10.599359512329102],["▁hand",-10.599535942077637],["ਾ",-10.599907875061035],["▁ré",-10.600106239318848],["ρ",-10.60032844543457],["ပါ",-10.600371360778809],["▁cap",-10.60072517395019
5],["▁jos",-10.601194381713867],["▁were",-10.601404190063477],["ojn",-10.60158920288086],["лық",-10.601649284362791],["▁Ob",-10.601983070373535],["▁zi",-10.602131843566896],["▁მე",-10.602461814880373],["یم",-10.602676391601562],["▁есть",-10.602679252624512],["uta",-10.602703094482422],["▁land",-10.602826118469238],["▁biti",-10.60334014892578],["▁essere",-10.60334587097168],["▁वि",-10.603554725646973],["▁100%",-10.603645324707031],["ونه",-10.60376262664795],["▁nije",-10.604230880737305],["ि",-10.604382514953612],["100",-10.604726791381836],["▁noi",-10.604894638061523],["▁Pu",-10.605048179626465],["▁wil",-10.60530948638916],["▁години",-10.605372428894045],["▁end",-10.605533599853516],["▁secara",-10.605660438537598],["▁Á",-10.605838775634766],["së",-10.6062593460083],["▁dr",-10.606578826904297],["▁9.",-10.606626510620115],["ં",-10.606635093688965],["bal",-10.606804847717283],["▁hulle",-10.607422828674316],["▁але",-10.6074857711792],["▁Mit",-10.607648849487305],["원",-10.607780456542969],["▁2000",-10.607921600341797],["▁бо",-10.608014106750488],["▁nella",-10.608256340026855],["▁എന്ന",-10.608285903930664],["▁Н",-10.608781814575195],["och",-10.60890007019043],["ու",-10.609047889709473],["▁તો",-10.609601020812988],["fo",-10.609647750854492],["owych",-10.609648704528809],["安",-10.609816551208496],["▁hur",-10.61020851135254],["從",-10.610221862792969],["日本",-10.61032772064209],["්",-10.610386848449709],["▁muy",-10.6104097366333],["▁pm",-10.610958099365234],["վել",-10.611156463623049],["▁momento",-10.611482620239258],["tura",-10.611978530883787],["▁Mai",-10.612942695617676],["ela",-10.613298416137695],["nti",-10.613425254821776],["▁eru",-10.613969802856444],["▁gün",-10.61428165435791],["▁ipsum",-10.614540100097656],["ime",-10.614575386047363],["▁nou",-10.61474895477295],["სა",-10.614821434020996],["▁Malaysia",-10.614869117736816],["ول",-10.61489200592041],["եր",-10.614911079406738],["▁Š",-10.615174293518066],["nut",-10.615262031555176],["▁Twitter",-10.615336418151855],["end",-10.616128921508787],["ല",-10.616140365600586],["wo",-10.616158485412598],["부",-10.616432189941406],["感",-10.616707801818848],["▁anh",-10.61679744720459],["ados",-10.61680507659912],["vet",-10.617271423339844],["この",-10.61765193939209],["▁ora",-10.617825508117676],["▁Sex",-10.617989540100098],["▁total",-10.618694305419922],["▁sehr",-10.618732452392578],["区",-10.619330406188965],["開",-10.61936378479004],["▁전",-10.619794845581056],["▁tom",-10.619990348815918],["▁muito",-10.620343208312988],["sie",-10.62037181854248],["one",-10.62038230895996],["चा",-10.620397567749023],["ilor",-10.620572090148926],["▁mới",-10.620595932006836],["▁गर्ने",-10.620752334594728],["лу",-10.62116813659668],["▁čo",-10.621233940124512],["ൻ",-10.621627807617188],["▁normal",-10.62208652496338],["በ",-10.622323036193848],["▁2019",-10.622454643249512],["ดี",-10.622736930847168],["▁how",-10.623783111572266],["▁jam",-10.62405014038086],["▁ທີ່",-10.62441635131836],["ії",-10.624605178833008],["▁mest",-10.62480354309082],["▁base",-10.625165939331056],["▁nå",-10.62529468536377],["的人",-10.625495910644531],["▁$",-10.62550449371338],["▁себе",-10.625537872314451],["▁izan",-10.625761032104492],["▁हे",-10.62584114074707],["نے",-10.626527786254885],["▁کنید",-10.626678466796877],["ული",-10.627105712890623],["▁ere",-10.627148628234863],["▁Mal",-10.62737274169922],["트",-10.6273832321167],["ія",-10.627421379089355],["▁projekt",-10.628222465515137],["▁آپ",-10.628350257873535],["▁ಆ",-10.629545211791992],["▁ok",-10.630154609680176],["화",-10.630173683166504],["ros",-10.630335807800291]
,["det",-10.630422592163086],["lag",-10.630544662475586],["▁към",-10.630775451660156],["ско",-10.631144523620604],["▁gar",-10.631260871887209],["ē",-10.632182121276855],["ची",-10.632619857788086],["ted",-10.6329927444458],["til",-10.633220672607422],["etan",-10.633434295654297],["ovi",-10.633488655090332],["unk",-10.633880615234377],["ris",-10.63415813446045],["ão",-10.634206771850586],["▁Ш",-10.634687423706056],["નું",-10.63485336303711],["eren",-10.635190963745115],["▁မ",-10.635252952575684],["▁پی",-10.635354042053224],["ு",-10.635494232177734],["▁place",-10.636025428771973],["▁here",-10.636102676391602],["ట్",-10.636301040649414],["▁प्र",-10.63647747039795],["▁「",-10.636645317077637],["▁Kon",-10.636712074279783],["▁два",-10.637077331542969],["原",-10.637226104736328],["▁vill",-10.637535095214844],["ča",-10.637633323669434],["▁بار",-10.638322830200195],["▁Fi",-10.63893985748291],["▁время",-10.639025688171388],["▁dann",-10.63955783843994],["rar",-10.639910697937012],["▁még",-10.63991641998291],["lé",-10.640610694885254],["ගේ",-10.640853881835938],["▁ہی",-10.641064643859863],["▁پیش",-10.641136169433594],["▁п",-10.641190528869627],["過",-10.641229629516602],["▁просто",-10.641253471374512],["▁które",-10.641595840454102],["▁nó",-10.641717910766602],["ор",-10.6419095993042],["▁hơn",-10.642118453979492],["▁know",-10.642237663269045],["车",-10.64293098449707],["mes",-10.64376449584961],["▁neu",-10.643961906433104],["ি",-10.64426040649414],["lam",-10.644399642944336],["ità",-10.644448280334473],["▁shumë",-10.64450454711914],["▁pol",-10.644521713256836],["lv",-10.64453125],["▁એ",-10.64458179473877],["▁san",-10.645505905151367],["▁Islam",-10.645633697509766],["▁savo",-10.645888328552246],["▁godt",-10.645920753479004],["ře",-10.646245956420898],["品",-10.646556854248049],["▁Los",-10.646595001220703],["▁право",-10.647127151489258],["▁porno",-10.64724349975586],["ови",-10.647418975830078],["жа",-10.647510528564451],["ց",-10.64767074584961],["ог",-10.64849090576172],["▁после",-10.64849090576172],["▁comme",-10.649070739746094],["лік",-10.649341583251951],["▁ή",-10.649431228637695],["23",-10.649659156799316],["▁Из",-10.6497220993042],["国家",-10.650022506713867],["呢",-10.650110244750977],["▁ٿي",-10.650121688842772],["▁EU",-10.650175094604492],["ová",-10.650521278381348],["▁mare",-10.65096378326416],["▁seo",-10.651150703430176],["▁Ch",-10.651288986206056],["機",-10.6513032913208],["▁Ал",-10.65134334564209],["▁απο",-10.651601791381836],["▁خان",-10.651762008666992],["▁ако",-10.651997566223145],["▁porque",-10.652055740356444],["▁قبل",-10.652315139770508],["无",-10.652410507202148],["vit",-10.652471542358398],["적",-10.652782440185549],["▁schon",-10.652972221374512],["▁Việt",-10.652974128723145],["▁ඇති",-10.653183937072754],["▁साथ",-10.65339183807373],["tul",-10.653483390808104],["shi",-10.653794288635254],["27",-10.654029846191406],["ல்",-10.65423583984375],["த்த",-10.654547691345217],["tti",-10.654930114746094],["ır",-10.654947280883787],["▁Windows",-10.655129432678224],["гүй",-10.655394554138184],["▁Non",-10.655452728271484],["전",-10.655546188354492],["▁հետ",-10.655560493469238],["▁wordt",-10.655613899230955],["定",-10.655633926391602],["▁גם",-10.655728340148926],["тан",-10.656540870666504],["ս",-10.656609535217283],["▁ਲਈ",-10.656628608703612],["vol",-10.6570405960083],["월",-10.65719985961914],["▁Ze",-10.657838821411133],["▁nye",-10.657841682434082],["▁polo",-10.657907485961914],["▁other",-10.658279418945312],["▁theo",-10.658681869506836],["யில்",-10.658713340759276],["▁modo",-10.65878963470459],["larda",-10.65962028
503418],["get",-10.65964412689209],["ную",-10.65981101989746],["mon",-10.65986442565918],["▁â",-10.659882545471191],["xa",-10.660585403442385],["▁Indonesia",-10.660588264465332],["它",-10.660810470581056],["ती",-10.66093635559082],["യില്",-10.66094207763672],["位",-10.660980224609377],["▁lai",-10.66098403930664],["▁Pas",-10.661686897277832],["вер",-10.661845207214355],["▁immer",-10.661856651306152],["▁ਵਿੱਚ",-10.662160873413086],["▁nên",-10.66225242614746],["▁Så",-10.662493705749512],["▁sinh",-10.662970542907717],["idad",-10.663033485412598],["▁था",-10.663131713867188],["far",-10.66358470916748],["dom",-10.663667678833008],["▁giá",-10.66392993927002],["▁både",-10.664241790771484],["av",-10.664386749267578],["ουν",-10.665074348449709],["▁fe",-10.665154457092283],["▁той",-10.665172576904297],["▁hợp",-10.665218353271484],["П",-10.665396690368652],["點",-10.665595054626465],["ciones",-10.66594123840332],["tori",-10.666013717651367],["dis",-10.666020393371582],["ນ",-10.666407585144045],["▁waar",-10.666424751281738],["▁baik",-10.666658401489258],["▁cả",-10.668108940124512],["▁نا",-10.668128967285156],["ብ",-10.668155670166016],["ius",-10.668245315551758],["▁public",-10.669015884399414],["ије",-10.669050216674805],["▁которые",-10.669378280639648],["ott",-10.669930458068848],["▁thực",-10.670270919799805],["▁kam",-10.670729637145996],["ይ",-10.671215057373049],["സ",-10.671308517456056],["▁08",-10.671396255493164],["ал",-10.671719551086426],["▁године",-10.672141075134276],["长",-10.672165870666504],["▁Film",-10.672225952148438],["▁бир",-10.672245979309082],["ह",-10.672579765319824],["局",-10.672735214233398],["ራ",-10.673002243041992],["▁baş",-10.673175811767578],["▁even",-10.673285484313965],["▁são",-10.67379379272461],["lə",-10.67381477355957],["可能",-10.674077033996582],["▁autor",-10.67422580718994],["▁От",-10.674324035644531],["▁zaman",-10.674503326416016],["▁tuo",-10.674612998962402],["ਸ",-10.674620628356934],["▁vin",-10.67515468597412],["▁ch",-10.675163269042969],["▁pos",-10.675272941589355],["▁despre",-10.675370216369627],["▁mesmo",-10.675469398498535],["▁Α",-10.675508499145508],["▁På",-10.675554275512695],["▁mult",-10.675745010375977],["▁elit",-10.676128387451172],["▁need",-10.676177978515623],["▁ද",-10.676511764526367],["ил",-10.676675796508787],["▁მ",-10.676680564880373],["ป",-10.67682933807373],["▁merupakan",-10.676861763000488],["▁ana",-10.677079200744627],["头",-10.677634239196776],["▁día",-10.677809715270996],["▁කර",-10.678267478942873],["ෙන්",-10.678468704223633],["28",-10.678505897521973],["വും",-10.678793907165527],["▁put",-10.678946495056152],["▁افغانستان",-10.679054260253906],["▁sana",-10.679088592529297],["王",-10.679247856140137],["Х",-10.679941177368164],["या",-10.680283546447754],["!\"",-10.680371284484863],["西",-10.680465698242188],["ില്",-10.681086540222168],["美国",-10.68121337890625],["▁09",-10.681649208068848],["万",-10.681739807128906],["heid",-10.6820068359375],["自己的",-10.68210220336914],["跟",-10.6821928024292],["▁hebben",-10.682391166687012],["▁trans",-10.682683944702148],["ovať",-10.682762145996094],["vá",-10.682783126831056],["▁lý",-10.68321704864502],["▁vý",-10.683711051940918],["▁Hy",-10.68385410308838],["тэй",-10.683960914611816],["ак",-10.685211181640623],["利",-10.685332298278809],["通",-10.685455322265623],["▁аб",-10.685542106628418],["ido",-10.685636520385742],["▁33",-10.685638427734377],["▁све",-10.685699462890623],["ող",-10.685836791992188],["ham",-10.685907363891602],["▁siden",-10.68593978881836],["▁way",-10.685949325561523],["ด้วย",-10.685986518859863],["▁programa",-10.6861
5436553955],["▁этом",-10.686177253723145],["nju",-10.68621063232422],["ano",-10.686440467834473],["gas",-10.68666172027588],["明",-10.686731338500977],["▁меня",-10.687177658081056],["▁там",-10.687231063842772],["▁दिन",-10.687360763549805],["▁ме",-10.687722206115724],["ori",-10.68796730041504],["miz",-10.687979698181152],["cm",-10.688051223754885],["▁onder",-10.688095092773438],["让",-10.688243865966797],["▁into",-10.6882905960083],["tat",-10.688529014587402],["▁¿",-10.68873405456543],["வ",-10.688923835754396],["adi",-10.689024925231934],["▁Euro",-10.689359664916992],["▁joka",-10.689607620239258],["▁nào",-10.689651489257812],["林",-10.69016933441162],["ಮ",-10.690686225891112],["▁לי",-10.691208839416504],["lara",-10.691217422485352],["▁भारत",-10.691335678100586],["▁ת",-10.691400527954102],["▁bon",-10.691669464111328],["%",-10.691938400268556],["▁där",-10.69198226928711],["讓",-10.692066192626951],["▁Android",-10.69242000579834],["ಲ್",-10.692435264587402],["发",-10.692577362060549],["▁پ",-10.69263744354248],["nce",-10.692818641662598],["The",-10.693048477172852],["php",-10.693178176879885],["▁cách",-10.69329833984375],["art",-10.69340705871582],["▁lite",-10.693577766418455],["rah",-10.69363498687744],["▁kunne",-10.693655967712402],["gr",-10.693774223327637],["▁finns",-10.693801879882812],["ରୁ",-10.693828582763672],["▁rất",-10.6939115524292],["▁janë",-10.693912506103516],["▁باید",-10.693942070007324],["▁Kor",-10.694382667541504],["▁(4)",-10.694635391235352],["▁harus",-10.695568084716797],["ár",-10.695634841918944],["▁Europa",-10.695778846740724],["▁https",-10.695857048034668],["မ်ား",-10.696046829223633],["▁kir",-10.696244239807127],["▁În",-10.696316719055176],["بر",-10.696345329284668],["▁Car",-10.69641399383545],["▁ре",-10.696414947509766],["یں",-10.69667625427246],["でも",-10.697145462036133],["iva",-10.69719409942627],["合",-10.697409629821776],["lige",-10.697608947753906],["克",-10.697662353515623],["ф",-10.697887420654297],["▁sản",-10.698076248168944],["已经",-10.698485374450684],["రి",-10.69858169555664],["▁هي",-10.698843002319336],["▁unter",-10.698944091796877],["▁Ut",-10.698999404907228],["Н",-10.699047088623049],["tri",-10.699347496032717],["ая",-10.699945449829102],["ලා",-10.70008373260498],["소",-10.700246810913086],["▁local",-10.70033073425293],["▁poate",-10.700475692749023],["▁Web",-10.700587272644045],["▁كان",-10.700756072998049],["▁vie",-10.701106071472168],["▁ਨਾਲ",-10.70132064819336],["▁chúng",-10.701632499694824],["mit",-10.701736450195312],["τ",-10.701990127563477],["▁çox",-10.702013969421388],["री",-10.702025413513184],["吃",-10.702948570251465],["▁مي",-10.702977180480955],["▁saja",-10.703025817871094],["▁godine",-10.703191757202148],["22",-10.703211784362791],["政府",-10.703621864318848],["endo",-10.703669548034668],["ый",-10.704081535339355],["лер",-10.704093933105469],["見",-10.704115867614746],["ong",-10.704148292541504],["▁biết",-10.704350471496582],["ನ್",-10.704504013061523],["▁dira",-10.704537391662598],["چ",-10.70504379272461],["З",-10.705111503601074],["▁app",-10.705123901367188],["dd",-10.70519733428955],["▁किया",-10.705391883850098],["▁ante",-10.705648422241213],["▁nebo",-10.705880165100098],["use",-10.705951690673828],["진",-10.706058502197266],["▁start",-10.706148147583008],["各",-10.706247329711914],["ero",-10.70627784729004],["श",-10.706425666809082],["ება",-10.70643138885498],["▁36",-10.706814765930176],["▁بين",-10.706880569458008],["ร",-10.707244873046877],["▁мне",-10.70763874053955],["ino",-10.708052635192873],["maan",-10.708166122436523],["ому",-10.708208084106444],["26",-1
0.708346366882324],["ሳ",-10.709010124206545],["ної",-10.70932388305664],["▁him",-10.709423065185549],["▁करने",-10.709444046020508],["▁αν",-10.709476470947266],["▁andere",-10.709565162658691],["▁mich",-10.709653854370115],["▁دارد",-10.709959030151367],["▁така",-10.710021018981934],["▁Со",-10.71018886566162],["▁acest",-10.710453033447266],["学",-10.710467338562012],["ያ",-10.710891723632812],["▁суд",-10.711015701293944],["▁Pra",-10.711151123046877],["▁gì",-10.71119785308838],["bel",-10.711240768432615],["غا",-10.711807250976562],["▁සහ",-10.711891174316406],["ք",-10.712056159973145],["▁pris",-10.712327003479004],["▁10.",-10.712358474731444],["▁điều",-10.712516784667969],["▁pola",-10.712570190429688],["▁tego",-10.712719917297363],["қа",-10.712748527526855],["▁atas",-10.71279525756836],["▁כי",-10.71292209625244],["▁ໃນ",-10.712994575500488],["▁կ",-10.713241577148438],["▁wieder",-10.71360206604004],["▁2005",-10.71376895904541],["vas",-10.714043617248535],["▁गरेको",-10.714064598083496],["ություն",-10.714484214782717],["▁400",-10.71467399597168],["▁pr",-10.714747428894045],["ix",-10.714842796325684],["▁ක්",-10.715021133422852],["lis",-10.715089797973633],["드",-10.715336799621582],["በት",-10.7153959274292],["ਮ",-10.715409278869627],["▁mint",-10.715805053710938],["ী",-10.71585464477539],["▁free",-10.71592903137207],["ët",-10.716062545776367],["σ",-10.71637725830078],["ത്തില്",-10.716434478759766],["nú",-10.7167329788208],["hr",-10.71687126159668],["▁yer",-10.717103004455566],["▁très",-10.717573165893556],["sar",-10.71796989440918],["▁frá",-10.718191146850586],["▁ده",-10.718219757080078],["▁jej",-10.718467712402344],["▁tanto",-10.718572616577148],["▁problema",-10.718631744384766],["වා",-10.718994140625],["ຂອງ",-10.719375610351562],["є",-10.719378471374512],["▁temps",-10.719576835632324],["▁só",-10.719855308532717],["ül",-10.719900131225586],["▁יש",-10.720137596130373],["ע",-10.720516204833984],["中心",-10.72059440612793],["▁nr",-10.720643997192385],["اس",-10.721052169799805],["ación",-10.72118854522705],["期",-10.721357345581056],["anta",-10.72138500213623],["ката",-10.721450805664062],["四",-10.721583366394045],["▁mid",-10.721607208251951],["ās",-10.7216796875],["▁nun",-10.721919059753418],["ಗಳನ್ನು",-10.72204875946045],["ved",-10.72205924987793],["▁good",-10.72280216217041],["▁έχει",-10.723183631896973],["▁Apple",-10.7232027053833],["している",-10.723410606384276],["2016",-10.723671913146973],["ган",-10.723712921142578],["ክ",-10.72380256652832],["bre",-10.724360466003418],["из",-10.724388122558594],["▁cookies",-10.724479675292969],["▁nya",-10.724528312683104],["頭",-10.724691390991213],["▁يا",-10.724736213684082],["▁barn",-10.725543022155762],["▁بي",-10.726119995117188],["▁تم",-10.726285934448242],["▁Ur",-10.726387977600098],["ବ",-10.726649284362791],["▁سی",-10.726777076721191],["▁Met",-10.72678565979004],["▁saj",-10.726792335510254],["▁daar",-10.727370262145996],["▁ये",-10.727376937866213],["太",-10.727684020996094],["▁דער",-10.727967262268066],["▁2018.",-10.728270530700684],["In",-10.72837734222412],["▁görə",-10.728431701660156],["▁ger",-10.72848892211914],["동",-10.728917121887209],["▁cele",-10.729155540466309],["nici",-10.729459762573242],["ног",-10.72956085205078],["orum",-10.729628562927246],["▁ਤੇ",-10.730502128601074],["ങ്ങള്",-10.730735778808594],["▁got",-10.731325149536133],["kt",-10.73136043548584],["▁website",-10.731548309326172],["isti",-10.731569290161133],["ும்",-10.7316312789917],["▁estis",-10.731664657592772],["近",-10.731950759887695],["ée",-10.732465744018556],["▁Can",-10.732691764831545],["liga",-
10.73279857635498],["▁extra",-10.732888221740724],["▁alles",-10.7330322265625],["ዎች",-10.73315715789795],["ว",-10.733231544494627],["pun",-10.733648300170898],["▁vos",-10.733909606933594],["ით",-10.733959197998049],["▁wurde",-10.734667778015137],["▁kor",-10.735149383544922],["ら",-10.735257148742676],["神",-10.735264778137209],["ер",-10.735424995422363],["开",-10.735471725463867],["ώ",-10.736194610595703],["Е",-10.736416816711426],["ges",-10.73671531677246],["▁Copyright",-10.73701286315918],["▁weer",-10.73702621459961],["▁vas",-10.737570762634276],["▁यो",-10.738299369812012],["▁ර",-10.738570213317873],["▁thời",-10.738677978515623],["ರ್",-10.738680839538574],["ede",-10.738924980163574],["▁nag",-10.739065170288086],["ম",-10.739468574523926],["▁لیے",-10.739959716796877],["hal",-10.740022659301758],["ής",-10.740164756774902],["▁wedi",-10.740348815917969],["▁pel",-10.740400314331056],["ett",-10.740470886230469],["war",-10.740628242492676],["▁kaip",-10.740917205810549],["eri",-10.741018295288086],["▁вам",-10.741132736206056],["ering",-10.741786003112791],["▁--",-10.741857528686523],["▁Min",-10.741870880126951],["ಾ",-10.741888999938965],["▁vet",-10.74194049835205],["ตัว",-10.74199390411377],["▁kal",-10.74221897125244],["rd",-10.742291450500488],["ák",-10.74246311187744],["▁لاء",-10.742656707763672],["वर",-10.742989540100098],["區",-10.743034362792969],["▁Fe",-10.743206977844238],["▁когда",-10.743643760681152],["bus",-10.743752479553224],["▁gra",-10.7437744140625],["▁thông",-10.7438325881958],["▁ر",-10.744394302368164],["jer",-10.744752883911133],["▁Una",-10.744879722595217],["▁То",-10.74542236328125],["య",-10.74557113647461],["▁י",-10.745874404907228],["內",-10.745879173278809],["Se",-10.746171951293944],["owy",-10.746330261230469],["29",-10.746346473693848],["ማ",-10.74646282196045],["▁samt",-10.746781349182127],["▁bahwa",-10.74690055847168],["▁यह",-10.747032165527344],["ef",-10.74713134765625],["eta",-10.747294425964355],["larını",-10.747461318969728],["▁ktoré",-10.747920036315918],["osa",-10.748058319091797],["▁kuin",-10.748351097106934],["▁Sed",-10.748370170593262],["▁importante",-10.74862575531006],["▁rad",-10.74869155883789],["jet",-10.74899673461914],["વ",-10.749313354492188],["▁пад",-10.749343872070312],["▁Tur",-10.74946117401123],["면",-10.749700546264648],["▁tá",-10.74984645843506],["▁nhưng",-10.750580787658691],["ию",-10.751007080078123],["▁gibt",-10.751168251037598],["问题",-10.751283645629885],["το",-10.751420974731444],["ความ",-10.7515230178833],[":“",-10.75248908996582],["▁sun",-10.75261116027832],["▁If",-10.752625465393066],["也是",-10.752782821655272],["▁Super",-10.752845764160156],["inu",-10.752889633178713],["▁Pen",-10.752891540527344],["▁kama",-10.753039360046388],["▁ג",-10.75307273864746],["▁aki",-10.753399848937988],["▁fast",-10.753867149353027],["▁nec",-10.75400447845459],["ré",-10.754405975341797],["비",-10.75441551208496],["▁دې",-10.75446319580078],["ాలు",-10.754563331604004],["▁آهن",-10.75460147857666],["文化",-10.75461769104004],["တာ",-10.75467586517334],["当",-10.754714012145996],["ком",-10.755051612854004],["ე",-10.75543975830078],["500",-10.75562858581543],["ibus",-10.755718231201172],["гі",-10.755732536315918],["дер",-10.755988121032717],["ാന്",-10.756189346313477],["int",-10.756415367126465],["▁iPhone",-10.756632804870604],["使",-10.75676727294922],["ného",-10.756908416748049],["▁σας",-10.757271766662598],["ley",-10.757338523864746],["▁ა",-10.757678031921388],["rs",-10.757959365844728],["▁очень",-10.758337020874023],["ور",-10.758359909057615],["أ",-10.75847625732422],["pad",-10.758
810997009276],["eni",-10.75894832611084],["ници",-10.758981704711914],["▁lei",-10.75900650024414],["▁استفاده",-10.75946044921875],["▁ilk",-10.759467124938965],["ovat",-10.759894371032717],["нето",-10.75990104675293],["▁tr",-10.759953498840332],["▁нај",-10.760133743286133],["▁tipo",-10.760289192199709],["▁cơ",-10.760348320007324],["▁Sen",-10.760369300842283],["යේ",-10.76052951812744],["stu",-10.760616302490234],["▁უ",-10.760737419128418],["▁masih",-10.760746955871582],["▁ك",-10.760943412780762],["▁1-",-10.760980606079102],["И",-10.761131286621094],["▁haar",-10.761351585388184],["ير",-10.761433601379396],["一些",-10.761630058288574],["▁आप",-10.761713981628418],["▁wo",-10.761848449707031],["▁ക",-10.761857986450195],["一個",-10.761988639831545],["mir",-10.762179374694824],["nta",-10.762411117553713],["▁Ма",-10.762804985046388],["▁մի",-10.762885093688965],["▁بل",-10.762911796569824],["▁design",-10.76347541809082],["Re",-10.763513565063477],["لو",-10.764111518859863],["▁една",-10.764486312866213],["ві",-10.764538764953612],["▁WA",-10.764578819274902],["▁който",-10.764742851257324],["▁Eesti",-10.76503562927246],["թ",-10.765069961547852],["õ",-10.765181541442873],["ora",-10.765288352966309],["ena",-10.765369415283203],["▁store",-10.765399932861328],["▁Alle",-10.765419006347656],["▁ਤੋਂ",-10.76590061187744],["▁meu",-10.766133308410645],["▁May",-10.766375541687012],["jon",-10.766386032104492],["ونو",-10.766711235046388],["▁його",-10.766850471496582],["▁عمل",-10.76705265045166],["▁bilo",-10.76706600189209],["lit",-10.767278671264648],["ется",-10.767311096191406],["lık",-10.76731777191162],["▁Ж",-10.76797580718994],["▁yg",-10.768024444580078],["▁help",-10.768178939819336],["ህ",-10.76831340789795],["▁même",-10.768348693847656],["希望",-10.768938064575195],["市场",-10.76902961730957],["▁ማ",-10.76941967010498],["▁gör",-10.769487380981444],["ก็",-10.769762992858888],["▁uchun",-10.770149230957031],["▁več",-10.770556449890137],["▁Hai",-10.770625114440918],["$",-10.770792007446287],["oni",-10.771036148071287],["iv",-10.771116256713867],["▁peut",-10.771170616149902],["▁trường",-10.771352767944336],["▁svoje",-10.771855354309082],["▁toe",-10.771899223327637],["▁സ്",-10.77232265472412],["्",-10.772406578063965],["▁Pan",-10.772722244262695],["色",-10.772724151611328],["ló",-10.772900581359863],["▁olla",-10.773226737976074],["▁1000",-10.773242950439451],["zer",-10.773547172546388],["stan",-10.773768424987791],["▁être",-10.773883819580078],["ան",-10.77399444580078],["▁был",-10.77444553375244],["大家",-10.774500846862791],["ಲು",-10.77479362487793],["of",-10.775018692016602],["ija",-10.775200843811035],["▁شهر",-10.775205612182615],["▁full",-10.775420188903809],["▁اما",-10.775484085083008],["才",-10.775501251220703],["▁Auto",-10.776008605957031],["https",-10.776067733764648],["can",-10.776803016662598],["ذ",-10.77696418762207],["▁нам",-10.777022361755373],["сь",-10.7770357131958],["愛",-10.777379989624023],["산",-10.777517318725586],["▁беше",-10.77760124206543],["▁байгаа",-10.778106689453123],["▁кон",-10.778284072875977],["▁badan",-10.778369903564451],["▁geen",-10.778613090515137],["▁κ",-10.77879810333252],["▁ۋە",-10.778902053833008],["個",-10.778973579406738],["juk",-10.77910614013672],["ተ",-10.77943992614746],["▁επι",-10.779478073120115],["नी",-10.779558181762695],["▁Ez",-10.779623985290527],["▁cao",-10.779850006103516],["研究",-10.7799072265625],["▁moi",-10.780196189880373],["▁back",-10.780427932739258],["▁στον",-10.780445098876951],["▁Jak",-10.780522346496582],["▁през",-10.78065013885498],["▁uw",-10.781108856201172],["服務",-10.781254
768371582],["▁<",-10.781867027282717],["▁сте",-10.78188133239746],["▁Đ",-10.78224277496338],["▁ते",-10.782554626464844],["▁pār",-10.782587051391602],["ande",-10.782625198364258],["ms",-10.782703399658203],["ować",-10.783039093017578],["▁kl",-10.783164978027344],["iste",-10.783204078674316],["ทาง",-10.78369140625],["▁δια",-10.784197807312012],["ast",-10.784396171569824],["cs",-10.78484058380127],["▁Kom",-10.785274505615234],["公",-10.785534858703612],["▁34",-10.786049842834473],["▁чи",-10.786575317382812],["▁kort",-10.786677360534668],["don",-10.787264823913574],["ске",-10.787616729736328],["▁koja",-10.787864685058594],["tr",-10.787964820861816],["▁xe",-10.787981033325195],["thi",-10.788078308105469],["gang",-10.788108825683594],["▁gr",-10.788141250610352],["pat",-10.788232803344728],["▁แต่",-10.788252830505373],["▁quam",-10.788734436035156],["▁Հ",-10.788873672485352],["商",-10.789132118225098],["nan",-10.789274215698242],["▁із",-10.789695739746094],["cat",-10.789895057678224],["tzeko",-10.789909362792969],["▁buvo",-10.790003776550291],["▁ያ",-10.7901611328125],["asa",-10.790249824523926],["次",-10.790335655212402],["▁στις",-10.790352821350098],["受",-10.790406227111816],["най",-10.79042911529541],["он",-10.790435791015623],["ô",-10.790441513061523],["cí",-10.790453910827637],["▁عليه",-10.790460586547852],["▁Tá",-10.790624618530272],["▁suo",-10.79062557220459],["▁flere",-10.790696144104004],["▁лет",-10.790895462036133],["▁tij",-10.7910795211792],["lak",-10.791304588317873],["▁também",-10.791512489318848],["▁না",-10.791574478149414],["▁اپنے",-10.791595458984377],["бо",-10.79171371459961],["▁anni",-10.792156219482422],["ק",-10.792216300964355],["щи",-10.79232120513916],["▁bod",-10.79232406616211],["▁lugar",-10.79237461090088],["▁work",-10.792497634887695],["સ",-10.79251480102539],["ම්",-10.792562484741213],["社会",-10.79265308380127],["ോ",-10.792901992797852],["ത്ത",-10.793010711669922],["▁lên",-10.793105125427246],["▁თუ",-10.79356861114502],["▁ਸਿੰਘ",-10.79394817352295],["▁lagu",-10.794184684753418],["▁vis",-10.794464111328123],["▁αλλά",-10.79461669921875],["▁მი",-10.794632911682127],["tiva",-10.7947359085083],["吧",-10.79520320892334],["sson",-10.795212745666504],["▁disse",-10.7958402633667],["hin",-10.795955657958984],["▁bardzo",-10.796065330505373],["▁भने",-10.796178817749023],["▁л",-10.796210289001465],["▁року",-10.796224594116213],["రు",-10.796429634094238],["通过",-10.796585083007812],["▁Dia",-10.79660415649414],["▁Kung",-10.796649932861328],["mor",-10.79673671722412],["ui",-10.796866416931152],["▁service",-10.79715633392334],["ep",-10.797229766845703],["▁Ru",-10.797298431396484],["дын",-10.797323226928713],["▁38",-10.797767639160156],["▁tylko",-10.797812461853027],["▁ata",-10.798100471496582],["ella",-10.798112869262695],["ore",-10.79817008972168],["書",-10.798468589782717],["tia",-10.79861831665039],["▁use",-10.798662185668944],["非常",-10.798871040344238],["▁ég",-10.799383163452148],["▁էր",-10.799898147583008],["▁Он",-10.799917221069336],["tje",-10.799952507019045],["▁طرح",-10.799960136413574],["jes",-10.80001449584961],["▁goed",-10.800030708312988],["▁προ",-10.80013942718506],["▁general",-10.800183296203612],["arna",-10.800589561462402],["▁dom",-10.800716400146484],["▁zona",-10.801107406616213],["▁11.",-10.80150032043457],["isa",-10.801748275756836],["ਣ",-10.801804542541504],["ности",-10.80193042755127],["dem",-10.802172660827637],["▁khác",-10.802310943603516],["шы",-10.802464485168455],["ம",-10.80277156829834],["▁až",-10.803014755249023],["▁გამო",-10.803041458129885],["ர",-10.803166389465332],["
▁very",-10.803234100341797],["▁треба",-10.80355167388916],["▁эле",-10.80429744720459],["ደ",-10.804325103759766],["▁μου",-10.80438995361328],["টি",-10.80458641052246],["цы",-10.804617881774902],["▁sala",-10.805023193359377],["▁Ф",-10.805062294006348],["ното",-10.80506992340088],["▁ма",-10.80515956878662],["czy",-10.80531120300293],["▁kali",-10.805414199829102],["▁Ei",-10.805607795715332],["▁تھا",-10.805622100830078],["▁radi",-10.805721282958984],["▁last",-10.806164741516112],["▁ج",-10.806365966796877],["▁đồng",-10.806385040283203],["▁hu",-10.806859016418455],["▁48",-10.807193756103516],["▁آ",-10.807448387145996],["ente",-10.80748176574707],["ஸ்",-10.807784080505373],["ությունը",-10.807821273803713],["▁এ",-10.807928085327148],["因",-10.808703422546388],["不是",-10.808731079101562],["lama",-10.809110641479492],["mek",-10.809208869934082],["▁jsou",-10.809354782104492],["чы",-10.809433937072754],["구",-10.809459686279297],["ቱ",-10.80951976776123],["▁aos",-10.809530258178713],["▁وم",-10.809613227844238],["age",-10.80976104736328],["▁pret",-10.809852600097656],["▁صورت",-10.81009578704834],["▁tapi",-10.8101167678833],["▁मा",-10.810269355773926],["▁beliau",-10.810458183288574],["post",-10.81084156036377],["tų",-10.81108856201172],["ação",-10.811339378356934],["ator",-10.811466217041016],["60",-10.811643600463867],["▁ban",-10.811649322509766],["▁đây",-10.811681747436523],["μα",-10.811915397644045],["▁USA",-10.811997413635254],["▁viên",-10.812334060668944],["ਤੇ",-10.812647819519045],["ស",-10.812650680541992],["nun",-10.812702178955078],["ndi",-10.812789916992188],["ες",-10.813000679016112],["▁hình",-10.813014030456545],["▁meget",-10.81391716003418],["▁cas",-10.814125061035156],["▁Star",-10.814203262329102],["Ma",-10.814335823059082],["▁vam",-10.81471347808838],["eu",-10.814953804016112],["▁apie",-10.815130233764648],["南",-10.815516471862791],["▁ima",-10.815749168395996],["▁Это",-10.816346168518066],["▁bank",-10.816513061523438],["▁hoe",-10.816648483276367],["型",-10.817046165466309],["▁प",-10.817784309387209],["卡",-10.817893028259276],["owa",-10.818401336669922],["bet",-10.818462371826172],["▁ito",-10.818462371826172],["ете",-10.818610191345217],["▁няма",-10.818657875061035],["▁sia",-10.818994522094728],["မွာ",-10.819430351257324],["▁już",-10.819583892822266],["45",-10.819838523864746],["▁හා",-10.820159912109377],["第",-10.820240020751951],["▁שה",-10.820266723632812],["დი",-10.820302963256836],["ក",-10.820395469665527],["φ",-10.820405006408691],["ында",-10.820520401000977],["▁ее",-10.82074737548828],["▁gang",-10.820775032043455],["▁mycket",-10.82078742980957],["ını",-10.82123851776123],["Na",-10.821249961853027],["▁kale",-10.821352005004885],["▁نہ",-10.821403503417969],["▁02",-10.821526527404783],["▁което",-10.821614265441896],["ym",-10.822121620178224],["တ",-10.822758674621582],["ych",-10.82281494140625],["ща",-10.822936058044434],["제",-10.823037147521973],["▁pli",-10.823394775390623],["ቀ",-10.82384967803955],["De",-10.823887825012209],["ෙ",-10.824095726013184],["▁iş",-10.824259757995604],["▁جس",-10.82446575164795],["▁İ",-10.824613571166992],["▁gali",-10.824639320373535],["送",-10.825035095214844],["▁هڪ",-10.825066566467283],["uma",-10.82533073425293],["▁//",-10.825491905212402],["ך",-10.825496673583984],["ൽ",-10.825663566589355],["▁37",-10.82607364654541],["lerin",-10.826125144958496],["意",-10.82638168334961],["ад",-10.826631546020508],["因为",-10.826781272888184],["တဲ့",-10.82686996459961],["▁تک",-10.826997756958008],["▁га",-10.827142715454102],["▁information",-10.827262878417969],["▁nova",-10.8275041580200
2],["tua",-10.827665328979492],["por",-10.82792854309082],["▁poder",-10.828006744384766],["бы",-10.82805061340332],["▁deur",-10.828091621398926],["ют",-10.828125953674316],["ಲ",-10.828876495361328],["▁mula",-10.829082489013672],["▁She",-10.829158782958984],["erna",-10.829288482666016],["lum",-10.829401969909668],["▁może",-10.830364227294922],["া",-10.830453872680664],["pre",-10.830784797668455],["▁besar",-10.830791473388672],["վում",-10.830989837646484],["流",-10.83144474029541],["目前",-10.831680297851562],["▁Kas",-10.831781387329102],["ност",-10.831989288330078],["▁Tri",-10.832602500915527],["▁(2",-10.832857131958008],["గ",-10.833157539367676],["▁नेपाल",-10.833346366882324],["▁чем",-10.83348560333252],["kla",-10.833625793457031],["▁así",-10.833803176879885],["▁Sport",-10.833930015563965],["mana",-10.83421802520752],["▁እ",-10.834291458129885],["κ",-10.83429718017578],["εί",-10.834325790405272],["index",-10.834335327148438],["ላ",-10.834525108337402],["▁Art",-10.834800720214844],["ني",-10.834887504577637],["活動",-10.83519458770752],["ነት",-10.8352689743042],["pur",-10.835406303405762],["▁enim",-10.835867881774902],["▁07",-10.835938453674316],["▁2-",-10.8363037109375],["सी",-10.836431503295898],["ፍ",-10.836652755737305],["数",-10.836665153503418],["▁đang",-10.836792945861816],["ー",-10.837047576904297],["rio",-10.83724308013916],["▁Mac",-10.837250709533691],["▁ਇਸ",-10.837599754333496],["aga",-10.83771800994873],["?!",-10.837801933288574],["ká",-10.838229179382324],["▁HD",-10.838457107543944],["▁Ale",-10.838593482971191],["▁only",-10.838662147521973],["▁هن",-10.838998794555664],["ына",-10.839088439941406],["▁д",-10.839192390441896],["▁forum",-10.839311599731444],["ँ",-10.839510917663574],["▁kap",-10.839679718017578],["城",-10.840067863464355],["体",-10.840167999267578],["▁due",-10.84018611907959],["ých",-10.840227127075195],["ish",-10.840250968933104],["▁vụ",-10.84082317352295],["▁cô",-10.840997695922852],["▁नाही",-10.841094017028809],["▁gal",-10.841205596923828],["୍",-10.84130859375],["▁jadi",-10.841353416442873],["▁zen",-10.841472625732422],["сан",-10.841561317443848],["«",-10.8416166305542],["▁partner",-10.841771125793455],["ଲ",-10.842217445373535],["ér",-10.842252731323242],["ク",-10.8424072265625],["lon",-10.84249496459961],["ља",-10.842495918273926],["▁حال",-10.84262752532959],["ନ",-10.84298038482666],["ക്ക്",-10.843168258666992],["▁Read",-10.843183517456056],["''",-10.84358024597168],["给",-10.843633651733398],["??",-10.8436861038208],["▁еще",-10.843794822692873],["▁Val",-10.843857765197754],["▁cùng",-10.844035148620604],["快",-10.844043731689451],["▁Hal",-10.844059944152832],["▁iar",-10.844175338745115],["ши",-10.844422340393066],["תי",-10.84466552734375],["chu",-10.844799041748049],["▁05",-10.844818115234377],["ys",-10.845044136047363],["▁මම",-10.845349311828612],["▁ជា",-10.84547996520996],["ति",-10.84577465057373],["▁quae",-10.845833778381348],["▁rok",-10.845916748046877],["▁multe",-10.84605598449707],["▁bit",-10.846177101135254],["▁kai",-10.846198081970217],["▁pare",-10.846915245056152],["安全",-10.847227096557615],["▁ты",-10.84729290008545],["وي",-10.847423553466797],["相",-10.847891807556152],["▁Top",-10.847952842712402],["larının",-10.848347663879396],["▁Media",-10.848435401916504],["ай",-10.849435806274414],["▁tutti",-10.849461555480955],["▁tư",-10.849676132202148],["ую",-10.849881172180176],["ግ",-10.85008430480957],["還",-10.851167678833008],["щ",-10.851223945617676],["γ",-10.8512601852417],["dig",-10.851587295532228],["▁Tre",-10.851605415344238],["▁Если",-10.8516206741333],["med",-10.85229110
7177734],["še",-10.852338790893556],["нын",-10.852426528930664],["▁primer",-10.85247039794922],["▁Türkiye",-10.85257053375244],["ವನ್ನು",-10.852721214294434],["▁heb",-10.852828979492188],["tá",-10.852914810180664],["低",-10.852917671203612],["▁Son",-10.852923393249512],["▁historia",-10.853193283081056],["▁tie",-10.853320121765137],["』",-10.853428840637209],["لىق",-10.85365390777588],["▁ise",-10.853711128234863],["▁2004",-10.85373878479004],["ব",-10.853742599487305],["amente",-10.85426425933838],["▁gir",-10.854891777038574],["▁ਕਿ",-10.855161666870115],["▁кој",-10.855401992797852],["▁تمام",-10.856165885925291],["▁42",-10.856231689453123],["▁mun",-10.856242179870604],["▁III",-10.856263160705566],["▁BBC",-10.856277465820312],["▁ذلك",-10.856608390808104],["▁гэж",-10.857420921325684],["chen",-10.857446670532228],["ା",-10.85753345489502],["80",-10.857918739318848],["▁Tag",-10.858001708984377],["▁אַ",-10.858354568481444],["▁Π",-10.858433723449709],["دى",-10.85861587524414],["▁Cha",-10.858646392822266],["▁red",-10.858806610107422],["▁Sy",-10.85883617401123],["тен",-10.85886287689209],["▁مان",-10.85944366455078],["ण",-10.859492301940918],["▁këtë",-10.859530448913574],["any",-10.85968780517578],["▁अब",-10.860337257385254],["ಕ್",-10.860566139221191],["▁где",-10.860600471496582],["ז",-10.860713958740234],["дин",-10.860756874084473],["ev",-10.860957145690918],["▁Vor",-10.861319541931152],["ంగా",-10.861327171325684],["▁Download",-10.861331939697266],["▁vom",-10.861384391784668],["▁55",-10.86164093017578],["▁Din",-10.86178970336914],["sy",-10.861842155456545],["ून",-10.862150192260742],["▁ساتھ",-10.862187385559082],["ல",-10.862250328063965],["▁What",-10.862308502197266],["▁един",-10.862339973449709],["log",-10.862459182739258],["́",-10.862573623657228],["▁Nem",-10.863129615783691],["女",-10.863333702087402],["yê",-10.863377571105955],["▁რა",-10.863560676574709],["ారు",-10.863572120666504],["▁prie",-10.863886833190918],["த",-10.864049911499023],["ও",-10.86412525177002],["▁등",-10.864546775817873],["xe",-10.86457061767578],["▁1)",-10.864715576171877],["▁06",-10.864858627319336],["પ",-10.864933013916016],["▁diese",-10.86549472808838],["▁մասին",-10.865608215332031],["▁kunnen",-10.86583137512207],["▁upang",-10.866209030151367],["▁ପାଇଁ",-10.866310119628906],["▁Wie",-10.866641998291016],["ക്",-10.86687183380127],["▁fare",-10.866920471191406],["▁mae",-10.866923332214355],["▁можна",-10.86697006225586],["▁אני",-10.866999626159668],["▁hành",-10.86725902557373],["▁کہا",-10.867341995239258],["oz",-10.867437362670898],["este",-10.867850303649902],["ί",-10.86794090270996],["▁porta",-10.868267059326172],["▁bukan",-10.86850357055664],["siz",-10.868746757507324],["зи",-10.868887901306152],["ff",-10.869224548339844],["▁paano",-10.869301795959473],["#",-10.869548797607422],["يه",-10.86955738067627],["่",-10.869744300842283],["▁други",-10.869864463806152],["▁Av",-10.869953155517578],["▁nes",-10.870500564575195],["▁Як",-10.870588302612305],["▁thu",-10.870729446411133],["▁material",-10.870805740356444],["▁thấy",-10.87099838256836],["دار",-10.871055603027344],["▁ໄດ້",-10.871060371398926],["nego",-10.871200561523438],["很多",-10.87126350402832],["ప",-10.87134838104248],["▁وجود",-10.871417999267578],["nı",-10.87143611907959],["ieren",-10.87172794342041],["sky",-10.871800422668455],["imin",-10.87218189239502],["▁које",-10.872371673583984],["▁hvis",-10.872556686401367],["▁membuat",-10.872810363769531],["▁art",-10.872992515563965],["ką",-10.87356948852539],["▁até",-10.87385368347168],["ன",-10.874095916748049],["何",-10.87415885925293],["▁Ban
k",-10.874387741088867],["ín",-10.874693870544434],["ട",-10.875144958496094],["tive",-10.875301361083984],["▁cần",-10.875570297241213],["▁Tra",-10.875625610351562],["▁degli",-10.87569808959961],["▁John",-10.875699043273926],["問題",-10.87595272064209],["该",-10.87602424621582],["şi",-10.876084327697754],["▁però",-10.8760986328125],["כ",-10.876280784606934],["大学",-10.87658977508545],["▁Так",-10.876714706420898],["ход",-10.876956939697266],["▁75",-10.87698745727539],["▁ingen",-10.877057075500488],["▁bas",-10.87716007232666],["▁نمی",-10.877225875854492],["▁noen",-10.877270698547363],["▁vers",-10.877381324768066],["▁Yang",-10.877419471740724],["發",-10.87757682800293],["▁beberapa",-10.877730369567873],["ਤ",-10.877748489379885],["▁2)",-10.8778076171875],["ທີ່",-10.878064155578612],["▁Cho",-10.878206253051758],["▁Som",-10.878353118896484],["lá",-10.878968238830566],["నే",-10.879149436950684],["▁allt",-10.87951374053955],["科",-10.879663467407228],["▁But",-10.879960060119627],["▁فى",-10.88010025024414],["раз",-10.880109786987305],["berg",-10.880253791809082],["sor",-10.880399703979492],["▁vita",-10.880401611328123],["▁timbang",-10.8806791305542],["يات",-10.880791664123535],["ነ",-10.88090705871582],["▁Í",-10.881032943725586],["▁obra",-10.881155967712402],["▁doen",-10.881340026855469],["ु",-10.8818941116333],["▁আ",-10.881898880004885],["▁نو",-10.88193130493164],["▁dua",-10.882014274597168],["يت",-10.88202953338623],["ské",-10.882036209106444],["ጋ",-10.882046699523926],["▁тоа",-10.882492065429688],["ische",-10.882609367370604],["▁была",-10.882789611816406],["▁против",-10.88316822052002],["▁long",-10.883191108703612],["▁وی",-10.883267402648926],["▁också",-10.883554458618164],["▁კი",-10.883586883544922],["▁जो",-10.883858680725098],["pis",-10.88399600982666],["ض",-10.884286880493164],["rii",-10.884716987609863],["▁Next",-10.88494110107422],["▁ol",-10.885103225708008],["▁није",-10.885601043701172],["▁ಎಂದು",-10.885746955871582],["这些",-10.885812759399414],["ων",-10.885926246643066],["眼",-10.886184692382812],["ман",-10.886436462402344],["▁menos",-10.886526107788086],["▁जा",-10.886659622192385],["pal",-10.88698959350586],["▁jih",-10.886990547180176],["▁trình",-10.887232780456545],["▁અ",-10.887323379516602],["की",-10.887481689453123],["inde",-10.887786865234377],["бу",-10.888385772705078],["wy",-10.889265060424805],["▁gå",-10.889277458190918],["31",-10.889328002929688],["▁För",-10.889432907104492],["▁Een",-10.889907836914062],["kr",-10.890789031982422],["學",-10.890941619873049],["No",-10.890947341918944],["nə",-10.891058921813965],["▁گیا",-10.891139030456545],["▁Che",-10.891315460205078],["▁Hindi",-10.891357421875],["▁можете",-10.89147663116455],["▁వి",-10.891531944274902],["یت",-10.891910552978516],["▁ගැන",-10.891925811767578],["▁Vy",-10.891927719116213],["▁nhận",-10.89215087890625],["器",-10.892234802246094],["ണ",-10.892481803894045],["▁sử",-10.892627716064451],["▁12.",-10.893101692199709],["▁now",-10.893266677856444],["form",-10.89346694946289],["stvo",-10.8935546875],["90",-10.89376735687256],["ዳ",-10.89376735687256],["વા",-10.893913269042969],["اب",-10.89394760131836],["lja",-10.893965721130373],["▁tv",-10.894023895263672],["▁much",-10.894160270690918],["99",-10.894209861755373],["יות",-10.894495010375977],["▁ανα",-10.894618034362791],["▁عام",-10.894635200500488],["선",-10.894790649414062],["線",-10.89484977722168],["技术",-10.894887924194336],["工",-10.894925117492676],["▁ville",-10.895346641540527],["த்தில்",-10.895573616027832],["rie",-10.895865440368652],["vé",-10.895920753479004],["▁также",-10.896084785461426]
,["▁há",-10.896169662475586],["▁حکومت",-10.896211624145508],["▁Blog",-10.89626407623291],["like",-10.896495819091797],["▁сме",-10.89659023284912],["▁Til",-10.89662265777588],["owej",-10.896631240844728],["pie",-10.89674186706543],["ია",-10.89691925048828],["▁›",-10.896985054016112],["تر",-10.897383689880373],["信",-10.898185729980469],["isme",-10.898314476013184],["sim",-10.898366928100586],["ean",-10.8983793258667],["▁Для",-10.89843463897705],["ರಿ",-10.898496627807615],["zel",-10.898567199707031],["ligt",-10.898752212524414],["▁loo",-10.89915943145752],["β",-10.899182319641112],["▁duke",-10.89925479888916],["▁ق",-10.899341583251951],["▁सरकार",-10.899725914001465],["tim",-10.89977741241455],["som",-10.89990234375],["知",-10.899979591369627],["▁مو",-10.900099754333496],["ový",-10.900100708007812],["њу",-10.900351524353027],["Л",-10.90035915374756],["ры",-10.900392532348633],["▁Қазақстан",-10.900495529174805],["ικό",-10.90065860748291],["▁वा",-10.900851249694824],["▁sch",-10.900999069213867],["jam",-10.901041030883787],["ակ",-10.901092529296877],["ité",-10.90112590789795],["▁03",-10.90121078491211],["▁බව",-10.90128231048584],["尔",-10.901385307312012],["▁mens",-10.901424407958984],["치",-10.901495933532717],["▁partir",-10.901552200317385],["એ",-10.901566505432127],["ват",-10.901576042175291],["▁''",-10.901873588562012],["▁años",-10.90194320678711],["▁طرف",-10.901957511901855],["▁αυτό",-10.901962280273438],["▁וב",-10.902080535888672],["▁jer",-10.902090072631836],["స",-10.902128219604492],["▁живот",-10.90236473083496],["İ",-10.902393341064451],["vil",-10.902746200561523],["ث",-10.902826309204102],["▁first",-10.902997016906738],["▁гэта",-10.903087615966797],["ника",-10.903124809265137],["▁Real",-10.903190612792969],["▁diri",-10.903223991394045],["▁hin",-10.903255462646484],["သ",-10.903379440307615],["▁бе",-10.903419494628906],["bb",-10.903434753417969],["約",-10.903464317321776],["▁Σ",-10.903740882873535],["pin",-10.903768539428713],["▁fri",-10.904067993164062],["mur",-10.904098510742188],["▁ຂອງ",-10.904375076293944],["▁persona",-10.90459442138672],["率",-10.904818534851074],["xi",-10.904841423034668],["ени",-10.905144691467283],["▁todas",-10.905592918395996],["五",-10.905747413635254],["▁более",-10.9058198928833],["動",-10.906079292297363],["▁Wi",-10.906620979309082],["ува",-10.906654357910156],["▁hann",-10.906709671020508],["片",-10.906890869140623],["요",-10.906970977783203],["ые",-10.906996726989746],["能力",-10.907113075256348],["ા",-10.90715789794922],["情",-10.90720272064209],["▁tej",-10.907206535339355],["ام",-10.907227516174316],["sid",-10.907408714294434],["▁phát",-10.907462120056152],["▁Free",-10.90748119354248],["▁say",-10.907816886901855],["▁day",-10.907822608947754],["▁ወደ",-10.908207893371582],["▁مورد",-10.908397674560549],["ડ",-10.90863800048828],["▁før",-10.90880012512207],["▁vse",-10.908917427062988],["▁love",-10.90937614440918],["▁jika",-10.90943431854248],["vä",-10.90944766998291],["dat",-10.909542083740234],["stand",-10.909613609313965],["нд",-10.909698486328123],["▁ग",-10.909817695617676],["▁será",-10.910088539123535],["还是",-10.910120010375977],["ces",-10.910429000854492],["▁puede",-10.910670280456545],["▁היא",-10.910693168640137],["▁(19",-10.910896301269531],["▁Gi",-10.910965919494627],["▁سایت",-10.911160469055176],["isto",-10.91116714477539],["▁Eu",-10.911173820495604],["▁nyt",-10.911197662353516],["▁trước",-10.91124439239502],["ings",-10.911248207092283],["▁क",-10.911262512207031],["します",-10.911410331726074],["cji",-10.911505699157717],["▁بین",-10.91167163848877],["▁ដែល",-10.911717414
855955],["የ",-10.911882400512695],["▁Как",-10.912134170532228],["▁Об",-10.912330627441406],["▁suis",-10.913183212280272],["ելու",-10.913227081298828],["된",-10.913257598876951],["лан",-10.913568496704102],["▁እንደ",-10.913737297058104],["urile",-10.913823127746582],["▁ब",-10.913959503173828],["版",-10.914022445678713],["ico",-10.914039611816406],["▁Foto",-10.914047241210938],["▁करें",-10.914072036743164],["ڻ",-10.91444206237793],["▁main",-10.91452407836914],["▁إن",-10.914694786071776],["▁nje",-10.914992332458496],["cht",-10.91519546508789],["▁کرده",-10.915205001831056],["▁aussi",-10.915313720703123],["要求",-10.91547393798828],["▁tự",-10.91575813293457],["▁Ми",-10.915763854980469],["▁Ε",-10.916226387023926],["▁(5)",-10.916393280029297],["ਰ",-10.91656494140625],["▁sp",-10.91666316986084],["▁vår",-10.916712760925291],["ím",-10.916735649108888],["Я",-10.91709804534912],["▁minden",-10.91749095916748],["▁tiek",-10.917505264282228],["▁mang",-10.917834281921388],["▁bere",-10.918051719665527],["Ш",-10.91805934906006],["kke",-10.918098449707031],["▁līdz",-10.91829776763916],["▁सु",-10.9183988571167],["▁pus",-10.9185152053833],["取",-10.918656349182127],["▁Park",-10.918853759765623],["▁گفت",-10.919224739074709],["jú",-10.919234275817873],["超",-10.919550895690918],["ската",-10.919652938842772],["itas",-10.919893264770508],["tí",-10.919893264770508],["sini",-10.919952392578123],["лю",-10.919987678527832],["dha",-10.920226097106934],["▁නම්",-10.920281410217283],["▁kimi",-10.920343399047852],["▁firma",-10.920384407043455],["stva",-10.920598983764648],["▁유",-10.920682907104492],["ครับ",-10.921245574951172],["▁välja",-10.921357154846191],["▁ker",-10.921395301818848],["ons",-10.92167854309082],["lje",-10.9218111038208],["▁والم",-10.922090530395508],["นั้น",-10.922367095947266],["▁اند",-10.922369003295898],["现在",-10.922435760498049],["▁Са",-10.922821044921877],["hed",-10.922823905944824],["▁కూడా",-10.922913551330566],["▁kiel",-10.922914505004885],["▁លោក",-10.92296028137207],["tions",-10.923048973083496],["▁egin",-10.923226356506348],["▁sitten",-10.923284530639648],["lay",-10.923389434814451],["代",-10.923646926879885],["▁ที่",-10.923726081848145],["▁første",-10.923821449279783],["▁ami",-10.924050331115724],["nym",-10.924607276916504],["▁دیگر",-10.924643516540527],["▁със",-10.92480182647705],["Le",-10.925646781921388],["▁सा",-10.9257173538208],["▁andra",-10.925944328308104],["ட்",-10.926251411437988],["వి",-10.926323890686035],["raz",-10.926548957824709],["يل",-10.926761627197266],["రా",-10.926804542541504],["▁Në",-10.926852226257324],["▁2017.",-10.927071571350098],["▁rin",-10.927111625671388],["ई",-10.927119255065918],["▁kg",-10.927128791809082],["ස",-10.927226066589355],["sed",-10.927251815795898],["▁Was",-10.92783546447754],["ル",-10.927881240844728],["ában",-10.927983283996582],["сын",-10.928071975708008],["ntes",-10.928150177001951],["場",-10.928921699523926],["▁sendiri",-10.92895793914795],["▁bem",-10.92898654937744],["▁ଏହି",-10.928999900817873],["省",-10.9290132522583],["不能",-10.929126739501951],["pri",-10.929638862609863],["教",-10.92980670928955],["ву",-10.929951667785645],["而且",-10.930083274841309],["▁buat",-10.930215835571287],["ხ",-10.930413246154783],["▁well",-10.93041706085205],["ће",-10.93047332763672],["신",-10.93061351776123],["▁seorang",-10.93100643157959],["▁list",-10.931191444396973],["▁tentang",-10.93133544921875],["nio",-10.931719779968262],["▁serta",-10.931865692138672],["▁před",-10.932403564453123],["▁них",-10.9324369430542],["▁memiliki",-10.932449340820312],["pel",-10.932464599609377],["▁។",-10.93
2543754577637],["机",-10.932788848876951],["ช",-10.93320655822754],["ση",-10.933443069458008],["▁НА",-10.933463096618652],["▁250",-10.933637619018556],["▁buhok",-10.933870315551758],["ట",-10.93392276763916],["▁شي",-10.934130668640137],["zia",-10.934226036071776],["▁onde",-10.93463134765625],["▁ش",-10.934762001037598],["හ",-10.9348726272583],["▁bus",-10.934947967529297],["▁été",-10.93516731262207],["▁ia",-10.935396194458008],["മായ",-10.93552017211914],["▁народ",-10.935545921325684],["▁cấp",-10.935559272766112],["ola",-10.935579299926758],["bud",-10.935651779174805],["▁ע",-10.935863494873049],["▁sz",-10.935873031616213],["▁کشور",-10.935920715332031],["▁իր",-10.935927391052246],["▁క",-10.935952186584473],["▁Inter",-10.936461448669434],["▁તે",-10.936497688293455],["▁molt",-10.936552047729492],["▁meng",-10.936717987060549],["▁съ",-10.936834335327148],["vik",-10.93685531616211],["TV",-10.936902046203612],["只",-10.93701171875],["▁Air",-10.937220573425291],["▁Vo",-10.937444686889648],["вы",-10.937618255615234],["okat",-10.93792724609375],["▁गया",-10.938032150268556],["ប",-10.938228607177734],["▁think",-10.938271522521973],["ნი",-10.938440322875977],["因此",-10.938799858093262],["▁وأ",-10.938807487487791],["▁jeho",-10.939143180847168],["រ",-10.939281463623049],["2015",-10.93929386138916],["용",-10.93943977355957],["▁país",-10.939517974853516],["▁§",-10.93960666656494],["▁special",-10.940034866333008],["▁mei",-10.94013786315918],["▁خبر",-10.940471649169922],["真",-10.940519332885742],["▁dire",-10.940570831298828],["▁नि",-10.94062042236328],["▁sing",-10.940656661987305],["▁home",-10.940692901611328],["▁years",-10.940733909606934],["站",-10.940826416015623],["▁sammen",-10.94105052947998],["cher",-10.9411039352417],["▁نیز",-10.941132545471191],["lü",-10.941283226013184],["▁Kur",-10.9414644241333],["▁tao",-10.941558837890623],["▁SA",-10.941611289978027],["ാണ്",-10.94163703918457],["▁Maria",-10.941828727722168],["oi",-10.94184398651123],["▁pala",-10.9422607421875],["的时候",-10.942447662353516],["ڪ",-10.942730903625488],["包括",-10.942962646484377],["ság",-10.942965507507324],["gt",-10.942991256713867],["xo",-10.943779945373535],["▁uso",-10.944289207458496],["▁Hier",-10.944320678710938],["▁120",-10.94435691833496],["ಟ್",-10.94447135925293],["式",-10.944541931152344],["مان",-10.944621086120604],["usta",-10.944745063781738],["გ",-10.944747924804688],["▁vì",-10.944846153259276],["ない",-10.945178031921388],["▁kup",-10.945801734924316],["▁کسی",-10.945903778076172],["▁dalla",-10.945962905883787],["ування",-10.946188926696776],["▁dir",-10.946293830871582],["▁nghiệp",-10.946351051330566],["▁Če",-10.946480751037598],["▁ውስጥ",-10.94649600982666],["▁Ser",-10.946544647216797],["อ",-10.946558952331545],["pet",-10.946587562561035],["▁प्रदेश",-10.947040557861328],["利用",-10.947108268737791],["经济",-10.947152137756348],["信息",-10.947158813476562],["ಗ",-10.947188377380373],["ி",-10.94750690460205],["▁ide",-10.94773006439209],["pon",-10.94776725769043],["▁یو",-10.947796821594238],["▁රී",-10.947806358337402],["▁DI",-10.947880744934082],["ൂ",-10.947965621948242],["▁Pod",-10.947969436645508],["▁ces",-10.948415756225586],["▁set",-10.948626518249512],["nä",-10.948715209960938],["ins",-10.948890686035156],["▁system",-10.948989868164062],["டி",-10.949398040771484],["-2",-10.949474334716797],["wr",-10.949507713317873],["ገ",-10.949906349182127],["▁ime",-10.949966430664062],["▁중",-10.950014114379885],["för",-10.950031280517578],["▁этого",-10.9501371383667],["▁کم",-10.950150489807127],["elor",-10.950160026550291],["▁خ",-10.950186729431152],["▁teh",-1
0.950203895568848],["▁arasında",-10.950305938720703],["title",-10.9504976272583],["▁lalu",-10.950617790222168],["2.",-10.950624465942385],["▁Ex",-10.950648307800291],["國",-10.950675010681152],["rang",-10.95076847076416],["▁sí",-10.95083999633789],["eks",-10.951148986816406],["ટ",-10.951263427734377],["ныя",-10.951334953308104],["▁उ",-10.951396942138672],["▁มี",-10.951421737670898],["▁pul",-10.951456069946287],["mise",-10.951515197753906],["▁Tr",-10.952043533325195],["▁More",-10.95210075378418],["▁tres",-10.952180862426758],["jal",-10.952235221862791],["▁Ban",-10.95225429534912],["▁perlu",-10.952522277832031],["▁leur",-10.95253086090088],["▁طور",-10.952852249145508],["bile",-10.953058242797852],["ική",-10.9533052444458],["Sa",-10.95371913909912],["over",-10.954078674316406],["▁sekä",-10.954203605651855],["ğ",-10.954357147216797],["stä",-10.954594612121582],["▁सं",-10.954601287841797],["▁contact",-10.95468807220459],["dh",-10.954754829406738],["▁cái",-10.954916954040527],["米",-10.955302238464355],["▁novo",-10.956113815307615],["http",-10.956482887268066],["했다",-10.95668888092041],["时间",-10.957509994506836],["▁არის",-10.957568168640137],["▁nagy",-10.95759391784668],["డ",-10.957709312438965],["stad",-10.95773696899414],["гу",-10.957745552062988],["像",-10.957866668701172],["▁fara",-10.95797634124756],["▁kamu",-10.957979202270508],["lich",-10.958048820495604],["▁سو",-10.958243370056152],["കള്",-10.95841121673584],["▁Aus",-10.958548545837402],["qu",-10.95865535736084],["▁br",-10.958693504333496],["▁Wy",-10.958854675292969],["▁mặt",-10.958887100219728],["▁Л",-10.959178924560549],["ዋ",-10.959336280822754],["ご",-10.95936107635498],["沒有",-10.95964527130127],["qa",-10.95964813232422],["▁onze",-10.959840774536133],["न्",-10.960076332092283],["▁tele",-10.960591316223145],["▁Zeit",-10.960779190063477],["▁ہم",-10.9612398147583],["▁spre",-10.961465835571287],["火",-10.961801528930664],["▁Де",-10.961954116821287],["▁করে",-10.962006568908691],["kki",-10.962268829345703],["ное",-10.962420463562012],["ost",-10.962897300720217],["平",-10.963000297546388],["▁ky",-10.9630126953125],["ním",-10.963068962097168],["бе",-10.963114738464355],["kka",-10.963241577148438],["tina",-10.963359832763672],["cija",-10.96374225616455],["vid",-10.963763236999512],["ች",-10.963886260986328],["請",-10.963912010192873],["দের",-10.964101791381836],["ト",-10.96424388885498],["▁Македонија",-10.964263916015623],["ić",-10.964439392089844],["集",-10.964449882507324],["▁important",-10.964486122131348],["▁tim",-10.964725494384766],["ava",-10.96491527557373],["▁یې",-10.964935302734377],["▁då",-10.965234756469728],["▁Ten",-10.965500831604004],["▁год",-10.965543746948242],["▁Und",-10.965564727783203],["রা",-10.965728759765623],["▁ry",-10.965778350830078],["čka",-10.96583366394043],["▁الع",-10.966026306152344],["▁kwenye",-10.96607780456543],["▁മ",-10.966346740722656],["ség",-10.966469764709473],["▁شرکت",-10.966479301452637],["▁hafa",-10.96651840209961],["▁hasta",-10.966607093811035],["lash",-10.966876983642578],["mai",-10.966943740844728],["など",-10.967028617858888],["▁Pour",-10.967300415039062],["ich",-10.967405319213867],["▁Οι",-10.96742057800293],["▁War",-10.96756649017334],["▁pen",-10.967737197875977],["▁деген",-10.967859268188477],["ሰ",-10.967992782592772],["ውን",-10.968170166015623],["▁olduğu",-10.968192100524902],["zie",-10.968206405639648],["else",-10.968218803405762],["▁kanë",-10.968387603759766],["国际",-10.96847915649414],["▁Ре",-10.968645095825195],["▁Ц",-10.968936920166016],["▁nás",-10.968968391418455],["▁Untuk",-10.969444274902344],["▁अपने",
-10.969487190246582],["tio",-10.969518661499023],["▁salah",-10.96981716156006],["▁უნდა",-10.969844818115234],["▁mam",-10.97007656097412],["եց",-10.970121383666992],["▁standard",-10.970187187194824],["▁sebuah",-10.97031593322754],["ած",-10.970348358154297],["стан",-10.970361709594728],["ます",-10.970556259155272],["▁aga",-10.97060489654541],["명",-10.970749855041504],["▁eða",-10.971089363098145],["▁khách",-10.971165657043455],["ší",-10.97118854522705],["▁ក្នុង",-10.971309661865234],["▁Des",-10.971395492553713],["▁article",-10.971447944641112],["▁nii",-10.97163200378418],["all",-10.971650123596191],["▁Chi",-10.9717435836792],["▁Plan",-10.971758842468262],["▁năng",-10.972171783447266],["▁کریں",-10.97223949432373],["rad",-10.972375869750977],["دىن",-10.972516059875488],["▁niya",-10.972792625427246],["▁ту",-10.972881317138672],["ими",-10.973106384277344],["יה",-10.973307609558104],["▁sou",-10.973382949829102],["▁India",-10.973512649536133],["มาก",-10.97385025024414],["▁svar",-10.97386074066162],["▁Las",-10.973958969116213],["tivo",-10.974431991577148],["uan",-10.974445343017578],["▁हा",-10.97453498840332],["▁មាន",-10.974603652954102],["▁ý",-10.974753379821776],["▁sehingga",-10.974817276000977],["▁39",-10.97484302520752],["▁අ",-10.975550651550291],["▁should",-10.975564002990724],["▁side",-10.975884437561035],["其他",-10.976147651672363],["istä",-10.976310729980469],["lib",-10.976627349853516],["star",-10.976736068725586],["▁maj",-10.976767539978027],["立",-10.976771354675291],["ünk",-10.976791381835938],["ሉ",-10.977397918701172],["▁fie",-10.977566719055176],["niu",-10.977646827697754],["മായി",-10.977822303771973],["▁دست",-10.977931022644045],["▁date",-10.97793960571289],["▁spa",-10.977968215942385],["▁rumah",-10.978204727172852],["dı",-10.978227615356444],["▁ი",-10.978300094604492],["tó",-10.97834587097168],["▁Κ",-10.97836971282959],["ээ",-10.978403091430664],["▁sai",-10.978446006774902],["▁Cor",-10.978707313537598],["ଳ",-10.978731155395508],["nden",-10.978745460510254],["▁کنند",-10.978954315185549],["ద",-10.97898006439209],["day",-10.97898292541504],["▁leis",-10.979255676269531],["த்து",-10.979317665100098],["▁fu",-10.979449272155762],["▁będzie",-10.979609489440918],["▁один",-10.97961139678955],["зна",-10.979935646057127],["▁сайт",-10.98019313812256],["▁asi",-10.980428695678713],["미",-10.98058032989502],["▁take",-10.98069190979004],["လ",-10.980767250061035],["▁lá",-10.980891227722168],["▁ප",-10.980905532836914],["ٰ",-10.981088638305664],["▁ਸ",-10.98134708404541],["看到",-10.981499671936035],["ቅ",-10.98159885406494],["ಯಲ್ಲಿ",-10.981734275817873],["▁Au",-10.981772422790527],["▁bağlı",-10.981793403625488],["ባ",-10.982128143310549],["ının",-10.9827880859375],["▁App",-10.982844352722168],["▁туралы",-10.982917785644531],["▁tua",-10.983036041259766],["▁hanno",-10.98305320739746],["lek",-10.98319149017334],["▁pole",-10.983316421508787],["▁mine",-10.983901977539062],["נ",-10.984054565429688],["adh",-10.98409938812256],["▁ص",-10.984315872192385],["▁اي",-10.984407424926758],["dio",-10.984545707702637],["▁bé",-10.984551429748535],["ена",-10.984580993652344],["ू",-10.984862327575684],["oli",-10.98490047454834],["▁th",-10.984980583190918],["▁لئے",-10.985011100769045],["リ",-10.985098838806152],["▁té",-10.985274314880373],["ከ",-10.985284805297852],["死",-10.98560905456543],["하게",-10.985615730285645],["▁azt",-10.98561954498291],["คน",-10.985657691955566],["▁65",-10.98603343963623],["▁Esta",-10.986345291137695],["tai",-10.986615180969238],["ách",-10.98676300048828],["్",-10.986852645874023],["▁ara",-10.986926078796388],["
େ",-10.98740291595459],["▁3-",-10.987468719482422],["▁phẩm",-10.987549781799316],["合作",-10.987632751464844],["ින්",-10.987730026245115],["▁Mag",-10.987785339355469],["iko",-10.987807273864746],["▁werk",-10.988153457641602],["ወ",-10.988617897033691],["▁१",-10.98880386352539],["▁popular",-10.988837242126465],["▁same",-10.988974571228027],["包",-10.989018440246582],["لي",-10.98911952972412],["vr",-10.989157676696776],["▁tau",-10.989356994628906],["غ",-10.989378929138184],["▁proces",-10.989638328552246],["ña",-10.989721298217772],["zem",-10.990179061889648],["▁Ana",-10.990195274353027],["▁работа",-10.990446090698242],["▁даже",-10.990671157836914],["ill",-10.990771293640137],["不同",-10.99081039428711],["▁đối",-10.991025924682615],["▁kala",-10.991055488586426],["▁كما",-10.991244316101074],["▁ông",-10.991369247436523],["ит",-10.991538047790527],["bir",-10.9915771484375],["ね",-10.991693496704102],["建设",-10.99178695678711],["▁quis",-10.991968154907228],["▁която",-10.992082595825195],["िक",-10.992128372192385],["▁2002",-10.99233627319336],["▁hefur",-10.992476463317873],["▁али",-10.992693901062012],["▁Р",-10.993029594421388],["号",-10.99315357208252],["▁které",-10.993298530578612],["▁spo",-10.993402481079102],["如何",-10.993786811828612],["ත",-10.993967056274414],["▁naman",-10.994288444519045],["▁sans",-10.994383811950684],["▁pois",-10.994409561157228],["▁ఒక",-10.99445343017578],["gra",-10.994563102722168],["dk",-10.99464225769043],["▁Ako",-10.994891166687012],["▁04",-10.995050430297852],["itate",-10.99506378173828],["▁modern",-10.995194435119627],["▁algo",-10.995306015014648],["cions",-10.99530792236328],["▁estar",-10.995513916015623],["▁hiyo",-10.995574951171877],["من",-10.995607376098633],["として",-10.99578857421875],["aal",-10.995875358581545],["lardan",-10.995882034301758],["▁York",-10.995923042297363],["▁કરી",-10.996050834655762],["▁Barcelona",-10.99609661102295],["▁hans",-10.99619960784912],["70",-10.996241569519045],["▁Ба",-10.996258735656738],["-1",-10.996356964111328],["▁било",-10.996359825134276],["mann",-10.99639129638672],["▁нема",-10.996521949768066],["น้ํา",-10.996729850769045],["▁ଅ",-10.996801376342772],["sem",-10.996877670288086],["▁бір",-10.996906280517578],["▁loro",-10.996990203857422],["▁gut",-10.997320175170898],["이다",-10.997380256652832],["세",-10.997725486755373],["dim",-10.997858047485352],["anti",-10.99790859222412],["▁off",-10.997976303100586],["▁saa",-10.998003005981444],["▁fo",-10.998577117919922],["▁empresa",-10.998690605163574],["ivo",-10.998699188232422],["ଟି",-10.998706817626951],["마",-10.998960494995115],["ট",-10.999223709106444],["lä",-10.999616622924805],["lus",-11.00009536743164],["ează",-11.000100135803224],["▁خلال",-11.000139236450195],["▁через",-11.000163078308104],["▁mari",-11.000253677368164],["▁Ap",-11.000393867492676],["Ж",-11.000497817993164],["гийн",-11.000713348388672],["ios",-11.000731468200684],["â",-11.00076389312744],["▁too",-11.000832557678224],["▁ਹਨ",-11.000847816467283],["▁litt",-11.001175880432127],["▁sil",-11.001429557800291],["方法",-11.001856803894045],["▁پس",-11.00193214416504],["▁coa",-11.002227783203123],["▁nada",-11.002229690551758],["ón",-11.0022611618042],["▁iyi",-11.002394676208496],["▁molto",-11.002595901489258],["อยู่",-11.002617835998535],["tā",-11.002660751342772],["▁tā",-11.002693176269531],["▁керек",-11.003057479858398],["▁보",-11.0034761428833],["▁organiz",-11.003710746765137],["▁could",-11.004385948181152],["σε",-11.004393577575684],["ʻ",-11.004576683044434],["▁kol",-11.004645347595217],["▁Que",-11.005121231079102],["dro",-11.005536079406738
],["▁Norge",-11.005544662475586],["▁הת",-11.00580883026123],["▁vores",-11.006040573120115],["▁سب",-11.006121635437012],["ทํา",-11.00621223449707],["يء",-11.006491661071776],["위",-11.006494522094728],["aya",-11.00649642944336],["▁หรือ",-11.00698471069336],["isen",-11.00727367401123],["▁negara",-11.007384300231934],["입니다",-11.007415771484377],["သူ",-11.007575035095217],["ningen",-11.007723808288574],["ub",-11.007739067077637],["▁bộ",-11.007861137390137],["ono",-11.008081436157228],["▁Э",-11.00820255279541],["ű",-11.008255958557127],["ľ",-11.00826930999756],["▁тому",-11.008313179016112],["▁kau",-11.008427619934082],["▁주",-11.00845718383789],["接",-11.008532524108888],["skih",-11.008563995361328],["Ф",-11.008584976196287],["空",-11.00872039794922],["▁между",-11.008732795715332],["35",-11.008782386779783],["ive",-11.00887393951416],["ۍ",-11.008889198303224],["lərin",-11.009197235107422],["▁13.",-11.009201049804688],["▁tras",-11.009584426879885],["▁tous",-11.00967025756836],["▁वर्ष",-11.009840965270996],["kami",-11.009902000427246],["▁روی",-11.010133743286133],["放",-11.010149002075195],["▁kang",-11.01046371459961],["▁יותר",-11.010579109191896],["▁pře",-11.010946273803713],["▁bản",-11.010993957519531],["此",-11.011046409606934],["▁stil",-11.011231422424316],["▁Ai",-11.01136302947998],["ard",-11.01142692565918],["給",-11.01150894165039],["▁centro",-11.011590957641602],["▁hij",-11.011608123779297],["luk",-11.011615753173828],["ро",-11.011720657348633],["▁جن",-11.011818885803224],["plan",-11.011960983276367],["▁hội",-11.011991500854492],["zon",-11.01216983795166],["bh",-11.012237548828123],["ská",-11.012358665466309],["表",-11.012375831604004],["eva",-11.012725830078123],["▁lista",-11.01274871826172],["dol",-11.013066291809082],["pol",-11.01350212097168],["보",-11.013664245605469],["▁Trump",-11.013721466064451],["▁Zo",-11.013945579528809],["▁IN",-11.013993263244627],["ური",-11.01419448852539],["き",-11.01424789428711],["ған",-11.014299392700195],["▁fine",-11.014445304870604],["▁การ",-11.014662742614746],["▁үшін",-11.014663696289062],["ვე",-11.014711380004885],["产品",-11.014719009399414],["▁kinh",-11.01491928100586],["με",-11.014986991882324],["▁Ка",-11.015110969543455],["▁tế",-11.015347480773926],["▁இந்த",-11.015562057495115],["▁week",-11.01567554473877],["▁44",-11.015751838684082],["民",-11.015807151794434],["iki",-11.015857696533203],["જ",-11.016019821166992],["▁Kun",-11.01602554321289],["▁Ако",-11.01602840423584],["ской",-11.016308784484863],["▁വി",-11.016316413879396],["▁tiene",-11.016432762145996],["ମ",-11.016497611999512],["▁Sant",-11.016615867614746],["風",-11.01675033569336],["ses",-11.016971588134766],["գ",-11.017040252685549],["▁doar",-11.017549514770508],["ement",-11.017558097839355],["▁dân",-11.017637252807615],["කට",-11.017657279968262],["rod",-11.017831802368164],["▁lot",-11.017924308776855],["▁hvordan",-11.018308639526367],["▁Cara",-11.018314361572266],["bas",-11.0187406539917],["▁Mor",-11.01876735687256],["▁내",-11.018776893615724],["▁grup",-11.018805503845217],["hor",-11.018965721130373],["ρα",-11.019166946411133],["cı",-11.019336700439451],["▁även",-11.019579887390137],["▁proti",-11.019598007202148],["נה",-11.019600868225098],["tom",-11.019749641418455],["北",-11.020112991333008],["нов",-11.020119667053224],["단",-11.020421028137209],["▁Galicia",-11.02043342590332],["▁Ini",-11.020638465881348],["▁част",-11.020663261413574],["zan",-11.020873069763184],["▁начин",-11.020910263061523],["că",-11.02092742919922],["แบบ",-11.02096176147461],["▁사",-11.020978927612305],["病",-11.021263122558594],["▁many",-
11.02129077911377],["ുന്ന",-11.021809577941896],["อย่าง",-11.02194881439209],["தி",-11.02205753326416],["▁massage",-11.02215576171875],["▁【",-11.02219295501709],["▁VI",-11.022443771362305],["▁बि",-11.02260971069336],["▁bine",-11.022683143615724],["▁אל",-11.022712707519531],["uh",-11.022774696350098],["alla",-11.022798538208008],["▁__",-11.022869110107422],["▁सर्व",-11.02298355102539],["200",-11.023048400878906],["oje",-11.023049354553224],["▁Ко",-11.023334503173828],["马",-11.023344039916992],["▁IT",-11.023507118225098],["▁אם",-11.023755073547363],["▁α",-11.02379035949707],["▁hit",-11.02380084991455],["در",-11.023843765258787],["osta",-11.023847579956056],["因為",-11.023942947387695],["ந்த",-11.02394676208496],["stre",-11.024629592895508],["▁dva",-11.024651527404783],["лен",-11.024968147277832],["λ",-11.0250244140625],["▁Tha",-11.025110244750977],["нің",-11.025187492370604],["▁Ö",-11.025352478027344],["pil",-11.025601387023926],["▁osa",-11.025683403015137],["▁Vol",-11.025982856750488],["▁radio",-11.026126861572266],["႔",-11.026598930358888],["▁trebuie",-11.026631355285645],["uni",-11.026741981506348],["ός",-11.026745796203612],["▁حق",-11.026751518249512],["fin",-11.026755332946776],["owego",-11.026755332946776],["ām",-11.026803970336914],["▁Song",-11.026921272277832],["▁Als",-11.027022361755373],["▁घर",-11.027042388916016],["ვა",-11.027067184448242],["▁трябва",-11.027103424072266],["▁foarte",-11.027280807495115],["bur",-11.027527809143066],["സി",-11.027597427368164],["▁тут",-11.02773094177246],["▁tâm",-11.02773666381836],["ುವ",-11.027826309204102],["een",-11.027959823608398],["第一",-11.028401374816896],["▁прав",-11.028562545776367],["fra",-11.028836250305176],["▁ஆ",-11.029013633728027],["▁syn",-11.029196739196776],["▁Th",-11.029213905334473],["▁क्या",-11.029382705688477],["▁ugu",-11.02938747406006],["解",-11.02953815460205],["▁Lietuvos",-11.029603958129885],["يم",-11.029787063598633],["▁ക്ക്",-11.029854774475098],["▁тем",-11.0299072265625],["essa",-11.029908180236816],["יים",-11.029989242553713],["にも",-11.03024673461914],["▁എ",-11.030265808105469],["▁nok",-11.030293464660645],["ரி",-11.030346870422363],["▁κατα",-11.030356407165527],["▁कहा",-11.03059196472168],["kę",-11.03088092803955],["imas",-11.030929565429688],["▁ሲ",-11.030940055847168],["▁دانلود",-11.03116512298584],["sis",-11.031338691711426],["▁multi",-11.03152847290039],["ük",-11.031533241271973],["▁waxaa",-11.03188705444336],["개",-11.031976699829102],["球",-11.032134056091309],["տ",-11.0322265625],["▁kiu",-11.0325288772583],["▁ול",-11.03280544281006],["time",-11.032899856567385],["ив",-11.033235549926758],["▁mala",-11.033445358276367],["▁Mer",-11.03354835510254],["अ",-11.033819198608398],["▁nahi",-11.03386688232422],["лы",-11.033980369567873],["▁өз",-11.034125328063965],["ರೆ",-11.034520149230955],["्य",-11.034551620483398],["食",-11.034660339355469],["▁របស់",-11.034887313842772],["▁such",-11.035000801086426],["हरु",-11.035030364990234],["▁ekonomi",-11.035466194152832],["▁този",-11.035542488098145],["king",-11.035768508911133],["▁leg",-11.035773277282717],["▁대",-11.035837173461914],["▁gian",-11.036128997802734],["▁2003",-11.036210060119627],["▁valor",-11.036247253417969],["▁ی",-11.036406517028809],["▁буде",-11.036554336547852],["out",-11.036591529846191],["▁star",-11.03663158416748],["rán",-11.036806106567385],["ında",-11.037074089050291],["yə",-11.037310600280762],["▁Але",-11.0375337600708],["言",-11.03754425048828],["dag",-11.037843704223633],["▁kultur",-11.0378999710083],["azione",-11.038167953491213],["▁tình",-11.038270950317385],["▁اسلا
م",-11.038277626037598],["▁bola",-11.038325309753418],["▁tip",-11.03835678100586],["kra",-11.038484573364258],["直",-11.0385160446167],["▁ומ",-11.038578033447266],["ã",-11.038875579833984],["▁politik",-11.038921356201172],["ೆ",-11.03897762298584],["ळ",-11.03936004638672],["ux",-11.039363861083984],["▁_--",-11.039385795593262],["▁sang",-11.03966236114502],["▁رد",-11.03969383239746],["▁가",-11.039809226989746],["保",-11.039850234985352],["▁natural",-11.039983749389648],["sit",-11.040108680725098],["▁කරන",-11.040155410766602],["▁고",-11.04018497467041],["~",-11.040306091308594],["▁September",-11.040363311767578],["ಲಿ",-11.040763854980469],["ائي",-11.040783882141112],["որ",-11.040797233581545],["▁eget",-11.040932655334473],["ער",-11.041213035583496],["▁الإ",-11.041245460510254],["み",-11.041325569152832],["▁Red",-11.041327476501465],["▁these",-11.041397094726562],["▁Tai",-11.041455268859863],["ಡ",-11.04166030883789],["ssä",-11.041817665100098],["▁poz",-11.041818618774414],["sak",-11.041871070861816],["ние",-11.0419921875],["▁Те",-11.042108535766602],["院",-11.0424222946167],["▁igen",-11.042454719543455],["▁PC",-11.042466163635254],["లి",-11.04253387451172],["उ",-11.042633056640623],["▁என்று",-11.042859077453612],["住",-11.043098449707031],["▁Dette",-11.043137550354004],["स्",-11.043441772460938],["lab",-11.043561935424805],["յան",-11.043689727783203],["▁더",-11.043927192687988],["▁thi",-11.044097900390623],["യാണ്",-11.04413604736328],["▁ihr",-11.044368743896484],["iť",-11.04463005065918],["▁Af",-11.04466438293457],["▁bảo",-11.04478359222412],["▁Cymru",-11.044857025146484],["ža",-11.044870376586914],["सा",-11.044910430908203],["gon",-11.044950485229492],["▁jó",-11.045103073120115],["阿",-11.045162200927734],["笑",-11.04519271850586],["ane",-11.045287132263184],["asta",-11.045292854309082],["▁jeden",-11.04544162750244],["mist",-11.045662879943848],["▁ကို",-11.045689582824709],["▁ٿو",-11.045747756958008],["▁rol",-11.045879364013672],["▁sedan",-11.04592990875244],["▁st",-11.046082496643066],["▁їх",-11.046210289001465],["kou",-11.046253204345703],["singaw",-11.046334266662598],["▁دل",-11.046356201171877],["▁देश",-11.046401977539062],["amo",-11.046452522277832],["▁වන",-11.04654026031494],["▁mijn",-11.046581268310549],["啊",-11.04667854309082],["رو",-11.046772003173828],["ச்",-11.046785354614258],["▁אז",-11.046792030334473],["स्य",-11.046966552734377],["itat",-11.047012329101562],["lem",-11.04704761505127],["ување",-11.04706573486328],["না",-11.04714298248291],["設計",-11.047195434570312],["▁bag",-11.0474853515625],["ି",-11.047979354858398],["Fi",-11.048052787780762],["جي",-11.048431396484377],["மா",-11.048516273498535],["zio",-11.048587799072266],["▁Bra",-11.048973083496094],["▁مت",-11.049219131469728],["▁wan",-11.049257278442385],["eko",-11.049297332763672],["少",-11.049302101135254],["бра",-11.049376487731934],["eli",-11.049545288085938],["ୀ",-11.049860000610352],["▁нет",-11.049863815307615],["▁بىر",-11.05005168914795],["кон",-11.05012035369873],["maa",-11.050193786621094],["▁tú",-11.050223350524902],["▁ಕ",-11.050305366516112],["▁ഇ",-11.050341606140137],["▁dah",-11.050440788269045],["▁viel",-11.050593376159668],["當",-11.050636291503906],["▁transport",-11.050728797912598],["งาน",-11.050856590270996],["▁really",-11.050872802734377],["▁máy",-11.050890922546388],["▁թե",-11.051029205322266],["▁ଆ",-11.051192283630373],["амі",-11.051484107971191],["▁też",-11.051535606384276],["▁kop",-11.051750183105469],["പ്പ",-11.05198860168457],["inte",-11.052376747131348],["회",-11.05239200592041],["半",-11.052733421325684],["ному",-1
1.052902221679688],["ří",-11.052979469299316],["企業",-11.053098678588867],["▁DO",-11.053112030029297],["▁prej",-11.053152084350586],["▁всички",-11.053793907165527],["Š",-11.053982734680176],["anna",-11.05404567718506],["▁tempat",-11.054058074951172],["йте",-11.054092407226562],["ది",-11.054594039916992],["tii",-11.054787635803224],["▁Dis",-11.05487060546875],["ав",-11.054945945739746],["ване",-11.05509090423584],["ម",-11.055227279663086],["Al",-11.055410385131836],["fen",-11.055456161499023],["šu",-11.055669784545898],["hoz",-11.055720329284668],["▁dá",-11.055875778198242],["▁яго",-11.05606174468994],["▁menggunakan",-11.05618381500244],["▁sor",-11.056463241577148],["ця",-11.056527137756348],["った",-11.056547164916992],["▁اللہ",-11.056611061096191],["▁ple",-11.056641578674316],["▁mau",-11.056708335876465],["ای",-11.0568265914917],["mie",-11.057007789611816],["▁قد",-11.057290077209473],["▁control",-11.05733871459961],["اد",-11.057968139648438],["▁lehet",-11.058032035827637],["dam",-11.058070182800291],["▁ہوئے",-11.05878448486328],["መ",-11.058974266052246],["aria",-11.059354782104492],["共",-11.059465408325195],["▁suka",-11.059657096862791],["▁גע",-11.059812545776367],["nal",-11.059962272644045],["▁noe",-11.060086250305176],["ding",-11.06030559539795],["▁vậy",-11.06076431274414],["▁ਵਿਚ",-11.06084442138672],["▁można",-11.060874938964844],["ア",-11.060903549194336],["▁proses",-11.060914039611816],["▁pis",-11.060914993286133],["▁Mais",-11.060979843139648],["▁България",-11.061040878295898],["را",-11.06108856201172],["▁đề",-11.06112289428711],["▁wê",-11.061291694641112],["▁seda",-11.06136417388916],["建",-11.061491012573242],["▁toàn",-11.06158447265625],["▁peu",-11.061649322509766],["▁ని",-11.06180477142334],["▁איך",-11.062088966369627],["▁اين",-11.06231689453125],["▁ح",-11.06247615814209],["ాన్ని",-11.062524795532228],["led",-11.06272315979004],["▁43",-11.062750816345217],["gue",-11.062807083129885],["kun",-11.062861442565918],["ള",-11.063226699829102],["▁opera",-11.064096450805664],["▁kalau",-11.06412124633789],["ген",-11.064359664916992],["تي",-11.064674377441406],["▁وب",-11.06473445892334],["网",-11.064806938171388],["्या",-11.064860343933104],["周",-11.064921379089355],["▁ven",-11.06509017944336],["▁Dalam",-11.06520175933838],["▁ન",-11.065258026123049],["▁point",-11.065327644348145],["para",-11.065335273742676],["ám",-11.065369606018066],["健康",-11.06545639038086],["▁leo",-11.065515518188477],["iert",-11.06569480895996],["▁15.",-11.065744400024414],["ას",-11.065855979919434],["達",-11.065885543823242],["▁sao",-11.066025733947754],["մ",-11.06641960144043],["▁kanssa",-11.066493034362791],["aus",-11.066662788391112],["سى",-11.067039489746094],["▁kry",-11.067258834838867],["રી",-11.067378044128418],["▁nám",-11.067419052124023],["실",-11.067495346069336],["ף",-11.067556381225586],["ط",-11.067599296569824],["pit",-11.06809425354004],["▁नेपाली",-11.068206787109377],["▁više",-11.068359375],["▁ବି",-11.068482398986816],["ใจ",-11.068614959716797],["▁Bet",-11.06867218017578],["▁kết",-11.068700790405272],["▁گ",-11.068852424621582],["▁hacer",-11.068881034851074],["▁себя",-11.068977355957031],["▁tips",-11.069086074829102],["ių",-11.069299697875977],["▁alta",-11.069600105285645],["오",-11.06963062286377],["зе",-11.069646835327148],["▁چه",-11.069815635681152],["▁អ្នក",-11.069958686828612],["電",-11.07009220123291],["owania",-11.070186614990234],["▁план",-11.07027816772461],["▁roman",-11.070340156555176],["kö",-11.07043170928955],["▁ці",-11.070446014404297],["oldid",-11.070700645446776],["▁ק",-11.070708274841309],["▁malo",
-11.071029663085938],["يو",-11.07144832611084],["zna",-11.071578979492188],["ლი",-11.071630477905272],["lap",-11.071663856506348],["▁phòng",-11.07180404663086],["увати",-11.071931838989258],["▁laga",-11.071946144104004],["pt",-11.072108268737791],["▁tay",-11.072293281555176],["▁tác",-11.072360038757324],["▁600",-11.072393417358398],["mbi",-11.072639465332031],["cin",-11.07298755645752],["▁অ",-11.073193550109863],["未",-11.073479652404783],["▁ainda",-11.073695182800291],["▁nostra",-11.07374095916748],["▁digital",-11.073774337768556],["▁지",-11.07387351989746],["പ",-11.073904037475586],["чка",-11.07392406463623],["首",-11.074057579040527],["ப்பு",-11.07420539855957],["wie",-11.074286460876465],["யை",-11.074524879455566],["со",-11.07453155517578],["格",-11.07493782043457],["▁rồi",-11.075261116027832],["门",-11.075435638427734],["▁الف",-11.075451850891112],["▁być",-11.075531005859377],["▁sér",-11.075812339782717],["▁సినిమా",-11.075987815856934],["▁εν",-11.076010704040527],["ū",-11.076022148132324],["ٹ",-11.07607364654541],["ຍ",-11.076290130615234],["esti",-11.07629680633545],["식",-11.076528549194336],["▁2016.",-11.076568603515623],["▁antes",-11.076725959777832],["ete",-11.076786041259766],["▁stra",-11.076987266540527],["▁үчүн",-11.07707977294922],["▁день",-11.077106475830078],["pp",-11.077187538146973],["▁ка",-11.07736587524414],["യെ",-11.077455520629885],["bro",-11.077474594116213],["▁14.",-11.07749843597412],["IT",-11.077704429626465],["▁lượng",-11.077798843383787],["▁నా",-11.078001976013184],["的是",-11.078057289123535],["木",-11.078154563903809],["▁在",-11.078317642211914],["के",-11.078474044799805],["hus",-11.078596115112305],["ங்கள்",-11.078624725341797],["พ",-11.078889846801758],["ત",-11.078929901123049],["iden",-11.078986167907717],["zar",-11.079017639160156],["က်",-11.079102516174316],["▁waa",-11.079171180725098],["▁এই",-11.07920265197754],["lie",-11.079224586486816],["▁دیا",-11.079267501831056],["▁Gar",-11.079302787780762],["▁க",-11.079334259033203],["dagi",-11.079338073730469],["▁disa",-11.079351425170898],["ካ",-11.079657554626465],["▁కు",-11.079764366149902],["하",-11.07976531982422],["▁>>",-11.0801362991333],["▁ideal",-11.080222129821776],["ු",-11.080342292785645],["▁නිසා",-11.080371856689451],["▁pää",-11.080405235290527],["km",-11.08041763305664],["nic",-11.080442428588867],["eb",-11.080445289611816],["▁thai",-11.08083152770996],["▁DA",-11.080931663513184],["▁City",-11.080999374389648],["umu",-11.081077575683594],["▁مختلف",-11.081079483032228],["▁бойынша",-11.081195831298828],["تى",-11.081600189208984],["ವಾಗಿ",-11.081974983215332],["eket",-11.08202075958252],["يد",-11.082690238952637],["▁einfach",-11.082711219787598],["نه",-11.082731246948242],["▁también",-11.08301830291748],["прав",-11.083158493041992],["▁tutto",-11.083184242248535],["ń",-11.08359146118164],["▁غير",-11.08374309539795],["▁ар",-11.083778381347656],["▁Previous",-11.08387279510498],["ps",-11.083943367004396],["تی",-11.084142684936523],["▁नै",-11.084181785583496],["▁ήταν",-11.084187507629396],["inės",-11.084375381469728],["▁ד",-11.084376335144045],["▁две",-11.084490776062012],["▁kul",-11.08471393585205],["non",-11.08489990234375],["▁serie",-11.085287094116213],["Ne",-11.085351943969728],["▁gel",-11.085490226745604],["▁बाद",-11.085843086242676],["▁Radio",-11.085862159729004],["さ",-11.086222648620604],["▁Sz",-11.08625602722168],["ян",-11.086542129516602],["ిన",-11.086746215820312],["▁aquest",-11.087111473083496],["两",-11.087159156799316],["以上",-11.087206840515137],["▁නෑ",-11.0872220993042],["▁nich",-11.087481498718262],["тын",-11
.087535858154297],["cion",-11.087870597839355],["tok",-11.088004112243652],["▁الى",-11.088008880615234],["▁timp",-11.088054656982422],["石",-11.088139533996582],["▁시",-11.088151931762695],["▁toate",-11.088513374328612],["bis",-11.088557243347168],["inių",-11.088702201843262],["▁pl",-11.088854789733888],["nh",-11.088936805725098],["▁ketika",-11.088985443115234],["▁volgende",-11.089091300964355],["32",-11.089278221130373],["▁р",-11.08940315246582],["▁väl",-11.08950138092041],["09",-11.089569091796877],["▁stato",-11.08969783782959],["ました",-11.08993148803711],["تها",-11.089950561523438],["ными",-11.090011596679688],["ds",-11.09005069732666],["▁፡",-11.090102195739746],["▁Martin",-11.090144157409668],["pra",-11.09038543701172],["اق",-11.090489387512209],["्स",-11.090641975402832],["▁frem",-11.090702056884766],["ingen",-11.09072971343994],["▁במ",-11.090859413146973],["客",-11.09095573425293],["▁41",-11.090968132019045],["ნა",-11.090970993041992],["▁produk",-11.091064453125],["只是",-11.091193199157717],["▁dieser",-11.091313362121582],["▁AS",-11.091382026672363],["玩",-11.091425895690918],["▁än",-11.09157657623291],["▁ден",-11.091670989990234],["▁który",-11.091734886169434],["wer",-11.091752052307127],["sat",-11.0923433303833],["யா",-11.092388153076172],["▁hadde",-11.092476844787598],["mb",-11.092516899108888],["py",-11.092674255371094],["▁bad",-11.092737197875977],["Ha",-11.09286403656006],["ving",-11.092873573303224],["amento",-11.093069076538086],["kala",-11.093186378479004],["▁Bre",-11.09321403503418],["များ",-11.093669891357422],["▁دور",-11.093907356262209],["▁рад",-11.093908309936523],["▁ktorý",-11.094173431396484],["gor",-11.09432315826416],["▁Tan",-11.09447193145752],["▁!!",-11.094684600830078],["▁chất",-11.094696044921877],["дың",-11.094735145568848],["ப",-11.094837188720703],["▁cal",-11.095196723937988],["lom",-11.09539031982422],["▁Sta",-11.095436096191406],["ලි",-11.095438957214355],["▁اب",-11.095662117004396],["▁ווי",-11.095704078674316],["nov",-11.09604549407959],["▁ore",-11.096363067626951],["方式",-11.096518516540527],["▁સાથે",-11.096550941467283],["▁pin",-11.096713066101074],["▁신",-11.096720695495604],["ból",-11.09684944152832],["音",-11.096932411193848],["▁Gra",-11.097009658813477],["台灣",-11.097047805786133],["▁quá",-11.09720230102539],["лся",-11.097223281860352],["▁idea",-11.097305297851562],["▁19.",-11.097424507141112],["▁seks",-11.09752368927002],["▁ທ່ານ",-11.097747802734377],["ου",-11.098249435424805],["▁Maar",-11.098390579223633],["▁ras",-11.098398208618164],["ห",-11.098873138427734],["▁través",-11.098920822143556],["▁cosa",-11.099102020263672],["DA",-11.099527359008787],["ниот",-11.099621772766112],["ಡಿ",-11.099783897399902],["▁dobro",-11.09978485107422],["▁muốn",-11.09986686706543],["▁dus",-11.099932670593262],["▁shi",-11.100035667419434],["主要",-11.100056648254396],["сон",-11.100116729736328],["차",-11.100345611572266],["sun",-11.100351333618164],["ใช้",-11.100397109985352],["ቃ",-11.100433349609377],["၏",-11.10048484802246],["う",-11.100634574890137],["▁කරන්න",-11.100713729858398],["ého",-11.100760459899902],["▁пера",-11.100794792175291],["▁questa",-11.101149559020996],["▁Land",-11.101383209228516],["▁रहे",-11.101487159729004],["▁жа",-11.101752281188965],["▁mempunyai",-11.101768493652344],["▁One",-11.101881980895996],["list",-11.10201930999756],["rus",-11.10206699371338],["▁جدید",-11.102438926696776],["▁Sal",-11.10283088684082],["லி",-11.10292625427246],["▁bằng",-11.102960586547852],["skap",-11.103005409240724],["itet",-11.103398323059082],["目",-11.103445053100586],["▁일",-11.10354518
8903809],["▁hade",-11.103553771972656],["▁quả",-11.103934288024902],["тра",-11.104043960571287],["ائی",-11.10414981842041],["भ",-11.104260444641112],["kah",-11.10431957244873],["ري",-11.104406356811523],["▁бъде",-11.104504585266112],["డ్",-11.104676246643066],["isk",-11.104742050170898],["交",-11.104822158813477],["ško",-11.105289459228516],["vě",-11.105297088623049],["▁;)",-11.105352401733398],["▁Wat",-11.105525970458984],["rsi",-11.10554027557373],["▁dunia",-11.105912208557127],["Á",-11.105990409851074],["▁ସେ",-11.106017112731934],["▁utan",-11.10618782043457],["жи",-11.10633373260498],["ತ",-11.106359481811523],["ien",-11.106600761413574],["երի",-11.10665225982666],["หน้า",-11.106739044189451],["گ",-11.106781005859377],["ಂ",-11.1068115234375],["▁volta",-11.106841087341309],["▁제",-11.106950759887695],["ыя",-11.107107162475586],["▁year",-11.107293128967283],["▁akkor",-11.107317924499512],["▁cuando",-11.107415199279783],["▁ár",-11.107577323913574],["▁්",-11.107579231262209],["制",-11.107789993286133],["▁الن",-11.108022689819336],["Vi",-11.108366012573242],["▁Sin",-11.10840129852295],["▁არა",-11.108469009399414],["▁थियो",-11.108552932739258],["ilo",-11.108564376831056],["eke",-11.108579635620115],["▁conta",-11.108635902404783],["lad",-11.108654022216797],["▁κατά",-11.108656883239746],["直接",-11.10867404937744],["▁Of",-11.108749389648438],["ence",-11.109137535095217],["▁იყო",-11.109156608581545],["▁Ó",-11.109164237976074],["τι",-11.10936164855957],["apa",-11.109447479248049],["▁قال",-11.109624862670898],["▁two",-11.109671592712402],["▁massa",-11.109777450561523],["其中",-11.109896659851074],["▁още",-11.109899520874023],["çi",-11.10991668701172],["▁Ay",-11.110081672668455],[":00",-11.11011028289795],["收",-11.11042022705078],["▁sine",-11.110610961914062],["ary",-11.110703468322754],["ības",-11.110721588134766],["▁because",-11.110764503479004],["▁থেকে",-11.111075401306152],["stro",-11.111209869384766],["რ",-11.11122226715088],["▁cer",-11.11148452758789],["É",-11.111658096313477],["▁eftir",-11.112060546875],["ture",-11.112164497375488],["pul",-11.11219882965088],["▁Como",-11.11228084564209],["▁paar",-11.112361907958984],["▁mail",-11.112391471862791],["▁(0)",-11.112419128417969],["▁ago",-11.112483978271484],["ord",-11.112574577331545],["oc",-11.112852096557615],["▁Tor",-11.112903594970703],["წ",-11.112982749938965],["▁האט",-11.113100051879885],["落",-11.113110542297363],["▁chủ",-11.113332748413086],["▁מת",-11.113408088684082],["eze",-11.113574028015137],["lere",-11.113682746887209],["▁World",-11.113794326782228],["▁წლის",-11.113844871520996],["ეს",-11.114012718200684],["ium",-11.114068031311035],["香港",-11.114081382751465],["คุณ",-11.114089012145996],["ып",-11.114106178283691],["▁것",-11.114280700683594],["ike",-11.114402770996094],["▁agar",-11.114418029785156],["▁grad",-11.114668846130373],["وی",-11.11467170715332],["▁уз",-11.114702224731444],["▁дел",-11.114707946777344],["▁غیر",-11.114726066589355],["田",-11.11485195159912],["▁ක",-11.114867210388184],["▁Sol",-11.114937782287598],["却",-11.11500072479248],["ਟ",-11.115182876586914],["▁פאר",-11.115313529968262],["ດ",-11.115315437316896],["pak",-11.115418434143066],["▁ialah",-11.115840911865234],["▁다",-11.115857124328612],["▁halamang",-11.116312980651855],["▁ăn",-11.116551399230955],["我的",-11.116629600524902],["商品",-11.11673355102539],["ए",-11.116943359375],["NA",-11.116947174072266],["▁öz",-11.116991996765137],["▁información",-11.11700439453125],["▁අතර",-11.11703872680664],["▁كه",-11.11728858947754],["Me",-11.117308616638184],["▁وزیر",-11.117490768432615],["▁इ
",-11.117531776428224],["▁हुने",-11.11758518218994],["sins",-11.117586135864258],["▁कारण",-11.117698669433594],["డి",-11.117897033691406],["SA",-11.118021965026855],["elt",-11.11805248260498],["▁소",-11.11816692352295],["ES",-11.11821460723877],["цца",-11.11845588684082],["▁سن",-11.11856746673584],["į",-11.118972778320312],["强",-11.119147300720217],["什么",-11.119155883789062],["▁game",-11.119410514831545],["▁look",-11.11941623687744],["тин",-11.119441032409668],["ใหม่",-11.119664192199709],["▁ст",-11.119699478149414],["世",-11.11977767944336],["▁tốt",-11.119803428649902],["ေတြ",-11.11996078491211],["관",-11.120051383972168],["▁kis",-11.120156288146973],["կ",-11.120320320129396],["ario",-11.1204252243042],["ቶ",-11.120561599731444],["▁Ç",-11.120930671691896],["▁edib",-11.121094703674316],["ön",-11.121214866638184],["深",-11.121280670166016],["▁rá",-11.121437072753906],["▁කළ",-11.121487617492676],["▁keine",-11.121644020080566],["▁Δ",-11.121786117553713],["යට",-11.122010231018066],["noj",-11.122035026550291],["ун",-11.122097969055176],["வி",-11.122236251831056],["об",-11.122268676757812],["▁εκ",-11.12245750427246],["▁þar",-11.12246036529541],["조",-11.122514724731444],["ř",-11.12271785736084],["▁මට",-11.122722625732422],["ute",-11.1227445602417],["▁waktu",-11.122838973999023],["▁tính",-11.122929573059082],["01",-11.12294101715088],["▁նաեւ",-11.122950553894045],["▁علي",-11.122964859008787],["mə",-11.123170852661133],["▁ՀՀ",-11.123220443725586],["所有",-11.12323760986328],["å",-11.123292922973633],["▁സ",-11.123384475708008],["▁ना",-11.123419761657717],["▁SE",-11.12342357635498],["▁වි",-11.123575210571287],["▁także",-11.123745918273926],["taka",-11.12387466430664],["አ",-11.123920440673828],["▁Tar",-11.124000549316406],["▁також",-11.124082565307615],["▁čas",-11.12424373626709],["ann",-11.124284744262695],["▁siya",-11.124335289001465],["▁David",-11.124510765075684],["Be",-11.124520301818848],["中國",-11.12470531463623],["便",-11.124709129333496],["kus",-11.124837875366213],["▁საქართველოს",-11.124956130981444],["▁بخش",-11.124971389770508],["ผู้",-11.125054359436035],["nit",-11.12521743774414],["hla",-11.125245094299316],["우",-11.12526512145996],["、「",-11.125332832336426],["▁did",-11.125336647033691],["场",-11.12562370300293],["با",-11.125853538513184],["▁~",-11.126018524169922],["oso",-11.126092910766602],["▁great",-11.126120567321776],["▁consectetur",-11.126230239868164],["▁tyre",-11.126365661621094],["oval",-11.126445770263672],["布",-11.12653636932373],["だ",-11.126554489135742],["▁کرتے",-11.12663459777832],["▁2001",-11.126749038696287],["ира",-11.127453804016112],["▁(6)",-11.127503395080566],["ota",-11.127538681030272],["▁انجام",-11.127630233764648],["мер",-11.127677917480469],["gur",-11.127911567687988],["ell",-11.12794589996338],["▁hver",-11.12795066833496],["▁aut",-11.12798023223877],["▁lige",-11.127981185913086],["▁pháp",-11.128056526184082],["ਿਆ",-11.128081321716309],["▁fakt",-11.128090858459473],["taa",-11.128230094909668],["▁대한",-11.128236770629885],["▁están",-11.12850284576416],["项目",-11.128582954406738],["ಸಿ",-11.128698348999023],["社",-11.128808975219728],["▁dintre",-11.128883361816406],["ať",-11.128884315490724],["▁vain",-11.128960609436035],["▁аз",-11.129018783569336],["hez",-11.129433631896973],["のは",-11.129456520080566],["ier",-11.129545211791992],["▁حقوق",-11.129664421081545],["▁fall",-11.1296968460083],["▁bur",-11.13001823425293],["களை",-11.13002586364746],["gin",-11.130034446716309],["mba",-11.130046844482422],["▁kerana",-11.13005542755127],["▁կարող",-11.130109786987305],["每",-11.13016700744628
7],["ئي",-11.13021469116211],["▁людей",-11.13040256500244],["cal",-11.130585670471191],["yar",-11.130643844604492],["ୁ",-11.13068389892578],["孩子",-11.130769729614258],["▁nama",-11.130799293518066],["분",-11.130892753601074],["ony",-11.130900382995604],["▁خلاف",-11.131010055541992],["▁reg",-11.131176948547363],["imit",-11.131391525268556],["rek",-11.131433486938477],["야",-11.131460189819336],["ጠ",-11.131539344787598],["▁thường",-11.131616592407228],["▁այն",-11.131709098815918],["▁hiệu",-11.131970405578612],["ғы",-11.132039070129396],["▁خو",-11.132247924804688],["▁bija",-11.13234519958496],["hol",-11.132452011108398],["▁isə",-11.132468223571776],["▁Amerika",-11.132767677307127],["▁16.",-11.132818222045898],["kker",-11.132827758789062],["▁isso",-11.133108139038086],["ਦ",-11.133116722106934],["ପ",-11.133164405822754],["▁fem",-11.133186340332031],["zbekiston",-11.133195877075195],["常",-11.133200645446776],["▁pēc",-11.133233070373535],["油",-11.13323974609375],["▁PDF",-11.133338928222656],["▁игра",-11.134096145629885],["▁سا",-11.134254455566406],["dus",-11.134271621704102],["▁its",-11.134297370910645],["▁vera",-11.134565353393556],["hel",-11.134947776794434],["▁افراد",-11.13524055480957],["▁live",-11.135255813598633],["▁comment",-11.135400772094728],["▁muss",-11.135419845581056],["▁kde",-11.135443687438965],["▁Ар",-11.135671615600586],["▁See",-11.135839462280272],["75",-11.136157989501951],["uda",-11.136357307434082],["ούν",-11.136360168457031],["ಯನ್ನು",-11.13652229309082],["▁prav",-11.136602401733398],["лись",-11.136770248413086],["▁във",-11.13697624206543],["ила",-11.136991500854492],["完成",-11.137152671813965],["▁život",-11.137236595153809],["ുക",-11.137287139892578],["▁xuất",-11.137375831604004],["▁apo",-11.137414932250977],["▁ڪرڻ",-11.13767433166504],["दा",-11.137770652770996],["▁안",-11.138062477111816],["sion",-11.138267517089844],["ಳ",-11.138286590576172],["▁Се",-11.138306617736816],["isten",-11.1383638381958],["ნ",-11.138436317443848],["ത്തെ",-11.13854694366455],["对于",-11.138625144958496],["▁(3",-11.13865089416504],["▁life",-11.138665199279783],["кан",-11.13884449005127],["房",-11.13903522491455],["▁Zu",-11.139092445373535],["jat",-11.139105796813965],["lıq",-11.13912868499756],["ró",-11.139245986938477],["ಣ",-11.139280319213867],["Ц",-11.139416694641112],["你的",-11.13943576812744],["тың",-11.13955307006836],["หรือ",-11.139878273010254],["▁toda",-11.13998317718506],["▁ЗА",-11.140301704406738],["टी",-11.140399932861328],["▁può",-11.140427589416504],["▁Latvijas",-11.14055061340332],["వా",-11.140830039978027],["лык",-11.140918731689451],["▁melakukan",-11.141191482543944],["έ",-11.141472816467283],["ප",-11.141542434692385],["▁ligger",-11.14165210723877],["gir",-11.141709327697754],["▁Ir",-11.141765594482422],["▁ਵੀ",-11.141864776611328],["kul",-11.141912460327148],["▁điểm",-11.141939163208008],["kaan",-11.141940116882324],["jā",-11.14194679260254],["▁समय",-11.142024993896484],["ടി",-11.142105102539062],["Р",-11.142107963562012],["ίας",-11.142236709594728],["kol",-11.1422700881958],["▁ਕਰ",-11.142316818237305],["ování",-11.142474174499512],["▁др",-11.142508506774902],["ங்க",-11.142576217651367],["tag",-11.14264965057373],["ვი",-11.14323616027832],["ต่อ",-11.143346786499023],["▁trí",-11.143370628356934],["▁prije",-11.143380165100098],["▁độ",-11.143387794494627],["nji",-11.14345932006836],["유",-11.143487930297852],["▁нужно",-11.143527030944824],["नि",-11.143577575683594],["▁šo",-11.143625259399414],["mmer",-11.143919944763184],["ových",-11.14393138885498],["▁Yo",-11.144046783447266],["ობა",-11.14426040
649414],["în",-11.14462661743164],["▁२",-11.144767761230469],["sing",-11.145059585571287],["test",-11.145101547241213],["▁Gu",-11.145132064819336],["▁cảm",-11.14526653289795],["▁big",-11.145304679870604],["igh",-11.145437240600586],["代表",-11.1456937789917],["▁ज",-11.145758628845217],["▁რომელიც",-11.145822525024414],["▁перед",-11.145981788635254],["به",-11.14617919921875],["esc",-11.146238327026367],["osti",-11.146319389343262],["开始",-11.146404266357422],["gun",-11.146485328674316],["▁personas",-11.14654541015625],["ար",-11.14678192138672],["ጣ",-11.146835327148438],["ણ",-11.14700412750244],["▁úr",-11.147252082824709],["▁mé",-11.147309303283691],["ده",-11.147351264953612],["నా",-11.147665977478027],["▁ใน",-11.14772129058838],["nings",-11.147771835327148],["今年",-11.148088455200195],["Ja",-11.148265838623049],["ፈ",-11.148451805114746],["▁hoặc",-11.148486137390137],["port",-11.148542404174805],["▁لو",-11.14861011505127],["لى",-11.14869213104248],["▁vê",-11.148734092712402],["理",-11.148749351501465],["▁Sau",-11.14884090423584],["적인",-11.148920059204102],["酒店",-11.148996353149414],["unda",-11.149027824401855],["ase",-11.149229049682615],["inə",-11.14955234527588],["▁ਕੇ",-11.149561882019045],["ök",-11.150212287902832],["選",-11.15024185180664],["▁nhiên",-11.150251388549805],["ór",-11.15047550201416],["▁tiếp",-11.150784492492676],["▁mål",-11.150954246520996],["▁unui",-11.151045799255373],["▁loc",-11.151077270507812],["▁qual",-11.151195526123049],["▁teen",-11.151391983032228],["▁Ս",-11.151411056518556],["活动",-11.151422500610352],["件",-11.15158462524414],["▁plat",-11.151589393615724],["sp",-11.151771545410156],["▁waxa",-11.15185546875],["alt",-11.152057647705078],["ර්",-11.152073860168455],["▁فقط",-11.15211009979248],["vali",-11.152191162109377],["ထား",-11.152362823486328],["ଙ୍କୁ",-11.152427673339844],["ிய",-11.152689933776855],["▁tiu",-11.152783393859863],["jí",-11.152856826782228],["ង",-11.15294361114502],["jä",-11.15299129486084],["▁mask",-11.153017044067385],["▁kus",-11.15318775177002],["ము",-11.153409957885742],["قا",-11.15345573425293],["ено",-11.153518676757812],["▁download",-11.153566360473633],["ງ",-11.153692245483398],["▁setiap",-11.153742790222168],["යා",-11.153751373291016],["und",-11.153879165649414],["лась",-11.153902053833008],["▁ради",-11.154085159301758],["▁olduğunu",-11.154091835021973],["ไทย",-11.154094696044922],["̣",-11.154151916503906],["інің",-11.154229164123535],["▁oldu",-11.15453815460205],["▁מו",-11.154563903808594],["ee",-11.15463924407959],["uj",-11.154805183410645],["管",-11.15496063232422],["▁energi",-11.155040740966797],["ବା",-11.155080795288086],["про",-11.155123710632324],["▁quanto",-11.155145645141602],["재",-11.155180931091309],["▁svo",-11.15522575378418],["城市",-11.155355453491213],["00",-11.15546703338623],["miş",-11.155476570129396],["总",-11.155598640441896],["žu",-11.155736923217772],["▁Монгол",-11.15577507019043],["ум",-11.1559419631958],["学校",-11.156099319458008],["す",-11.156171798706056],["hle",-11.156271934509276],["▁այդ",-11.156288146972656],["ലി",-11.156466484069824],["▁چند",-11.156548500061035],["пе",-11.156939506530762],["▁thiết",-11.157011032104492],["▁정",-11.157060623168944],["没",-11.157149314880373],["வை",-11.157337188720703],["▁మ",-11.157721519470217],["▁tới",-11.157732963562012],["▁then",-11.15800666809082],["▁Lorem",-11.15815544128418],["zza",-11.15818691253662],["▁April",-11.158228874206545],["kii",-11.158380508422852],["▁tetapi",-11.15842056274414],["enda",-11.15848445892334],["လာ",-11.15886688232422],["▁17.",-11.158992767333984],["put",-11.1590604782
1045],["▁Afrika",-11.159077644348145],["fr",-11.159342765808104],["ጥ",-11.15937328338623],["了解",-11.159383773803713],["▁US",-11.159425735473633],["ட",-11.15959358215332],["방",-11.15982723236084],["▁20.",-11.159945487976074],["▁mind",-11.160090446472168],["дзе",-11.160194396972656],["shme",-11.160600662231444],["▁اليوم",-11.160711288452148],["dre",-11.16073226928711],["▁비",-11.160754203796388],["ray",-11.160958290100098],["▁همه",-11.161046981811523],["중",-11.16127872467041],["gmail",-11.161495208740234],["ೇ",-11.161523818969728],["ndan",-11.16153049468994],["▁marca",-11.161917686462402],["▁hệ",-11.161992073059082],["李",-11.16201114654541],["ech",-11.162243843078612],["sur",-11.162323951721191],["▁after",-11.162342071533203],["▁seva",-11.162631034851074],["mata",-11.162908554077148],["▁തന്നെ",-11.162912368774414],["▁امام",-11.162945747375488],["▁right",-11.162976264953612],["▁hva",-11.162981033325195],["од",-11.16310977935791],["ସ",-11.163405418395996],["slag",-11.163424491882324],["vara",-11.16342830657959],["ப்ப",-11.163658142089844],["▁ml",-11.163681030273438],["ド",-11.163726806640623],["ský",-11.1637601852417],["ör",-11.163823127746582],["bla",-11.16387939453125],["▁zna",-11.163991928100586],["bli",-11.164156913757324],["▁کوئی",-11.164298057556152],["▁news",-11.16443157196045],["▁biz",-11.164618492126465],["ння",-11.16462230682373],["▁huo",-11.16480541229248],["▁već",-11.16481876373291],["▁کتاب",-11.16484832763672],["▁maak",-11.16486930847168],["uti",-11.164898872375488],["▁treba",-11.164956092834473],["uoti",-11.164987564086914],["стра",-11.165143013000488],["gre",-11.165204048156738],["สําหรับ",-11.165255546569824],["ult",-11.165349006652832],["ður",-11.165446281433104],["ī",-11.165472030639648],["▁بۇ",-11.165698051452637],["▁Cum",-11.165727615356444],["▁ogni",-11.165810585021973],["▁Social",-11.165918350219728],["uit",-11.165921211242676],["iad",-11.165924072265623],["▁Pred",-11.166114807128906],["ID",-11.166281700134276],["▁داد",-11.166301727294922],["▁боюнча",-11.166335105895996],["风",-11.166418075561523],["能够",-11.166855812072754],["fri",-11.166951179504396],["თან",-11.167214393615724],["全球",-11.167272567749023],["ance",-11.167288780212402],["▁bro",-11.167312622070312],["▁album",-11.1674222946167],["tari",-11.167513847351074],["▁ski",-11.167569160461426],["▁아",-11.167718887329102],["ტი",-11.167768478393556],["▁również",-11.167779922485352],["ques",-11.16786289215088],["তা",-11.167865753173828],["tay",-11.167906761169434],["方面",-11.167928695678713],["cem",-11.168242454528809],["▁jedno",-11.16824436187744],["wikipedia",-11.16833209991455],["▁notre",-11.16848087310791],["▁büyük",-11.168583869934082],["при",-11.168745040893556],["▁रहा",-11.168841361999512],["▁trị",-11.168997764587402],["▁ક",-11.169000625610352],["▁team",-11.169198036193848],["▁который",-11.169224739074709],["news",-11.16929531097412],["മാണ്",-11.169317245483398],["вар",-11.169594764709473],["bili",-11.169610023498535],["دي",-11.16982650756836],["lov",-11.169866561889648],["▁למ",-11.170032501220703],["û",-11.170069694519045],["▁করা",-11.170123100280762],["कार",-11.170212745666504],["...”",-11.170281410217283],["▁ting",-11.17031478881836],["انه",-11.170358657836914],["rk",-11.170422554016112],["್",-11.170477867126465],["Po",-11.170493125915527],["▁кто",-11.17057991027832],["히",-11.170845985412598],["▁בה",-11.171431541442873],["хи",-11.17175579071045],["只有",-11.171875953674316],["▁PM",-11.172161102294922],["▁mem",-11.172179222106934],["自然",-11.172255516052246],["פ",-11.172301292419434],["▁manera",-11.172324180603027],["vir
",-11.172377586364746],["ост",-11.172410011291504],["▁maken",-11.172562599182127],["▁زندگی",-11.17284870147705],["eti",-11.17304515838623],["任",-11.173209190368652],["లా",-11.173218727111816],["▁ans",-11.17326831817627],["▁chức",-11.173285484313965],["্",-11.17336654663086],["▁שי",-11.173370361328123],["까지",-11.173483848571776],["ଲା",-11.173506736755373],["▁இ",-11.173550605773926],["▁tạo",-11.173590660095217],["▁49",-11.17372989654541],["szt",-11.173843383789062],["ków",-11.173872947692873],["▁52",-11.173900604248049],["▁Τα",-11.174087524414062],["▁lokal",-11.174097061157228],["▁Sha",-11.17416286468506],["▁case",-11.174217224121094],["▁deve",-11.174271583557127],["▁00",-11.174681663513184],["▁made",-11.174697875976562],["つ",-11.17473602294922],["isha",-11.174749374389648],["▁तक",-11.175025939941406],["aha",-11.175068855285645],["▁موجود",-11.175138473510742],["字",-11.175251007080078],["Bu",-11.175315856933594],["°",-11.17544937133789],["ml",-11.17568588256836],["▁Kontakt",-11.175793647766112],["ría",-11.175801277160645],["ગ",-11.17587184906006],["är",-11.175899505615234],["百",-11.176006317138672],["▁נישט",-11.176102638244627],["▁Microsoft",-11.176298141479492],["▁21.",-11.176431655883787],["ист",-11.17658805847168],["▁deres",-11.176668167114258],["wyr",-11.176674842834473],["ിന്റെ",-11.176772117614746],["ind",-11.17682933807373],["An",-11.176885604858398],["学生",-11.176913261413574],["▁antara",-11.176941871643066],["▁ან",-11.177000045776367],["▁fazer",-11.177041053771973],["ေရး",-11.177091598510742],["▁ის",-11.17740249633789],["ाने",-11.177574157714844],["話",-11.178021430969238],["fan",-11.178155899047852],["ाच्या",-11.178159713745115],["▁folk",-11.178167343139648],["ral",-11.178455352783203],["▁عنوان",-11.178592681884766],["▁מיט",-11.178829193115234],["一次",-11.179034233093262],["古",-11.17910385131836],["encia",-11.179120063781738],["▁sống",-11.17920207977295],["▁Paris",-11.179234504699709],["▁bun",-11.179309844970703],["学习",-11.179354667663574],["???",-11.179428100585938],["rne",-11.179529190063477],["▁Vis",-11.179590225219728],["イ",-11.179713249206545],["▁বি",-11.1798734664917],["▁causa",-11.180106163024902],["▁stop",-11.180235862731934],["▁ጋር",-11.18032455444336],["ത്തിന്റെ",-11.180468559265137],["یر",-11.181215286254885],["▁жыл",-11.181349754333496],["▁edir",-11.18142318725586],["OS",-11.181528091430664],["무",-11.181636810302734],["ուն",-11.1816987991333],["▁taip",-11.181966781616213],["▁:-)",-11.182005882263184],["pla",-11.182144165039062],["▁Kat",-11.182194709777832],["▁സി",-11.182535171508787],["آ",-11.182541847229004],["डी",-11.182849884033203],["▁työ",-11.182860374450684],["bol",-11.183050155639648],["『",-11.18311882019043],["▁elle",-11.183452606201172],["zin",-11.183504104614258],["يون",-11.183541297912598],["ລາວ",-11.183589935302734],["ít",-11.18360996246338],["▁هغه",-11.184344291687012],["▁mos",-11.18450164794922],["▁ਸੀ",-11.18474578857422],["▁4-",-11.184758186340332],["▁direkt",-11.184775352478027],["ਆਂ",-11.18478012084961],["І",-11.184786796569824],["али",-11.184831619262695],["▁original",-11.184853553771973],["▁لي",-11.18496322631836],["ára",-11.185076713562012],["▁والے",-11.18512725830078],["▁він",-11.185208320617676],["▁uku",-11.18529987335205],["▁โดย",-11.18530559539795],["清",-11.185359954833984],["info",-11.185430526733398],["黑",-11.185434341430664],["▁मन",-11.18575382232666],["ศ",-11.185798645019531],["▁loại",-11.185811042785645],["ню",-11.18581199645996],["▁Far",-11.185871124267578],["ać",-11.185911178588867],["വി",-11.185979843139648],["rem",-11.186327934265137],["▁больш
е",-11.186331748962402],["лет",-11.186372756958008],["▁Мо",-11.186447143554688],["pus",-11.186758041381836],["jte",-11.186915397644045],["▁liv",-11.187063217163086],["他的",-11.187063217163086],["เพื่อ",-11.187093734741213],["iza",-11.187209129333496],["▁EN",-11.187260627746582],["lerini",-11.187345504760742],["ppe",-11.187408447265623],["air",-11.187448501586914],["▁부",-11.187471389770508],["кова",-11.187572479248049],["ld",-11.187870025634766],["▁Bal",-11.188034057617188],["호",-11.188040733337402],["llä",-11.188055038452148],["▁baie",-11.188063621520996],["▁jen",-11.18813419342041],["ਹ",-11.188187599182127],["inta",-11.188265800476074],["hod",-11.188271522521973],["เลย",-11.188522338867188],["▁Ker",-11.188682556152344],["科技",-11.188773155212402],["▁mah",-11.1887788772583],["toj",-11.188929557800291],["ിൽ",-11.189057350158691],["▁cre",-11.189499855041504],["2014",-11.189535140991213],["買",-11.189894676208496],["▁Bur",-11.189956665039062],["或者",-11.190128326416016],["▁tussen",-11.19029426574707],["тор",-11.19032382965088],["gia",-11.190340042114258],["星",-11.190496444702148],["任何",-11.190644264221191],["▁ער",-11.190664291381836],["▁Nord",-11.190840721130373],["▁ప",-11.190960884094238],["ischen",-11.191010475158691],["esi",-11.19109344482422],["▁Sar",-11.191097259521484],["▁ე",-11.191105842590332],["▁vị",-11.19127082824707],["▁ఇ",-11.191425323486328],["▁tài",-11.191818237304688],["Ş",-11.192133903503418],["▁راه",-11.192306518554688],["▁↑",-11.192441940307615],["这种",-11.19246768951416],["▁gjøre",-11.192523002624512],["▁cam",-11.192607879638672],["▁giới",-11.192639350891112],["▁નથી",-11.192668914794922],["▁በመ",-11.192756652832031],["जी",-11.193021774291992],["▁sampai",-11.193085670471191],["▁jednak",-11.193097114562988],["▁én",-11.193181991577148],["▁paling",-11.193195343017578],["бек",-11.193353652954102],["ој",-11.193660736083984],["▁át",-11.193660736083984],["ży",-11.193670272827148],["▁Oslo",-11.193857192993164],["▁pur",-11.194059371948242],["東",-11.194212913513184],["▁bliver",-11.194255828857422],["▁water",-11.194318771362305],["不要",-11.194469451904297],["bí",-11.194652557373049],["mad",-11.194684028625488],["ница",-11.194764137268066],["րդ",-11.19482135772705],["र्",-11.194860458374023],["作为",-11.194926261901855],["部分",-11.194926261901855],["▁kada",-11.195040702819824],["▁اپنی",-11.19504737854004],["▁էլ",-11.195219039916992],["▁find",-11.195379257202148],["▁mano",-11.195561408996582],["▁σου",-11.195707321166992],["▁pia",-11.196128845214844],["▁bệnh",-11.196157455444336],["▁Jahr",-11.196253776550291],["▁आहेत",-11.196288108825684],["波",-11.19637680053711],["▁mee",-11.196611404418944],["▁Bor",-11.196704864501951],["ሪ",-11.196928977966309],["▁họ",-11.196985244750977],["▁نور",-11.19699478149414],["▁pop",-11.19700050354004],["▁pernah",-11.197097778320312],["ਪ",-11.197117805480955],["®",-11.197227478027344],["▁nivel",-11.197246551513672],["▁moja",-11.197269439697266],["ness",-11.197345733642578],["▁болон",-11.197396278381348],["▁اسلامی",-11.197540283203123],["▁(18",-11.197627067565918],["間",-11.197906494140623],["▁Gratis",-11.19796085357666],["bes",-11.197965621948242],["ақ",-11.198026657104492],["▁where",-11.198165893554688],["▁dute",-11.19818878173828],["▁pré",-11.198351860046388],["দ",-11.198631286621094],["tą",-11.198641777038574],["ként",-11.19875144958496],["▁szerint",-11.198781967163086],["动",-11.198820114135742],["ေန",-11.198885917663574],["▁mua",-11.19909954071045],["▁ZA",-11.199135780334473],["▁ishte",-11.199139595031738],["称",-11.199200630187988],["ân",-11.19932746887207],["▁пов",-11.1
99467658996582],["▁ترین",-11.199481964111328],["gna",-11.199604034423828],["မှာ",-11.199620246887209],["ról",-11.199678421020508],["▁Од",-11.19988250732422],["ey",-11.20029067993164],["今天",-11.200397491455078],["ungen",-11.200421333312988],["▁기",-11.200502395629885],["▁still",-11.200557708740234],["▁Mon",-11.200590133666992],["ո",-11.200591087341309],["▁оны",-11.200653076171877],["進",-11.200728416442873],["ઓ",-11.200922966003418],["owanie",-11.200960159301758],["▁أي",-11.20118522644043],["▁ڪيو",-11.201272010803224],["▁Bei",-11.201290130615234],["ото",-11.20131015777588],["प्र",-11.201376914978027],["▁સ",-11.201459884643556],["▁fun",-11.201482772827148],["ਸ਼",-11.20150375366211],["▁biri",-11.201528549194336],["មាន",-11.20153522491455],["▁marketing",-11.201557159423828],["▁گا",-11.201567649841309],["ໃນ",-11.201667785644531],["▁organ",-11.20169162750244],["款",-11.201709747314451],["ظ",-11.20173454284668],["▁Беларусі",-11.201801300048828],["平台",-11.201807975769045],["张",-11.201950073242188],["▁Pero",-11.202016830444336],["▁przed",-11.202309608459473],["Ã",-11.202619552612305],["▁tiempo",-11.202656745910645],["▁कुछ",-11.202874183654783],["ppa",-11.203167915344238],["Σ",-11.203181266784668],["▁tren",-11.203476905822754],["প",-11.203601837158203],["支持",-11.20368766784668],["▁több",-11.203739166259766],["▁בין",-11.2037935256958],["yang",-11.20385456085205],["፡",-11.20393180847168],["یل",-11.203944206237791],["ख",-11.203977584838867],["нага",-11.203998565673828],["ਰੀ",-11.204264640808104],["▁ר",-11.204300880432127],["ន",-11.204344749450684],["fel",-11.20439338684082],["ני",-11.204547882080078],["那么",-11.204776763916016],["फ",-11.20480728149414],["way",-11.20503044128418],["▁cat",-11.205422401428224],["▁mora",-11.205552101135254],["▁हजार",-11.205862998962402],["▁memang",-11.2058744430542],["▁može",-11.206430435180664],["ppi",-11.20644760131836],["巴",-11.206628799438477],["pos",-11.206706047058104],["▁chiar",-11.20678997039795],["▁dun",-11.20680046081543],["▁Sh",-11.20684814453125],["▁เป็น",-11.207042694091797],["▁horas",-11.207056999206545],["▁olyan",-11.207159996032717],["니",-11.207395553588867],["▁samme",-11.207415580749512],["▁mondo",-11.207422256469728],["lli",-11.207437515258787],["вали",-11.20759391784668],["”。",-11.20763111114502],["tore",-11.207711219787598],["▁göra",-11.207791328430176],["olo",-11.207874298095703],["▁vya",-11.207892417907717],["மை",-11.208088874816896],["▁oni",-11.208162307739258],["▁ពី",-11.208232879638672],["ਡ",-11.20845890045166],["▁dove",-11.208497047424316],["▁מי",-11.208516120910645],["一般",-11.208541870117188],["δ",-11.2086763381958],["▁sh",-11.20878791809082],["ခဲ့",-11.20901107788086],["ियों",-11.209035873413086],["▁Sun",-11.20921230316162],["larına",-11.209222793579102],["ตาม",-11.209239959716797],["▁съм",-11.209304809570312],["ការ",-11.20931339263916],["▁મ",-11.209404945373535],["vät",-11.209442138671877],["ės",-11.209643363952637],["爱",-11.210128784179688],["▁zich",-11.210192680358888],["sche",-11.210214614868164],["▁sera",-11.21023178100586],["▁Β",-11.210290908813477],["▁gor",-11.210314750671388],["dit",-11.210424423217772],["▁down",-11.210432052612305],["這個",-11.210451126098633],["mbo",-11.210493087768556],["bin",-11.210603713989258],["dr",-11.2106351852417],["nk",-11.210705757141112],["▁posto",-11.210744857788086],["сто",-11.210820198059082],["▁svoj",-11.210871696472168],["ати",-11.210952758789062],["▁yake",-11.211146354675291],["ოს",-11.211224555969238],["ről",-11.21124267578125],["▁حيث",-11.211292266845703],["▁tamén",-11.21129322052002],["▁hər",-11.2113294601
44045],["▁način",-11.211629867553713],["▁November",-11.21169662475586],["▁sei",-11.211837768554688],["ٽ",-11.212162971496582],["▁برنامه",-11.2122802734375],["▁kim",-11.212566375732422],["itu",-11.212705612182615],["мет",-11.21272087097168],["▁ස",-11.212950706481934],["▁đường",-11.21301555633545],["러",-11.213278770446776],["▁오",-11.213371276855469],["들이",-11.213590621948242],["▁fest",-11.213740348815918],["ურ",-11.213829040527344],["▁pár",-11.21394157409668],["▁toho",-11.214410781860352],["▁ona",-11.214545249938965],["▁þá",-11.214681625366213],["まで",-11.215073585510254],["▁Auf",-11.215106964111328],["τά",-11.215133666992188],["mel",-11.215174674987791],["οί",-11.2152681350708],["mia",-11.215299606323242],["反",-11.215466499328612],["▁800",-11.21555519104004],["甚至",-11.21557331085205],["▁bütün",-11.215662002563477],["▁IV",-11.215713500976562],["ץ",-11.215913772583008],["cre",-11.216065406799316],["кой",-11.21608829498291],["Ar",-11.216121673583984],["▁zá",-11.21616554260254],["ល",-11.216269493103027],["▁outros",-11.216330528259276],["▁якія",-11.21649932861328],["▁kontrol",-11.216524124145508],["▁fog",-11.21653175354004],["▁46",-11.216594696044922],["▁hom",-11.216597557067873],["El",-11.217084884643556],["▁بعض",-11.217681884765623],["▁!!!",-11.217742919921877],["▁však",-11.21779727935791],["နဲ႔",-11.217942237854004],["▁ingin",-11.217967987060549],["早",-11.218341827392578],["▁nay",-11.218451499938965],["▁тази",-11.218464851379396],["▁semana",-11.218749046325684],["▁Це",-11.218899726867676],["Ε",-11.218951225280762],["▁بات",-11.219501495361328],["tica",-11.219767570495604],["▁thức",-11.21983814239502],["▁были",-11.219849586486816],["▁show",-11.219915390014648],["ün",-11.219930648803713],["당",-11.219938278198242],["ତ",-11.219942092895508],["▁logo",-11.219996452331545],["▁Qui",-11.22007656097412],["▁dela",-11.220144271850586],["处",-11.220219612121582],["▁fet",-11.22053337097168],["熱",-11.220723152160645],["ود",-11.220839500427246],["▁Don",-11.220942497253418],["▁tsy",-11.221014022827148],["nga",-11.221024513244627],["▁կամ",-11.221113204956056],["より",-11.221161842346191],["stri",-11.221322059631348],["▁Mat",-11.221369743347168],["▁47",-11.221506118774414],["▁č",-11.221518516540527],["ška",-11.221622467041016],["ību",-11.221776008605955],["▁tahu",-11.22197151184082],["【",-11.222021102905272],["▁Program",-11.222142219543455],["ké",-11.222146034240724],["▁mini",-11.222253799438477],["▁selle",-11.222301483154297],["ா",-11.222434043884276],["chy",-11.222482681274414],["▁bên",-11.223058700561523],["氣",-11.223138809204102],["ssen",-11.223163604736328],["فر",-11.223166465759276],["...\"",-11.22317600250244],["▁Hu",-11.223332405090332],["▁fins",-11.22340965270996],["rna",-11.22342300415039],["▁üle",-11.22353744506836],["▁ganz",-11.223546028137209],["▁අපි",-11.223645210266112],["▁Fu",-11.223873138427734],["▁mene",-11.224081039428713],["יק",-11.22408676147461],["▁آنها",-11.22414207458496],["▁مردم",-11.224185943603516],["dores",-11.224221229553224],["▁terhadap",-11.22422695159912],["▁Club",-11.224302291870115],["非",-11.224302291870115],["zet",-11.22439670562744],["▁حتى",-11.224620819091797],["▁Μ",-11.224725723266602],["ບ",-11.22483253479004],["html",-11.224905967712402],["▁Italia",-11.225098609924316],["nat",-11.22529411315918],["tare",-11.225371360778809],["▁strani",-11.22541332244873],["▁kunt",-11.225550651550291],["▁senza",-11.22558307647705],["nci",-11.225610733032228],["hem",-11.22562026977539],["千",-11.22563362121582],["▁ως",-11.225693702697754],["▁str",-11.225762367248535],["▁mye",-11.225790977478027],["
▁wij",-11.22592067718506],["prav",-11.225955963134766],["▁ку",-11.225975036621094],["04",-11.226028442382812],["wn",-11.226059913635254],["ീ",-11.22618007659912],["▁Khan",-11.226183891296388],["పై",-11.226551055908203],["▁Anna",-11.226585388183594],["▁turi",-11.226654052734377],["▁Dat",-11.226799011230469],["ese",-11.226916313171388],["دە",-11.227001190185549],["вен",-11.227067947387695],["▁Раз",-11.22717571258545],["▁interes",-11.227187156677246],["▁possible",-11.227343559265137],["▁କ",-11.227405548095703],["‬",-11.227506637573242],["▁אבל",-11.227744102478027],["開始",-11.22774600982666],["▁энэ",-11.227755546569824],["兩",-11.227828025817873],["▁مثل",-11.227831840515137],["▁going",-11.227846145629885],["çe",-11.227954864501951],["minen",-11.228039741516112],["▁grupo",-11.228196144104004],["ným",-11.228216171264648],["2013",-11.228348731994627],["яз",-11.228388786315918],["tif",-11.228694915771484],["ದು",-11.228715896606444],["300",-11.228793144226074],["ン",-11.228849411010742],["即",-11.229043960571287],["▁án",-11.229084014892578],["▁nap",-11.229187965393066],["▁18.",-11.229498863220217],["eur",-11.229660987854004],["▁זיך",-11.229788780212402],["టి",-11.229815483093262],["▁jsme",-11.22982120513916],["▁అని",-11.229825019836426],["▁libro",-11.229876518249512],["▁сите",-11.229928970336914],["esta",-11.229985237121582],["ða",-11.230103492736816],["▁ਅ",-11.230106353759766],["▁fue",-11.230133056640623],["▁cầu",-11.230158805847168],["eaza",-11.23024082183838],["מה",-11.230649948120115],["စ",-11.230717658996582],["產品",-11.23084831237793],["rom",-11.230905532836914],["▁Все",-11.231043815612791],["To",-11.23114013671875],["▁باز",-11.23145580291748],["ских",-11.23154354095459],["▁nome",-11.23158359527588],["▁aja",-11.231619834899902],["En",-11.231623649597168],["▁бас",-11.231767654418944],["声",-11.23198699951172],["▁പ്ര",-11.232192993164062],["vien",-11.232319831848145],["▁duhet",-11.232467651367188],["▁кра",-11.232499122619627],["▁Saya",-11.232739448547363],["▁ха",-11.23299789428711],["▁egen",-11.233179092407228],["▁رہے",-11.233309745788574],["alo",-11.233351707458496],["ණ",-11.233444213867188],["条",-11.23348617553711],["وب",-11.233539581298828],["TA",-11.233675956726074],["ん",-11.233720779418944],["ё",-11.233745574951172],["yle",-11.23378849029541],["NI",-11.233800888061523],["ულ",-11.234030723571776],["▁Santa",-11.234208106994627],["▁ๆ",-11.234244346618652],["mě",-11.234305381774902],["▁код",-11.2345552444458],["▁rus",-11.23458194732666],["がある",-11.234760284423828],["▁بم",-11.234829902648926],["▁Мы",-11.234871864318848],["werk",-11.234889030456545],["▁Mari",-11.235062599182127],["▁bili",-11.235363960266112],["州",-11.235408782958984],["▁vám",-11.235466003417969],["▁বা",-11.23556900024414],["god",-11.235710144042969],["pot",-11.235991477966309],["▁έχουν",-11.236310005187988],["▁Γ",-11.236326217651367],["ട്ട",-11.236440658569336],["▁fac",-11.236485481262209],["yla",-11.236543655395508],["ें",-11.236552238464355],["ній",-11.236722946166992],["пре",-11.23674488067627],["▁yêu",-11.236777305603027],["die",-11.237015724182127],["بل",-11.237064361572266],["she",-11.237079620361328],["rez",-11.237238883972168],["往",-11.237396240234377],["inė",-11.23751449584961],["體",-11.237688064575195],["▁deux",-11.237699508666992],["▁lor",-11.237933158874512],["▁π",-11.238012313842772],["▁iba",-11.238253593444824],["气",-11.238515853881836],["ている",-11.238635063171388],["ட்ட",-11.238691329956056],["▁mur",-11.23875331878662],["▁klub",-11.238778114318848],["▁gerne",-11.23887062072754],["ets",-11.239068031311035],["▁Stra",-11.239
091873168944],["anie",-11.239389419555664],["fat",-11.2395658493042],["味",-11.23974609375],["ала",-11.239751815795898],["▁tant",-11.239861488342283],["▁היה",-11.239908218383787],["▁chun",-11.23993968963623],["cos",-11.239948272705078],["▁cand",-11.2400484085083],["AL",-11.240059852600098],["▁дела",-11.24009895324707],["ful",-11.240416526794434],["ดู",-11.24066162109375],["вал",-11.240692138671877],["切",-11.240911483764648],["▁Ч",-11.240997314453123],["공",-11.241158485412598],["▁سنڌ",-11.24131202697754],["nus",-11.241557121276855],["ικά",-11.241691589355469],["▁rum",-11.24170207977295],["▁új",-11.241756439208984],["ана",-11.241769790649414],["će",-11.241777420043944],["šta",-11.24192714691162],["▁oslo",-11.242173194885254],["ür",-11.242639541625977],["▁زمان",-11.242685317993164],["amos",-11.24273681640625],["▁ซึ่ง",-11.242786407470703],["▁sulla",-11.242822647094728],["vers",-11.242844581604004],["▁detta",-11.24297332763672],["▁wax",-11.243020057678224],["▁ди",-11.243233680725098],["▁Fra",-11.24326229095459],["▁doch",-11.24348258972168],["ils",-11.243552207946776],["skom",-11.243573188781738],["ዝ",-11.24372673034668],["▁doanh",-11.24372673034668],["ച്ച",-11.243732452392578],["ेर",-11.24376106262207],["▁mert",-11.243958473205566],["▁град",-11.244001388549805],["стро",-11.24403476715088],["ən",-11.244071006774902],["▁Mark",-11.244094848632812],["馬",-11.244107246398926],["▁ჩა",-11.24421215057373],["ligi",-11.244576454162598],["lia",-11.24457836151123],["▁keď",-11.244668960571287],["сть",-11.244806289672852],["બ",-11.244858741760254],["ску",-11.244918823242188],["ിച്ച",-11.245081901550291],["▁vielä",-11.24511432647705],["▁2015.",-11.245304107666016],["▁չի",-11.245309829711914],["▁principal",-11.245372772216797],["▁Հայաստանի",-11.24541187286377],["▁kara",-11.245424270629885],["▁Wenn",-11.245593070983888],["▁klo",-11.24571704864502],["ском",-11.24575901031494],["▁đại",-11.245843887329102],["▁место",-11.245965003967283],["들",-11.246031761169434],["▁tad",-11.246047973632812],["▁Els",-11.246066093444824],["▁случае",-11.24614143371582],["경",-11.246312141418455],["▁Menschen",-11.24637222290039],["▁شو",-11.24669361114502],["▁hora",-11.246694564819336],["▁tag",-11.24674129486084],["▁aktiv",-11.246774673461914],["▁64",-11.24686336517334],["rzy",-11.246973037719728],["血",-11.247174263000488],["▁ነው።",-11.24728298187256],["▁CD",-11.247801780700684],["פי",-11.24783420562744],["рі",-11.247918128967283],["技術",-11.24803066253662],["华",-11.248102188110352],["▁dur",-11.248215675354004],["sus",-11.248358726501465],["▁pad",-11.248366355895996],["모",-11.24844455718994],["ēt",-11.248453140258787],["ania",-11.248861312866213],["▁poco",-11.24888801574707],["▁مل",-11.249248504638672],["bri",-11.249308586120604],["ໄດ້",-11.249492645263672],["▁rasa",-11.249683380126951],["▁Ла",-11.249730110168455],["▁Това",-11.249914169311523],["▁chu",-11.249974250793455],["ont",-11.250136375427246],["▁många",-11.250176429748535],["▁каб",-11.250380516052246],["▁នេះ",-11.250476837158203],["▁ولا",-11.250812530517578],["▁pana",-11.250890731811523],["▁PS",-11.250924110412598],["▁hard",-11.25094985961914],["ର୍",-11.251177787780762],["▁hel",-11.251313209533691],["nder",-11.251404762268066],["atu",-11.251422882080078],["لار",-11.251494407653809],["▁ه",-11.251514434814451],["▁Peter",-11.251630783081056],["分析",-11.251679420471191],["▁Mis",-11.251996994018556],["ില്ല",-11.252013206481934],["inis",-11.252044677734377],["▁bilder",-11.252220153808594],["να",-11.25234031677246],["▁üzrə",-11.252504348754885],["연",-11.252520561218262],["छ",-11.252525329589
844],["號",-11.252567291259766],["▁ew",-11.252677917480469],["わ",-11.252774238586426],["——",-11.252880096435549],["rel",-11.253046989440918],["▁ச",-11.25315761566162],["▁global",-11.25317096710205],["takse",-11.253180503845217],["rer",-11.253263473510742],["வு",-11.253463745117188],["▁anal",-11.253569602966309],["▁aasta",-11.253686904907228],["▁esa",-11.253735542297363],["▁než",-11.253753662109377],["לי",-11.253820419311523],["▁tháng",-11.253881454467772],["▁Hor",-11.254000663757324],["▁nisi",-11.254048347473145],["▁lớn",-11.25407600402832],["上的",-11.254136085510254],["awa",-11.254172325134276],["人的",-11.254570960998535],["▁klar",-11.254849433898926],["修",-11.254855155944824],["▁ጊዜ",-11.254915237426758],["▁lle",-11.254922866821287],["▁වූ",-11.254979133605955],["具有",-11.255035400390623],["▁text",-11.255061149597168],["▁اول",-11.25511646270752],["▁ශ්",-11.255413055419922],["▁آب",-11.255606651306152],["หา",-11.255629539489746],["▁điện",-11.25565242767334],["isht",-11.25607204437256],["▁ਕਰਨ",-11.256088256835938],["▁koko",-11.256135940551758],["են",-11.256305694580078],["▁dopo",-11.256319046020508],["ø",-11.2567777633667],["kos",-11.25691032409668],["..!",-11.256935119628906],["▁나",-11.257231712341309],["▁нэг",-11.257616996765137],["▁Fer",-11.257969856262209],["▁još",-11.257980346679688],["čí",-11.258065223693848],["▁страна",-11.258234977722168],["▁Soomaaliya",-11.258255958557127],["ция",-11.258322715759276],["မွ",-11.258376121520996],["▁עד",-11.25838851928711],["▁hot",-11.258668899536133],["▁dina",-11.258846282958984],["تے",-11.25886058807373],["▁sitä",-11.258885383605955],["▁mus",-11.258980751037598],["NE",-11.259003639221191],["하여",-11.259079933166504],["▁svi",-11.25917911529541],["▁Sur",-11.259183883666992],["▁Sp",-11.25928020477295],["ttu",-11.25929355621338],["▁IP",-11.259429931640623],["▁Ger",-11.259571075439451],["Ba",-11.2595853805542],["▁prezent",-11.2596435546875],["▁terra",-11.259716033935549],["▁menu",-11.259720802307127],["ร์",-11.25981330871582],["女性",-11.2598295211792],["▁होता",-11.259929656982422],["▁할",-11.26000690460205],["BA",-11.26014518737793],["ээр",-11.260170936584473],["گە",-11.260234832763672],["ມ",-11.26027011871338],["▁being",-11.26039695739746],["ፋ",-11.260717391967772],["進行",-11.260814666748049],["▁mewn",-11.261014938354492],["دو",-11.261098861694336],["சி",-11.261137008666992],["▁система",-11.26117992401123],["若",-11.26117992401123],["▁nang",-11.26121997833252],["▁закон",-11.261234283447266],["▁الس",-11.261395454406738],["▁bhfuil",-11.261581420898438],["Ü",-11.261977195739746],["에는",-11.262045860290527],["ində",-11.262181282043455],["ু",-11.262249946594238],["enza",-11.262293815612791],["▁سه",-11.262374877929688],["▁Pada",-11.262452125549316],["anda",-11.262532234191896],["▁August",-11.262747764587402],["他們",-11.262807846069336],["▁finans",-11.26308536529541],["▁mille",-11.263116836547852],["▁Kal",-11.263224601745604],["▁support",-11.26331901550293],["▁giúp",-11.263372421264648],["▁नाम",-11.263595581054688],["▁business",-11.263785362243652],["▁Ty",-11.264008522033691],["کی",-11.264288902282717],["NG",-11.264548301696776],["وس",-11.264551162719728],["!)",-11.264853477478027],["mą",-11.264893531799316],["▁Tanzania",-11.265162467956545],["▁چ",-11.265189170837402],["▁twee",-11.26524543762207],["▁едно",-11.265299797058104],["▁نیست",-11.265694618225098],["環境",-11.26572322845459],["今",-11.265789031982422],["ére",-11.26580810546875],["▁Ihnen",-11.265830039978027],["pper",-11.26584529876709],["ls",-11.265901565551758],["▁Tin",-11.265976905822754],["▁stre",-11.266047477722168
],["▁ግን",-11.266179084777832],["▁όπως",-11.26621150970459],["▁بىلەن",-11.266352653503418],["▁khu",-11.266427993774414],["dzie",-11.266441345214844],["▁masuk",-11.266477584838867],["▁gy",-11.266613006591797],["lta",-11.266671180725098],["vos",-11.266754150390623],["Ի",-11.26693630218506],["▁pet",-11.266948699951172],["▁tanpa",-11.26707935333252],["▁проблем",-11.267107963562012],["▁q",-11.26711654663086],["夜",-11.267159461975098],["▁який",-11.267226219177246],["▁এবং",-11.267547607421877],["▁က",-11.267664909362791],["▁ਨਹੀਂ",-11.267748832702637],["▁ყველა",-11.267809867858888],["kim",-11.267946243286133],["▁gol",-11.267962455749512],["内容",-11.26803970336914],["tzea",-11.268102645874023],["更多",-11.26813793182373],["▁before",-11.268195152282717],["▁sobie",-11.268231391906738],["▁must",-11.268290519714355],["▁pil",-11.268394470214844],["失",-11.268471717834473],["Ч",-11.26852321624756],["▁mejor",-11.268566131591797],["▁Ved",-11.268589973449709],["เ",-11.268712043762209],["▁Esperanto",-11.268954277038574],["▁всего",-11.268960952758787],["越",-11.269206047058104],["▁வி",-11.269208908081056],["▁presente",-11.269290924072266],["▁бити",-11.26949691772461],["▁vẫn",-11.26974391937256],["▁göre",-11.269803047180176],["現在",-11.269804000854492],["ണം",-11.269831657409668],["▁Commons",-11.269906997680664],["лення",-11.270023345947266],["▁Festival",-11.27012825012207],["▁kế",-11.270303726196287],["▁tel",-11.2703275680542],["▁mia",-11.270362854003906],["iri",-11.270973205566406],["मध्ये",-11.27106475830078],["რი",-11.271306991577148],["▁گزارش",-11.271425247192385],["▁mali",-11.271443367004396],["品牌",-11.27181911468506],["Ka",-11.27194595336914],["ért",-11.27220630645752],["▁így",-11.27248191833496],["ლ",-11.272554397583008],["▁무",-11.27257251739502],["sinin",-11.272622108459473],["iau",-11.272720336914062],["ojnë",-11.27281093597412],["▁sitt",-11.272811889648438],["▁области",-11.272819519042969],["ရာ",-11.272960662841797],["▁انسان",-11.273100852966309],["▁liên",-11.273158073425291],["}",-11.273359298706056],["▁saber",-11.273364067077637],["▁ලෙස",-11.273419380187988],["▁eerste",-11.27365779876709],["▁nacional",-11.273798942565918],["▁եմ",-11.273980140686035],["▁Aku",-11.274195671081545],["▁Бо",-11.274261474609377],["ust",-11.27427101135254],["▁melalui",-11.274356842041016],["▁Wikipedia",-11.274361610412598],["▁días",-11.274405479431152],["ئی",-11.27443790435791],["▁году",-11.27453327178955],["▁año",-11.274690628051758],["▁dra",-11.274728775024414],["▁PER",-11.274739265441896],["▁више",-11.274824142456056],["дағы",-11.27489948272705],["指",-11.274914741516112],["▁mein",-11.274927139282228],["中的",-11.27495002746582],["▁casino",-11.275229454040527],["▁ආ",-11.27523708343506],["บ้าน",-11.27532958984375],["여",-11.275630950927734],["一直",-11.275751113891602],["▁Kui",-11.275846481323242],["▁inform",-11.275873184204102],["้",-11.275930404663086],["வர்",-11.276016235351562],["▁Plus",-11.276123046875],["こと",-11.276291847229004],["하기",-11.276339530944824],["▁diesem",-11.276397705078123],["这样",-11.276412010192873],["ीय",-11.276629447937012],["lgan",-11.276660919189451],["▁добро",-11.276692390441896],["▁πολύ",-11.276968002319336],["▁phần",-11.27716827392578],["tika",-11.277181625366213],["05",-11.277331352233888],["Ó",-11.277361869812012],["▁خواهد",-11.277373313903809],["ానికి",-11.277405738830566],["šanas",-11.277416229248049],["larni",-11.277667999267578],["▁sok",-11.277887344360352],["▁enn",-11.278440475463867],["wal",-11.278514862060549],["ղ",-11.279105186462402],["政策",-11.279107093811035],["行业",-11.279141426086426],["ove",-11.
27919101715088],["ец",-11.279199600219728],["▁പി",-11.279244422912598],["しています",-11.27934455871582],["▁പ",-11.279356002807615],["▁parë",-11.279406547546388],["排",-11.279409408569336],["აც",-11.279457092285156],["ត",-11.279531478881836],["▁Công",-11.279542922973633],["ave",-11.27963638305664],["▁اسان",-11.27973747253418],["MA",-11.279763221740724],["▁mense",-11.279791831970217],["▁її",-11.279791831970217],["▁Кыргыз",-11.27988624572754],["rê",-11.28000545501709],["▁sina",-11.28006076812744],["▁yên",-11.280177116394045],["▁ach",-11.280301094055176],["ವಿ",-11.280466079711914],["▁شامل",-11.280774116516112],["▁cei",-11.280795097351074],["▁behar",-11.28081512451172],["▁tun",-11.280865669250488],["▁cor",-11.280920028686523],["▁Pak",-11.28092098236084],["▁Brasil",-11.28105640411377],["ਜ਼",-11.281126022338867],["უ",-11.28121566772461],["สินค้า",-11.281272888183594],["▁김",-11.281307220458984],["مر",-11.281621932983398],["▁болгон",-11.281679153442385],["αν",-11.281695365905762],["TE",-11.28174114227295],["elo",-11.281750679016112],["dz",-11.281766891479492],["▁CO",-11.281787872314451],["ե",-11.281816482543944],["▁જે",-11.282087326049805],["문",-11.282113075256348],["▁זו",-11.282362937927246],["ത്തിന്",-11.282451629638672],["ço",-11.282513618469238],["▁Voor",-11.2826509475708],["ළ",-11.282663345336914],["نی",-11.282670974731444],["▁Israel",-11.282721519470217],["▁ditt",-11.282979011535645],["ams",-11.282991409301758],["▁био",-11.283039093017578],["کر",-11.28310203552246],["ladi",-11.283347129821776],["ëve",-11.283448219299316],["发现",-11.28348159790039],["band",-11.283589363098145],["▁dövlət",-11.283692359924316],["▁Ada",-11.28369426727295],["യിൽ",-11.284024238586426],["hl",-11.284110069274902],["물",-11.284343719482422],["▁Kim",-11.284552574157717],["▁систем",-11.284582138061523],["▁момент",-11.284737586975098],["▁edi",-11.284899711608888],["▁1999",-11.284919738769531],["▁Sub",-11.284948348999023],["▁UN",-11.28495979309082],["skog",-11.285041809082031],["জ",-11.285144805908203],["找",-11.285255432128906],["▁Կ",-11.285306930541992],["▁ਹੋ",-11.285364151000977],["▁해",-11.285369873046877],["ഹ",-11.28550148010254],["▁fod",-11.28550148010254],["tv",-11.285642623901367],["▁produkt",-11.285686492919922],["த்தை",-11.285762786865234],["▁علی",-11.285828590393066],["rai",-11.28594970703125],["▁atá",-11.285993576049805],["umi",-11.286028861999512],["▁Србије",-11.286078453063965],["▁svet",-11.286090850830078],["▁gode",-11.286224365234377],["ран",-11.286370277404783],["▁Shi",-11.286556243896484],["▁ويو",-11.286624908447266],["ében",-11.2866792678833],["▁komen",-11.286707878112791],["▁anderen",-11.286730766296388],["▁hau",-11.286735534667969],["צ",-11.286752700805664],["▁Hvis",-11.28696346282959],["▁Tro",-11.286974906921388],["▁હોય",-11.287100791931152],["▁କରି",-11.287116050720217],["▁through",-11.287227630615234],["▁never",-11.28724193572998],["દ",-11.287336349487305],["ග",-11.287443161010742],["▁Ihre",-11.28747844696045],["▁után",-11.287643432617188],["▁hen",-11.287717819213867],["电",-11.28775405883789],["რა",-11.288022994995115],["▁dio",-11.288066864013672],["成为",-11.288222312927246],["-3",-11.288434982299805],["▁quy",-11.288453102111816],["08",-11.288533210754396],["ਜ",-11.28869915008545],["作品",-11.288850784301758],["▁Roma",-11.289178848266602],["▁Kami",-11.289246559143066],["ներին",-11.289361000061035],["▁pedig",-11.289517402648926],["▁مر",-11.289628982543944],["ље",-11.289691925048828],["連",-11.289766311645508],["▁موقع",-11.289824485778809],["▁Με",-11.289867401123049],["▁veya",-11.290098190307615],["Do",-11.29013252
2583008],["▁Anti",-11.290206909179688],["형",-11.29030418395996],["▁Dengan",-11.290313720703123],["Pa",-11.29037094116211],["▁bata",-11.290374755859377],["▁give",-11.29047679901123],["▁vēl",-11.29065227508545],["▁এক",-11.29075527191162],["▁adipiscing",-11.290969848632812],["▁Pag",-11.290977478027344],["سا",-11.290998458862305],["▁artikel",-11.291049003601074],["▁jog",-11.291102409362791],["bon",-11.29111671447754],["された",-11.291202545166016],["▁review",-11.291276931762695],["▁ನ",-11.291300773620604],["ør",-11.29132843017578],["ლა",-11.29150390625],["▁bes",-11.291654586791992],["▁Tom",-11.291680335998535],["^",-11.291690826416016],["▁동",-11.291692733764648],["aca",-11.291765213012695],["LA",-11.29188632965088],["▁International",-11.29190444946289],["▁होते",-11.292120933532717],["GB",-11.292146682739258],["▁cea",-11.292208671569824],["▁viac",-11.292502403259276],["▁үй",-11.29251766204834],["-19",-11.292573928833008],["ose",-11.2926025390625],["▁olen",-11.292633056640623],["▁gebruik",-11.292679786682127],["▁també",-11.292773246765137],["07",-11.292794227600098],["▁trang",-11.292820930480955],["▁लाख",-11.292820930480955],["▁insan",-11.292902946472168],["kir",-11.29339599609375],["▁ملک",-11.293426513671877],["▁cr",-11.29345703125],["六",-11.293478965759276],["▁slik",-11.293612480163574],["শ",-11.293761253356934],["▁було",-11.293805122375488],["▁dibdib",-11.293822288513184],["iye",-11.293888092041016],["▁cael",-11.293947219848633],["▁those",-11.294076919555664],["▁قانون",-11.294102668762209],["萬",-11.294135093688965],["▁Ev",-11.294388771057127],["ობის",-11.294473648071287],["家庭",-11.29451847076416],["▁mungkin",-11.294760704040527],["ौ",-11.29476833343506],["ması",-11.29483127593994],["▁mnie",-11.294857025146484],["дық",-11.294872283935549],["工程",-11.294923782348633],["▁ea",-11.295029640197754],["▁festival",-11.295083999633787],["▁த",-11.295221328735352],["adas",-11.295470237731934],["▁කියලා",-11.2955322265625],["DE",-11.295660972595217],["▁punto",-11.295730590820312],["▁يې",-11.295912742614746],["igen",-11.29603672027588],["นา",-11.296069145202637],["▁aansoek",-11.29608917236328],["llen",-11.296194076538086],["▁etwas",-11.296314239501951],["gro",-11.296409606933594],["had",-11.296439170837402],["▁små",-11.296615600585938],["▁kaya",-11.296807289123535],["▁გადა",-11.296819686889648],["റ്റ",-11.296859741210938],["▁حل",-11.297202110290527],["▁Mr",-11.297374725341797],["▁मी",-11.297589302062988],["dek",-11.29768180847168],["え",-11.297773361206056],["ሮ",-11.2979154586792],["ták",-11.297928810119627],["wel",-11.298013687133787],["め",-11.298168182373049],["€",-11.29835033416748],["▁dedi",-11.298510551452637],["▁majd",-11.29858112335205],["▁alte",-11.298609733581545],["▁hii",-11.29888916015625],["гер",-11.29890251159668],["از",-11.298965454101562],["▁kuma",-11.299057960510254],["▁ás",-11.299102783203123],["▁sido",-11.29910659790039],["▁2014.",-11.29915714263916],["nza",-11.299262046813965],["▁جب",-11.299437522888184],["्न",-11.29952907562256],["▁PAS",-11.299615859985352],["▁legal",-11.299738883972168],["▁Kako",-11.299741744995115],["▁3)",-11.299750328063965],["▁který",-11.29981517791748],["ции",-11.299848556518556],["精神",-11.299941062927246],["영",-11.300012588500977],["▁donde",-11.300028800964355],["ջ",-11.300054550170898],["▁Ram",-11.300286293029783],["col",-11.300331115722656],["รับ",-11.300406455993652],["▁waren",-11.300615310668944],["പി",-11.300748825073242],["▁Sverige",-11.30077075958252],["igi",-11.300889015197754],["▁escort",-11.30095100402832],["ţie",-11.301019668579102],["▁Mos",-11.30103874206543]
,["▁dùng",-11.301063537597656],["▁erat",-11.30138111114502],["кер",-11.301435470581056],["▁Vous",-11.301465034484863],["tid",-11.301658630371094],["ھ",-11.301746368408203],["▁hierdie",-11.301889419555664],["ങ്ങൾ",-11.302009582519531],["▁edo",-11.302474975585938],["門",-11.302555084228516],["нда",-11.302711486816406],["hang",-11.302733421325684],["▁دولت",-11.302778244018556],["▁ihan",-11.303071022033691],["цу",-11.30325698852539],["▁massasje",-11.303301811218262],["▁daca",-11.30341911315918],["▁dum",-11.30344295501709],["aba",-11.303470611572266],["▁lag",-11.303561210632324],["ੋ",-11.303709983825684],["▁bersama",-11.303709983825684],["▁poli",-11.30371379852295],["▁nhìn",-11.30390453338623],["rada",-11.30398941040039],["室",-11.30401611328125],["จ",-11.304113388061523],["τη",-11.304123878479004],["▁tekst",-11.304213523864746],["یک",-11.304365158081056],["ři",-11.304366111755373],["Č",-11.304513931274414],["系统",-11.304543495178224],["ина",-11.30458641052246],["▁nepo",-11.304604530334473],["▁بازی",-11.304628372192385],["роб",-11.30470848083496],["ées",-11.304835319519045],["لی",-11.304871559143066],["르",-11.304879188537598],["▁dall",-11.304893493652344],["сці",-11.305031776428224],["▁Пра",-11.30512237548828],["શ",-11.30527114868164],["kam",-11.305293083190918],["ість",-11.305506706237791],["除了",-11.305693626403809],["रे",-11.305768013000488],["جو",-11.305830001831056],["▁Projekt",-11.305830001831056],["業",-11.305959701538086],["▁ай",-11.306123733520508],["oja",-11.306129455566406],["ace",-11.30630874633789],["▁ത",-11.306456565856934],["▁자",-11.306499481201172],["▁neste",-11.306669235229492],["这是",-11.306954383850098],["▁dagen",-11.306979179382324],["▁şey",-11.306986808776855],["bul",-11.306997299194336],["kg",-11.307064056396484],["▁eigen",-11.307095527648926],["▁داخل",-11.30717945098877],["dele",-11.307233810424805],["▁మా",-11.307299613952637],["投资",-11.307306289672852],["▁آر",-11.307527542114258],["▁după",-11.30760383605957],["▁något",-11.307657241821287],["▁тур",-11.307682991027832],["ത്ത്",-11.307708740234377],["क्",-11.307806015014648],["▁vấn",-11.307842254638672],["▁vind",-11.307916641235352],["थ",-11.308013916015623],["▁Бул",-11.308091163635254],["чки",-11.308143615722656],["▁оп",-11.308239936828612],["▁kuko",-11.308252334594728],["▁autem",-11.308364868164062],["сі",-11.308424949645996],["▁hän",-11.308433532714844],["精",-11.308436393737791],["von",-11.308466911315918],["jn",-11.308499336242676],["ալ",-11.30858039855957],["iet",-11.30860996246338],["▁банк",-11.30905055999756],["ST",-11.30905532836914],["▁صاحب",-11.309059143066406],["▁dilakukan",-11.309203147888184],["▁Hà",-11.309256553649902],["issä",-11.309346199035645],["生产",-11.309508323669434],["film",-11.309553146362305],["▁Би",-11.309585571289062],["男",-11.309602737426758],["gat",-11.309732437133787],["▁Egy",-11.309815406799316],["▁sekali",-11.309844970703123],[".000",-11.30997085571289],["dí",-11.310128211975098],["imiz",-11.310236930847168],["tera",-11.310336112976074],["▁actual",-11.310359954833984],["▁kind",-11.310413360595703],["οι",-11.310587882995604],["▁ነው፡፡",-11.31060791015625],["ഷ",-11.310661315917969],["▁pir",-11.310702323913574],["▁მაგრამ",-11.310747146606444],["▁России",-11.310754776000977],["▁tror",-11.31086254119873],["бай",-11.310919761657717],["зу",-11.311080932617188],["્યા",-11.311080932617188],["کو",-11.311226844787598],["▁PO",-11.311405181884766],["NO",-11.311553001403809],["▁ఎ",-11.311558723449709],["▁muda",-11.311789512634276],["▁Ü",-11.311911582946776],["▁také",-11.312116622924805],["ין",-11.3123140335083],["మా
",-11.312429428100586],["▁dentro",-11.312484741210938],["▁یکی",-11.312570571899414],["▁genom",-11.312591552734377],["提高",-11.312650680541992],["▁verið",-11.312987327575684],["tung",-11.31313419342041],["功能",-11.313261032104492],["▁hoc",-11.313336372375488],["完全",-11.313451766967772],["▁Casa",-11.313592910766602],["▁Aber",-11.313657760620115],["rë",-11.31373691558838],["ος",-11.313743591308594],["▁այս",-11.313879013061523],["لىرى",-11.314010620117188],["▁ord",-11.3140230178833],["دی",-11.314081192016602],["▁Ama",-11.314105033874512],["▁klik",-11.314116477966309],["▁berkata",-11.314181327819824],["▁tarafından",-11.314512252807615],["ฟ",-11.314634323120115],["▁актив",-11.315104484558104],["▁бул",-11.315166473388672],["▁Xa",-11.315171241760254],["maq",-11.315185546875],["▁kategori",-11.31530475616455],["▁bao",-11.315446853637695],["ատ",-11.315458297729492],["imus",-11.315491676330566],["ù",-11.315505981445312],["yl",-11.315757751464844],["▁Մ",-11.31580638885498],["cja",-11.316207885742188],["村",-11.316216468811035],["▁دوست",-11.316333770751951],["э",-11.316399574279783],["▁জন্য",-11.316557884216309],["▁Kwa",-11.316575050354004],["▁ឆ្នាំ",-11.316874504089355],["▁There",-11.317059516906738],["▁kre",-11.317116737365724],["▁aŭ",-11.317342758178713],["▁کل",-11.317411422729492],["▁mü",-11.317438125610352],["tun",-11.317460060119627],["▁maa",-11.31767749786377],["▁privat",-11.317706108093262],["ま",-11.317715644836426],["▁ਇਹ",-11.317770957946776],["צה",-11.31804370880127],["▁shu",-11.318285942077637],["▁bom",-11.318317413330078],["мы",-11.318533897399902],["▁hela",-11.31861400604248],["ય",-11.318832397460938],["ສ",-11.318839073181152],["▁Ха",-11.318946838378906],["▁دنیا",-11.318964958190918],["▁کام",-11.31912326812744],["합니다",-11.319138526916504],["▁Sch",-11.319218635559082],["▁geht",-11.319286346435549],["▁kod",-11.319418907165527],["▁vært",-11.31948184967041],["▁Test",-11.31968593597412],["ijos",-11.319809913635254],["▁кога",-11.319835662841797],["▁familia",-11.319890975952148],["▁51",-11.320030212402344],["▁giải",-11.320097923278809],["▁స",-11.320189476013184],["uka",-11.320490837097168],["Ö",-11.320680618286133],["tada",-11.320688247680664],["▁υπο",-11.320696830749512],["▁jego",-11.32074737548828],["36",-11.320812225341797],["▁ան",-11.320836067199709],["▁két",-11.32111930847168],["cje",-11.321491241455078],["▁कम",-11.321603775024414],["▁ста",-11.32186508178711],["▁ست",-11.32203769683838],["ści",-11.322044372558594],["▁Ky",-11.322123527526855],["▁anno",-11.322301864624023],["▁Yn",-11.322421073913574],["tı",-11.32247257232666],["▁Tamil",-11.322498321533203],["▁उत्तर",-11.322586059570312],["كم",-11.32264518737793],["▁پنهنجي",-11.322906494140623],["▁hus",-11.32302474975586],["ette",-11.323094367980955],["▁Ah",-11.32329559326172],["▁BA",-11.323436737060549],["▁име",-11.32382106781006],["▁kri",-11.323887825012209],["求",-11.323894500732422],["ják",-11.323991775512695],["वे",-11.324161529541016],["▁sini",-11.324214935302734],["ավ",-11.324247360229492],["▁గా",-11.324265480041504],["▁እንዲ",-11.324265480041504],["мир",-11.324291229248049],["知道",-11.324339866638184],["อีก",-11.324398040771484],["ával",-11.324438095092772],["▁بیان",-11.324527740478516],["ଣ",-11.324570655822754],["▁Ste",-11.32460117340088],["▁noget",-11.324612617492676],["▁(7)",-11.324649810791016],["ラ",-11.324668884277344],["▁moc",-11.324803352355955],["iro",-11.32488250732422],["▁آباد",-11.32488250732422],["дің",-11.325127601623535],["usi",-11.325136184692385],["▁బ",-11.32517910003662],["ؤ",-11.325275421142578],["ple",-11.325310707092283],["isk
a",-11.32538604736328],["▁cá",-11.325453758239746],["▁tiền",-11.325482368469238],["八",-11.325570106506348],["fal",-11.325573921203612],["ప్",-11.32558822631836],["▁Och",-11.325600624084473],["▁bán",-11.325749397277832],["▁Բ",-11.325796127319336],["▁norske",-11.325926780700684],["防",-11.325957298278809],["වි",-11.325971603393556],["▁fá",-11.326042175292969],["▁180",-11.326184272766112],["തു",-11.32629680633545],["▁ton",-11.326313018798828],["▁hul",-11.326448440551758],["▁ماه",-11.326493263244627],["▁politika",-11.326519966125488],["▁thị",-11.326567649841309],["gil",-11.32659149169922],["TO",-11.326630592346191],["一下",-11.326871871948242],["▁వ",-11.326974868774414],["bio",-11.327067375183104],["▁sel",-11.327116012573242],["北京",-11.32717227935791],["က္",-11.327180862426758],["▁щоб",-11.327195167541504],["▁Quốc",-11.327347755432127],["langan",-11.327363014221191],["▁rast",-11.32751750946045],["▁medio",-11.327616691589355],["▁hingga",-11.327627182006836],["▁인",-11.32763385772705],["За",-11.327664375305176],["▁ĉi",-11.327930450439451],["▁pang",-11.328312873840332],["▁Samsung",-11.328387260437012],["න්න",-11.328444480895996],["ກ",-11.328462600708008],["Ա",-11.328545570373535],["sol",-11.328567504882812],["▁grund",-11.328585624694824],["డు",-11.32859992980957],["গ",-11.328667640686035],["side",-11.329096794128418],["▁band",-11.329105377197266],["НА",-11.329203605651855],["▁بد",-11.3294677734375],["▁وسلم",-11.329586029052734],["▁giờ",-11.32968521118164],["რე",-11.329704284667969],["▁Cer",-11.329893112182615],["ihin",-11.329943656921388],["▁पा",-11.329981803894045],["US",-11.330242156982422],["эн",-11.330265045166016],["▁finden",-11.330458641052246],["аар",-11.330510139465332],["nc",-11.330531120300291],["▁mara",-11.330764770507812],["▁повече",-11.33083152770996],["▁especial",-11.331015586853027],["ow",-11.33102798461914],["▁द",-11.331148147583008],["▁gara",-11.331183433532717],["gli",-11.331222534179688],["▁memberikan",-11.331225395202637],["▁лица",-11.331263542175291],["▁hati",-11.331276893615724],["▁صرف",-11.331403732299805],["вод",-11.331517219543455],["士",-11.331592559814451],["ism",-11.331639289855955],["▁simple",-11.33168888092041],["ule",-11.331778526306152],["▁pay",-11.331791877746582],["處",-11.331799507141112],["▁दिया",-11.331825256347656],["▁asta",-11.331846237182615],["▁ہونے",-11.331869125366213],["yı",-11.332049369812012],["but",-11.332175254821776],["▁коли",-11.3322172164917],["hető",-11.332249641418455],["▁شروع",-11.332316398620604],["▁står",-11.332613945007324],["料",-11.33263874053955],["oko",-11.332694053649902],["▁brand",-11.332738876342772],["gos",-11.33277702331543],["dhi",-11.333020210266112],["ajn",-11.333162307739258],["▁primera",-11.33320426940918],["▁Tam",-11.33322525024414],["▁свет",-11.33332633972168],["▁както",-11.33351993560791],["care",-11.33375072479248],["thu",-11.33382511138916],["一起",-11.333833694458008],["рт",-11.333847999572754],["▁всех",-11.333942413330078],["ского",-11.33410358428955],["▁байсан",-11.33415699005127],["▁спорт",-11.334216117858888],["▁karakter",-11.334283828735352],["tako",-11.334315299987791],["▁биде",-11.334360122680664],["▁Kr",-11.334365844726562],["▁dokument",-11.334380149841309],["▁داشته",-11.334753036499023],["▁kvalitet",-11.335165023803713],["▁هستند",-11.335256576538086],["iai",-11.33533763885498],["95",-11.33538055419922],["ኝ",-11.335412979125977],["zni",-11.33541774749756],["sje",-11.335421562194824],["안",-11.335808753967283],["larında",-11.335859298706056],["选择",-11.335907936096191],["kle",-11.335939407348633],["ბა",-11.335949897766112],["
твор",-11.33602237701416],["dere",-11.336071968078612],["تو",-11.336119651794434],["ជា",-11.336336135864258],["▁pic",-11.33649444580078],["рек",-11.33652400970459],["应",-11.336627006530762],["▁మీ",-11.33668327331543],["▁ப",-11.336921691894531],["▁Å",-11.336996078491213],["▁mama",-11.337133407592772],["▁الر",-11.33719253540039],["szer",-11.337669372558594],["▁donc",-11.337695121765137],["▁تن",-11.337822914123535],["▁pena",-11.337849617004396],["▁וואס",-11.337850570678713],["分享",-11.337970733642578],["zh",-11.338139533996582],["香",-11.338167190551758],["ரு",-11.338183403015137],["шка",-11.338189125061035],["IN",-11.338194847106934],["▁profesional",-11.33828067779541],["ация",-11.338303565979004],["▁porn",-11.338415145874023],["之后",-11.338475227355955],["重要",-11.33860206604004],["▁ønsker",-11.338727951049805],["Si",-11.338735580444336],["ต้อง",-11.338764190673828],["lė",-11.3388090133667],["帶",-11.33882999420166],["ขึ้น",-11.338837623596191],["▁dai",-11.33888816833496],["▁кор",-11.338966369628906],["ының",-11.339055061340332],["▁encore",-11.33910083770752],["бар",-11.339362144470217],["展",-11.339435577392578],["нь",-11.339654922485352],["ид",-11.340020179748535],["いる",-11.340057373046877],["▁πιο",-11.340087890625],["案",-11.340324401855469],["ще",-11.3403959274292],["nel",-11.340459823608398],["▁milli",-11.340578079223633],["kirin",-11.340736389160156],["▁tikai",-11.34083652496338],["▁لیکن",-11.340847969055176],["▁mirë",-11.34097957611084],["พระ",-11.341032028198242],["▁reklam",-11.341136932373049],["▁Madrid",-11.341233253479004],["သည်",-11.34127140045166],["kiem",-11.341300010681152],["▁dahil",-11.341352462768556],["▁მისი",-11.341378211975098],["yer",-11.341443061828612],["երը",-11.341534614562988],["▁promo",-11.341609954833984],["▁22.",-11.341729164123535],["cü",-11.341733932495115],["41",-11.341771125793455],["▁එ",-11.341854095458984],["այ",-11.341949462890623],["ði",-11.342138290405272],["▁пе",-11.342248916625977],["оз",-11.342292785644531],["bang",-11.342333793640137],["▁Del",-11.342572212219238],["ö",-11.342597007751465],["גע",-11.342710494995115],["▁Mü",-11.34304904937744],["ningar",-11.34315299987793],["▁रही",-11.343156814575195],["دىكى",-11.343198776245115],["▁LED",-11.343231201171877],["▁kay",-11.343235969543455],["▁Jika",-11.343238830566406],["ே",-11.343329429626465],["So",-11.343469619750977],["auto",-11.343515396118164],["▁свою",-11.343863487243652],["andi",-11.343878746032717],["▁sector",-11.343907356262209],["хан",-11.34420108795166],["eau",-11.344368934631348],["肉",-11.34459114074707],["▁Naj",-11.344593048095703],["▁проект",-11.344813346862791],["AM",-11.344834327697754],["ovanie",-11.34487533569336],["青",-11.344923973083496],["국",-11.345049858093262],["▁болуп",-11.345324516296388],["teen",-11.345346450805664],["▁hvad",-11.34539031982422],["である",-11.345622062683104],["▁būti",-11.345654487609863],["ಪ",-11.34574031829834],["▁sud",-11.345820426940918],["▁श",-11.345837593078612],["▁53",-11.345911979675291],["▁değil",-11.345932006835938],["いた",-11.346293449401855],["illä",-11.346327781677246],["ில்",-11.346388816833496],["▁þess",-11.346457481384276],["kut",-11.346482276916504],["ются",-11.34657096862793],["▁Inc",-11.346617698669434],["完",-11.34664821624756],["▁іх",-11.346821784973145],["oma",-11.346966743469238],["sty",-11.3472318649292],["▁עס",-11.34732723236084],["曾",-11.34755802154541],["aja",-11.347607612609863],["▁Pla",-11.347725868225098],["▁direct",-11.347981452941896],["▁ام",-11.348094940185549],["▁heel",-11.348419189453123],["▁σ",-11.348615646362305],["▁mukaan",-11.348669052
124023],["▁Φ",-11.3486967086792],["ίες",-11.348748207092283],["kie",-11.348847389221191],["ถึง",-11.349234580993652],["▁machen",-11.349486351013184],["grad",-11.349535942077637],["ith",-11.349720001220703],["▁ئي",-11.349735260009766],["▁bê",-11.349876403808594],["ਬ",-11.34998893737793],["▁things",-11.35024070739746],["▁5-",-11.350260734558104],["▁ljudi",-11.3504638671875],["cel",-11.350642204284668],["դ",-11.350873947143556],["▁wana",-11.350971221923828],["cul",-11.35121250152588],["▁ಒಂದು",-11.351253509521484],["▁Jahren",-11.351258277893066],["▁Une",-11.35130500793457],["金融",-11.351317405700684],["მე",-11.351557731628418],["▁때",-11.352120399475098],["▁работы",-11.352370262145996],["▁raha",-11.352389335632324],["▁Biz",-11.35243034362793],["▁terus",-11.352446556091309],["▁54",-11.352492332458496],["dzi",-11.35250473022461],["▁ಮಾಡ",-11.352702140808104],["isia",-11.352736473083496],["დება",-11.352956771850586],["照",-11.35319709777832],["ですが",-11.353245735168455],["inni",-11.353248596191406],["पा",-11.353404998779297],["▁KU",-11.35379123687744],["sek",-11.35382080078125],["▁poi",-11.353841781616213],["▁vitae",-11.353912353515623],["lec",-11.353944778442385],["▁dine",-11.353991508483888],["▁Nacional",-11.354082107543944],["▁world",-11.354275703430176],["டு",-11.354294776916504],["报",-11.35435676574707],["军",-11.35456085205078],["iset",-11.354584693908691],["▁සඳහා",-11.354604721069336],["线",-11.35461711883545],["▁Kara",-11.354870796203612],["app",-11.354920387268066],["ţii",-11.354995727539062],["▁อ",-11.355207443237305],["▁zuen",-11.355259895324709],["到了",-11.355379104614258],["zik",-11.355432510375977],["rim",-11.355477333068848],["bie",-11.35557746887207],["्ने",-11.35589599609375],["06",-11.356145858764648],["▁Pol",-11.356215476989746],["▁familie",-11.356228828430176],["ții",-11.356249809265137],["▁London",-11.356351852416992],["存在",-11.356352806091309],["ern",-11.356411933898926],["▁sc",-11.356635093688965],["▁करा",-11.35676383972168],["сэн",-11.356804847717283],["▁eines",-11.356898307800291],["mış",-11.35690975189209],["先生",-11.357107162475586],["kä",-11.357372283935549],["bl",-11.357394218444824],["▁bài",-11.357595443725586],["▁vasta",-11.35763168334961],["▁bulan",-11.357684135437012],["iche",-11.357772827148438],["ments",-11.35782241821289],["▁suas",-11.35789966583252],["▁дома",-11.358047485351562],["▁havde",-11.358064651489258],["ዘ",-11.358068466186523],["▁văn",-11.358274459838867],["▁vero",-11.35834789276123],["成功",-11.358455657958984],["से",-11.358643531799316],["▁trop",-11.35879898071289],["▁공",-11.358839988708496],["cije",-11.358917236328123],["▁ihre",-11.358942985534668],["▁bodo",-11.35906219482422],["ουμε",-11.359137535095217],["▁fy",-11.359245300292969],["mani",-11.359301567077637],["ție",-11.359323501586914],["▁eri",-11.359524726867676],["gere",-11.359543800354004],["ամ",-11.359649658203123],["班",-11.359654426574709],["ವು",-11.359790802001951],["င္",-11.35995101928711],["PS",-11.360062599182127],["▁gas",-11.360119819641112],["▁ມີ",-11.360240936279297],["▁wees",-11.360264778137209],["▁taas",-11.3602933883667],["▁ಮ",-11.360411643981934],["網",-11.360458374023438],["▁Creative",-11.360528945922852],["ാൻ",-11.360593795776367],["að",-11.360614776611328],["▁RE",-11.360655784606934],["▁vien",-11.36067008972168],["▁50%",-11.360702514648438],["▁могут",-11.36073398590088],["ците",-11.36086082458496],["▁dha",-11.360987663269045],["带",-11.361026763916016],["yu",-11.361027717590332],["▁sure",-11.361154556274414],["নি",-11.361188888549805],["▁Vai",-11.36154079437256],["ул",-11.361547470092772],["
▁ለመ",-11.3616361618042],["▁General",-11.361791610717772],["Mo",-11.36188793182373],["stav",-11.36190700531006],["组织",-11.362125396728516],["▁නො",-11.362271308898926],["став",-11.362347602844238],["MP",-11.362433433532717],["語",-11.362540245056152],["▁aur",-11.362608909606934],["alan",-11.36271858215332],["OK",-11.36274528503418],["▁මා",-11.362760543823242],["тары",-11.36297607421875],["政",-11.36299991607666],["▁komme",-11.363136291503906],["ວ່າ",-11.363154411315918],["▁China",-11.36322021484375],["▁عمر",-11.363245010375977],["सं",-11.36345100402832],["▁ومن",-11.36349105834961],["▁groot",-11.363511085510254],["▁56",-11.363539695739746],["owym",-11.36363697052002],["▁visi",-11.363893508911133],["적으로",-11.36417293548584],["ос",-11.364294052124023],["いい",-11.364486694335938],["unt",-11.364507675170898],["ಿದೆ",-11.364513397216797],["ನಿ",-11.364543914794922],["▁Энэ",-11.36455249786377],["▁Cal",-11.364556312561035],["▁integr",-11.364738464355469],["বি",-11.364880561828612],["▁aina",-11.364941596984863],["▁sak",-11.365097999572754],["▁bay",-11.365182876586914],["ohet",-11.36519718170166],["lic",-11.365224838256836],["ван",-11.365424156188965],["дей",-11.3655366897583],["▁نظام",-11.365572929382324],["ာ",-11.365690231323242],["▁gur",-11.36585521697998],["ավոր",-11.365961074829102],["SE",-11.36600399017334],["ತಿ",-11.366045951843262],["▁ef",-11.366045951843262],["▁naše",-11.366150856018066],["吗",-11.366225242614746],["▁lama",-11.36627960205078],["▁foar",-11.366362571716309],["נות",-11.366477966308594],["▁ollut",-11.366662979125977],["tz",-11.366669654846191],["▁зна",-11.366775512695312],["▁ପ",-11.366864204406738],["ਲਾ",-11.36691665649414],["اف",-11.36694622039795],["市場",-11.366965293884276],["सँग",-11.366972923278809],["iek",-11.367108345031738],["们",-11.367198944091797],["▁dėl",-11.36746597290039],["fall",-11.367642402648926],["▁адам",-11.367671012878418],["▁Parti",-11.367671966552734],["▁bene",-11.367700576782228],["單",-11.36776065826416],["▁جان",-11.367830276489258],["▁melihat",-11.368026733398438],["ência",-11.368027687072754],["ио",-11.368063926696776],["शी",-11.368121147155762],["▁ảnh",-11.36818790435791],["▁stand",-11.368436813354492],["私",-11.368496894836426],["▁призна",-11.36862850189209],["▁minut",-11.368694305419922],["▁مطابق",-11.368695259094238],["▁tog",-11.36875820159912],["▁मे",-11.368794441223145],["▁karşı",-11.3688325881958],["κα",-11.368868827819824],["bia",-11.36886978149414],["rup",-11.368871688842772],["▁damit",-11.369083404541016],["nad",-11.369364738464355],["▁마",-11.369617462158203],["ኛ",-11.370034217834473],["▁Pentru",-11.370099067687988],["▁Fri",-11.370363235473633],["▁ח",-11.370436668395996],["▁ସ",-11.370460510253906],["▁acum",-11.370477676391602],["ရေး",-11.370516777038574],["려",-11.37062644958496],["▁हुए",-11.370628356933594],["alta",-11.370774269104004],["▁לו",-11.370774269104004],["ү",-11.370782852172852],["יו",-11.370871543884276],["▁élet",-11.371047973632812],["33",-11.371139526367188],["达",-11.3711519241333],["학",-11.371333122253418],["▁बस",-11.371346473693848],["plat",-11.371525764465332],["bot",-11.37153434753418],["▁page",-11.37156105041504],["▁gada",-11.371637344360352],["-10",-11.371661186218262],["▁pessoas",-11.371665954589844],["▁ਹੀ",-11.371728897094728],["▁gyda",-11.371875762939451],["▁калі",-11.37188720703125],["udi",-11.371891021728516],["▁95",-11.372029304504396],["足",-11.372097969055176],["▁park",-11.372315406799316],["▁Rais",-11.3724365234375],["वी",-11.372448921203612],["▁él",-11.372454643249512],["مه",-11.37256908416748],["▁lê",-11.372604370117188],["▁G
al",-11.37269401550293],["▁Att",-11.372767448425291],["▁olur",-11.372994422912598],["朝",-11.3732271194458],["▁Các",-11.37325668334961],["張",-11.373414993286133],["▁אי",-11.373486518859863],["▁ह",-11.37359619140625],["ček",-11.37368106842041],["▁báo",-11.373696327209473],["spe",-11.373814582824709],["لىك",-11.373822212219238],["▁bel",-11.37399959564209],["▁अन्य",-11.374045372009276],["יר",-11.374115943908691],["mini",-11.374127388000488],["Э",-11.37417221069336],["初",-11.37417221069336],["▁కా",-11.37421703338623],["▁gen",-11.374350547790527],["aka",-11.374573707580566],["бер",-11.374700546264648],["▁nulla",-11.374825477600098],["▁lần",-11.37482738494873],["ทั้ง",-11.374841690063477],["▁unde",-11.375018119812012],["ude",-11.375064849853516],["▁зі",-11.375076293945312],["stel",-11.375306129455566],["▁महिला",-11.375309944152832],["iş",-11.375319480895996],["▁össze",-11.375374794006348],["nam",-11.375457763671877],["ၿပီး",-11.375570297241213],["ىنى",-11.375638961791992],["▁रहेको",-11.375761985778809],["书",-11.375778198242188],["▁gente",-11.37582778930664],["தான்",-11.375856399536133],["▁thương",-11.375943183898926],["▁sier",-11.3760347366333],["kes",-11.376148223876951],["ρο",-11.376348495483398],["า",-11.376461029052734],["▁Live",-11.376863479614258],["▁två",-11.376893997192385],["▁пак",-11.377022743225098],["char",-11.37706470489502],["▁grandes",-11.377296447753906],["เรา",-11.37736988067627],["ρί",-11.37747573852539],["查",-11.377483367919922],["▁لپاره",-11.377495765686035],["▁stabil",-11.377497673034668],["▁भारतीय",-11.377520561218262],["зі",-11.377548217773438],["▁masyarakat",-11.377625465393066],["▁алып",-11.378106117248535],["▁Jos",-11.378107070922852],["▁Mad",-11.378108024597168],["▁MA",-11.378113746643066],["▁Hotels",-11.378151893615724],["▁β",-11.378270149230955],["▁olisi",-11.378323554992676],["46",-11.378352165222168],["▁dana",-11.378576278686523],["kai",-11.3789701461792],["▁sosial",-11.379029273986816],["轉",-11.379202842712402],["nir",-11.379212379455566],["應",-11.379257202148438],["▁magyar",-11.379342079162598],["пи",-11.37937831878662],["▁تحت",-11.379453659057615],["ഡ്",-11.379471778869627],["▁чу",-11.379499435424805],["تان",-11.379515647888184],["llo",-11.37984561920166],["48",-11.379852294921877],["▁masalah",-11.379854202270508],["▁места",-11.379926681518556],["队",-11.379938125610352],["нан",-11.37995433807373],["▁nakon",-11.380199432373049],["ına",-11.380349159240724],["▁Sri",-11.380396842956545],["▁trở",-11.38040256500244],["▁ثم",-11.380417823791504],["▁Ol",-11.38053035736084],["ность",-11.38059139251709],["str",-11.380602836608888],["yin",-11.380653381347656],["ะ",-11.380666732788086],["დან",-11.38066864013672],["▁đẹp",-11.380840301513672],["род",-11.380857467651367],["▁eile",-11.380941390991213],["▁ز",-11.38111400604248],["ാം",-11.381317138671877],["anak",-11.38133144378662],["লা",-11.381423950195312],["▁Nr",-11.381440162658691],["▁вер",-11.38149070739746],["vanje",-11.38152313232422],["▁maior",-11.381562232971191],["▁Full",-11.381576538085938],["வா",-11.381611824035645],["ának",-11.381619453430176],["▁Zi",-11.381916046142578],["uli",-11.381933212280272],["▁într",-11.382126808166504],["▁58",-11.382206916809082],["ther",-11.38224983215332],["කි",-11.382367134094238],["නි",-11.382389068603516],["ಿಗೆ",-11.382560729980469],["39",-11.38256549835205],["នេះ",-11.382613182067873],["▁sent",-11.382798194885254],["ਿ",-11.382889747619627],["▁న",-11.383028984069824],["▁કરો",-11.383092880249023],["▁verk",-11.3831787109375],["AN",-11.383366584777832],["▁जी",-11.3834867477417],["▁cookie",-1
1.383575439453123],["▁ese",-11.383668899536133],["انی",-11.383726119995115],["▁Lei",-11.38376522064209],["wch",-11.383803367614746],["पी",-11.383859634399414],["▁đều",-11.384082794189451],["nsa",-11.384088516235352],["提升",-11.38412857055664],["υς",-11.3842134475708],["▁tale",-11.384231567382812],["▁Dom",-11.38431167602539],["▁Tel",-11.384413719177246],["▁Forum",-11.3844575881958],["▁ک",-11.38455581665039],["ļ",-11.384561538696287],["tig",-11.38463306427002],["ായി",-11.38467025756836],["▁ε",-11.384732246398926],["▁name",-11.384751319885254],["▁makan",-11.384824752807615],["▁mwaka",-11.384883880615234],["며",-11.384953498840332],["▁جائے",-11.384957313537598],["▁בא",-11.385088920593262],["cións",-11.385122299194336],["全国",-11.3851318359375],["▁बन",-11.385246276855469],["▁mě",-11.385354042053224],["▁bile",-11.385367393493652],["금",-11.385490417480469],["▁Ю",-11.38556957244873],["Α",-11.3856782913208],["▁85",-11.38572597503662],["▁nə",-11.385926246643066],["▁godina",-11.386055946350098],["コ",-11.386070251464844],["aran",-11.386072158813477],["isch",-11.386146545410156],["tól",-11.386462211608888],["▁bawah",-11.386555671691896],["เข้า",-11.3865966796875],["▁jul",-11.386733055114746],["riya",-11.38678741455078],["लो",-11.386825561523438],["νο",-11.38693904876709],["形",-11.386978149414062],["發展",-11.3870849609375],["ింది",-11.387167930603027],["cas",-11.387182235717772],["▁watu",-11.38721752166748],["奇",-11.38722324371338],["▁format",-11.387364387512209],["▁Catalunya",-11.387564659118652],["ී",-11.387572288513184],["ug",-11.387622833251951],["ိ",-11.387681007385254],["▁aqui",-11.387768745422363],["▁tuli",-11.387850761413574],["ació",-11.387950897216797],["ï",-11.388117790222168],["less",-11.388123512268066],["▁لت",-11.388145446777344],["▁является",-11.38824462890625],["▁bonus",-11.388349533081056],["▁한국",-11.388388633728027],["nál",-11.38865089416504],["▁تهران",-11.388654708862305],["▁tập",-11.388716697692873],["ಹ",-11.388738632202148],["კ",-11.388760566711426],["▁РФ",-11.388779640197754],["तो",-11.3888578414917],["gs",-11.388886451721191],["▁жұмыс",-11.388955116271973],["▁intr",-11.389216423034668],["▁energia",-11.389256477355955],["▁ଏକ",-11.389325141906738],["ــ",-11.389338493347168],["▁ന",-11.38946533203125],["▁후",-11.389695167541504],["认为",-11.389710426330566],["▁ಸ",-11.39006519317627],["▁pihak",-11.390085220336914],["וני",-11.390091896057127],["νη",-11.390135765075684],["▁ఏ",-11.390192985534668],["ٍ",-11.390267372131348],["图",-11.39027976989746],["座",-11.390353202819824],["▁სი",-11.390419960021973],["▁προσ",-11.39048194885254],["▁צ",-11.390487670898438],["KA",-11.390564918518066],["基",-11.390827178955078],["▁mindre",-11.390926361083984],["▁استان",-11.391018867492676],["▁nagyon",-11.391053199768066],["ъ",-11.391178131103516],["isin",-11.39129638671875],["▁hóa",-11.391324043273926],["▁process",-11.391351699829102],["SP",-11.391412734985352],["▁Nội",-11.391423225402832],["▁Су",-11.391528129577637],["質",-11.391582489013672],["ใหญ่",-11.391608238220217],["▁сум",-11.391650199890137],["ೋ",-11.391656875610352],["သော",-11.391831398010254],["▁triển",-11.391847610473633],["▁तीन",-11.391966819763184],["-18",-11.39238166809082],["ো",-11.392393112182615],["▁؟",-11.392597198486328],["▁ander",-11.39281177520752],["وت",-11.392817497253418],["写",-11.39293098449707],["▁미",-11.392934799194336],["▁(4",-11.39302921295166],["거",-11.393147468566896],["▁בית",-11.393390655517578],["▁continua",-11.393474578857422],["比較",-11.393531799316406],["▁daudz",-11.39367961883545],["▁tiếng",-11.39387035369873],["▁fund",-11.39395236
968994],["aire",-11.39400863647461],["活",-11.39405345916748],["ñ",-11.394078254699709],["▁vista",-11.394085884094238],["ének",-11.394092559814451],["▁72",-11.394121170043944],["కా",-11.394247055053713],["總",-11.394335746765137],["лары",-11.394390106201172],["▁profil",-11.394659996032717],["▁mắt",-11.394787788391112],["差",-11.394797325134276],["▁топ",-11.394810676574709],["▁cuenta",-11.394837379455566],["▁`",-11.395010948181152],["▁prema",-11.395211219787598],["▁غ",-11.395244598388672],["▁●",-11.395289421081545],["ρι",-11.39533233642578],["▁말",-11.395370483398438],["არ",-11.395469665527344],["yon",-11.395583152770996],["ლე",-11.39564609527588],["hon",-11.395737648010254],["ლის",-11.395794868469238],["▁ම",-11.395861625671388],["ət",-11.396018981933594],["gh",-11.396036148071287],["▁Ng",-11.396084785461426],["kit",-11.396187782287598],["▁ए",-11.396302223205566],["▁ori",-11.39634895324707],["heim",-11.396382331848145],["یا",-11.396681785583496],["▁Kinder",-11.396756172180176],["win",-11.39677619934082],["▁când",-11.396820068359377],["ια",-11.397209167480469],["▁lesz",-11.397224426269531],["▁pres",-11.397302627563477],["▁مواد",-11.397546768188477],["sze",-11.397594451904297],["マ",-11.397664070129396],["▁нови",-11.39768123626709],["ír",-11.397889137268066],["▁Ра",-11.398222923278809],["▁saam",-11.39851188659668],["Li",-11.39859676361084],["ეთ",-11.398601531982422],["000",-11.398736000061035],["လို",-11.398771286010742],["▁viele",-11.398804664611816],["ș",-11.398855209350586],["▁साल",-11.39890956878662],["03",-11.399048805236816],["한다",-11.399049758911133],["ös",-11.399103164672852],["▁අද",-11.399166107177734],["ję",-11.39938259124756],["pres",-11.399454116821287],["▁soll",-11.399459838867188],["▁NO",-11.399484634399414],["▁magna",-11.399622917175291],["ayo",-11.39969253540039],["tic",-11.399713516235352],["思",-11.399781227111816],["▁glad",-11.399897575378418],["тка",-11.400028228759766],["参加",-11.400073051452637],["強",-11.400364875793455],["▁Nous",-11.400470733642578],["▁strategi",-11.400543212890623],["తి",-11.400568008422852],["▁ఉన్న",-11.400632858276367],["▁bahawa",-11.400749206542969],["បាន",-11.400762557983398],["▁menn",-11.40098762512207],["▁tinggi",-11.401042938232422],["ദ",-11.401114463806152],["▁diferentes",-11.401140213012695],["▁Vel",-11.401288032531738],["▁ш",-11.401345252990724],["▁ఓ",-11.40146541595459],["▁люди",-11.40146827697754],["▁мал",-11.401491165161133],["rka",-11.40152359008789],["▁cos",-11.40166473388672],["▁savu",-11.401726722717283],["ပြီး",-11.401772499084473],["ره",-11.401845932006836],["ým",-11.401872634887695],["02",-11.401899337768556],["▁ön",-11.401928901672363],["▁Rad",-11.40207290649414],["▁кар",-11.402098655700684],["ുന്നു",-11.402745246887209],["▁ເປັນ",-11.402765274047852],["▁خپل",-11.402862548828123],["曲",-11.402892112731934],["▁belum",-11.402908325195312],["▁23.",-11.40292739868164],["▁הה",-11.40295124053955],["▁человек",-11.403024673461914],["ออก",-11.403114318847656],["▁Just",-11.403204917907717],["▁حاصل",-11.403210639953612],["šti",-11.40328311920166],["kali",-11.403302192687988],["öl",-11.403691291809082],["▁cuộc",-11.403698921203612],["rei",-11.403730392456056],["▁leh",-11.403783798217772],["▁1998",-11.40385627746582],["▁Fo",-11.403935432434082],["▁ప్ర",-11.40395450592041],["▁tener",-11.403997421264648],["しました",-11.404006004333496],["される",-11.40410041809082],["▁አንድ",-11.40417766571045],["▁Over",-11.404341697692873],["▁کردن",-11.404515266418455],["▁مجلس",-11.404671669006348],["เงิน",-11.40475082397461],["▁многу",-11.405033111572266],["▁જો",-11.405112266540
527],["▁दे",-11.405139923095703],["▁בו",-11.405177116394045],["除",-11.405261993408203],["▁ancora",-11.405272483825684],["▁ris",-11.405301094055176],["▁قیمت",-11.405491828918455],["შ",-11.40558910369873],["▁হবে",-11.405843734741213],["▁хүн",-11.405893325805664],["▁Fel",-11.405901908874512],["圖",-11.405941009521484],["▁jalan",-11.406075477600098],["▁música",-11.40617847442627],["▁ნა",-11.40618133544922],["▁Tas",-11.406194686889648],["یہ",-11.406195640563965],["▁avant",-11.406224250793455],["ador",-11.406721115112305],["ність",-11.406875610351562],["ाचे",-11.406890869140623],["▁tijd",-11.406896591186523],["▁dil",-11.40693473815918],["chod",-11.406949043273926],["▁mp",-11.407061576843262],["▁იმ",-11.40706729888916],["▁dự",-11.407093048095703],["▁көп",-11.407413482666016],["▁lực",-11.407428741455078],["共同",-11.40751838684082],["tit",-11.407621383666992],["▁lange",-11.407630920410156],["▁sett",-11.40778350830078],["▁ישראל",-11.408028602600098],["liv",-11.408246994018556],["লে",-11.408374786376951],["sio",-11.408430099487305],["▁aug",-11.408498764038086],["▁Mp",-11.40874195098877],["▁BE",-11.408818244934082],["▁विकास",-11.408960342407228],["▁Res",-11.409069061279297],["▁ən",-11.40911865234375],["▁Under",-11.409131050109863],["▁Hay",-11.409186363220217],["▁ერთ",-11.409210205078123],["▁ទៅ",-11.409324645996094],["▁فیلم",-11.409399032592772],["▁Bay",-11.40943431854248],["之一",-11.40956974029541],["▁ih",-11.40957736968994],["ową",-11.409747123718262],["inio",-11.40975284576416],["同时",-11.409866333007812],["kis",-11.410158157348633],["▁sha",-11.410161972045898],["▁pertama",-11.410171508789062],["voj",-11.41018009185791],["▁otro",-11.410208702087402],["▁كانت",-11.410238265991213],["▁quo",-11.41042709350586],["▁alltid",-11.410452842712402],["▁già",-11.410562515258787],["ಗಳಲ್ಲಿ",-11.410645484924316],["▁могу",-11.410858154296877],["▁онлайн",-11.411016464233398],["▁παρα",-11.411176681518556],["وار",-11.411252975463867],["法律",-11.41128158569336],["пен",-11.411306381225586],["▁bort",-11.41137409210205],["தா",-11.41160488128662],["▁question",-11.411640167236328],["ాడు",-11.41180419921875],["nne",-11.411867141723633],["▁२०",-11.411894798278809],["▁३",-11.412062644958496],["▁fått",-11.41207790374756],["▁этот",-11.412198066711426],["」,",-11.41221523284912],["▁inne",-11.412225723266602],["റി",-11.412257194519045],["已經",-11.412331581115724],["▁ditu",-11.412397384643556],["▁enda",-11.412424087524414],["үү",-11.412455558776855],["▁þ",-11.41246223449707],["▁ezt",-11.41246509552002],["▁אחד",-11.41268825531006],["-2018",-11.412766456604004],["रो",-11.413199424743652],["▁procent",-11.413277626037598],["▁زیر",-11.413411140441896],["▁ол",-11.41343116760254],["▁AM",-11.413509368896484],["kirja",-11.413673400878906],["还有",-11.413724899291992],["▁काठमाडौं",-11.414109230041504],["▁হয়",-11.41411304473877],["只要",-11.414121627807615],["▁spor",-11.41425323486328],["結果",-11.41425895690918],["ис",-11.414289474487305],["▁մեր",-11.414750099182127],["▁период",-11.414755821228027],["CO",-11.414813041687012],["▁gaat",-11.414892196655272],["▁тези",-11.41507053375244],["▁visita",-11.415217399597168],["▁presidente",-11.415360450744627],["▁verir",-11.415360450744627],["ская",-11.415486335754396],["sjon",-11.415514945983888],["▁ம",-11.415594100952148],["网站",-11.415639877319336],["PO",-11.415770530700684],["▁නි",-11.415813446044922],["と思います",-11.415813446044922],["▁glas",-11.415912628173828],["gio",-11.415977478027344],["ht",-11.416049003601074],["▁ата",-11.416061401367188],["▁үйл",-11.41608715057373],["acji",-11.416144371032717],["▁жол",-11.41
620635986328],["▁தமிழ்",-11.416220664978027],["▁cur",-11.416295051574709],["▁sebelum",-11.41630744934082],["▁මෙම",-11.416321754455566],["▁πως",-11.416367530822754],["AS",-11.41639518737793],["。」",-11.416461944580078],["努力",-11.416666030883787],["▁Jesus",-11.416730880737305],["▁š",-11.416794776916504],["મા",-11.41682243347168],["מי",-11.416845321655272],["▁proprio",-11.41690158843994],["▁בר",-11.416914939880373],["▁خدا",-11.417001724243164],["ula",-11.4171142578125],["▁dike",-11.417122840881348],["લા",-11.417137145996094],["ਤਾ",-11.41725730895996],["▁интернет",-11.417349815368652],["vez",-11.417396545410156],["ाई",-11.417485237121582],["源",-11.417523384094238],["雨",-11.417622566223145],["ตา",-11.417659759521484],["▁sva",-11.417694091796877],["ச",-11.417783737182615],["▁Anh",-11.41781997680664],["请",-11.418095588684082],["ako",-11.418171882629396],["▁posta",-11.418231010437012],["▁ના",-11.418274879455566],["▁olika",-11.418291091918944],["▁Bakı",-11.418509483337402],["▁tour",-11.4185209274292],["▁anys",-11.418521881103516],["提",-11.418601036071776],["ετε",-11.418852806091309],["また",-11.418853759765623],["▁כדי",-11.418890953063965],["▁حضرت",-11.418901443481444],["▁Ա",-11.418902397155762],["ष",-11.41921329498291],["江",-11.41923713684082],["▁гар",-11.41941738128662],["лож",-11.41953945159912],["તી",-11.419676780700684],["▁ক",-11.419708251953123],["ň",-11.419719696044922],["▁byl",-11.419754028320312],["▁sida",-11.419767379760742],["ვ",-11.419781684875488],["▁دغه",-11.41985321044922],["На",-11.419926643371582],["▁Ли",-11.420052528381348],["▁Sul",-11.420140266418455],["▁ES",-11.420180320739746],["▁ему",-11.420217514038086],["قى",-11.420289993286133],["▁nici",-11.420353889465332],["ον",-11.420382499694824],["土",-11.420588493347168],["朋友",-11.42068099975586],["drž",-11.420742988586426],["லை",-11.420917510986328],["ског",-11.42096996307373],["tru",-11.42122745513916],["ţia",-11.421246528625488],["సి",-11.421627044677734],["▁yıl",-11.421649932861328],["▁май",-11.421687126159668],["cam",-11.421697616577148],["▁EL",-11.42178440093994],["▁कार्य",-11.42182159423828],["▁civil",-11.42182731628418],["▁Пре",-11.421876907348633],["▁என்ன",-11.421896934509276],["rze",-11.4219331741333],["ელი",-11.421957969665527],["▁للم",-11.42209243774414],["单",-11.42220973968506],["▁diverse",-11.422213554382324],["▁try",-11.422354698181152],["▁Off",-11.422374725341797],["▁kerja",-11.422417640686035],["انو",-11.422460556030272],["ható",-11.422511100769045],["▁Apa",-11.422704696655272],["▁hak",-11.422752380371094],["și",-11.42279815673828],["ек",-11.422941207885742],["ଦ",-11.422982215881348],["▁nieuwe",-11.42300033569336],["ούς",-11.423006057739258],["bank",-11.423013687133787],["▁open",-11.423019409179688],["▁AL",-11.42324924468994],["▁больш",-11.423263549804688],["သာ",-11.423468589782717],["▁nego",-11.423667907714844],["該",-11.423784255981444],["▁cultura",-11.423853874206545],["itatea",-11.423911094665527],["干",-11.4239501953125],["▁labai",-11.423978805541992],["▁jobb",-11.42401123046875],["▁Trung",-11.424036979675291],["gg",-11.424105644226074],["▁diye",-11.424118995666504],["ං",-11.424156188964844],["ခ",-11.424240112304688],["▁ବା",-11.42431640625],["▁רק",-11.424399375915527],["ats",-11.42441463470459],["▁nä",-11.424428939819336],["ற",-11.424473762512209],["▁Գ",-11.424483299255373],["emos",-11.424598693847656],["▁onun",-11.424610137939451],["卻",-11.424752235412598],["▁dịch",-11.424763679504396],["▁falta",-11.424787521362305],["ată",-11.424817085266112],["▁الل",-11.425148963928224],["▁zwei",-11.42543125152588],["▁преди",-11.4254
39834594728],["మ్",-11.425612449645996],["اني",-11.425615310668944],["▁student",-11.42586612701416],["bare",-11.425980567932127],["▁když",-11.42612075805664],["報",-11.42612648010254],["tka",-11.42617893218994],["▁tapa",-11.426217079162598],["▁begin",-11.426222801208496],["tab",-11.426276206970217],["▁something",-11.42632007598877],["ನ್ನು",-11.426323890686035],["▁JA",-11.426356315612791],["යෙන්",-11.426384925842283],["▁kanyang",-11.426554679870604],["ೀ",-11.426727294921877],["▁mismo",-11.426740646362305],["skim",-11.426769256591797],["▁Ac",-11.42682933807373],["মা",-11.426905632019045],["ایی",-11.426920890808104],["īgi",-11.426953315734863],["ပ",-11.427059173583984],["▁απ",-11.4270601272583],["ppen",-11.42707061767578],["▁hace",-11.427074432373049],["jak",-11.427168846130373],["▁nej",-11.427193641662598],["ând",-11.427204132080078],["▁ví",-11.427241325378418],["▁metų",-11.427263259887695],["▁Dy",-11.427285194396973],["▁bedre",-11.42729949951172],["ქ",-11.427322387695312],["▁видео",-11.427451133728027],["မှု",-11.427471160888672],["tě",-11.427671432495115],["हरू",-11.427807807922363],["▁kamera",-11.427864074707031],["▁ina",-11.428022384643556],["Ta",-11.428128242492676],["ത്തിൽ",-11.428159713745115],["▁Pat",-11.428190231323242],["▁aquí",-11.428271293640137],["ኩ",-11.428335189819336],["▁central",-11.428433418273926],["▁Ме",-11.428468704223633],["▁finde",-11.428497314453123],["▁premier",-11.42855167388916],["blog",-11.428634643554688],["▁hidup",-11.428647994995115],["できる",-11.428865432739258],["业",-11.428893089294434],["▁ва",-11.429021835327148],["нен",-11.429059982299805],["校",-11.429099082946776],["来说",-11.429161071777344],["▁бути",-11.429241180419922],["▁Hari",-11.429319381713867],["rian",-11.429431915283203],["ठ",-11.429498672485352],["▁כמו",-11.42963409423828],["▁fond",-11.429647445678713],["ència",-11.429789543151855],["ait",-11.429844856262209],["▁trọng",-11.429871559143066],["▁būs",-11.429893493652344],["▁nom",-11.429913520812988],["SI",-11.430041313171388],["▁Hot",-11.430252075195312],["got",-11.430254936218262],["ορ",-11.43027400970459],["ční",-11.430421829223633],["▁National",-11.43047332763672],["▁wake",-11.430618286132812],["屋",-11.430625915527344],["▁Për",-11.430731773376465],["ሽ",-11.430789947509766],["ਰਾ",-11.430906295776367],["에게",-11.430975914001465],["▁kuna",-11.431024551391602],["▁שנ",-11.431037902832031],["真的",-11.431049346923828],["▁थी",-11.431260108947754],["ിയ",-11.431269645690918],["▁fatto",-11.43133544921875],["▁Max",-11.431594848632812],["кра",-11.431600570678713],["larini",-11.43165397644043],["▁dugu",-11.431883811950684],["ário",-11.432034492492676],["▁That",-11.432035446166992],["▁TO",-11.432154655456545],["rse",-11.432169914245604],["erte",-11.432244300842283],["▁tiden",-11.432406425476074],["▁μετά",-11.432672500610352],["▁zal",-11.432694435119627],["▁bla",-11.432899475097656],["▁अध्यक्ष",-11.433028221130373],["▁ז",-11.433042526245115],["▁pari",-11.43323802947998],["▁Instagram",-11.433344841003418],["▁faut",-11.433427810668944],["▁ім",-11.433456420898438],["▁jetzt",-11.433530807495115],["钱",-11.433605194091797],["इ",-11.433615684509276],["▁France",-11.433645248413086],["▁kohta",-11.433847427368164],["▁била",-11.433880805969238],["пер",-11.433945655822754],["酒",-11.434063911437988],["ኑ",-11.43422794342041],["▁റെ",-11.434319496154783],["▁ոչ",-11.43433952331543],["▁цього",-11.43453311920166],["▁Χ",-11.434629440307615],["ပါ။",-11.434857368469238],["▁около",-11.43492317199707],["ење",-11.434981346130373],["-12",-11.435091018676758],["IS",-11.435161590576172],["ති",-11.
43525505065918],["▁chưa",-11.435303688049316],["▁يمكن",-11.435340881347656],["▁NE",-11.435376167297363],["▁tablet",-11.435516357421877],["▁Pir",-11.435538291931152],["▁grand",-11.43559455871582],["▁←",-11.43560791015625],["▁હતી",-11.435717582702637],["ging",-11.43572998046875],["▁són",-11.435734748840332],["Ä",-11.43573760986328],["▁comp",-11.436087608337402],["ər",-11.436208724975586],["ลง",-11.436415672302246],["▁Ok",-11.436532974243164],["നി",-11.436598777770996],["▁प्रति",-11.43665885925293],["▁hoạt",-11.436676025390623],["িক",-11.436716079711914],["▁других",-11.43674087524414],["қ",-11.43678379058838],["ിച്ചു",-11.43681812286377],["سر",-11.437142372131348],["▁aad",-11.437211990356444],["னை",-11.437219619750977],["▁tor",-11.437298774719238],["ım",-11.437740325927734],["▁bất",-11.43783473968506],["լ",-11.437952041625977],["▁работи",-11.437970161437988],["ତା",-11.438014030456545],["且",-11.43812370300293],["▁олон",-11.438124656677246],["gó",-11.438128471374512],["▁şekilde",-11.43837547302246],["▁lett",-11.438468933105469],["港",-11.438508987426758],["లను",-11.438769340515137],["▁devam",-11.43882656097412],["▁nema",-11.43898105621338],["▁jeszcze",-11.43899154663086],["▁смо",-11.439026832580566],["▁SI",-11.439041137695312],["II",-11.439068794250488],["▁own",-11.439139366149902],["▁गरेका",-11.43915843963623],["▁бил",-11.43926239013672],["。(",-11.439278602600098],["▁bl",-11.439388275146484],["得到",-11.439473152160645],["ሩ",-11.439491271972656],["▁chọn",-11.439491271972656],["▁spel",-11.43959140777588],["hir",-11.43960189819336],["▁euros",-11.439615249633787],["▁Day",-11.439632415771484],["▁primo",-11.439697265625],["mark",-11.439749717712402],["nni",-11.43976879119873],["▁Dal",-11.439814567565918],["环境",-11.43984317779541],["වත්",-11.439847946166992],["റ്റ്",-11.439889907836914],["▁(8)",-11.439935684204102],["▁Qu",-11.440123558044434],["vio",-11.440126419067385],["▁lidt",-11.440274238586426],["▁दी",-11.440279960632324],["گی",-11.440448760986328],["వు",-11.440500259399414],["▁terjadi",-11.440675735473633],["▁tum",-11.440728187561035],["sä",-11.440889358520508],["▁62",-11.440902709960938],["▁Ку",-11.440911293029783],["sport",-11.441076278686523],["බ",-11.441083908081056],["mg",-11.441121101379396],["λα",-11.441288948059082],["▁tiêu",-11.441339492797852],["نو",-11.441421508789062],["▁రా",-11.441452980041504],["▁tìm",-11.441625595092772],["лось",-11.441690444946287],["По",-11.44172191619873],["算",-11.441761016845703],["▁700",-11.441923141479492],["カ",-11.442017555236816],["▁staat",-11.44216251373291],["생",-11.44230842590332],["▁peng",-11.442330360412598],["▁aici",-11.442357063293455],["叫",-11.442378044128418],["ുള്ള",-11.442668914794922],["▁Č",-11.442684173583984],["ቆ",-11.442770957946776],["▁Uz",-11.442971229553224],["▁ನಿಮ್ಮ",-11.443055152893066],["▁erre",-11.443099975585938],["58",-11.443233489990234],["stru",-11.443241119384766],["чин",-11.443260192871094],["cc",-11.443315505981444],["lina",-11.443445205688477],["হ",-11.44347858428955],["ಡ್",-11.44367218017578],["▁segundo",-11.443726539611816],["mä",-11.44381332397461],["▁প্র",-11.443848609924316],["िंग",-11.44387435913086],["프",-11.444049835205078],["די",-11.444185256958008],["▁Nova",-11.444212913513184],["▁Cam",-11.444291114807127],["▁adres",-11.444308280944824],["▁כך",-11.444348335266112],["▁Black",-11.444456100463867],["bak",-11.44451904296875],["▁nell",-11.444535255432127],["▁Cel",-11.44467830657959],["▁99",-11.444747924804688],["hari",-11.444775581359863],["▁पण",-11.444791793823242],["miseks",-11.444805145263672],["▁Lan",-11.4448823928833],
["▁ដោយ",-11.44496250152588],["qi",-11.44497299194336],["▁ሊ",-11.44504165649414],["▁Net",-11.445244789123535],["ιά",-11.445311546325684],["гор",-11.44541072845459],["▁ඇත",-11.445570945739746],["▁ос",-11.4456148147583],["▁Up",-11.445757865905762],["▁vei",-11.445881843566896],["▁ख",-11.446080207824709],["▁жизни",-11.446147918701172],["▁unei",-11.446173667907717],["▁استعمال",-11.446222305297852],["៖",-11.446292877197266],["િ",-11.446294784545898],["▁гр",-11.44633674621582],["數",-11.446337699890137],["MI",-11.446496963500977],["▁مصر",-11.446504592895508],["皮",-11.446562767028809],["llinen",-11.446614265441896],["ေ",-11.446669578552246],["ை",-11.446805000305176],["▁premi",-11.446866989135742],["届",-11.446895599365234],["影响",-11.446919441223145],["▁gera",-11.447016716003418],["TM",-11.447134971618652],["▁tüm",-11.44721794128418],["▁harga",-11.447225570678713],["▁]",-11.447303771972656],["ວ",-11.447409629821776],["taj",-11.44754123687744],["▁tím",-11.447574615478516],["▁նոր",-11.447596549987791],["短",-11.447633743286133],["樂",-11.447813034057615],["сыз",-11.447994232177734],["être",-11.448175430297852],["▁ಅವರ",-11.44826602935791],["▁повеќе",-11.44829559326172],["לה",-11.448296546936035],["▁κι",-11.44838047027588],["▁anima",-11.44841766357422],["▁hér",-11.448541641235352],["із",-11.44873046875],["ках",-11.448753356933594],["▁पहले",-11.448800086975098],["进",-11.448972702026367],["ans",-11.449017524719238],["組",-11.449158668518066],["data",-11.449274063110352],["▁töö",-11.44945240020752],["යන්",-11.44947624206543],["ਨਾ",-11.44953441619873],["დის",-11.449609756469728],["▁någon",-11.449665069580078],["▁मु",-11.449678421020508],["▁Tele",-11.449687957763672],["▁داده",-11.44976043701172],["verk",-11.449771881103516],["அ",-11.449884414672852],["▁Ven",-11.44992733001709],["ոն",-11.449938774108888],["ในการ",-11.449992179870604],["▁57",-11.45003890991211],["▁manusia",-11.45008659362793],["ujú",-11.450100898742676],["▁oko",-11.450233459472656],["▁xem",-11.450303077697754],["▁😉",-11.450373649597168],["▁quem",-11.450456619262695],["▁vás",-11.450491905212402],["▁ವಿ",-11.45050048828125],["れ",-11.450529098510742],["毛",-11.450643539428713],["▁Oh",-11.450738906860352],["增加",-11.45077896118164],["▁μόνο",-11.45092487335205],["▁selbst",-11.450927734375],["borg",-11.451009750366213],["▁trata",-11.451078414916992],["系統",-11.45117473602295],["nova",-11.451247215270996],["▁млн",-11.451249122619627],["кс",-11.451322555541992],["ër",-11.451324462890623],["shëm",-11.451338768005373],["▁дело",-11.45136547088623],["▁ireo",-11.451373100280762],["▁petit",-11.45138931274414],["லா",-11.45155143737793],["▁little",-11.451579093933104],["▁izango",-11.45158576965332],["sul",-11.451661109924316],[".10.",-11.45176601409912],["sında",-11.451799392700195],["▁baada",-11.45193099975586],["だった",-11.452133178710938],["▁norsk",-11.452181816101074],["▁פי",-11.45242404937744],["raj",-11.4524564743042],["ney",-11.452479362487791],["link",-11.452584266662598],["βα",-11.452771186828612],["▁enkelt",-11.452795028686523],["кор",-11.453011512756348],["բ",-11.453099250793455],["▁sedang",-11.453266143798828],["▁त्या",-11.45327091217041],["▁było",-11.453275680541992],["▁सि",-11.453310012817385],["کار",-11.45335578918457],["▁ថា",-11.453474044799805],["வே",-11.4535493850708],["▁ಇ",-11.453596115112305],["തി",-11.45359992980957],["▁sê",-11.45362949371338],["zko",-11.453739166259766],["▁آئی",-11.453789710998535],["▁fort",-11.453801155090332],["▁terug",-11.453819274902344],["දා",-11.45388889312744],["▁कोई",-11.454020500183104],["▁Paul",-11.454029083251951],["áin",
-11.454060554504396],["▁marka",-11.454081535339355],["저",-11.454124450683594],["▁افغان",-11.454172134399414],["▁בע",-11.454211235046388],["▁клас",-11.454222679138184],["▁हम",-11.454327583312988],["▁გან",-11.454442977905272],["nka",-11.45468521118164],["▁nhau",-11.454710960388184],["▁zaidi",-11.454745292663574],["▁Milli",-11.454769134521484],["ző",-11.454843521118164],["ന്റെ",-11.454961776733398],["▁څخه",-11.455249786376951],["▁meno",-11.455275535583496],["▁vairāk",-11.4553804397583],["оў",-11.45542049407959],["▁جانب",-11.45545482635498],["ဆို",-11.455474853515623],["രി",-11.455524444580078],["ì",-11.455527305603027],["мін",-11.45567226409912],["▁Sem",-11.455755233764648],["šanu",-11.455775260925291],["▁arte",-11.45603370666504],["мат",-11.456184387207031],["ເປັນ",-11.456448554992676],["park",-11.456524848937988],["推",-11.45655345916748],["மாக",-11.456602096557615],["▁eo",-11.456660270690918],["▁συν",-11.456671714782717],["သား",-11.456695556640623],["▁lavoro",-11.456709861755373],["▁خط",-11.456710815429688],["战",-11.456761360168455],["ଲେ",-11.456814765930176],["▁banda",-11.456929206848145],["video",-11.45702838897705],["▁ปี",-11.45704174041748],["▁Amb",-11.457143783569336],["▁metu",-11.457268714904783],["اں",-11.457304000854492],["800",-11.457368850708008],["kort",-11.457377433776855],["▁Gre",-11.457530975341797],["▁Það",-11.457566261291504],["▁Bio",-11.4575834274292],["lio",-11.45760440826416],["लि",-11.45762062072754],["▁dessa",-11.45773220062256],["▁काही",-11.45773696899414],["▁дня",-11.457932472229004],["になる",-11.457988739013672],["▁hatte",-11.458075523376465],["▁интерес",-11.458148956298828],["eret",-11.458375930786133],["▁ret",-11.458575248718262],["▁أنه",-11.458643913269045],["▁dog",-11.45872974395752],["syon",-11.458733558654783],["▁Berlin",-11.45875072479248],["▁када",-11.458879470825195],["▁USB",-11.45893096923828],["▁soit",-11.458995819091797],["▁ži",-11.459029197692873],["▁wakati",-11.459110260009766],["▁склад",-11.459155082702637],["▁већ",-11.459178924560549],["▁estos",-11.459206581115724],["իկ",-11.459291458129885],["ijo",-11.459515571594238],["のですが",-11.459733963012695],["▁mars",-11.459775924682615],["▁dẫn",-11.459832191467283],["▁1,5",-11.459877967834473],["اط",-11.4598970413208],["pr",-11.459908485412598],["zat",-11.459949493408203],["34",-11.460028648376465],["▁توجه",-11.460111618041992],["▁cred",-11.460122108459473],["▁fino",-11.460280418395996],["個人",-11.460285186767578],["网络",-11.460301399230955],["▁quello",-11.4603271484375],["▁stort",-11.460336685180664],["▁मात्र",-11.460347175598145],["▁arra",-11.46037769317627],["pod",-11.460759162902832],["▁Kaj",-11.460887908935549],["▁ela",-11.46094036102295],["▁کن",-11.46095371246338],["ڈ",-11.460956573486328],["▁público",-11.461042404174805],["▁lúc",-11.461050987243652],["▁х",-11.461076736450195],["गा",-11.461152076721191],["▁aller",-11.461332321166992],["maya",-11.46134090423584],["创新",-11.461418151855469],["▁Türk",-11.461450576782228],["選擇",-11.461488723754885],["▁cena",-11.46170139312744],["▁gri",-11.461725234985352],["▁Tapi",-11.46177577972412],["ācijas",-11.461886405944824],["▁yaitu",-11.462084770202637],["▁ვერ",-11.462176322937012],["▁jenter",-11.462308883666992],["iamo",-11.462401390075684],["ums",-11.462409973144531],["▁vetëm",-11.462438583374023],["▁capital",-11.462563514709473],["▁스",-11.46257209777832],["tine",-11.462645530700684],["▁fois",-11.46274757385254],["▁Мар",-11.46281623840332],["▁човек",-11.462825775146484],["▁sami",-11.462894439697266],["之前",-11.462930679321287],["യ്",-11.46297550201416],["▁old",-11.46307849
8840332],["▁Für",-11.463129997253418],["ális",-11.463171005249023],["▁შემდეგ",-11.46325397491455],["▁head",-11.463360786437988],["дат",-11.463411331176758],["▁모",-11.463440895080566],["▁פ",-11.463595390319824],["▁Jy",-11.463667869567873],["ovali",-11.46367073059082],["-5",-11.463788986206056],["ภาพ",-11.463860511779783],["野",-11.464094161987305],["▁उप",-11.46410083770752],["素",-11.464173316955566],["дел",-11.464269638061523],["▁Movie",-11.46428680419922],["▁mbi",-11.46432113647461],["Ο",-11.464327812194824],["出来",-11.464338302612305],["▁úgy",-11.464421272277832],["វ",-11.46470546722412],["▁болып",-11.46473217010498],["္",-11.464835166931152],["派",-11.464883804321287],["▁друг",-11.46501636505127],["▁ल",-11.465073585510254],["冷",-11.465110778808594],["dul",-11.46518325805664],["▁langsung",-11.465242385864258],["▁arba",-11.465489387512209],["tuar",-11.465579986572266],["ਦਾ",-11.465664863586426],["▁cidade",-11.465692520141602],["ping",-11.465919494628906],["▁הנ",-11.46595859527588],["▁Oo",-11.465964317321776],["▁oficial",-11.466012954711914],["னி",-11.466104507446287],["▁rằng",-11.466147422790527],["▁הש",-11.466270446777344],["▁महा",-11.466304779052734],["▁Pie",-11.46630573272705],["▁պետք",-11.466395378112791],["ują",-11.466419219970703],["▁mitt",-11.46645164489746],["ंग",-11.466453552246094],["交流",-11.46656894683838],["▁원",-11.466634750366213],["▁ਇੱਕ",-11.466869354248049],["sc",-11.466904640197754],["▁น",-11.466959953308104],["rig",-11.46706199645996],["▁weiter",-11.467305183410645],["▁क्षेत्र",-11.46739673614502],["cze",-11.467646598815918],["lığı",-11.467721939086914],["ère",-11.467777252197266],["▁leta",-11.467805862426758],["ulo",-11.467886924743652],["ел",-11.467939376831056],["ngu",-11.467949867248535],["тик",-11.46798610687256],["▁önce",-11.468059539794922],["基本",-11.46819019317627],["ći",-11.468220710754396],["मी",-11.46823024749756],["▁मैं",-11.46836280822754],["ből",-11.468488693237305],["▁شوي",-11.468572616577148],["▁proc",-11.468661308288574],["ує",-11.468682289123535],["▁Kol",-11.468730926513672],["▁Под",-11.468768119812012],["章",-11.469021797180176],["▁වගේ",-11.469073295593262],["▁مون",-11.469078063964844],["是否",-11.469171524047852],["ик",-11.469531059265137],["▁kuri",-11.469612121582031],["▁πρέπει",-11.469992637634276],["্য",-11.470019340515137],["▁Vad",-11.470226287841797],["▁empresas",-11.470270156860352],["▁прави",-11.470355987548828],["جا",-11.470467567443848],["jev",-11.47046947479248],["शा",-11.470641136169434],["▁dni",-11.470687866210938],["▁Vir",-11.470837593078612],["▁sense",-11.471006393432615],["▁इन",-11.471281051635742],["وم",-11.471435546875],["▁saka",-11.471534729003906],["▁जानकारी",-11.471534729003906],["▁भ",-11.471580505371094],["మి",-11.47161865234375],["Ge",-11.471691131591797],["▁manier",-11.471717834472656],["▁első",-11.47174835205078],["▁ٿا",-11.471863746643066],["inter",-11.471906661987305],["▁passa",-11.471924781799316],["▁thật",-11.47199821472168],["选",-11.472013473510742],["hur",-11.472126960754396],["▁xu",-11.472132682800291],["よ",-11.47214698791504],["▁Hel",-11.472277641296388],["jų",-11.472393989562988],["aux",-11.47255039215088],["▁nic",-11.472579002380373],["▁Já",-11.472739219665527],["▁Ud",-11.472856521606444],["тік",-11.47296905517578],["ші",-11.472970008850098],["႕",-11.473036766052246],["ある",-11.47325611114502],["▁تاریخ",-11.473329544067385],["▁زبان",-11.473417282104492],["國際",-11.473578453063965],["▁Hun",-11.473735809326172],["▁것이",-11.47402000427246],["vod",-11.474028587341309],["听",-11.474194526672363],["체",-11.474201202392578],["▁putea",-11.
474263191223145],["내",-11.474356651306152],["▁selama",-11.474377632141112],["2)",-11.474565505981444],["▁Ed",-11.474690437316896],["▁Ara",-11.474721908569336],["čių",-11.474791526794434],["unun",-11.474889755249023],["ските",-11.474896430969238],["▁سازمان",-11.474952697753906],["▁байх",-11.474980354309082],["▁دار",-11.475021362304688],["ୟ",-11.475029945373535],["tais",-11.475122451782228],["▁број",-11.475128173828123],["โดย",-11.475398063659668],["▁kredit",-11.47540283203125],["▁kutoka",-11.475431442260742],["Te",-11.475573539733888],["▁uur",-11.475618362426758],["ణ",-11.4756498336792],["ட்டி",-11.475650787353516],["ენ",-11.475690841674805],["▁nota",-11.475741386413574],["وه",-11.475749969482422],["▁اینکه",-11.475760459899902],["ries",-11.47590446472168],["λη",-11.475997924804688],["▁선",-11.476154327392578],["制度",-11.47617530822754],["فت",-11.476181030273438],["▁بیشتر",-11.47621250152588],["ल्या",-11.47628688812256],["地方",-11.476325035095217],["▁다른",-11.476871490478516],["▁juu",-11.47714614868164],["▁بہت",-11.47716999053955],["▁lat",-11.477176666259766],["▁પ્ર",-11.477225303649902],["致",-11.477269172668455],["▁שמ",-11.477309226989746],["▁così",-11.47740364074707],["dhe",-11.47745418548584],["用户",-11.477527618408203],["值",-11.477676391601562],["▁вот",-11.477678298950195],["ുകള്",-11.47773551940918],["▁بند",-11.47780704498291],["ію",-11.478007316589355],["▁לש",-11.478156089782717],["uku",-11.478198051452637],["ाला",-11.478605270385742],["▁جنهن",-11.478641510009766],["త్",-11.478753089904783],["▁risk",-11.478767395019531],["▁weg",-11.478880882263184],["▁PRO",-11.478900909423828],["▁lịch",-11.478915214538574],["łam",-11.479106903076172],["cor",-11.479240417480469],["價",-11.479268074035645],["ath",-11.479371070861816],["▁2013.",-11.479430198669434],["kwa",-11.479433059692385],["▁ilgili",-11.479517936706545],["▁خاص",-11.479534149169922],["▁조",-11.479783058166504],["▁χ",-11.47978401184082],["ď",-11.479848861694336],["▁Não",-11.47990894317627],["mé",-11.479935646057127],["▁Bon",-11.47994899749756],["ေတာ့",-11.480022430419922],["مۇ",-11.480108261108398],["▁setelah",-11.48011589050293],["▁ነገር",-11.480120658874512],["shop",-11.48016357421875],["ուր",-11.480230331420898],["▁nesta",-11.480294227600098],["▁wanita",-11.480360984802246],["▁primeiro",-11.480544090270996],["▁haqqında",-11.480609893798828],["யின்",-11.480642318725586],["radi",-11.48074436187744],["▁West",-11.480857849121094],["०",-11.481199264526367],["▁Alt",-11.481233596801758],["abb",-11.481277465820312],["pä",-11.481343269348145],["調",-11.481441497802734],["▁mea",-11.481566429138184],["▁وهو",-11.481816291809082],["▁कार्यक्रम",-11.481836318969728],["▁itse",-11.481938362121582],["tà",-11.481992721557615],["ðar",-11.482048988342283],["▁sum",-11.482137680053713],["▁φ",-11.48215389251709],["▁Má",-11.482256889343262],["has",-11.482388496398926],["▁número",-11.482471466064451],["▁बहुत",-11.482531547546388],["福",-11.482538223266602],["▁gjort",-11.482614517211914],["บน",-11.482644081115724],["▁aquesta",-11.48266887664795],["ība",-11.482694625854492],["̀",-11.482818603515623],["rg",-11.48293399810791],["র্",-11.48324489593506],["▁kuni",-11.483254432678224],["菜",-11.48327922821045],["▁gjithë",-11.483287811279297],["▁Muhammad",-11.48331356048584],["кою",-11.483355522155762],["ень",-11.483473777770996],["▁november",-11.483576774597168],["▁melhor",-11.483577728271484],["▁color",-11.483652114868164],["▁güzel",-11.483851432800291],["▁política",-11.483954429626465],["▁निर्माण",-11.48398780822754],["ران",-11.484136581420898],["Mi",-11.484289169311523],["ней
",-11.48432159423828],["▁temu",-11.484363555908203],["▁iets",-11.48439121246338],["華",-11.484492301940918],["▁1997",-11.484552383422852],["▁tên",-11.484569549560549],["▁rest",-11.484619140625],["▁ня",-11.484675407409668],["▁mudah",-11.48476219177246],["cum",-11.484793663024902],["▁Come",-11.484834671020508],["mina",-11.484846115112305],["ან",-11.484968185424805],["▁uten",-11.484989166259766],["▁تولید",-11.485048294067385],["oh",-11.48507308959961],["ღ",-11.485085487365724],["รถ",-11.485124588012695],["▁nostro",-11.485150337219238],["▁sekolah",-11.485151290893556],["હ",-11.485248565673828],["vanja",-11.48532009124756],["▁Ни",-11.48533535003662],["rí",-11.48540496826172],["ними",-11.485440254211426],["▁має",-11.485458374023438],["才能",-11.485474586486816],["约",-11.485519409179688],["▁ngoài",-11.485572814941406],["ME",-11.485623359680176],["ції",-11.485675811767578],["第二",-11.485852241516112],["▁Daten",-11.48611068725586],["నీ",-11.4861421585083],["▁mercado",-11.486292839050291],["long",-11.48634910583496],["qo",-11.486387252807615],["▁हर",-11.486410140991213],["iniai",-11.486515045166016],["▁İstanbul",-11.486549377441406],["권",-11.486583709716797],["改",-11.486748695373535],["λι",-11.486860275268556],["Co",-11.486896514892578],["▁peste",-11.486897468566896],["dla",-11.486908912658691],["tto",-11.486973762512209],["법",-11.487015724182127],["▁kembali",-11.48730754852295],["κο",-11.48733901977539],["lou",-11.48752212524414],["▁ру",-11.487558364868164],["ությամբ",-11.48768711090088],["▁fazla",-11.487733840942385],["▁dacă",-11.487874031066896],["需求",-11.487884521484377],["menn",-11.487902641296388],["▁Hand",-11.48794937133789],["▁muut",-11.487993240356444],["▁ဦး",-11.488401412963867],["კა",-11.488431930541992],["▁μπορεί",-11.488463401794434],["▁type",-11.488502502441406],["ରା",-11.488703727722168],["▁MP",-11.488924980163574],["▁kra",-11.489042282104492],["難",-11.489046096801758],["▁sar",-11.48911476135254],["เรื่อง",-11.489276885986328],["▁وفي",-11.489341735839844],["▁دانشگاه",-11.4893798828125],["ева",-11.489657402038574],["ків",-11.4896879196167],["фа",-11.489691734313965],["▁Tal",-11.489727973937988],["▁thống",-11.489775657653809],["▁recept",-11.489819526672363],["▁sila",-11.490013122558594],["▁They",-11.490026473999023],["laga",-11.490166664123535],["▁10%",-11.49025058746338],["▁Mil",-11.490403175354004],["ंड",-11.49040412902832],["▁राज्य",-11.490452766418455],["▁{",-11.490466117858888],["šte",-11.490549087524414],["なら",-11.490570068359377],["dina",-11.490620613098145],["▁ren",-11.490683555603027],["ි",-11.490761756896973],["kre",-11.490866661071776],["▁αυτή",-11.490942001342772],["▁koncert",-11.490975379943848],["▁తన",-11.49117660522461],["▁דעם",-11.491281509399414],["▁ён",-11.491313934326172],["owi",-11.49142837524414],["даг",-11.491461753845217],["2011",-11.491766929626465],["rp",-11.491771697998049],["▁vz",-11.49180030822754],["lui",-11.491829872131348],["kh",-11.491999626159668],["Mu",-11.49212646484375],["wat",-11.492178916931152],["ார்",-11.49222469329834],["▁đổi",-11.49229907989502],["▁aika",-11.492300033569336],["чі",-11.492305755615234],["ستان",-11.492377281188965],["▁Meg",-11.492476463317873],["2012",-11.492591857910156],["種",-11.492602348327637],["ność",-11.492619514465332],["▁school",-11.49265956878662],["nah",-11.49273681640625],["бор",-11.492752075195312],["▁Ham",-11.492804527282717],["400",-11.492900848388672],["▁quyền",-11.493012428283691],["▁vier",-11.493069648742676],["▁kurs",-11.493163108825684],["▁tap",-11.49323844909668],["או",-11.493383407592772],["him",-11.493464469909668
],["ගෙන",-11.493597984313965],["▁stran",-11.493612289428713],["▁جنگ",-11.493612289428713],["हा",-11.493622779846191],["街",-11.49372673034668],["ください",-11.493756294250488],["nn",-11.493908882141112],["ံ",-11.494120597839355],["600",-11.494145393371582],["მო",-11.494170188903809],["liche",-11.49422550201416],["യോ",-11.494227409362791],["▁тим",-11.494258880615234],["үн",-11.494537353515623],["居",-11.494569778442385],["▁primeira",-11.494817733764648],["▁server",-11.494930267333984],["▁perché",-11.494951248168944],["实现",-11.495068550109863],["▁rätt",-11.495123863220217],["orang",-11.49516773223877],["▁когато",-11.495272636413574],["▁Vil",-11.49535846710205],["应该",-11.495384216308594],["▁العام",-11.49553680419922],["英",-11.495691299438477],["ように",-11.495758056640623],["iyo",-11.495787620544434],["ವೇ",-11.495814323425291],["设计",-11.495829582214355],["εται",-11.495841026306152],["TI",-11.495850563049316],["ilen",-11.496198654174805],["▁april",-11.496292114257812],["ဝ",-11.496469497680664],["▁Tidak",-11.496576309204102],["▁seit",-11.496606826782228],["aji",-11.496747016906738],["ville",-11.496768951416016],["▁Bil",-11.496821403503418],["▁permet",-11.49685287475586],["▁väga",-11.496855735778809],["▁paa",-11.496870994567873],["▁argument",-11.496933937072754],["▁کړي",-11.49693775177002],["Pro",-11.49710178375244],["கா",-11.497300148010254],["▁الخ",-11.497323989868164],["▁වී",-11.497798919677734],["▁hang",-11.497838973999023],["▁этой",-11.497869491577148],["кар",-11.497889518737791],["▁sheegay",-11.497954368591309],["▁Group",-11.49798583984375],["▁nord",-11.497994422912598],["SK",-11.498284339904783],["ction",-11.498306274414062],["▁mucho",-11.498312950134276],["▁nytt",-11.498438835144045],["交通",-11.498465538024902],["ích",-11.49850082397461],["ائ",-11.498580932617188],["شن",-11.498598098754885],["Com",-11.498631477355955],["vati",-11.498658180236816],["▁Nur",-11.498749732971191],["യാ",-11.498881340026855],["▁הם",-11.498900413513184],["具",-11.4989013671875],["▁region",-11.498910903930664],["▁sim",-11.499004364013672],["▁status",-11.499015808105469],["ezi",-11.499134063720703],["紅",-11.4991455078125],["बा",-11.49916172027588],["sız",-11.499211311340332],["亚",-11.499220848083496],["级",-11.499310493469238],["▁sebe",-11.499412536621094],["▁Deze",-11.49959659576416],["양",-11.499706268310549],["тов",-11.499871253967283],["▁هل",-11.499907493591309],["▁nchini",-11.50001335144043],["slu",-11.50002670288086],["nau",-11.500046730041504],["タ",-11.500421524047852],["cis",-11.500455856323242],["急",-11.500462532043455],["ика",-11.50057315826416],["▁вид",-11.500606536865234],["sso",-11.500762939453123],["▁kaikki",-11.50076961517334],["న్న",-11.500855445861816],["sil",-11.500900268554688],["▁unsere",-11.500903129577637],["▁하는",-11.500956535339355],["▁cela",-11.500957489013672],["▁kwamba",-11.50099277496338],["▁орган",-11.501007080078123],["▁thân",-11.501011848449709],["专业",-11.501029014587402],["ผม",-11.501058578491213],["▁seine",-11.501134872436523],["▁WordPress",-11.50118350982666],["▁tega",-11.501191139221191],["כל",-11.50145435333252],["▁ই",-11.50156021118164],["▁digunakan",-11.501716613769531],["န",-11.501717567443848],["▁رئيس",-11.501811981201172],["ที่มี",-11.5018310546875],["keun",-11.50185203552246],["ට්",-11.501875877380373],["ous",-11.501986503601074],["▁ት",-11.501989364624023],["▁ادامه",-11.502058029174805],["ሌ",-11.502077102661133],["turi",-11.50213623046875],["▁mij",-11.50214385986328],["▁Poli",-11.502157211303713],["gla",-11.502251625061035],["もの",-11.502467155456545],["þ",-11.502630233764648],["▁στους",-
11.502646446228027],["▁மு",-11.502772331237791],["▁És",-11.503019332885742],["▁mitä",-11.503063201904297],["тің",-11.503141403198242],["alis",-11.503170013427734],["62",-11.503183364868164],["▁domu",-11.503273010253906],["▁kel",-11.503278732299805],["ants",-11.503300666809082],["ండి",-11.503340721130373],["▁Let",-11.503477096557615],["▁sko",-11.503500938415527],["▁ենք",-11.503584861755373],["arte",-11.503676414489746],["▁few",-11.503704071044922],["人员",-11.503835678100586],["▁nữa",-11.503949165344238],["ск",-11.504032135009766],["▁दो",-11.504061698913574],["ецца",-11.504098892211914],["▁avea",-11.50409984588623],["ρε",-11.50422477722168],["CE",-11.504310607910156],["▁KA",-11.504316329956056],["▁हुन",-11.504392623901367],["əm",-11.504671096801758],["ाचा",-11.504722595214844],["▁ಹಾಗೂ",-11.504830360412598],["▁ந",-11.504852294921877],["▁vari",-11.504881858825684],["▁favor",-11.50490665435791],["dum",-11.505048751831056],["မႈ",-11.50505542755127],["▁PR",-11.505362510681152],["▁cukup",-11.50555419921875],["▁aceasta",-11.505611419677734],["▁Sir",-11.505680084228516],["not",-11.50570011138916],["kad",-11.505789756774902],["服",-11.505793571472168],["▁mér",-11.505807876586914],["től",-11.50585651397705],["ového",-11.505895614624023],["▁κάθε",-11.506095886230469],["▁علم",-11.506338119506836],["▁јер",-11.506378173828123],["▁سم",-11.506446838378906],["▁श्री",-11.506476402282717],["▁huis",-11.506519317626951],["▁taka",-11.506528854370115],["Ə",-11.506583213806152],["的な",-11.50658893585205],["jin",-11.506622314453123],["▁alam",-11.506649017333984],["जा",-11.506665229797363],["siya",-11.506872177124023],["نې",-11.506884574890137],["則",-11.50694179534912],["ரா",-11.506966590881348],["▁sowie",-11.507014274597168],["▁ساعت",-11.507120132446287],["▁drugi",-11.507222175598145],["лог",-11.50723934173584],["▁quốc",-11.5073881149292],["▁گئی",-11.507399559020996],["▁பா",-11.50746726989746],["微",-11.507776260375977],["▁facebook",-11.507810592651367],["▁mọi",-11.507866859436035],["reg",-11.507977485656738],["ithe",-11.508091926574709],["▁मोदी",-11.508127212524414],["▁mida",-11.508148193359377],["▁alan",-11.508234024047852],["że",-11.508275032043455],["лек",-11.508294105529783],["▁අපේ",-11.50833511352539],["▁zato",-11.508426666259766],["kot",-11.508587837219238],["ಜ",-11.508587837219238],["ತ್",-11.508610725402832],["مل",-11.508638381958008],["▁толькі",-11.508645057678224],["付",-11.50868034362793],["owo",-11.508750915527344],["ευ",-11.50876808166504],["צי",-11.508818626403809],["配",-11.508888244628906],["▁Fryslân",-11.508918762207031],["▁Ž",-11.508981704711914],["▁passe",-11.508995056152344],["mh",-11.509027481079102],["ॉ",-11.509140968322754],["lərinin",-11.509164810180664],["niki",-11.509278297424316],["LI",-11.509389877319336],["▁როგორც",-11.509411811828612],["imet",-11.509626388549805],["板",-11.509638786315918],["IP",-11.509697914123535],["▁around",-11.509763717651367],["ၾက",-11.509765625],["▁Mur",-11.509780883789062],["▁Når",-11.509825706481934],["▁laat",-11.509839057922363],["تون",-11.509878158569336],["▁ohne",-11.51000690460205],["ಾಗ",-11.510281562805176],["▁مهم",-11.510384559631348],["▁content",-11.510476112365724],["եք",-11.51057243347168],["▁пот",-11.510623931884766],["▁яка",-11.510672569274902],["37",-11.510753631591797],["စာ",-11.510794639587402],["▁ব",-11.511063575744627],["▁мало",-11.51107406616211],["आ",-11.511112213134766],["▁która",-11.511171340942385],["рас",-11.51132869720459],["dne",-11.511361122131348],["▁เพราะ",-11.511361122131348],["▁בת",-11.511397361755373],["idi",-11.51142120361328],["系",-11.511
422157287598],["nap",-11.51149559020996],["▁rep",-11.511597633361816],["▁Spa",-11.511727333068848],["▁حضور",-11.51174259185791],["▁lainnya",-11.511821746826172],["▁Бу",-11.511832237243652],["લી",-11.511916160583496],["▁tudo",-11.511969566345217],["▁faci",-11.51207447052002],["нова",-11.51226043701172],["實",-11.512445449829102],["▁tê",-11.512447357177734],["რო",-11.512537002563477],["河",-11.512616157531738],["له",-11.512617111206056],["▁ს",-11.51267147064209],["եղ",-11.512677192687988],["pati",-11.512682914733888],["▁software",-11.512691497802734],["▁quyết",-11.512703895568848],["liği",-11.512721061706545],["▁dzieci",-11.51280403137207],["▁hr",-11.512864112854004],["▁Ihr",-11.51286792755127],["lərə",-11.512929916381836],["ቡ",-11.51300811767578],["▁вече",-11.513036727905272],["ులు",-11.513053894042969],["票",-11.513117790222168],["▁сега",-11.513259887695312],["▁rundt",-11.513275146484377],["▁mostra",-11.513297080993652],["▁سفر",-11.513409614562988],["▁boli",-11.513445854187012],["38",-11.513535499572754],["ებში",-11.513551712036133],["▁사용",-11.51357364654541],["▁2-3",-11.513598442077637],["▁មិន",-11.513622283935549],["▁olub",-11.51371955871582],["ною",-11.51386547088623],["лей",-11.513945579528809],["eux",-11.513964653015137],["baar",-11.513999938964844],["▁single",-11.5142183303833],["ժ",-11.514469146728516],["问",-11.514570236206056],["▁пока",-11.514649391174316],["接受",-11.514866828918455],["▁경우",-11.51503849029541],["▁eso",-11.515084266662598],["▁Mkuu",-11.515193939208984],["від",-11.51520824432373],["rov",-11.515219688415527],["ನೆ",-11.515223503112791],["▁кара",-11.515320777893066],["拍",-11.51533317565918],["▁دوران",-11.515375137329102],["▁past",-11.515467643737791],["gl",-11.51551342010498],["▁ਉਸ",-11.51557445526123],["▁teknik",-11.515729904174805],["▁kör",-11.515740394592283],["▁عند",-11.515819549560549],["▁Roz",-11.516060829162598],["▁Nach",-11.516216278076172],["▁industri",-11.516228675842283],["▁viene",-11.516294479370115],["يان",-11.516315460205078],["▁ला",-11.516356468200684],["▁kraj",-11.516484260559082],["ojë",-11.51656723022461],["▁mí",-11.516583442687988],["cle",-11.5166597366333],["▁상",-11.516708374023438],["lai",-11.516785621643066],["-20",-11.516849517822266],["▁cost",-11.51688003540039],["▁debe",-11.516971588134766],["eh",-11.517078399658203],["▁firm",-11.517210960388184],["ေသာ",-11.51762866973877],["▁какво",-11.517691612243652],["AB",-11.517717361450195],["aigh",-11.517742156982422],["▁phương",-11.517810821533203],["▁дали",-11.517861366271973],["fici",-11.518061637878418],["မှ",-11.518150329589844],["▁första",-11.518260955810549],["▁अपनी",-11.518335342407228],["lica",-11.518385887145996],["rib",-11.518386840820312],["cker",-11.518492698669434],["▁اړه",-11.51866340637207],["jum",-11.518696784973145],["▁необходимо",-11.518731117248535],["гар",-11.51874542236328],["డం",-11.51877212524414],["dé",-11.518816947937012],["▁jų",-11.51886749267578],["▁එකක්",-11.518928527832031],["▁Neu",-11.518962860107422],["▁pula",-11.51897430419922],["wang",-11.519054412841797],["▁Trans",-11.51913833618164],["kop",-11.51914882659912],["▁надо",-11.519176483154297],["iam",-11.519227981567385],["▁ner",-11.519227981567385],["ingu",-11.519386291503906],["▁વ",-11.519408226013184],["▁вже",-11.519475936889648],["▁ก",-11.519615173339844],["anno",-11.519659996032717],["程",-11.51972198486328],["yd",-11.519730567932127],["νε",-11.519755363464355],["▁2020",-11.519818305969238],["以下",-11.519930839538574],["ಯಾ",-11.519997596740724],["ෝ",-11.520130157470703],["▁എന്ന്",-11.52013111114502],["▁лі",-11.520296096801758]
,["▁dok",-11.520356178283691],["▁մեջ",-11.520630836486816],["▁jis",-11.520746231079102],["▁Kul",-11.52075481414795],["▁оно",-11.52078342437744],["эд",-11.520994186401367],["massa",-11.521109580993652],["กว่า",-11.521288871765137],["nna",-11.52143669128418],["site",-11.52152156829834],["▁होती",-11.521565437316896],["とは",-11.52159309387207],["▁ہر",-11.52169418334961],["▁nếu",-11.521954536437988],["▁바",-11.522150039672852],["▁asa",-11.522191047668455],["還是",-11.522193908691406],["lerde",-11.52220058441162],["▁cerca",-11.522212982177734],["▁seguir",-11.522247314453123],["యా",-11.522357940673828],["▁мора",-11.522650718688965],["▁اهو",-11.52277374267578],["ਗ",-11.522787094116213],["▁재",-11.523000717163086],["▁måste",-11.52315902709961],["▁PA",-11.523189544677734],["ሎ",-11.523194313049316],["▁ती",-11.52319622039795],["▁सो",-11.523266792297363],["▁beim",-11.52380657196045],["▁tomu",-11.524112701416016],["▁mira",-11.524255752563477],["ծ",-11.524336814880373],["▁يوم",-11.52456283569336],["▁ඇ",-11.524600982666016],["ੀਆਂ",-11.524615287780762],["▁ვ",-11.52462387084961],["ロ",-11.524737358093262],["▁mensen",-11.524782180786133],["島",-11.52524185180664],["▁tăng",-11.525320053100586],["▁сарын",-11.52537441253662],["assa",-11.52538013458252],["▁воз",-11.525485038757324],["eiro",-11.525510787963867],["▁περι",-11.525617599487305],["იდან",-11.525656700134276],["▁sociale",-11.525853157043455],["في",-11.525911331176758],["▁بدون",-11.52596950531006],["▁सेवा",-11.525985717773438],["▁жер",-11.526049613952637],["▁زیادہ",-11.526067733764648],["▁moins",-11.526098251342772],["ант",-11.526116371154783],["gno",-11.526140213012695],["▁ಪ್ರ",-11.52631950378418],["▁שב",-11.52634620666504],["控制",-11.52638816833496],["НО",-11.52646827697754],["占",-11.526567459106444],["▁doma",-11.526580810546877],["პ",-11.526592254638672],["上海",-11.526816368103027],["▁monde",-11.526822090148926],["त्",-11.526985168457031],["રા",-11.527029037475586],["▁natur",-11.527225494384766],["lev",-11.527396202087402],["▁Inn",-11.527481079101562],["余",-11.52749252319336],["аж",-11.527505874633787],["▁liệu",-11.527679443359377],["▁Deus",-11.527755737304688],["▁59",-11.528118133544922],["▁neue",-11.52819538116455],["间",-11.528326034545898],["uko",-11.52837085723877],["▁yfir",-11.528376579284668],["▁hoàn",-11.528402328491213],["▁SMS",-11.528429985046388],["ยา",-11.5285062789917],["▁하",-11.528536796569824],["มัน",-11.528546333312988],["▁痞客邦",-11.528626441955566],["▁pal",-11.52886962890625],["▁nekaj",-11.52889919281006],["ခံ",-11.528923988342283],["▁tulee",-11.529111862182615],["ాల",-11.529214859008787],["▁करते",-11.529306411743164],["博",-11.529479026794434],["▁ولی",-11.529707908630373],["▁කිරීම",-11.529767990112305],["UL",-11.52980136871338],["▁fit",-11.529828071594238],["▁lời",-11.530001640319824],["rdi",-11.530007362365724],["▁yi",-11.53001308441162],["▁vô",-11.53008270263672],["▁São",-11.530089378356934],["▁MU",-11.530518531799316],["ું",-11.53053855895996],["эр",-11.530540466308594],["లే",-11.530556678771973],["▁etmək",-11.530603408813477],["ського",-11.530667304992676],["存",-11.530678749084473],["▁saab",-11.530712127685549],["ಕೆ",-11.5308198928833],["▁high",-11.530935287475586],["Ho",-11.531030654907228],["▁يكون",-11.5313720703125],["▁ملی",-11.531455993652344],["十",-11.53152561187744],["selt",-11.531760215759276],["čne",-11.531824111938477],["행",-11.53184986114502],["ရင္",-11.531951904296877],["▁չ",-11.53260898590088],["ଟ",-11.53264331817627],["fon",-11.5326566696167],["▁Mei",-11.532662391662598],["▁Not",-11.532678604125977],["▁Mga",-11.532722473144531],[
"▁この",-11.53272819519043],["점",-11.532771110534668],["▁presta",-11.53296947479248],["cca",-11.53302001953125],["▁הי",-11.533021926879885],["▁gener",-11.533164024353027],["▁Fan",-11.533175468444824],["▁Medi",-11.533183097839355],["▁visa",-11.53318977355957],["▁Αν",-11.533211708068848],["WA",-11.533354759216309],["ଲି",-11.533390045166016],["▁dati",-11.533448219299316],["ldi",-11.533529281616213],["▁شخص",-11.53369140625],["kata",-11.53370189666748],["angan",-11.533820152282717],["ția",-11.5338773727417],["▁soos",-11.533943176269531],["eer",-11.533978462219238],["▁geri",-11.534005165100098],["rî",-11.534083366394045],["▁eins",-11.53414535522461],["kara",-11.534523963928224],["ками",-11.53454875946045],["気",-11.534639358520508],["关系",-11.534686088562012],["koa",-11.534732818603516],["çı",-11.534865379333496],["ဟာ",-11.534875869750977],["kem",-11.53489875793457],["茶",-11.534930229187012],["▁함께",-11.53512477874756],["ନି",-11.535185813903809],["dala",-11.535228729248049],["рэ",-11.535236358642578],["εις",-11.535273551940918],["▁brez",-11.535481452941896],["දි",-11.535540580749512],["▁Ye",-11.535552024841309],["▁पार्टी",-11.535555839538574],["▁outras",-11.53561305999756],["▁How",-11.535832405090332],["առ",-11.535870552062988],["▁66",-11.535991668701172],["ราคา",-11.536232948303224],["望",-11.536297798156738],["▁dupa",-11.536299705505373],["рам",-11.536370277404783],["▁دهد",-11.536401748657228],["▁hyn",-11.536551475524902],["omis",-11.536582946777344],["▁toujours",-11.536686897277832],["▁chuyện",-11.536694526672363],["τος",-11.536765098571776],["adda",-11.536898612976074],["kry",-11.53699016571045],["エ",-11.537110328674316],["▁איר",-11.537225723266602],["▁zbog",-11.53725242614746],["4%",-11.537261009216309],["သြား",-11.53730297088623],["▁eks",-11.537335395812988],["▁위해",-11.537466049194336],["▁бес",-11.537474632263184],["▁pracy",-11.537508010864258],["▁mogu",-11.53756618499756],["ां",-11.53757095336914],["▁овој",-11.537623405456545],["▁bû",-11.537626266479492],["گر",-11.537763595581056],["▁នឹង",-11.53776741027832],["łem",-11.538249969482422],["▁male",-11.538268089294434],["▁werd",-11.53867530822754],["▁nunc",-11.53872299194336],["ција",-11.538795471191406],["▁тех",-11.538870811462402],["▁именно",-11.538897514343262],["▁توسط",-11.539063453674316],["▁ନ",-11.539254188537598],["▁đơn",-11.539300918579102],["하지",-11.539305686950684],["▁byť",-11.539324760437012],["ঁ",-11.539499282836914],["▁ME",-11.539605140686035],["Η",-11.539657592773438],["жы",-11.53976058959961],["▁posible",-11.539766311645508],["මි",-11.53984832763672],["▁lah",-11.53989028930664],["底",-11.539966583251951],["▁hava",-11.539992332458496],["മാ",-11.540079116821287],["▁play",-11.54013442993164],["▁Có",-11.540241241455078],["▁آزاد",-11.540390968322754],["nimi",-11.540404319763184],["!!!!",-11.54046630859375],["▁persone",-11.540586471557615],["ቁ",-11.540667533874512],["▁พ",-11.541062355041504],["ća",-11.541083335876465],["▁таксама",-11.541143417358398],["▁realizar",-11.54115104675293],["ству",-11.541308403015137],["ización",-11.541382789611816],["▁motiv",-11.541474342346191],["jad",-11.541593551635742],["▁sebab",-11.541672706604004],["▁kecil",-11.541722297668455],["ლო",-11.541729927062988],["лаа",-11.541939735412598],["▁ժամանակ",-11.541952133178713],["▁AB",-11.542012214660645],["kup",-11.542043685913086],["మైన",-11.542058944702148],["▁worm",-11.542163848876951],["Про",-11.54235553741455],["▁Ú",-11.542415618896484],["ості",-11.54245662689209],["▁Autor",-11.54246425628662],["▁robot",-11.542499542236328],["▁Tour",-11.542510986328123],["▁Ну",-11.5
42744636535645],["▁nội",-11.542747497558594],["▁قابل",-11.5427827835083],["تم",-11.542810440063477],["▁Dil",-11.542853355407717],["▁aiz",-11.543001174926758],["spel",-11.54301643371582],["▁ID",-11.543227195739746],["▁bang",-11.543231010437012],["按",-11.54336643218994],["▁state",-11.543412208557127],["ėjo",-11.54343318939209],["▁always",-11.543566703796388],["▁blogg",-11.543633460998535],["▁Best",-11.543675422668455],["▁altri",-11.543695449829102],["▁थिए",-11.543779373168944],["▁fick",-11.544048309326172],["▁რაც",-11.544079780578612],["rata",-11.544111251831056],["unu",-11.544153213500977],["▁luar",-11.54442310333252],["▁кур",-11.544466018676758],["may",-11.544469833374023],["куп",-11.54473114013672],["▁Play",-11.544753074645996],["▁път",-11.544755935668944],["▁پا",-11.544793128967283],["转",-11.544951438903809],["мин",-11.545047760009766],["▁teraz",-11.545083999633787],["▁nepa",-11.5451021194458],["zim",-11.54512882232666],["ær",-11.54512882232666],["▁keer",-11.545218467712402],["红",-11.545218467712402],["ಿದ್ದಾರೆ",-11.545279502868652],["mpi",-11.54530429840088],["три",-11.54530429840088],["▁denna",-11.545331001281738],["▁จาก",-11.545411109924316],["相关",-11.545498847961426],["▁мир",-11.545499801635742],["▁services",-11.545586585998535],["レ",-11.54562759399414],["▁जब",-11.545653343200684],["▁passer",-11.545729637145996],["洗",-11.545822143554688],["inti",-11.545865058898926],["▁sk",-11.546063423156738],["ដ",-11.546218872070312],["沒",-11.54623031616211],["均",-11.546250343322754],["Ç",-11.54627799987793],["▁두",-11.546388626098633],["▁والا",-11.546425819396973],["cka",-11.54647445678711],["AI",-11.54649257659912],["▁version",-11.546734809875488],["▁qilish",-11.546760559082031],["更加",-11.546775817871094],["▁५",-11.546852111816406],["▁Ι",-11.546966552734377],["理解",-11.546996116638184],["kha",-11.547002792358398],["▁صدر",-11.547082901000977],["▁kuu",-11.547218322753906],["▁apenas",-11.547219276428224],["▁Team",-11.547473907470703],["hit",-11.547683715820312],["eksi",-11.547684669494627],["▁бизнес",-11.547839164733888],["рат",-11.547877311706545],["эг",-11.547930717468262],["▁stress",-11.547978401184082],["▁lider",-11.54819679260254],["ไว้",-11.548236846923828],["▁automat",-11.548240661621094],["ус",-11.548297882080078],["记者",-11.54832649230957],["ър",-11.548380851745604],["sho",-11.54843521118164],["kli",-11.548529624938965],["▁paljon",-11.548566818237305],["见",-11.548617362976074],["hara",-11.548628807067873],["▁typ",-11.548748970031738],["▁kum",-11.548836708068848],["ildi",-11.548852920532228],["▁ра",-11.548914909362791],["ថា",-11.54900074005127],["▁yw",-11.549195289611816],["▁önemli",-11.54921054840088],["ებული",-11.549212455749512],["วัน",-11.549217224121094],["▁mikro",-11.549286842346191],["▁Comments",-11.549297332763672],["▁شدن",-11.549482345581056],["عت",-11.549497604370115],["аа",-11.54950714111328],["raf",-11.549554824829102],["▁inom",-11.549556732177734],["▁وزارت",-11.549562454223633],["ડી",-11.549588203430176],["▁file",-11.54959201812744],["2010",-11.549765586853027],["▁hắn",-11.54982566833496],["▁(9)",-11.549846649169922],["kken",-11.549907684326172],["aza",-11.549965858459473],["klad",-11.550053596496582],["▁رنگ",-11.550063133239746],["fur",-11.550220489501951],["▁själv",-11.550248146057127],["▁igjen",-11.550470352172852],["ред",-11.550515174865724],["пра",-11.550790786743164],["ಲ್ಲಿ",-11.550869941711426],["மி",-11.550873756408691],["▁dh",-11.551167488098145],["成為",-11.551194190979004],["той",-11.551207542419434],["▁Här",-11.551260948181152],["▁بازار",-11.551314353942873],["▁بالا",-11.55
1562309265137],["▁itt",-11.551592826843262],["ाइ",-11.55174160003662],["eerd",-11.551752090454102],["▁nawet",-11.551952362060549],["▁кредит",-11.551953315734863],["iku",-11.552001953125],["▁sui",-11.552124977111816],["▁sekarang",-11.552210807800291],["▁గ",-11.552275657653809],["كە",-11.552406311035156],["▁istifadə",-11.552408218383787],["▁يو",-11.552417755126951],["dil",-11.552550315856934],["▁port",-11.552657127380373],["▁거",-11.552666664123535],["ก่อน",-11.552721977233888],["▁đất",-11.552740097045898],["▁sale",-11.552811622619627],["▁milyon",-11.552882194519045],["▁tử",-11.55305004119873],["▁fire",-11.55308437347412],["Μ",-11.553118705749512],["▁jää",-11.553125381469728],["ư",-11.553173065185549],["歌",-11.55320167541504],["▁sách",-11.553364753723145],["▁Պ",-11.553396224975586],["കൾ",-11.553531646728516],["▁የመ",-11.553804397583008],["год",-11.553829193115234],["▁ور",-11.553879737854004],["HA",-11.55388641357422],["kari",-11.55395221710205],["▁pasti",-11.553977966308594],["gol",-11.553985595703123],["▁camp",-11.55404567718506],["▁אויף",-11.554082870483398],["▁ibu",-11.554149627685549],["▁tento",-11.55417823791504],["▁nez",-11.554186820983888],["▁ary",-11.554232597351074],["▁où",-11.554235458374023],["▁amor",-11.55429458618164],["дай",-11.554366111755373],["▁administra",-11.55437469482422],["▁хто",-11.554472923278809],["▁वे",-11.554486274719238],["ique",-11.55454158782959],["▁blive",-11.55461311340332],["аць",-11.554619789123535],["▁libre",-11.55468463897705],["▁MO",-11.554692268371582],["▁lin",-11.55479907989502],["imą",-11.554814338684082],["සි",-11.554994583129885],["遠",-11.555098533630373],["▁මගේ",-11.555249214172363],["▁rei",-11.555251121520996],["зы",-11.555283546447754],["▁जीवन",-11.55550479888916],["▁(5",-11.555547714233398],["▁esi",-11.55579662322998],["▁világ",-11.55589771270752],["ации",-11.555935859680176],["▁ស",-11.555957794189451],["▁ph",-11.556018829345703],["▁better",-11.556053161621094],["վի",-11.556134223937988],["पि",-11.556192398071287],["Ú",-11.556312561035156],["▁थे",-11.556416511535645],["▁మరియు",-11.556462287902832],["▁بسیار",-11.556512832641602],["워",-11.556546211242676],["▁fordi",-11.556635856628418],["▁song",-11.55673122406006],["aste",-11.556760787963867],["ament",-11.556771278381348],["іп",-11.556784629821776],["وف",-11.556845664978027],["ሻ",-11.5569486618042],["投",-11.55702781677246],["▁faz",-11.557037353515623],["游戏",-11.557337760925291],["61",-11.557365417480469],["學生",-11.557374954223633],["▁podľa",-11.557607650756836],["▁siitä",-11.557631492614746],["▁trabajo",-11.557632446289062],["▁viết",-11.557653427124023],["ují",-11.557787895202637],["udo",-11.557801246643066],["43",-11.55783462524414],["vert",-11.557896614074709],["▁meses",-11.557989120483398],["▁тільки",-11.558059692382812],["▁zł",-11.558100700378418],["▁lòng",-11.558157920837402],["▁Col",-11.558170318603516],["▁Daniel",-11.558192253112791],["מו",-11.55823040008545],["σαν",-11.558292388916016],["یه",-11.558399200439451],["ाची",-11.558439254760742],["正在",-11.558566093444824],["▁کچھ",-11.558602333068848],["නා",-11.558807373046877],["▁saman",-11.558852195739746],["터",-11.558992385864258],["-4",-11.559001922607422],["▁այլ",-11.559682846069336],["qe",-11.559741973876951],["ੈ",-11.559771537780762],["сно",-11.55981731414795],["爾",-11.55988311767578],["ଶ",-11.559979438781738],["EN",-11.559981346130373],["▁연",-11.560015678405762],["▁واحد",-11.560135841369627],["▁mendapatkan",-11.560152053833008],["▁Ano",-11.560381889343262],["▁بها",-11.560648918151855],["计划",-11.560670852661133],["ικές",-11.56068229675293],["▁
Україні",-11.560686111450195],["▁роботи",-11.560687065124512],["▁امریکا",-11.560725212097168],["/1",-11.560770988464355],["▁Pop",-11.560808181762695],["▁24.",-11.5608549118042],["யாக",-11.56092357635498],["3%",-11.561017036437988],["▁Center",-11.561156272888184],["вала",-11.56117820739746],["▁deyil",-11.56120491027832],["93",-11.56121063232422],["ăm",-11.561308860778809],["でした",-11.561335563659668],["▁አይ",-11.561345100402832],["ení",-11.561395645141602],["್ಯ",-11.5614595413208],["asan",-11.561467170715332],["▁hab",-11.5617094039917],["留",-11.561742782592772],["ధ",-11.561844825744627],["虽然",-11.561875343322754],["▁بو",-11.561986923217772],["аас",-11.562002182006836],["▁kemudian",-11.562067985534668],["▁veľmi",-11.562067985534668],["dora",-11.5620756149292],["▁ildə",-11.562158584594728],["权",-11.56221866607666],["▁ٹی",-11.562456130981444],["▁nombre",-11.562556266784668],["美國",-11.562679290771484],["ðu",-11.562748908996582],["ებით",-11.56280517578125],["▁Ing",-11.562861442565918],["副",-11.562950134277344],["ળ",-11.562982559204102],["▁Michael",-11.563014030456545],["გა",-11.56302547454834],["▁장",-11.56307601928711],["шки",-11.563081741333008],["ಮ್",-11.563112258911133],["টা",-11.56311321258545],["▁Sel",-11.563152313232422],["▁wêreld",-11.563273429870604],["▁mba",-11.563298225402832],["▁etiam",-11.563353538513184],["name",-11.563433647155762],["▁baina",-11.563433647155762],["kā",-11.5635404586792],["▁Го",-11.563685417175291],["aq",-11.563802719116213],["şe",-11.56381893157959],["eel",-11.563858032226562],["▁menye",-11.563923835754396],["िया",-11.563929557800291],["ppu",-11.564004898071287],["比较",-11.564167022705078],["親",-11.564169883728027],["दार",-11.564179420471191],["嗎",-11.564201354980469],["ី",-11.56422996520996],["णे",-11.56442165374756],["▁ket",-11.564422607421877],["ચ",-11.564434051513672],["▁हुआ",-11.564485549926758],["मान",-11.564502716064451],["▁Ə",-11.564504623413086],["96",-11.564598083496094],["▁વિ",-11.564661026000977],["nă",-11.564774513244627],["▁individual",-11.564903259277344],["▁lí",-11.565018653869627],["▁25.",-11.565023422241213],["ņ",-11.565126419067385],["피",-11.565200805664062],["▁damer",-11.565208435058594],["某",-11.56527042388916],["たい",-11.565305709838867],["跑",-11.56532859802246],["ری",-11.565452575683594],["ιν",-11.565488815307615],["▁cen",-11.56557273864746],["55",-11.565778732299805],["ໃຫ້",-11.56581211090088],["▁tərəfindən",-11.565825462341309],["▁HA",-11.565885543823242],["▁бу",-11.565908432006836],["▁acu",-11.565929412841797],["九",-11.56598663330078],["Je",-11.566041946411133],["iyê",-11.566062927246094],["రీ",-11.566210746765137],["▁dalka",-11.566228866577148],["▁சி",-11.5662841796875],["▁run",-11.566335678100586],["cé",-11.566364288330078],["▁ký",-11.566370010375977],["▁Reg",-11.56641960144043],["GE",-11.566519737243652],["cus",-11.566536903381348],["लं",-11.566596031188965],["▁jälkeen",-11.56663417816162],["ลูก",-11.566648483276367],["usa",-11.566720962524414],["teur",-11.566813468933104],["有些",-11.566814422607422],["▁მათ",-11.567026138305664],["▁жылы",-11.567198753356934],["▁ú",-11.567293167114258],["▁بارے",-11.567374229431152],["▁september",-11.567445755004885],["▁वह",-11.56745147705078],["лай",-11.56755828857422],["▁هایی",-11.567594528198242],["ää",-11.56771469116211],["別",-11.567866325378418],["wen",-11.568028450012209],["▁centre",-11.568037986755373],["ค่ะ",-11.568096160888672],["▁الص",-11.568122863769531],["tır",-11.568148612976074],["▁ઉ",-11.568263053894045],["51",-11.56837272644043],["▁1990",-11.568384170532228],["苦",-11.56844425201416],["జ",-11.568
44997406006],["▁innan",-11.568544387817385],["視",-11.568766593933104],["▁пол",-11.568830490112305],["ୋ",-11.56885051727295],["μη",-11.568900108337402],["▁پیدا",-11.56890106201172],["ാല്",-11.568949699401855],["mista",-11.569007873535156],["▁pasado",-11.569013595581056],["▁61",-11.569062232971191],["ปี",-11.569156646728516],["▁የአ",-11.569276809692385],["വര്",-11.569280624389648],["▁programma",-11.56938648223877],["▁pouco",-11.569388389587402],["▁neque",-11.569392204284668],["mez",-11.569401741027832],["▁Design",-11.569461822509766],["▁стандарт",-11.569539070129396],["▁fase",-11.56955909729004],["ph",-11.56961154937744],["box",-11.569774627685549],["▁taj",-11.569838523864746],["▁Gel",-11.569987297058104],["дык",-11.570000648498535],["▁mio",-11.570030212402344],["▁sagt",-11.57003116607666],["▁نواز",-11.570103645324709],["сты",-11.570117950439451],["▁sat",-11.570194244384766],["▁next",-11.570199012756348],["ുന്നത്",-11.57022190093994],["▁Prof",-11.570245742797852],["ós",-11.570249557495115],["▁deste",-11.570283889770508],["▁võib",-11.570324897766112],["▁alap",-11.570405006408691],["▁ଏ",-11.570423126220703],["▁biệt",-11.570493698120115],["▁þú",-11.570511817932127],["즈",-11.570595741271973],["自分の",-11.57069969177246],["▁Denne",-11.570734977722168],["▁agora",-11.570755004882812],["▁Vin",-11.57080078125],["gum",-11.57087516784668],["▁서",-11.57097625732422],["員",-11.571002006530762],["交易",-11.571043014526367],["▁Us",-11.571087837219238],["▁cop",-11.571224212646484],["천",-11.57125186920166],["▁ಅವರು",-11.571282386779783],["▁Ná",-11.571340560913086],["▁doktor",-11.571358680725098],["▁£",-11.571380615234377],["列",-11.571426391601562],["▁constitu",-11.57143497467041],["▁faoi",-11.57148265838623],["RE",-11.571500778198242],["▁yok",-11.571619033813477],["▁fokus",-11.5717191696167],["▁тях",-11.57174015045166],["▁Sk",-11.571746826171877],["รา",-11.571803092956545],["வும்",-11.57183074951172],["می",-11.571891784667969],["ић",-11.571914672851562],["▁penting",-11.57192325592041],["lada",-11.57200527191162],["▁130",-11.572036743164062],["▁շատ",-11.572086334228516],["▁kõik",-11.572137832641602],["rú",-11.57217788696289],["▁зв",-11.57219123840332],["▁call",-11.572248458862305],["erer",-11.572264671325684],["▁ceva",-11.572267532348633],["▁এর",-11.572395324707031],["▁ປະ",-11.572614669799805],["▁OS",-11.572623252868652],["▁ስለ",-11.57272243499756],["▁Uni",-11.57274341583252],["拿",-11.572757720947266],["ಲೆ",-11.57285213470459],["ನಾ",-11.57290744781494],["ରି",-11.57292938232422],["▁беру",-11.572979927062988],["买",-11.57308864593506],["▁alleen",-11.573147773742676],["▁Ти",-11.573160171508787],["šo",-11.57331085205078],["neet",-11.573376655578612],["▁địa",-11.573378562927246],["▁उनले",-11.5734224319458],["热",-11.573427200317385],["▁był",-11.573532104492188],["のか",-11.573705673217772],["ിലെ",-11.573777198791504],["ที่จะ",-11.573904037475586],["ള്",-11.573932647705078],["Í",-11.57396125793457],["▁Data",-11.573994636535645],["іне",-11.574015617370604],["▁लेख",-11.574073791503906],["倒",-11.574081420898438],["ује",-11.574125289916992],["▁used",-11.57417106628418],["dami",-11.574252128601074],["▁kla",-11.574263572692873],["▁잘",-11.574313163757324],["机构",-11.57437801361084],["▁đ",-11.574416160583496],["▁nhập",-11.574463844299316],["bg",-11.574498176574709],["▁quelques",-11.574753761291504],["ନ୍",-11.57481288909912],["▁trẻ",-11.57501220703125],["буд",-11.575021743774414],["▁några",-11.575047492980955],["▁став",-11.575074195861816],["▁тухай",-11.57532024383545],["ନା",-11.575343132019045],["று",-11.575373649597168],["▁teu",-11.5753
93676757812],["sum",-11.57557773590088],["这一",-11.575578689575195],["▁ቤት",-11.575589179992676],["76",-11.5756196975708],["ovej",-11.575714111328123],["▁kendi",-11.57574462890625],["▁ඔබ",-11.575901985168455],["VI",-11.57591438293457],["▁vaan",-11.575922966003418],["▁KO",-11.57595443725586],["გი",-11.576019287109377],["ји",-11.576177597045898],["ኔ",-11.576224327087402],["▁às",-11.576273918151855],["ओं",-11.576339721679688],["▁thứ",-11.576372146606444],["ине",-11.576380729675291],["▁тя",-11.576586723327637],["▁خدمات",-11.57659149169922],["נת",-11.57660961151123],["▁Kai",-11.576615333557127],["▁Here",-11.576662063598633],["▁hra",-11.57691764831543],["JA",-11.57697582244873],["press",-11.577019691467283],["▁1995",-11.57704734802246],["▁ولكن",-11.577055931091309],["sama",-11.57707977294922],["▁bai",-11.577194213867188],["há",-11.577274322509766],["▁forhold",-11.577451705932615],["▁nostri",-11.5775785446167],["лас",-11.577619552612305],["▁essa",-11.577651023864746],["Da",-11.57774543762207],["▁მას",-11.577754974365234],["нат",-11.577820777893066],["Π",-11.577887535095217],["pé",-11.577935218811035],["语",-11.577960014343262],["▁Hur",-11.57796573638916],["▁성",-11.578067779541016],["▁കാ",-11.578155517578123],["▁средства",-11.57827377319336],["▁bul",-11.57839012145996],["▁huu",-11.57848834991455],["чно",-11.578510284423828],["▁sada",-11.578535079956056],["▁vừa",-11.578537940979004],["ява",-11.578605651855469],["▁След",-11.578617095947266],["ema",-11.57871437072754],["▁നല്",-11.578723907470703],["▁తో",-11.578728675842283],["▁لی",-11.578733444213867],["owane",-11.578801155090332],["界",-11.57895851135254],["జ్",-11.579018592834473],["▁गर्दै",-11.57912826538086],["▁1996",-11.579136848449709],["тел",-11.579253196716309],["UM",-11.57931137084961],["hra",-11.579437255859377],["▁दिवस",-11.579554557800291],["iel",-11.57966423034668],["▁desta",-11.579667091369627],["lung",-11.579702377319336],["њи",-11.579784393310549],["▁pří",-11.580190658569336],["čky",-11.580307006835938],["pag",-11.58030891418457],["▁meter",-11.580389976501465],["▁گے",-11.580633163452148],["▁podem",-11.580731391906738],["eira",-11.58076286315918],["▁every",-11.58084774017334],["▁kura",-11.58100128173828],["AR",-11.581013679504396],["yada",-11.581048965454102],["bru",-11.581134796142578],["σα",-11.581626892089844],["▁prae",-11.58169937133789],["▁portal",-11.581751823425291],["mbang",-11.58177661895752],["▁weet",-11.581782341003418],["رت",-11.581838607788086],["▁nunca",-11.581851959228516],["toa",-11.581852912902832],["யும்",-11.581891059875488],["我国",-11.58189296722412],["▁эти",-11.5819091796875],["▁телефон",-11.581957817077637],["▁مش",-11.582113265991213],["▁selalu",-11.582115173339844],["▁कु",-11.582130432128906],["▁isto",-11.58226490020752],["▁siempre",-11.582380294799805],["ოთ",-11.582422256469728],["▁Како",-11.582504272460938],["계",-11.582534790039062],["▁malam",-11.58254337310791],["情况",-11.582557678222656],["救",-11.58257293701172],["Con",-11.58258819580078],["▁ym",-11.58262825012207],["▁69",-11.58263111114502],["▁club",-11.582658767700195],["衣",-11.582736015319824],["▁പറഞ്ഞു",-11.582839012145996],["duk",-11.58286190032959],["ေပး",-11.582903861999512],["žo",-11.582989692687988],["▁fal",-11.583011627197266],["▁السلام",-11.583063125610352],["spor",-11.583210945129396],["یان",-11.583264350891112],["▁micro",-11.583345413208008],["▁ہوا",-11.583358764648438],["rok",-11.58339023590088],["uro",-11.583480834960938],["▁ö",-11.583498001098633],["▁pasa",-11.583741188049316],["▁Love",-11.58375072479248],["spo",-11.583795547485352],["utu",-11.583949
089050291],["群",-11.58408546447754],["▁bed",-11.58411979675293],["ጉ",-11.58419132232666],["cken",-11.58424949645996],["▁сто",-11.58430004119873],["ဒ",-11.584301948547363],["支",-11.58441162109375],["ტ",-11.584436416625977],["▁ਪ",-11.584453582763672],["▁forte",-11.584454536437988],["▁بنا",-11.584470748901367],["▁э",-11.584603309631348],["▁gro",-11.584660530090332],["▁nett",-11.584805488586426],["lse",-11.5848388671875],["▁nelle",-11.584901809692385],["注意",-11.58497714996338],["▁зам",-11.585009574890137],["▁gjør",-11.58507251739502],["难",-11.585129737854004],["▁թ",-11.585152626037598],["▁пи",-11.585165023803713],["ага",-11.5852632522583],["▁namun",-11.585328102111816],["ости",-11.58535861968994],["▁tegen",-11.585397720336914],["▁ثبت",-11.585531234741213],["ক্ষ",-11.58553409576416],["mək",-11.585540771484377],["▁Posted",-11.585817337036133],["▁آخر",-11.585830688476562],["▁ಎ",-11.58585262298584],["▁Най",-11.585872650146484],["▁राज",-11.585957527160645],["8%",-11.585996627807615],["▁jour",-11.586079597473145],["ட்டு",-11.586127281188965],["▁March",-11.586129188537598],["▁Bij",-11.586217880249023],["▁ktorá",-11.586224555969238],["▁Dövlət",-11.586247444152832],["▁Gy",-11.586270332336426],["isse",-11.58634090423584],["▁qədər",-11.58646297454834],["▁یاد",-11.586502075195312],["段",-11.586503982543944],["má",-11.58657169342041],["▁خرید",-11.586780548095703],["▁गई",-11.586783409118652],["▁sens",-11.586793899536133],["▁stan",-11.586886405944824],["▁share",-11.58693790435791],["மான",-11.586953163146973],["▁mg",-11.586970329284668],["▁الح",-11.587071418762209],["82",-11.587215423583984],["MO",-11.587328910827637],["rta",-11.587447166442873],["▁Ros",-11.587447166442873],["▁директор",-11.587522506713867],["這些",-11.587562561035156],["mand",-11.587566375732422],["бі",-11.587590217590332],["▁בכל",-11.587654113769531],["▁Mes",-11.587689399719238],["关于",-11.587769508361816],["▁בי",-11.587770462036133],["▁pasar",-11.587776184082031],["经",-11.58781623840332],["師",-11.587854385375977],["στ",-11.587894439697266],["▁य",-11.587897300720217],["▁آج",-11.587900161743164],["▁جدا",-11.587984085083008],["ेन",-11.58802890777588],["ਾਈ",-11.588054656982422],["nosť",-11.588074684143066],["▁bahan",-11.588111877441406],["▁Gor",-11.588296890258787],["▁avoir",-11.588296890258787],["▁حد",-11.58836555480957],["▁olması",-11.588367462158203],["▁xəbər",-11.588508605957031],["限",-11.588541984558104],["▁הח",-11.588552474975586],["▁कांग्रेस",-11.58866024017334],["▁datang",-11.588700294494627],["▁Ia",-11.58872413635254],["▁68",-11.588730812072754],["डा",-11.588791847229004],["▁SP",-11.588849067687988],["ANA",-11.588926315307615],["▁пример",-11.588936805725098],["▁bog",-11.588940620422363],["ມີ",-11.588948249816896],["▁Uhr",-11.58899211883545],["▁сви",-11.589000701904297],["▁خوش",-11.589067459106444],["රු",-11.589070320129396],["rica",-11.589139938354492],["파",-11.58914279937744],["▁эрх",-11.58914852142334],["▁sot",-11.589173316955566],["nger",-11.58920192718506],["▁ਮ",-11.589213371276855],["▁110",-11.5892915725708],["▁kogu",-11.589590072631836],["sco",-11.589629173278809],["▁הו",-11.589646339416504],["▁본",-11.589765548706056],["▁სხვა",-11.589768409729004],["ас",-11.589788436889648],["▁63",-11.589836120605469],["നം",-11.590082168579102],["ddi",-11.590083122253418],["▁gjitha",-11.59017562866211],["▁مطالب",-11.59026336669922],["gov",-11.590335845947266],["ték",-11.59046745300293],["▁Бұл",-11.590531349182127],["ပေး",-11.59068489074707],["Atom",-11.590707778930664],["のです",-11.590982437133787],["▁Eine",-11.59106159210205],["について",-11.5911226272583
],["怎么",-11.591157913208008],["cza",-11.591232299804688],["▁futuro",-11.591259956359863],["▁bolo",-11.591429710388184],["టీ",-11.591443061828612],["っている",-11.591496467590332],["ності",-11.591645240783691],["▁छैन",-11.591678619384766],["жу",-11.591683387756348],["దు",-11.591686248779297],["提出",-11.591757774353027],["条件",-11.59197235107422],["▁ajo",-11.59211540222168],["▁எ",-11.592292785644531],["▁sort",-11.592474937438965],["lly",-11.59249782562256],["ћи",-11.59249782562256],["ିତ",-11.592559814453123],["▁käsi",-11.592609405517578],["вай",-11.5926513671875],["şa",-11.59269905090332],["lē",-11.592738151550291],["ယ",-11.59275245666504],["▁исто",-11.592913627624512],["▁‫",-11.592974662780762],["▁sáng",-11.593061447143556],["▁mina",-11.593168258666992],["มือ",-11.593188285827637],["Ko",-11.593304634094238],["ží",-11.593320846557615],["▁benar",-11.593408584594728],["മ്പ",-11.593478202819824],["不断",-11.59361743927002],["ា",-11.593774795532228],["olla",-11.593785285949709],["▁primi",-11.593810081481934],["kite",-11.593838691711426],["▁hoy",-11.593955993652344],["▁prostor",-11.593998908996582],["lk",-11.59404182434082],["新的",-11.594078063964844],["átor",-11.594257354736328],["▁هزار",-11.59455680847168],["▁फिल्म",-11.59463882446289],["ball",-11.594650268554688],["пу",-11.594655990600586],["▁били",-11.594686508178713],["▁ले",-11.594802856445312],["נים",-11.594970703125],["ዉ",-11.594990730285645],["tza",-11.59511375427246],["▁Office",-11.595230102539062],["اه",-11.595245361328123],["▁tham",-11.595370292663574],["▁kirja",-11.595379829406738],["We",-11.595396041870115],["▁tio",-11.595489501953123],["▁tetap",-11.59549617767334],["▁món",-11.595540046691896],["នៅ",-11.595610618591309],["ണ്",-11.595630645751951],["▁ней",-11.595712661743164],["們",-11.595788955688477],["ந்து",-11.595927238464355],["▁individu",-11.596084594726562],["/5",-11.596104621887209],["ೂ",-11.59625244140625],["важ",-11.59626293182373],["nico",-11.596266746520996],["vý",-11.596423149108888],["கு",-11.596612930297852],["ларды",-11.596715927124023],["▁Deutschland",-11.596768379211426],["▁اطلاعات",-11.59682559967041],["▁kedua",-11.596843719482422],["نده",-11.596856117248535],["▁tổ",-11.597079277038574],["wyd",-11.597124099731444],["▁böyük",-11.597259521484377],["のが",-11.59731674194336],["▁hyvin",-11.597393989562988],["▁Rio",-11.59739875793457],["▁дом",-11.597432136535645],["ေလး",-11.597450256347656],["▁qarşı",-11.597460746765137],["iadau",-11.597466468811035],["遊戲",-11.59747314453125],["为了",-11.597494125366213],["▁baza",-11.59749984741211],["ovo",-11.597503662109377],["▁असे",-11.597509384155272],["▁করতে",-11.597650527954102],["▁RM",-11.597804069519045],["ilu",-11.597882270812988],["fre",-11.597905158996582],["nach",-11.597986221313477],["vus",-11.598055839538574],["▁එම",-11.598093032836914],["▁ഞാന്",-11.598114967346191],["վ",-11.598170280456545],["▁KE",-11.598217964172363],["91",-11.598219871520996],["▁विचार",-11.598255157470703],["▁информация",-11.598299026489258],["▁thích",-11.598417282104492],["6%",-11.598532676696776],["▁memberi",-11.598557472229004],["▁खेल",-11.59860134124756],["▁Saint",-11.59886646270752],["▁eden",-11.59887981414795],["▁اخبار",-11.59891128540039],["▁عبد",-11.598995208740234],["▁മാ",-11.599088668823242],["有限公司",-11.599133491516112],["gri",-11.599143028259276],["▁слова",-11.599143981933594],["▁presenta",-11.59917449951172],["▁Əliyev",-11.599428176879885],["▁οποία",-11.599443435668944],["ยัง",-11.599591255187988],["кам",-11.59962272644043],["▁nh",-11.599637031555176],["лик",-11.599658966064451],["ခြင်း",-11.5997314453125],
["▁факт",-11.599766731262209],["idos",-11.599815368652344],["▁karo",-11.599897384643556],["▁uga",-11.5999116897583],["▁води",-11.599929809570312],["▁PP",-11.59995174407959],["▁இது",-11.59996223449707],["шу",-11.600141525268556],["▁हुन्छ",-11.600179672241213],["▁hyvä",-11.60033893585205],["tten",-11.60049533843994],["ాయి",-11.600507736206056],["資料",-11.600525856018066],["います",-11.600635528564451],["▁EUR",-11.600713729858398],[":15",-11.600778579711914],["yen",-11.600896835327148],["یی",-11.600964546203612],["▁Ska",-11.601319313049316],["ked",-11.601335525512695],["▁બે",-11.601460456848145],["▁lille",-11.601520538330078],["feira",-11.601703643798828],["ার",-11.601777076721191],["mah",-11.601797103881836],["92",-11.601799011230469],["▁TH",-11.601804733276367],["PC",-11.601888656616213],["▁problemas",-11.60193157196045],["影",-11.602221488952637],["PA",-11.602347373962402],["▁بس",-11.602365493774414],["▁jedna",-11.602446556091309],["雷",-11.602452278137209],["▁chế",-11.602473258972168],["▁Jahre",-11.60250473022461],["▁hotell",-11.602511405944824],["57",-11.602517127990724],["▁بزرگ",-11.602699279785156],["七",-11.60278034210205],["税",-11.602788925170898],["▁yng",-11.602860450744627],["Ra",-11.60288429260254],["▁gay",-11.602931022644045],["模式",-11.602954864501951],["▁പാ",-11.602985382080078],["ији",-11.603050231933594],["디",-11.603065490722656],["▁egiten",-11.603073120117188],["▁zou",-11.603144645690918],["演",-11.603326797485352],["▁estado",-11.603346824645996],["업",-11.603448867797852],["ric",-11.603466987609863],["▁હ",-11.603501319885254],["ጅ",-11.60356903076172],["▁फिर",-11.603572845458984],["▁നി",-11.603792190551758],["▁Pin",-11.603837966918944],["▁fotos",-11.60385799407959],["▁plu",-11.603897094726562],["ല്ല",-11.60394287109375],["▁انتخاب",-11.603952407836914],["▁ինչ",-11.60400390625],["сті",-11.604290962219238],["yr",-11.604329109191896],["实",-11.604373931884766],["ሙ",-11.60438346862793],["രു",-11.604485511779783],["▁studio",-11.604572296142578],["ера",-11.604586601257324],["▁ish",-11.604634284973145],["▁לפני",-11.604659080505373],["▁kart",-11.604660987854004],["RA",-11.60466194152832],["▁доста",-11.604691505432127],["igu",-11.604731559753418],["bad",-11.604775428771973],["포",-11.604860305786133],["變",-11.604864120483398],["종",-11.604957580566406],["▁උ",-11.605005264282228],["▁رہا",-11.605355262756348],["ნის",-11.605398178100586],["▁Ай",-11.60542106628418],["ësh",-11.605499267578123],["▁nimi",-11.605539321899414],["戰",-11.605539321899414],["▁osób",-11.605578422546388],["▁parece",-11.605591773986816],["▁rit",-11.605768203735352],["▁þegar",-11.605856895446776],["▁Tags",-11.60615062713623],["פת",-11.606154441833496],["▁બ",-11.606289863586426],["▁read",-11.606356620788574],["▁transfer",-11.60673999786377],["mun",-11.606769561767578],["▁دارند",-11.60677433013916],["▁kjo",-11.606916427612305],["അ",-11.60693073272705],["tú",-11.60703468322754],["ో",-11.607051849365234],["ரை",-11.607061386108398],["▁آل",-11.60720443725586],["▁وقد",-11.607211112976074],["▁suatu",-11.607316970825195],["dah",-11.607319831848145],["▁dobre",-11.607439041137695],["град",-11.607444763183594],["▁unserer",-11.607492446899414],["▁လူ",-11.607510566711426],["ях",-11.607547760009766],["▁feel",-11.60758113861084],["▁років",-11.60759449005127],["▁Main",-11.607651710510254],["건",-11.60765266418457],["رس",-11.607730865478516],["試",-11.607786178588867],["▁тако",-11.607799530029297],["اش",-11.60782527923584],["եւ",-11.607962608337402],["lun",-11.60802936553955],["ол",-11.60804557800293],["▁significa",-11.608077049255373],["ക്കു",-11.608
08277130127],["▁цел",-11.608092308044434],["media",-11.608125686645508],["▁sektor",-11.608139038085938],["發現",-11.608155250549316],["hand",-11.60829734802246],["بي",-11.608412742614746],["54",-11.608549118041992],["▁Mobil",-11.608595848083496],["▁eh",-11.608631134033203],["▁bolj",-11.608707427978516],["▁Ag",-11.60887336730957],["▁galego",-11.608980178833008],["vai",-11.609045028686523],["질",-11.609172821044922],["▁Ελλάδα",-11.609217643737791],["▁эл",-11.609258651733398],["▁talent",-11.609420776367188],["▁Service",-11.609431266784668],["▁구",-11.609458923339844],["AD",-11.609475135803224],["▁DVD",-11.609614372253418],["▁System",-11.609623908996582],["ሊ",-11.609989166259766],["ација",-11.610055923461914],["lju",-11.61009407043457],["yên",-11.610102653503418],["▁бюджет",-11.610284805297852],["▁உ",-11.610447883605955],["▁도",-11.610451698303224],["liśmy",-11.610501289367676],["▁세",-11.610559463500977],["▁نقل",-11.610599517822266],["tila",-11.610708236694336],["lei",-11.61072826385498],["jel",-11.611011505126951],["▁veliko",-11.611016273498535],["▁મા",-11.611047744750977],["▁dunha",-11.611099243164062],["由于",-11.611278533935549],["نت",-11.611294746398926],["ड़",-11.611455917358398],["Ž",-11.611468315124512],["▁പുതിയ",-11.611468315124512],["▁спо",-11.611529350280762],["▁Grund",-11.611578941345217],["▁bha",-11.611594200134276],["कै",-11.611812591552734],["尼",-11.61190414428711],["rī",-11.611921310424805],["▁meine",-11.612010955810549],["▁Что",-11.612051963806152],["▁Estado",-11.612064361572266],["▁prvi",-11.6121187210083],["▁sino",-11.612125396728516],["▁Кон",-11.612141609191896],["▁ನಲ್ಲಿ",-11.6121826171875],["▁Namun",-11.612269401550291],["▁Welt",-11.612324714660645],["так",-11.612550735473633],["ள",-11.612554550170898],["痛",-11.61258316040039],["oro",-11.612781524658203],["▁लोक",-11.612847328186035],["▁yana",-11.612854957580566],["ნე",-11.61286449432373],["mil",-11.612990379333496],["▁vite",-11.613037109375],["...)",-11.613078117370604],["▁thêm",-11.613121032714844],["▁font",-11.61320686340332],["▁jotka",-11.613300323486328],["▁fly",-11.613360404968262],["▁quasi",-11.61337947845459],["▁publik",-11.61341953277588],["sza",-11.61346435546875],["▁என்ற",-11.61350917816162],["館",-11.61366367340088],["dad",-11.613728523254396],["▁hasil",-11.613798141479492],["티",-11.614026069641112],["타",-11.61402988433838],["▁հ",-11.614051818847656],["ขาย",-11.61407470703125],["רי",-11.61416721343994],["▁ಹ",-11.614184379577637],["▁10-",-11.61419677734375],["其实",-11.61432647705078],["oedd",-11.61433219909668],["▁آموزش",-11.61436939239502],["eth",-11.614459037780762],["自由",-11.614474296569824],["▁ڪ",-11.614538192749023],["65",-11.614561080932615],["▁направи",-11.614574432373049],["▁Ort",-11.6146879196167],["၀",-11.614852905273438],["▁गए",-11.614925384521484],["▁Ө",-11.614927291870115],["▁hum",-11.614956855773926],["크",-11.615002632141112],["▁बारे",-11.61501121520996],["▁tarif",-11.615056037902832],["▁пен",-11.615078926086426],["ơ",-11.615100860595703],["▁gyfer",-11.61510181427002],["▁αντι",-11.615238189697266],["▁Και",-11.61530590057373],["▁Berg",-11.615346908569336],["▁laikā",-11.615473747253418],["▁led",-11.615488052368164],["根",-11.615490913391112],["лах",-11.615508079528809],["igt",-11.615543365478516],["menti",-11.615553855895996],["▁አስ",-11.615739822387695],["▁job",-11.615796089172363],["მი",-11.615832328796388],["▁waxay",-11.615839004516602],["造成",-11.615959167480469],["운",-11.615965843200684],["▁Robert",-11.616028785705566],["▁2012.",-11.616172790527344],["保持",-11.616211891174316],["pia",-11.616257667541504],["▁Ku
rdistanê",-11.61631679534912],["▁vissza",-11.616321563720703],["ندي",-11.616351127624512],["▁После",-11.6163969039917],["▁Lai",-11.616408348083496],["退",-11.616541862487791],["▁eich",-11.616639137268066],["ಟಿ",-11.616888046264648],["▁။",-11.61692237854004],["44",-11.616986274719238],["▁ඒක",-11.617108345031738],["లకు",-11.617170333862305],["িত",-11.61726188659668],["▁пут",-11.617277145385742],["mena",-11.617352485656738],["▁passar",-11.617456436157228],["lid",-11.617545127868652],["想要",-11.61756420135498],["▁Ş",-11.617721557617188],["drag",-11.617761611938477],["던",-11.617953300476074],["/2",-11.618005752563477],["▁fre",-11.618114471435549],["▁assim",-11.61820125579834],["▁такой",-11.618220329284668],["▁Të",-11.618249893188477],["▁око",-11.618326187133787],["ען",-11.618500709533691],["▁není",-11.618508338928224],["ño",-11.618510246276855],["▁ਉਹ",-11.6185884475708],["ndu",-11.61860179901123],["▁nor",-11.61875820159912],["▁вода",-11.618770599365234],["▁26.",-11.618852615356444],["்",-11.619022369384766],["ంలో",-11.619054794311523],["▁තමයි",-11.619115829467772],["▁быў",-11.619142532348633],["▁phí",-11.619173049926758],["ിക്ക",-11.6193208694458],["δο",-11.619401931762695],["▁27.",-11.619429588317873],["पुर",-11.619571685791016],["▁හෝ",-11.619668960571287],["ER",-11.619834899902344],["▁Tek",-11.619918823242188],["▁විය",-11.619935035705566],["▁сейчас",-11.619943618774414],["▁эмес",-11.619958877563477],["▁Bas",-11.6199951171875],["▁Vý",-11.62004280090332],["▁vit",-11.620097160339355],["▁होने",-11.620149612426758],["建立",-11.620160102844238],["▁28.",-11.620184898376465],["lichen",-11.62020492553711],["不会",-11.620328903198242],["角",-11.62034511566162],["ேன்",-11.620370864868164],["▁लि",-11.62046718597412],["වේ",-11.62047290802002],["▁nærheten",-11.620515823364258],["ského",-11.620518684387209],["ಿದ್ದ",-11.620655059814451],["tava",-11.620694160461426],["hui",-11.620928764343262],["ської",-11.620928764343262],["ologi",-11.62094020843506],["命",-11.621009826660156],["▁tým",-11.621064186096191],["łu",-11.621066093444824],["ቶች",-11.621248245239258],["nto",-11.621333122253418],["▁Pal",-11.62134075164795],["▁բոլոր",-11.621365547180176],["▁tidligere",-11.621383666992188],["▁भर",-11.621477127075195],["ବି",-11.621546745300291],["▁University",-11.62158203125],["lyn",-11.621664047241213],["▁więcej",-11.62171459197998],["▁بوده",-11.62173557281494],["▁edən",-11.622027397155762],["▁rat",-11.622055053710938],["▁ute",-11.622075080871582],["పు",-11.622148513793944],["▁ବ",-11.62217140197754],["стер",-11.622179985046388],["ຄວາມ",-11.622200965881348],["▁compte",-11.62221908569336],["でしょう",-11.622286796569824],["ശ",-11.622337341308594],["eme",-11.622525215148926],["景",-11.622544288635254],["ssi",-11.622611045837402],["▁lập",-11.622635841369627],["ський",-11.622693061828612],["کا",-11.622711181640623],["未来",-11.622715950012209],["▁вони",-11.622753143310549],["▁सकते",-11.622794151306152],["kı",-11.622976303100586],["ണ്ട",-11.62311553955078],["▁ሰ",-11.623228073120115],["▁мо",-11.623346328735352],["▁bre",-11.623456954956056],["▁Ry",-11.62348175048828],["▁fl",-11.62348461151123],["草",-11.623544692993164],["▁Est",-11.623607635498049],["čas",-11.623754501342772],["根据",-11.62380027770996],["▁kemi",-11.623845100402832],["▁sous",-11.623907089233398],["▁urte",-11.623942375183104],["▁العالم",-11.623976707458496],["ມາ",-11.623988151550291],["▁Danmark",-11.623992919921877],["▁২০১৮",-11.624011039733888],["▁Mah",-11.6240816116333],["▁Blogger",-11.62409782409668],["▁жил",-11.624153137207031],["▁पूर्व",-11.624213218688965],["ावर",-11.6243696
21276855],["▁عدد",-11.624412536621094],["្",-11.624433517456056],["▁þetta",-11.624502182006836],["▁أكثر",-11.62450885772705],["▁zelf",-11.62461757659912],["政治",-11.62463665008545],["▁bhí",-11.624638557434082],["▁точно",-11.62467098236084],["لة",-11.62480354309082],["春",-11.62484645843506],["tav",-11.62512493133545],["ਦੇ",-11.625242233276367],["▁прес",-11.625422477722168],["ေတာ္",-11.62547779083252],["úr",-11.625484466552734],["λε",-11.625659942626951],["zor",-11.625706672668455],["▁تھے",-11.625775337219238],["▁هیچ",-11.625782012939451],["cou",-11.625798225402832],["▁prefer",-11.62582015991211],["▁olaraq",-11.62593936920166],["பி",-11.626053810119627],["▁vend",-11.626124382019045],["ჭ",-11.626290321350098],["▁въз",-11.626307487487791],["osi",-11.626338005065918],["▁bland",-11.626462936401367],["ിന്",-11.626825332641602],["▁Roman",-11.626836776733398],["ሃ",-11.626867294311523],["▁Dans",-11.626888275146484],["▁Thu",-11.62694263458252],["事件",-11.626968383789062],["一年",-11.627020835876465],["改革",-11.627082824707031],["指出",-11.627212524414062],["ਵ",-11.627259254455566],["▁글",-11.627333641052246],["back",-11.627479553222656],["▁щодо",-11.62756061553955],["正式",-11.627608299255373],["Δ",-11.627635955810549],["▁모든",-11.627640724182127],["ał",-11.627659797668455],["вед",-11.627789497375488],["▁ավելի",-11.627806663513184],["▁Bel",-11.627825736999512],["▁سبب",-11.627830505371094],["bou",-11.627897262573242],["lot",-11.627930641174316],["ió",-11.627970695495604],["▁között",-11.628045082092283],["▁ofte",-11.628057479858398],["ский",-11.628183364868164],["kään",-11.628213882446287],["த்தி",-11.628214836120604],["▁each",-11.62826442718506],["▁YouTube",-11.628460884094238],["ché",-11.62864875793457],["▁sức",-11.62876796722412],["teko",-11.62878704071045],["larla",-11.628788948059082],["टा",-11.62880516052246],["cit",-11.628844261169434],["lú",-11.62889289855957],["Bo",-11.628913879394531],["vam",-11.628920555114746],["ades",-11.629080772399902],["▁വ",-11.629138946533203],["ein",-11.629210472106934],["▁ještě",-11.62924098968506],["lep",-11.629247665405272],["syn",-11.629328727722168],["kant",-11.62939167022705],["▁mellom",-11.62942600250244],["яс",-11.62957763671875],["帮助",-11.62958526611328],["sos",-11.629596710205078],["▁keluar",-11.629754066467283],["▁67",-11.62975788116455],["Man",-11.62977695465088],["▁genera",-11.629881858825684],["▁140",-11.629919052124023],["uch",-11.630041122436523],["לו",-11.630060195922852],["idan",-11.630078315734863],["令",-11.630154609680176],["▁sv",-11.630253791809082],["▁být",-11.630258560180664],["▁سیاسی",-11.630367279052734],["▁член",-11.630391120910645],["▁ಬಗ್ಗೆ",-11.630496978759766],["вање",-11.630534172058104],["▁tutte",-11.630586624145508],["▁Fall",-11.630634307861328],["▁lelaki",-11.63064956665039],["9%",-11.630743980407717],["အား",-11.630781173706056],["▁kok",-11.630858421325684],["ഡ",-11.630887031555176],["▁Δεν",-11.63097095489502],["uju",-11.630989074707031],["▁log",-11.631032943725586],["▁چيو",-11.631052017211914],["दी",-11.631061553955078],["▁ඔහු",-11.631063461303713],["बी",-11.631125450134276],["MB",-11.631168365478516],["▁trabalho",-11.631169319152832],["▁Για",-11.631199836730955],["▁گئے",-11.631296157836914],["▁کوي",-11.63164234161377],["▁qa",-11.631723403930664],["स्त",-11.63174819946289],["▁ဒီ",-11.631772994995115],["یو",-11.631857872009276],["▁см",-11.63188934326172],["ജി",-11.632074356079102],["అ",-11.63210391998291],["▁Ele",-11.632124900817873],["装",-11.63217830657959],["▁которых",-11.632224082946776],["▁ہوں",-11.63223648071289],["▁العمل",-11.632269859313965
],["dob",-11.632328987121582],["ไม่ได้",-11.632436752319336],["esh",-11.632695198059082],["ικής",-11.632756233215332],["▁unu",-11.632853507995604],["▁rund",-11.632935523986816],["iy",-11.633074760437012],["rri",-11.633137702941896],["სი",-11.633156776428224],["tara",-11.633177757263184],["▁zelo",-11.633203506469728],["▁Auch",-11.633221626281738],["▁sätt",-11.633240699768066],["▁20%",-11.63324737548828],["bok",-11.633256912231444],["بی",-11.633308410644531],["rol",-11.633478164672852],["▁quand",-11.633633613586426],["Ro",-11.633666038513184],["必",-11.633740425109863],["urilor",-11.633828163146973],["社會",-11.6339750289917],["▁bästa",-11.63402271270752],["stin",-11.634025573730469],["uoja",-11.634099960327148],["рах",-11.63415241241455],["▁içinde",-11.634185791015623],["čio",-11.634329795837402],["bah",-11.63436508178711],["▁quia",-11.634390830993652],["တွင်",-11.634418487548828],["▁پھر",-11.634477615356444],["▁کرنا",-11.63448429107666],["сен",-11.634493827819824],["ције",-11.634552001953123],["ines",-11.634560585021973],["裡",-11.63467025756836],["zó",-11.634723663330078],["52",-11.63479995727539],["▁Bad",-11.634836196899414],["sme",-11.63499641418457],["▁власти",-11.635004997253418],["інде",-11.635034561157228],["sə",-11.635039329528809],["▁nodig",-11.63506031036377],["▁கா",-11.6350679397583],["端",-11.635106086730955],["▁termasuk",-11.635109901428224],["▁zor",-11.635177612304688],["▁توان",-11.63517951965332],["▁sở",-11.6351957321167],["йн",-11.635297775268556],["станов",-11.635303497314451],["uja",-11.635339736938477],["▁४",-11.635370254516602],["тур",-11.63540744781494],["uto",-11.635436058044434],["TS",-11.635448455810549],["레",-11.63548755645752],["▁check",-11.63550853729248],["的に",-11.635541915893556],["罗",-11.635580062866213],["▁trend",-11.63576889038086],["▁Ди",-11.63577651977539],["▁γ",-11.635777473449709],["čni",-11.635797500610352],["88",-11.63581085205078],["رات",-11.635834693908691],["▁sakit",-11.635869026184082],["바",-11.635878562927246],["út",-11.636075973510742],["▁ве",-11.636183738708496],["▁România",-11.636255264282228],["▁Link",-11.636297225952148],["ря",-11.63629913330078],["zwa",-11.636346817016602],["▁tamin",-11.636435508728027],["▁Kultur",-11.63644027709961],["ய்",-11.636475563049316],["▁ken",-11.636537551879885],["▁Она",-11.636567115783691],["ishi",-11.636589050292969],["발",-11.636627197265623],["▁mann",-11.636642456054688],["别",-11.636699676513672],["▁Nga",-11.636832237243652],["▁ໃຫ້",-11.6368989944458],["▁sendo",-11.636980056762695],["rais",-11.636984825134276],["▁pont",-11.637152671813965],["▁будзе",-11.637200355529783],["▁बार",-11.63722324371338],["连",-11.63724136352539],["lerine",-11.637295722961426],["ові",-11.63737678527832],["▁Grand",-11.637415885925291],["運",-11.637421607971191],["istas",-11.637447357177734],["▁norge",-11.637471199035645],["▁දී",-11.637474060058594],["anto",-11.637479782104492],["2%",-11.637491226196287],["▁입",-11.637723922729492],["ität",-11.638031959533691],["▁болады",-11.638041496276855],["lise",-11.6381254196167],["역",-11.638147354125977],["▁सबै",-11.638236045837402],["ছে",-11.638250350952148],["larga",-11.638375282287598],["ņu",-11.638447761535645],["კვ",-11.638531684875488],["▁Trong",-11.63854694366455],["シ",-11.638568878173828],["▁किसी",-11.638786315917969],["造",-11.638998031616213],["▁otros",-11.63910675048828],["민",-11.639147758483888],["urs",-11.639240264892578],["72",-11.639424324035645],["▁несколько",-11.63942527770996],["nyt",-11.639508247375488],["mien",-11.63953685760498],["▁वाले",-11.639602661132812],["▁element",-11.6396169662475
6],["ြ",-11.639646530151367],["sent",-11.639695167541504],["数据",-11.639822006225586],["ಟ",-11.639864921569824],["▁vật",-11.639894485473633],["ੜ",-11.63991641998291],["▁kulit",-11.639952659606934],["▁niż",-11.640089988708496],["▁1992",-11.640218734741213],["▁homo",-11.640419960021973],["▁kamp",-11.64046859741211],["င်",-11.640497207641602],["極",-11.640522956848145],["ір",-11.640527725219728],["▁plej",-11.640600204467772],["mbra",-11.640625],["▁toutes",-11.640690803527832],["ข้อมูล",-11.640783309936523],["▁још",-11.640874862670898],["▁Беларусь",-11.640893936157228],["ιών",-11.640915870666504],["ικών",-11.64096736907959],["▁rom",-11.641068458557127],["▁Хо",-11.641105651855469],["▁penyakit",-11.641164779663086],["▁μέσα",-11.641165733337402],["▁ਕੀਤਾ",-11.641168594360352],["ことを",-11.641209602355955],["legt",-11.641316413879396],["▁сайта",-11.64133358001709],["以前",-11.641371726989746],["hay",-11.641404151916504],["▁membantu",-11.641460418701172],["▁защото",-11.641505241394045],["▁xo",-11.641571998596191],["▁tradi",-11.64167594909668],["▁corpo",-11.64170742034912],["▁всегда",-11.641742706298828],["▁दुई",-11.64175510406494],["ttä",-11.641756057739258],["เมือง",-11.641847610473633],["▁گروه",-11.641860961914062],["▁යන",-11.642061233520508],["ης",-11.642091751098633],["▁vrijeme",-11.642107009887695],["▁Nor",-11.6421480178833],["▁book",-11.642168045043944],["▁baba",-11.642318725585938],["▁fil",-11.642459869384766],["وان",-11.642680168151855],["▁पु",-11.642745018005373],["▁derfor",-11.642791748046877],["él",-11.642848014831545],["▁رسول",-11.64287567138672],["▁saada",-11.642887115478516],["▁შესახებ",-11.642903327941896],["વી",-11.642950057983398],["شان",-11.642986297607422],["▁ενώ",-11.643244743347168],["בי",-11.643247604370115],["දී",-11.643274307250977],["されている",-11.643377304077148],["▁makanan",-11.643383026123049],["uru",-11.64340114593506],["ácia",-11.643424034118652],["ču",-11.643465042114258],["liste",-11.643503189086914],["▁1991",-11.643624305725098],["▁czas",-11.643644332885742],["weg",-11.643647193908691],["▁våra",-11.643678665161133],["▁الق",-11.643696784973145],["σι",-11.643718719482422],["必要",-11.643739700317385],["▁Raz",-11.643741607666016],["都有",-11.64376449584961],["▁भन्ने",-11.643806457519531],["▁mẹ",-11.643855094909668],["vej",-11.643939971923828],["эл",-11.64404582977295],["▁Telugu",-11.644071578979492],["▁front",-11.644078254699709],["ცი",-11.644081115722656],["个人",-11.644091606140137],["sir",-11.644158363342283],["▁ను",-11.644244194030762],["kap",-11.644404411315918],["▁MB",-11.644438743591309],["ять",-11.644468307495115],["▁vastu",-11.64448356628418],["▁different",-11.644609451293944],["▁ទី",-11.64463996887207],["▁प्रमुख",-11.64484405517578],["▁metal",-11.644868850708008],["▁еден",-11.64498519897461],["▁sasa",-11.645071029663086],["SH",-11.645103454589844],["▁வ",-11.645133972167969],["တယ္",-11.64516544342041],["▁sondern",-11.64518928527832],["त्य",-11.64523696899414],["larından",-11.645313262939451],["လို႔",-11.645341873168944],["▁лише",-11.645505905151367],["▁ਆ",-11.645513534545898],["▁ant",-11.645538330078123],["▁document",-11.645565032958984],["▁నుంచి",-11.6455659866333],["▁gjennom",-11.645609855651855],["▁Nel",-11.645620346069336],["▁گل",-11.645624160766602],["▁När",-11.645913124084473],["▁දැන්",-11.64628791809082],["▁AC",-11.64629364013672],["▁স",-11.646382331848145],["▁όμως",-11.646392822265623],["▁عمران",-11.646489143371582],["sana",-11.646493911743164],["▁நான்",-11.646581649780272],["▁sarà",-11.64659595489502],["85",-11.646697998046877],["▁neki",-11.6467924118042],["▁siz",-11.
64686107635498],["▁کنیم",-11.647050857543944],["▁Baş",-11.647058486938477],["▁tut",-11.647135734558104],["вр",-11.647167205810549],["▁ਗਿਆ",-11.64732551574707],["дук",-11.647428512573242],["▁while",-11.647433280944824],["▁sollte",-11.647459983825684],["ické",-11.64753246307373],["оч",-11.647537231445312],["gis",-11.647570610046388],["地区",-11.64765167236328],["▁klas",-11.647852897644045],["▁hali",-11.647871971130373],["گان",-11.647950172424316],["▁Mir",-11.647954940795898],["üü",-11.64797019958496],["AK",-11.648000717163086],["etako",-11.648030281066896],["▁сөз",-11.648056030273438],["▁१०",-11.648119926452637],["59",-11.648197174072266],["ạ",-11.648222923278809],["49",-11.648225784301758],["▁pagina",-11.648482322692873],["ბ",-11.648540496826172],["▁ପରେ",-11.6485595703125],["▁Bol",-11.648567199707031],["▁Mont",-11.64857578277588],["▁monitor",-11.648653984069824],["כן",-11.64873218536377],["▁نوع",-11.64889907836914],["blo",-11.648921012878418],["▁ឲ្យ",-11.648943901062012],["/10",-11.64897346496582],["▁order",-11.648983001708984],["raš",-11.64901065826416],["▁bình",-11.649032592773438],["▁Kita",-11.649042129516602],["▁ને",-11.6490478515625],["izi",-11.649060249328612],["eniu",-11.649109840393066],["▁product",-11.649218559265137],["▁नयाँ",-11.64932346343994],["▁TA",-11.649396896362305],["問",-11.649421691894531],["▁Ph",-11.649450302124023],["nike",-11.649542808532717],["tään",-11.649733543395996],["cil",-11.64974880218506],["▁pov",-11.64979648590088],["ข",-11.649815559387209],["▁begitu",-11.649904251098633],["▁6-",-11.649930000305176],["▁Br",-11.649994850158691],["▁통해",-11.65004062652588],["ಲ್ಲ",-11.650071144104004],["▁broj",-11.650115966796877],["东",-11.650137901306152],["▁Euskal",-11.650309562683104],["▁echt",-11.65038013458252],["▁Hon",-11.650473594665527],["▁गरिएको",-11.650485038757324],["udu",-11.65049648284912],["▁TE",-11.65052604675293],["▁wali",-11.650594711303713],["▁گی",-11.65063762664795],["ões",-11.65078830718994],["▁президент",-11.650802612304688],["42",-11.65096378326416],["▁අය",-11.651012420654297],["He",-11.651317596435549],["ນີ້",-11.651317596435549],["原因",-11.651359558105469],["▁ran",-11.651421546936035],["▁الج",-11.6514310836792],["▁Yeni",-11.6514892578125],["▁sydd",-11.651498794555664],["عد",-11.651700019836426],["êm",-11.651700973510742],["fla",-11.65173053741455],["▁пет",-11.651751518249512],["▁major",-11.652064323425291],["anza",-11.652106285095217],["▁House",-11.65223503112793],["▁Concello",-11.652252197265623],["▁év",-11.65227222442627],["णी",-11.652294158935549],["▁qué",-11.652356147766112],["▁Jam",-11.65235710144043],["ვის",-11.652413368225098],["▁estão",-11.652427673339844],["业务",-11.652435302734377],["▁sci",-11.652457237243652],["vous",-11.652538299560549],["jung",-11.652541160583496],["現",-11.652615547180176],["▁кал",-11.652873992919922],["员",-11.652915954589844],["▁документ",-11.65293788909912],["áil",-11.65307331085205],["SO",-11.653076171875],["ल्",-11.653125762939451],["▁mera",-11.653189659118652],["ут",-11.65328311920166],["▁qəbul",-11.653443336486816],["כת",-11.653547286987305],["▁فيها",-11.65355110168457],["eń",-11.65358829498291],["▁velit",-11.653590202331545],["最近",-11.653615951538086],["▁улсын",-11.653643608093262],["тық",-11.653690338134766],["▁esti",-11.653726577758787],["카",-11.653860092163086],["▁været",-11.653910636901855],["பா",-11.653970718383787],["չ",-11.654122352600098],["▁Ак",-11.654212951660156],["▁बात",-11.654340744018556],["▁doğru",-11.654351234436035],["ዛ",-11.654473304748535],["▁nove",-11.654475212097168],["ঃ",-11.65457820892334],["ಧ",-11.654
64973449707],["けど",-11.654653549194336],["党",-11.654664039611816],["▁cro",-11.654687881469728],["整",-11.654826164245604],["▁تور",-11.654837608337402],["▁datos",-11.654854774475098],["▁lado",-11.65487003326416],["▁काय",-11.654891967773438],["▁داشت",-11.654966354370115],["▁ove",-11.655084609985352],["kti",-11.655115127563477],["費",-11.655136108398438],["ム",-11.65532398223877],["▁호텔",-11.655324935913086],["雪",-11.655378341674805],["▁libero",-11.65540599822998],["ované",-11.655527114868164],["adu",-11.655542373657228],["▁sito",-11.655557632446287],["▁अधिक",-11.655590057373049],["▁binnen",-11.655637741088867],["▁પ",-11.655702590942385],["άν",-11.655722618103027],["▁juba",-11.65585708618164],["▁raport",-11.656006813049316],["▁december",-11.656352043151855],["zes",-11.656354904174805],["▁depuis",-11.65635871887207],["hli",-11.65641975402832],["▁jenis",-11.656635284423828],["▁hỏi",-11.656641960144045],["атор",-11.656679153442385],["▁dol",-11.65671730041504],["itä",-11.656798362731934],["မိ",-11.65693473815918],["▁ביותר",-11.656937599182127],["감",-11.656974792480469],["nke",-11.657061576843262],["▁компании",-11.657100677490234],["▁تھا۔",-11.657103538513184],["клад",-11.657148361206056],["▁numero",-11.657158851623535],["jus",-11.65717315673828],["tiv",-11.657275199890137],["▁Вас",-11.65736198425293],["▁esto",-11.657370567321776],["▁varit",-11.657463073730469],["ките",-11.657485961914062],["▁එය",-11.657489776611328],["▁மற்றும்",-11.657530784606934],["▁我",-11.657538414001465],["▁nuestro",-11.657544136047363],["▁buna",-11.657565116882324],["▁ešte",-11.657569885253906],["CA",-11.657649993896484],["লি",-11.657690048217772],["вя",-11.657878875732422],["šana",-11.658028602600098],["▁struktur",-11.658036231994627],["沙",-11.658071517944336],["rni",-11.658080101013184],["uð",-11.658096313476562],["▁ваш",-11.658116340637209],["شت",-11.658123970031738],["▁phụ",-11.658145904541016],["وال",-11.65843677520752],["▁डा",-11.65850830078125],["▁Log",-11.658547401428224],["▁metus",-11.658560752868652],["▁mok",-11.658571243286133],["▁ერთი",-11.658594131469728],["ovala",-11.658596992492676],["rako",-11.65861701965332],["▁finne",-11.65861988067627],["▁ଏବଂ",-11.65870475769043],["▁bereits",-11.658843994140623],["هه",-11.658864974975586],["▁Sara",-11.659012794494627],["▁mampu",-11.659071922302246],["plo",-11.659099578857422],["עס",-11.659134864807127],["操作",-11.659154891967772],["▁köz",-11.65915584564209],["▁spot",-11.65915584564209],["▁kost",-11.659183502197266],["erat",-11.659219741821287],["あ",-11.659228324890137],["▁mulai",-11.659379959106444],["出现",-11.65939235687256],["▁pravi",-11.659480094909668],["nl",-11.65949249267578],["тери",-11.659520149230955],["▁হয়েছে",-11.659555435180664],["ပဲ",-11.65959930419922],["▁moderne",-11.659611701965332],["▁fiecare",-11.659613609313965],["гр",-11.659735679626465],["▁cili",-11.65987491607666],["▁Bro",-11.65997314453125],["වට",-11.65999698638916],["음",-11.660064697265623],["▁pueden",-11.660109519958496],["▁projektu",-11.66019344329834],["节",-11.660284996032717],["ään",-11.660304069519045],["▁болно",-11.660407066345217],["▁elő",-11.66041660308838],["▁byla",-11.660423278808594],["입",-11.660447120666504],["▁عکس",-11.660487174987791],["▁მის",-11.66069221496582],["▁dib",-11.660714149475098],["ення",-11.660737991333008],["▁договор",-11.660881996154783],["▁visu",-11.66100025177002],["نس",-11.661046981811523],["▁lassen",-11.661078453063965],["що",-11.661121368408203],["umo",-11.661127090454102],["იან",-11.661145210266112],["ሬ",-11.661187171936035],["НИ",-11.66126537322998],["vano",-11.661357879
638672],["tev",-11.661405563354492],["▁Пред",-11.66142749786377],["▁defini",-11.66153621673584],["ає",-11.6617431640625],["acht",-11.661783218383787],["▁Sim",-11.661818504333496],["▁biasa",-11.661865234375],["▁बा",-11.662100791931152],["▁kru",-11.66218090057373],["▁1994",-11.662185668945312],["▁Mart",-11.662254333496094],["場合",-11.662346839904783],["मे",-11.662405014038086],["ातील",-11.662408828735352],["තා",-11.662428855895996],["▁ĝi",-11.662479400634766],["へ",-11.6625394821167],["lerinin",-11.662559509277344],["▁après",-11.662620544433594],["▁कुरा",-11.662668228149414],["▁elke",-11.662805557250977],["DO",-11.66283130645752],["▁novi",-11.66287899017334],["тя",-11.662911415100098],["那些",-11.662935256958008],["យ",-11.66300106048584],["▁gegen",-11.663023948669434],["교",-11.6632080078125],["յանը",-11.663338661193848],["▁إذا",-11.66334342956543],["▁شب",-11.66334342956543],["atan",-11.663374900817873],["നെ",-11.66339111328125],["▁anders",-11.663409233093262],["වන",-11.66347599029541],["▁свои",-11.663512229919434],["▁විසින්",-11.663559913635254],["ása",-11.663603782653809],["лася",-11.663647651672363],["ซื้อ",-11.663660049438477],["ikka",-11.66366195678711],["ék",-11.663687705993652],["中央",-11.663690567016602],["▁bud",-11.66371726989746],["▁chuyên",-11.663744926452637],["▁ond",-11.6637601852417],["iä",-11.663840293884276],["▁ఆయన",-11.663912773132324],["股",-11.663973808288574],["υρ",-11.66399383544922],["▁جمع",-11.664045333862305],["▁ለማ",-11.664141654968262],["▁stat",-11.664234161376951],["daki",-11.664292335510254],["اع",-11.66452407836914],["ફ",-11.66458797454834],["▁ní",-11.664822578430176],["▁aldrig",-11.66482639312744],["τες",-11.664835929870604],["▁Porno",-11.664851188659668],["▁የሚያ",-11.664908409118652],["▁rakyat",-11.664922714233398],["也有",-11.665106773376465],["ელ",-11.665115356445312],["▁atunci",-11.66522216796875],["▁size",-11.66528606414795],["▁meni",-11.66529369354248],["▁num",-11.66529369354248],["ため",-11.665302276611328],["▁ред",-11.665369987487791],["stup",-11.665409088134766],["\\\\",-11.66547966003418],["▁Maka",-11.66562271118164],["▁سندس",-11.665786743164062],["▁apar",-11.666033744812012],["▁ዓ",-11.666333198547363],["▁הג",-11.666345596313477],["▁ایم",-11.666462898254396],["▁usa",-11.666595458984377],["▁Villa",-11.66661262512207],["iske",-11.666650772094728],["▁voir",-11.666702270507812],["▁लेकिन",-11.666702270507812],["▁mennesker",-11.666715621948242],["▁müssen",-11.66673469543457],["тся",-11.666780471801758],["岁",-11.66679859161377],["▁Pil",-11.66685962677002],["▁यस",-11.666875839233398],["▁dựng",-11.666913032531738],["יי",-11.666973114013672],["▁Model",-11.66698932647705],["▁Sala",-11.66713809967041],["▁centra",-11.66716194152832],["如此",-11.66716480255127],["ဦး",-11.66718578338623],["▁እየ",-11.667193412780762],["▁решение",-11.667194366455078],["atori",-11.667237281799316],["ရွိ",-11.667255401611328],["▁mellan",-11.66732120513916],["▁gw",-11.66738224029541],["그",-11.667414665222168],["eng",-11.66742706298828],["gut",-11.667436599731444],["64",-11.667439460754396],["▁ضد",-11.667478561401367],["▁കെ",-11.667502403259276],["▁చాలా",-11.667600631713867],["▁한다",-11.66767692565918],["▁days",-11.667686462402344],["十分",-11.667741775512695],["Не",-11.667781829833984],["▁line",-11.667964935302734],["▁Fin",-11.668008804321287],["▁iad",-11.668102264404297],["▁beaucoup",-11.668188095092772],["▁dont",-11.66820240020752],["▁Tim",-11.668294906616213],["▁stati",-11.668378829956056],["▁Thai",-11.66840648651123],["▁visto",-11.668441772460938],["人民",-11.668477058410645],["▁जाने",-11.668535232543944],
["一样",-11.66854190826416],["ায়",-11.668641090393066],["▁Jun",-11.668652534484863],["▁ego",-11.668676376342772],["47",-11.668706893920898],["▁^",-11.668845176696776],["▁ĉe",-11.66893196105957],["toria",-11.66895866394043],["不过",-11.6690034866333],["▁ڪو",-11.669008255004885],["▁აღ",-11.66904354095459],["離",-11.669054985046388],["云",-11.669127464294434],["sını",-11.669172286987305],["▁Տ",-11.66921615600586],["▁aange",-11.669225692749023],["צו",-11.669347763061523],["▁outro",-11.669452667236328],["ମା",-11.669472694396973],["▁таких",-11.669556617736816],["uose",-11.669684410095217],["Đ",-11.669684410095217],["lice",-11.669702529907228],["▁дээр",-11.669764518737791],["siga",-11.669787406921388],["гон",-11.669857025146484],["اك",-11.669934272766112],["▁सबसे",-11.6699857711792],["▁qeyd",-11.670014381408691],["步",-11.67003059387207],["▁Galaxy",-11.670069694519045],["ሞ",-11.670090675354004],["▁koos",-11.670111656188965],["ciu",-11.670357704162598],["▁paket",-11.670404434204102],["▁’’",-11.670418739318848],["▁khó",-11.670524597167969],["▁bắt",-11.670572280883787],["▁پہلے",-11.670573234558104],["▁χρόνια",-11.670574188232422],["▁ไม่",-11.670697212219238],["▁కోసం",-11.670849800109863],["水平",-11.670860290527344],["katu",-11.670909881591797],["ندو",-11.67092227935791],["▁सी",-11.670930862426758],["▁gusto",-11.67096710205078],["▁idi",-11.671187400817873],["▁fiind",-11.671245574951172],["▁trung",-11.671273231506348],["▁suur",-11.67128849029541],["▁עוד",-11.67130184173584],["گاه",-11.671324729919434],["▁present",-11.671356201171877],["▁pit",-11.671374320983888],["רת",-11.67144775390625],["▁barədə",-11.67149257659912],["ξ",-11.671643257141112],["dt",-11.671663284301758],["▁информации",-11.67176914215088],["-15",-11.67197322845459],["▁Пе",-11.671978950500488],["ue",-11.671995162963867],["▁форма",-11.672159194946287],["след",-11.672208786010742],["▁автор",-11.672308921813965],["ഗ",-11.672318458557127],["▁která",-11.672368049621582],["▁свой",-11.67237377166748],["▁prova",-11.672405242919922],["▁بت",-11.6724214553833],["ರಾ",-11.672479629516602],["▁сказал",-11.672652244567873],["川",-11.672654151916504],["cco",-11.672710418701172],["▁жок",-11.67280101776123],["สี",-11.672813415527344],["cara",-11.672874450683594],["▁dabei",-11.67288875579834],["нка",-11.672930717468262],["jom",-11.673012733459473],["▁giao",-11.67309856414795],["dni",-11.67330837249756],["pass",-11.673328399658203],["เขา",-11.673401832580566],["▁mois",-11.673501968383787],["နိုင်",-11.673563957214355],["▁প",-11.673590660095217],["▁preko",-11.67359733581543],["mod",-11.673611640930176],["▁report",-11.673620223999023],["▁Ons",-11.673624038696287],["иться",-11.673709869384766],["ασ",-11.673750877380373],["설",-11.673788070678713],["▁pati",-11.673813819885254],["▁ახალი",-11.673835754394531],["▁povas",-11.673845291137695],["▁daugiau",-11.67385196685791],["▁kleine",-11.67385959625244],["▁nơi",-11.67386245727539],["ങ്ങ",-11.673895835876465],["▁курс",-11.67397689819336],["간",-11.674019813537598],["▁आई",-11.674078941345217],["網站",-11.674124717712402],["ето",-11.6741943359375],["▁після",-11.674474716186523],["▁kyllä",-11.67448902130127],["▁света",-11.674521446228027],["tjes",-11.674537658691406],["yor",-11.674541473388672],["▁again",-11.674600601196287],["MS",-11.67467975616455],["▁uden",-11.67475414276123],["▁الان",-11.674837112426758],["▁160",-11.674860000610352],["▁Centro",-11.674989700317385],["rė",-11.675054550170898],["▁ಆಗ",-11.675058364868164],["▁sesuai",-11.675067901611328],["夫",-11.67523193359375],["fir",-11.67524528503418],["том",-11.675339698791504]
,["ome",-11.67534351348877],["ッ",-11.675579071044922],["▁project",-11.675619125366213],["▁Global",-11.675705909729004],["▁yli",-11.675789833068848],["▁guru",-11.67579746246338],["יש",-11.675830841064451],["▁nogle",-11.675841331481934],["בר",-11.675874710083008],["▁hozzá",-11.675949096679688],["بار",-11.675955772399902],["▁haber",-11.675958633422852],["▁என",-11.676024436950684],["運動",-11.676093101501465],["▁վրա",-11.676127433776855],["5%",-11.676187515258787],["▁staan",-11.676227569580078],["ării",-11.676234245300291],["▁Mini",-11.676257133483888],["▁chuyển",-11.67625904083252],["相關",-11.676366806030272],["rb",-11.676380157470703],["▁Ø",-11.676441192626951],["kho",-11.676472663879396],["alle",-11.676570892333984],["ое",-11.676654815673828],["▁جواب",-11.676794052124023],["不過",-11.676809310913086],["ρά",-11.676902770996094],["▁dois",-11.677104949951172],["stitu",-11.677116394042969],["▁hev",-11.677132606506348],["bed",-11.677193641662598],["▁vide",-11.677194595336914],["▁20-",-11.67728328704834],["▁Wer",-11.677294731140137],["▁ಬ",-11.67745304107666],["▁kazi",-11.677576065063477],["هن",-11.677579879760742],["arak",-11.677631378173828],["▁maha",-11.677879333496094],["▁bok",-11.678117752075195],["▁polis",-11.678159713745115],["▁ჩვენ",-11.6781644821167],["设备",-11.678202629089355],["▁ember",-11.67827033996582],["อาหาร",-11.678339958190918],["נט",-11.678384780883787],["▁Pos",-11.678422927856444],["်",-11.67849063873291],["rul",-11.67851448059082],["▁1989",-11.678526878356934],["▁وزير",-11.678571701049805],["▁giáo",-11.678608894348145],["কা",-11.678681373596191],["▁oh",-11.678707122802734],["例",-11.678791999816896],["▁prieš",-11.678839683532717],["iran",-11.67885398864746],["▁бай",-11.678872108459473],["рав",-11.678959846496582],["អ",-11.678979873657228],["▁เพื่อ",-11.67898654937744],["Ө",-11.679065704345703],["▁mengenai",-11.679276466369627],["▁הס",-11.679298400878906],["▁Wij",-11.679311752319336],["▁الو",-11.67937469482422],["ពី",-11.67938232421875],["然后",-11.679415702819824],["hre",-11.679494857788086],["-17",-11.679498672485352],["县",-11.679502487182615],["ジ",-11.679548263549805],["▁ис",-11.679620742797852],["夏",-11.679630279541016],["DS",-11.679670333862305],["▁Open",-11.679815292358398],["-16",-11.679990768432615],["jó",-11.680059432983398],["რის",-11.680115699768066],["луу",-11.68013858795166],["▁хора",-11.680146217346191],["ખ",-11.680164337158203],["ford",-11.68022346496582],["घ",-11.680479049682615],["▁yol",-11.680514335632324],["▁fag",-11.680520057678224],["▁Out",-11.680546760559082],["ступ",-11.680562019348145],["▁نورو",-11.680645942687988],["▁наш",-11.680681228637695],["▁bagian",-11.680697441101074],["▁säger",-11.680804252624512],["роз",-11.680893898010254],["-6",-11.680960655212402],["يس",-11.68117618560791],["▁Game",-11.68120288848877],["大學",-11.681232452392578],["▁96",-11.681283950805664],["▁δύο",-11.68137550354004],["दि",-11.681401252746582],["▁Sand",-11.681456565856934],["цион",-11.681652069091797],["ਨੀ",-11.681684494018556],["▁сама",-11.68181610107422],["81",-11.681924819946287],["▁oktober",-11.68198013305664],["Հ",-11.682084083557127],["▁daug",-11.682194709777832],["avimo",-11.682228088378906],["▁είχε",-11.682283401489258],["cov",-11.682323455810549],["▁Saat",-11.682337760925291],["▁강",-11.682378768920898],["erek",-11.682387351989746],["bles",-11.682401657104492],["vre",-11.682435989379885],["▁kong",-11.682435989379885],["pes",-11.68244457244873],["ກັບ",-11.682454109191896],["▁BiH",-11.682456016540527],["▁пу",-11.68245792388916],["▁نشان",-11.682470321655272],["73",-11.6826333999633
8],["▁Ад",-11.68279266357422],["fest",-11.682839393615724],["▁altre",-11.682844161987305],["▁Чи",-11.682853698730469],["▁term",-11.683076858520508],["▁thing",-11.68321704864502],["dě",-11.683329582214355],["ният",-11.683391571044922],["▁luo",-11.683457374572754],["▁slu",-11.6834716796875],["еді",-11.683526992797852],["▁nchi",-11.683576583862305],["▁текст",-11.683621406555176],["▁pagal",-11.683631896972656],["дор",-11.683697700500488],["ถูก",-11.683753967285156],["ズ",-11.683834075927734],["▁servis",-11.683880805969238],["burg",-11.68391227722168],["▁fik",-11.684074401855469],["kil",-11.684096336364746],["▁nhanh",-11.68409824371338],["▁môže",-11.684188842773438],["▁siku",-11.684261322021484],["人生",-11.684318542480469],["యం",-11.68437671661377],["ส่ง",-11.684391021728516],["ઝ",-11.684419631958008],["▁mám",-11.68445110321045],["jumu",-11.684593200683594],["ıl",-11.684625625610352],["odd",-11.6846284866333],["▁egyik",-11.68474292755127],["ੂ",-11.684865951538086],["▁تھی",-11.68487548828125],["▁Кар",-11.684884071350098],["▁hilo",-11.684986114501951],["不知道",-11.685115814208984],["▁mener",-11.68521785736084],["旅游",-11.685242652893066],["ण्ड",-11.685246467590332],["mene",-11.685263633728027],["ения",-11.685302734375],["客户",-11.685306549072266],["▁bent",-11.685386657714844],["يف",-11.685418128967283],["游",-11.685495376586914],["▁bona",-11.685544967651367],["ಶ",-11.685550689697266],["▁клуб",-11.68558120727539],["▁cri",-11.685585021972656],["▁sotto",-11.68563175201416],["▁completa",-11.68568229675293],["▁род",-11.685698509216309],["▁sti",-11.685710906982422],["▁Ү",-11.685809135437012],["rap",-11.685821533203123],["▁білім",-11.685894012451172],["▁همراه",-11.68593692779541],["▁тоже",-11.685946464538574],["▁bedste",-11.68600368499756],["пан",-11.68618106842041],["隊",-11.686308860778809],["▁legge",-11.686358451843262],["▁bru",-11.68637466430664],["ិ",-11.68638801574707],["官",-11.686397552490234],["▁lahat",-11.686546325683594],["▁Produkt",-11.686553955078123],["bau",-11.686555862426758],["▁juni",-11.68657398223877],["▁29.",-11.686580657958984],["不同的",-11.686583518981934],["▁might",-11.686667442321776],["නය",-11.68679904937744],["nay",-11.686811447143556],["▁platform",-11.686901092529297],["▁આવી",-11.687026023864746],["zeit",-11.687064170837402],["ім",-11.687105178833008],["▁féidir",-11.687127113342283],["▁circa",-11.68729019165039],["ээс",-11.687312126159668],["▁Га",-11.68735122680664],["▁annet",-11.687352180480955],["▁ಮೇಲೆ",-11.68743133544922],["▁Kap",-11.687567710876465],["▁meie",-11.68757438659668],["是一个",-11.68765926361084],["▁kunna",-11.687668800354004],["nor",-11.6876802444458],["▁Nederland",-11.68776798248291],["▁ලබා",-11.687776565551758],["▁VE",-11.687848091125488],["cima",-11.687868118286133],["▁ankaŭ",-11.688035011291504],["▁hướng",-11.688352584838867],["▁apabila",-11.688372611999512],["▁jan",-11.68842887878418],["▁ін",-11.688456535339355],["▁amely",-11.688530921936035],["▁Mak",-11.688605308532717],["шта",-11.688810348510742],["▁всеки",-11.688838958740234],["▁firmy",-11.68886661529541],["ични",-11.688972473144531],["▁krav",-11.688982009887695],["ового",-11.688993453979492],["56",-11.6890287399292],["▁Shu",-11.689096450805664],["▁AR",-11.689193725585938],["▁времени",-11.68923568725586],["ระบบ",-11.689248085021973],["起来",-11.689313888549805],["was",-11.689409255981444],["▁alguns",-11.689417839050291],["wiki",-11.689431190490724],["也不",-11.689507484436035],["▁beli",-11.689642906188965],["▁izay",-11.689678192138672],["▁keep",-11.68988800048828],["▁nov",-11.689889907836914],["▁quản",-11.689939498901367]
,["▁ሰው",-11.689966201782228],["▁kasi",-11.689998626708984],["ncia",-11.69002914428711],["нии",-11.690051078796388],["лад",-11.690059661865234],["▁Music",-11.69008445739746],["лат",-11.690085411071776],["▁ম",-11.690120697021484],["▁단",-11.690218925476074],["▁komt",-11.690227508544922],["tuk",-11.690322875976562],["CC",-11.69034194946289],["уу",-11.690387725830078],["相信",-11.690766334533691],["▁bilər",-11.690800666809082],["ключ",-11.690853118896484],["▁Kha",-11.690877914428713],["▁Tot",-11.690898895263672],["nių",-11.690980911254885],["år",-11.69101905822754],["▁გ",-11.691030502319336],["▁zile",-11.691060066223145],["דה",-11.691119194030762],["▁ame",-11.691169738769531],["qua",-11.691200256347656],["质量",-11.691280364990234],["▁आफ्नो",-11.691316604614258],["▁toute",-11.691384315490724],["▁ئۇ",-11.691561698913574],["▁poti",-11.691572189331056],["tol",-11.691575050354004],["čno",-11.691587448120115],["获得",-11.69177532196045],["▁ਪਰ",-11.69179344177246],["▁unge",-11.691816329956056],["tve",-11.691845893859863],["իս",-11.691904067993164],["▁ສະ",-11.692005157470703],["▁mode",-11.692030906677246],["▁අප",-11.692082405090332],["系列",-11.692160606384276],["TT",-11.69216537475586],["▁හරි",-11.692237854003906],["▁მა",-11.69223976135254],["▁obraz",-11.69227409362793],["▁विश्व",-11.692313194274902],["▁permite",-11.692461013793944],["▁발",-11.692468643188477],["▁оста",-11.692472457885742],["▁dice",-11.692499160766602],["නේ",-11.692502975463867],["▁taga",-11.692503929138184],["▁അത്",-11.69254207611084],["▁kole",-11.692591667175291],["lā",-11.692606925964355],["▁дуже",-11.69265365600586],["▁ידי",-11.69265842437744],["▁temat",-11.692707061767578],["▁Ор",-11.692787170410156],["▁ይህ",-11.692848205566406],["▁anul",-11.692872047424316],["دان",-11.69292163848877],["arekin",-11.69309425354004],["nai",-11.6931734085083],["▁Pot",-11.69344425201416],["▁MI",-11.69346046447754],["ண",-11.693472862243652],["▁story",-11.69349765777588],["▁prote",-11.693546295166016],["يق",-11.693649291992188],["牌",-11.693714141845703],["▁Vas",-11.693878173828123],["▁በአ",-11.693923950195312],["ริ",-11.693997383117676],["annya",-11.69402027130127],["▁busca",-11.694059371948242],["ικού",-11.694146156311035],["透過",-11.694162368774414],["ประ",-11.694430351257324],["sime",-11.694554328918455],["ица",-11.694583892822266],["▁Gr",-11.694598197937012],["▁הב",-11.694619178771973],["āt",-11.69467830657959],["節",-11.694720268249512],["國家",-11.69480800628662],["top",-11.694918632507324],["▁Tips",-11.694928169250488],["ij",-11.694978713989258],["▁Wo",-11.694993019104004],["zam",-11.695037841796877],["▁moje",-11.695319175720217],["▁έ",-11.69534397125244],["▁Port",-11.695364952087402],["▁suso",-11.695427894592283],["▁چي",-11.695481300354004],["▁Start",-11.695595741271973],["▁എന്റെ",-11.695609092712402],["tuko",-11.695630073547363],["然",-11.695745468139648],["▁ബ",-11.695783615112305],["ጭ",-11.695802688598633],["級",-11.695807456970217],["ผ่าน",-11.695887565612791],["sex",-11.695893287658691],["▁добре",-11.695917129516602],["▁שלא",-11.696109771728516],["קס",-11.696142196655272],["▁1:",-11.696155548095703],["لەر",-11.696356773376465],["▁denn",-11.696362495422363],["取得",-11.6964693069458],["▁curso",-11.696481704711914],["통",-11.69651699066162],["▁Σε",-11.69668674468994],["لې",-11.696709632873535],["ání",-11.696722030639648],["▁रूप",-11.696769714355469],["▁мемлекеттік",-11.69688606262207],["ello",-11.696972846984863],["ለው",-11.69698715209961],["▁więc",-11.697128295898438],["▁bot",-11.697134017944336],["2000",-11.697193145751951],["▁forme",-11.697216033935549],["يز",
-11.697396278381348],["▁មួយ",-11.69740104675293],["▁என்",-11.697480201721191],["▁chứng",-11.6975679397583],["ış",-11.69765853881836],["▁minha",-11.697927474975586],["▁Diese",-11.6979341506958],["▁indi",-11.69804573059082],["ධ",-11.698067665100098],["▁Rus",-11.698084831237791],["▁travail",-11.698112487792969],["▁હતો",-11.698116302490234],["▁existe",-11.698195457458496],["▁Những",-11.69838047027588],["▁kartu",-11.69838523864746],["nye",-11.698436737060549],["▁campo",-11.698461532592772],["▁Telefon",-11.698468208312988],["▁ngay",-11.69849967956543],["▁организации",-11.69849967956543],["▁Bộ",-11.698568344116213],["cen",-11.69857692718506],["על",-11.69858741760254],["▁күн",-11.69865894317627],["▁ceea",-11.698712348937988],["ľa",-11.69877815246582],["助",-11.69901180267334],["▁kỳ",-11.69905948638916],["ቦ",-11.699198722839355],["▁heç",-11.699237823486328],["国内",-11.69929313659668],["િત",-11.699295043945312],["ött",-11.699345588684082],["▁tuổi",-11.699352264404297],["nner",-11.699397087097168],["tega",-11.699565887451172],["پر",-11.69960594177246],["▁haya",-11.699679374694824],["▁جاری",-11.69972324371338],["..?",-11.699867248535156],["keta",-11.699871063232422],["▁vitamin",-11.699914932250977],["ሚ",-11.699971199035645],["gni",-11.700114250183104],["▁1993",-11.700243949890137],["▁đánh",-11.700267791748049],["▁Omrop",-11.700276374816896],["▁toegang",-11.700364112854004],["け",-11.70037078857422],["▁ଏହା",-11.700422286987305],["▁Už",-11.700462341308594],["ندا",-11.700507164001465],["▁بول",-11.700569152832031],["▁thuật",-11.700571060180664],["කින්",-11.700572967529297],["▁berbagai",-11.700587272644045],["▁fő",-11.700672149658203],["ণ",-11.700675964355469],["▁futbol",-11.700705528259276],["▁iso",-11.700851440429688],["▁قتل",-11.701021194458008],["ഡി",-11.701107025146484],["hou",-11.701200485229492],["tico",-11.701220512390137],["▁Không",-11.70124053955078],["▁Vid",-11.701312065124512],["താ",-11.701397895812988],["ായ",-11.701424598693848],["료",-11.70154094696045],["▁Mel",-11.701552391052246],["▁mental",-11.70155906677246],["zz",-11.701619148254396],["▁വാ",-11.701672554016112],["ಿರುವ",-11.701696395874023],["ψ",-11.70176124572754],["▁pozna",-11.701814651489258],["강",-11.701868057250977],["▁bikin",-11.70196533203125],["▁гор",-11.702008247375488],["남",-11.702017784118652],["84",-11.702040672302246],["▁авто",-11.702059745788574],["્ય",-11.702085494995115],["ago",-11.702115058898926],["бір",-11.702186584472656],["▁mitte",-11.702266693115234],["▁computer",-11.702272415161133],["▁nghệ",-11.702412605285645],["▁ersten",-11.70260524749756],["Ка",-11.702686309814451],["▁저",-11.702688217163086],["▁життя",-11.702722549438477],["亦",-11.702725410461426],["토",-11.702728271484377],["antes",-11.702786445617676],["▁president",-11.702816009521484],["▁احمد",-11.702837944030762],["hana",-11.70285701751709],["පු",-11.703070640563965],["▁ନେଇ",-11.703152656555176],["inga",-11.7031888961792],["▁wol",-11.703259468078612],["▁бр",-11.70328140258789],["ড",-11.703383445739746],["▁rada",-11.703434944152832],["▁같은",-11.703452110290527],["ונים",-11.703471183776855],["▁yazı",-11.703533172607422],["▁tubuh",-11.703560829162598],["▁ứng",-11.70364761352539],["INA",-11.703722953796388],["mang",-11.703775405883787],["▁Czy",-11.703778266906738],["晚",-11.703853607177734],["KO",-11.703868865966797],["jana",-11.703903198242188],["▁همین",-11.703953742980955],["MAN",-11.703993797302246],["果",-11.704001426696776],["ቢ",-11.704183578491213],["▁cura",-11.704268455505373],["kse",-11.70436668395996],["தை",-11.704381942749023],["▁vu",-11.704456329345703],["▁إلا
",-11.704466819763184],["තු",-11.70462417602539],["tý",-11.704647064208984],["▁rock",-11.704682350158691],["ಗಳಿಗೆ",-11.704852104187012],["Կ",-11.704875946044922],["ать",-11.705056190490724],["▁ସହ",-11.70506191253662],["କ୍",-11.705073356628418],["▁4)",-11.705180168151855],["▁verden",-11.70533561706543],["하며",-11.705357551574709],["▁gaz",-11.705397605895996],["▁tots",-11.705426216125488],["1%",-11.705583572387695],["▁вопрос",-11.705958366394045],["▁چا",-11.7059965133667],["▁iOS",-11.70603370666504],["▁Sor",-11.706045150756836],["▁nuestra",-11.706072807312012],["▁bylo",-11.70610237121582],["▁가장",-11.706217765808104],["▁ram",-11.706256866455078],["ണി",-11.706290245056152],["▁vedere",-11.70630931854248],["▁කතා",-11.70631504058838],["▁చేయ",-11.706413269042969],["ydd",-11.706527709960938],["▁Dag",-11.70657444000244],["ierung",-11.706594467163086],["▁toimi",-11.706640243530272],["čné",-11.70671272277832],["മി",-11.706822395324709],["▁sahip",-11.706830024719238],["▁Daar",-11.706843376159668],["דר",-11.706939697265623],["牛",-11.707003593444824],["▁instrument",-11.707049369812012],["▁Minh",-11.707050323486328],["▁spe",-11.707054138183594],["▁ничего",-11.70705509185791],["Қ",-11.707064628601074],["的话",-11.7070894241333],["▁sahaja",-11.707120895385742],["▁diam",-11.707134246826172],["rita",-11.7071533203125],["▁contre",-11.707294464111328],["олог",-11.70730209350586],["▁должны",-11.707367897033691],["▁wî",-11.707377433776855],["หนึ่ง",-11.707430839538574],["▁ମଧ୍ୟ",-11.707576751708984],["▁2011.",-11.707603454589844],["▁weekend",-11.707629203796388],["αλ",-11.707704544067385],["▁mă",-11.707799911499023],["▁seja",-11.707863807678224],["אי",-11.708011627197266],["__",-11.708015441894531],["UN",-11.708060264587402],["▁alatt",-11.708139419555664],["還有",-11.708274841308594],["▁gjorde",-11.708322525024414],["ූ",-11.708388328552246],["▁pagbaba",-11.708438873291016],["▁number",-11.708465576171877],["▁پو",-11.708471298217772],["▁videre",-11.7085542678833],["▁plast",-11.708674430847168],["▁penge",-11.708884239196776],["▁està",-11.70889377593994],["▁entra",-11.708991050720217],["▁ambiente",-11.709023475646973],["현",-11.709148406982422],["▁انتخابات",-11.709198951721191],["पछि",-11.709325790405272],["全面",-11.70940113067627],["▁ਬ",-11.709458351135254],["▁बे",-11.709465980529783],["يح",-11.709567070007324],["▁livet",-11.7095947265625],["፥",-11.709835052490234],["дик",-11.709909439086914],["▁වෙලා",-11.709970474243164],["いて",-11.709972381591797],["ओ",-11.710001945495604],["หลัง",-11.710140228271484],["▁ilin",-11.710243225097656],["alam",-11.710346221923828],["▁nữ",-11.71035861968994],["▁found",-11.710373878479004],["▁समाचार",-11.710456848144531],["туу",-11.710503578186035],["▁προς",-11.710543632507324],["▁foram",-11.71058750152588],["roz",-11.710619926452637],["اري",-11.710674285888672],["▁ගන්න",-11.710830688476562],["▁CH",-11.71115779876709],["套",-11.711284637451172],["cchi",-11.711321830749512],["▁Info",-11.711356163024902],["ហ",-11.711411476135254],["▁sm",-11.71141242980957],["web",-11.711450576782228],["▁annat",-11.71147632598877],["▁aktual",-11.711493492126465],["▁घ",-11.711493492126465],["Di",-11.71149730682373],["ikan",-11.71164608001709],["なかった",-11.71169662475586],["ίου",-11.711709976196287],["ично",-11.711966514587402],["анд",-11.711987495422363],["▁पि",-11.712066650390623],["远",-11.71212100982666],["▁bizi",-11.712262153625488],["нин",-11.712300300598145],["▁Kam",-11.712580680847168],["ამ",-11.71260929107666],["▁شوند",-11.712642669677734],["ink",-11.712671279907228],["ności",-11.712693214416504],["စား",-11.71277
6184082031],["olog",-11.712821006774902],["需",-11.71288013458252],["▁88",-11.712936401367188],["▁તમે",-11.712946891784668],["▁км",-11.712963104248049],["▁కి",-11.713086128234863],["材料",-11.713092803955078],["▁Į",-11.713252067565918],["णा",-11.713263511657717],["權",-11.713274002075195],["אל",-11.7134428024292],["ANG",-11.713489532470703],["evi",-11.71351146697998],["ალ",-11.713608741760254],["产业",-11.71363353729248],["▁тра",-11.713685035705566],["几",-11.713752746582031],["ום",-11.713754653930664],["bay",-11.713796615600586],["ጀ",-11.713817596435549],["▁Gran",-11.713821411132812],["mā",-11.713824272155762],["dai",-11.713838577270508],["ագ",-11.713842391967772],["▁sir",-11.713960647583008],["riz",-11.71396255493164],["க்கும்",-11.71399974822998],["▁United",-11.71403694152832],["▁verður",-11.71419620513916],["▁Ан",-11.71430492401123],["▁Jadi",-11.714315414428713],["▁pamoja",-11.714324951171877],["ହ",-11.714426040649414],["▁7-",-11.714463233947754],["1000",-11.714611053466797],["▁कार्यालय",-11.714640617370604],["▁TR",-11.71473503112793],["ologia",-11.714739799499512],["▁خوب",-11.714788436889648],["▁avut",-11.714881896972656],["심",-11.714987754821776],["▁ấy",-11.715126037597656],["▁pé",-11.715160369873049],["ръ",-11.715200424194336],["ók",-11.715201377868652],["▁मह",-11.71530532836914],["▁चुनाव",-11.715326309204102],["▁Ég",-11.715346336364746],["▁fr",-11.7153959274292],["▁teknologi",-11.71542263031006],["ສະ",-11.71552848815918],["лом",-11.71589183807373],["ଜ",-11.71604061126709],["▁nila",-11.716119766235352],["▁lát",-11.716243743896484],["Մ",-11.716323852539062],["▁많은",-11.716375350952148],["ήσει",-11.716444969177246],["軍",-11.716510772705078],["▁fikk",-11.71663761138916],["▁राम",-11.71664333343506],["တြင္",-11.716650009155272],["ൈ",-11.716669082641602],["▁mand",-11.7166748046875],["▁human",-11.71674633026123],["जन",-11.716794967651367],["book",-11.71688175201416],["áció",-11.71690845489502],["当然",-11.71697998046875],["标准",-11.71698760986328],["▁segunda",-11.717081069946287],["임",-11.7171049118042],["だけ",-11.717123031616213],["改善",-11.717275619506836],["▁invest",-11.71728801727295],["▁faktor",-11.717374801635742],["यो",-11.717458724975586],["తా",-11.717559814453123],["▁explica",-11.717596054077148],["▁لها",-11.717689514160156],["▁ඉ",-11.717690467834473],["dri",-11.717729568481444],["เมื่อ",-11.717740058898926],["▁77",-11.717768669128418],["ller",-11.717855453491213],["ád",-11.717897415161133],["час",-11.717972755432127],["▁عدم",-11.717985153198242],["ники",-11.71804428100586],["ток",-11.71804904937744],["▁დიდი",-11.71816349029541],["▁nhỏ",-11.718255996704102],["▁mumkin",-11.71828842163086],["anka",-11.71831512451172],["▁eius",-11.718341827392578],["▁кол",-11.718358039855955],["▁സാ",-11.718360900878906],["Su",-11.718412399291992],["▁menga",-11.718416213989258],["prac",-11.718439102172852],["▁части",-11.71845531463623],["▁ես",-11.718491554260254],["▁бұл",-11.718538284301758],["▁வேண்டும்",-11.718562126159668],["▁selepas",-11.718572616577148],["▁Văn",-11.71874713897705],["▁दि",-11.718844413757324],["▁kuru",-11.718945503234863],["rou",-11.71908187866211],["ňa",-11.719331741333008],["მის",-11.71938133239746],["љ",-11.719440460205078],["解决",-11.719517707824709],["▁சு",-11.719599723815918],["▁ainsi",-11.719671249389648],["sine",-11.71993923187256],["▁جون",-11.71994400024414],["▁кан",-11.719980239868164],["▁деца",-11.720029830932615],["▁уу",-11.720051765441896],["eller",-11.720065116882324],["▁tell",-11.720245361328123],["доб",-11.720300674438477],["▁rem",-11.720344543457031],["▁Big",-11.72043800354004],[
"组",-11.720459938049316],["▁dieses",-11.720779418945312],["uksen",-11.720799446105955],["cev",-11.720847129821776],["▁ਕ",-11.720906257629396],["anga",-11.72099494934082],["ders",-11.72102165222168],["▁drei",-11.721174240112305],["જી",-11.721268653869627],["▁भाग",-11.721358299255373],["హ",-11.721402168273926],["▁osoba",-11.721480369567873],["▁birlikte",-11.721503257751465],["ισ",-11.721517562866213],["粉",-11.72165298461914],["ări",-11.721678733825684],["69",-11.721765518188477],["▁perfekt",-11.721766471862791],["▁XX",-11.721772193908691],["แต่",-11.721794128417969],["▁BO",-11.721929550170898],["▁здоров",-11.721976280212402],["▁Casino",-11.72221851348877],["▁будут",-11.7222318649292],["λή",-11.722295761108398],["చ",-11.722299575805664],["▁perquè",-11.722329139709473],["kta",-11.722469329833984],["掉",-11.72250747680664],["▁bh",-11.722575187683104],["53",-11.722593307495115],["▁luôn",-11.722675323486328],["acak",-11.72276496887207],["оп",-11.722872734069824],["leme",-11.722902297973633],["▁Page",-11.72297477722168],["▁práce",-11.72297477722168],["▁கு",-11.722990036010742],["▁پوء",-11.722992897033691],["▁හ",-11.723212242126465],["עת",-11.723254203796388],["▁Asia",-11.723270416259766],["▁그리고",-11.723294258117676],["▁потому",-11.723502159118652],["▁Now",-11.72352695465088],["▁Stadt",-11.723630905151367],[")。",-11.72368049621582],["부터",-11.723697662353516],["▁hag",-11.723764419555664],["▁Anne",-11.72380542755127],["▁omdat",-11.723835945129396],["isesti",-11.72389316558838],["ють",-11.723920822143556],["▁Business",-11.724085807800291],["志",-11.724139213562012],["▁kilo",-11.724148750305176],["▁ಕನ್ನಡ",-11.724207878112791],["▁رئیس",-11.724217414855955],["▁jetë",-11.724225044250488],["RO",-11.724343299865724],["Un",-11.724345207214355],["ντ",-11.72438621520996],["儿",-11.724416732788086],["ଇ",-11.724656105041504],["▁Donec",-11.724692344665527],["ቸው",-11.72477912902832],["ość",-11.724817276000977],["ax",-11.724834442138672],["升",-11.72497844696045],["rika",-11.725052833557127],["▁انهن",-11.725092887878418],["▁teori",-11.725127220153809],["▁විට",-11.725183486938477],["kü",-11.72536563873291],["▁جهان",-11.725384712219238],["▁нещо",-11.725420951843262],["جر",-11.725444793701172],["ез",-11.725522994995115],["▁ວ່າ",-11.725577354431152],["▁kjer",-11.725594520568848],["нима",-11.725720405578612],["▁movie",-11.725723266601562],["▁bele",-11.725737571716309],["ਅ",-11.725738525390623],["τό",-11.725793838500977],["▁Paulo",-11.725893020629885],["rlo",-11.725918769836426],["▁était",-11.725933074951172],["ću",-11.72597599029541],["▁گو",-11.72599983215332],["▁করুন",-11.726109504699709],["▁Kad",-11.726375579833984],["▁mobile",-11.726410865783691],["зо",-11.72661018371582],["▁nghĩ",-11.726638793945312],["特別",-11.726669311523438],["λο",-11.72670078277588],["▁افزایش",-11.726739883422852],["क्ष",-11.726760864257812],["▁vaše",-11.72677230834961],["ತೆ",-11.726873397827148],["ků",-11.726877212524414],["▁Band",-11.726898193359377],["▁personer",-11.726978302001951],["▁เมื่อ",-11.726990699768066],["elu",-11.727044105529783],["▁olmak",-11.727325439453123],["ור",-11.72734260559082],["▁Cap",-11.72734260559082],["▁trợ",-11.72738265991211],["gul",-11.727391242980955],["нер",-11.727401733398438],["ém",-11.727411270141602],["▁iaitu",-11.727445602416992],["▁Ва",-11.727551460266112],["mut",-11.727554321289062],["▁газар",-11.727593421936035],["▁Hem",-11.727604866027832],["దా",-11.72771453857422],["▁OK",-11.72776222229004],["▁informa",-11.727774620056152],["▁ನಿ",-11.727834701538086],["▁Shah",-11.7278413772583],["▁sedikit",-11.72800636291504],
["▁pap",-11.728020668029783],["ене",-11.728050231933594],["▁heute",-11.728078842163086],["キ",-11.728078842163086],["▁pasta",-11.728087425231934],["כה",-11.72811222076416],["人才",-11.728201866149902],["▁어",-11.728226661682127],["ડા",-11.728245735168455],["্যা",-11.72837734222412],["▁muka",-11.728378295898438],["▁sap",-11.728418350219728],["▁tục",-11.728426933288574],["gaan",-11.728443145751951],["pada",-11.72844696044922],["rz",-11.728497505187988],["gus",-11.728548049926758],["şti",-11.728574752807615],["▁Kosovës",-11.72864055633545],["▁روش",-11.728641510009766],["▁zien",-11.72867202758789],["▁ව",-11.728726387023926],["行政",-11.728734970092772],["▁Tôi",-11.728793144226074],["さん",-11.728872299194336],["‘‘",-11.728997230529783],["▁tích",-11.729007720947266],["▁좋은",-11.729042053222656],["ତି",-11.729079246520996],["则",-11.729171752929688],["ანი",-11.729180335998535],["发生",-11.729185104370115],["▁xã",-11.729191780090332],["ۋ",-11.72938632965088],["▁natura",-11.729414939880373],["▁Hol",-11.729548454284668],["▁ደግሞ",-11.729584693908691],["▁лучше",-11.729663848876951],["ย์",-11.729689598083496],["▁resto",-11.72970485687256],["njem",-11.729723930358888],["▁body",-11.729729652404783],["になって",-11.729850769042969],["▁cualquier",-11.729891777038574],["▁director",-11.729955673217772],["հ",-11.729963302612305],["▁ilmu",-11.729998588562012],["idh",-11.730182647705078],["▁AN",-11.730217933654783],["▁часа",-11.730316162109377],["فة",-11.730319023132324],["更新",-11.73036289215088],["▁आता",-11.730385780334473],["▁yet",-11.730425834655762],["▁Gaeilge",-11.730535507202148],["▁private",-11.730567932128906],["▁صفحه",-11.730613708496094],["▁fot",-11.730640411376951],["▁kila",-11.730645179748535],["▁آیا",-11.730884552001951],["▁company",-11.730891227722168],["jë",-11.730920791625977],["▁participar",-11.731042861938477],["sik",-11.731093406677246],["▁bør",-11.731149673461914],["▁समस्या",-11.73119831085205],["▁الش",-11.731287002563477],["▁inay",-11.731304168701172],["lles",-11.731340408325195],["αρ",-11.731362342834473],["▁όταν",-11.731416702270508],["▁kurang",-11.73148250579834],["▁moral",-11.731581687927246],["aci",-11.731649398803713],["ур",-11.731849670410156],["▁grote",-11.731904029846191],["્",-11.732007026672363],["Ն",-11.732072830200195],["▁նա",-11.732072830200195],["шен",-11.732086181640623],["▁Entre",-11.732135772705078],["遊",-11.732234001159668],["▁Bạn",-11.73224639892578],["▁কি",-11.732270240783691],["▁veldig",-11.732314109802246],["▁εξ",-11.732382774353027],["ವಾದ",-11.73261547088623],["▁Long",-11.73265266418457],["ús",-11.732654571533203],["▁فيه",-11.7327241897583],["▁quindi",-11.732744216918944],["yicha",-11.732748031616213],["ální",-11.732769012451172],["루",-11.73279857635498],["ሱ",-11.73289680480957],["▁dnes",-11.732932090759276],["▁DAN",-11.733186721801758],["ağı",-11.73320770263672],["んだ",-11.73320770263672],["▁фото",-11.733343124389648],["▁दिल्ली",-11.733393669128418],["▁berada",-11.733407020568848],["ARA",-11.733424186706545],["▁ново",-11.733449935913086],["ило",-11.733560562133787],["inne",-11.733561515808104],["avo",-11.733566284179688],["7%",-11.733656883239746],["чна",-11.733920097351074],["jot",-11.733936309814451],["▁kiuj",-11.733939170837402],["▁Kir",-11.734050750732422],["▁later",-11.734174728393556],["vala",-11.734197616577148],["▁Rock",-11.734282493591309],["ติ",-11.734367370605469],["rä",-11.734436988830566],["sted",-11.734444618225098],["mari",-11.734492301940918],["必须",-11.73459529876709],["▁සිදු",-11.734611511230469],["▁Ook",-11.734644889831545],["▁буй",-11.734735488891602],["▁Porn",-11.7
34808921813965],["破",-11.734891891479492],["▁hết",-11.73503589630127],["られる",-11.735193252563477],["▁محل",-11.735321998596191],["▁анти",-11.735352516174316],["▁family",-11.735381126403809],["▁kommen",-11.735383987426758],["▁ව්",-11.73547077178955],["▁CON",-11.73548412322998],["пад",-11.735496520996094],["aika",-11.735603332519531],["資訊",-11.73560619354248],["झ",-11.735873222351074],["▁rent",-11.735883712768556],["▁diğer",-11.735939979553224],["▁macam",-11.735984802246094],["吉",-11.736196517944336],["▁tho",-11.736213684082031],["ária",-11.73623752593994],["▁mạnh",-11.736329078674316],["▁ຈະ",-11.736353874206545],["teri",-11.73644733428955],["jm",-11.73651885986328],["▁Ef",-11.736560821533203],["▁ком",-11.736652374267578],["▁kiến",-11.736656188964844],["ेको",-11.736687660217283],["▁belə",-11.736705780029297],["ကာ",-11.73671817779541],["beli",-11.73689079284668],["▁elektron",-11.736968994140623],["治",-11.736978530883787],["▁ತ",-11.73698902130127],["▁ମା",-11.736995697021484],["▁üks",-11.73708438873291],["费",-11.737197875976562],["рин",-11.737215995788574],["its",-11.73723030090332],["ような",-11.737250328063965],["▁VIDEO",-11.737381935119627],["▁ఉ",-11.737529754638672],["ıyor",-11.73759937286377],["ific",-11.737735748291016],["▁December",-11.73785400390625],["tima",-11.737865447998049],["▁Ind",-11.737892150878906],["▁seluruh",-11.737918853759766],["iteit",-11.737991333007812],["學習",-11.73800277709961],["▁Tun",-11.738014221191406],["ಪ್",-11.738262176513672],["SC",-11.738276481628418],["▁условия",-11.738333702087402],["▁իսկ",-11.738495826721191],["价",-11.738616943359377],["職",-11.738646507263184],["اور",-11.73871612548828],["더",-11.738758087158203],["కో",-11.73878574371338],["UR",-11.738808631896973],["▁Има",-11.738810539245604],["▁hol",-11.73882293701172],["傳",-11.73884105682373],["lea",-11.738873481750488],["abi",-11.738885879516602],["▁utama",-11.738890647888184],["▁যে",-11.738980293273926],["期待",-11.739007949829102],["▁बदल",-11.739017486572266],["▁якщо",-11.739187240600586],["▁ולא",-11.739428520202637],["▁thủ",-11.73949146270752],["▁vaikka",-11.739553451538086],["خت",-11.739631652832031],["▁penis",-11.739633560180664],["ndra",-11.73966121673584],["yam",-11.739672660827637],["රි",-11.739742279052734],["өө",-11.73990535736084],["सम्म",-11.739978790283203],["▁اش",-11.740001678466797],["tang",-11.74020290374756],["▁تیم",-11.740241050720217],["ська",-11.740267753601074],["ጽ",-11.740310668945312],["▁gjatë",-11.740381240844728],["ців",-11.740402221679688],["▁կողմից",-11.7404203414917],["▁Nếu",-11.740436553955078],["▁ਪੰਜਾਬ",-11.740479469299316],["ned",-11.74050521850586],["Ni",-11.74050998687744],["▁একটি",-11.740642547607422],["▁power",-11.740717887878418],["▁тема",-11.74074935913086],["каз",-11.740837097167969],["כות",-11.741002082824709],["석",-11.741033554077148],["▁kerajaan",-11.741048812866213],["▁processo",-11.741050720214844],["ಿಸಿ",-11.74108600616455],["▁خانه",-11.741103172302246],["77",-11.741116523742676],["▁شا",-11.741307258605955],["▁ਤਾਂ",-11.741329193115234],["▁будь",-11.741453170776367],["cję",-11.741469383239746],[":「",-11.741518020629885],["ழ",-11.741575241088867],["ху",-11.741578102111816],["▁ак",-11.74160385131836],["▁ហើយ",-11.741650581359863],[".11.",-11.741666793823242],["▁kuitenkin",-11.741700172424316],["▁mūsų",-11.741707801818848],["▁ئە",-11.74170970916748],["▁США",-11.741731643676758],["▁tych",-11.741884231567385],["ସ୍",-11.7418851852417],["▁gure",-11.741897583007812],["кен",-11.741952896118164],["银行",-11.741971015930176],["▁interior",-11.741976737976074],["▁تعداد",-11.741999626159
668],["▁zwischen",-11.742009162902832],["dien",-11.74202823638916],["zás",-11.742080688476562],["▁був",-11.742157936096191],["▁kishte",-11.742164611816406],["▁absolut",-11.742201805114746],["▁Европа",-11.742227554321287],["▁possibile",-11.742335319519045],["▁твор",-11.74237060546875],["オ",-11.742383003234863],["ming",-11.742589950561523],["▁energie",-11.742631912231444],["fil",-11.74266529083252],["itaj",-11.742701530456545],["аны",-11.742778778076172],["▁lähe",-11.74280834197998],["uhan",-11.742813110351562],["▁indica",-11.742905616760254],["▁zaradi",-11.742939949035645],["▁einmal",-11.743074417114258],["▁Başkanı",-11.743082046508787],["▁τ",-11.743179321289062],["▁göz",-11.743240356445312],["边",-11.743267059326172],["▁남",-11.743325233459473],["▁بشكل",-11.743338584899902],["▁.....",-11.743437767028809],["▁Prezident",-11.743457794189451],["غان",-11.743537902832031],["▁superior",-11.743555068969728],["▁thay",-11.743571281433104],["ník",-11.74359130859375],["ಲಾ",-11.74367332458496],["▁ପ୍ର",-11.74370574951172],["▁does",-11.743756294250488],["▁يت",-11.743800163269045],["66",-11.744027137756348],["ယူ",-11.744057655334473],["رن",-11.744102478027344],["പ്",-11.74414348602295],["▁España",-11.744171142578123],["AT",-11.744274139404297],["▁nin",-11.744370460510254],["лады",-11.744439125061035],["pha",-11.744462966918944],["▁menge",-11.744574546813965],["▁DU",-11.74459171295166],["▁ብ",-11.744732856750488],["▁μετα",-11.744745254516602],["▁tru",-11.744775772094728],["cción",-11.744900703430176],["▁کړئ",-11.744912147521973],["ēja",-11.744946479797363],["િક",-11.7449951171875],["ளி",-11.745126724243164],["ቲ",-11.745150566101074],["erar",-11.745265007019045],["▁persoas",-11.745301246643066],["▁حول",-11.745302200317385],["▁komentar",-11.745370864868164],["говор",-11.745405197143556],["▁حساب",-11.745471954345703],["▁idag",-11.745534896850586],["▁James",-11.745598793029783],["ають",-11.7456693649292],["▁minutes",-11.745758056640623],["ရှိ",-11.745773315429688],["武",-11.745795249938965],["▁друга",-11.745814323425291],["▁möglich",-11.74595069885254],["صف",-11.746054649353027],["▁günü",-11.746079444885254],["պ",-11.74610710144043],["资源",-11.746111869812012],["န္",-11.746212005615234],["szy",-11.746249198913574],["▁Sve",-11.746294975280762],["اي",-11.746297836303713],["▁kirin",-11.746338844299316],["▁celo",-11.746374130249023],["▁ون",-11.746545791625977],["雙",-11.746573448181152],["▁gør",-11.746816635131836],["▁නැහැ",-11.746831893920898],["ստ",-11.74684238433838],["ining",-11.746954917907717],["假",-11.74701690673828],["శ",-11.747088432312012],["tec",-11.747098922729492],["ιο",-11.747115135192873],["▁हाम्रो",-11.747151374816896],["▁भएका",-11.747157096862791],["edi",-11.747209548950195],["▁δε",-11.747209548950195],["เล่น",-11.747247695922852],["▁Elle",-11.747376441955566],["02.",-11.747389793395996],["▁dost",-11.747417449951172],["▁boek",-11.747594833374023],["etik",-11.747628211975098],["ију",-11.747657775878906],["стар",-11.747787475585938],["ット",-11.747841835021973],["YA",-11.747895240783691],["▁६",-11.748062133789062],["ských",-11.748336791992188],["▁کیلئے",-11.748441696166992],["سی",-11.748442649841309],["FA",-11.748559951782228],["avi",-11.748607635498049],["▁להיות",-11.748616218566896],["ип",-11.748644828796388],["▁іс",-11.74869441986084],["ivi",-11.74876594543457],["▁đặc",-11.748766899108888],["▁مرکز",-11.74884033203125],["。“",-11.748889923095703],["坐",-11.748957633972168],["lg",-11.748971939086914],["шко",-11.749181747436523],["IR",-11.749256134033203],["ڙ",-11.749275207519531],["շ",-11.749313354492188],["▁
iku",-11.749342918395996],["idas",-11.74937343597412],["лин",-11.749403953552246],["▁باعث",-11.749409675598145],["▁لن",-11.749534606933594],["uksia",-11.74960708618164],["σης",-11.749710083007812],["ٌ",-11.749732971191406],["ीत",-11.749759674072266],["▁exp",-11.749826431274414],["▁¡",-11.74982738494873],["▁ച",-11.749886512756348],["▁गरी",-11.74994659423828],["▁келген",-11.749951362609863],["lî",-11.749972343444824],["裝",-11.750014305114746],["lut",-11.750099182128906],["tő",-11.750229835510254],["лив",-11.750251770019531],["ంచ",-11.750264167785645],["▁куп",-11.750357627868652],["ഴ",-11.750421524047852],["▁Europe",-11.75053882598877],["vē",-11.750642776489258],["▁తెలుగు",-11.750699996948242],["gem",-11.750762939453123],["▁barang",-11.750789642333984],["ttiin",-11.750863075256348],["▁ምን",-11.75087070465088],["▁оваа",-11.750975608825684],["▁пам",-11.751017570495604],["▁человека",-11.75124740600586],["▁leurs",-11.75125789642334],["zd",-11.751324653625488],["▁တ",-11.751327514648438],["▁papa",-11.751335144042969],["▁אשר",-11.751340866088867],["sına",-11.75134563446045],["架",-11.751514434814451],["қы",-11.751523971557615],["▁مد",-11.75155544281006],["龙",-11.751626014709473],["زم",-11.751660346984863],["▁درباره",-11.751672744750977],["▁Waziri",-11.75167465209961],["▁қа",-11.75171947479248],["▁Website",-11.751752853393556],["▁көр",-11.751806259155272],["ját",-11.751885414123535],["▁ಕೆ",-11.751923561096191],["▁часто",-11.751953125],["▁skriver",-11.751978874206545],["▁vårt",-11.752029418945312],["▁тебе",-11.75207233428955],["ტო",-11.752124786376951],["▁Bau",-11.752153396606444],["dığı",-11.752164840698242],["▁위한",-11.752175331115724],["lur",-11.752180099487305],["なく",-11.75233268737793],["čen",-11.752341270446776],["▁vele",-11.752373695373535],["▁sco",-11.752388954162598],["手机",-11.75247859954834],["TER",-11.752497673034668],["јте",-11.75256061553955],["▁fina",-11.752572059631348],["ово",-11.752575874328612],["เวลา",-11.752633094787598],["チ",-11.75263786315918],["▁dulu",-11.752663612365724],["สามารถ",-11.75271987915039],["▁dalle",-11.752763748168944],["▁שלי",-11.752917289733888],["▁ನಮ್ಮ",-11.752965927124023],["ทุก",-11.752972602844238],["▁মা",-11.753057479858398],["▁quali",-11.75308322906494],["部门",-11.753098487854004],["▁oku",-11.753124237060549],["رى",-11.753151893615724],["ады",-11.753304481506348],["97",-11.75331211090088],["▁muu",-11.753345489501951],["wing",-11.753477096557615],["හි",-11.753595352172852],["▁Ә",-11.753605842590332],["මා",-11.753628730773926],["▁הא",-11.753790855407717],["▁அந்த",-11.753813743591309],["电影",-11.753826141357422],["▁mukha",-11.7538480758667],["fn",-11.753988265991213],["ních",-11.754035949707031],["▁actividades",-11.754035949707031],["▁minim",-11.75404930114746],["▁George",-11.75413417816162],["▁liber",-11.754196166992188],["▁पी",-11.754199028015137],["推薦",-11.754257202148438],["นิ",-11.75437068939209],["▁dies",-11.754430770874023],["▁ได้",-11.754467964172363],["els",-11.754559516906738],["▁дана",-11.75462818145752],["▁urma",-11.754668235778809],["eix",-11.7546968460083],["▁anterior",-11.754716873168944],["▁bri",-11.755071640014648],["وز",-11.755178451538086],["▁juli",-11.7551851272583],["▁നിന്നും",-11.755295753479004],["ļu",-11.755352973937988],["▁Your",-11.755398750305176],["▁потом",-11.755436897277832],["▁одного",-11.755441665649414],["▁որը",-11.755454063415527],["ध",-11.755568504333496],["нем",-11.755575180053713],["▁76",-11.755586624145508],["▁Bila",-11.755608558654783],["最高",-11.755614280700684],["คือ",-11.755632400512695],["▁potom",-11.755666732788086],["ують",
-11.755722999572754],["ႀကီး",-11.755746841430664],["wit",-11.75582504272461],["▁farklı",-11.756010055541992],["▁97",-11.756046295166016],["迷",-11.756101608276367],["izar",-11.756118774414062],["▁đạo",-11.756216049194336],["▁realiz",-11.756303787231444],["tys",-11.756335258483888],["▁Era",-11.756349563598633],["▁буду",-11.75635051727295],["ჩ",-11.75635814666748],["▁pek",-11.756367683410645],["▁Kunden",-11.756375312805176],["核",-11.756391525268556],["▁ਵਿ",-11.756446838378906],["bha",-11.756475448608398],["▁będą",-11.756540298461914],["▁municipal",-11.756540298461914],["වල",-11.756547927856444],["uzi",-11.756552696228027],["▁اعلام",-11.756563186645508],["▁flu",-11.756712913513184],["▁మన",-11.756721496582031],["▁recursos",-11.756725311279297],["变",-11.756771087646484],["출",-11.756784439086914],["楼",-11.756821632385254],["▁lekker",-11.756863594055176],["lana",-11.75694751739502],["pozi",-11.756996154785156],["▁When",-11.757112503051758],["▁اعلان",-11.757193565368652],["GA",-11.757214546203612],["▁dias",-11.757351875305176],["▁Bis",-11.757360458374023],["ାଇ",-11.757418632507324],["▁Ру",-11.757431030273438],["▁Gro",-11.757484436035156],["への",-11.7575044631958],["▁מע",-11.757614135742188],["מש",-11.757716178894045],["▁match",-11.757824897766112],["▁حسن",-11.757895469665527],["▁Ν",-11.75798797607422],["வில்",-11.757996559143066],["▁Parlament",-11.75802993774414],["ទ",-11.758070945739746],["▁vaid",-11.758082389831545],["▁sla",-11.758086204528809],["▁Klik",-11.758136749267578],["經",-11.75819492340088],["▁Mỹ",-11.758198738098145],["▁нар",-11.758209228515623],["▁kombin",-11.75827407836914],["▁والأ",-11.758293151855469],["▁langt",-11.75833511352539],["ése",-11.758400917053224],["▁aynı",-11.758407592773438],["积极",-11.758448600769045],["▁aman",-11.758452415466309],["▁350",-11.75858211517334],["▁regul",-11.758641242980955],["▁своей",-11.758665084838867],["▁sav",-11.758700370788574],["ច",-11.75871753692627],["సీ",-11.758756637573242],["론",-11.758806228637695],["▁spune",-11.75883960723877],["▁84",-11.75891399383545],["▁Email",-11.75902271270752],["ítás",-11.75903034210205],["▁පත්",-11.759051322937012],["94",-11.759102821350098],["▁ਨਾ",-11.75914478302002],["лага",-11.759157180786133],["▁akt",-11.75918197631836],["▁Ön",-11.759269714355469],["त्र",-11.759333610534668],["ುತ್ತದೆ",-11.759371757507324],["▁mong",-11.759469985961914],["rke",-11.759540557861328],["▁باش",-11.75959587097168],["▁ត្រូវ",-11.759657859802246],["rö",-11.75971221923828],["ђа",-11.759721755981444],["љу",-11.75979995727539],["ത്തി",-11.7598876953125],["ଛି",-11.759928703308104],["▁71",-11.759958267211914],["▁filme",-11.759969711303713],["lys",-11.759973526000977],["▁झ",-11.760013580322266],["▁центр",-11.760015487670898],["शन",-11.760026931762695],["သည္",-11.76004409790039],["▁Mann",-11.76006317138672],["▁धेरै",-11.760124206542969],["医院",-11.760174751281738],["▁läbi",-11.760205268859863],["nyi",-11.760251998901367],["▁کال",-11.760329246520996],["▁نامه",-11.760398864746094],["▁également",-11.76045036315918],["一定",-11.76046371459961],["▁Một",-11.760504722595217],["▁period",-11.76055145263672],["์",-11.760614395141602],["▁الك",-11.760647773742676],["▁которая",-11.760781288146973],["▁studi",-11.76086711883545],["исти",-11.760882377624512],["▁idee",-11.760892868041992],["▁73",-11.760955810546877],["ਤੀ",-11.76103687286377],["酸",-11.761068344116213],["▁amit",-11.76119613647461],["дна",-11.76119899749756],["▁Store",-11.761211395263672],["▁உள்ள",-11.76122760772705],["æl",-11.761276245117188],["▁ወ",-11.76130199432373],["汽车",-11.761393547058104],["աս",-11.76
1439323425291],["▁নিয়ে",-11.761451721191406],["▁English",-11.761503219604492],["▁wala",-11.76155948638916],["額",-11.761585235595703],["▁nagu",-11.761640548706056],["▁הזה",-11.761737823486328],["▁pensa",-11.76175308227539],["▁suite",-11.761781692504885],["▁xây",-11.761794090270996],["▁Christian",-11.761906623840332],["▁ekstra",-11.761911392211914],["▁alors",-11.76192855834961],["uran",-11.761980056762695],["▁kandidat",-11.762049674987791],["sí",-11.762195587158203],["▁bakal",-11.762206077575684],["ことが",-11.762269020080566],["▁orice",-11.762285232543944],["▁konto",-11.762300491333008],["jai",-11.762358665466309],["▁idő",-11.762406349182127],["acija",-11.762486457824709],["entes",-11.762505531311035],["dje",-11.762543678283691],["▁lov",-11.762569427490234],["▁gelen",-11.76258659362793],["ђе",-11.76260471343994],["層",-11.76262378692627],["▁punkt",-11.762645721435549],["▁اجتماعی",-11.76270580291748],["▁бала",-11.76274299621582],["▁파",-11.762772560119627],["▁ਜ",-11.76282024383545],["ори",-11.762850761413574],["▁coup",-11.762890815734863],["▁ಆದರೆ",-11.763070106506348],["▁Ul",-11.763104438781738],["recht",-11.763131141662598],["▁Thomas",-11.763301849365234],["ସି",-11.763311386108398],["▁plek",-11.76345443725586],["oti",-11.763550758361816],["тал",-11.763684272766112],["खा",-11.763690948486328],["Τ",-11.763702392578123],["gina",-11.763742446899414],["▁outra",-11.763760566711426],["zal",-11.763872146606444],["▁ні",-11.763904571533203],["▁કોઈ",-11.764076232910156],["▁ส",-11.764171600341797],["▁hwn",-11.764193534851074],["ลา",-11.764204025268556],["▁ຄວາມ",-11.764219284057615],["▁virus",-11.764262199401855],["▁proceso",-11.764402389526367],["念",-11.764471054077148],["▁univers",-11.764496803283691],["чен",-11.76453971862793],["喜欢",-11.764666557312012],["▁Pakistan",-11.764795303344728],["▁चार",-11.764815330505373],["▁uy",-11.764885902404783],["▁chur",-11.76490592956543],["▁oro",-11.76491641998291],["▁refer",-11.76494312286377],["▁wiele",-11.764975547790527],["▁آمریکا",-11.765023231506348],["滿",-11.76516342163086],["sione",-11.765213966369627],["▁طلب",-11.765363693237305],["▁astfel",-11.76536464691162],["AC",-11.765387535095217],["▁bunu",-11.765396118164062],["▁zoals",-11.76541805267334],["ದೆ",-11.765442848205566],["дав",-11.765453338623049],["Ke",-11.76545524597168],["▁hata",-11.765480041503906],["rci",-11.765512466430664],["Κ",-11.765609741210938],["▁əsas",-11.76562786102295],["▁വര്",-11.765738487243652],["寫",-11.765777587890623],["背",-11.765777587890623],["▁kena",-11.765888214111328],["pata",-11.765950202941896],["▁dünya",-11.765963554382324],["lož",-11.76602554321289],["र्थ",-11.766033172607422],["▁Mun",-11.766040802001951],["▁verdi",-11.766115188598633],["isation",-11.766265869140623],["▁član",-11.76645278930664],["▁කිරීමට",-11.766487121582031],["▁سخت",-11.766498565673828],["航",-11.766551971435549],["人们",-11.766571044921877],["▁biến",-11.766613006591797],["▁всё",-11.766629219055176],["▁kuch",-11.76667594909668],["守",-11.766676902770996],["▁tempor",-11.766683578491213],["lük",-11.76676082611084],["▁both",-11.766767501831056],["▁8-",-11.766860008239746],["▁lege",-11.766879081726074],["เล",-11.766919136047363],["▁चल",-11.76694679260254],["拥有",-11.766958236694336],["ונות",-11.767131805419922],["ški",-11.767146110534668],["фи",-11.767168045043944],["ရဲ႕",-11.767229080200195],["▁mati",-11.767441749572754],["력",-11.767566680908203],["这样的",-11.76757526397705],["чих",-11.767589569091797],["▁evento",-11.76773166656494],["ը՝",-11.76776123046875],["▁svoju",-11.7677640914917],["▁ಸಂ",-11.767892837524414],["影響",-11
.767901420593262],["▁daw",-11.767906188964844],["නු",-11.767909049987791],["船",-11.76796531677246],["kl",-11.76796817779541],["노",-11.768014907836914],["易",-11.768022537231444],["ये",-11.768221855163574],["▁illa",-11.768235206604004],["科学",-11.768291473388672],["▁respect",-11.768378257751465],["▁العربية",-11.768497467041016],["ερ",-11.76850700378418],["▁Amazon",-11.768564224243164],["sht",-11.768593788146973],["▁nuovo",-11.768878936767578],["▁جهت",-11.768919944763184],["ુ",-11.768935203552246],["hid",-11.768996238708496],["▁ul",-11.769001007080078],["▁Bak",-11.76906967163086],["▁Nos",-11.76926326751709],["تىن",-11.769274711608888],["▁tä",-11.7693510055542],["聽",-11.769357681274414],["اپ",-11.769373893737791],["▁هناك",-11.769378662109377],["ವೆ",-11.769476890563965],["症",-11.769601821899414],["▁đồ",-11.769604682922363],["▁مخ",-11.769737243652344],["rə",-11.76978874206543],["▁(10",-11.769941329956056],["szám",-11.77007293701172],["ול",-11.77016544342041],["කර",-11.770416259765623],["ജ",-11.770434379577637],["▁kira",-11.770587921142578],["ція",-11.770658493041992],["పి",-11.77069854736328],["▁domin",-11.770719528198242],["▁जन",-11.77075481414795],["加入",-11.770864486694336],["▁wurden",-11.770867347717283],["Ս",-11.7709379196167],["▁तरह",-11.771014213562012],["会议",-11.771020889282228],["▁kurie",-11.77109146118164],[":10",-11.771096229553224],["▁78",-11.77120876312256],["▁ର",-11.771224975585938],["我们的",-11.771360397338867],["мент",-11.771437644958496],["▁стране",-11.771484375],["vek",-11.77153778076172],["▁jim",-11.771553993225098],["產",-11.771717071533203],["тно",-11.77176570892334],["▁lisää",-11.771782875061035],["▁قرآن",-11.77187728881836],["▁paper",-11.771990776062012],["try",-11.77200984954834],["▁74",-11.772024154663086],["▁yan",-11.772051811218262],["▁деле",-11.772135734558104],["▁Yu",-11.772184371948242],["700",-11.772205352783203],["治療",-11.772309303283691],["▁Street",-11.77231216430664],["ዊ",-11.772391319274902],["▁एवं",-11.77239227294922],["▁също",-11.77239990234375],["ТА",-11.772486686706545],["▁पद",-11.77252197265625],["ප්",-11.77263069152832],["тели",-11.772632598876951],["unum",-11.772698402404783],["▁mums",-11.772747993469238],["kuu",-11.7727689743042],["พา",-11.77277660369873],["▁wada",-11.772815704345703],["tze",-11.77286148071289],["▁тој",-11.773111343383787],["tty",-11.773136138916016],["ļa",-11.773183822631836],["lő",-11.773301124572754],["▁macht",-11.773303985595703],["▁अनुसार",-11.77332592010498],["ść",-11.773338317871094],["مي",-11.773354530334473],["▁Natur",-11.773367881774902],["boj",-11.773508071899414],["▁déjà",-11.773597717285156],["▁جی",-11.773635864257812],["ىسى",-11.77373504638672],["▁ڪم",-11.773831367492676],["оо",-11.773848533630373],["हि",-11.773929595947266],["开发",-11.774032592773438],["▁FC",-11.774038314819336],["سي",-11.77411651611328],["▁وو",-11.77412223815918],["버",-11.774125099182127],["▁dnia",-11.774127006530762],["▁гледа",-11.774133682250977],["▁bruger",-11.77414608001709],["ნო",-11.77414894104004],["▁galima",-11.774374008178713],["배",-11.774474143981934],["▁giảm",-11.774532318115234],["իչ",-11.774593353271484],["ათ",-11.774733543395996],["քի",-11.774750709533691],["円",-11.774812698364258],["ров",-11.774825096130373],["▁ვი",-11.774879455566406],["集团",-11.775012969970703],["▁चा",-11.775091171264648],["情報",-11.77514934539795],["▁ካ",-11.775150299072266],["cra",-11.775182723999023],["▁buah",-11.775203704833984],["▁personnes",-11.775278091430664],["PI",-11.775334358215332],["جن",-11.775351524353027],["▁Tahun",-11.775352478027344],["ości",-11.775418281555176],[
"skie",-11.775516510009766],["▁add",-11.775540351867676],["週",-11.775708198547363],["▁دن",-11.775733947753906],["▁prí",-11.775802612304688],["ક્ષ",-11.77583122253418],["▁phục",-11.77589225769043],["▁produkter",-11.775923728942873],["▁سوال",-11.77596664428711],["▁mellem",-11.776036262512209],["▁امروز",-11.776055335998535],["▁לב",-11.776076316833496],["▁palju",-11.776135444641112],["னா",-11.776140213012695],["▁როგორ",-11.776199340820312],["▁etdi",-11.776232719421388],["-9",-11.776246070861816],["▁конкурс",-11.776263236999512],["krat",-11.776302337646484],["▁eles",-11.776357650756836],["င္း",-11.776359558105469],["kum",-11.77644157409668],["▁متعلق",-11.776507377624512],["rant",-11.776530265808104],["▁quella",-11.7765531539917],["▁prof",-11.776674270629885],["▁сан",-11.776700973510742],["يش",-11.776702880859377],["▁فروش",-11.776761054992676],["larning",-11.776799201965332],["▁όλα",-11.776801109313965],["持",-11.776900291442873],["▁đủ",-11.776915550231934],["▁deel",-11.776942253112791],["ਸੀ",-11.77700424194336],["▁mogelijk",-11.77724266052246],["▁ایجاد",-11.77725315093994],["▁revi",-11.777328491210938],["lish",-11.77736759185791],["θε",-11.777386665344238],["▁коп",-11.777440071105955],["ūs",-11.77745532989502],["טי",-11.777514457702637],["▁እን",-11.777518272399902],["μο",-11.7775239944458],["▁tarde",-11.777535438537598],["▁rõ",-11.77756118774414],["▁Form",-11.777578353881836],["ٽي",-11.777803421020508],["老师",-11.77781581878662],["даа",-11.777833938598633],["乐",-11.77785587310791],["રો",-11.777872085571287],["▁dazu",-11.777881622314451],["▁проблеми",-11.777999877929688],["rade",-11.778047561645508],["▁تلاش",-11.778362274169922],["edu",-11.778371810913086],["▁μ",-11.778459548950195],["stoj",-11.7785005569458],["▁යා",-11.778509140014648],["▁tart",-11.77854824066162],["hai",-11.778613090515137],["сна",-11.778618812561035],["ęs",-11.778697967529297],["▁Nä",-11.77869987487793],["დეს",-11.778705596923828],["一种",-11.778779029846191],["▁зах",-11.778786659240724],["▁процес",-11.77892017364502],["▁bruk",-11.778923988342283],["▁área",-11.77893352508545],["మే",-11.778959274291992],["วง",-11.779078483581545],["wod",-11.779102325439451],["▁समाज",-11.779109954833984],["▁rede",-11.779237747192385],["▁pon",-11.77931308746338],["▁Kata",-11.779398918151855],["SS",-11.7794189453125],["▁eest",-11.779479026794434],["▁Person",-11.779495239257812],["▁sites",-11.779496192932127],["كى",-11.779518127441406],["▁ओ",-11.779553413391112],["־",-11.779570579528809],["▁encontrar",-11.77971363067627],["86",-11.779794692993164],["Per",-11.779841423034668],["गत",-11.779937744140623],["тель",-11.779969215393066],["skej",-11.780030250549316],["nā",-11.780147552490234],["▁қызмет",-11.78015422821045],["KE",-11.78016185760498],["▁SEO",-11.780198097229004],["▁ова",-11.780213356018066],["▁considera",-11.78023910522461],["▁bërë",-11.78024196624756],["hun",-11.780261993408203],["מן",-11.78036117553711],["双",-11.780417442321776],["▁jól",-11.780433654785156],["gene",-11.780445098876951],["mpa",-11.78044605255127],["ਲੀ",-11.780472755432127],["▁mimo",-11.780604362487791],["▁eli",-11.780658721923828],["▁महाराष्ट्र",-11.78071117401123],["找到",-11.780739784240724],["ук",-11.780741691589355],["LE",-11.780828475952148],["JE",-11.780848503112791],["ům",-11.78088092803955],["▁здесь",-11.780902862548828],["ತ್ತ",-11.781027793884276],["ынан",-11.781036376953123],["▁spraw",-11.781051635742188],["קה",-11.781126022338867],["▁între",-11.78119945526123],["пис",-11.781200408935549],["▁protiv",-11.781251907348633],["▁δ",-11.781272888183594],["ీ",-11.78127384185791]
,["▁여",-11.781326293945312],["খ",-11.78144073486328],["▁altijd",-11.781469345092772],["maz",-11.7815523147583],["▁hiểu",-11.781570434570312],["แห่ง",-11.781620979309082],["▁способ",-11.781622886657717],["tana",-11.781630516052246],["▁jā",-11.781660079956056],["cro",-11.781819343566896],["▁maga",-11.781904220581056],["▁rien",-11.781917572021484],["▁यांनी",-11.781932830810549],["▁toch",-11.782069206237791],["lī",-11.782132148742676],["▁لكن",-11.782196044921877],["کن",-11.782218933105469],["▁المت",-11.782326698303224],["▁баш",-11.782437324523926],["ilta",-11.78246021270752],["▁mayor",-11.782549858093262],["▁Sud",-11.782655715942385],["▁AK",-11.782699584960938],["▁फ",-11.782719612121582],["ével",-11.782734870910645],["▁prezident",-11.782757759094238],["▁سطح",-11.782824516296388],["▁Suomen",-11.78284740447998],["ລະ",-11.7828950881958],["▁cari",-11.78292465209961],["ブ",-11.782953262329102],["гы",-11.782984733581545],["▁ഗ",-11.783086776733398],["增长",-11.783163070678713],["cina",-11.783241271972656],["▁Мен",-11.783269882202148],["δα",-11.783280372619627],["יל",-11.78334617614746],["уваат",-11.783443450927734],["▁μία",-11.783447265625],["▁igual",-11.783449172973633],["▁ಸಿ",-11.78345012664795],["สุด",-11.783588409423828],["िन",-11.783754348754885],["ाय",-11.783758163452148],["ଏ",-11.783848762512209],["เด",-11.7838773727417],["▁Nov",-11.784244537353516],["▁leer",-11.784245491027832],["探",-11.784253120422363],["Lo",-11.784310340881348],["仍",-11.784311294555664],["▁gambar",-11.784364700317385],["▁Motor",-11.784504890441896],["龍",-11.784534454345703],["▁انہوں",-11.78457736968994],["左右",-11.784585952758787],["ciju",-11.784605026245115],["▁informatie",-11.784716606140137],["ኖ",-11.78484058380127],["引",-11.784865379333496],["▁keluarga",-11.785032272338867],["چى",-11.785082817077637],["▁osob",-11.7850980758667],["▁ஏ",-11.785109519958496],["▁вели",-11.78514003753662],["tää",-11.785178184509276],["▁cultural",-11.785202980041504],["▁food",-11.785253524780272],["liku",-11.785265922546388],["AP",-11.785372734069824],["▁Spiel",-11.785432815551758],["▁транспорт",-11.78553295135498],["dige",-11.785554885864258],["▁Artikel",-11.785594940185549],["גן",-11.785614967346191],["關",-11.78568172454834],["ct",-11.785682678222656],["▁фа",-11.785805702209473],["▁देख",-11.78581428527832],["▁minute",-11.785818099975586],["最後",-11.785836219787598],["աց",-11.785863876342772],["RS",-11.785998344421388],["▁избор",-11.786022186279297],["တွေ",-11.786038398742676],["აში",-11.786089897155762],["▁why",-11.7861328125],["들을",-11.786384582519531],["▁thanh",-11.78647804260254],["ial",-11.786603927612305],["вести",-11.786677360534668],["daj",-11.786845207214355],["▁dve",-11.786985397338867],["▁tahay",-11.787023544311523],["▁fru",-11.787027359008787],["▁bà",-11.787101745605469],["▁ڈی",-11.78713035583496],["▁course",-11.787148475646973],["▁кам",-11.78718376159668],["▁பு",-11.787346839904783],["niem",-11.78738021850586],["▁אותו",-11.787453651428224],["uks",-11.787480354309082],["કા",-11.78756046295166],["idades",-11.787614822387695],["▁Şi",-11.787636756896973],["▁ус",-11.787771224975586],["▁rela",-11.787890434265137],["▁đặt",-11.787928581237791],["▁đăng",-11.787930488586426],["▁כמה",-11.787936210632324],["ျဖစ္",-11.787938117980955],["▁account",-11.788008689880373],["յ",-11.78801727294922],["可是",-11.788148880004885],["mag",-11.788240432739258],["▁novembre",-11.788269996643066],["▁2010.",-11.788421630859377],["▁Sil",-11.788421630859377],["▁possono",-11.788442611694336],["▁(...)",-11.788481712341309],["יד",-11.788512229919434],["去年",-11.78852462768
5549],["▁dollar",-11.788548469543455],["직",-11.78868579864502],["schen",-11.788766860961914],["▁minimum",-11.788774490356444],["▁Από",-11.788812637329102],["继续",-11.7888822555542],["▁върху",-11.788936614990234],["ներից",-11.789022445678713],["▁தான்",-11.789058685302734],["成本",-11.789069175720217],["效果",-11.78907871246338],["kach",-11.789143562316896],["▁által",-11.789198875427246],["ව්",-11.789240837097168],["št",-11.789266586303713],["ବେ",-11.789307594299316],["களில்",-11.789332389831545],["ങ്ങളും",-11.78956413269043],["▁zakon",-11.789677619934082],["ద్",-11.789681434631348],["▁ಬಿ",-11.78980541229248],["mul",-11.78982162475586],["дон",-11.789907455444336],["مت",-11.78995132446289],["▁verder",-11.790132522583008],["▁hori",-11.790141105651855],["▁oldal",-11.790319442749023],["dot",-11.79033660888672],["▁semakin",-11.79035186767578],["▁würde",-11.790359497070312],["AA",-11.7904052734375],["ွ",-11.790488243103027],["▁þeim",-11.790616035461426],["邊",-11.790657997131348],["Ι",-11.790705680847168],["▁والی",-11.790772438049316],["tki",-11.790799140930176],["▁Informationen",-11.790799140930176],["纳",-11.79085922241211],["కే",-11.790864944458008],["mont",-11.790865898132324],["类",-11.790923118591309],["нос",-11.790985107421877],["dda",-11.790996551513672],["▁bada",-11.791033744812012],["ത്തു",-11.791154861450195],["pdf",-11.791242599487305],["▁पोस्ट",-11.791251182556152],["▁സര്",-11.791269302368164],["▁عدالت",-11.791288375854492],["гла",-11.791293144226074],["▁നിന്ന്",-11.79131317138672],["반",-11.791369438171388],["▁without",-11.791536331176758],["▁FA",-11.7915678024292],["rte",-11.791619300842283],["يين",-11.791650772094728],["કો",-11.791665077209473],["λέ",-11.791755676269531],["▁voit",-11.791770935058594],["▁Green",-11.791799545288086],["森",-11.79190444946289],["▁सुन",-11.791925430297852],["▁والت",-11.791953086853027],["овић",-11.791954040527344],["▁Via",-11.791979789733888],["jor",-11.791998863220217],["▁turn",-11.792006492614746],["▁vii",-11.792054176330566],["mesi",-11.792173385620115],["▁mają",-11.7921781539917],["ڙي",-11.792202949523926],["▁※",-11.792205810546877],["▁බල",-11.792278289794922],["ıb",-11.792318344116213],["дал",-11.792350769042969],["ON",-11.79238224029541],["ધ",-11.79244613647461],["▁digər",-11.79255199432373],["ame",-11.792570114135742],["▁Rama",-11.792725563049316],["▁зад",-11.792759895324709],["aw",-11.792791366577148],["▁sea",-11.792877197265623],["▁Chu",-11.792911529541016],["决定",-11.792929649353027],["▁وارد",-11.792983055114746],["▁mən",-11.793002128601074],["गी",-11.793094635009766],["▁значи",-11.793230056762695],["返",-11.793231964111328],["▁bike",-11.79323673248291],["רו",-11.793333053588867],["喝",-11.793368339538574],["▁hain",-11.793463706970217],["ంది",-11.793474197387695],["יז",-11.793476104736328],["lands",-11.793516159057615],["觉得",-11.793587684631348],["ദ്",-11.793593406677246],["ume",-11.793779373168944],["▁ਜੀ",-11.793789863586426],["▁kommt",-11.79383373260498],["άρ",-11.794054985046388],["তি",-11.794096946716309],["投資",-11.79409885406494],["▁ମ",-11.794172286987305],["▁سلام",-11.79420280456543],["▁ባ",-11.79420280456543],["▁baby",-11.794230461120604],["iji",-11.794265747070312],["▁garanti",-11.794279098510742],["▁change",-11.794370651245115],["▁1-2",-11.794443130493164],["hna",-11.79457664489746],["▁gero",-11.794607162475586],["യിലെ",-11.794642448425291],["▁31.",-11.79465103149414],["row",-11.794770240783691],["juma",-11.794784545898438],["▁jossa",-11.794852256774902],["▁мене",-11.79489803314209],["▁gai",-11.794979095458984],["▁ብቻ",-11.795008659362791],["▁ប្រ",-1
1.795110702514648],["▁място",-11.795127868652344],["▁salon",-11.795231819152832],["▁پي",-11.795241355895996],["kia",-11.795352935791016],["▁bỏ",-11.79539394378662],["EK",-11.795403480529783],["▁druge",-11.795421600341797],["▁កម្ពុជា",-11.795422554016112],["▁oso",-11.795530319213867],["▁Maha",-11.79582977294922],["▁optim",-11.795940399169922],["▁2:",-11.795990943908691],["▁sigur",-11.795990943908691],["gad",-11.796013832092283],["▁Chatroulette",-11.796014785766602],["保障",-11.796061515808104],["▁Kā",-11.7960844039917],["▁नेता",-11.796102523803713],["ទៅ",-11.796165466308594],["tah",-11.796232223510742],["▁комплекс",-11.79644775390625],["IV",-11.796489715576172],["▁Beste",-11.79656219482422],["ţe",-11.796578407287598],["dana",-11.796590805053713],["▁ഓ",-11.796598434448242],["▁《",-11.796646118164062],["只能",-11.796675682067873],["āli",-11.79671859741211],["之後",-11.796747207641602],["gle",-11.796791076660156],["▁camera",-11.796799659729004],["▁Rom",-11.796890258789062],["eco",-11.796940803527832],["mă",-11.796951293945312],["скиот",-11.79698657989502],["▁خیلی",-11.797040939331056],["▁дни",-11.79710578918457],["▁מאוד",-11.797110557556152],["ril",-11.797117233276367],["лап",-11.797137260437012],["▁RSS",-11.797146797180176],["▁Portugal",-11.797151565551758],["▁ملي",-11.797250747680664],["รัก",-11.7973051071167],["▁дзе",-11.797356605529783],["imu",-11.797398567199709],["▁ഉ",-11.797399520874023],["▁група",-11.797441482543944],["▁giorno",-11.79747486114502],["▁Comp",-11.797658920288086],["라고",-11.797731399536133],["rje",-11.797735214233398],["etti",-11.797746658325195],["▁հետո",-11.797836303710938],["مو",-11.797895431518556],["▁estat",-11.797985076904297],["▁Cup",-11.798088073730469],["07.",-11.798101425170898],["ecek",-11.798123359680176],["▁יו",-11.798134803771973],["▁aka",-11.798222541809082],["智",-11.798245429992676],["ನ್ನ",-11.798266410827637],["വാ",-11.798266410827637],["▁جميع",-11.798298835754396],["▁ม",-11.79833698272705],["▁sposób",-11.798385620117188],["▁ძალიან",-11.798385620117188],["▁Leben",-11.798510551452637],["▁meri",-11.798584938049316],["保护",-11.798625946044922],["▁Uma",-11.798733711242676],["处理",-11.798808097839355],["▁figura",-11.798810005187988],["▁свят",-11.798826217651367],["וי",-11.798864364624023],["BU",-11.798911094665527],["▁Bri",-11.79891872406006],["αι",-11.798924446105955],["▁केही",-11.798952102661133],["▁1980",-11.799050331115724],["cap",-11.799128532409668],["وع",-11.799145698547363],["▁questi",-11.799180030822754],["▁поради",-11.799256324768066],["▁سنگ",-11.799262046813965],["▁Problem",-11.79928493499756],["esse",-11.799288749694824],["▁تنها",-11.799288749694824],["▁सिंह",-11.799323081970217],["▁orden",-11.799335479736328],[".12.",-11.799362182617188],["კო",-11.799392700195312],["▁dhéanamh",-11.799403190612791],["最后",-11.79944896697998],["两个",-11.79946231842041],["▁liter",-11.799606323242188],["ಿನ",-11.799646377563477],["▁କରିବା",-11.799649238586426],["וח",-11.799701690673828],["odi",-11.799778938293455],["▁Улсын",-11.799800872802734],["Tu",-11.799848556518556],["rre",-11.799919128417969],["▁वो",-11.799935340881348],["▁لري",-11.799969673156738],["нэ",-11.799992561340332],["▁ala",-11.800021171569824],["▁92",-11.80019474029541],["▁American",-11.800277709960938],["▁hope",-11.80028247833252],["▁정보",-11.800355911254885],["▁مسلم",-11.800456047058104],["▁durant",-11.800466537475586],["▁តាម",-11.800481796264648],["▁Cre",-11.800575256347656],["मि",-11.80060863494873],["ич",-11.800658226013184],["மே",-11.800758361816406],["ದಿಂದ",-11.800798416137695],["旅行",-11.800824165344238],["▁fun
dit",-11.80083179473877],["▁smak",-11.800840377807615],["ә",-11.80087947845459],["ांना",-11.80093479156494],["▁suma",-11.800959587097168],["速",-11.800959587097168],["дагы",-11.80096435546875],["▁በተ",-11.801029205322266],["▁ļoti",-11.801106452941896],["▁могат",-11.801345825195312],["▁wirklich",-11.801365852355955],["mada",-11.801458358764648],["▁області",-11.801610946655272],["iš",-11.801623344421388],["▁شریک",-11.801837921142578],["▁सब",-11.801860809326172],["▁another",-11.80191707611084],["сында",-11.802215576171877],["kaz",-11.802233695983888],["ונה",-11.80226707458496],["ebb",-11.802388191223145],["țe",-11.802449226379396],["▁tiga",-11.80250072479248],["▁organiza",-11.802525520324709],["这么",-11.802526473999023],["নে",-11.802538871765137],["▁것이다",-11.802571296691896],["lej",-11.802578926086426],["▁ž",-11.802629470825195],["mais",-11.802638053894045],["່",-11.802651405334473],["aku",-11.802687644958496],["▁otra",-11.80273151397705],["▁մեծ",-11.802803993225098],["▁Prze",-11.802896499633787],["ството",-11.802940368652344],["hän",-11.802968978881836],["จริง",-11.803023338317873],["▁هاي",-11.803080558776855],["▁ર",-11.803088188171388],["rent",-11.80312442779541],["▁ինչպես",-11.80314826965332],["үп",-11.80317497253418],["▁تواند",-11.803190231323242],["▁लोगों",-11.803208351135254],["^^",-11.80321979522705],["ಜಿ",-11.803236961364746],["▁gdje",-11.803264617919922],["▁125",-11.803354263305664],["Γ",-11.803362846374512],["▁каже",-11.803425788879396],["骨",-11.803444862365724],["cional",-11.803462028503418],["دل",-11.803474426269531],["▁nú",-11.803592681884766],["▁rezultat",-11.80369472503662],["▁toga",-11.803733825683594],["ጫ",-11.803786277770996],["▁uusi",-11.80379581451416],["tov",-11.803820610046388],["▁IS",-11.803919792175291],["▁Hans",-11.804046630859377],["jnë",-11.804051399230955],["▁บาท",-11.804117202758787],["63",-11.80413818359375],["实施",-11.804143905639648],["▁עו",-11.80416488647461],["▁82",-11.804189682006836],["ളി",-11.804200172424316],["▁ਜਾ",-11.804361343383787],["ené",-11.804390907287598],["️",-11.804508209228516],["▁sho",-11.804518699645996],["▁Name",-11.804649353027344],["▁Japan",-11.80466079711914],["psi",-11.804686546325684],["置",-11.804757118225098],["▁ග",-11.804856300354004],["▁због",-11.804893493652344],["▁haft",-11.805116653442385],["雅",-11.80514430999756],["ష్",-11.805258750915527],["UK",-11.805277824401855],["▁മു",-11.80528163909912],["待",-11.805293083190918],["git",-11.805350303649902],["71",-11.805373191833496],["ൊ",-11.805448532104492],["▁:-",-11.80545711517334],["▁đình",-11.805535316467283],["▁जान",-11.805662155151367],["母",-11.805705070495604],["▁مس",-11.80580711364746],["▁деятельности",-11.805874824523926],["rile",-11.805891036987305],["كس",-11.805996894836426],["عة",-11.806099891662598],["▁koju",-11.806121826171877],["стр",-11.806256294250488],["▁sĩ",-11.806262969970703],["▁واري",-11.806282043457031],["bek",-11.806431770324709],["▁Lang",-11.80644702911377],["▁perfect",-11.806478500366213],["▁nende",-11.806483268737791],["▁ring",-11.806503295898438],["▁Ren",-11.806524276733398],["КА",-11.806551933288574],["misel",-11.806653022766112],["นะ",-11.806696891784668],["태",-11.80672550201416],["гө",-11.806785583496094],["sas",-11.806807518005373],["▁Selain",-11.806888580322266],["яў",-11.806902885437012],["▁වැඩ",-11.806930541992188],["▁hivyo",-11.80693244934082],["▁هدف",-11.806979179382324],["▁minu",-11.806994438171388],["punkt",-11.807061195373535],["jük",-11.807124137878418],["ћу",-11.807156562805176],["▁نفر",-11.807165145874023],["▁rak",-11.807178497314451],["▁Azərbaycanda",
-11.807247161865234],["聲",-11.807291030883787],["▁පා",-11.807306289672852],["▁sup",-11.807342529296877],["▁cine",-11.807406425476074],["▁រ",-11.807517051696776],["عي",-11.80772590637207],["nės",-11.807772636413574],["สูง",-11.807788848876951],["▁ਇਕ",-11.807900428771973],["幾",-11.807906150817873],["▁Ao",-11.80791473388672],["▁یوه",-11.80805206298828],["▁alkohol",-11.808151245117188],["▁mie",-11.80823802947998],["нде",-11.808363914489746],["▁fact",-11.808367729187012],["▁neza",-11.808387756347656],["၁",-11.808433532714844],["▁lucru",-11.808585166931152],["▁Budapest",-11.80869197845459],["mate",-11.808764457702637],["▁Kre",-11.808773040771484],["▁κα",-11.808794975280762],["▁alto",-11.80890941619873],["▁maja",-11.808917045593262],["Ku",-11.8090238571167],["Ă",-11.809066772460938],["▁پاس",-11.809096336364746],["▁गो",-11.8091402053833],["▁mendapat",-11.809158325195312],["▁ممکن",-11.809301376342772],["▁હતું",-11.809304237365724],["2009",-11.809405326843262],["▁virkelig",-11.809426307678224],["ക്കി",-11.80942726135254],["▁ним",-11.809430122375488],["চ",-11.809593200683594],["rana",-11.809701919555664],["▁kişi",-11.809707641601562],["▁हि",-11.80972957611084],["▁Hoe",-11.809754371643066],["▁yap",-11.809815406799316],["▁jūs",-11.80985164642334],["▁알",-11.809860229492188],["▁Pr",-11.809924125671388],["▁liste",-11.809925079345703],["▁aba",-11.809966087341309],["▁должен",-11.809988975524902],["▁סי",-11.809999465942385],["▁धर्म",-11.810025215148926],["▁рэ",-11.810047149658203],["▁mó",-11.810050010681152],["全部",-11.810151100158691],["īs",-11.81017780303955],["она",-11.810233116149902],["▁מא",-11.810288429260254],["▁Sab",-11.810458183288574],["parti",-11.810490608215332],["iranje",-11.810606002807615],["▁위",-11.81076717376709],["▁cita",-11.810820579528809],["穿",-11.810820579528809],["なので",-11.810909271240234],["iĝis",-11.810925483703612],["▁μην",-11.811012268066406],["▁giorni",-11.81101417541504],["▁tus",-11.811029434204102],["ware",-11.81103801727295],["ละ",-11.811089515686035],["▁пар",-11.811115264892578],["ଗ",-11.811156272888184],["კი",-11.81121063232422],["▁Central",-11.81121063232422],["mond",-11.811266899108888],["▁گرفته",-11.811291694641112],["▁acord",-11.811319351196287],["报告",-11.811430931091309],["▁услуги",-11.811463356018066],["▁weil",-11.811467170715332],["ੰ",-11.811540603637695],["▁national",-11.811591148376465],["▁кап",-11.81159782409668],["хо",-11.811606407165527],["受到",-11.811609268188477],["ബി",-11.811711311340332],["oto",-11.811765670776367],["现",-11.811822891235352],["Ver",-11.811861038208008],["▁kvinner",-11.81186580657959],["stvu",-11.811890602111816],["▁pjesë",-11.811898231506348],["关",-11.812013626098633],["▁Are",-11.812044143676758],["▁phố",-11.812137603759766],["cik",-11.812198638916016],["lika",-11.812199592590332],["ини",-11.812212944030762],["▁ellen",-11.812315940856934],["יט",-11.812336921691896],["שה",-11.81243133544922],["erade",-11.812437057495115],["▁Они",-11.812469482421877],["▁هئي",-11.812614440917969],["▁Sva",-11.812662124633787],["▁regula",-11.812694549560549],["remos",-11.812700271606444],["ácie",-11.812785148620604],["란",-11.812912940979004],["▁distribu",-11.812968254089355],["▁musik",-11.813006401062012],["держ",-11.813047409057615],["警",-11.813103675842283],["▁пер",-11.81310749053955],["考",-11.81312656402588],["▁tiết",-11.81313133239746],["▁cứu",-11.813135147094728],["▁blok",-11.813243865966797],["▁bhi",-11.81334400177002],["▁ర",-11.813475608825684],["▁susi",-11.813613891601562],["▁арга",-11.813623428344728],["▁тест",-11.813701629638672],["▁kompleks",-11.813730239868
164],["▁yung",-11.813743591308594],["▁ç",-11.813765525817873],["wia",-11.813796997070312],["▁didn",-11.813949584960938],["▁सदस्य",-11.813965797424316],["▁Gen",-11.814085006713867],["aro",-11.81422233581543],["▁eskorte",-11.814258575439451],["▁فکر",-11.814410209655762],["▁між",-11.81446933746338],["▁Від",-11.814637184143066],["dü",-11.814652442932127],["そう",-11.8147554397583],["ျခင္း",-11.814759254455566],["▁exemplo",-11.814811706542969],["▁مطلب",-11.814866065979004],["▁leve",-11.81501293182373],["כי",-11.815030097961426],["▁efekt",-11.815107345581056],["での",-11.815107345581056],["▁nhiệm",-11.815145492553713],["▁chí",-11.815214157104492],["久",-11.81528091430664],["▁योजना",-11.815338134765623],["▁осы",-11.815340042114258],["男性",-11.815383911132812],["πα",-11.815423011779783],["vie",-11.815436363220217],["래",-11.815465927124023],["kri",-11.815476417541504],["▁gần",-11.815488815307615],["▁Якщо",-11.815510749816896],["▁prava",-11.815558433532717],["▁стар",-11.815619468688965],["▁жаңа",-11.81572437286377],["▁Bez",-11.815757751464844],["▁నేను",-11.815767288208008],["▁SO",-11.815790176391602],["▁self",-11.815814971923828],["dî",-11.815817832946776],["रण",-11.81588649749756],["▁problém",-11.81598949432373],["▁また",-11.816020011901855],["îne",-11.816048622131348],["▁lingua",-11.816088676452637],["▁ڪئي",-11.816094398498535],["▁stata",-11.816225051879885],["nę",-11.816264152526855],["▁Бог",-11.81635284423828],["▁важно",-11.816357612609863],["Ch",-11.816471099853516],["▁Лу",-11.816489219665527],["▁brug",-11.816763877868652],["▁Norsk",-11.816862106323242],["▁diren",-11.816903114318848],["▁zurück",-11.81692123413086],["▁Alla",-11.816967964172363],[":45",-11.81698989868164],["▁박",-11.817039489746094],["fl",-11.817044258117676],["되는",-11.817049026489258],["▁nim",-11.817099571228027],["▁ກັບ",-11.817179679870604],["▁đưa",-11.817214965820312],["▁불",-11.817225456237791],["фер",-11.81722927093506],["▁intre",-11.817262649536133],["▁δι",-11.81728458404541],["▁humor",-11.817489624023438],["hil",-11.817561149597168],["▁natürlich",-11.817561149597168],["▁आपको",-11.81758975982666],["▁яе",-11.817667961120604],["含",-11.817671775817873],["▁nostre",-11.817709922790527],["▁məlumat",-11.817803382873535],["asyon",-11.81781768798828],["▁واقع",-11.81782054901123],["▁Camp",-11.817824363708496],["יב",-11.81788444519043],["τέ",-11.817890167236328],["▁гэсэн",-11.817910194396973],["rut",-11.8179292678833],["▁nguyên",-11.8179349899292],["▁negeri",-11.817991256713867],["નાં",-11.818239212036133],["dade",-11.818259239196776],["普",-11.818355560302734],["xan",-11.818427085876465],["▁atë",-11.818459510803224],["▁hjá",-11.818567276000977],["▁వా",-11.818607330322266],["եմ",-11.818665504455566],["▁የኢትዮጵያ",-11.818705558776855],["▁slut",-11.818706512451172],["kku",-11.818779945373535],["68",-11.818903923034668],["▁ព័ត៌មាន",-11.819029808044434],["▁professor",-11.819131851196287],["▁familiar",-11.819147109985352],["▁mö",-11.81917953491211],["▁vode",-11.81923770904541],["teurs",-11.819250106811523],["▁Jawa",-11.819284439086914],["▁từng",-11.819416999816896],["nts",-11.819422721862791],["യുള്ള",-11.819427490234377],["ekin",-11.819622039794922],["▁خاطر",-11.819640159606934],["▁Без",-11.819653511047363],["▁Obama",-11.819698333740234],["ற்ற",-11.8197603225708],["bú",-11.819890975952148],["▁առաջ",-11.81991481781006],["▁Teil",-11.81993007659912],["तील",-11.819950103759766],["▁până",-11.819994926452637],["▁drog",-11.820083618164062],["▁කියල",-11.820158004760742],["йки",-11.820234298706056],["лег",-11.82033634185791],["▁Melayu",-11.820338249206545],["▁သ"
,-11.820355415344238],["▁menarik",-11.820411682128906],["ിക്കുന്ന",-11.820489883422852],["▁gusta",-11.82050609588623],["▁trei",-11.82053279876709],["折",-11.820642471313477],["▁lit",-11.820650100708008],["▁विशेष",-11.820664405822754],["ออนไลน์",-11.820671081542969],["posta",-11.820708274841309],["▁94",-11.820775032043455],["▁away",-11.82082462310791],["▁ქ",-11.820889472961426],["▁bayan",-11.821014404296877],["▁kanske",-11.821036338806152],["CD",-11.82104206085205],["ship",-11.821162223815918],["▁trai",-11.821189880371094],["инин",-11.82132053375244],["▁καθώς",-11.821374893188477],["▁kiện",-11.821409225463867],["▁نن",-11.82142734527588],["的地方",-11.82158088684082],["▁sted",-11.82164192199707],["▁పై",-11.821683883666992],["ਖ",-11.821703910827637],["▁مناسب",-11.821748733520508],["අ",-11.821889877319336],["pare",-11.821931838989258],["▁경",-11.82198429107666],["▁всі",-11.822141647338867],["▁ditë",-11.822224617004396],["fit",-11.822226524353027],["▁FOR",-11.822264671325684],["▁bruker",-11.822310447692873],["▁Über",-11.822367668151855],["伊",-11.82237720489502],["bos",-11.822479248046877],["ೈ",-11.822500228881836],["<",-11.822518348693848],["▁teda",-11.822555541992188],["▁eng",-11.822586059570312],["方便",-11.822607040405272],["▁darbo",-11.82262897491455],["ún",-11.822677612304688],["▁neuen",-11.822793960571287],["೦",-11.822880744934082],["▁danas",-11.82292366027832],["ِّ",-11.82297134399414],["▁gaar",-11.823016166687012],["▁Unternehmen",-11.823027610778809],["貨",-11.823039054870604],["▁corre",-11.823101043701172],["μά",-11.82310676574707],["ाः",-11.823153495788574],["▁Այս",-11.823333740234377],["ტა",-11.823410034179688],["▁своје",-11.82342529296875],["▁бер",-11.82357692718506],["COM",-11.82370662689209],["▁heller",-11.82371425628662],["taja",-11.823766708374023],["▁odio",-11.823777198791504],["▁совет",-11.823798179626465],["▁Ní",-11.823853492736816],["ຈະ",-11.823862075805664],["▁Przy",-11.823875427246094],["每天",-11.823957443237305],["▁пост",-11.823980331420898],["gne",-11.824015617370604],["▁නැති",-11.824028968811035],["▁ಪ",-11.82404327392578],["scri",-11.824131965637209],["iin",-11.824158668518066],["▁area",-11.824235916137695],["▁प्राप्त",-11.824328422546388],["ngi",-11.824335098266602],["дун",-11.824413299560549],["ណ",-11.824539184570312],["▁ശ",-11.824552536010742],["íme",-11.824609756469728],["Az",-11.82464599609375],["▁बोल",-11.824654579162598],["成立",-11.824661254882812],["▁panel",-11.824666976928713],["handel",-11.824710845947266],["▁treball",-11.82487678527832],["▁finner",-11.82490348815918],["▁ສໍາລັບ",-11.82501220703125],["στα",-11.82510471343994],["abil",-11.8251314163208],["bs",-11.825191497802734],["lini",-11.825255393981934],["▁biznes",-11.82542324066162],["▁Efter",-11.825465202331545],["дээ",-11.825469017028809],["▁özel",-11.825613975524902],["▁tiên",-11.825679779052734],["▁leik",-11.82573127746582],["▁yil",-11.825775146484377],["▁ahli",-11.825790405273438],["▁interesse",-11.825867652893066],["▁(6",-11.825873374938965],["友",-11.825885772705078],["▁steht",-11.825960159301758],["oba",-11.825963020324709],["ዕ",-11.8260498046875],["là",-11.826096534729004],["▁Jean",-11.826147079467772],["有人",-11.826189994812012],["ონ",-11.826196670532228],["ഷ്",-11.826251983642578],["▁לאחר",-11.826261520385742],["▁gøre",-11.82626247406006],["免費",-11.826272010803224],["▁neo",-11.82638168334961],["▁යන්න",-11.826419830322266],["電話",-11.82643222808838],["▁söz",-11.826557159423828],["▁Adam",-11.826586723327637],["▁всичко",-11.826590538024902],["▁ט",-11.826622009277344],["▁сделать",-11.826630592346191],["▁libera",-
11.826708793640137],["ikke",-11.826722145080566],["▁स्",-11.826739311218262],["▁cung",-11.826772689819336],["▁алу",-11.826789855957031],["aine",-11.826815605163574],["▁jours",-11.826830863952637],["ခ်",-11.82685375213623],["рал",-11.826892852783203],["▁photo",-11.826894760131836],["▁pê",-11.826936721801758],["▁mark",-11.826948165893556],["結",-11.82711696624756],["▁gram",-11.827190399169922],["▁բ",-11.82730197906494],["нік",-11.827378273010254],["▁98",-11.827384948730469],["રે",-11.827409744262695],["▁spi",-11.82742977142334],["wis",-11.827492713928224],["rā",-11.827569007873535],["ają",-11.82759952545166],["색",-11.82761573791504],["hold",-11.82765007019043],["ൾ",-11.827670097351074],["▁unor",-11.827778816223145],["▁Një",-11.827792167663574],["วิ",-11.827957153320312],["目的",-11.828072547912598],["▁внимание",-11.828120231628418],["▁°",-11.82814598083496],["मै",-11.82814884185791],["▁পা",-11.828154563903809],["rot",-11.828266143798828],["rado",-11.82828140258789],["▁których",-11.828290939331056],["設定",-11.828335762023926],["đe",-11.82857894897461],["скі",-11.828605651855469],["▁doua",-11.828692436218262],["▁skil",-11.828702926635742],["▁الد",-11.82875156402588],["rium",-11.828765869140623],["xu",-11.828781127929688],["გან",-11.828808784484863],["ပ္",-11.828948974609377],["▁රි",-11.828980445861816],["▁ପା",-11.829056739807127],["銀行",-11.829094886779783],["에서는",-11.82913589477539],["▁health",-11.82917594909668],["▁munka",-11.829214096069336],["▁район",-11.829350471496582],["ेल",-11.82936668395996],["▁труд",-11.829402923583984],["▁NATO",-11.82942008972168],["กระ",-11.829549789428713],["կան",-11.829550743103027],["▁behöver",-11.82973575592041],["▁живота",-11.82978343963623],["▁viser",-11.829854011535645],["▁ری",-11.829900741577148],["رد",-11.829920768737791],["▁podemos",-11.829936027526855],["▁አል",-11.829955101013184],["ால்",-11.83002471923828],["▁kuid",-11.830053329467772],["▁Aur",-11.830348014831545],["▁sering",-11.830403327941896],["對於",-11.83046531677246],["воз",-11.830519676208496],["▁mogą",-11.830618858337402],["-14",-11.83065700531006],["▁ši",-11.830657958984377],["▁çıkar",-11.830692291259766],["▁isi",-11.830724716186523],["▁lợi",-11.83078670501709],["급",-11.830802917480469],["▁aliaj",-11.830857276916504],["▁більше",-11.83085823059082],["cích",-11.830914497375488],["▁тип",-11.830967903137209],["IC",-11.83101749420166],["季",-11.831067085266112],["▁چار",-11.83107852935791],["▁jedan",-11.83108615875244],["യ്ക്ക്",-11.831132888793944],["▁आर्थिक",-11.831135749816896],["▁MS",-11.831238746643066],["▁пор",-11.831286430358888],["倍",-11.831439018249512],["▁شریف",-11.831457138061523],["▁Mod",-11.83148193359375],["enne",-11.831609725952148],["LAR",-11.83177661895752],["▁להת",-11.831913948059082],["රා",-11.831934928894045],["vent",-11.83200454711914],["sca",-11.832035064697266],["품",-11.832038879394531],["▁Comment",-11.832109451293944],["ติด",-11.832146644592283],["▁30%",-11.8321533203125],["▁ഷ",-11.832197189331056],["▁oggi",-11.83229637145996],["▁eshte",-11.83230209350586],["▁системи",-11.832472801208496],["▁Digital",-11.832474708557127],["▁цьому",-11.83254051208496],["▁Федерации",-11.832542419433594],["▁දින",-11.832616806030272],["дзі",-11.832687377929688],["▁ಉ",-11.83270263671875],["▁2,5",-11.83279037475586],["HD",-11.832880973815918],["▁spus",-11.832929611206056],["▁dato",-11.8329439163208],["▁arm",-11.832958221435549],["청",-11.832977294921877],["▁София",-11.833003044128418],["цыі",-11.83302402496338],["omba",-11.833078384399414],["▁ಎಂಬ",-11.83313274383545],["yal",-11.833178520202637],["ADA",-11.8332
24296569824],["魚",-11.83323860168457],["▁kasih",-11.83328628540039],["wak",-11.833353996276855],["Mar",-11.833413124084473],["收入",-11.83344268798828],["▁bord",-11.83359146118164],["▁Ле",-11.83362102508545],["ក្នុង",-11.833632469177246],["naj",-11.833740234375],["▁investi",-11.833770751953123],["BI",-11.833868026733398],["▁súas",-11.833951950073242],["린",-11.833969116210938],["▁በማ",-11.834024429321287],["▁vă",-11.834035873413086],["ването",-11.834113121032717],["զ",-11.834153175354004],["▁elektro",-11.834187507629396],["▁pea",-11.83419704437256],["▁Nag",-11.834250450134276],["หลาย",-11.834305763244627],["▁unik",-11.834309577941896],["同時",-11.83431339263916],["ಬೇಕು",-11.834385871887209],["▁Musik",-11.83442497253418],["▁kawasan",-11.834447860717772],["降",-11.834480285644531],["ije",-11.834538459777832],["▁çi",-11.834555625915527],["▁cáo",-11.834572792053224],["గ్",-11.834577560424805],["ጨ",-11.834582328796388],["ញ",-11.834589958190918],["▁ሁሉ",-11.834623336791992],["nca",-11.834667205810549],["▁تل",-11.834670066833496],["क्स",-11.834677696228027],["ె",-11.834701538085938],["进入",-11.834734916687012],["記",-11.83476734161377],["skar",-11.83486270904541],["ήσεις",-11.834884643554688],["эх",-11.834890365600586],["usu",-11.83493709564209],["▁mahu",-11.834967613220217],["ура",-11.83497142791748],["▁kamer",-11.83498191833496],["ىنىڭ",-11.835027694702148],["VA",-11.835041046142578],["ల్లో",-11.83521842956543],["▁basa",-11.835339546203612],["Υ",-11.835426330566406],["▁ел",-11.835444450378418],["▁Amma",-11.835463523864746],["联",-11.835464477539062],["▁дво",-11.835530281066896],["▁پاسخ",-11.835538864135742],["經濟",-11.835663795471191],["74",-11.835716247558594],["▁cliente",-11.835775375366213],["▁Kh",-11.835820198059082],["宝",-11.8360013961792],["bila",-11.836078643798828],["öv",-11.8361234664917],["▁بې",-11.836130142211914],["습니다",-11.836139678955078],["ମ୍",-11.836236000061035],["iso",-11.83625030517578],["▁කියන",-11.836284637451172],["▁ગ",-11.83628749847412],["▁Ду",-11.836295127868652],["▁гу",-11.836356163024902],["▁veces",-11.836438179016112],["کل",-11.836442947387695],["ڭ",-11.836570739746094],["▁^^",-11.836573600769045],["ias",-11.83664894104004],["▁kez",-11.836711883544922],["▁autres",-11.836719512939451],["▁cửa",-11.836721420288086],["ု",-11.836750030517578],["▁ଆଜି",-11.836770057678224],["▁Haus",-11.83681297302246],["▁예",-11.836926460266112],["▁nuevo",-11.836983680725098],["▁fur",-11.836989402770996],["▁kanal",-11.83700466156006],["▁Lê",-11.837111473083496],["快速",-11.837199211120604],["▁കു",-11.837210655212402],["hati",-11.837224006652832],["დე",-11.837323188781738],["的小",-11.837407112121582],["SU",-11.837447166442873],["2008",-11.83745002746582],["▁sekitar",-11.837576866149902],["EL",-11.837605476379396],["▁була",-11.83767032623291],["富",-11.837722778320312],["▁hold",-11.837724685668944],["▁liten",-11.837899208068848],["»:",-11.83802890777588],["데",-11.838058471679688],["你们",-11.838065147399902],["▁deri",-11.8380765914917],["gua",-11.83809757232666],["作用",-11.838138580322266],["mica",-11.838151931762695],["▁უფრო",-11.83828830718994],["▁equipo",-11.83830738067627],["▁ہوتا",-11.838313102722168],["▁حالت",-11.838332176208496],["聯",-11.838401794433594],["ibh",-11.838471412658691],["▁darba",-11.838529586791992],["kro",-11.83855438232422],["赛",-11.838665962219238],["▁tangan",-11.838666915893556],["▁vest",-11.838780403137209],["▁ose",-11.838889122009276],["▁gây",-11.838936805725098],["▁dung",-11.83898639678955],["▁Partner",-11.839009284973145],["zek",-11.839163780212402],["陈",-11.839177131652832],["్యా",-11
.83924674987793],["run",-11.839280128479004],["ബ",-11.83928394317627],["▁انقلاب",-11.839287757873535],["▁laŭ",-11.83937168121338],["ndum",-11.83938694000244],["▁Dating",-11.839393615722656],["▁Ты",-11.839439392089844],["▁gab",-11.83950138092041],["陳",-11.839543342590332],["▁जना",-11.839670181274414],["▁pravo",-11.839715957641602],["▁मान",-11.839729309082031],["ቤ",-11.839764595031738],["▁mele",-11.83986473083496],["lain",-11.839958190917969],["▁шта",-11.839964866638184],["IL",-11.839990615844728],["▁graag",-11.8400297164917],["▁ball",-11.84004020690918],["▁উ",-11.840163230895996],["ฯ",-11.840201377868652],["ကြ",-11.840206146240234],["tou",-11.840286254882812],["▁fără",-11.840312004089355],["▁خارج",-11.840340614318848],["čke",-11.840354919433594],["▁reading",-11.840392112731934],["▁chia",-11.840435981750488],["▁Mana",-11.840459823608398],["▁sistemi",-11.84047794342041],["▁última",-11.840521812438965],["▁**",-11.84054470062256],["ফ",-11.840636253356934],["rra",-11.84084701538086],["▁भए",-11.840971946716309],["öt",-11.841041564941406],["ονται",-11.841083526611328],["永",-11.841092109680176],["bert",-11.841115951538086],["▁kaum",-11.841130256652832],["▁бет",-11.841193199157717],["နဲ့",-11.841212272644045],["▁байдаг",-11.841252326965332],["▁danske",-11.841288566589355],["เด็ก",-11.841320991516112],["▁qualquer",-11.841360092163086],["Es",-11.841382026672363],["▁poste",-11.841401100158691],["mati",-11.841495513916016],["媒体",-11.84149932861328],["▁जे",-11.841583251953123],["itti",-11.841588020324709],["uš",-11.84158992767334],["აა",-11.841599464416504],["▁svojim",-11.84184741973877],["നു",-11.841869354248049],["▁пат",-11.841898918151855],["▁acaba",-11.841941833496094],["▁servi",-11.842013359069824],["▁looking",-11.842019081115724],["▁sıra",-11.842127799987791],["ాలి",-11.842233657836914],["▁دلیل",-11.842445373535156],["จัด",-11.842520713806152],["▁होगा",-11.842531204223633],["▁배",-11.842535018920898],["rau",-11.842561721801758],["oor",-11.842669486999512],["▁оның",-11.842705726623535],["▁मुख्य",-11.842705726623535],["fun",-11.842778205871582],["▁Kalau",-11.842809677124023],["▁ly",-11.842845916748049],["▁való",-11.842876434326172],["ಮಾ",-11.84299087524414],["▁81",-11.843050956726074],["專",-11.843072891235352],["▁Life",-11.84311294555664],["▁शेयर",-11.843117713928224],["rish",-11.84315586090088],["▁poden",-11.84315586090088],["▁صلى",-11.843179702758787],["▁հայ",-11.843201637268066],["▁Ent",-11.843377113342283],["▁TU",-11.843424797058104],["▁qal",-11.843592643737791],["▁personale",-11.843765258789062],["▁Пи",-11.843839645385742],["的大",-11.843853950500488],["νό",-11.843876838684082],["యి",-11.843907356262209],["▁oltre",-11.843929290771484],["ბი",-11.843954086303713],["េ",-11.843961715698242],["▁Mala",-11.843999862670898],["niz",-11.844002723693848],["nul",-11.844085693359377],["▁আর",-11.844090461730955],["▁நா",-11.844099044799805],["тро",-11.844209671020508],["▁စ",-11.844283103942873],["buk",-11.844311714172363],["▁box",-11.844318389892578],["gada",-11.84437656402588],["shin",-11.844472885131836],["▁thư",-11.844560623168944],["▁sát",-11.844569206237791],["▁ennen",-11.84459114074707],["▁sezon",-11.844624519348145],["എസ്",-11.84463119506836],["▁개",-11.844632148742676],["▁snel",-11.844806671142578],["▁Azərbaycanın",-11.844808578491213],["▁steeds",-11.844904899597168],["تك",-11.844908714294434],["▁quale",-11.84498119354248],["▁kies",-11.845020294189451],["▁Kau",-11.845080375671388],["EC",-11.845243453979492],["▁Вам",-11.845244407653809],["ξε",-11.84536075592041],["ئے",-11.845420837402344],["বে",-11.845479011
535645],["▁Dunia",-11.845545768737791],["▁ки",-11.845559120178224],["▁voz",-11.845563888549805],["▁Romania",-11.845608711242676],["▁بررسی",-11.845626831054688],["▁Opera",-11.845898628234863],["▁доступ",-11.84593391418457],["▁buku",-11.845973014831545],["თვის",-11.845986366271973],["tish",-11.846067428588867],["ası",-11.846156120300291],["יון",-11.846229553222656],["▁آغاز",-11.846324920654297],["ૂ",-11.846346855163574],["uga",-11.84638500213623],["վող",-11.846424102783203],["▁stora",-11.846433639526367],["▁rapport",-11.846471786499023],["▁piano",-11.846548080444336],["有关",-11.8466157913208],["wah",-11.846697807312012],["▁اصلی",-11.846802711486816],["▁uzun",-11.846818923950195],["单位",-11.846829414367676],["rti",-11.846835136413574],["য়া",-11.846850395202637],["నం",-11.8468599319458],["lal",-11.846867561340332],["▁kini",-11.846989631652832],["קי",-11.847089767456056],["เครื่อง",-11.84711456298828],["▁مرد",-11.847147941589355],["כם",-11.847211837768556],["▁Khi",-11.847284317016602],["पर",-11.847295761108398],["▁प्रधानमन्त्री",-11.847477912902832],["heit",-11.847561836242676],["üm",-11.847563743591309],["θεί",-11.847590446472168],["ਰੇ",-11.847651481628418],["llu",-11.847657203674316],["▁punya",-11.84767723083496],["▁Hva",-11.84774112701416],["ძ",-11.847764015197754],["매",-11.84779453277588],["ที",-11.847935676574709],["tár",-11.84799098968506],["话",-11.848050117492676],["ଥ",-11.84810733795166],["ović",-11.84813404083252],["ගෙ",-11.84823513031006],["ซี",-11.848278999328612],["▁oedd",-11.848289489746094],["Jo",-11.848328590393066],["▁හැකි",-11.848379135131836],["skem",-11.848398208618164],["군",-11.848426818847656],["▁წ",-11.848451614379885],["▁analiz",-11.848474502563477],["▁short",-11.848495483398438],["속",-11.848587989807127],["wu",-11.848594665527344],["ास",-11.848594665527344],["▁форум",-11.8486909866333],["cep",-11.848708152770996],["▁stol",-11.848713874816896],["ങ്ക",-11.848731994628906],["▁המו",-11.848742485046388],[":20",-11.84878921508789],["▁atsi",-11.848831176757812],["您的",-11.84888744354248],["▁vivo",-11.848954200744627],["维",-11.84897518157959],["ադ",-11.849143028259276],["RI",-11.84922218322754],["дзя",-11.849237442016602],["▁baz",-11.849245071411133],["lok",-11.849275588989258],["שי",-11.849288940429688],["Google",-11.84938907623291],["▁vidi",-11.849442481994627],["이나",-11.849485397338867],["▁bởi",-11.849527359008787],["ٹی",-11.84959888458252],["oo",-11.84962272644043],["▁ôl",-11.849635124206545],["▁Է",-11.849642753601074],["▁había",-11.84974479675293],["▁bahasa",-11.849748611450195],["▁rett",-11.849903106689451],["жен",-11.849908828735352],["▁лишь",-11.849946975708008],["产",-11.849979400634766],["▁खा",-11.850032806396484],["▁Suid",-11.850075721740724],["▁kwe",-11.85013484954834],["经营",-11.850196838378906],["▁situation",-11.850292205810549],["▁mục",-11.850313186645508],["▁ହୋଇ",-11.850321769714355],["lista",-11.850347518920898],["ంద",-11.850443840026855],["▁емес",-11.850455284118652],["▁nič",-11.85048484802246],["▁2.0",-11.850614547729492],["เกิด",-11.850635528564451],["لام",-11.850659370422363],["▁ભ",-11.850671768188477],["哈",-11.850696563720703],["▁gdy",-11.850740432739258],["▁열",-11.850749015808104],["ями",-11.850773811340332],["▁කල",-11.85083293914795],["jski",-11.85087776184082],["98",-11.850895881652832],["▁Bin",-11.850953102111816],["נס",-11.850981712341309],["ుడు",-11.85100269317627],["▁конечно",-11.851119995117188],["ffe",-11.851123809814451],["患者",-11.851170539855955],["ināt",-11.851190567016602],["▁reag",-11.85128402709961],["▁програм",-11.85153865814209],["-11",-11.85157
2036743164],["▁tə",-11.851602554321287],["最新",-11.851664543151855],["▁እንደሚ",-11.851733207702637],["owany",-11.85177993774414],["ggi",-11.851792335510254],["tés",-11.851805686950684],["稱",-11.85180950164795],["すると",-11.85182285308838],["elles",-11.85183811187744],["టు",-11.851885795593262],["iana",-11.851892471313477],["开展",-11.851936340332031],["▁ಸುದ್ದಿ",-11.851971626281738],["ovanje",-11.851996421813965],["▁mul",-11.852010726928713],["▁nàng",-11.852028846740724],["▁Donald",-11.852113723754885],["ОН",-11.85216999053955],["niku",-11.85217571258545],["▁Nå",-11.852222442626951],["խ",-11.85232925415039],["อะไร",-11.85234546661377],["じ",-11.852386474609377],["▁turist",-11.852397918701172],["▁бъдат",-11.852412223815918],["▁daerah",-11.852415084838867],["▁celor",-11.852423667907717],["▁duen",-11.852455139160156],["享受",-11.852527618408203],["ீ",-11.852594375610352],["▁Dari",-11.852603912353516],["▁produktu",-11.852609634399414],["പ്പ്",-11.852611541748049],["-7",-11.852743148803713],["另外",-11.85283088684082],["spec",-11.852835655212402],["▁glo",-11.852845191955566],["off",-11.852897644042969],["▁merk",-11.85300350189209],["ydi",-11.853060722351074],["▁každý",-11.853102684020996],["▁tất",-11.853102684020996],["▁ము",-11.853269577026367],["▁hjem",-11.853318214416504],["isko",-11.853419303894045],["▁сол",-11.853423118591309],["ฟรี",-11.853522300720217],["bt",-11.853525161743164],["руч",-11.85354995727539],["▁진",-11.853625297546388],["▁Ча",-11.85365104675293],["रु",-11.853690147399902],["▁casos",-11.853757858276367],["▁mooi",-11.853825569152832],["▁bana",-11.853851318359377],["销售",-11.853886604309082],["yti",-11.85389518737793],["mik",-11.85401725769043],["Ti",-11.854034423828123],["cula",-11.854183197021484],["ంత",-11.85419464111328],["Za",-11.854198455810549],["▁ಚಿತ್ರ",-11.854228973388672],["▁бел",-11.854236602783203],["хме",-11.854246139526367],["vita",-11.854290008544922],["▁אך",-11.854334831237791],["ໄປ",-11.85434341430664],["▁deixar",-11.854458808898926],["سو",-11.854516983032228],["▁розвитку",-11.854537010192873],["נד",-11.85462474822998],["▁มา",-11.854655265808104],["tisk",-11.854816436767578],["▁result",-11.854860305786133],["system",-11.854866027832031],["ถ",-11.854876518249512],["▁Российской",-11.854900360107422],["ගන්න",-11.854940414428713],["▁స్",-11.85499668121338],["▁ամեն",-11.855002403259276],["▁varia",-11.855026245117188],["▁త",-11.855113983154297],["▁probleme",-11.855147361755373],["лива",-11.855198860168455],["▁grunn",-11.855223655700684],["oon",-11.855244636535645],["uer",-11.855449676513672],["ാന",-11.855555534362791],["als",-11.855575561523438],["▁kein",-11.85560417175293],["▁jelas",-11.855697631835938],["▁մեկ",-11.855875968933104],["▁vä",-11.855917930603027],["▁فلم",-11.855932235717772],["▁Miss",-11.855935096740724],["▁această",-11.85597324371338],["▁همچنین",-11.85597324371338],["▁đời",-11.855975151062012],["▁neve",-11.856125831604004],["▁kä",-11.856143951416016],["▁Line",-11.856172561645508],["▁ап",-11.85621452331543],["ებზე",-11.85627269744873],["▁לכל",-11.856311798095703],["▁thuộc",-11.856337547302246],["னர்",-11.85643482208252],["రే",-11.856439590454102],["▁fél",-11.856557846069336],["▁Qa",-11.856649398803713],["▁informasi",-11.856856346130373],["erende",-11.856873512268066],["▁контрол",-11.856950759887695],["▁justo",-11.856951713562012],["▁मिल",-11.856958389282228],["▁chiến",-11.857086181640623],["▁since",-11.857115745544434],["टर",-11.857155799865724],["▁sulle",-11.85719871520996],["iksi",-11.857218742370604],["ထ",-11.857259750366213],["cent",-11.857277870178224],["▁Ту",-11.
85732078552246],["▁største",-11.857341766357422],["▁పార్టీ",-11.85736846923828],["யே",-11.857377052307127],["ዱ",-11.857383728027344],["סט",-11.857433319091797],["▁שם",-11.857443809509276],["▁രൂപ",-11.857498168945312],["价格",-11.857538223266602],["▁tah",-11.857545852661133],["▁zgod",-11.857662200927734],["արկ",-11.857810020446776],["▁dro",-11.857851028442385],["▁ovo",-11.857890129089355],["ики",-11.857906341552734],["▁ابن",-11.858112335205078],["ټ",-11.85822868347168],["▁хар",-11.858271598815918],["▁مقابل",-11.858274459838867],["▁SU",-11.85831356048584],["sida",-11.85855197906494],["▁Jest",-11.85862636566162],["cur",-11.858660697937012],["فل",-11.858826637268066],["▁दा",-11.858848571777344],["зер",-11.859073638916016],["તો",-11.85907745361328],["šli",-11.85909652709961],["вид",-11.85914134979248],["其實",-11.859170913696287],["hé",-11.859198570251465],["▁siste",-11.859455108642578],["شا",-11.859469413757324],["uso",-11.859514236450195],["▁Бе",-11.859597206115724],["ировать",-11.85960578918457],["▁رہی",-11.859619140625],["▁करना",-11.859634399414062],["ole",-11.859699249267578],["▁양",-11.859728813171388],["▁сами",-11.85976219177246],["▁market",-11.85977840423584],["iter",-11.859808921813965],["▁ของ",-11.859862327575684],["▁પછી",-11.859933853149414],["▁Region",-11.859945297241213],["▁Ende",-11.859957695007324],["▁vui",-11.859965324401855],["▁почти",-11.860010147094728],["רים",-11.86003589630127],["▁Santiago",-11.860076904296877],["▁ਨ",-11.86010456085205],["不可",-11.86011028289795],["よく",-11.860116004943848],["tage",-11.860139846801758],["ät",-11.860200881958008],["▁sna",-11.860239028930664],["집",-11.86025047302246],["▁İlham",-11.860318183898926],["gie",-11.860349655151367],["дэг",-11.860390663146973],["▁xác",-11.860482215881348],["hely",-11.860588073730469],["▁giver",-11.860610961914062],["▁tuleb",-11.86065673828125],["▁ያለ",-11.860681533813477],["ଭ",-11.860740661621094],["sser",-11.860746383666992],["▁मो",-11.860816955566406],["▁religi",-11.860845565795898],["бри",-11.860851287841797],["лата",-11.860915184020996],["עד",-11.860965728759766],["รี",-11.861021995544434],["▁प्रयोग",-11.861026763916016],["▁سیاست",-11.861031532287598],["션",-11.861042976379396],["▁१९",-11.861127853393556],["▁oz",-11.861178398132324],["▁inspir",-11.861212730407717],["כו",-11.86128044128418],["เท",-11.861284255981444],["đa",-11.861305236816406],["▁sadece",-11.8614501953125],["Bi",-11.861495018005373],["etta",-11.861525535583496],["▁آهنگ",-11.86154556274414],["▁AP",-11.861591339111328],["▁प्रा",-11.861644744873049],["恩",-11.861682891845703],["▁שני",-11.861698150634766],["ลิ",-11.861730575561523],["▁តែ",-11.861737251281738],["கி",-11.86180019378662],["▁тэр",-11.86180591583252],["▁alternativ",-11.86181926727295],["▁जाता",-11.86182975769043],["sled",-11.861849784851074],["▁ನನ್ನ",-11.86191749572754],["īga",-11.861929893493652],["skoj",-11.861949920654297],["▁rond",-11.862140655517578],["▁според",-11.862387657165527],["سن",-11.862399101257324],["देखि",-11.862478256225586],["▁ወይም",-11.862486839294434],["τή",-11.862510681152344],["ումը",-11.862513542175291],["▁Stockholm",-11.862549781799316],["▁실",-11.862570762634276],["▁সা",-11.86260986328125],["ряд",-11.862619400024414],["سم",-11.86274528503418],["▁чита",-11.862810134887695],["▁views",-11.862815856933594],["▁samen",-11.862832069396973],["ዶ",-11.862838745117188],["▁Pur",-11.863143920898438],["▁consider",-11.863186836242676],["pala",-11.863191604614258],["chten",-11.863300323486328],["▁això",-11.863354682922363],["roj",-11.863422393798828],["▁madh",-11.863423347473145],["▁đâu",-11.8
63478660583496],["▁Maa",-11.863600730895996],["äl",-11.863759994506836],["děl",-11.86376953125],["য়ে",-11.86376953125],["▁بے",-11.863840103149414],["мын",-11.863880157470703],["▁tincidunt",-11.86390781402588],["▁Raj",-11.863945960998535],["ували",-11.863995552062988],["טער",-11.864046096801758],["▁Abdul",-11.864072799682615],["▁Public",-11.86414909362793],["เปิด",-11.864212036132812],["››",-11.864323616027832],["▁ارسال",-11.864336013793944],["▁vra",-11.864424705505373],["teľ",-11.86449146270752],["▁done",-11.864496231079102],["▁lid",-11.864614486694336],["ซ",-11.864617347717283],["lán",-11.864628791809082],["▁بیا",-11.864630699157717],["▁बैंक",-11.86465835571289],["▁دون",-11.865020751953123],["▁عليها",-11.865022659301758],["她的",-11.865035057067873],["▁szám",-11.865142822265623],["▁обо",-11.865235328674316],["▁।।",-11.865266799926758],["چە",-11.865324020385742],["▁tjetër",-11.865357398986816],["▁వార్తలు",-11.865365982055664],["亮",-11.865438461303713],["▁izany",-11.865464210510254],["ూ",-11.865508079528809],["▁परि",-11.86552906036377],["▁fai",-11.865572929382324],["май",-11.865591049194336],["▁сред",-11.865617752075195],["ава",-11.865618705749512],["เอ",-11.86570644378662],["▁σήμερα",-11.865721702575684],["▁Gul",-11.865777969360352],["▁tieto",-11.86581039428711],["▁nuwe",-11.86586570739746],["▁nisu",-11.865917205810549],["▁duy",-11.865967750549316],["▁LI",-11.86599349975586],["年的",-11.866084098815918],["ವರು",-11.86611557006836],["▁့",-11.866117477416992],["っ",-11.866199493408203],["сер",-11.86623477935791],["▁היו",-11.866253852844238],["rab",-11.866272926330566],["ෑ",-11.866297721862791],["▁bilgi",-11.866358757019045],["▁програма",-11.866437911987305],["▁experience",-11.866558074951172],["▁előtt",-11.866561889648438],["期間",-11.866578102111816],["সা",-11.866668701171877],["プ",-11.866686820983888],["▁KI",-11.866722106933594],["▁ligne",-11.866776466369627],["▁όχι",-11.86681079864502],["atur",-11.866878509521484],["▁şu",-11.866896629333496],["感じ",-11.866926193237305],["▁ST",-11.866988182067873],["▁enige",-11.866995811462402],["دم",-11.867019653320312],["▁Part",-11.867036819458008],["תו",-11.86704921722412],["办",-11.867053985595703],["▁различни",-11.867057800292969],["CH",-11.86709976196289],["šie",-11.867189407348633],["電影",-11.867241859436035],["▁գ",-11.867280960083008],["іл",-11.867368698120115],["▁další",-11.867395401000977],["▁kullan",-11.867430686950684],["▁Most",-11.867448806762695],["ches",-11.86745834350586],["οπ",-11.867471694946287],["▁کس",-11.86748504638672],["领域",-11.867646217346191],["ահ",-11.867871284484863],["ыр",-11.86790370941162],["▁clientes",-11.867955207824709],["LO",-11.86798858642578],["▁संघ",-11.867997169494627],["▁نرم",-11.868014335632324],["מל",-11.868030548095703],["▁usta",-11.868054389953612],["▁niveau",-11.868109703063965],["mény",-11.86815071105957],["Та",-11.868304252624512],["▁maz",-11.868431091308594],["▁innych",-11.868471145629885],["▁Portal",-11.868609428405762],["海外",-11.868654251098633],["▁ಗೆ",-11.868661880493164],["▁ఉంది",-11.868669509887695],["anan",-11.868734359741213],["lás",-11.868738174438477],["களுக்கு",-11.868751525878906],["ং",-11.868800163269045],["chan",-11.868876457214355],["▁ой",-11.868921279907228],["▁ایس",-11.868959426879885],["▁हुई",-11.869001388549805],["ところ",-11.86929416656494],["▁casi",-11.86932373046875],["▁Karl",-11.869328498840332],["▁nghiệm",-11.869355201721191],["▁Ста",-11.86937427520752],["▁інших",-11.869384765625],["▁hoved",-11.86940860748291],["jun",-11.869710922241213],["၍",-11.869718551635742],["▁ניתן",-11.869721412658691],["ต่างๆ"
,-11.869854927062988],["▁estava",-11.869894981384276],["Die",-11.869924545288086],["▁गा",-11.869935035705566],["кла",-11.86999225616455],["▁PE",-11.870003700256348],["vær",-11.87003231048584],["▁χωρίς",-11.87008285522461],["▁Þ",-11.870108604431152],["▁modul",-11.870197296142578],["ந",-11.870200157165527],["ใส่",-11.870222091674805],["製",-11.870223999023438],["▁Кор",-11.87038516998291],["▁मराठी",-11.87048625946045],["师",-11.870656967163086],["ዩ",-11.870701789855955],["সি",-11.870787620544434],["飯店",-11.870850563049316],["▁Cur",-11.87087059020996],["▁ас",-11.870882987976074],["Ga",-11.87105941772461],["▁sillä",-11.871131896972656],["▁preto",-11.871134757995604],["▁sest",-11.87116527557373],["▁Somaliland",-11.871188163757324],["通常",-11.87123966217041],["რჩ",-11.871240615844728],["▁bastante",-11.871325492858888],["▁kn",-11.871331214904783],["liq",-11.871440887451172],["▁ұ",-11.87144374847412],["托",-11.871475219726562],["▁vedno",-11.871506690979004],["▁ڪنهن",-11.871551513671877],["▁št",-11.871618270874023],["▁ط",-11.871655464172363],["▁ndër",-11.87172508239746],["wir",-11.871818542480469],["的问题",-11.871893882751465],["gui",-11.872069358825684],["▁ذ",-11.872116088867188],["र्स",-11.87225341796875],["sies",-11.872275352478027],["ivat",-11.872323989868164],["▁۲",-11.87234878540039],["▁घटना",-11.872430801391602],["മ്മ",-11.872440338134766],["nite",-11.872570991516112],["▁avait",-11.872653007507324],["နေ",-11.872675895690918],["▁болот",-11.872703552246094],["▁stav",-11.872725486755373],["▁मृत्यु",-11.872753143310549],["ています",-11.872797966003418],["▁série",-11.87286376953125],["čia",-11.872915267944336],["mål",-11.872926712036133],["种",-11.872962951660156],["bab",-11.872971534729004],["▁With",-11.87297534942627],["▁onu",-11.873002052307127],["▁Điều",-11.873006820678713],["▁câu",-11.873024940490724],["▁sad",-11.873095512390137],["ulu",-11.87309741973877],["acha",-11.873196601867676],["زا",-11.873239517211914],["ながら",-11.873351097106934],["▁khiến",-11.873398780822754],["▁disc",-11.873401641845703],["gara",-11.873480796813965],["発",-11.873538970947266],["▁Road",-11.873542785644531],["▁miatt",-11.873562812805176],["apos",-11.873590469360352],["▁přes",-11.873686790466309],["ffa",-11.873783111572266],["▁τι",-11.873788833618164],["▁ə",-11.873801231384276],["▁Sama",-11.873815536499023],["而是",-11.873842239379885],["▁vali",-11.873846054077148],["čan",-11.873929977416992],["ներն",-11.87393569946289],["rati",-11.873939514160156],["▁sola",-11.87395191192627],["ขอ",-11.873981475830078],["ент",-11.874014854431152],["ekî",-11.87407112121582],["ဂ",-11.87409496307373],["start",-11.874098777770996],["▁narod",-11.874190330505373],["▁ई",-11.874229431152344],["ส่วน",-11.874285697937012],["жан",-11.874483108520508],["▁gew",-11.874556541442873],["▁lek",-11.874613761901855],["kod",-11.874648094177246],["▁শ",-11.874711036682127],["良い",-11.87477970123291],["ès",-11.874818801879885],["▁fiz",-11.87482738494873],["▁Aga",-11.874829292297363],["▁meio",-11.87483024597168],["▁José",-11.874885559082031],["peri",-11.87489891052246],["▁kah",-11.874944686889648],["控",-11.875038146972656],["▁room",-11.875079154968262],["▁Edi",-11.8751220703125],["нес",-11.875129699707031],["▁Ett",-11.87515354156494],["▁vale",-11.875176429748535],["בל",-11.87520694732666],["ష",-11.875288009643556],["▁ضمن",-11.875322341918944],["▁rang",-11.875335693359377],["▁record",-11.875418663024902],["ព",-11.875435829162598],["▁Pay",-11.875517845153809],["▁mynd",-11.87551975250244],["83",-11.875547409057615],["▁cando",-11.875564575195312],["▁Power",-11.875703811645508],
["ינו",-11.875866889953612],["▁ega",-11.87590217590332],["啦",-11.875908851623535],["▁mani",-11.876038551330566],["▁tačiau",-11.876091003417969],["▁евро",-11.876097679138184],["▁luft",-11.876203536987305],["族",-11.87624168395996],["▁plo",-11.876259803771973],["▁mesta",-11.876372337341309],["мар",-11.876490592956545],["▁peso",-11.876522064208984],["较",-11.87653636932373],["▁جائیں",-11.876568794250488],["▁тым",-11.876587867736816],["▁iyong",-11.876615524291992],["لت",-11.876620292663574],["ירה",-11.876656532287598],["▁Bild",-11.876683235168455],["▁לע",-11.876686096191406],["మ్మ",-11.876716613769531],["▁Abu",-11.87676239013672],["為了",-11.87677764892578],["什麼",-11.87689208984375],["ả",-11.876935005187988],["▁dolore",-11.877010345458984],["▁זאת",-11.87703800201416],["▁py",-11.877086639404297],["IM",-11.877159118652344],["AZ",-11.877227783203123],["▁گھر",-11.877254486083984],["▁grave",-11.8772554397583],["▁access",-11.877259254455566],["▁kos",-11.877305030822754],["ંગ",-11.877315521240234],["奥",-11.877376556396484],["▁blowjob",-11.877399444580078],["▁Beliau",-11.877508163452148],["▁Kurd",-11.877528190612791],["▁багато",-11.877636909484863],["كا",-11.877806663513184],["▁каза",-11.877853393554688],["ລ",-11.877872467041016],["ията",-11.877887725830078],["endi",-11.877979278564451],["▁lloc",-11.878006935119627],["▁své",-11.878029823303224],["เอา",-11.87803840637207],["达到",-11.878046035766602],["劇",-11.878050804138184],["▁Ја",-11.878064155578612],["kirina",-11.878074645996094],["▁Juni",-11.87812328338623],["▁ну",-11.87816047668457],["စ္",-11.878170013427734],["▁мэдээ",-11.878190994262695],["kaa",-11.878275871276855],["▁internacional",-11.87827968597412],["▁филм",-11.878294944763184],["▁schnell",-11.878299713134766],["ቻ",-11.878332138061523],["▁vede",-11.878439903259276],["ház",-11.878644943237305],["▁BI",-11.878753662109377],["ଚ",-11.878755569458008],["按照",-11.87881565093994],["ປ",-11.878829002380373],["▁Mara",-11.878835678100586],["ท่าน",-11.878908157348633],["網路",-11.878999710083008],["дө",-11.879002571105955],["ですね",-11.879003524780272],["▁opis",-11.87900733947754],["sť",-11.87907600402832],["vest",-11.879094123840332],["故事",-11.879096031188965],["▁času",-11.879310607910156],["▁اسے",-11.879319190979004],["▁คน",-11.879324913024902],["▁spille",-11.879352569580078],["सु",-11.879387855529783],["ดา",-11.879411697387695],["គ",-11.87944793701172],["deki",-11.87948226928711],["PR",-11.87948513031006],["tö",-11.879487037658691],["▁þeirra",-11.879569053649902],["▁verde",-11.879599571228027],["ίζει",-11.879602432250977],["нее",-11.879603385925291],["ћа",-11.879624366760254],["▁three",-11.879730224609377],["▁viena",-11.879754066467283],["▁tava",-11.879782676696776],["▁money",-11.879825592041016],["▁leva",-11.879860877990724],["ρω",-11.879863739013672],["פו",-11.879901885986328],["▁depan",-11.879925727844238],["ζ",-11.879940032958984],["▁હતા",-11.879977226257324],["ён",-11.880115509033203],["เจ้า",-11.88012409210205],["▁UK",-11.880133628845217],["▁koma",-11.88015365600586],["▁nové",-11.880237579345703],["тки",-11.880243301391602],["tív",-11.880279541015623],["▁وهي",-11.880290031433104],["іі",-11.880343437194824],["▁86",-11.880414962768556],["साठी",-11.880484580993652],["▁instal",-11.880484580993652],["▁gdzie",-11.880526542663574],["лт",-11.8805513381958],["▁أحد",-11.880571365356444],["▁3000",-11.880632400512695],["▁הע",-11.880651473999023],["களின்",-11.880680084228516],["▁निर्णय",-11.88070583343506],["น่า",-11.880800247192385],["▁senti",-11.880810737609863],["วา",-11.880830764770508],["▁temperatura",-11.88084316
253662],["▁materia",-11.880855560302734],["▁bizim",-11.880866050720217],["▁הר",-11.881030082702637],["▁njih",-11.881096839904783],["▁före",-11.881134033203123],["ους",-11.881160736083984],["▁helst",-11.881247520446776],["▁donne",-11.881304740905762],["▁ndaj",-11.881317138671877],["CI",-11.88132667541504],["▁hea",-11.881428718566896],["▁clar",-11.88144588470459],["meni",-11.881566047668455],["▁уште",-11.88157844543457],["▁recent",-11.88159465789795],["ונג",-11.881780624389648],["▁particular",-11.881780624389648],["נית",-11.881817817687988],["▁facer",-11.881975173950195],["▁oyna",-11.881997108459473],["▁nasıl",-11.882040977478027],["▁образ",-11.882075309753418],["▁जि",-11.882150650024414],["ோ",-11.882190704345703],["▁festa",-11.88219928741455],["예",-11.8822603225708],["▁Gud",-11.88235569000244],["ალი",-11.88243579864502],["lės",-11.882464408874512],["nil",-11.882556915283203],["rina",-11.882576942443848],["▁nev",-11.882624626159668],["▁oper",-11.882719039916992],["▁тууралуу",-11.882922172546388],["рок",-11.882943153381348],["ющих",-11.882969856262209],["08.",-11.882978439331056],["pti",-11.883081436157228],["്യ",-11.883112907409668],["▁വര",-11.883262634277344],["’",-11.883286476135254],["low",-11.883294105529783],["▁သူ",-11.883294105529783],["กา",-11.883296966552734],["▁tämä",-11.88332748413086],["▁contro",-11.883389472961426],["▁objekt",-11.883400917053224],["رز",-11.883430480957031],["ชีวิต",-11.883527755737305],["▁genau",-11.883562088012695],["▁Той",-11.883670806884766],["stat",-11.88369846343994],["▁предлага",-11.883744239807127],["▁Nik",-11.883798599243164],["uu",-11.88380241394043],["▁մ",-11.883898735046388],["形成",-11.88393211364746],["సా",-11.883963584899902],["рын",-11.884005546569824],["▁Hat",-11.884044647216797],["бан",-11.88405418395996],["▁mỗi",-11.884073257446287],["/4",-11.884099006652832],["▁lig",-11.884169578552246],["▁دے",-11.884177207946776],["▁मुंबई",-11.88418674468994],["▁kunde",-11.884199142456056],["▁beter",-11.884243965148926],["▁cứ",-11.884268760681152],["१",-11.884286880493164],["part",-11.884329795837402],["▁level",-11.884343147277832],["▁Show",-11.884349822998049],["▁using",-11.884371757507324],["ເ",-11.884380340576172],["▁Тур",-11.884527206420898],["▁էին",-11.884629249572754],["ගත",-11.88467502593994],["▁මහතා",-11.884721755981444],["▁sicher",-11.884729385375977],["▁आपल्या",-11.884747505187988],["-8",-11.884757041931152],["ение",-11.884801864624023],["▁says",-11.884974479675291],["jek",-11.884984970092772],["▁وڃي",-11.885079383850098],["▁жеке",-11.885127067565918],["▁સા",-11.885139465332031],["▁nito",-11.885168075561523],["adores",-11.885178565979004],["樓",-11.88518524169922],["▁tjera",-11.885194778442385],["ڊ",-11.8853120803833],["මින්",-11.885539054870604],["ром",-11.88565444946289],["▁allen",-11.885767936706545],["▁života",-11.885812759399414],["цыя",-11.885825157165527],["▁último",-11.885945320129396],["▁jangan",-11.88597583770752],["ետ",-11.886058807373049],["▁عوام",-11.886085510253906],["▁facut",-11.88611888885498],["BO",-11.88615894317627],["utan",-11.886163711547852],["▁eman",-11.88626194000244],["ැ",-11.886265754699709],["वार",-11.886361122131348],["▁서비스",-11.886629104614258],["▁enam",-11.886670112609863],["цій",-11.886688232421877],["▁mencari",-11.886741638183594],["작",-11.8867826461792],["▁ISO",-11.886800765991213],["▁eens",-11.886805534362791],["לא",-11.88683795928955],["追",-11.886842727661133],["▁яких",-11.886881828308104],["▁forskellige",-11.88698387145996],["чат",-11.886995315551758],["▁सभी",-11.887001037597656],["▁boj",-11.88702392578125],["zbek",-11
.887045860290527],["▁차",-11.887077331542969],["kte",-11.887100219726562],["▁tyd",-11.887200355529783],["후",-11.887248039245604],["ੱ",-11.8872709274292],["isce",-11.887310028076172],["വ്",-11.887325286865234],["ления",-11.887383460998535],["▁मला",-11.887396812438965],["ば",-11.887404441833496],["▁LE",-11.88753604888916],["▁Ši",-11.887650489807127],["▁우리",-11.887723922729492],["▁night",-11.887744903564451],["▁tienen",-11.887767791748049],["स्ट",-11.88784122467041],["▁Ро",-11.887874603271484],["▁tamen",-11.887877464294434],["▁Við",-11.887914657592772],["▁duten",-11.88798999786377],["▁confi",-11.887994766235352],["▁persones",-11.888033866882324],["غل",-11.888099670410156],["مى",-11.888218879699709],["▁тот",-11.888237953186035],["▁Mem",-11.888323783874512],["スト",-11.88834285736084],["▁לת",-11.88845443725586],["▁79",-11.888505935668944],["fs",-11.888572692871094],["นัก",-11.888572692871094],["ското",-11.888591766357422],["▁ନି",-11.888604164123535],["τικό",-11.888629913330078],["▁precisa",-11.888710975646973],["▁گر",-11.888713836669922],["yg",-11.888748168945312],["বা",-11.888782501220703],["ナ",-11.888788223266602],["sad",-11.888833045959473],["▁valsts",-11.888849258422852],["ပါတယ္။",-11.888875961303713],["▁مزید",-11.888875961303713],["▁93",-11.888909339904783],["フ",-11.888938903808594],["lf",-11.888965606689451],["angkan",-11.889074325561523],["tut",-11.889180183410645],["▁Mata",-11.889237403869627],["▁conserva",-11.889249801635742],["vje",-11.889322280883787],["ź",-11.889334678649902],["тися",-11.889436721801758],["ья",-11.88958740234375],["▁cual",-11.889604568481444],["▁sexy",-11.889643669128418],["▁succes",-11.88967227935791],["▁limit",-11.889686584472656],["▁berlaku",-11.889689445495604],["飞",-11.889724731445312],["▁yılında",-11.889738082885742],["▁cit",-11.889758110046388],["▁13,",-11.889813423156738],["IA",-11.889843940734863],["ovou",-11.889866828918455],["▁chị",-11.889933586120604],["▁केले",-11.890002250671388],["ği",-11.890077590942385],["▁trá",-11.890085220336914],["PP",-11.890251159667969],["歲",-11.890259742736816],["date",-11.89026927947998],["▁Ach",-11.89028549194336],["质",-11.890295028686523],["▁ор",-11.890389442443848],["▁САД",-11.890580177307127],["קו",-11.890628814697266],["▁विभाग",-11.89070987701416],["▁specialist",-11.89072608947754],["▁sorte",-11.890759468078612],["▁moto",-11.890820503234863],["дік",-11.891051292419434],["▁ہمارے",-11.891073226928713],["▁tỉnh",-11.89109230041504],["▁немесе",-11.891112327575684],["▁ഹ",-11.891151428222656],["▁eum",-11.89116668701172],["owie",-11.89121150970459],["▁hafi",-11.891212463378906],["DI",-11.891230583190918],["▁habari",-11.891347885131836],["▁ਜੋ",-11.891364097595217],["▁гол",-11.891365051269531],["▁wäre",-11.891387939453123],["изм",-11.89146327972412],["EM",-11.891523361206056],["ions",-11.891578674316406],["σεις",-11.891661643981934],["▁برابر",-11.891700744628906],["ക്ഷ",-11.891782760620115],["▁ສ",-11.891818046569824],["▁fata",-11.891844749450684],["▁ghi",-11.891852378845217],["્સ",-11.891934394836426],["Տ",-11.89194679260254],["▁Papa",-11.892043113708496],["овых",-11.892062187194824],["פים",-11.89207935333252],["年度",-11.892126083374023],["дж",-11.892210960388184],["ਥ",-11.892239570617676],["anja",-11.89224910736084],["ുകൾ",-11.892294883728027],["ਲੇ",-11.892471313476562],["▁nær",-11.892476081848145],["▁Tema",-11.89247703552246],["▁მხოლოდ",-11.89250659942627],["▁nosa",-11.8925199508667],["āk",-11.892537117004396],["אן",-11.892792701721191],["▁grado",-11.893081665039062],["類",-11.893170356750488],["iwa",-11.893181800842283],["▁چی",-11.8932
24716186523],["▁avere",-11.893237113952637],["▁Република",-11.893305778503418],["▁chung",-11.893312454223633],["▁volna",-11.893360137939451],["කා",-11.893498420715332],["นํา",-11.893521308898926],["ដែល",-11.89357089996338],["lerinde",-11.893582344055176],["▁[1]",-11.893630981445312],["▁1500",-11.893632888793944],["▁podle",-11.893657684326172],["ச்ச",-11.893662452697754],["▁đàn",-11.893716812133787],["qué",-11.893781661987305],["▁هست",-11.893790245056152],["▁قسم",-11.893840789794922],["ინ",-11.893860816955566],["▁Opp",-11.893865585327148],["สาว",-11.893892288208008],["ungs",-11.893965721130373],["gres",-11.894013404846191],["่ง",-11.894033432006836],["valt",-11.894047737121582],["不會",-11.894064903259276],["ști",-11.89410400390625],["mään",-11.89412784576416],["လဲ",-11.894185066223145],["▁thought",-11.89418888092041],["▁Maj",-11.894195556640623],["▁systém",-11.894201278686523],["ന്ന",-11.894254684448242],["іс",-11.894282341003418],["ox",-11.894292831420898],["ским",-11.894316673278809],["uur",-11.894332885742188],["▁certo",-11.894359588623049],["▁islam",-11.894411087036133],["▁ricerca",-11.89444065093994],["▁terdapat",-11.89445972442627],["▁ಮಾಡಿ",-11.89449405670166],["▁questions",-11.894495010375977],["леп",-11.894659042358398],["創",-11.894694328308104],["▁дарга",-11.894728660583496],["▁କି",-11.894829750061035],["▁ราคา",-11.894901275634766],["▁diesen",-11.89493179321289],["▁পর",-11.894994735717772],["▁Mam",-11.895012855529783],["klo",-11.895026206970217],["▁ära",-11.89507293701172],["ญ",-11.895106315612791],["▁seria",-11.89511775970459],["komst",-11.895176887512209],["▁3.0",-11.895182609558104],["▁Роз",-11.8952054977417],["▁معلومات",-11.895207405090332],["免",-11.895216941833496],["▁minder",-11.895228385925291],["िएको",-11.895249366760254],["▁مدیریت",-11.895323753356934],["ndar",-11.895392417907717],["ації",-11.895429611206056],["où",-11.895438194274902],["ingar",-11.895485877990724],["▁Zá",-11.895636558532717],["▁ដើម្បី",-11.895655632019045],["सि",-11.89570426940918],["ED",-11.89580535888672],["▁gael",-11.89590549468994],["▁וי",-11.895938873291016],["台北",-11.89609718322754],["黃",-11.896160125732422],["tea",-11.896240234375],["▁mengatakan",-11.896297454833984],["ети",-11.896428108215332],["▁ingkang",-11.89650058746338],["调",-11.896512985229492],["▁bax",-11.896573066711426],["園",-11.896581649780272],["▁vilket",-11.896592140197754],["があります",-11.896764755249023],["▁fut",-11.896768569946287],["▁الذين",-11.896838188171388],["▁큰",-11.896925926208496],["▁Қ",-11.896930694580078],["ミ",-11.89694595336914],["▁Monte",-11.896981239318848],["▁Labels",-11.896997451782228],["▁स्थानीय",-11.897051811218262],["UT",-11.89711570739746],["nél",-11.897150993347168],["druk",-11.897159576416016],["」。",-11.897164344787598],["▁bly",-11.897170066833496],["lka",-11.897196769714355],["▁ـ",-11.897218704223633],["不到",-11.897261619567873],["▁μέχρι",-11.897416114807127],["▁стол",-11.897465705871582],["تن",-11.897513389587402],["bun",-11.897515296936035],["yje",-11.897627830505373],["יע",-11.89763641357422],["▁sólo",-11.897644996643066],["Fa",-11.897671699523926],["pek",-11.89767360687256],["▁akar",-11.897704124450684],["▁Joan",-11.89774227142334],["▁плат",-11.897743225097656],["▁Ford",-11.897767066955566],["▁നിര്",-11.89777946472168],["▁Chính",-11.89780044555664],["▁phân",-11.897823333740234],["▁chr",-11.897855758666992],["▁Аз",-11.89787483215332],["ılan",-11.898072242736816],["▁15-",-11.898079872131348],["▁tend",-11.898187637329102],["યા",-11.898383140563965],["▁arti",-11.89838409423828],["ሕ",-11.898452758789062],["▁mix",-11.
898475646972656],["▁kli",-11.898509979248049],["▁restaurant",-11.898533821105955],["▁Само",-11.898576736450195],["rma",-11.898624420166016],["▁ever",-11.898658752441406],["اری",-11.898672103881836],["▁จ",-11.89870262145996],["▁AT",-11.8987398147583],["ಯೇ",-11.89885139465332],["ପି",-11.899042129516602],["лон",-11.899075508117676],["×",-11.899089813232422],["▁시작",-11.89912223815918],["more",-11.899136543273926],["ಪ್ಪ",-11.899151802062988],["ቴ",-11.899206161499023],["нал",-11.899234771728516],["▁einige",-11.89933967590332],["ότητα",-11.899415969848633],["▁sabe",-11.899426460266112],["вања",-11.899468421936035],["▁Win",-11.899534225463867],["нице",-11.899602890014648],["▁sexual",-11.899654388427734],["▁Sky",-11.899712562561035],["▁reform",-11.899768829345703],["ทอง",-11.899867057800291],["ാത്ത",-11.899953842163086],["▁jums",-11.89995574951172],["▁આપ",-11.899978637695312],["▁takie",-11.900047302246094],["程度",-11.900084495544434],["lau",-11.900176048278809],["ְ",-11.900191307067873],["▁فا",-11.900362014770508],["▁samma",-11.90036392211914],["ಪಿ",-11.900416374206545],["▁цар",-11.900537490844728],["▁Liga",-11.90056610107422],["ged",-11.900615692138672],["gam",-11.900622367858888],["eurs",-11.900738716125488],["容",-11.9007568359375],["க்கி",-11.900758743286133],["بو",-11.900774002075195],["▁لك",-11.900778770446776],["▁olsun",-11.90084457397461],["др",-11.900851249694824],["KI",-11.90086269378662],["каў",-11.90088176727295],["▁SC",-11.900897979736328],["▁عرب",-11.900956153869627],["文件",-11.901044845581056],["▁😃",-11.901105880737305],["▁mak",-11.901121139526367],["پا",-11.901138305664062],["▁between",-11.901168823242188],["▁traballo",-11.901193618774414],["▁happy",-11.90119743347168],["▁nós",-11.90124797821045],["▁مست",-11.901341438293455],["Du",-11.90137767791748],["1)",-11.901497840881348],["ほど",-11.90151309967041],["参与",-11.90157699584961],["甲",-11.901577949523926],["ትን",-11.901772499084473],["▁zim",-11.90184497833252],["▁ቀን",-11.901860237121582],["▁carte",-11.901869773864746],["專業",-11.901899337768556],["▁medzi",-11.902027130126951],["依",-11.90203857421875],["vā",-11.90206813812256],["▁30-",-11.902070999145508],["ész",-11.90207576751709],["▁חי",-11.90208625793457],["停",-11.902105331420898],["▁Pala",-11.902113914489746],["▁může",-11.90211582183838],["▁dre",-11.902198791503906],["▁تت",-11.902254104614258],["dav",-11.902328491210938],["פס",-11.902393341064451],["▁vực",-11.90248203277588],["tsu",-11.90259075164795],["▁miesto",-11.902594566345217],["▁mise",-11.902667999267578],["▁wszystko",-11.90267276763916],["▁JE",-11.902715682983398],["決定",-11.902722358703612],["IK",-11.902731895446776],["ിക",-11.902874946594238],["ATA",-11.902896881103516],["RU",-11.902918815612791],["▁सकता",-11.903076171875],["kita",-11.90313720703125],["•",-11.903164863586426],["▁gott",-11.903168678283691],["▁гана",-11.903228759765623],["▁سازی",-11.9032564163208],["▁depois",-11.903289794921877],["▁dễ",-11.903307914733888],["事情",-11.903363227844238],["▁கோ",-11.903382301330566],["▁afin",-11.903393745422363],["mara",-11.903412818908691],["▁डि",-11.903424263000488],["▁đúng",-11.90342617034912],["▁چین",-11.903430938720703],["ික",-11.903470039367676],["各種",-11.903498649597168],["NET",-11.903570175170898],["ଷ",-11.903635025024414],["▁Sho",-11.903639793395996],["IO",-11.903719902038574],["isiin",-11.903769493103027],["تين",-11.903786659240724],["▁sentido",-11.903838157653809],["ėti",-11.903923988342283],["▁نسبت",-11.903949737548828],["Ма",-11.903968811035156],["確認",-11.904077529907228],["▁regel",-11.904086112976074],["▁ನೀಡ",-11.904114723
205566],["▁ვა",-11.904114723205566],["çu",-11.904197692871094],["▁мед",-11.904197692871094],["otto",-11.904207229614258],["мор",-11.904218673706056],["לת",-11.904251098632812],["kkel",-11.904266357421877],["īgu",-11.90433120727539],["▁شوی",-11.9043550491333],["密",-11.90435791015625],["רה",-11.90439224243164],["하면",-11.90447235107422],["ແລະ",-11.904549598693848],["較",-11.90463161468506],["ście",-11.904644012451172],["▁hộ",-11.904732704162598],["▁קו",-11.904776573181152],["อร์",-11.904797554016112],["▁obliga",-11.904810905456545],["▁බ",-11.904829978942873],["ಿದ",-11.904878616333008],["▁edilməsi",-11.904888153076172],["▁offre",-11.90494155883789],["▁иска",-11.904947280883787],["Mail",-11.90495491027832],["ಯಿಂದ",-11.904999732971191],["ís",-11.905062675476074],["ossa",-11.905159950256348],["tib",-11.905244827270508],["▁Frank",-11.905255317687988],["heten",-11.905312538146973],["▁tomar",-11.905363082885742],["▁또는",-11.905406951904297],["▁prac",-11.905447006225586],["▁גר",-11.905505180358888],["▁zamanda",-11.905546188354492],["ඩි",-11.905683517456056],["ята",-11.905720710754396],["▁cảnh",-11.905741691589355],["ћ",-11.90574836730957],["▁তা",-11.90579891204834],["вин",-11.905885696411133],["▁добра",-11.905966758728027],["▁seksi",-11.905990600585938],["ရန်",-11.906012535095217],["үүд",-11.906170845031738],["▁ц",-11.906170845031738],["▁बना",-11.90617847442627],["▁FOTO",-11.906253814697266],["▁Jūs",-11.906325340270996],["itan",-11.906329154968262],["▁لیگ",-11.906330108642578],["ხვ",-11.90639591217041],["▁например",-11.906401634216309],["委",-11.906447410583496],["▁Му",-11.906458854675291],["▁19,",-11.90647029876709],["▁زن",-11.906590461730955],["▁Dé",-11.906607627868652],["წყ",-11.906648635864258],["▁предмет",-11.906657218933104],["▁Съ",-11.906676292419434],["ター",-11.906709671020508],["▁89",-11.90673828125],["CP",-11.906761169433594],["ږ",-11.906776428222656],["▁Скопје",-11.90678596496582],["▁(10)",-11.906798362731934],["▁dưới",-11.90682315826416],["▁Lind",-11.906872749328612],["▁ją",-11.907050132751465],["ឆ្នាំ",-11.907086372375488],["들의",-11.90711784362793],["▁gun",-11.907147407531738],["▁წელს",-11.907153129577637],["АР",-11.907198905944824],["介绍",-11.907200813293455],["iny",-11.90721607208252],["▁innlegg",-11.907236099243164],["របស់",-11.907245635986328],["бол",-11.90726375579834],["▁זיין",-11.90727996826172],["dyn",-11.907459259033203],["▁따라",-11.907504081726074],["勝",-11.90750789642334],["▁partie",-11.907548904418944],["▁pública",-11.907572746276855],["īgā",-11.907687187194824],["▁yksi",-11.90770149230957],["▁בני",-11.907713890075684],["▁культур",-11.907840728759766],["້",-11.907930374145508],["▁SK",-11.907966613769531],["fort",-11.907974243164062],["▁ರ",-11.907981872558594],["medi",-11.907994270324709],["asti",-11.908028602600098],["・・・",-11.908129692077637],["▁ü",-11.908302307128906],["▁ihn",-11.908330917358398],["ವರ",-11.908390998840332],["কার",-11.908395767211914],["性的",-11.908441543579102],["▁Dio",-11.908729553222656],["▁Escort",-11.908787727355955],["▁болох",-11.9088773727417],["▁образом",-11.908896446228027],["▁իրենց",-11.909103393554688],["▁Seri",-11.909114837646484],["▁აქვს",-11.909160614013672],["关注",-11.90916347503662],["▁dad",-11.90916633605957],["сць",-11.909249305725098],["Go",-11.909283638000488],["▁mort",-11.909347534179688],["▁mūsu",-11.909350395202637],["▁Cultura",-11.909398078918455],["雖然",-11.909432411193848],["တို႔",-11.90947151184082],["▁bentuk",-11.90947151184082],["▁були",-11.909541130065918],["ಿತ",-11.909578323364258],["ष्ट",-11.909692764282228],["▁לנו",-11.909784317016602]
,["▁implica",-11.909887313842772],["▁Personal",-11.909892082214355],["الم",-11.909905433654783],["▁yapı",-11.90992259979248],["▁dago",-11.910008430480955],["ىي",-11.910027503967283],["松",-11.910040855407717],["▁справ",-11.910073280334473],["▁ນີ້",-11.910114288330078],["▁pali",-11.910143852233888],["essä",-11.910199165344238],["▁JavaScript",-11.91023063659668],["ស្",-11.91036319732666],["ਦੀ",-11.910391807556152],["fec",-11.910392761230469],["stellen",-11.91045093536377],["▁ስ",-11.91045379638672],["як",-11.910507202148438],["▁second",-11.910626411437988],["HO",-11.9108304977417],["αγ",-11.910966873168944],["▁którym",-11.911060333251951],["പ്ര",-11.911067008972168],["▁합니다",-11.911130905151367],["tionen",-11.911133766174316],["▁الأول",-11.911218643188477],["食品",-11.911320686340332],["▁بحث",-11.91141128540039],["kî",-11.9114408493042],["acja",-11.911518096923828],["уп",-11.911524772644045],["▁situación",-11.911566734313965],["▁mad",-11.911640167236328],["▁ŝi",-11.911654472351074],["▁දෙ",-11.91168212890625],["▁ਪ੍ਰ",-11.911700248718262],["iški",-11.911725044250488],["▁glede",-11.911728858947754],["▁viss",-11.91198444366455],["▁יום",-11.912059783935549],["teli",-11.912164688110352],["▁Washington",-11.912198066711426],["弱",-11.912449836730955],["rate",-11.912471771240234],["▁هئا",-11.91252613067627],["伤",-11.912708282470703],["▁behov",-11.912775993347168],["▁saját",-11.912785530090332],["堂",-11.912869453430176],["들은",-11.912917137145996],["▁khí",-11.912920951843262],["mol",-11.912962913513184],["▁(8",-11.912969589233398],["▁hoje",-11.913045883178713],["нае",-11.913140296936035],["▁Läs",-11.913164138793944],["▁chce",-11.913273811340332],["ನೇ",-11.913290023803713],["▁Xunta",-11.913357734680176],["供",-11.91340160369873],["τρ",-11.91347599029541],["limi",-11.913514137268066],["▁luni",-11.913603782653809],["رب",-11.913728713989258],["eres",-11.913826942443848],["▁دوره",-11.913880348205566],["▁چون",-11.91391658782959],["ជ",-11.913928985595703],["▁ubi",-11.91397476196289],["▁‘‘",-11.913984298706056],["▁மா",-11.913995742797852],["▁Tie",-11.914033889770508],["▁hospital",-11.91412353515625],["▁iPad",-11.914427757263184],["lý",-11.91453456878662],["无法",-11.914566040039062],["oir",-11.914568901062012],["tle",-11.91461944580078],["öm",-11.91461944580078],["▁недо",-11.914624214172363],["▁проблема",-11.914626121520996],["gé",-11.914775848388672],["ides",-11.914871215820312],["▁guide",-11.914915084838867],["▁màu",-11.914934158325195],["ович",-11.915000915527344],["有效",-11.91504192352295],["▁usar",-11.915057182312012],["▁നല്ല",-11.91514492034912],["▁prendre",-11.915218353271484],["▁منذ",-11.915226936340332],["ชม",-11.915310859680176],["▁ក",-11.915432929992676],["团",-11.91549587249756],["▁fé",-11.915496826171877],["などの",-11.915507316589355],["▁चि",-11.91551685333252],["既",-11.915542602539062],["▁igra",-11.915589332580566],["န်",-11.9156494140625],["▁wer",-11.915696144104004],["ujemy",-11.915709495544434],["▁става",-11.915793418884276],["▁रा",-11.915794372558594],["gabe",-11.915800094604492],["ggio",-11.91582489013672],["▁ahol",-11.915892601013184],["▁città",-11.91594696044922],["▁admin",-11.916019439697266],["mma",-11.91610050201416],["nější",-11.916141510009766],["▁miss",-11.91623306274414],["flu",-11.91626262664795],["GI",-11.916266441345217],["▁پایان",-11.916268348693848],["▁пара",-11.916319847106934],["▁kto",-11.91648006439209],["itus",-11.916534423828123],["▁Chat",-11.916595458984377],["hala",-11.9166898727417],["ኮ",-11.91671371459961],["▁trả",-11.916762351989746],["▁форм",-11.91678524017334],["ဘ",-11.9167890548
70604],["▁παρά",-11.916793823242188],["ejo",-11.916834831237791],["иш",-11.916857719421388],["▁قوم",-11.916873931884766],["ილ",-11.916908264160156],["vs",-11.916922569274902],["DER",-11.91698932647705],["▁informasjon",-11.917040824890137],["▁gặp",-11.917080879211426],["▁дітей",-11.91708278656006],["ส์",-11.917084693908691],["▁nghe",-11.91714859008789],["▁jamais",-11.917278289794922],["▁chú",-11.917288780212402],["ყ",-11.917325019836426],["▁комп",-11.917436599731444],["▁bheith",-11.91746711730957],["stein",-11.917470932006836],["▁yapılan",-11.917487144470217],["▁NU",-11.91762638092041],["▁yê",-11.91763687133789],["เม",-11.917670249938965],["ούμε",-11.917710304260254],["قة",-11.91774082183838],["گو",-11.917749404907228],["▁søker",-11.917781829833984],["bara",-11.91789722442627],[".01.",-11.91797161102295],["rede",-11.918105125427246],["బ్",-11.918109893798828],["ភាព",-11.918129920959473],["čiai",-11.918131828308104],["iks",-11.918164253234863],["mız",-11.91819953918457],["令人",-11.918253898620604],["▁შ",-11.918309211730955],["ટી",-11.918333053588867],["▁مگر",-11.918353080749512],["になります",-11.918375968933104],["vend",-11.918404579162598],["サ",-11.91845703125],["▁रु",-11.918460845947266],["▁간",-11.918489456176758],["▁сот",-11.91852569580078],["тарды",-11.918662071228027],["▁قىلىش",-11.918665885925291],["▁ബി",-11.918768882751465],["ଂ",-11.918821334838867],["▁geldi",-11.918845176696776],["▁vệ",-11.918926239013672],["ਵਾ",-11.918952941894531],["▁కొత్త",-11.918994903564451],["▁оқу",-11.91899585723877],["▁cet",-11.91899871826172],["nimo",-11.919050216674805],["tę",-11.919206619262695],["▁egun",-11.919228553771973],["▁hecho",-11.919229507446287],["▁daga",-11.919264793395996],["skab",-11.919322967529297],["▁rokov",-11.919330596923828],["▁truyền",-11.919374465942385],["▁таа",-11.91939640045166],["nó",-11.919397354125977],["▁средств",-11.919515609741213],["▁projekta",-11.919525146484377],["▁atque",-11.919580459594728],["英国",-11.919697761535645],["ILI",-11.91972827911377],["▁арқылы",-11.919757843017578],["rif",-11.91980266571045],["اح",-11.919803619384766],["▁Respublikasının",-11.919806480407717],["学院",-11.91981315612793],["▁İl",-11.919814109802246],["/3",-11.919878959655762],["によって",-11.919896125793455],["aeth",-11.919903755187988],["íte",-11.919921875],["yev",-11.91992473602295],["▁Ly",-11.919964790344238],["したい",-11.919971466064451],["▁ថ្ងៃ",-11.920042991638184],["▁numai",-11.920121192932127],["▁році",-11.92016315460205],["சு",-11.92019748687744],["tse",-11.920236587524414],["▁بدن",-11.920236587524414],["дов",-11.920254707336426],["▁이용",-11.92026710510254],["증",-11.9202880859375],["격",-11.920392036437988],["Ы",-11.920413970947266],["ਕਾ",-11.920425415039062],["▁በሚ",-11.920434951782228],["▁דו",-11.920446395874023],["▁ก็",-11.920446395874023],["▁имеет",-11.920451164245604],["राज",-11.92054271697998],["Χ",-11.920574188232422],["โอ",-11.920598030090332],["▁360",-11.920631408691406],["ዲ",-11.920655250549316],["▁Τ",-11.920687675476074],["▁방",-11.92070770263672],["гра",-11.920804977416992],["Pe",-11.920817375183104],["▁lấy",-11.920907020568848],["▁Oktober",-11.920907974243164],["ră",-11.920931816101074],["SM",-11.920960426330566],["тон",-11.92098617553711],["▁regional",-11.921088218688965],["▁code",-11.921182632446287],["▁inca",-11.92123031616211],["▁plant",-11.92125129699707],["▁kommentar",-11.921252250671388],["▁своите",-11.921284675598145],["લો",-11.92135238647461],["▁túl",-11.921387672424316],["▁click",-11.921408653259276],["時代",-11.92142391204834],["یوں",-11.921449661254885],["▁აქ",-11.921449661254885],["▁
වල",-11.921598434448242],["▁ahaa",-11.921601295471191],["▁papel",-11.921648025512695],["ној",-11.921673774719238],["▁ഏ",-11.921695709228516],["יך",-11.921704292297363],["▁szó",-11.921704292297363],["лов",-11.921714782714844],["هما",-11.921727180480955],["ျ",-11.921730995178224],["▁acordo",-11.92174243927002],["▁يتم",-11.921745300292969],["▁què",-11.92175579071045],["ਿਆਂ",-11.921846389770508],["tong",-11.922016143798828],["dą",-11.922036170959473],["▁материал",-11.9220552444458],["▁luật",-11.922067642211914],["▁tượng",-11.922100067138672],["的发展",-11.922101974487305],["▁لوگوں",-11.92214012145996],["ських",-11.922260284423828],["▁७",-11.92227554321289],["بة",-11.922286033630373],["▁Dra",-11.922303199768066],["散",-11.922307014465332],["dė",-11.92233657836914],["▁اے",-11.92243480682373],["uc",-11.922465324401855],["▁sehen",-11.922491073608398],["▁సా",-11.922532081604004],["▁ಹೊಸ",-11.92265510559082],["▁ذات",-11.922682762145996],["▁ПО",-11.922769546508787],["▁cât",-11.922776222229004],["67",-11.922832489013672],["્યો",-11.9228515625],["փ",-11.923033714294434],["ച",-11.923059463500977],["еш",-11.923073768615724],["ruh",-11.923088073730469],["▁marzo",-11.92316436767578],["▁ڪا",-11.923239707946776],["▁osoby",-11.923245429992676],["द्",-11.923270225524902],["bruk",-11.923327445983888],["ലാ",-11.923330307006836],["imh",-11.923345565795898],["lát",-11.923412322998049],["tant",-11.923417091369627],["вој",-11.923439979553224],["▁الب",-11.923454284667969],["▁ගෙන",-11.923483848571776],["▁audio",-11.92349338531494],["极",-11.923539161682127],["▁Gold",-11.923643112182615],["▁Jakarta",-11.923650741577148],["▁zuten",-11.923661231994627],["威",-11.923748970031738],["▁div",-11.923750877380373],["▁Centre",-11.923824310302734],["вър",-11.92383861541748],["▁करता",-11.923857688903809],["▁מען",-11.923986434936523],["▁Len",-11.923991203308104],["▁chance",-11.924036026000977],["puol",-11.924059867858888],["кри",-11.924151420593262],["▁ам",-11.924165725708008],["▁basis",-11.92416763305664],["▁பல",-11.924179077148438],["▁đa",-11.924219131469728],["ffer",-11.924254417419434],["daan",-11.924293518066406],["мыз",-11.924306869506836],["▁June",-11.924330711364746],["▁ເ",-11.92439079284668],["այի",-11.92441177368164],["čku",-11.92442512512207],["mens",-11.924445152282717],["/2018",-11.924489974975586],["▁9-",-11.924556732177734],["work",-11.924643516540527],["кий",-11.924697875976562],["▁aceste",-11.924702644348145],["▁należy",-11.92478847503662],["塔",-11.924882888793944],["▁rezerv",-11.92491340637207],["צר",-11.924920082092283],["овете",-11.92494297027588],["acaq",-11.92507553100586],["stus",-11.925122261047363],["▁illetve",-11.92513942718506],["▁emo",-11.925360679626465],["▁సి",-11.92537784576416],["lín",-11.925399780273438],["ටි",-11.925414085388184],["▁cố",-11.925459861755373],["වු",-11.9254732131958],["ിനെ",-11.925496101379396],["獲得",-11.925519943237305],["▁कुनै",-11.925528526306152],["▁contract",-11.92563247680664],["▁Ora",-11.925643920898438],["fte",-11.925649642944336],["μι",-11.925894737243652],["▁ਆਪਣੇ",-11.925904273986816],["лаг",-11.925965309143066],["▁طراحی",-11.925965309143066],["escu",-11.925978660583496],["▁વધુ",-11.926024436950684],["לים",-11.92604923248291],["▁erst",-11.926067352294922],["jām",-11.926106452941896],["▁house",-11.926114082336426],["新聞",-11.926186561584473],["▁klein",-11.92625904083252],["בו",-11.926313400268556],["rende",-11.926365852355955],["вати",-11.926387786865234],["эм",-11.92642879486084],["ынын",-11.92647647857666],["лоо",-11.926519393920898],["▁lakini",-11.926579475402832],["▁مال",-11.9266
7007446289],["▁Iš",-11.926750183105469],["Ю",-11.926898002624512],["ვით",-11.92691135406494],["人士",-11.92691135406494],["▁coś",-11.926932334899902],["超过",-11.926959037780762],["▁теле",-11.926977157592772],["▁طريق",-11.927038192749023],["▁təhsil",-11.9270601272583],["ائر",-11.927081108093262],["▁dù",-11.9271240234375],["iera",-11.927156448364258],["uze",-11.927162170410156],["▁kontroll",-11.927202224731444],["▁басқа",-11.927244186401367],["toja",-11.927289009094238],["▁TP",-11.927328109741213],["rro",-11.92733097076416],["▁Fest",-11.927343368530272],["▁لأ",-11.927358627319336],["▁apoi",-11.92742156982422],["▁najbolj",-11.9274263381958],["ሸ",-11.927433013916016],["น้อย",-11.927471160888672],["▁nəfər",-11.927492141723633],["landı",-11.927510261535645],["eiros",-11.927531242370604],["eras",-11.92766571044922],["▁номер",-11.92769718170166],["▁estamos",-11.927734375],["进一步",-11.927751541137695],["一点",-11.927826881408691],["▁Municipal",-11.927831649780272],["盛",-11.927926063537598],["工具",-11.927935600280762],["▁råd",-11.927948951721191],["ပြ",-11.927966117858888],["▁ગુજરાતી",-11.928050994873049],["推出",-11.928140640258787],["▁agama",-11.92815399169922],["▁ocupa",-11.92825698852539],["lerden",-11.928258895874023],["mbu",-11.928321838378906],["▁מס",-11.928326606750488],["zul",-11.928342819213867],["遇到",-11.9283447265625],["▁ambayo",-11.928363800048828],["▁кое",-11.928385734558104],["ĝo",-11.92843246459961],["▁কা",-11.928433418273926],["hm",-11.92845058441162],["▁הפ",-11.928502082824709],["zzo",-11.92851734161377],["וב",-11.928547859191896],["▁allem",-11.928630828857422],["调查",-11.928634643554688],["▁Sto",-11.928640365600586],["▁fol",-11.928718566894531],["old",-11.92878246307373],["më",-11.92880630493164],["cla",-11.928955078125],["▁profit",-11.928963661193848],["▁तु",-11.9290132522583],["好的",-11.929055213928224],["▁900",-11.929080963134766],["▁нешто",-11.929136276245115],["▁secondo",-11.929176330566406],["류",-11.929191589355469],["ሲ",-11.92919921875],["▁എല്ലാ",-11.929224014282228],["fanya",-11.929265975952148],["▁szak",-11.92930507659912],["вест",-11.929365158081056],["έρ",-11.929372787475586],["▁Tä",-11.929402351379396],["▁pá",-11.929437637329102],["▁Buch",-11.92945384979248],["вам",-11.929512977600098],["▁చేసిన",-11.929573059082031],["▁દ",-11.929598808288574],["▁מר",-11.929685592651367],["▁cổ",-11.929720878601074],["iyor",-11.92977809906006],["▁dort",-11.92996883392334],["▁atra",-11.929971694946287],["ью",-11.930075645446776],["▁tās",-11.930136680603027],["▁October",-11.93015193939209],["▁GPS",-11.930206298828123],["▁cik",-11.93021297454834],["▁турган",-11.930347442626951],["ազ",-11.930408477783203],["สม",-11.93045711517334],["лийн",-11.930482864379885],["▁holde",-11.930485725402832],["mų",-11.930532455444336],["kira",-11.930562019348145],["▁saan",-11.930591583251951],["▁Kumar",-11.9306001663208],["最大",-11.93060302734375],["▁países",-11.930609703063965],["δε",-11.930644035339355],["▁max",-11.93075180053711],["▁ჩემი",-11.930771827697754],["haus",-11.930828094482422],["stil",-11.930846214294434],["▁semper",-11.93093204498291],["▁faktisk",-11.930954933166504],["▁toán",-11.931017875671388],["▁agosto",-11.931130409240724],["mot",-11.93113136291504],["▁kula",-11.93120002746582],["传",-11.931256294250488],["uş",-11.931265830993652],["▁हुन्",-11.931268692016602],["sının",-11.931509017944336],["dic",-11.931546211242676],["jah",-11.931602478027344],["▁aver",-11.931617736816406],["בן",-11.931652069091797],["▁થઈ",-11.931700706481934],["ške",-11.93175983428955],["UP",-11.931934356689451],["▁vast",-11.9319391250
61035],["一切",-11.93195343017578],["▁დღეს",-11.932029724121094],["▁træ",-11.932036399841309],["▁bonne",-11.93203830718994],["טע",-11.932089805603027],["ڪي",-11.932137489318848],["▁мас",-11.932185173034668],["這樣",-11.932242393493652],["ям",-11.932257652282717],["እ",-11.932413101196287],["oğlu",-11.93248462677002],["valo",-11.932514190673828],["ଲ୍",-11.932541847229004],["擁有",-11.932605743408203],["tui",-11.932607650756836],["▁Што",-11.93263816833496],["يرة",-11.93280792236328],["ద్ద",-11.932872772216797],["▁төр",-11.932910919189451],["▁১",-11.932938575744627],["DU",-11.932965278625488],["▁càng",-11.932973861694336],["▁reserva",-11.932998657226562],["こ",-11.93310832977295],["utta",-11.933125495910645],["▁Ré",-11.933181762695312],["iba",-11.933197975158691],["▁گهر",-11.933242797851562],["անց",-11.933267593383787],["▁intra",-11.93337631225586],["ირ",-11.933453559875488],["indu",-11.933489799499512],["▁Qo",-11.933552742004396],["Nu",-11.933561325073242],["▁توهان",-11.93366527557373],["jąc",-11.933691024780272],["靠",-11.933761596679688],["politik",-11.933847427368164],["▁Че",-11.933852195739746],["▁fint",-11.933873176574709],["▁ahora",-11.933951377868652],["ബ്",-11.933965682983398],["圈",-11.934009552001951],["вет",-11.934026718139648],["имо",-11.93403434753418],["▁tinh",-11.93407917022705],["▁ഇന്ത്യ",-11.934115409851074],["▁کابل",-11.934182167053224],["тыр",-11.934185981750488],["ල්ල",-11.934200286865234],["ntu",-11.934210777282717],["εία",-11.934470176696776],["▁davlat",-11.934494972229004],["▁minutos",-11.93449592590332],["▁Жа",-11.934497833251951],["▁gift",-11.93450164794922],["▁բան",-11.93451976776123],["Er",-11.934590339660645],["▁गते",-11.93462371826172],["▁którzy",-11.93480396270752],["▁CE",-11.934805870056152],["భ",-11.934809684753418],["▁prodotti",-11.934828758239746],["라는",-11.934927940368652],["lerinden",-11.935158729553224],["ելի",-11.935192108154297],["▁முதல்",-11.935221672058104],["▁erg",-11.93527126312256],["▁gjen",-11.93540859222412],["▁اثر",-11.93545150756836],["▁Island",-11.93554401397705],["iyan",-11.93556022644043],["▁Serikali",-11.935593605041504],["suk",-11.935596466064451],["▁rud",-11.935676574707031],["▁الز",-11.935697555541992],["يب",-11.935729026794434],["nnen",-11.935789108276367],["▁можуть",-11.935979843139648],["րի",-11.936105728149414],["▁diện",-11.936108589172363],["bord",-11.936165809631348],["▁politi",-11.936325073242188],["▁வர",-11.936341285705566],["喜",-11.936366081237791],["画",-11.93639850616455],["▁berri",-11.936446189880373],["လည္း",-11.936452865600586],["▁error",-11.93646240234375],["▁davam",-11.936504364013672],["賽",-11.93654727935791],["▁სულ",-11.936638832092283],["突然",-11.93677806854248],["2-",-11.936845779418944],["▁yahay",-11.936847686767578],["īt",-11.936856269836426],["▁class",-11.936860084533691],["වක්",-11.936894416809082],["▁πλ",-11.936903953552246],["▁стоит",-11.936944961547852],["▁imi",-11.936949729919434],["рап",-11.936960220336914],["との",-11.936982154846191],["▁stres",-11.937005043029783],["యు",-11.937049865722656],["▁mất",-11.937139511108398],["▁ciutat",-11.937141418457031],["▁башка",-11.937169075012209],["能夠",-11.937170028686523],["▁няколко",-11.937172889709473],["▁ቢ",-11.937195777893066],["▁synes",-11.937220573425291],["aad",-11.937226295471191],["ത്ര",-11.937307357788086],["ලු",-11.937386512756348],["▁lume",-11.937461853027344],["aŭ",-11.93752384185791],["▁жизнь",-11.937528610229492],["chte",-11.937549591064451],["▁Hij",-11.93757152557373],["▁Аб",-11.937641143798828],["▁केली",-11.937651634216309],["อา",-11.937726020812988],["ових",-11.9377422
33276367],["▁naka",-11.937742233276367],["▁כבר",-11.93775463104248],["For",-11.937793731689451],["พร้อม",-11.937803268432615],["ङ",-11.937954902648926],["lö",-11.938015937805176],["▁કરવા",-11.938015937805176],["离",-11.938187599182127],["▁Civil",-11.93819808959961],["HT",-11.93820095062256],["▁улс",-11.938207626342772],["バ",-11.938220024108888],["▁օր",-11.938268661499023],["dev",-11.938271522521973],["冰",-11.938292503356934],["▁რე",-11.938342094421388],["▁ari",-11.938432693481444],["▁प्रश्न",-11.938441276550291],["מר",-11.938499450683594],["tani",-11.938542366027832],["▁вэ",-11.93864631652832],["▁jonka",-11.938650131225586],["dest",-11.93878173828125],["añ",-11.938791275024414],["▁રીતે",-11.938836097717283],["ങ്ങള",-11.939101219177246],["▁gamle",-11.939146995544434],["دې",-11.939176559448242],["სთან",-11.939230918884276],["▁فن",-11.939290046691896],["नु",-11.939291954040527],["▁বাংলাদেশ",-11.93929958343506],["▁എം",-11.939301490783691],["▁þeir",-11.93936824798584],["▁എസ്",-11.939414978027344],["ည",-11.93943214416504],["備",-11.93952465057373],["ணி",-11.939532279968262],["▁hukum",-11.939573287963867],["▁vähän",-11.939577102661133],["▁använda",-11.939579010009766],["▁नगर",-11.939647674560549],["▁دوم",-11.939653396606444],["▁hemen",-11.939661979675291],["ძე",-11.939720153808594],["▁것은",-11.939732551574709],["▁škol",-11.939814567565918],["ుల",-11.939863204956056],["▁פֿאַר",-11.93987274169922],["uste",-11.939888954162598],["▁ຄົນ",-11.939911842346191],["ጎ",-11.939929008483888],["▁mapa",-11.940018653869627],["▁consum",-11.94004726409912],["až",-11.940091133117676],["▁спа",-11.940143585205078],["▁vist",-11.940217971801758],["▁1988",-11.940235137939451],["▁ഏറ്റവും",-11.940260887145996],["▁Лі",-11.940299034118652],["▁diversi",-11.94033432006836],["▁valamint",-11.940345764160156],["▁inden",-11.940350532531738],["▁కానీ",-11.940357208251951],["ப்பட்ட",-11.9403657913208],["ज़",-11.940534591674805],["dica",-11.940587043762209],["▁unos",-11.940593719482422],["理由",-11.9406156539917],["▁ඔය",-11.940632820129396],["▁mellett",-11.940718650817873],["prot",-11.940770149230955],["ൊരു",-11.940818786621094],["▁1945",-11.940834045410156],["▁süre",-11.940863609313965],["示",-11.940978050231934],["▁oblasti",-11.94101619720459],["dok",-11.94102668762207],["安排",-11.941052436828612],["▁termin",-11.941096305847168],["▁fotograf",-11.941157341003418],["layan",-11.94128704071045],["位置",-11.941302299499512],["▁امر",-11.941429138183594],["▁Thông",-11.941446304321287],["▁хо",-11.94158935546875],["▁Good",-11.941621780395508],["▁Note",-11.94173812866211],["äh",-11.941754341125488],["容易",-11.94191074371338],["▁parasito",-11.9419527053833],["▁මහ",-11.94198226928711],["ຢູ່",-11.94204807281494],["方案",-11.942106246948242],["οντας",-11.94214916229248],["מת",-11.942174911499023],["音樂",-11.94221019744873],["▁exemple",-11.942212104797363],["рд",-11.942216873168944],["▁सुरु",-11.942300796508787],["资金",-11.942363739013672],["▁આવે",-11.942367553710938],["▁стала",-11.942368507385254],["dari",-11.94239330291748],["સ્ટ",-11.942471504211426],["יג",-11.942547798156738],["▁වැඩි",-11.942575454711914],["▁lewe",-11.942608833312988],["đ",-11.942739486694336],["▁welche",-11.942770957946776],["▁offer",-11.942840576171877],["视",-11.942973136901855],["▁trực",-11.943007469177246],["Ø",-11.9430513381958],["▁Quando",-11.943102836608888],["錢",-11.943224906921388],["ghi",-11.943315505981444],["▁solid",-11.943357467651367],["▁Jeśli",-11.943438529968262],["ье",-11.943439483642578],["вой",-11.94351863861084],["teng",-11.943605422973633],["▁tir",-11.943615913391112],["
▁gach",-11.943632125854492],["▁Mungu",-11.943636894226074],["ква",-11.943653106689451],["ілі",-11.943653106689451],["ुन",-11.943714141845703],["▁două",-11.943824768066406],["▁nincs",-11.943825721740724],["內容",-11.943830490112305],["sau",-11.943836212158203],["ಯು",-11.943918228149414],["ాను",-11.94399642944336],["ությունների",-11.944198608398438],["▁нова",-11.944281578063965],["▁ав",-11.944345474243164],["▁amikor",-11.944374084472656],["▁né",-11.944390296936035],["领导",-11.944397926330566],["▁robi",-11.944464683532717],["▁cin",-11.944537162780762],["境",-11.94472599029541],["▁komplet",-11.944779396057127],["▁ول",-11.94479751586914],["▁جاتا",-11.944808959960938],["▁tine",-11.944924354553224],["สวย",-11.944928169250488],["립",-11.944942474365234],["ائل",-11.944998741149902],["คู่",-11.94501495361328],["egy",-11.945050239562988],["rü",-11.945147514343262],["čný",-11.945159912109377],["oji",-11.945167541503906],["▁θε",-11.945183753967283],["▁presentar",-11.94527530670166],["▁매",-11.94528579711914],["키",-11.945311546325684],["人は",-11.945344924926758],["pic",-11.945402145385742],["▁objetivo",-11.94541072845459],["နာ",-11.945422172546388],["▁jejich",-11.94542407989502],["▁comprar",-11.945486068725586],["▁skole",-11.94556999206543],["▁hizmet",-11.945649147033691],["▁kilka",-11.945667266845703],["保護",-11.945735931396484],["ებ",-11.94574737548828],["ांनी",-11.94575309753418],["▁paggamot",-11.945755958557127],["▁terbaik",-11.945791244506836],["▁nuestros",-11.945813179016112],["ologie",-11.945818901062012],["▁گذشته",-11.945829391479492],["тре",-11.945930480957031],["ცა",-11.945930480957031],["▁હું",-11.945958137512209],["▁ung",-11.945982933044434],["เรียน",-11.946030616760254],["▁materiale",-11.946093559265137],["▁ਕੀਤੀ",-11.946146965026855],["▁яшчэ",-11.946155548095703],["紙",-11.946228981018066],["▁лице",-11.946243286132812],["▁Iran",-11.946308135986328],["ەت",-11.946314811706545],["စု",-11.94643783569336],["▁estaba",-11.946527481079102],["▁klart",-11.946560859680176],["لف",-11.946572303771973],["▁vraiment",-11.946609497070312],["▁lé",-11.94674301147461],["ทําให้",-11.946746826171877],["▁Ք",-11.94684886932373],["▁власт",-11.94685173034668],["迪",-11.946890830993652],["▁Geld",-11.946931838989258],["vn",-11.946993827819824],["讲",-11.947028160095217],["生命",-11.947057723999023],["เพียง",-11.94719696044922],["▁fle",-11.94719696044922],["▁dello",-11.9473876953125],["▁عبر",-11.947455406188965],["uw",-11.947522163391112],["▁गरे",-11.947529792785645],["▁glu",-11.947553634643556],["ଛନ୍ତି",-11.947579383850098],["ଙ୍ଗ",-11.94760799407959],["▁oleks",-11.947632789611816],["▁bàn",-11.947670936584473],["▁obras",-11.947681427001951],["program",-11.94772720336914],["ient",-11.947874069213867],["▁बताए",-11.947883605957031],["▁فل",-11.947924613952637],["dó",-11.947957038879396],["юць",-11.948038101196287],["!»",-11.948041915893556],["坚持",-11.94805145263672],["ტე",-11.948086738586426],["श्च",-11.948092460632324],["▁tức",-11.94817352294922],["▁яны",-11.948201179504396],["րա",-11.94820785522461],["чке",-11.948335647583008],["▁His",-11.948458671569824],["dega",-11.948487281799316],["언",-11.948490142822266],["▁pouvez",-11.948513984680176],["▁Federal",-11.948553085327148],["▁dəfə",-11.948657989501951],["എ",-11.948663711547852],["ctor",-11.948678016662598],["ký",-11.948678016662598],["వే",-11.948701858520508],["▁rata",-11.948753356933594],["▁ges",-11.94875717163086],["▁smart",-11.94882583618164],["介紹",-11.948875427246094],["▁скоро",-11.948931694030762],["ác",-11.948944091796877],["▁നീ",-11.948970794677734],["▁ঢাকা",-11.94897747039795]
,["rite",-11.949018478393556],["▁protest",-11.949053764343262],["కీ",-11.949135780334473],["▁stu",-11.949148178100586],["▁onda",-11.94919490814209],["▁ئى",-11.949196815490724],["▁teha",-11.949265480041504],["ghe",-11.949359893798828],["▁وقال",-11.949362754821776],["pai",-11.94936752319336],["ποι",-11.949481010437012],["▁provincia",-11.949491500854492],["▁మె",-11.949502944946287],["▁ευ",-11.94955825805664],["▁kak",-11.94957447052002],["ေအာင္",-11.949580192565918],["▁dadka",-11.9495849609375],["ելով",-11.949684143066406],["▁Modern",-11.949687957763672],["ായിരുന്നു",-11.949772834777832],["▁डॉ",-11.949847221374512],["▁wame",-11.949911117553713],["vna",-11.949941635131836],["डे",-11.949994087219238],["டை",-11.9500093460083],["ின்",-11.950011253356934],["liyi",-11.950013160705566],["ακ",-11.950136184692385],["فق",-11.950185775756836],["▁Bha",-11.950223922729492],["▁representa",-11.950258255004885],["并不",-11.950277328491213],["▁King",-11.95032787322998],["▁həm",-11.950334548950195],["▁emas",-11.950419425964355],["▁୨",-11.950459480285645],["▁goede",-11.95050048828125],["▁khoản",-11.950516700744627],["merk",-11.95058250427246],["ค่า",-11.950688362121582],["рак",-11.950693130493164],["▁believe",-11.950708389282228],["ици",-11.950712203979492],["ကုိ",-11.95073699951172],["▁87",-11.950782775878906],["▁Cat",-11.950885772705078],["▁hún",-11.95094871520996],["獎",-11.950965881347656],["alka",-11.951142311096191],["ៗ",-11.951171875],["▁ref",-11.951236724853516],["▁Đại",-11.951265335083008],["ապ",-11.951305389404297],["ԱՐ",-11.951312065124512],["▁večer",-11.951340675354004],["ização",-11.951354026794434],["jur",-11.951476097106934],["inį",-11.951505661010742],["tala",-11.951661109924316],["▁juta",-11.951668739318848],["▁vinden",-11.95173168182373],["ұ",-11.951732635498049],["▁facile",-11.951760292053224],["▁nổi",-11.951807022094728],["баев",-11.951818466186523],["фе",-11.951916694641112],["▁Mereka",-11.951961517333984],["培训",-11.951961517333984],["▁adresa",-11.952012062072754],["lanan",-11.952038764953612],["ατ",-11.95212173461914],["▁Like",-11.952177047729492],["▁כש",-11.952178955078123],["ууд",-11.952205657958984],["▁bulate",-11.952322006225586],["eja",-11.952388763427734],["亲",-11.9524507522583],["▁memper",-11.9524564743042],["▁açık",-11.95248794555664],["▁Kos",-11.952553749084473],["▁lupa",-11.952625274658203],["гля",-11.952726364135742],["▁první",-11.952762603759766],["tere",-11.952766418457031],["▁olun",-11.95276927947998],["ראה",-11.952800750732422],["▁पहिलो",-11.952847480773926],["▁ਵੱਲੋਂ",-11.952848434448242],["▁کہنا",-11.953103065490724],["δι",-11.95319366455078],["▁abi",-11.953201293945312],["▁син",-11.953227996826172],["sang",-11.953267097473145],["覺得",-11.953267097473145],["zá",-11.953272819519045],["auf",-11.953312873840332],["▁Seite",-11.953320503234863],["▁End",-11.953324317932127],["▁modelo",-11.953351020812988],["▁trecut",-11.953356742858888],["▁չէ",-11.953372955322266],["▁одно",-11.95340061187744],["▁SH",-11.953446388244627],["▁жив",-11.95346450805664],["▁ezin",-11.95346736907959],["▁ให้",-11.953484535217283],["▁Î",-11.9534912109375],["▁supra",-11.953500747680664],["左",-11.953532218933104],["▁செய்ய",-11.953557014465332],["mmel",-11.953609466552734],["▁verso",-11.953660011291504],["▁altid",-11.95367431640625],["▁माग",-11.95370864868164],["▁מח",-11.953725814819336],["▁കോ",-11.953728675842283],["▁Master",-11.953763008117676],["dama",-11.953819274902344],["▁Wil",-11.953876495361328],["▁زمین",-11.953929901123049],["▁тре",-11.953938484191896],["ік",-11.95400333404541],["સી",-11.954167366027832],[
"eza",-11.954216003417969],["ക്കും",-11.954262733459473],["▁מש",-11.954277992248535],["看看",-11.954289436340332],["spon",-11.954306602478027],["ATI",-11.954339981079102],["▁اسی",-11.954368591308594],["▁próximo",-11.954541206359863],["▁RS",-11.95456600189209],["ദ്ധ",-11.954587936401367],["ండ",-11.954663276672363],["▁ഇത്",-11.954761505126951],["▁стане",-11.954792022705078],["schaft",-11.95483684539795],["▁wannan",-11.95497989654541],["▁Beach",-11.954981803894045],["▁vise",-11.954983711242676],["▁kalo",-11.955021858215332],["抗",-11.955028533935549],["sma",-11.955044746398926],["▁ott",-11.955209732055664],["▁today",-11.955248832702637],["laki",-11.95527458190918],["reiz",-11.955282211303713],["メ",-11.955344200134276],["fy",-11.955360412597656],["▁terre",-11.955392837524414],["▁hvert",-11.955459594726562],["▁Få",-11.955474853515623],["▁بندی",-11.955524444580078],["urinn",-11.955543518066406],["รูป",-11.955633163452148],["彩",-11.955756187438965],["▁แล้ว",-11.955788612365724],["▁նրա",-11.955805778503418],["\")",-11.95594596862793],["▁labor",-11.955965995788574],["并且",-11.95603370666504],["ျပီး",-11.95606803894043],["جد",-11.956099510192873],["କା",-11.95611572265625],["▁kle",-11.956156730651855],["▁texto",-11.956231117248535],["方向",-11.956241607666016],["▁správ",-11.956298828125],["89",-11.95631217956543],["Բ",-11.95633602142334],["▁års",-11.956400871276855],["▁អ",-11.956414222717283],["▁aunque",-11.956432342529297],["работ",-11.956486701965332],["▁Kali",-11.956497192382812],["▁اشاره",-11.956501960754396],["▁канал",-11.956533432006836],["或是",-11.956626892089844],["ვარ",-11.95666217803955],["▁khai",-11.956685066223145],["▁stad",-11.956689834594728],["▁hala",-11.95681381225586],["lərini",-11.956954002380373],["င်း",-11.95695686340332],["лес",-11.957018852233888],["bere",-11.957040786743164],["▁isku",-11.957093238830566],["▁Gan",-11.957103729248049],["▁своих",-11.957182884216309],["▁Ол",-11.95718765258789],["▁당",-11.95720672607422],["▁dor",-11.957222938537598],["▁chân",-11.957252502441406],["чо",-11.95734691619873],["▁ਫ",-11.957348823547363],["過去",-11.957353591918944],["▁centrum",-11.957358360290527],["లేదు",-11.957420349121094],["zone",-11.95744800567627],["▁८",-11.957483291625977],["▁viktig",-11.957526206970217],["▁January",-11.957640647888184],["▁cepat",-11.957645416259766],["▁abbiamo",-11.957669258117676],["▁commodo",-11.957672119140623],["▁cyn",-11.95770263671875],["രാ",-11.957728385925291],["rung",-11.95773696899414],["није",-11.957805633544922],["▁Še",-11.957845687866213],["▁\\\\",-11.957904815673828],["रि",-11.957951545715332],["▁áll",-11.958035469055176],["一家",-11.958040237426758],["▁hil",-11.958139419555664],["奖",-11.958185195922852],["▁مه",-11.958200454711914],["▁precum",-11.958328247070312],["▁бор",-11.958369255065918],["▁nėra",-11.95847225189209],["▁metod",-11.95855712890625],["▁khoa",-11.958565711975098],["түү",-11.95860195159912],["▁compra",-11.958603858947754],["eus",-11.95860481262207],["yk",-11.958666801452637],["▁sek",-11.958723068237305],["пат",-11.958724975585938],["ાઈ",-11.958773612976074],["▁Syn",-11.958831787109377],["▁срок",-11.958844184875488],["▁Prima",-11.958856582641602],["▁deixa",-11.95889377593994],["▁drie",-11.958988189697266],["结果",-11.959012985229492],["თი",-11.959160804748535],["āla",-11.959217071533203],["▁dài",-11.959285736083984],["消息",-11.959310531616213],["▁кино",-11.959339141845703],["њето",-11.959360122680664],["сына",-11.959369659423828],["ես",-11.959407806396484],["▁דא",-11.959532737731934],["▁norma",-11.95954418182373],["▁Mə",-11.959609985351562],["aires",-1
1.959646224975586],["▁जारी",-11.95966911315918],["▁اردو",-11.959735870361328],["ọ",-11.95980739593506],["▁تكون",-11.95982837677002],["▁Xu",-11.959925651550291],["lav",-11.959948539733888],["▁տ",-11.959969520568848],["▁ekspert",-11.959994316101074],["▁количество",-11.960002899169922],["▁yarat",-11.960017204284668],["ของคุณ",-11.960026741027832],["▁ন",-11.960063934326172],["rid",-11.96018886566162],["ಕ್ಷ",-11.960196495056152],["شی",-11.960201263427734],["▁satt",-11.960208892822266],["▁Vse",-11.960233688354492],["▁нэр",-11.960268020629885],["เอง",-11.96027660369873],["▁pomoc",-11.960289001464844],["▁able",-11.960301399230955],["最大的",-11.96035861968994],["▁지역",-11.960389137268066],["78",-11.960412979125977],["▁USD",-11.960450172424316],["▁stesso",-11.960472106933594],["▁विषय",-11.96049976348877],["▁Time",-11.960687637329102],["▁अहिले",-11.960705757141112],["amba",-11.960847854614258],["ores",-11.960851669311523],["▁Gli",-11.960943222045898],["Or",-11.960945129394531],["▁raibh",-11.960945129394531],["гал",-11.9613037109375],["רות",-11.961430549621582],["▁pose",-11.961448669433594],["运",-11.961463928222656],["mam",-11.96152400970459],["▁pense",-11.961549758911133],["▁منها",-11.961549758911133],["▁position",-11.961567878723145],["▁ලංකා",-11.961569786071776],["▁本",-11.96157169342041],["無法",-11.961578369140623],["▁შორის",-11.961615562438965],["▁према",-11.961679458618164],["ሁ",-11.961698532104492],["▁wollen",-11.961703300476074],["trice",-11.961706161499023],["▁ಮು",-11.961725234985352],["▁వే",-11.961743354797363],["▁morgen",-11.961804389953612],["▁agli",-11.961841583251951],["▁dục",-11.96186351776123],["▁možné",-11.961867332458496],["וו",-11.961896896362305],["▁party",-11.961915969848633],["▁ramai",-11.961949348449709],["블",-11.961977005004885],["جه",-11.962006568908691],["▁شر",-11.96207332611084],["ък",-11.962092399597168],["▁kanggo",-11.962105751037598],["tama",-11.962117195129396],["▁vuoden",-11.96212673187256],["กิน",-11.962166786193848],["▁bring",-11.962180137634276],["▁sand",-11.962265014648438],["▁HTML",-11.962331771850586],["▁Nguyễn",-11.962392807006836],["ۈپ",-11.96247100830078],["▁12-",-11.96255588531494],["夢",-11.962690353393556],["▁Health",-11.962712287902832],["▁henne",-11.96278190612793],["▁ዘ",-11.962787628173828],["▁ٽي",-11.962849617004396],["▁Karena",-11.962850570678713],["▁(7",-11.962854385375977],["ель",-11.962916374206545],["▁κάνει",-11.962916374206545],["▁januar",-11.962963104248049],["stop",-11.963001251220703],["▁કરે",-11.963014602661133],["Ү",-11.963129043579102],["▁Times",-11.963135719299316],["▁valg",-11.963146209716797],["▁avez",-11.963164329528809],["▁TripAdvisor",-11.963191032409668],["▁kvar",-11.963461875915527],["印",-11.963504791259766],["ngo",-11.963539123535156],["ներով",-11.963542938232422],["tsa",-11.963555335998535],["등",-11.963566780090332],["▁ato",-11.963584899902344],["▁որոնք",-11.96359157562256],["▁शब्द",-11.963594436645508],["▁שלנו",-11.963644981384276],["▁Arab",-11.963668823242188],["landi",-11.963685035705566],["gado",-11.963749885559082],["मो",-11.963899612426758],["χα",-11.963909149169922],["▁सय",-11.963951110839844],["▁difícil",-11.963993072509766],["ort",-11.964073181152344],["▁ziren",-11.96408748626709],["▁svenska",-11.964141845703123],["▁mac",-11.96422004699707],["скай",-11.964250564575195],["▁स्व",-11.9642972946167],["▁प्रहरी",-11.964458465576172],["ကြီး",-11.964478492736816],["▁Menteri",-11.964484214782717],["▁aastal",-11.964509963989258],["▁première",-11.96456527709961],["▁Mol",-11.964584350585938],["▁мог",-11.964619636535645],["▁ਦੀਆਂ",-11.964624404
907228],["▁участие",-11.964629173278809],["▁মে",-11.964707374572754],["ĝ",-11.96473217010498],["▁Dei",-11.964751243591309],["▁شبکه",-11.964800834655762],["▁ABŞ",-11.964814186096191],["bina",-11.96482276916504],["tsi",-11.964825630187988],["ZE",-11.96491813659668],["▁awal",-11.96509075164795],["вор",-11.965100288391112],["▁vždy",-11.965188026428224],["▁laten",-11.96522331237793],["▁mentre",-11.965229034423828],["ኒ",-11.96523380279541],["lais",-11.965320587158203],["oda",-11.965359687805176],["මු",-11.965398788452148],["▁Người",-11.96540641784668],["▁hitta",-11.96543788909912],["▁ką",-11.965438842773438],["▁људи",-11.965487480163574],["▁berikut",-11.965545654296877],["公共",-11.965590476989746],["▁срещу",-11.96559715270996],["▁Bli",-11.965692520141602],["▁League",-11.965716361999512],["▁kia",-11.965761184692385],["▁matka",-11.965794563293455],["▁cosas",-11.965848922729492],["▁kopi",-11.965856552124023],["ajiem",-11.965863227844238],["▁लोग",-11.965866088867188],["▁havas",-11.965933799743652],["▁projet",-11.965935707092283],["เสียง",-11.9659423828125],["▁sob",-11.965950012207031],["目标",-11.96601104736328],["▁decir",-11.966017723083496],["▁School",-11.966042518615724],["サイト",-11.966119766235352],["▁vak",-11.966123580932615],["▁Tämä",-11.966127395629885],["յի",-11.966157913208008],["foto",-11.96616554260254],["دة",-11.966263771057127],["മു",-11.96629238128662],["ālā",-11.966301918029783],["▁drugih",-11.966347694396973],["ρη",-11.96636962890625],["лын",-11.966407775878906],["ична",-11.966485023498535],["mk",-11.966489791870115],["▁አበባ",-11.966527938842772],["▁సం",-11.966629028320312],["▁Smart",-11.966644287109377],["換",-11.966689109802246],["▁לפי",-11.96670627593994],["ntis",-11.966737747192385],["zí",-11.96679401397705],["sinde",-11.966808319091797],["▁Hum",-11.966808319091797],["khe",-11.96681022644043],["▁برخی",-11.966818809509276],["ווי",-11.966830253601074],["▁była",-11.966940879821776],["▁Nas",-11.967055320739746],["▁sec",-11.967170715332031],["▁진행",-11.967209815979004],["▁చెప్ప",-11.967222213745115],["▁podczas",-11.967226028442385],["▁diper",-11.967257499694824],["▁newydd",-11.96726417541504],["▁būt",-11.967275619506836],["ਣਾ",-11.967297554016112],["一樣",-11.967304229736328],["▁geld",-11.96730899810791],["▁bild",-11.967309951782228],["ۈش",-11.967313766479492],["ಂತೆ",-11.967413902282717],["UNG",-11.967415809631348],["▁hanem",-11.967430114746094],["▁رس",-11.967476844787598],["ót",-11.967514038085938],["γα",-11.967557907104492],["▁obchod",-11.967558860778809],["ชา",-11.967615127563477],["150",-11.967623710632324],["बि",-11.967658042907717],["▁நீ",-11.967673301696776],["జి",-11.967778205871582],["▁musí",-11.96779441833496],["ваць",-11.967818260192873],["tog",-11.967955589294434],["▁생각",-11.96799087524414],["ခ်က္",-11.968015670776367],["▁दर",-11.968016624450684],["▁klient",-11.968059539794922],["▁torna",-11.968077659606934],["нні",-11.968291282653809],["▁chaque",-11.968300819396973],["▁čia",-11.968318939208984],["▁83",-11.968343734741213],["▁stare",-11.96836280822754],["‪",-11.968371391296388],["▁chuẩn",-11.968403816223145],["čki",-11.968433380126951],["well",-11.9685640335083],["ತಾ",-11.96867561340332],["▁bomo",-11.968708992004396],["▁perempuan",-11.9688081741333],["▁bulunan",-11.968809127807615],["▁spil",-11.968865394592283],["jući",-11.96888256072998],["īgs",-11.968947410583496],["▁elu",-11.96898078918457],["▁territorio",-11.969088554382324],["ifik",-11.969196319580078],["mbar",-11.969252586364746],["▁خبریں",-11.969273567199709],["▁jusqu",-11.969276428222656],["▁Raja",-11.969329833984377],["ඩ්",
-11.969362258911133],["مال",-11.969371795654297],["Pre",-11.969507217407228],["ట్లు",-11.969559669494627],["県",-11.969596862792969],["▁claro",-11.96960163116455],["▁لوگ",-11.969606399536133],["▁فإن",-11.969642639160156],["Ex",-11.969663619995115],["تل",-11.96978759765625],["▁Oy",-11.969808578491213],["▁ኢትዮጵያ",-11.96987247467041],["CS",-11.969970703125],["lga",-11.970059394836426],["kraft",-11.97006893157959],["ներում",-11.970115661621094],["wala",-11.970121383666992],["▁eikä",-11.970173835754396],["▁vem",-11.970205307006836],["bod",-11.970206260681152],["▁Nar",-11.970376968383787],["▁mengambil",-11.970433235168455],["থ",-11.970455169677734],["▁akik",-11.97057056427002],["▁Nav",-11.970595359802246],["▁tadi",-11.970661163330078],["vă",-11.970706939697266],["dle",-11.970752716064451],["▁խ",-11.97077751159668],["▁jauh",-11.970819473266602],["هي",-11.970826148986816],["▁קל",-11.970831871032717],["▁เรา",-11.970842361450195],["▁استخدام",-11.970845222473145],["arra",-11.970921516418455],["▁назад",-11.970993995666504],["▁գր",-11.971116065979004],["87",-11.971135139465332],["late",-11.971283912658691],["▁olma",-11.971386909484863],["తు",-11.971399307250977],["史",-11.971416473388672],["୍ୟ",-11.971539497375488],["▁Mus",-11.971540451049805],["▁ट",-11.971563339233398],["▁ຢູ່",-11.971604347229004],["သွား",-11.971624374389648],["▁аж",-11.971652030944824],["▁Sai",-11.971675872802734],["ერ",-11.97178554534912],["▁چرا",-11.971901893615724],["두",-11.971911430358888],["イン",-11.971925735473633],["اک",-11.972016334533691],["ଟ୍",-11.97203254699707],["▁halv",-11.97206687927246],["▁universitet",-11.972210884094238],["▁boa",-11.972212791442873],["são",-11.972421646118164],["på",-11.972464561462402],["ორ",-11.972492218017578],["span",-11.972562789916992],["yî",-11.972648620605469],["▁ചില",-11.972750663757324],["▁Multi",-11.972826957702637],["▁small",-11.97283172607422],["hte",-11.97284984588623],["▁♥",-11.972942352294922],["▁tanah",-11.973000526428224],["▁1970",-11.973116874694824],["spa",-11.97318172454834],["▁иш",-11.973221778869627],["▁Muqdisho",-11.97323989868164],["▁सूचना",-11.973243713378906],["▁возможность",-11.973268508911133],["ash",-11.97334098815918],["▁Vám",-11.973371505737305],["ංග",-11.973376274108888],["ზ",-11.97354507446289],["ଡ",-11.97362232208252],["▁plats",-11.973706245422363],["मु",-11.973793983459473],["해야",-11.97380256652832],["एं",-11.973804473876951],["ุ",-11.973849296569824],["▁Stan",-11.973875045776367],["ระดับ",-11.973912239074709],["▁spolu",-11.973992347717283],["ಡೆ",-11.974034309387209],["责任",-11.974074363708496],["▁pool",-11.974143028259276],["▁bera",-11.974148750305176],["▁եք",-11.974242210388184],["ward",-11.974281311035156],["▁həyata",-11.974372863769531],["▁وه",-11.974394798278809],["▁ساخت",-11.97443675994873],["▁সংবাদ",-11.97445297241211],["▁Pon",-11.97446060180664],["▁միայն",-11.974464416503906],["▁នៃ",-11.97447109222412],["▁deb",-11.974552154541016],["▁સં",-11.974560737609863],["▁Bagi",-11.974580764770508],["lisi",-11.974592208862305],["▁தா",-11.974632263183594],["▁pam",-11.97470474243164],["▁sede",-11.974708557128906],["phi",-11.974849700927734],["▁varje",-11.974862098693848],["▁малко",-11.974891662597656],["▁ھ",-11.974952697753906],["▁ಬಾ",-11.975005149841309],["egi",-11.975040435791016],["ρό",-11.975089073181152],["ston",-11.975152015686035],["▁vaja",-11.975160598754885],["eld",-11.975171089172363],["▁fantastisk",-11.975171089172363],["黄",-11.975252151489258],["ทั้งหมด",-11.975296020507812],["不少",-11.975361824035645],["vul",-11.975372314453123],["▁քանի",-11.975383758544922],["▁op
ge",-11.975526809692385],["പ്പി",-11.975593566894531],["互联网",-11.975601196289062],["▁xuống",-11.975672721862791],["▁altres",-11.975778579711914],["▁RA",-11.975796699523926],["▁eivät",-11.975838661193848],["一位",-11.97591781616211],["्यो",-11.975927352905272],["ாக",-11.97593116760254],["人の",-11.976004600524902],["▁otras",-11.976030349731444],["▁propria",-11.97612476348877],["мож",-11.976140975952148],["▁சா",-11.976244926452637],["▁moeten",-11.976311683654783],["kš",-11.976313591003418],["HE",-11.976394653320312],["▁Bag",-11.976485252380373],["cə",-11.976534843444824],["▁போ",-11.97654628753662],["▁данас",-11.97663402557373],["nez",-11.976726531982422],["ভ",-11.976797103881836],["▁Tru",-11.97681999206543],["▁කටයුතු",-11.976882934570312],["▁работ",-11.976916313171388],["▁کمک",-11.976919174194336],["父母",-11.977006912231444],["북",-11.977028846740724],["▁blitt",-11.977072715759276],["增",-11.977073669433594],["领",-11.977131843566896],["▁Är",-11.977134704589844],["▁ros",-11.977139472961426],["ITA",-11.977194786071776],["罪",-11.97722053527832],["▁çdo",-11.97722625732422],["ointi",-11.977312088012695],["▁Ple",-11.977500915527344],["рма",-11.977580070495604],["▁Dünya",-11.977649688720703],["rc",-11.977672576904297],["ವಾ",-11.977716445922852],["▁කො",-11.977770805358888],["▁प्रकार",-11.977774620056152],["▁having",-11.977797508239746],["更多的",-11.977825164794922],["▁È",-11.977849006652832],["身体",-11.977871894836426],["▁इति",-11.97789478302002],["▁féin",-11.97793960571289],["Ј",-11.977970123291016],["ае",-11.978023529052734],["▁Hall",-11.978092193603516],["▁nevoie",-11.978102684020996],["ウ",-11.9781494140625],["▁RO",-11.978167533874512],["▁konkret",-11.978227615356444],["TU",-11.978232383728027],["▁эр",-11.978252410888672],["▁afirma",-11.978260040283203],["tér",-11.978269577026367],["▁знаю",-11.978373527526855],["▁zamanı",-11.978376388549805],["▁Pel",-11.978384017944336],["▁apud",-11.978438377380373],["cept",-11.978452682495115],["▁darah",-11.97848892211914],["nije",-11.978708267211914],["ປະ",-11.978739738464355],["▁cun",-11.978781700134276],["▁uch",-11.978793144226074],["рад",-11.978836059570312],["跳",-11.978858947753906],["၇",-11.978890419006348],["▁nekoliko",-11.978914260864258],["▁થાય",-11.978914260864258],["▁havia",-11.979055404663086],["▁khá",-11.979096412658691],["▁zde",-11.979131698608398],["▁mesa",-11.979199409484863],["рос",-11.979208946228027],["رة",-11.97922420501709],["ാതെ",-11.979312896728516],["▁tilbake",-11.979323387145996],["atus",-11.979410171508787],["喜歡",-11.979557991027832],["ión",-11.979571342468262],["▁către",-11.97959327697754],["▁anu",-11.979646682739258],["▁یعنی",-11.979717254638672],["▁लगा",-11.979728698730469],["▁quel",-11.97973346710205],["▁først",-11.979811668395996],["кө",-11.979812622070312],["▁Nhà",-11.97983169555664],["▁virtu",-11.979880332946776],["▁Вер",-11.98001194000244],["▁الغ",-11.9800443649292],["obra",-11.980082511901855],["的事情",-11.98008918762207],["▁pilihan",-11.980106353759766],["万元",-11.980140686035156],["▁WhatsApp",-11.980202674865724],["▁kaki",-11.980216026306152],["▁autre",-11.980283737182615],["бли",-11.98038101196289],["ธ",-11.980386734008787],["▁ilyen",-11.980392456054688],["ТО",-11.980425834655762],["梅",-11.980469703674316],["▁Putin",-11.980533599853516],["▁праз",-11.980534553527832],["▁‹‹",-11.980538368225098],["▁våre",-11.980563163757324],["▁blant",-11.980576515197754],["▁газ",-11.980589866638184],["▁Meng",-11.980621337890623],["tting",-11.980673789978027],["▁EM",-11.980693817138672],["做好",-11.980741500854492],["▁สําหรับ",-11.98074436187744],["▁uitge",-1
[... model vocabulary data: a fragment of a SentencePiece-style unigram tokenizer vocabulary, thousands of ["piece", log-probability] entries such as ["▁Thema", -11.981367111206056] and ["▁Juan", -12.007662773132324]. Subword pieces use the "▁" word-boundary marker and span many scripts (Latin, CJK, Devanagari, Arabic, Cyrillic, and others); scores are negative unigram log-probabilities, with rarer pieces scoring lower. The raw multi-megabyte dump, wrapped and truncated mid-entry, is elided here. ...]
263671877],["▁bá",-12.07080364227295],["▁aking",-12.07082748413086],["bry",-12.07087516784668],["than",-12.070878982543944],["קל",-12.070929527282717],["▁entrada",-12.07093906402588],["▁1986",-12.070975303649902],["▁Fit",-12.07098388671875],["tot",-12.070995330810549],["pir",-12.071002006530762],["▁ola",-12.071074485778809],["▁Tako",-12.071133613586426],["пози",-12.071171760559082],["▁আপনার",-12.071199417114258],["იც",-12.07123565673828],["िर",-12.071264266967772],["නම්",-12.07129955291748],["ిక",-12.0713472366333],["有的",-12.07137966156006],["+1",-12.071507453918455],["န္း",-12.071630477905272],["▁människor",-12.07164478302002],["ibi",-12.071647644042969],["ZI",-12.071659088134766],["itud",-12.07184600830078],["▁dich",-12.071891784667969],["▁లేదు",-12.071919441223145],["更是",-12.072013854980469],["▁продукти",-12.072077751159668],["форм",-12.072089195251465],["▁mengalami",-12.072093963623049],["▁באתר",-12.072113990783691],["▁kundër",-12.072230339050291],["▁ერთად",-12.072239875793455],["▁Jack",-12.072280883789062],["洋",-12.0723237991333],["鄉",-12.07235336303711],["บอก",-12.072394371032717],["abilir",-12.072469711303713],["▁ຜູ້",-12.072501182556152],["æg",-12.072505950927734],["▁nanti",-12.072535514831545],["▁already",-12.07253646850586],["▁υπάρχει",-12.07254409790039],["щите",-12.072587013244627],["μερ",-12.072599411010742],["▁months",-12.072745323181152],["▁רע",-12.072818756103516],["pers",-12.072823524475098],["▁perc",-12.072834014892578],["▁Сто",-12.072916984558104],["▁hát",-12.072932243347168],["▁ହ",-12.072937965393066],["優惠",-12.07296085357666],["▁calidad",-12.07303237915039],["▁Ou",-12.073060989379885],["▁tối",-12.073129653930664],["▁Ball",-12.073145866394045],["ፉ",-12.073217391967772],["Ë",-12.073307037353516],["ባቸው",-12.073322296142578],["рка",-12.07334041595459],["▁tuaj",-12.073413848876951],["▁triệu",-12.073434829711914],["▁منطقه",-12.073570251464844],["ദി",-12.07362461090088],["▁тэ",-12.073634147644045],["▁портал",-12.07366943359375],["▁Frage",-12.073793411254885],["▁използва",-12.073822975158691],["ሆኑ",-12.073854446411133],["iškai",-12.073859214782717],["▁jeung",-12.074153900146484],["ець",-12.074170112609863],["▁lä",-12.074177742004396],["▁mengu",-12.07421588897705],["▁хоёр",-12.074236869812012],["이라고",-12.074256896972656],["lih",-12.074277877807615],["▁kỹ",-12.074352264404297],["▁కొ",-12.074358940124512],["ujte",-12.07436466217041],["መጣ",-12.07436752319336],["▁amma",-12.07436752319336],["▁vezes",-12.074440956115724],["บาง",-12.074478149414062],["▁онд",-12.07448673248291],["ars",-12.074572563171388],["nev",-12.074708938598633],["▁ندارد",-12.074769973754885],["▁ਉਨ੍ਹਾਂ",-12.074769973754885],["পি",-12.074785232543944],["ਫ",-12.074822425842283],["▁مقام",-12.074823379516602],["▁اوس",-12.074827194213867],["ਮਾ",-12.074836730957031],["▁tage",-12.074844360351562],["▁брат",-12.074870109558104],["▁strada",-12.07491683959961],["EST",-12.074931144714355],["▁қол",-12.074932098388672],["▁uang",-12.074963569641112],["▁хамт",-12.074991226196287],["▁Kunst",-12.075042724609377],["▁tvar",-12.075048446655272],["今日",-12.07508373260498],["ült",-12.07509994506836],["lund",-12.075100898742676],["ισμό",-12.07514762878418],["▁tưởng",-12.0752534866333],["림",-12.075262069702148],["▁tranh",-12.075303077697754],["▁২",-12.075337409973145],["▁mesma",-12.075343132019045],["vanju",-12.075424194335938],["▁produtos",-12.075454711914062],["ain",-12.07554054260254],["止",-12.075541496276855],["גל",-12.075621604919434],["登",-12.075624465942385],["คา",-12.075638771057127],["▁страх",-12.075648307800291],["Car",-12.07573413
848877],["nang",-12.0757474899292],["zas",-12.075765609741213],["cola",-12.07586669921875],["▁FM",-12.075873374938965],["洛",-12.076061248779297],["▁Tuy",-12.076128005981444],["▁날",-12.076166152954102],["▁ging",-12.07618808746338],["▁vietā",-12.076199531555176],["▁chef",-12.076210975646973],["ず",-12.07632064819336],["▁១",-12.076354026794434],["▁SD",-12.07635498046875],["ranno",-12.076373100280762],["這是",-12.076397895812988],["▁geben",-12.076444625854492],["▁କା",-12.076444625854492],["▁riktigt",-12.076462745666504],["▁extrem",-12.076558113098145],["▁edición",-12.076600074768066],["hrá",-12.076661109924316],["▁planta",-12.076704025268556],["▁tenir",-12.076783180236816],["▁ruh",-12.07679557800293],["▁Пры",-12.076820373535156],["▁нису",-12.076838493347168],["▁وإ",-12.076949119567873],["یز",-12.077035903930664],["▁unseren",-12.07720947265625],["ωρ",-12.077224731445312],["nig",-12.077259063720703],["பு",-12.077308654785156],["观",-12.077313423156738],["▁ddim",-12.07732105255127],["▁ብዙ",-12.07734489440918],["▁tämän",-12.077346801757812],["keit",-12.077359199523926],["ուս",-12.077366828918455],["▁rau",-12.07737922668457],["▁проти",-12.07737922668457],["▁Öz",-12.077401161193848],["▁ป",-12.07745361328125],["▁Fr",-12.07746696472168],["▁bob",-12.077479362487791],["cing",-12.077484130859377],["▁sayı",-12.077526092529297],["AH",-12.077637672424316],["医",-12.077644348144531],["իր",-12.077669143676758],["▁الاست",-12.077776908874512],["tata",-12.07784366607666],["ед",-12.07787036895752],["異",-12.07787799835205],["▁అయితే",-12.077905654907228],["्र",-12.07794952392578],["PE",-12.077973365783691],["▁दिए",-12.077975273132324],["▁Пер",-12.077988624572754],["▁aliquam",-12.077998161315918],["irt",-12.078033447265623],["▁Unter",-12.078042030334473],["▁productos",-12.078062057495115],["▁webcam",-12.078083038330078],["▁Při",-12.078116416931152],["▁එක්ක",-12.078271865844728],["▁flo",-12.07827854156494],["温",-12.078292846679688],["偏",-12.078293800354004],["▁Վ",-12.07830810546875],["มิ",-12.078351020812988],["▁rub",-12.078354835510254],["▁זענען",-12.078356742858888],["ರೋ",-12.078425407409668],["▁punt",-12.07844066619873],["▁başladı",-12.078447341918944],["dera",-12.078481674194336],["▁xin",-12.07848834991455],["მდე",-12.078604698181152],["▁videos",-12.078710556030272],["ниците",-12.078728675842283],["ಮಿ",-12.078730583190918],["▁ово",-12.078742027282717],["▁ממש",-12.07880687713623],["▁yüksek",-12.078811645507812],["证",-12.078834533691406],["idu",-12.078864097595217],["▁лист",-12.07893180847168],["▁níos",-12.078943252563477],["▁ບໍ່",-12.078969955444336],["▁ties",-12.078972816467283],["wani",-12.078984260559082],["ninger",-12.079052925109863],["▁പറഞ്ഞ",-12.07906723022461],["zka",-12.079093933105469],["▁alguna",-12.07913875579834],["▁Bat",-12.07917022705078],["▁යුතු",-12.079193115234377],["▁شہر",-12.079235076904297],["▁volume",-12.079265594482422],["▁Jalan",-12.079302787780762],["▁vídeo",-12.079306602478027],["以后",-12.079327583312988],["▁rec",-12.07933235168457],["▁생",-12.079468727111816],["jö",-12.07955551147461],["▁قدرت",-12.07956600189209],["мов",-12.07958698272705],["▁mediante",-12.079605102539062],["DC",-12.07961368560791],["場所",-12.07962131500244],["▁lever",-12.079687118530272],["၅",-12.079718589782717],["▁таму",-12.0797700881958],["୪",-12.07979679107666],["資",-12.079833030700684],["ские",-12.07984447479248],["▁mamma",-12.07986831665039],["▁పి",-12.07991886138916],["▁informazioni",-12.079937934875488],["ಯೂ",-12.079996109008787],["▁roll",-12.0800142288208],["ulan",-12.08005142211914],["▁hours",-12.080072402954102],["料理",
-12.08008861541748],["▁encara",-12.080103874206545],["▁mēs",-12.080114364624023],["▁потрібно",-12.08014965057373],["▁Pradesh",-12.080154418945312],["▁imp",-12.080192565917969],["▁될",-12.080263137817385],["据",-12.080269813537598],["mê",-12.080334663391112],["вол",-12.080348014831545],["▁დროს",-12.080351829528809],["nec",-12.080354690551758],["▁razvoj",-12.080371856689451],["되어",-12.08045482635498],["पत्र",-12.080485343933104],["енко",-12.08051872253418],["▁timer",-12.080531120300291],["▁arbejde",-12.080536842346191],["▁ဒါ",-12.08061695098877],["▁හි",-12.08062744140625],["▁blogger",-12.080678939819336],["▁Владимир",-12.08069133758545],["▁HO",-12.080706596374512],["yek",-12.080792427062988],["▁waki",-12.080869674682615],["mid",-12.080877304077148],["kép",-12.080907821655272],["▁назва",-12.080986976623535],["▁letzten",-12.081007957458496],["随着",-12.081022262573242],["▁ktorí",-12.081064224243164],["ทํางาน",-12.081144332885742],["เก",-12.08120059967041],["▁someone",-12.081212043762209],["▁मि",-12.081222534179688],["▁haha",-12.081249237060549],["хід",-12.081256866455078],["vede",-12.081286430358888],["▁BU",-12.081314086914062],["pho",-12.081315994262695],["▁zoek",-12.081316947937012],["لۇق",-12.081328392028809],["▁Pad",-12.08144187927246],["咖啡",-12.081467628479004],["ligen",-12.081490516662598],["應該",-12.081518173217772],["▁neat",-12.08158016204834],["▁зохион",-12.081663131713867],["vier",-12.081742286682127],["▁negli",-12.081742286682127],["▁նախագահ",-12.081790924072266],["▁Ankara",-12.08181858062744],["▁doesn",-12.08202838897705],["قر",-12.08213233947754],["kova",-12.082196235656738],["റു",-12.08222770690918],["ája",-12.082242012023926],["▁pergi",-12.082250595092772],["▁ورځ",-12.082293510437012],["▁toto",-12.08233642578125],["دن",-12.082359313964844],["善",-12.082369804382324],["▁Hvad",-12.08245849609375],["深入",-12.082484245300291],["▁эми",-12.082541465759276],["maal",-12.082558631896973],["▁mesto",-12.082568168640137],["lent",-12.082608222961426],["▁адрес",-12.08265209197998],["յանի",-12.082661628723145],["റിയ",-12.082762718200684],["▁observa",-12.082765579223633],["yong",-12.082798957824709],["▁parlament",-12.082840919494627],["▁Thành",-12.082857131958008],["▁சில",-12.08287525177002],["еб",-12.082881927490234],["▁Али",-12.08301830291748],["▁albo",-12.08302402496338],["▁Grup",-12.08304500579834],["▁mogli",-12.083054542541504],["▁NY",-12.083088874816896],["▁ತಮ್ಮ",-12.08310604095459],["▁못",-12.083151817321776],["▁1987",-12.083248138427734],["▁พร้อม",-12.083277702331545],["▁قىل",-12.083293914794922],["tation",-12.083321571350098],["▁taba",-12.083337783813477],["▁سنڌي",-12.083341598510742],["工业",-12.083352088928224],["чай",-12.083355903625488],["▁visas",-12.083364486694336],["▁alma",-12.083416938781738],["▁מספר",-12.083505630493164],["சா",-12.08354377746582],["▁Ras",-12.083600044250488],["▁doctor",-12.083611488342283],["准备",-12.083619117736816],["▁curs",-12.083646774291992],["▁pensar",-12.083687782287598],["▁સે",-12.083715438842772],["▁diversos",-12.083792686462402],["▁Hierdie",-12.083795547485352],["୨",-12.08380889892578],["▁ക്ക",-12.083853721618652],["▁chết",-12.083873748779297],["▁dê",-12.084051132202148],["▁اف",-12.084094047546388],["▁सहयोग",-12.08413314819336],["▁(0",-12.084141731262209],["莫",-12.0841646194458],["详细",-12.084183692932127],["nki",-12.084189414978027],["entre",-12.084258079528809],["▁cél",-12.084263801574709],["લે",-12.08428955078125],["▁юу",-12.084429740905762],["▁اک",-12.084434509277344],["téri",-12.084486961364746],["▁funkci",-12.084503173828123],["▁märk",-12.084508895874023],[
"गे",-12.084565162658691],["ဖြစ်",-12.084639549255373],["▁wohl",-12.084705352783203],["▁адзін",-12.084779739379885],["▁cerita",-12.084805488586426],["▁дори",-12.084830284118652],["结",-12.08483600616455],["▁artist",-12.08486557006836],["lijk",-12.084918022155762],["護",-12.085058212280272],["▁فال",-12.08505916595459],["ttaa",-12.08509635925293],["▁gc",-12.0851469039917],["lene",-12.085174560546877],["rgan",-12.085214614868164],["▁membawa",-12.085248947143556],["▁buy",-12.085328102111816],["▁물",-12.085352897644045],["▁ผม",-12.085355758666992],["▁കഥ",-12.085390090942385],["▁yma",-12.08539867401123],["ობით",-12.085407257080078],["ungan",-12.08548355102539],["▁talk",-12.085498809814451],["▁пас",-12.085498809814451],["▁مرکزی",-12.085543632507324],["▁שהוא",-12.085590362548828],["▁තවත්",-12.085618019104004],["ők",-12.085658073425291],["вши",-12.085731506347656],["uren",-12.0857572555542],["▁1/2",-12.085795402526855],["icos",-12.085807800292969],["▁केला",-12.085838317871094],["▁දෙන්න",-12.085932731628418],["تە",-12.08593463897705],["▁кому",-12.085951805114746],["ツ",-12.085972785949709],["▁prix",-12.086013793945312],["▁antaŭ",-12.086027145385742],["▁haj",-12.086036682128906],["ไม่มี",-12.086050987243652],["▁جديد",-12.08607578277588],["抓",-12.086173057556152],["për",-12.08619213104248],["▁گرفت",-12.08623504638672],["▁مشکل",-12.086277961730955],["sub",-12.086307525634766],["▁пле",-12.086346626281738],["▁Гу",-12.086381912231444],["வன்",-12.086420059204102],["▁πριν",-12.08646297454834],["ೌ",-12.086465835571287],["wm",-12.0864839553833],["וז",-12.08653736114502],["▁செய்த",-12.0865478515625],["roc",-12.08663558959961],["tene",-12.086688041687012],["bbi",-12.086724281311035],["▁работе",-12.086769104003906],["▁interna",-12.086776733398438],["▁rod",-12.086844444274902],["lerle",-12.087000846862791],["ราช",-12.087084770202637],["дегі",-12.087101936340332],["▁வெ",-12.087138175964355],["▁teng",-12.087148666381836],["▁aikana",-12.087288856506348],["▁yoki",-12.08729076385498],["▁ასევე",-12.087382316589355],["▁نقش",-12.087441444396973],["▁moj",-12.087481498718262],["▁zij",-12.08749771118164],["ilishi",-12.087517738342283],["▁വന്ന",-12.087579727172852],["▁Mare",-12.08758544921875],["▁அவர்",-12.087602615356444],["න්ත",-12.087617874145508],["▁18-",-12.087686538696287],["▁खान",-12.08776569366455],["▁typu",-12.08780288696289],["▁kojima",-12.087817192077637],["▁μαζί",-12.087823867797852],["▁prøve",-12.087827682495115],["▁ጉዳይ",-12.087833404541016],["▁nasa",-12.087899208068848],["▁(+",-12.087915420532228],["▁kritik",-12.087930679321287],["剧",-12.088024139404297],["▁waxaana",-12.08807373046875],["ด้าน",-12.088088989257812],["▁nge",-12.088088989257812],["tali",-12.088167190551758],["▁출",-12.08818817138672],["封",-12.088190078735352],["ъл",-12.08821964263916],["▁Tab",-12.088231086730955],["▁rez",-12.08823585510254],["Azərbaycan",-12.08824348449707],["rir",-12.08824348449707],["▁pras",-12.08825397491455],["▁kapital",-12.088257789611816],["▁Preis",-12.088268280029297],["▁حاضر",-12.08827781677246],["erede",-12.088292121887209],["اج",-12.088318824768066],["▁परिवार",-12.088329315185549],["▁የማ",-12.088351249694824],["▁kuhusu",-12.08835792541504],["rf",-12.088375091552734],["kko",-12.088376998901367],["약",-12.088390350341797],["▁भाषा",-12.088396072387695],["▁naam",-12.088408470153809],["▁επί",-12.088418960571287],["▁çalış",-12.08848762512207],["▁ದಿನ",-12.088508605957031],["▁éves",-12.088526725769045],["▁LO",-12.088541030883787],["avimas",-12.088549613952637],["とても",-12.0885648727417],["▁tutta",-12.088571548461914],["ندڙ",-12.088622093
200684],["▁swoje",-12.088735580444336],["▁Eg",-12.088743209838867],["▁muchos",-12.088750839233398],["▁stri",-12.088759422302246],["dii",-12.08876609802246],["▁Fal",-12.08879852294922],["▁것으로",-12.088820457458496],["แสดง",-12.088825225830078],["休",-12.088871955871582],["とも",-12.08887767791748],["tors",-12.088906288146973],["▁contribu",-12.088944435119627],["poli",-12.08904266357422],["ুল",-12.089112281799316],["▁жан",-12.089144706726074],["▁drugim",-12.089186668395996],["كل",-12.089227676391602],["▁علوم",-12.089252471923828],["▁другие",-12.089268684387209],["▁стороны",-12.089364051818848],["kulu",-12.089385986328123],["inka",-12.08941650390625],["ingi",-12.089516639709473],["正常",-12.089518547058104],["గు",-12.089574813842772],["ません",-12.089581489562988],["itra",-12.089587211608888],["▁feito",-12.08962345123291],["也可以",-12.089662551879885],["▁getir",-12.089685440063477],["▁produse",-12.08971118927002],["▁автомат",-12.089762687683104],["▁servir",-12.089781761169434],["ကျ",-12.08979320526123],["ítés",-12.089808464050291],["▁temperatur",-12.089838981628418],["残",-12.089862823486328],["र्क",-12.089900016784668],["▁zdrav",-12.089929580688477],["пус",-12.090142250061035],["▁kolme",-12.0902099609375],["▁tér",-12.09023380279541],["▁பி",-12.09023666381836],["▁заяв",-12.09026050567627],["▁nuova",-12.090335845947266],["oase",-12.090391159057615],["▁temporada",-12.09040641784668],["▁strony",-12.090413093566896],["▁নি",-12.09055519104004],["▁በኋላ",-12.090559005737305],["▁peuvent",-12.090564727783203],["▁izy",-12.090594291687012],["▁després",-12.09059715270996],["▁عضو",-12.090620040893556],["▁जन्म",-12.09071159362793],["▁ორ",-12.090749740600586],["iak",-12.090758323669434],["▁കഴിഞ്ഞ",-12.0907621383667],["unta",-12.090773582458496],["▁Unidos",-12.090806007385254],["▁установ",-12.09087371826172],["ौं",-12.090888023376465],["▁studie",-12.09089469909668],["misen",-12.090904235839844],["▁grandi",-12.090951919555664],["▁servizi",-12.090953826904297],["ŝ",-12.090974807739258],["▁gemeente",-12.091002464294434],["▁map",-12.091076850891112],["事業",-12.091175079345703],["▁మే",-12.091194152832031],["▁kulu",-12.091303825378418],["告",-12.091350555419922],["▁gene",-12.091360092163086],["▁ვარ",-12.091384887695312],["엔",-12.091450691223145],["antoj",-12.09145164489746],["▁კარ",-12.091477394104004],["▁vuotta",-12.091480255126951],["▁credit",-12.091575622558594],["▁मार",-12.091577529907228],["教学",-12.091601371765137],["гад",-12.091609001159668],["▁кај",-12.091612815856934],["▁cause",-12.091629028320312],["୦",-12.09166431427002],["▁fala",-12.09179401397705],["青年",-12.091851234436035],["▁celu",-12.091898918151855],["▁seseorang",-12.091907501220703],["▁పెద్ద",-12.091911315917969],["▁mane",-12.091954231262209],["BB",-12.09196949005127],["▁harmoni",-12.09202480316162],["▁gam",-12.092033386230469],["▁dobrze",-12.092034339904783],["sättning",-12.092039108276367],["▁links",-12.092076301574709],["кай",-12.092167854309082],["▁olacak",-12.092230796813965],["▁vou",-12.09226894378662],["▁combina",-12.09227180480957],["▁leto",-12.092277526855469],["OL",-12.09228229522705],["범",-12.092360496520996],["환",-12.092379570007324],["▁качестве",-12.092463493347168],["▁اله",-12.092554092407228],["▁historie",-12.09266471862793],["加强",-12.092687606811523],["▁حدود",-12.092689514160156],["கை",-12.09272003173828],["satt",-12.092727661132812],["lade",-12.092755317687988],["▁nueva",-12.092761039733888],["LU",-12.092822074890137],["▁Στο",-12.092849731445312],["жда",-12.092854499816896],["▁hånd",-12.092863082885742],["▁conclu",-12.092877388000488],["▁ŝ",-12.0
92907905578612],["feld",-12.09290885925293],["▁جامعه",-12.092927932739258],["▁شوې",-12.09294891357422],["▁resultat",-12.09300422668457],["rangan",-12.093009948730469],["fica",-12.093047142028809],["vasta",-12.093093872070312],["▁afla",-12.093116760253906],["ধ",-12.093148231506348],["▁भेट",-12.093215942382812],["▁duas",-12.093311309814451],["建议",-12.093332290649414],["▁સુ",-12.093372344970703],[":16",-12.09341049194336],["ας",-12.093428611755373],["压",-12.093490600585938],["ował",-12.093571662902832],["koli",-12.093672752380373],["лігі",-12.09375286102295],["▁жүр",-12.09376049041748],["stí",-12.093796730041504],["▁Thanks",-12.093852043151855],["▁امکان",-12.093873977661133],["/9",-12.093907356262209],["▁Sony",-12.093957901000977],["▁كار",-12.093982696533203],["dak",-12.093988418579102],["иране",-12.0940580368042],["▁बीच",-12.09406566619873],["▁konkurs",-12.094074249267578],["▁أخرى",-12.094208717346191],["sob",-12.094220161437988],["▁пр",-12.094230651855469],["ství",-12.09425163269043],["▁abril",-12.09426212310791],["▁Also",-12.094364166259766],["79",-12.094428062438965],["▁sagen",-12.094442367553713],["▁ella",-12.09447956085205],["โ",-12.09450340270996],["heden",-12.094542503356934],["ىيە",-12.094542503356934],["▁tika",-12.09462547302246],["康",-12.094634056091309],["▁следует",-12.094650268554688],["▁вообще",-12.094688415527344],["▁Plat",-12.094738006591797],["▁venit",-12.094804763793944],["nummer",-12.094878196716309],["▁haqida",-12.095011711120604],["ប្រ",-12.095036506652832],["▁White",-12.095115661621094],["ək",-12.095123291015623],["▁Hội",-12.095149993896484],["▁Prav",-12.09516143798828],["▁chịu",-12.095162391662598],["▁говори",-12.09517765045166],["▁माहिती",-12.09518051147461],["سب",-12.095199584960938],["▁ليس",-12.095200538635254],["ഫ്",-12.095239639282228],["ック",-12.095255851745604],["▁සිට",-12.095270156860352],["opp",-12.095285415649414],["▁okkur",-12.095303535461426],["▁такі",-12.095378875732422],["▁تھے۔",-12.095389366149902],["▁3:",-12.095394134521484],["Ad",-12.095484733581545],["▁1.5",-12.095503807067873],["▁Bara",-12.095535278320312],["ธรรม",-12.095541954040527],["▁nebude",-12.095623970031738],["นาย",-12.095708847045898],["รักษา",-12.095736503601074],["员工",-12.09573745727539],["▁Bed",-12.095853805541992],["News",-12.095917701721191],["▁можа",-12.09598445892334],["▁Indi",-12.096111297607422],["▁ମି",-12.096179962158203],["=\"",-12.09620475769043],["ై",-12.096312522888184],["▁καθ",-12.096327781677246],["ndə",-12.096346855163574],["פּ",-12.096365928649902],["▁bekend",-12.096443176269531],["▁khẩu",-12.096466064453123],["inda",-12.096482276916504],["▁larg",-12.096506118774414],["▁ਹੋਰ",-12.09667682647705],["▁මේක",-12.09669303894043],["▁Nyt",-12.09671115875244],["සා",-12.096830368041992],["2006",-12.096837997436523],["시간",-12.096840858459473],["kład",-12.096841812133787],["▁HU",-12.096899032592772],["▁HR",-12.096980094909668],["▁pune",-12.097013473510742],["▁زمینه",-12.097017288208008],["량",-12.097039222717283],["▁detail",-12.097084045410156],["▁רו",-12.097090721130373],["untur",-12.097107887268066],["▁Bla",-12.0971097946167],["▁знак",-12.0971097946167],["谈",-12.097200393676758],["▁авч",-12.097213745117188],["▁deja",-12.097290992736816],["▁වඩා",-12.097325325012209],["▁ئىش",-12.097352027893066],["▁Grad",-12.09736156463623],["seks",-12.097394943237305],["▁mtu",-12.097415924072266],["▁Andre",-12.097423553466797],["▁Fragen",-12.09748649597168],["▁Muslim",-12.097597122192385],["කු",-12.097686767578123],["▁mono",-12.097723007202148],["▁принцип",-12.097723960876465],["hum",-12.097835540771484],[
"▁دارای",-12.097848892211914],["▁juuri",-12.09789752960205],["чан",-12.097898483276367],["▁plac",-12.097930908203123],["▁திரு",-12.09801197052002],["▁akademi",-12.098106384277344],["page",-12.0982084274292],["▁долго",-12.098209381103516],["▁viime",-12.09824275970459],["▁جمهوری",-12.098312377929688],["▁brit",-12.09841537475586],["под",-12.09843635559082],["▁aspekt",-12.098477363586426],["▁ambao",-12.098481178283691],["怪",-12.098560333251951],["▁kvinnor",-12.098589897155762],["NS",-12.098639488220217],["▁стран",-12.098644256591797],["ფ",-12.098666191101074],["▁February",-12.098689079284668],["▁ndi",-12.098689079284668],["инг",-12.098692893981934],["▁leven",-12.098743438720703],["▁Mutta",-12.09877109527588],["んで",-12.098782539367676],["▁afecta",-12.098807334899902],["Wa",-12.09885025024414],["▁एका",-12.098909378051758],["čin",-12.099013328552246],["נא",-12.099069595336914],["eron",-12.099130630493164],["▁tud",-12.099166870117188],["舞",-12.099198341369627],["▁меѓу",-12.099210739135742],["▁nooit",-12.099261283874512],["▁такие",-12.099342346191406],["▁NG",-12.099462509155272],["hne",-12.099549293518066],["▁Baru",-12.099556922912598],["મી",-12.09958553314209],["戦",-12.099648475646973],["เกี่ยวกับ",-12.099653244018556],["▁cười",-12.099698066711426],["ελ",-12.099759101867676],["▁Tat",-12.099818229675291],["▁bron",-12.099844932556152],["рис",-12.09986400604248],["▁phong",-12.09986400604248],["▁खबर",-12.099870681762695],["▁μα",-12.09992790222168],["▁Kah",-12.099930763244627],["▁redes",-12.09994125366211],["▁sms",-12.099967956542969],["▁trening",-12.09997272491455],["多少",-12.100001335144045],["▁שר",-12.100114822387695],["▁ukuba",-12.10018825531006],["▁iya",-12.100257873535156],["▁tarixi",-12.100274085998535],["വർ",-12.10034465789795],["раг",-12.100449562072754],["ални",-12.10053825378418],["▁metode",-12.100615501403809],["▁gehen",-12.100715637207031],["ując",-12.100723266601562],["▁ပ",-12.100723266601562],["עות",-12.100743293762209],["อยู่ใน",-12.100808143615724],["▁өмнө",-12.100812911987305],["्दै",-12.100848197937012],["▁jár",-12.100882530212402],["ਏ",-12.100919723510742],["▁преку",-12.100961685180664],["āju",-12.101001739501951],["▁leat",-12.101006507873535],["▁እስከ",-12.101022720336914],["▁dam",-12.101025581359863],["▁সে",-12.101075172424316],["ได้รับ",-12.10116481781006],["▁numer",-12.101167678833008],["▁Word",-12.10119342803955],["▁këto",-12.10119342803955],["ຈາກ",-12.10127067565918],["▁Meri",-12.101298332214355],["解決",-12.101318359375],["tma",-12.101346015930176],["様",-12.101465225219728],["▁Rusiya",-12.101482391357422],["нне",-12.101484298706056],["▁Ahmad",-12.101612091064451],["ight",-12.10163688659668],["คํา",-12.101678848266602],["၄",-12.10186767578125],["▁black",-12.101872444152832],["▁mór",-12.10188102722168],["▁دفاع",-12.101881980895996],["ЕН",-12.101886749267578],["เบ",-12.101909637451172],["▁vuonna",-12.101968765258787],["▁പറയ",-12.10204792022705],["▁اقتصاد",-12.102049827575684],["▁além",-12.10215950012207],["STA",-12.102164268493652],["大量",-12.102275848388672],["▁khoảng",-12.102376937866213],["▁Sak",-12.10240077972412],["▁sắc",-12.102423667907717],["Ба",-12.102481842041016],["▁només",-12.102513313293455],["viz",-12.102563858032228],["ств",-12.102590560913086],["▁servicios",-12.102611541748049],["▁crear",-12.102631568908691],["▁Well",-12.102651596069336],["▁gul",-12.10273551940918],["CR",-12.102778434753418],["▁मध्ये",-12.102800369262695],["▁सुरक्षा",-12.10288429260254],["▁ارائه",-12.102893829345703],["▁यांच्या",-12.102917671203612],["шча",-12.10300350189209],["service",-12.1030158996582
03],["јући",-12.103023529052734],["一番",-12.103071212768556],["▁skup",-12.103084564208984],["almente",-12.103140830993652],["▁одна",-12.103179931640623],["▁vá",-12.103195190429688],["▁Voi",-12.10321044921875],["順",-12.103242874145508],["ଥିବା",-12.103243827819824],["▁waarin",-12.103275299072266],["▁હવે",-12.103375434875488],["mät",-12.103421211242676],["▁vive",-12.103466033935549],["бен",-12.103471755981444],["▁norm",-12.103493690490724],["▁Francisco",-12.10359001159668],["лау",-12.103590965270996],["бр",-12.103645324707031],["бро",-12.103754997253418],["ಣ್ಣ",-12.103787422180176],["缺",-12.103802680969238],["生產",-12.103985786437988],["▁վեր",-12.104023933410645],["▁chomh",-12.104113578796388],["телни",-12.10419464111328],["▁cila",-12.10422420501709],["▁рішення",-12.10426425933838],["▁влади",-12.104265213012695],["重点",-12.10427951812744],["▁hiç",-12.10430145263672],["▁собой",-12.104348182678224],["根本",-12.104362487792969],["пит",-12.10438060760498],[":13",-12.104445457458496],["▁precis",-12.104448318481444],["贸易",-12.104451179504396],["▁правил",-12.1045560836792],["▁ដល់",-12.104564666748049],["aði",-12.104618072509766],["ទី",-12.104662895202637],["▁ngươi",-12.104736328125],["▁ජාතික",-12.104762077331545],["iese",-12.104844093322754],["IZ",-12.104856491088867],["▁rou",-12.104877471923828],["agi",-12.104886054992676],["▁action",-12.104990005493164],["လို့",-12.105010032653809],["▁felis",-12.10502815246582],["▁Grande",-12.105082511901855],[":14",-12.10509967803955],["▁nossa",-12.10511589050293],["ใบ",-12.105180740356444],["čna",-12.10521125793457],["▁אינ",-12.10525131225586],["▁Nagy",-12.105328559875488],["өл",-12.105329513549805],["▁potreb",-12.105334281921388],["▁giả",-12.105344772338867],["άκι",-12.105361938476562],["▁кандидат",-12.105368614196776],["lərdə",-12.105379104614258],["的生活",-12.105384826660156],["▁drive",-12.105430603027344],["kade",-12.105450630187988],["حت",-12.10553741455078],["▁stvari",-12.105548858642578],["▁mente",-12.105563163757324],["мина",-12.105567932128906],["▁medlem",-12.105568885803224],["డానికి",-12.105591773986816],["ковић",-12.105616569519045],["此次",-12.105645179748535],["▁millones",-12.1056489944458],["▁сила",-12.105650901794434],["▁väldigt",-12.105653762817385],["ാര്",-12.10566234588623],["pta",-12.105690002441406],["שת",-12.105746269226074],["▁proto",-12.105795860290527],["大的",-12.105801582336426],["delen",-12.10587215423584],["▁ihren",-12.105937004089355],["ıcı",-12.105965614318848],["amh",-12.106050491333008],["▁Kup",-12.106051445007324],["▁вельмі",-12.106107711791992],["▁terlalu",-12.106108665466309],["▁гадоў",-12.106120109558104],["фор",-12.106160163879396],["よう",-12.106168746948242],["▁سوی",-12.106244087219238],["▁אחרי",-12.106317520141602],["iyay",-12.10634422302246],["ješ",-12.106353759765623],["פר",-12.106355667114258],["lių",-12.10637664794922],["分鐘",-12.1063871383667],["▁Sí",-12.106440544128418],["▁cru",-12.106484413146973],["▁mı",-12.106493949890137],["न्न",-12.106494903564451],["▁Server",-12.10651683807373],["▁finale",-12.106574058532717],["▁وأن",-12.10662841796875],["▁future",-12.106685638427734],["▁provide",-12.106807708740234],["▁виде",-12.106819152832031],["PT",-12.106828689575195],["▁lehen",-12.106840133666992],["▁Novo",-12.106887817382812],["▁alat",-12.10693073272705],["рет",-12.106934547424316],["▁exista",-12.10698127746582],["▁නැත",-12.107049942016602],["▁Alba",-12.107104301452637],["▁mức",-12.107152938842772],["ruar",-12.107194900512695],["▁gala",-12.107202529907228],["▁Mali",-12.107208251953123],["dov",-12.107240676879885],["▁Nakon",-12.1072521
2097168],["▁ም",-12.107439994812012],["▁በዚህ",-12.107440948486328],["▁यस्तो",-12.107494354248049],["▁Image",-12.10752296447754],["▁banka",-12.107551574707031],["laj",-12.107650756835938],["▁Ρ",-12.107650756835938],["▁народа",-12.107704162597656],["▁стил",-12.10773468017578],["ર્",-12.107738494873049],["eň",-12.1077880859375],["▁другой",-12.10781955718994],["項目",-12.107952117919922],["▁ఆమె",-12.107975959777832],["▁dí",-12.10798454284668],["▁thotë",-12.107996940612791],["Ρ",-12.108014106750488],["FI",-12.108055114746094],["▁jie",-12.10813045501709],["▁îl",-12.108176231384276],["கம்",-12.108196258544922],["υλ",-12.10826301574707],["▁light",-12.108301162719728],["▁ове",-12.10834789276123],["tuu",-12.108356475830078],["ඳ",-12.108356475830078],["▁pai",-12.108399391174316],["▁हत्या",-12.108431816101074],["▁మీరు",-12.108461380004885],["ىپ",-12.108479499816896],["θη",-12.108490943908691],["난",-12.108522415161133],["▁lice",-12.10858917236328],["▁심",-12.10859489440918],["▁ফ",-12.108601570129396],["▁Xe",-12.108660697937012],["▁درصد",-12.108753204345703],["ඟ",-12.10878086090088],["▁henkilö",-12.108811378479004],["▁зараз",-12.108814239501951],["知識",-12.108851432800291],["佛",-12.108882904052734],["bě",-12.10898208618164],["▁үр",-12.10898780822754],["view",-12.1090669631958],["سان",-12.1090669631958],["▁זי",-12.109067916870115],["ліся",-12.109167098999023],["▁לך",-12.109232902526855],["▁Alam",-12.109280586242676],["▁maith",-12.109286308288574],["▁Richard",-12.109378814697266],["▁four",-12.109463691711426],["nnan",-12.109502792358398],["▁پنج",-12.109526634216309],["кін",-12.10955047607422],["呼",-12.109648704528809],["аваць",-12.109668731689451],["සු",-12.10966968536377],["tern",-12.109711647033691],["▁ono",-12.109755516052246],["▁groep",-12.109777450561523],["Gu",-12.109795570373535],["▁शहर",-12.109822273254396],["際",-12.10985279083252],["▁gennem",-12.109870910644531],["右",-12.109891891479492],["願",-12.109977722167969],["▁мар",-12.110107421875],["▁граждан",-12.110112190246582],["▁కో",-12.110116004943848],["ുകയും",-12.110169410705566],["ела",-12.110184669494627],["炎",-12.1102294921875],["▁seç",-12.110243797302246],["‐",-12.110267639160156],["▁söyle",-12.110274314880373],["▁บ",-12.110278129577637],["▁कैसे",-12.110279083251951],["▁maison",-12.110283851623535],["iju",-12.11034870147705],["▁qi",-12.110368728637695],["▁Massage",-12.11038303375244],["▁puis",-12.11038303375244],["ෂ",-12.110430717468262],["丁",-12.110601425170898],["▁арт",-12.110640525817873],["▁remont",-12.110647201538086],["▁хэл",-12.11069679260254],["colo",-12.11070728302002],["nska",-12.110715866088867],["▁خدمت",-12.110769271850586],["▁key",-12.110793113708496],["gjen",-12.110836029052734],["分钟",-12.110886573791504],["▁پاڻ",-12.110957145690918],["▁리",-12.110984802246094],["лук",-12.110989570617676],["▁København",-12.111001014709473],["ഭ",-12.11104679107666],["▁garant",-12.111091613769531],["▁కాదు",-12.111214637756348],["sé",-12.11122989654541],["dagen",-12.111239433288574],["oloji",-12.111247062683104],["▁хората",-12.111255645751951],["▁قسمت",-12.111287117004396],["▁veri",-12.111315727233888],["ပ်",-12.111330032348633],["▁рада",-12.11134147644043],["▁کامل",-12.111361503601074],["လား",-12.111374855041504],["table",-12.11146068572998],["-13",-12.111468315124512],["ībā",-12.11147117614746],["▁dous",-12.111480712890623],["▁პირველი",-12.11154079437256],["cks",-12.111611366271973],["ທ",-12.111616134643556],["▁Же",-12.111638069152832],["▁შეიძლება",-12.111659049987791],["▁DJ",-12.11166763305664],["text",-12.111674308776855],["▁vraag",-12.111692428588867],[
"打造",-12.111733436584473],["▁cont",-12.11175537109375],["▁Lá",-12.111830711364746],["▁vec",-12.111859321594238],["▁албан",-12.11186981201172],["▁State",-12.1118803024292],["ान",-12.112000465393066],["▁مدت",-12.112101554870604],["▁मुझे",-12.112125396728516],["▁प्रतिशत",-12.112126350402832],["▁ללא",-12.11213493347168],["-2017",-12.112184524536133],["復",-12.112224578857422],["ଥିଲେ",-12.112227439880373],["▁debat",-12.11228370666504],["atge",-12.112327575683594],["یب",-12.112348556518556],["ກັນ",-12.112409591674805],["לב",-12.11241054534912],["ชาย",-12.11241626739502],["sien",-12.112483978271484],["ۆ",-12.112505912780762],["▁মো",-12.112543106079102],["stur",-12.112565994262695],["▁segala",-12.112576484680176],["▁Републике",-12.112587928771973],["уч",-12.112589836120604],["▁លើ",-12.112648963928224],["mál",-12.112664222717283],["▁apel",-12.112704277038574],["▁Detta",-12.112754821777344],["czą",-12.112787246704102],["▁قائم",-12.112797737121582],["▁kana",-12.112810134887695],["▁Gru",-12.112820625305176],["▁cabo",-12.112879753112791],["▁rea",-12.112899780273438],["نة",-12.11298942565918],["▁suure",-12.113025665283203],["▁મે",-12.113032341003418],["▁초",-12.113054275512695],["▁мэдээлэл",-12.11305809020996],["▁Poz",-12.113059043884276],["疾病",-12.113064765930176],["/12",-12.11307144165039],["pris",-12.113085746765137],["blad",-12.113092422485352],["额",-12.113093376159668],["NT",-12.113128662109377],["優",-12.113162994384766],["▁sklad",-12.113178253173828],["▁maailma",-12.113216400146484],["▁ప్రతి",-12.11329460144043],["arı",-12.113330841064451],["стор",-12.113369941711426],["▁dair",-12.113380432128906],["▁tratta",-12.113397598266602],["東京",-12.11346435546875],["▁தி",-12.113469123840332],["iyat",-12.113470077514648],["▁lưu",-12.113518714904783],[".1.",-12.11353874206543],["žia",-12.113709449768066],["▁roj",-12.113725662231444],["skan",-12.113758087158203],["tja",-12.113761901855469],["▁ਲ",-12.113819122314451],["rach",-12.113846778869627],["計",-12.113903045654297],["шту",-12.113903999328612],["▁Desde",-12.113916397094728],["tida",-12.11391830444336],["▁raya",-12.113932609558104],["ote",-12.114032745361328],["EZ",-12.11404514312744],["ើ",-12.114259719848633],["▁sider",-12.114304542541504],["▁групи",-12.114343643188477],["▁presso",-12.1143798828125],["公開",-12.114401817321776],["▁25-",-12.114425659179688],["kama",-12.11443042755127],["projekt",-12.114435195922852],["▁خوا",-12.114456176757812],["▁Respublikası",-12.114501953125],["SD",-12.11453914642334],["▁fecha",-12.114556312561035],["mektedir",-12.114575386047363],["ਟੀ",-12.114590644836426],["kú",-12.114686965942385],["▁servicio",-12.11475944519043],["▁politike",-12.114765167236328],["▁зь",-12.114885330200195],["▁hebt",-12.114897727966309],["▁vice",-12.11490535736084],["▁neden",-12.114941596984863],["▁teren",-12.115068435668944],["▁Blue",-12.115074157714844],["▁Our",-12.115086555480955],["▁فرد",-12.115139961242676],["ído",-12.115155220031738],[":40",-12.115159034729004],["▁czyli",-12.115194320678713],["▁വീ",-12.11521816253662],["ષ",-12.115228652954102],["▁please",-12.11523151397705],["▁системы",-12.115233421325684],["▁recherche",-12.115251541137695],["▁ajan",-12.11526870727539],["ели",-12.11528491973877],["▁informacija",-12.115351676940918],["så",-12.11544418334961],["▁noso",-12.115488052368164],["▁Ке",-12.115561485290527],["▁beton",-12.115567207336426],["▁autó",-12.115638732910156],["▁менее",-12.115734100341797],["▁kommun",-12.115784645080566],["ډ",-12.115938186645508],[":17",-12.115978240966797],["▁kwi",-12.115994453430176],["▁julle",-12.116080284118652],["
ಳಿ",-12.11616039276123],["▁cursus",-12.116204261779783],["شر",-12.11628532409668],["▁thoại",-12.11630916595459],["▁ulko",-12.116333961486816],["▁16-",-12.116341590881348],["цию",-12.116373062133787],["▁alapján",-12.116418838500977],["▁gemacht",-12.116437911987305],["▁senior",-12.116483688354492],["لم",-12.11659049987793],["▁Fle",-12.116629600524902],["经过",-12.116713523864746],["▁ແມ່ນ",-12.116764068603516],["▁sábado",-12.116776466369627],["▁Studio",-12.116779327392578],["▁بولۇپ",-12.116783142089844],["▁rispetto",-12.116803169250488],["ذا",-12.116846084594728],["▁SM",-12.116851806640623],["▁pes",-12.116884231567385],["wert",-12.116931915283203],["▁auctor",-12.11695671081543],["ρου",-12.117053985595703],["صاب",-12.117217063903809],["▁поэтому",-12.1172456741333],["▁informações",-12.117294311523438],["▁Salaam",-12.11734390258789],["最终",-12.117386817932127],["wali",-12.117509841918944],["▁پسند",-12.1176118850708],["▁సు",-12.117635726928713],["ubi",-12.117674827575684],["▁Sang",-12.11767864227295],["▁ویژه",-12.117714881896973],["▁ಸು",-12.117742538452148],["▁yaxın",-12.117796897888184],["▁zonder",-12.117805480957031],["▁Mama",-12.117817878723145],["▁დრო",-12.117889404296877],["ිත",-12.117890357971191],["▁съд",-12.117895126342772],["▁io",-12.117901802062988],["▁закона",-12.117901802062988],["inę",-12.117974281311035],["င",-12.1179838180542],["บา",-12.11801052093506],["▁puno",-12.118013381958008],["▁minh",-12.118026733398438],["ရဲ့",-12.118081092834473],["▁skol",-12.1181001663208],["wyn",-12.118124961853027],["▁ઓ",-12.118160247802734],["▁decât",-12.118179321289062],["▁laki",-12.11818790435791],["ице",-12.118364334106444],["▁manual",-12.11846923828125],["▁futur",-12.118529319763184],["▁kdo",-12.11854648590088],["▁модел",-12.118634223937988],["endum",-12.118638038635254],["表現",-12.118660926818848],["▁daoine",-12.11866569519043],["▁چو",-12.11867904663086],["rò",-12.11875057220459],["łą",-12.118791580200195],["តែ",-12.118803977966309],["ზი",-12.118830680847168],["ೊ",-12.118847846984863],["▁הרב",-12.118903160095217],["nge",-12.118983268737791],["pina",-12.118998527526855],["育",-12.119011878967283],["▁żeby",-12.119126319885254],["▁عراق",-12.119140625],["借",-12.119150161743164],["▁ajal",-12.119155883789062],["▁ero",-12.119245529174805],["▁1920",-12.119329452514648],["大型",-12.119378089904783],["чар",-12.119393348693848],["▁មក",-12.119454383850098],["▁bina",-12.119479179382324],["tës",-12.119494438171388],["▁Ikke",-12.1195068359375],["體驗",-12.119565963745115],["▁karta",-12.119623184204102],["▁girl",-12.119657516479492],["▁Dhe",-12.119808197021484],["▁vše",-12.11981201171875],["▁Dank",-12.119820594787598],["firma",-12.119832992553713],["sut",-12.11987018585205],["dru",-12.119958877563477],["▁Costa",-12.119958877563477],["▁sinn",-12.119958877563477],["▁μπ",-12.120072364807127],["▁נע",-12.12009048461914],["වන්",-12.120111465454102],["▁режим",-12.1201171875],["▁iç",-12.12014102935791],["▁процесс",-12.120142936706545],["▁ويا",-12.120172500610352],["▁khả",-12.120326042175291],["▁අනුව",-12.120328903198242],["▁לק",-12.120375633239746],["нят",-12.12038803100586],["息",-12.12039279937744],["vê",-12.120463371276855],["lir",-12.120491027832031],["秋",-12.12051773071289],["ም።",-12.120519638061523],["ём",-12.120575904846191],["藝術",-12.12069320678711],["erad",-12.120741844177246],["▁UE",-12.120814323425291],["해서",-12.12081813812256],["▁наших",-12.120864868164062],["▁일본",-12.120901107788086],["轮",-12.120911598205566],["▁kõige",-12.120994567871094],["▁Soo",-12.12106990814209],["ชื่อ",-12.121086120605469],["lma",-12.121157646179
2],["ხი",-12.121197700500488],["дийн",-12.121255874633787],["▁док",-12.121286392211914],["ระ",-12.121331214904783],["▁жал",-12.121333122253418],["▁staff",-12.121438026428224],["▁ਭ",-12.12144374847412],["▁եթե",-12.12147045135498],["▁рамках",-12.121495246887209],["agem",-12.121532440185549],["ቱን",-12.12156105041504],["โรค",-12.12157154083252],["▁lacus",-12.12161922454834],["مون",-12.121726989746094],["許多",-12.12175178527832],["▁følge",-12.121776580810549],["טה",-12.121855735778809],["似乎",-12.121885299682615],["尾",-12.121899604797363],["▁PRE",-12.121963500976562],["▁वर",-12.121973037719728],["▁իմ",-12.12197494506836],["▁भयो",-12.122021675109863],["▁trup",-12.122053146362305],["▁بلا",-12.12209701538086],["▁കേരള",-12.122100830078123],["▁ajili",-12.122164726257324],["ολογ",-12.122207641601562],["gü",-12.122210502624512],["▁proposta",-12.122353553771973],["▁Tuo",-12.122369766235352],["▁بانک",-12.12237548828125],["စ်",-12.122424125671388],["▁mô",-12.122448921203612],["nol",-12.122456550598145],["เพิ่ม",-12.122526168823242],["▁formal",-12.122562408447266],["ària",-12.122578620910645],["▁Sä",-12.12269401550293],["▁تان",-12.122716903686523],["どう",-12.122735023498535],["ţiei",-12.122735977172852],["▁gele",-12.122736930847168],["kové",-12.122760772705078],["ຊ",-12.122817039489746],["ких",-12.122819900512695],["▁कमी",-12.122876167297363],["▁۳",-12.122905731201172],["符合",-12.122918128967283],["araq",-12.12293529510498],["eken",-12.122971534729004],["空间",-12.123056411743164],["fru",-12.123091697692873],["itza",-12.12314224243164],["▁govern",-12.123164176940918],["إ",-12.12318515777588],["aĵo",-12.123210906982422],["▁drama",-12.12324047088623],["Am",-12.12327766418457],["程序",-12.123281478881836],["▁Бер",-12.123284339904783],["▁Всички",-12.1233491897583],["▁atención",-12.123360633850098],["通知",-12.123403549194336],["▁୫",-12.123451232910156],["▁izda",-12.123465538024902],["дуу",-12.123488426208496],["टे",-12.123528480529783],["▁മാത്രം",-12.123531341552734],["▁Vest",-12.123565673828123],["근",-12.123571395874023],["▁شرایط",-12.123615264892578],["▁чейин",-12.12369155883789],["cr",-12.123703956604004],["АЛ",-12.123741149902344],["▁mieux",-12.123757362365724],["тэ",-12.123759269714355],["▁ακόμα",-12.123794555664062],["ජ",-12.123958587646484],["▁cours",-12.123969078063965],["upp",-12.12397003173828],["пла",-12.123983383178713],["ყვა",-12.124005317687988],["▁व्यवस्था",-12.124115943908691],["iyyət",-12.12412929534912],["tej",-12.124137878417969],["फा",-12.124147415161133],["▁رب",-12.12416172027588],["▁אפשר",-12.124266624450684],["▁Mid",-12.1243314743042],["üb",-12.124340057373049],["▁pension",-12.124380111694336],["▁kaže",-12.124388694763184],["είς",-12.124390602111816],["▁olish",-12.124567031860352],["▁Spo",-12.124592781066896],["אַר",-12.124662399291992],["ינג",-12.124733924865724],["▁(16",-12.124740600585938],["նում",-12.124743461608888],["▁Aug",-12.124743461608888],["ekkel",-12.124811172485352],["▁office",-12.124869346618652],["▁table",-12.124913215637209],["▁acesta",-12.12496852874756],["▁professional",-12.124993324279783],["▁بهترین",-12.125008583068848],["▁orienta",-12.12501049041748],["ikus",-12.12502670288086],["ING",-12.125076293945312],["▁además",-12.125208854675291],["▁navn",-12.125218391418455],["▁training",-12.12523365020752],["▁Թ",-12.125243186950684],["▁ની",-12.125272750854492],["▁деньги",-12.125279426574709],["ahan",-12.125354766845703],["imai",-12.125367164611816],["▁warna",-12.12537670135498],["▁জন",-12.125383377075195],["▁громадян",-12.125386238098145],[")،",-12.125438690185549],["▁vodi",-12.125508
308410645],["gende",-12.125515937805176],["▁Siz",-12.125557899475098],["▁බැ",-12.125565528869627],["▁Kel",-12.12562370300293],["标",-12.12567901611328],["▁metro",-12.125683784484863],["▁ହେବ",-12.125794410705566],["ոս",-12.125831604003906],["▁planet",-12.125837326049805],["ंद",-12.125871658325195],["▁kati",-12.125889778137209],["▁електро",-12.125972747802734],["သည္။",-12.126016616821287],["▁Mary",-12.126090049743652],["▁развития",-12.126150131225586],["になった",-12.12621021270752],["▁عبدال",-12.126213073730469],["▁डे",-12.126226425170898],["零",-12.126229286193848],["▁суу",-12.126232147216797],["▁informació",-12.126235008239746],["veni",-12.126314163208008],["чку",-12.12633228302002],["▁Tento",-12.12635898590088],["복",-12.12636375427246],["▁شعر",-12.126386642456056],["▁sekund",-12.126388549804688],["▁μη",-12.126424789428713],["▁villa",-12.126457214355469],["▁رضا",-12.12646484375],["ljen",-12.126479148864746],["ଷ୍ଟ",-12.126490592956545],["volu",-12.126524925231934],["▁hoa",-12.126750946044922],["訂",-12.126754760742188],["▁Ова",-12.126773834228516],["價格",-12.126792907714844],["▁qar",-12.126822471618652],["▁ikinci",-12.126824378967283],["▁ມາ",-12.126838684082031],["itis",-12.12687873840332],["▁генерал",-12.126897811889648],["▁gant",-12.12691593170166],["▁Brand",-12.126935005187988],["▁attraverso",-12.126935005187988],["戴",-12.127063751220703],["սի",-12.127073287963867],["▁സിനിമ",-12.127089500427246],["▁достаточно",-12.127090454101562],["▁vrt",-12.127195358276367],["▁dator",-12.127306938171388],["▁edin",-12.127410888671877],["▁viņa",-12.127449989318848],["vok",-12.12747573852539],["▁kuten",-12.127516746520996],["▁минут",-12.12752914428711],["VO",-12.127551078796388],["нибудь",-12.127558708190918],["▁կազմակերպ",-12.127586364746094],["▁በላይ",-12.127601623535156],["vár",-12.12761402130127],["▁testa",-12.127694129943848],["▁работу",-12.127715110778809],["▁चीन",-12.127782821655272],["▁teo",-12.12780475616455],["▁olunan",-12.127820014953612],["▁komfort",-12.12789249420166],["Ri",-12.127937316894531],["▁وح",-12.127943992614746],["נע",-12.12795066833496],["阳",-12.127962112426758],["ದ್ದು",-12.128008842468262],["▁բարձր",-12.128029823303224],["▁vardır",-12.12803840637207],["rmi",-12.128047943115234],["▁baixo",-12.128049850463867],["▁ขาย",-12.12806510925293],["ത്തിലെ",-12.128121376037598],["สอง",-12.128179550170898],["ネ",-12.128252983093262],["ცემ",-12.128325462341309],["ข่าว",-12.128378868103027],["▁client",-12.12844467163086],["▁altro",-12.128495216369627],["▁امریکی",-12.128503799438477],["▁tỷ",-12.128565788269045],["▁lagt",-12.128569602966309],["حل",-12.128680229187012],["▁insa",-12.128703117370604],["त्या",-12.128725051879885],["▁musi",-12.12881565093994],["▁noite",-12.12890625],["▁ונ",-12.128912925720217],["ต่าง",-12.128957748413086],["▁сегодня",-12.128973960876465],["▁country",-12.129009246826172],["▁mú",-12.12905979156494],["ларын",-12.129107475280762],["сін",-12.129114151000977],["ает",-12.12911605834961],["аль",-12.129125595092772],["杯",-12.12923812866211],["▁Nuk",-12.129383087158203],["matu",-12.129385948181152],["▁সঙ্গে",-12.129446029663086],["થ",-12.129446983337402],["▁rico",-12.129586219787598],["▁край",-12.129596710205078],["osios",-12.129720687866213],["▁Bai",-12.129742622375488],["られた",-12.12979221343994],["▁niti",-12.129817008972168],["šla",-12.129846572875977],["ុំ",-12.12985134124756],["▁storia",-12.12986946105957],["cada",-12.129905700683594],["▁Você",-12.129925727844238],["▁serikali",-12.129932403564451],["▁turun",-12.129974365234377],["▁sivu",-12.129979133605955],["-0",-12.129998207092283],[
"▁largo",-12.13001823425293],["▁gru",-12.130033493041992],["塞",-12.130149841308594],["▁digitale",-12.130187034606934],["పా",-12.13019561767578],["▁શ",-12.130231857299805],["ଦା",-12.130303382873535],["▁قدم",-12.130345344543455],["▁rê",-12.130374908447266],["MU",-12.130393028259276],["▁atât",-12.13040828704834],["uza",-12.130412101745604],["▁உங்கள்",-12.130420684814451],["▁возможно",-12.130451202392578],["▁ordina",-12.130453109741213],["▁220",-12.13047981262207],["▁talaga",-12.130528450012209],["ارت",-12.130529403686523],["sində",-12.130553245544434],["िका",-12.13059139251709],["▁tara",-12.130621910095217],["▁Mars",-12.13063907623291],["ські",-12.13064193725586],["▁tatt",-12.130645751953123],["ална",-12.130684852600098],["sión",-12.130736351013184],["▁19-",-12.130738258361816],["▁حمایت",-12.130866050720217],["увањето",-12.130931854248049],["▁ifade",-12.130998611450195],["▁Str",-12.131071090698242],["riko",-12.131078720092772],["už",-12.131080627441406],["▁mwa",-12.13114070892334],["▁zeer",-12.131205558776855],["▁meet",-12.131279945373535],["▁Kenya",-12.131300926208496],["▁코",-12.13134479522705],["gge",-12.13141632080078],["▁база",-12.131457328796388],["инде",-12.13147258758545],["▁ගිය",-12.131498336791992],["술",-12.13164234161377],["unar",-12.131648063659668],["▁Novi",-12.131662368774414],["ông",-12.131696701049805],["ొ",-12.131706237792969],["推进",-12.131708145141602],["сек",-12.131711959838867],["ЛИ",-12.131720542907717],["ដោយ",-12.13172721862793],["▁الث",-12.131783485412598],["▁THE",-12.131872177124023],["ችን",-12.131878852844238],["σχ",-12.131898880004885],["တော့",-12.131999969482422],["▁completo",-12.132002830505373],["▁бара",-12.132017135620115],["នឹង",-12.132018089294434],["ጡ",-12.132085800170898],["年代",-12.13216781616211],["▁вул",-12.132221221923828],["외",-12.13232421875],["▁Рад",-12.132390022277832],["ET",-12.132400512695312],["▁فصل",-12.132406234741213],["ນັ້ນ",-12.132461547851562],["uję",-12.132484436035156],["It",-12.132658004760742],["大会",-12.132701873779297],["िए",-12.132753372192385],["▁thần",-12.132757186889648],["▁عندما",-12.132786750793455],["нска",-12.132832527160645],["ូ",-12.132967948913574],["補",-12.1329984664917],["▁taon",-12.133021354675291],["户",-12.133031845092772],["xit",-12.133146286010742],["风险",-12.133172988891602],["▁الناس",-12.133186340332031],["▁Нова",-12.133244514465332],["▁dwa",-12.13324737548828],["▁більш",-12.133251190185549],["נג",-12.133340835571287],["▁의",-12.133461952209473],["ора",-12.133715629577637],["▁ένας",-12.133749008178713],["▁went",-12.133755683898926],["▁secret",-12.133779525756836],["▁Cena",-12.13385772705078],["алі",-12.133866310119627],["교육",-12.133915901184082],["▁lieu",-12.13392162322998],["TH",-12.133936882019045],["▁총",-12.133947372436523],["▁drink",-12.134007453918455],["hiya",-12.134020805358888],["司",-12.13404655456543],["▁sü",-12.134050369262695],["tî",-12.134072303771973],["出了",-12.134121894836426],["희",-12.13416862487793],["▁niitä",-12.134184837341309],["▁tempus",-12.134195327758787],["▁Golf",-12.13424587249756],["▁kontak",-12.134262084960938],["▁nazaj",-12.134275436401367],["▁sepse",-12.13430118560791],["देश",-12.134384155273438],["ყო",-12.134403228759766],["wor",-12.134441375732422],["▁ملت",-12.134454727172852],["пере",-12.13455581665039],["▁لأن",-12.134661674499512],["jących",-12.134685516357422],["കര",-12.134729385375977],["▁drept",-12.13478183746338],["▁schi",-12.134788513183594],["▁sådan",-12.134803771972656],["▁제공",-12.134827613830566],["▁შემო",-12.134842872619627],["▁poda",-12.134873390197754],["landa",-12.134881973266602],
["zki",-12.1349458694458],["▁सर",-12.134949684143066],["rej",-12.134974479675291],["mē",-12.13497829437256],["▁juste",-12.135000228881836],["▁بھر",-12.135055541992188],["erunt",-12.135056495666504],["▁posti",-12.13506031036377],["ыми",-12.13506317138672],["ಂದ",-12.1350736618042],["▁kehidupan",-12.13512897491455],["▁আমাদের",-12.13512897491455],["эй",-12.135130882263184],["KU",-12.135224342346191],["▁아이",-12.13524055480957],["▁ਉ",-12.135255813598633],["▁байр",-12.13525676727295],["▁үг",-12.135262489318848],["timo",-12.135268211364746],["▁warga",-12.135289192199709],["kë",-12.135302543640137],["▁néz",-12.135337829589844],["▁tejto",-12.135339736938477],["▁easy",-12.13535213470459],["▁kojem",-12.135416030883787],["暴",-12.13543701171875],["▁చ",-12.135454177856444],["َّ",-12.135478973388672],["▁porte",-12.135485649108888],["sah",-12.135490417480469],["▁kinderen",-12.135516166687012],["ಯಾಗ",-12.13552474975586],["ንም",-12.135564804077148],["▁управління",-12.13560390472412],["▁azonban",-12.135655403137209],["joje",-12.13569450378418],["െങ്കിലും",-12.135702133178713],["▁leuk",-12.13570785522461],["بت",-12.135709762573242],["▁ډېر",-12.135772705078123],["ต้น",-12.135823249816896],["▁fikir",-12.135913848876951],["▁анын",-12.135937690734863],["考え",-12.13597297668457],["▁filmi",-12.136028289794922],["▁Promo",-12.136093139648438],["▁reforma",-12.136098861694336],["өн",-12.136181831359863],["▁Hak",-12.13623332977295],["▁должно",-12.136250495910645],["▁рік",-12.13625144958496],["▁Ζ",-12.136319160461426],["消费者",-12.13633632659912],["DR",-12.136348724365234],["ებას",-12.13640594482422],["ටත්",-12.136456489562988],["သိ",-12.136496543884276],["стој",-12.1365385055542],["▁κάτι",-12.136588096618652],["▁üzere",-12.1365966796875],["▁kuris",-12.136634826660156],["▁одной",-12.136643409729004],["վա",-12.136741638183594],["▁चाहिए",-12.13675308227539],["▁orient",-12.136794090270996],["iei",-12.136808395385742],["▁transforma",-12.136820793151855],["▁Klub",-12.136833190917969],["▁Bundes",-12.136841773986816],["өг",-12.136937141418455],["dali",-12.136938095092772],["ntar",-12.13701629638672],["คืน",-12.137027740478516],["ත්වය",-12.137072563171388],["▁بیماری",-12.13712215423584],["▁молод",-12.137167930603027],["ιού",-12.137182235717772],["篇",-12.137232780456545],["ങ്ങളുടെ",-12.1372709274292],["▁werken",-12.13727855682373],["laka",-12.137447357177734],["▁우",-12.137495040893556],["백",-12.137523651123049],["▁yek",-12.137526512145996],["▁teljes",-12.137545585632324],["ंट",-12.137595176696776],["вае",-12.137606620788574],["▁arma",-12.13765811920166],["▁mont",-12.137687683105469],["▁prove",-12.137782096862791],["▁alcuni",-12.137828826904297],["职",-12.137948989868164],["视频",-12.137953758239746],["/8",-12.13795566558838],["ธุรกิจ",-12.137980461120604],["▁właśnie",-12.137983322143556],["▁wananchi",-12.137989044189451],["▁кажа",-12.138023376464844],["为什么",-12.138039588928224],["▁Ваш",-12.138057708740234],["▁pendidikan",-12.13807773590088],["▁ndani",-12.138108253479004],["▁450",-12.138110160827637],["었다",-12.138129234313965],["▁vald",-12.13813018798828],["▁meta",-12.13816738128662],["▁kiam",-12.13820743560791],["▁half",-12.138222694396973],["tów",-12.1382417678833],["▁Qual",-12.138260841369627],["▁importantes",-12.138272285461426],["阶段",-12.138382911682127],["ključ",-12.138440132141112],["▁ընթացքում",-12.13845920562744],["नाथ",-12.138472557067873],["่า",-12.138479232788086],["▁кандай",-12.138484954833984],["You",-12.138486862182615],["kz",-12.13849925994873],["▁Kit",-12.13851833343506],["▁kuat",-12.13853645324707],["▁మీద",-12.13856029510
498],["బి",-12.138564109802246],["▁Гор",-12.138564109802246],["рк",-12.138675689697266],["رج",-12.138701438903809],["▁oda",-12.138744354248049],["▁wszystkie",-12.138788223266602],["lage",-12.13886547088623],["ион",-12.138957977294922],["▁න",-12.139001846313477],["▁درخواست",-12.13901424407959],["▁plot",-12.139102935791016],["▁കൊണ്ട്",-12.139115333557127],["تنا",-12.139150619506836],["▁domov",-12.139190673828123],["рес",-12.13929843902588],["▁tena",-12.139371871948242],["είται",-12.139405250549316],["投入",-12.139487266540527],["→",-12.139500617980955],["dze",-12.13954257965088],["യില",-12.139626502990724],["rës",-12.139639854431152],["ө",-12.139646530151367],["▁равно",-12.139676094055176],["ације",-12.139841079711914],["▁akong",-12.139852523803713],["▁Cada",-12.139881134033203],["▁газрын",-12.139897346496582],["јан",-12.140002250671388],["▁Tum",-12.140007019042969],["▁potrebno",-12.140064239501951],["▁Và",-12.14009952545166],["ಬ್",-12.14011287689209],["▁края",-12.14013385772705],["▁kaksi",-12.14022159576416],["年前",-12.140275955200195],["▁ثابت",-12.14037036895752],["▁Stor",-12.140384674072266],["▁Кра",-12.140433311462402],["▁muslim",-12.140443801879885],["▁governo",-12.140475273132324],["▁produ",-12.14052677154541],["เก็บ",-12.140596389770508],["လွ",-12.140667915344238],["▁mesaj",-12.140692710876465],["▁shqiptare",-12.140722274780272],["AG",-12.14074993133545],["▁celebra",-12.140761375427246],["▁Mä",-12.140835762023926],["▁enemmän",-12.14084815979004],["និង",-12.14090633392334],["▁கொண்ட",-12.14091682434082],["▁տեղ",-12.1409273147583],["изира",-12.1409330368042],["▁unga",-12.140995979309082],["▁පු",-12.14105224609375],["diga",-12.141053199768066],["ými",-12.14106559753418],["▁Ingen",-12.141247749328612],["एस",-12.141274452209473],["ություններ",-12.141283988952637],["▁برس",-12.141304969787598],["▁निर्वाचन",-12.14140796661377],["▁Rod",-12.141425132751465],["▁încă",-12.141468048095703],["▁interven",-12.14146900177002],["▁стало",-12.141528129577637],["tán",-12.141566276550291],["▁peale",-12.141590118408203],["寄",-12.141669273376465],["溫",-12.141724586486816],["我想",-12.141763687133787],["▁kuji",-12.141796112060549],["▁quý",-12.141799926757812],["▁१५",-12.141815185546877],["▁olunub",-12.141820907592772],["▁Antonio",-12.141827583312988],["注",-12.141868591308594],["?」",-12.14189338684082],["▁호",-12.141895294189451],["▁त्यो",-12.141915321350098],["▁집",-12.141918182373049],["ांत",-12.142009735107422],["▁gái",-12.142020225524902],["ዮ",-12.142035484313965],["ڑ",-12.142071723937988],["МА",-12.142107963562012],["ļi",-12.142156600952148],["▁міста",-12.142218589782717],["▁maks",-12.142269134521484],["▁kegiatan",-12.142278671264648],["TP",-12.14228057861328],["▁karar",-12.14231300354004],["▁Card",-12.142315864562988],["гаа",-12.142383575439451],["cade",-12.142395973205566],["▁تماس",-12.142400741577148],["▁hall",-12.142417907714844],["เลือก",-12.14247703552246],["▁stað",-12.142508506774902],["Ê",-12.142520904541016],["ယ္",-12.142605781555176],["▁مى",-12.142606735229492],["▁Vigo",-12.142614364624023],["▁sveta",-12.142671585083008],["氏",-12.142730712890623],["ിച്ച്",-12.142740249633787],["▁Bot",-12.142743110656738],["tį",-12.14274787902832],["▁(12)",-12.142770767211914],["nske",-12.142773628234863],["▁Kis",-12.142809867858888],["▁Wasser",-12.142827033996582],["▁៖",-12.142913818359377],["▁yaptığı",-12.142931938171388],["▁yara",-12.14294147491455],["▁ያለው",-12.14299201965332],["▁Lau",-12.142993927001951],["▁ଗ",-12.142999649047852],["▁кийин",-12.143095970153809],["▁CA",-12.143210411071776],["▁phép",-12.143243789672852]
,["▁longo",-12.1432466506958],["父",-12.14339828491211],["▁miaka",-12.143407821655272],["▁ନାହିଁ",-12.143433570861816],["できない",-12.143460273742676],["▁വെ",-12.143477439880373],["mala",-12.143479347229004],["zg",-12.14351749420166],["▁bab",-12.143527030944824],["▁Ivan",-12.14366340637207],["貴",-12.143672943115234],["▁Συν",-12.14371395111084],["▁තිබේ",-12.143714904785156],["▁όπου",-12.143719673156738],["ড়া",-12.143787384033203],["тна",-12.143874168395996],["onta",-12.143891334533691],["▁Wall",-12.143919944763184],["▁සමග",-12.143928527832031],["LL",-12.143946647644045],["bing",-12.143959999084473],["▁రూ",-12.143978118896484],["ARE",-12.14405918121338],["fek",-12.144088745117188],["▁Ist",-12.14409065246582],["▁totes",-12.144142150878906],["▁aktivitet",-12.144193649291992],["▁توسعه",-12.144198417663574],["면서",-12.14423370361328],["▁performance",-12.144245147705078],["▁təmin",-12.144281387329102],["▁cinta",-12.144295692443848],["▁Songs",-12.144302368164062],["sod",-12.144338607788086],["tores",-12.144426345825195],["行为",-12.144463539123535],["ాలను",-12.144620895385742],["▁اچي",-12.14467430114746],["▁através",-12.14467716217041],["▁εδώ",-12.14467716217041],["▁უკვე",-12.144692420959473],["晚上",-12.144698143005373],["しか",-12.144701957702637],["▁Presiden",-12.144784927368164],["▁عمومی",-12.144797325134276],["▁Kä",-12.144804954528809],["تد",-12.144814491271973],["ዚ",-12.14482307434082],["不足",-12.144829750061035],["tne",-12.1448392868042],["▁weiß",-12.14487361907959],["▁vjet",-12.144908905029297],["פט",-12.14498233795166],["▁won",-12.144984245300291],["▁ខែ",-12.145057678222656],["▁дете",-12.145095825195312],["աբ",-12.145105361938477],["▁Eller",-12.145119667053224],["▁khỏe",-12.145153045654297],["▁محفوظ",-12.145153045654297],["▁GA",-12.145183563232422],["што",-12.145186424255373],["促进",-12.145224571228027],["▁Software",-12.145264625549316],["2007",-12.145273208618164],["κτ",-12.145277976989746],["▁concept",-12.14529514312744],["▁കൂടി",-12.145303726196287],["▁Eks",-12.145347595214844],["ώς",-12.14545440673828],["▁अव",-12.145458221435549],["▁riigi",-12.145465850830078],["▁poc",-12.145480155944824],["cari",-12.145537376403809],["▁зүйл",-12.14560317993164],["▁këtij",-12.14563274383545],["sele",-12.145655632019045],["halt",-12.145709037780762],["预",-12.14571762084961],["ട്ടി",-12.145784378051758],["▁მან",-12.145791053771973],["ഫ",-12.145866394042969],["ಎಸ್",-12.145869255065918],["ņa",-12.145873069763184],["egu",-12.145910263061523],["леж",-12.145962715148926],["▁مح",-12.14598274230957],["▁gadu",-12.146085739135742],["ၾကီး",-12.14619255065918],["▁esco",-12.14629364013672],["▁حکم",-12.146296501159668],["▁Bog",-12.146306991577148],["▁आरोप",-12.146309852600098],["▁දෙයක්",-12.146321296691896],["▁markii",-12.146366119384766],["▁אביב",-12.146397590637209],["▁آف",-12.146438598632812],["▁ગુજરાત",-12.146470069885254],["▁स्थान",-12.146482467651367],["▁Mau",-12.146539688110352],["fol",-12.146566390991213],["▁ټ",-12.146594047546388],["▁града",-12.146693229675291],["ників",-12.146703720092772],["▁выс",-12.146720886230469],["▁undir",-12.14675998687744],["支援",-12.146778106689451],["dakı",-12.146862983703612],["▁تب",-12.14689826965332],["▁кв",-12.146924018859863],["▁fondo",-12.146974563598633],["emo",-12.147035598754885],["▁sker",-12.147113800048828],["▁dobi",-12.147117614746094],["▁Kč",-12.147164344787598],["筆",-12.147165298461914],["▁Ft",-12.147193908691406],["▁tässä",-12.147201538085938],["กําลัง",-12.147289276123049],["▁развитие",-12.147356033325195],["▁entrega",-12.147360801696776],["▁hapa",-12.147403717041016],["yb",-12.
14743423461914],["學校",-12.147479057312012],["▁Ար",-12.147507667541504],["▁celui",-12.147509574890137],["yim",-12.14759349822998],["wand",-12.147729873657228],["▁(9",-12.147729873657228],["աշ",-12.147749900817873],["cké",-12.147762298583984],["▁agen",-12.147765159606934],["▁کړی",-12.147789001464844],["טר",-12.147797584533691],["▁cư",-12.147834777832031],["▁gewoon",-12.147844314575195],["Art",-12.147879600524902],["ଥିଲା",-12.147902488708496],["▁pow",-12.147913932800291],["▁πά",-12.14793872833252],["יא",-12.148015975952148],["▁第",-12.148042678833008],["▁classe",-12.148109436035156],["▁sumber",-12.148120880126951],["▁ചെയ്യ",-12.14816951751709],["ങ്ങളില്",-12.148170471191406],["cti",-12.148194313049316],["زي",-12.148219108581545],[":50",-12.148436546325684],["▁okkar",-12.14845085144043],["▁Τι",-12.14846420288086],["▁बंद",-12.148473739624023],["智慧",-12.148480415344238],["▁ดู",-12.148493766784668],["▁rehetra",-12.148518562316896],["首先",-12.14854621887207],["▁వారి",-12.148550033569336],["ème",-12.148603439331056],["גר",-12.148653030395508],["就能",-12.148730278015137],["▁Arbeits",-12.14877223968506],["tioner",-12.14880657196045],["ຢ່າງ",-12.148826599121094],["▁עבור",-12.14883804321289],["▁הכי",-12.148853302001951],["겨",-12.148998260498049],["▁외",-12.14903163909912],["▁BR",-12.149041175842283],["▁pêş",-12.149093627929688],["▁пода",-12.149093627929688],["کے",-12.14913558959961],["▁masse",-12.149150848388672],["vno",-12.149198532104492],["持续",-12.149234771728516],["▁бал",-12.149255752563477],["▁الي",-12.149269104003906],["▁Daily",-12.1492919921875],["生物",-12.149293899536133],["▁ല",-12.149296760559082],["▁Vila",-12.149321556091309],["▁ታ",-12.149364471435549],["▁gak",-12.149372100830078],["都不",-12.149394035339355],["功",-12.149502754211426],["▁kitab",-12.149530410766602],["ыз",-12.149548530578612],["မယ္",-12.149577140808104],["ової",-12.1496000289917],["▁случай",-12.149652481079102],["লো",-12.14981460571289],["不用",-12.1498384475708],["▁Μα",-12.149846076965332],["驚",-12.14990520477295],["▁agent",-12.14992332458496],["雲",-12.149934768676758],["▁funk",-12.150002479553224],["▁mít",-12.150005340576172],["▁שנים",-12.150046348571776],["▁මං",-12.150069236755373],["▁Stat",-12.150076866149902],["▁සි",-12.150079727172852],["▁tão",-12.150132179260254],["कु",-12.150165557861328],["ٺ",-12.150201797485352],["▁orci",-12.150216102600098],["ender",-12.150223731994627],["Па",-12.150287628173828],["▁нея",-12.150303840637209],["▁koti",-12.150324821472168],["规划",-12.15038013458252],["orna",-12.15041732788086],["▁انت",-12.150433540344238],["▁período",-12.15044403076172],["▁friends",-12.15049648284912],["ບໍ່",-12.15054702758789],["tettu",-12.150670051574709],["▁kohe",-12.150795936584473],["任务",-12.15084743499756],["▁biểu",-12.15092658996582],["▁perioada",-12.150927543640137],["ża",-12.150951385498049],["▁elkaar",-12.150961875915527],["▁mineral",-12.151052474975586],["▁Gö",-12.151104927062988],["▁Krist",-12.15113925933838],["▁Gün",-12.15122127532959],["vang",-12.151222229003906],["▁pacient",-12.15123176574707],["▁דבר",-12.151246070861816],["ाउने",-12.151252746582031],["fundi",-12.151291847229004],["karan",-12.151297569274902],["വേ",-12.151325225830078],["ాలని",-12.151369094848633],["house",-12.151416778564451],["▁North",-12.151416778564451],["▁සිංහල",-12.1514253616333],["हे",-12.151503562927246],["读",-12.15159034729004],["함",-12.151596069335938],["方は",-12.151623725891112],["目標",-12.151686668395996],["ન્",-12.15175437927246],["eid",-12.151774406433104],["幸福",-12.151776313781738],["▁अनेक",-12.151814460754396],["place",-12.1518583297
7295],["▁1960",-12.15187644958496],["▁anything",-12.151906967163086],["િંગ",-12.151944160461426],["▁احساس",-12.15200424194336],["▁Akademi",-12.15205192565918],["න්ද",-12.152060508728027],["娱乐",-12.152076721191406],["▁hud",-12.152143478393556],["▁titul",-12.152148246765137],["▁stup",-12.152154922485352],["▁sag",-12.152182579040527],["Ե",-12.152193069458008],["▁అనే",-12.152244567871094],["振",-12.152244567871094],["馆",-12.15232753753662],["шти",-12.152335166931152],["anî",-12.152379035949709],["▁nek",-12.152403831481934],["ína",-12.152413368225098],["AMA",-12.152432441711426],["▁данни",-12.152436256408691],["▁tương",-12.152461051940918],["▁שת",-12.152490615844728],["參加",-12.15251922607422],["▁vrlo",-12.152548789978027],["▁הרבה",-12.15259838104248],["ters",-12.152620315551758],["गो",-12.15269947052002],["וג",-12.15272045135498],["блі",-12.152762413024902],["익",-12.152780532836914],["vaa",-12.152791976928713],["tino",-12.152803421020508],["ющие",-12.152804374694824],["úl",-12.152814865112305],["ект",-12.152831077575684],["▁First",-12.152836799621582],["OT",-12.152847290039062],["kė",-12.15285873413086],["▁permainan",-12.15285873413086],["▁risus",-12.152883529663086],["▁انداز",-12.152972221374512],["经验",-12.153075218200684],["▁ਤ",-12.1531343460083],["ık",-12.153162002563477],["▁зав",-12.153220176696776],["פור",-12.153335571289062],["▁бөгөөд",-12.153342247009276],["веде",-12.153350830078123],["▁ማለት",-12.153366088867188],["leng",-12.153377532958984],["laya",-12.153444290161133],[":19",-12.153467178344728],["▁yolu",-12.153508186340332],["▁носи",-12.153524398803713],["▁fiscal",-12.153545379638672],["▁Sono",-12.153582572937012],["▁qof",-12.153587341308594],["▁Material",-12.153594970703123],["▁biasanya",-12.15376091003418],["▁touch",-12.153766632080078],["瓦",-12.153768539428713],["mű",-12.153772354125977],["До",-12.153804779052734],["ጃ",-12.153822898864746],["ָ",-12.153974533081056],["▁Aan",-12.154022216796877],["▁مشکلات",-12.15405559539795],["kola",-12.154129981994627],["vere",-12.154145240783691],["พบ",-12.154169082641602],["▁үз",-12.15418529510498],["人口",-12.154194831848145],["▁Rose",-12.154211044311523],["▁таким",-12.154240608215332],["향",-12.154263496398926],["▁ני",-12.154274940490724],["樹",-12.154278755187988],["▁ps",-12.154287338256836],["▁रो",-12.154372215270996],["ullah",-12.154407501220703],["▁muchas",-12.154422760009766],["▁Intel",-12.154438972473145],["bari",-12.154471397399902],["aithe",-12.154491424560549],["▁ജി",-12.154512405395508],["rme",-12.154583930969238],["спо",-12.154593467712402],["▁invita",-12.154600143432615],["▁جوان",-12.15467643737793],["▁dui",-12.154733657836914],["▁навіть",-12.15479564666748],["▁म्हणजे",-12.154796600341797],["ন্ত",-12.154810905456545],["▁nerv",-12.154814720153809],["ugu",-12.154829025268556],["єм",-12.154836654663086],[":18",-12.15495777130127],["emi",-12.154995918273926],["▁Ер",-12.154999732971191],["▁retro",-12.155014991760254],["▁Ide",-12.155024528503418],["▁Бога",-12.155040740966797],["牙",-12.155233383178713],["EA",-12.155250549316406],["▁hardcore",-12.155288696289062],["▁arbets",-12.155332565307615],["▁empat",-12.155366897583008],["▁Send",-12.155465126037598],["▁rim",-12.155579566955566],["віт",-12.155678749084473],["avat",-12.155696868896484],["▁fica",-12.15575122833252],["illo",-12.155856132507324],["▁अगर",-12.15588665008545],["▁40%",-12.155889511108398],["jali",-12.155901908874512],["āls",-12.155981063842772],["それ",-12.156055450439451],["期间",-12.156136512756348],["live",-12.156139373779297],["DD",-12.156188011169434],["詳細",-12.156197547912598],["uji
",-12.156261444091797],["▁sapien",-12.15628719329834],["sın",-12.156291961669922],["▁आपके",-12.15631103515625],["▁տեղի",-12.156338691711426],["▁darauf",-12.156350135803224],["▁مې",-12.156360626220703],["วัด",-12.156375885009766],["▁ता",-12.156421661376951],["▁таго",-12.156435012817385],["erd",-12.156463623046877],[":22",-12.156500816345217],["java",-12.156521797180176],["魔",-12.156536102294922],["▁(\"",-12.156638145446776],["▁блог",-12.156638145446776],["പ്പോള്",-12.156685829162598],["ლად",-12.156736373901367],["▁frei",-12.156835556030272],["nymi",-12.157022476196287],["▁Nej",-12.157115936279297],["▁Nah",-12.157151222229004],["谷",-12.157155990600586],["象",-12.15719223022461],["▁жакшы",-12.157220840454102],["max",-12.157283782958984],["▁zapo",-12.157352447509766],["wana",-12.15735912322998],["▁optimal",-12.157365798950195],["▁පුළුවන්",-12.157393455505373],["▁phone",-12.15739631652832],["▁বলেন",-12.157416343688965],["▁jeder",-12.15750789642334],["▁sign",-12.15754508972168],["▁نوشته",-12.157567977905272],["▁الرئيس",-12.15764045715332],["▁નહીં",-12.157710075378418],["▁rayon",-12.157774925231934],["▁algunos",-12.157843589782717],["▁วัน",-12.157879829406738],["▁dictum",-12.15788459777832],["biz",-12.157896041870115],["▁budú",-12.15794849395752],["招",-12.157959938049316],["▁ಎಲ್ಲ",-12.157966613769531],["ոլ",-12.158035278320312],["mum",-12.158058166503906],["▁mulig",-12.158140182495115],["▁Xo",-12.158143997192385],["ուլ",-12.158145904541016],["豆",-12.158187866210938],["▁keadaan",-12.158196449279783],["▁постоянно",-12.158207893371582],["▁ସା",-12.158226013183594],["omi",-12.158334732055664],["▁pats",-12.158365249633787],["▁میلیون",-12.158408164978027],["▁Ris",-12.158409118652344],["▁terme",-12.158422470092772],["pē",-12.158430099487305],["лам",-12.158466339111328],["▁중국",-12.158588409423828],["nnar",-12.158601760864258],["▁yihiin",-12.158631324768066],["koh",-12.158647537231444],["▁saw",-12.158647537231444],["▁araw",-12.158665657043455],["▁Мне",-12.15870761871338],["үй",-12.15871810913086],["聖",-12.15872287750244],["▁Text",-12.158723831176758],["▁Ри",-12.158732414245604],["νά",-12.158811569213867],["inha",-12.158885955810549],["▁Ár",-12.15890121459961],["elli",-12.158964157104492],["▁ഡ",-12.158982276916504],["▁спор",-12.159048080444336],["య్య",-12.15907859802246],["发布",-12.159090995788574],["بان",-12.159096717834473],["▁ڊ",-12.159099578857422],["▁되",-12.159106254577637],["▁všetko",-12.159173965454102],["▁фонд",-12.159198760986328],["▁තුළ",-12.159221649169922],["▁once",-12.159238815307615],["cznie",-12.1592435836792],["▁హ",-12.159263610839844],["▁करोड",-12.159337043762209],["rend",-12.159345626831056],["▁vär",-12.159440994262695],["kea",-12.159497261047363],["רק",-12.15950870513916],["▁enorme",-12.159561157226562],["▁həmin",-12.15956211090088],["▁novas",-12.159581184387209],["摩",-12.159601211547852],["▁ڏ",-12.159621238708496],["▁Fil",-12.15963363647461],["▁2013,",-12.159712791442873],["▁daxil",-12.15971565246582],["лите",-12.1597318649292],["EB",-12.15986156463623],["▁ገ",-12.15986156463623],["▁nouvelle",-12.159872055053713],["ján",-12.159927368164062],["ಸು",-12.159955024719238],["द्ध",-12.160117149353027],["гээ",-12.160158157348633],["▁Minu",-12.160202980041504],["▁tijdens",-12.16027545928955],["isering",-12.160277366638184],["▁Local",-12.160279273986816],["▁жаткан",-12.16033172607422],["▁diplom",-12.160333633422852],["konto",-12.160374641418455],["▁ilma",-12.160419464111328],["itse",-12.160489082336426],["сут",-12.160497665405272],["▁Hann",-12.160514831542969],["konna",-12.160585403442385],["▁تە",-12.
160608291625977],["▁domingo",-12.160609245300291],["ழு",-12.16067600250244],["▁زنان",-12.16067600250244],["▁dosta",-12.160709381103516],["▁Май",-12.160723686218262],["▁gerade",-12.16074562072754],["▁بولغان",-12.160772323608398],["▁until",-12.160871505737305],["▁độc",-12.16088581085205],["▁заедно",-12.160944938659668],["тельно",-12.160956382751465],["▁võ",-12.161060333251951],["▁гэдэг",-12.161113739013672],["jate",-12.161118507385254],["ادي",-12.161140441894531],["ന്ത",-12.161314964294434],["رش",-12.16132640838623],["▁Pem",-12.161361694335938],["▁fydd",-12.161368370056152],["ئو",-12.161433219909668],["sema",-12.161493301391602],["▁पार",-12.16149616241455],["▁영",-12.161511421203612],["▁hadi",-12.161545753479004],["盤",-12.16158676147461],["อง",-12.16158962249756],["▁siihen",-12.161639213562012],["▁become",-12.161641120910645],["▁Bài",-12.161643981933594],["▁Shop",-12.161706924438477],["▁Beispiel",-12.16177463531494],["▁mengi",-12.161813735961914],["един",-12.161823272705078],["▁trouve",-12.161855697631836],["▁გაი",-12.161954879760742],["ቋ",-12.162017822265623],["teni",-12.162028312683104],["▁келе",-12.162066459655762],["▁९",-12.162075996398926],["ಗ್",-12.162134170532228],["▁kina",-12.162198066711426],["▁Ге",-12.16222858428955],["āciju",-12.16232681274414],["利益",-12.162352561950684],["ီ",-12.16241455078125],["▁yakın",-12.162416458129885],["▁ساز",-12.162424087524414],["laş",-12.162508010864258],["ต์",-12.162508964538574],["GY",-12.162514686584473],["aid",-12.162524223327637],["▁certain",-12.162531852722168],["▁press",-12.16256046295166],["โต",-12.16264820098877],["▁geven",-12.162687301635742],["▁саме",-12.16281032562256],["▁tentu",-12.162951469421388],["▁фестивал",-12.16305160522461],["жин",-12.163097381591797],["▁Article",-12.163115501403809],["▁другим",-12.163152694702148],["wed",-12.163175582885742],["וס",-12.16318702697754],["ický",-12.163196563720703],["▁کرتا",-12.163200378417969],["처",-12.16325569152832],["ילה",-12.163268089294434],["zze",-12.163273811340332],["▁Jung",-12.163277626037598],["aks",-12.163348197937012],["▁شان",-12.163348197937012],["▁mateix",-12.163352966308594],["ると",-12.16340160369873],["σω",-12.163407325744627],["বার",-12.163434028625488],["犯",-12.163496971130373],["▁mbalimbali",-12.163552284240724],["stal",-12.163578033447266],["▁سلامت",-12.163610458374023],["▁पानी",-12.163650512695312],["▁būtu",-12.163653373718262],["▁നാ",-12.163701057434082],["形式",-12.163702964782717],["▁seksual",-12.163705825805664],["sió",-12.163902282714844],["▁گا۔",-12.163915634155272],["Դ",-12.16399097442627],["ष्",-12.16402816772461],["▁priser",-12.164087295532228],["▁уста",-12.164143562316896],["▁especialmente",-12.16418170928955],["ël",-12.16419792175293],["淡",-12.164273262023926],["ireo",-12.164286613464355],["стой",-12.164297103881836],["▁Jar",-12.164315223693848],["hut",-12.164388656616213],["▁శ",-12.164471626281738],["▁jednu",-12.164549827575684],["▁vaxt",-12.164552688598633],["ият",-12.16456413269043],["▁rohkem",-12.164565086364746],["▁minister",-12.164576530456545],["▁Ці",-12.164612770080566],["▁allo",-12.164621353149414],["▁clic",-12.16466999053955],["▁ຈາກ",-12.164730072021484],["▁Kurs",-12.164783477783203],["માંથી",-12.16482639312744],["250",-12.164835929870604],["▁Durante",-12.16484546661377],["dow",-12.164875984191896],["▁مانند",-12.164875984191896],["ינ",-12.164895057678224],["▁ചെയ്തു",-12.164896965026855],["▁ລາວ",-12.164901733398438],["ову",-12.164926528930664],["vig",-12.164934158325195],["▁linn",-12.165003776550291],["/2015",-12.165010452270508],["▁պետական",-12.1650390625],["▁Sou",
-12.165091514587402],["လူ",-12.165119171142578],["▁ορ",-12.165128707885742],["ôn",-12.165152549743652],["▁ниво",-12.165181159973145],["జీ",-12.165306091308594],["▁ühe",-12.165348052978516],["▁lik",-12.165358543395996],["▁situ",-12.165393829345703],["▁गरेर",-12.165427207946776],["aldi",-12.165432929992676],["svar",-12.16544246673584],["▁Kri",-12.165508270263672],["▁τόσο",-12.165510177612305],["neb",-12.165578842163086],["BC",-12.165584564208984],["▁natuurlijk",-12.1655855178833],["▁pendant",-12.165640830993652],["僅",-12.16574001312256],["▁ngayon",-12.165782928466797],["▁ít",-12.16586971282959],["нім",-12.165926933288574],["▁mál",-12.165987968444824],["▁კიდევ",-12.166023254394531],["▁Sistem",-12.166298866271973],["▁лег",-12.166338920593262],["▁tuna",-12.166367530822754],["▁منظور",-12.166410446166992],["▁sper",-12.16641902923584],["tetaan",-12.16645050048828],["▁gäller",-12.16648769378662],["▁nélkül",-12.166488647460938],["▁අවශ්",-12.166491508483888],["▁каждый",-12.16650390625],["▁respekt",-12.166563987731934],["HAN",-12.16657543182373],["теля",-12.166579246520996],["amme",-12.166590690612791],["цима",-12.166637420654297],["Ν",-12.16669750213623],["▁ജ",-12.166799545288086],["▁Dur",-12.166820526123049],["現場",-12.166836738586426],["▁tinggal",-12.166851997375488],["tæ",-12.166868209838867],["▁نمایش",-12.166879653930664],["őr",-12.16692352294922],["故",-12.16693115234375],["board",-12.166973114013672],["▁انہیں",-12.166990280151367],["▁पूरा",-12.166994094848633],["▁بدل",-12.167017936706545],["▁prov",-12.16706371307373],["ól",-12.167076110839844],["▁bide",-12.167121887207031],["▁vseh",-12.167197227478027],["▁której",-12.167213439941406],["hab",-12.167217254638672],["▁นาย",-12.167223930358888],["▁prvo",-12.167352676391602],["сни",-12.167444229125977],["▁misli",-12.167482376098633],["▁management",-12.167548179626465],["nese",-12.16757106781006],["▁modifica",-12.16758632659912],["лд",-12.167738914489746],["▁entrar",-12.167808532714844],["صر",-12.1678466796875],["tex",-12.167868614196776],["▁After",-12.167886734008787],["পুর",-12.167909622192385],["itz",-12.167920112609863],["vou",-12.167932510375977],["▁cân",-12.16793727874756],["▁שו",-12.167972564697266],["▁ಡಿ",-12.16798496246338],["رې",-12.16799545288086],["▁екс",-12.167997360229492],["▁کمی",-12.168048858642578],["▁herë",-12.168078422546388],["▁Side",-12.168224334716797],["መድ",-12.168241500854492],["انا",-12.168290138244627],["애",-12.16836929321289],["▁arri",-12.168381690979004],["ዴ",-12.168466567993164],["jska",-12.16847038269043],["אים",-12.168479919433594],["kanak",-12.16849422454834],["ctus",-12.168539047241213],["▁કરવામાં",-12.168545722961426],["skiego",-12.168548583984377],["ங்களை",-12.16855812072754],["▁заради",-12.168671607971191],["▁гарант",-12.168742179870604],["است",-12.168752670288086],["▁cụ",-12.168825149536133],["voor",-12.16882610321045],["door",-12.168828010559082],["ntas",-12.1688871383667],["▁రెండు",-12.168941497802734],["▁infatti",-12.168946266174316],["▁вкус",-12.168957710266112],["▁કા",-12.168965339660645],["▁بک",-12.16901683807373],["▁Milan",-12.169039726257324],["▁پوست",-12.169039726257324],["атив",-12.16907024383545],["ပုံ",-12.169071197509766],["▁چھ",-12.16911792755127],["ொ",-12.169154167175291],["▁ўсё",-12.169235229492188],["nė",-12.1692533493042],["▁spol",-12.169254302978516],["值得",-12.169262886047363],["èn",-12.169315338134766],["▁ನೀವು",-12.16943645477295],["riad",-12.169447898864746],["▁მთავარი",-12.16946506500244],["άλ",-12.169471740722656],["ଅ",-12.169471740722656],["öö",-12.16948127746582],["ţă",-12.169610023498535],["▁
هفته",-12.169626235961914],["▁1982",-12.169668197631836],["▁***",-12.169760704040527],["▁Gol",-12.169788360595703],["пол",-12.169891357421877],["dka",-12.16991138458252],["▁কিছু",-12.16992473602295],["にある",-12.169963836669922],["کت",-12.170024871826172],["本当に",-12.170079231262209],["▁കാണ",-12.170146942138672],["sok",-12.17017650604248],["бле",-12.170198440551758],["▁시간",-12.17034149169922],["فی",-12.170366287231444],["▁Chủ",-12.17043113708496],["▁રહે",-12.170454025268556],["下さい",-12.17046070098877],["tero",-12.170469284057615],["▁Cele",-12.170487403869627],["成果",-12.170502662658691],["Net",-12.170525550842283],["▁kit",-12.17054557800293],["▁stund",-12.170562744140623],["答",-12.17058277130127],["▁Nuo",-12.170604705810549],["▁iga",-12.170624732971191],["▁East",-12.170700073242188],["製作",-12.170757293701172],["NN",-12.17097282409668],["لاش",-12.17104721069336],["ሐ",-12.17104721069336],["▁करण्यात",-12.17107391357422],["▁corso",-12.171086311340332],["▁мають",-12.17113971710205],["vei",-12.171198844909668],["▁(13",-12.171257972717283],["jne",-12.17127799987793],["▁פֿון",-12.17141819000244],["▁fáil",-12.171442985534668],["領",-12.171449661254885],["々",-12.17145538330078],["pok",-12.17145824432373],["shë",-12.17149543762207],["▁22,",-12.171567916870115],["改变",-12.171624183654783],["▁гэтым",-12.171669960021973],["▁ေ",-12.171700477600098],["▁صنعت",-12.171741485595703],["这里",-12.171839714050291],["▁имат",-12.171865463256836],["▁iemand",-12.171921730041504],["▁Республикасының",-12.17192554473877],["▁Marie",-12.171929359436035],["BER",-12.171930313110352],["▁sağ",-12.171967506408691],["▁ਸਾ",-12.172021865844728],["ahi",-12.172035217285156],["禁",-12.17205047607422],["ન્સ",-12.17212200164795],["وک",-12.172136306762695],["▁Jangan",-12.172142028808594],["аш",-12.172152519226074],["ações",-12.172174453735352],["▁Mercedes",-12.172205924987791],["▁positive",-12.172215461730955],["義",-12.172245979309082],["tili",-12.172317504882812],["tice",-12.172319412231444],["▁извор",-12.172340393066406],["iq",-12.172354698181152],["▁ქართული",-12.172447204589844],["▁დე",-12.1725435256958],["скую",-12.172585487365724],["ffi",-12.17263412475586],["Са",-12.17263889312744],["▁annak",-12.172640800476074],["▁pain",-12.172741889953612],["影片",-12.172799110412598],["▁contar",-12.172845840454102],["▁výrob",-12.172846794128418],["▁ferie",-12.172853469848633],["▁پاڪستان",-12.172880172729492],["▁dende",-12.172901153564451],["▁спи",-12.172907829284668],["▁hefyd",-12.172908782958984],["▁odgovor",-12.172926902770996],["kuva",-12.172935485839844],["▁utiliza",-12.172965049743652],["▁azért",-12.172986030578612],["▁مق",-12.173004150390623],["時に",-12.173032760620115],["▁ბა",-12.173049926757812],["ნება",-12.173050880432127],["нете",-12.173075675964355],["نظر",-12.17311191558838],["ඹ",-12.173118591308594],["▁تیر",-12.173194885253906],["▁වලට",-12.173227310180664],["機能",-12.17326831817627],["▁insanlar",-12.17331886291504],["ლები",-12.17333984375],["▁kroz",-12.173340797424316],["▁წა",-12.173348426818848],["殺",-12.173429489135742],["▁1918",-12.173575401306152],["▁dul",-12.173601150512695],["▁oluştur",-12.17361545562744],["ဖ",-12.173622131347656],["▁педагог",-12.173696517944336],["▁болды",-12.173723220825195],["زو",-12.173742294311523],["kör",-12.173758506774902],["樣",-12.173770904541016],["လည်း",-12.173803329467772],["ต้องการ",-12.173827171325684],["▁majú",-12.17386531829834],["▁phạm",-12.17386531829834],["▁Հանրապետության",-12.17386531829834],["▁życia",-12.173867225646973],["гын",-12.173884391784668],["оци",-12.173890113830566],["▁keh",-12.173929214477
54],["▁küll",-12.17396068572998],["নী",-12.173979759216309],["чета",-12.174028396606444],["▁jira",-12.17404842376709],["支付",-12.17417335510254],["▁urna",-12.174241065979004],["ಾಗಿ",-12.17431354522705],["▁Hrvatskoj",-12.174362182617188],["▁nguồn",-12.174362182617188],["▁produits",-12.174372673034668],["特色",-12.174457550048828],["▁údajov",-12.174518585205078],["stellung",-12.174541473388672],["▁бра",-12.17454433441162],["३",-12.174616813659668],["▁ಹೇಳ",-12.17464828491211],["lariga",-12.17468547821045],["▁нашата",-12.174744606018066],["elő",-12.17479419708252],["▁perusahaan",-12.174853324890137],["▁cenu",-12.174860954284668],["▁شیئر",-12.1748628616333],["glio",-12.174893379211426],["壓",-12.174918174743652],["▁kurz",-12.17494773864746],["ซา",-12.175082206726074],["▁yếu",-12.175088882446287],["୫",-12.175113677978516],["▁പല",-12.175138473510742],["სრულ",-12.175247192382812],["وش",-12.17541217803955],["ۈ",-12.175436973571776],["▁trách",-12.175451278686523],["▁Politik",-12.17549991607666],["多くの",-12.175500869750977],["▁PU",-12.175573348999023],["rla",-12.175579071044922],["▁మంది",-12.175617218017578],["istu",-12.175667762756348],["မည္",-12.175700187683104],["-21",-12.17571258544922],["ebilir",-12.175745964050291],["▁weten",-12.175745964050291],["WE",-12.175758361816406],["▁ഇനി",-12.175763130187988],["уы",-12.175843238830566],["▁երբ",-12.175850868225098],["▁Vill",-12.175872802734377],["▁дугаар",-12.17589282989502],["▁볼",-12.17592430114746],["ෙක්",-12.17597770690918],["▁ڪي",-12.17602252960205],["▁প্রতি",-12.176066398620604],["作為",-12.176095962524414],["▁kent",-12.176161766052246],["▁saker",-12.176196098327637],["▁партнер",-12.176220893859863],["▁sambil",-12.17626953125],["▁vente",-12.176277160644531],["行动",-12.176286697387695],["▁aukera",-12.176289558410645],["▁??",-12.176298141479492],["▁acceso",-12.1763277053833],["▁Internacional",-12.17634391784668],["▁१२",-12.176349639892578],["▁isla",-12.176352500915527],["▁مسائل",-12.176389694213867],["ॅ",-12.176416397094728],["▁മെ",-12.176416397094728],["▁sơ",-12.176441192626951],["მს",-12.176468849182127],["ვალ",-12.176481246948242],["מע",-12.176488876342772],["เว",-12.17654800415039],["▁besser",-12.176578521728516],["▁Baby",-12.176580429077148],["න්ට",-12.176602363586426],["▁comments",-12.176675796508787],["▁İlk",-12.176701545715332],["ијата",-12.17672061920166],["▁ସି",-12.176727294921877],["κά",-12.176730155944824],["▁வே",-12.17675495147705],["▁افزود",-12.17683219909668],["▁organización",-12.176885604858398],["處理",-12.176892280578612],["▁2015,",-12.176898956298828],["ισμού",-12.176912307739258],["▁قر",-12.176955223083496],["OR",-12.177021980285645],["▁AG",-12.17702293395996],["▁आएको",-12.17702293395996],["▁Familie",-12.17703342437744],["▁fronte",-12.177066802978516],["▁tax",-12.177083015441896],["▁1941",-12.177149772644045],["▁23,",-12.177220344543455],["bé",-12.177245140075684],["▁amar",-12.177264213562012],["▁ਕਾ",-12.17729949951172],["▁explo",-12.1773042678833],["너",-12.177346229553224],["▁почему",-12.177350997924805],["δρ",-12.177468299865724],["▁పో",-12.177545547485352],["▁yaşa",-12.177556037902832],["tī",-12.177594184875488],["町",-12.177620887756348],["▁bani",-12.17771339416504],["under",-12.177714347839355],["▁Universidade",-12.177742958068848],["▁ремонт",-12.17774486541748],["ढ",-12.177997589111328],["рай",-12.178003311157228],["▁పెట్ట",-12.178017616271973],["▁samband",-12.178092956542969],["▁Datuk",-12.178177833557127],["rsa",-12.178208351135254],["▁Questo",-12.178213119506836],["▁musical",-12.178245544433594],["▁چل",-12.178269386291504],["▁Ziel",-1
2.178282737731934],["▁todella",-12.178314208984377],["▁soprattutto",-12.178318977355955],["▁Nes",-12.178324699401855],["▁dele",-12.178325653076172],["भि",-12.178335189819336],["▁arcu",-12.178335189819336],["▁ніж",-12.178346633911133],["▁акт",-12.178478240966797],["யி",-12.178534507751465],["▁ਗੁਰੂ",-12.178571701049805],["自分",-12.178627967834473],["▁gla",-12.178654670715332],["glia",-12.178678512573242],["læg",-12.17878532409668],["▁فوق",-12.178832054138184],["▁ចូល",-12.178876876831056],["Il",-12.1788969039917],["muk",-12.178905487060549],["▁ගත",-12.178918838500977],["▁đá",-12.178947448730469],["▁લોકો",-12.178972244262695],["▁ति",-12.179040908813477],["▁sujet",-12.179071426391602],["лж",-12.17907428741455],["دہ",-12.179085731506348],["▁Nun",-12.179107666015623],["▁៣",-12.179158210754396],["固",-12.179162979125977],["▁बैठक",-12.179182052612305],["kende",-12.179186820983888],["▁अमेरिका",-12.17921543121338],["▁princip",-12.1792631149292],["lən",-12.179347038269045],["सन",-12.179361343383787],["vand",-12.17937183380127],["seid",-12.179388999938965],["liz",-12.179391860961914],["▁tj",-12.179448127746582],["▁cream",-12.179469108581545],["тук",-12.179492950439451],["jeli",-12.17951202392578],["業務",-12.17953109741211],["▁konsult",-12.179536819458008],["▁Stre",-12.17957878112793],["اڻي",-12.179705619812012],["gift",-12.179733276367188],["ေရာက္",-12.179757118225098],["ळी",-12.179760932922363],["δια",-12.179773330688477],["ൺ",-12.179803848266602],["▁خانواده",-12.179807662963867],["▁ఎలా",-12.179837226867676],["atzen",-12.179855346679688],["кре",-12.179863929748535],["▁impact",-12.179896354675291],["▁nepri",-12.18000602722168],["▁Cro",-12.180042266845703],["ighet",-12.180045127868652],["ൗ",-12.180065155029297],["▁bitte",-12.180097579956056],["▁taky",-12.1801176071167],["▁vert",-12.1801176071167],["▁ազատ",-12.180130004882812],["▁koment",-12.180148124694824],["ىدۇ",-12.180169105529783],["۱",-12.18019676208496],["誰",-12.180211067199709],["tado",-12.180219650268556],["▁нашите",-12.180230140686035],["센터",-12.180305480957031],["配合",-12.180326461791992],["▁Taip",-12.18033218383789],["ंक",-12.180353164672852],["пред",-12.180354118347168],["▁trip",-12.180364608764648],["▁Tā",-12.18044662475586],["ял",-12.18047332763672],["▁Albert",-12.180480003356934],["kord",-12.18048095703125],["cida",-12.180567741394045],["aat",-12.180597305297852],["▁пом",-12.1806001663208],["▁zeker",-12.18062686920166],["导致",-12.180665969848633],["▁כן",-12.18093490600586],["▁madhe",-12.180964469909668],["▁වැ",-12.180980682373049],["▁aq",-12.181017875671388],["れば",-12.18103313446045],["▁persoon",-12.181050300598145],["ंत",-12.18106746673584],["ect",-12.181073188781738],["▁exact",-12.181090354919434],["的新",-12.181114196777344],["▁stay",-12.181135177612305],["▁darbu",-12.181158065795898],["▁వె",-12.181200981140137],["şan",-12.181212425231934],["▁дээ",-12.18124294281006],["▁Kaip",-12.181260108947754],["▁dzi",-12.181278228759766],["▁kteří",-12.181299209594728],["▁kanskje",-12.181303977966309],["▁líka",-12.181337356567385],["▁turpis",-12.181366920471191],["▁باشید",-12.181377410888672],["▁살",-12.181384086608888],["▁venner",-12.181387901306152],["କି",-12.181416511535645],["തും",-12.181514739990234],["货",-12.181525230407717],["▁Bes",-12.181559562683104],["▁augue",-12.181570053100586],["▁PH",-12.18159294128418],["▁۵",-12.181629180908203],["▁vielen",-12.18163013458252],["ვრ",-12.18164348602295],["될",-12.181670188903809],["卫",-12.18167209625244],["ائڻ",-12.18167781829834],["盡",-12.181690216064451],["▁høj",-12.18170928955078],["▁women",-12.18172550201416],
["ANI",-12.181785583496094],["▁Prin",-12.181792259216309],["▁хамгийн",-12.181800842285156],["▁hưởng",-12.181818962097168],["▁Kle",-12.18183708190918],["▁Id",-12.181849479675291],["▁УИХ",-12.181861877441406],["▁ದ",-12.181872367858888],["rzę",-12.181892395019531],["▁üzerine",-12.181896209716797],["life",-12.18189811706543],["▁primul",-12.181909561157228],["▁ਹਰ",-12.181920051574709],["▁امور",-12.181933403015137],["▁holl",-12.18199062347412],["слав",-12.182003021240234],["▁gall",-12.182046890258787],["ishwa",-12.182051658630373],["▁Continue",-12.18211269378662],["ාව",-12.182140350341797],["▁warm",-12.182156562805176],["რება",-12.182168006896973],["rist",-12.182244300842283],["پی",-12.182249069213867],["これ",-12.182293891906738],["▁точки",-12.182303428649902],["▁آھي",-12.182308197021484],["EE",-12.182327270507812],["llar",-12.182415008544922],["gā",-12.182486534118652],["▁ਸੁ",-12.18251609802246],["овите",-12.182544708251951],["▁αρ",-12.182575225830078],["起來",-12.182623863220217],["▁pse",-12.182653427124023],["voli",-12.182719230651855],["▁anden",-12.182743072509766],["▁arre",-12.182758331298828],["تهم",-12.182825088500977],["▁intim",-12.182825088500977],["▁VOA",-12.18283462524414],["▁protože",-12.18283748626709],["vate",-12.18285846710205],["▁Их",-12.182881355285645],["too",-12.18290901184082],["▁após",-12.18291473388672],["▁своего",-12.182918548583984],["isan",-12.183058738708496],["razi",-12.183061599731444],["ַ",-12.183061599731444],["▁participa",-12.183103561401367],["ড়",-12.18311595916748],["?“",-12.183140754699709],["▁sá",-12.183152198791504],["▁Може",-12.183165550231934],["▁1979",-12.18317699432373],["▁meny",-12.183238983154297],["ချ",-12.183259963989258],["ások",-12.18327808380127],["▁chống",-12.18331241607666],["{",-12.183343887329102],["▁sais",-12.183379173278809],["▁Carlos",-12.183420181274414],["▁דאס",-12.183473587036133],["▁наук",-12.18348503112793],["▁đau",-12.183518409729004],["еше",-12.18352222442627],["sl",-12.18359661102295],["▁reci",-12.18364715576172],["▁машин",-12.183667182922363],[":23",-12.183670043945312],["▁선택",-12.183696746826172],["▁topp",-12.183714866638184],["ējo",-12.183730125427246],["▁tecnologia",-12.183788299560549],["gana",-12.183825492858888],["മോ",-12.183905601501465],["▁مک",-12.18397617340088],["战略",-12.184015274047852],["рова",-12.184024810791016],["▁manifest",-12.184036254882812],["යින්",-12.184046745300291],["▁НЕ",-12.184060096740724],["편",-12.18410873413086],["VE",-12.184123039245604],["▁kabel",-12.18415355682373],["▁ayuda",-12.184200286865234],["▁années",-12.184228897094728],["▁Ху",-12.18435001373291],["uloj",-12.1845703125],["▁ជ",-12.184572219848633],["▁tổng",-12.18467617034912],["▁അമ്മ",-12.184688568115234],["▁hair",-12.184717178344728],["软件",-12.184757232666016],["輕",-12.18476390838623],["▁Ltd",-12.184767723083496],["▁hãy",-12.18479347229004],["준",-12.184794425964355],["▁مقاله",-12.184903144836426],["▁skapa",-12.184917449951172],["▁하지만",-12.18509006500244],["зова",-12.185235023498535],["▁slot",-12.185239791870115],["щу",-12.185243606567385],["▁Đăng",-12.185297966003418],["ทรง",-12.18531322479248],["ično",-12.185392379760742],["寶",-12.185410499572754],["spekt",-12.185447692871094],["▁ilman",-12.185460090637209],["▁पे",-12.185466766357422],["▁forbindelse",-12.185470581054688],["onych",-12.185601234436035],["ilt",-12.185623168945312],["▁بودن",-12.185626029968262],["njo",-12.185630798339844],["▁drag",-12.18563747406006],["üh",-12.185680389404297],["▁dibuat",-12.18572998046875],["教师",-12.185736656188965],["▁kro",-12.18575954437256],["дя",-12.18586158752441
4],["▁Mate",-12.18600368499756],["рим",-12.18601417541504],["▁эх",-12.18606185913086],["▁២",-12.18613052368164],["▁확인",-12.18619155883789],["▁జ",-12.186199188232422],["▁hormon",-12.186239242553713],["▁шинэ",-12.186243057250977],["降低",-12.186261177062988],["ετ",-12.186379432678224],["▁import",-12.186430931091309],["▁کاری",-12.18648910522461],["▁falar",-12.186521530151367],["ေစ",-12.18659782409668],["main",-12.186606407165527],["▁personne",-12.18666172027588],["▁ν",-12.18670654296875],["ერთ",-12.186708450317385],["글",-12.186718940734863],["▁देखि",-12.186737060546877],["▁đảm",-12.186784744262695],["▁اظهار",-12.186784744262695],["▁उनके",-12.186793327331545],["цен",-12.186899185180664],["▁couple",-12.18692398071289],["pere",-12.18693733215332],["▁sä",-12.18697452545166],["▁niečo",-12.187033653259276],["maks",-12.187036514282228],["sett",-12.187129974365234],["ERA",-12.187131881713867],["tho",-12.187131881713867],["▁अनु",-12.187140464782717],["▁мае",-12.187150955200195],["abile",-12.18727684020996],["▁Porto",-12.187280654907228],["▁tidigare",-12.187285423278809],["▁ನಾನು",-12.187292098999023],["▁Natural",-12.187300682067873],["▁വലിയ",-12.187307357788086],["▁سید",-12.187337875366213],["عا",-12.187365531921388],["充分",-12.187386512756348],["尤其是",-12.187387466430664],["側",-12.18747329711914],["▁тіл",-12.187488555908203],["▁დი",-12.187499046325684],["▁maklik",-12.187535285949709],["▁Sm",-12.187542915344238],["▁babae",-12.187570571899414],["▁Fat",-12.187586784362791],["抱",-12.187618255615234],["▁кос",-12.187639236450195],["效",-12.187708854675291],["▁සා",-12.187745094299316],["sund",-12.187786102294922],["預",-12.18779182434082],["▁tengah",-12.18782901763916],["មិន",-12.187915802001951],["▁ўз",-12.18795394897461],["dziel",-12.187971115112305],["▁mauris",-12.188039779663086],["анс",-12.18805694580078],["pari",-12.188076972961426],["▁തല",-12.188093185424805],["▁मध्य",-12.188105583190918],["ଜି",-12.188125610351562],["▁používa",-12.18813705444336],["肌",-12.188175201416016],["脚",-12.188232421875],["▁этим",-12.188246726989746],["Ab",-12.188270568847656],["▁saúde",-12.188294410705566],["▁Spec",-12.188323974609377],["▁manat",-12.188339233398438],["▁قبول",-12.188377380371094],["▁Ihrer",-12.188385009765623],["▁ahaan",-12.188425064086914],["▁esan",-12.188425064086914],["▁laut",-12.188430786132812],["ائے",-12.188459396362305],["мак",-12.18850040435791],["▁predaj",-12.188509941101074],["독",-12.188511848449709],["eris",-12.188569068908691],["url",-12.188591003417969],["love",-12.188602447509766],["▁درست",-12.188673973083496],["▁stå",-12.188702583312988],["▁노",-12.188729286193848],["▁ciddi",-12.188732147216797],["▁siad",-12.188756942749023],["吸引",-12.188785552978516],["▁bzw",-12.188831329345703],["▁ત્યારે",-12.188833236694336],["▁تغییر",-12.18907356262207],["ND",-12.189078330993652],["▁viens",-12.189079284667969],["цэ",-12.189090728759766],["▁asupra",-12.189091682434082],["▁tarp",-12.18913745880127],["fund",-12.18917465209961],["INI",-12.189180374145508],["▁chegar",-12.189254760742188],["▁demikian",-12.189289093017578],["▁celý",-12.189316749572754],["▁tvor",-12.189336776733398],["▁Eng",-12.18935203552246],["▁budou",-12.189407348632812],["▁اها",-12.189412117004396],["▁Bob",-12.18950080871582],["▁పని",-12.189531326293944],["тера",-12.189534187316896],["▁laiku",-12.18958568572998],["▁William",-12.189703941345217],["▁haut",-12.18974494934082],["จะมี",-12.189769744873049],["皆",-12.189777374267578],["NK",-12.189919471740724],["▁기자",-12.189963340759276],["▁np",-12.19004249572754],["ující",-12.19005012512207],["▁ሁኔታ",-12.190060
61553955],["▁sep",-12.190081596374512],["ဇ",-12.190109252929688],["▁restoran",-12.190235137939451],["▁Чу",-12.190281867980955],["▁مدل",-12.190281867980955],["▁(18+)",-12.190296173095703],["ופ",-12.190332412719728],["▁якая",-12.190357208251951],["▁البته",-12.190367698669434],["▁něco",-12.190387725830078],["▁meglio",-12.190423965454102],["▁dood",-12.190455436706545],["▁GO",-12.190476417541504],["zés",-12.19047737121582],["lach",-12.190502166748049],["▁14-",-12.19056224822998],["▁nami",-12.190603256225586],["▁ды",-12.190630912780762],["▁xidmət",-12.190638542175291],["ափ",-12.190671920776367],["▁చేసి",-12.190808296203612],["▁época",-12.19083309173584],["▁ക്കും",-12.1908540725708],["▁nilai",-12.190861701965332],["▁cikin",-12.190884590148926],["▁ediyor",-12.190898895263672],["▁Fly",-12.190910339355469],["тър",-12.190918922424316],["▁размер",-12.190921783447266],["▁negativ",-12.19098949432373],["ụ",-12.191112518310549],["▁بما",-12.191152572631836],["▁unit",-12.19120979309082],["Video",-12.191235542297363],["▁Old",-12.19126319885254],["▁Carl",-12.191267013549805],["ちょっと",-12.191303253173828],["▁выбор",-12.191312789916992],["▁2017-",-12.191349029541016],["▁ار",-12.19137954711914],["▁seinen",-12.191390991210938],["▁dros",-12.191452026367188],["▁наша",-12.191473960876465],["▁cây",-12.191524505615234],["▁hitz",-12.191569328308104],["▁проведення",-12.191575050354004],["静",-12.19162940979004],["▁CHA",-12.19170093536377],["bog",-12.191722869873049],["ாத",-12.191736221313477],["▁potrzeb",-12.191747665405272],["ổ",-12.191795349121094],["▁Зна",-12.191843032836914],["▁רב",-12.191849708557127],["的时间",-12.191861152648926],["▁Harga",-12.191898345947266],["▁(←",-12.192051887512209],["▁vagyok",-12.19214916229248],["siniz",-12.192171096801758],["女人",-12.19219207763672],["▁ଉପ",-12.192218780517578],["▁deras",-12.19223976135254],["▁alia",-12.192289352416992],["▁ଖବର",-12.192303657531738],["▁wong",-12.19230842590332],["/2013",-12.192360877990724],["▁уче",-12.19236183166504],["esa",-12.192376136779783],["Ը",-12.19240951538086],["▁CR",-12.192424774169922],["▁пише",-12.192424774169922],["▁жоқ",-12.192458152770996],["рп",-12.192523002624512],["▁طریق",-12.192538261413574],["▁Nak",-12.192584037780762],["▁direktor",-12.19267749786377],["ち",-12.192715644836426],["▁progetto",-12.192806243896484],["▁түр",-12.192827224731444],["▁Dá",-12.192862510681152],["سال",-12.192886352539062],["▁stik",-12.192888259887695],["ဘူး",-12.192923545837402],["جى",-12.192955017089844],["▁Ata",-12.19297695159912],["ృ",-12.193012237548828],["нет",-12.193031311035156],["jeti",-12.193032264709473],["▁määrä",-12.193071365356444],["▁Express",-12.193085670471191],["წო",-12.193166732788086],["сар",-12.193177223205566],["▁Special",-12.193273544311523],["amp",-12.193344116210938],["▁South",-12.19343376159668],["skiej",-12.193509101867676],["▁lorem",-12.193521499633787],["တို့",-12.1935453414917],["៣",-12.193561553955078],["ització",-12.193702697753906],["▁લ",-12.19387149810791],["▁তিনি",-12.193922996520996],["▁kem",-12.193926811218262],["▁Üniversitesi",-12.193994522094728],["▁temos",-12.194005966186523],["▁ചിത്ര",-12.194011688232422],["▁tố",-12.194074630737305],["▁ost",-12.194085121154783],["▁বলে",-12.19412612915039],["▁dibe",-12.194133758544922],["▁organism",-12.194170951843262],["▁wis",-12.194170951843262],["▁ABD",-12.194174766540527],["▁pale",-12.194188117980955],["▁رات",-12.194201469421388],["暗",-12.194236755371094],["izzazione",-12.194242477416992],["▁вариант",-12.194307327270508],["etto",-12.194317817687988],["▁böyle",-12.19432544708252],["cky",-12.194349
28894043],["▁honetan",-12.194443702697754],["Ā",-12.194445610046388],["เพื่อน",-12.19444751739502],["fog",-12.194493293762209],["▁hennes",-12.194564819335938],["odo",-12.194615364074709],["▁edilir",-12.194628715515137],["▁bala",-12.19471263885498],["▁ресурс",-12.19477081298828],["▁жар",-12.194807052612305],["▁adult",-12.194851875305176],["образ",-12.194940567016602],["สําคัญ",-12.194979667663574],["▁אחת",-12.195002555847168],["ким",-12.1950101852417],["EG",-12.195012092590332],["▁бой",-12.19503688812256],["Her",-12.19507122039795],["▁Επι",-12.195079803466797],["健",-12.1951265335083],["ују",-12.195147514343262],["NY",-12.195182800292969],["gade",-12.19527530670166],["喔",-12.195306777954102],["▁thôi",-12.195322036743164],["▁ዓመት",-12.195327758789062],["▁algumas",-12.195366859436035],["▁research",-12.19538402557373],["áciu",-12.195439338684082],["сны",-12.195479393005373],["▁sr",-12.19552230834961],["▁pelos",-12.19563102722168],["▁slo",-12.195642471313477],["▁etmek",-12.195645332336426],["суд",-12.195667266845703],["▁बढ़",-12.195686340332031],["▁промени",-12.195687294006348],["▁مما",-12.195697784423828],["ପା",-12.195743560791016],["獲",-12.195759773254396],["▁وما",-12.195768356323242],["▁Когда",-12.195828437805176],["▁Gabriel",-12.19583225250244],["▁여행",-12.195845603942873],["▁segera",-12.195874214172363],["▁kohë",-12.195876121520996],["רע",-12.195890426635742],["uter",-12.195952415466309],["▁rétt",-12.195953369140623],["ından",-12.196019172668455],["▁Bangla",-12.196022987365724],["▁conten",-12.196085929870604],["▁mají",-12.19611644744873],["▁πε",-12.196124076843262],["しい",-12.196130752563477],["ITE",-12.19613265991211],["FT",-12.196138381958008],["kkal",-12.19614315032959],["▁مجھے",-12.196147918701172],["erea",-12.196148872375488],["ŭ",-12.19621753692627],["גי",-12.19623851776123],["ън",-12.196264266967772],["яр",-12.19626808166504],["▁enjoy",-12.196271896362305],["▁Qeyd",-12.19633960723877],["▁pemain",-12.1963529586792],["▁मेरे",-12.196362495422363],["▁обаче",-12.196367263793944],["▁pru",-12.196476936340332],["▁sociedade",-12.196510314941406],["▁rik",-12.19655418395996],["ىمىز",-12.196563720703123],["▁വില",-12.196571350097656],["ваў",-12.1965913772583],["మీ",-12.196592330932615],["▁konnte",-12.196592330932615],["лып",-12.19662380218506],["ക്കാ",-12.196697235107422],["▁24-",-12.196701049804688],["▁aig",-12.196709632873535],["▁ceux",-12.196734428405762],["▁sare",-12.196773529052734],["▁همان",-12.19677448272705],["▁११",-12.19677734375],["▁dvoj",-12.196836471557615],["不好",-12.196895599365234],["▁mada",-12.196900367736816],["kning",-12.196905136108398],["aidh",-12.196932792663574],["ors",-12.196948051452637],["niai",-12.19695281982422],["▁nõu",-12.196967124938965],["▁стал",-12.196998596191406],["▁duit",-12.197051048278809],["[1]",-12.19705581665039],["▁hende",-12.197084426879885],["cip",-12.197175979614258],["rken",-12.197227478027344],["▁nevar",-12.197245597839355],["▁limita",-12.19725227355957],["ografi",-12.197266578674316],["▁เรื่อง",-12.1973237991333],["త్ర",-12.19740390777588],["▁başqa",-12.197502136230469],["▁diferents",-12.197566986083984],["Bar",-12.197571754455566],["▁skall",-12.197602272033691],["▁tang",-12.197633743286133],["▁तयार",-12.197667121887209],["arma",-12.197746276855469],["നാ",-12.197775840759276],["ዓ",-12.197784423828123],["ხარ",-12.197835922241213],["▁szín",-12.197839736938477],["▁شورای",-12.197850227355955],["▁epi",-12.19788646697998],["价值",-12.197887420654297],["かった",-12.19788932800293],["ىش",-12.197954177856444],["▁penga",-12.198065757751465],["▁תל",-12.198092460632324],[
"▁lanjut",-12.198113441467283],["▁포",-12.19812297821045],["кат",-12.198140144348145],["▁üç",-12.19821548461914],["▁området",-12.198233604431152],["وري",-12.198275566101074],["瑞",-12.198299407958984],["▁Fast",-12.198301315307615],["蘭",-12.19832992553711],["▁πρώτη",-12.198354721069336],["▁ਰਹੇ",-12.198356628417969],["▁տարի",-12.198450088500977],["▁õ",-12.19845199584961],["▁최",-12.198458671569824],["▁industrial",-12.198471069335938],["▁gyf",-12.198478698730469],["▁boy",-12.198534965515137],["▁ей",-12.198534965515137],["անդ",-12.198578834533691],["▁vanaf",-12.198599815368652],["ajú",-12.19864559173584],["bart",-12.19864559173584],["етін",-12.198704719543455],["kers",-12.1987886428833],["▁naslov",-12.198793411254885],["有了",-12.198807716369627],["▁Polsce",-12.19886302947998],["▁chương",-12.198878288269045],["▁així",-12.198897361755373],["▁tillegg",-12.198919296264648],["▁economic",-12.19894027709961],["▁sebenarnya",-12.198945045471191],["▁muuta",-12.198957443237305],["▁gadā",-12.199021339416504],["▁costa",-12.199047088623049],["▁Eli",-12.199126243591309],["▁હો",-12.199155807495115],["ithi",-12.199212074279783],["ఖ",-12.19931697845459],["普通",-12.19933032989502],["▁olacaq",-12.19935131072998],["▁ඔ",-12.199359893798828],["▁ղեկավար",-12.199366569519045],["▁হয়ে",-12.199377059936523],["▁cậu",-12.199421882629396],["▁Закон",-12.199463844299316],["▁olunur",-12.199464797973633],["▁Hvor",-12.199488639831545],["ît",-12.19949436187744],["▁Liên",-12.199517250061035],["▁đáng",-12.199538230895996],["czne",-12.199543952941896],["elsen",-12.199562072753906],["▁ő",-12.199563026428224],["ствен",-12.19960594177246],["超過",-12.199631690979004],["ไหน",-12.199647903442385],["onen",-12.19973087310791],["ήματα",-12.199740409851074],["راق",-12.199747085571287],["▁priorit",-12.199756622314451],["नो",-12.199851989746094],["▁کراچی",-12.199873924255373],["▁చేశారు",-12.199884414672852],["ลี",-12.199891090393066],["▁развој",-12.199939727783203],["▁meningkatkan",-12.199979782104492],["▁вечер",-12.20000457763672],["كار",-12.200005531311035],["tent",-12.200008392333984],["▁ovaj",-12.200032234191896],["결",-12.200056076049805],["ðum",-12.200074195861816],["▁Xi",-12.200096130371094],["▁Jä",-12.200118064880373],["▁буд",-12.200119972229004],["buru",-12.200159072875977],["▁правильно",-12.20018196105957],["▁дума",-12.200193405151367],["เธอ",-12.200201988220217],["▁կա",-12.200212478637695],["れる",-12.20029640197754],["▁Wal",-12.200313568115234],["▁etdiyi",-12.200350761413574],["▁пла",-12.20035457611084],["▁आवश्यक",-12.200362205505373],["▁Поэтому",-12.200380325317385],["▁nihil",-12.200411796569824],["ଣ୍ଡ",-12.200433731079102],["▁ഡി",-12.200499534606934],["ladı",-12.200521469116213],["တ္",-12.20052719116211],["▁akhirnya",-12.200621604919434],["ikin",-12.20064640045166],["▁parc",-12.200678825378418],["▁ټول",-12.200695037841797],["▁send",-12.200785636901855],["蛋",-12.20082950592041],["▁לעשות",-12.20089340209961],["▁మధ్య",-12.200895309448242],["▁MD",-12.200923919677734],["▁منتشر",-12.200970649719238],["rius",-12.200990676879885],["▁Sunt",-12.201059341430664],["úil",-12.20108413696289],["rier",-12.201157569885254],["leder",-12.20119857788086],["бере",-12.201200485229492],["שר",-12.201523780822754],["▁hareket",-12.201534271240234],["▁산",-12.201565742492676],["ייה",-12.20159149169922],["apo",-12.201623916625977],["▁pagi",-12.20168113708496],["ตรง",-12.201688766479492],["seg",-12.201705932617188],["▁Фе",-12.201727867126465],["wol",-12.201776504516602],["▁PARA",-12.201786041259766],["ուց",-12.201804161071776],["▁phía",-12.201812744140623],["▁شام",-12.
20182991027832],["▁hög",-12.201838493347168],["速度",-12.201848983764648],["糖",-12.201866149902344],["▁Fran",-12.201903343200684],["項",-12.201903343200684],["▁हमारे",-12.201904296875],["▁хийх",-12.201916694641112],["▁isa",-12.201943397521973],["▁xi",-12.202018737792969],["nana",-12.202019691467283],["rli",-12.20203971862793],["▁fosse",-12.20207405090332],["එ",-12.202106475830078],["ဲ",-12.202107429504396],["ög",-12.20211410522461],["ട്ട്",-12.20212173461914],["spiel",-12.202181816101074],["▁página",-12.202279090881348],["▁خطر",-12.20229434967041],["死亡",-12.202374458312988],["itos",-12.202430725097656],["cad",-12.202447891235352],["▁ଦିନ",-12.202465057373049],["▁Դ",-12.202486991882324],["▁stedet",-12.202611923217772],["▁فایل",-12.20261287689209],["▁ٿيڻ",-12.20263671875],[".[1]",-12.202688217163086],["▁සිටි",-12.202747344970703],["▁құ",-12.202786445617676],["eria",-12.202823638916016],["져",-12.20289707183838],["▁trzeba",-12.202921867370604],["▁három",-12.202922821044922],["ပြဲ",-12.202932357788086],["▁услуг",-12.202946662902832],["▁importa",-12.202964782714844],["**",-12.203105926513672],["ology",-12.20312213897705],["කාර",-12.203124046325684],["vist",-12.203131675720217],["规模",-12.203166961669922],["▁Vali",-12.2031888961792],["▁مثال",-12.203210830688477],["▁wys",-12.203224182128906],["ရန္",-12.203229904174805],["甜",-12.20323371887207],["Sch",-12.203240394592283],["ිය",-12.203246116638184],["▁scen",-12.20331859588623],["цев",-12.203351020812988],["▁ໂດຍ",-12.203360557556152],["▁sah",-12.203378677368164],["建議",-12.20338249206543],["▁Нови",-12.203413963317873],["وية",-12.203438758850098],["▁besten",-12.203482627868652],["▁نماز",-12.203510284423828],["▁vitin",-12.203548431396484],["guna",-12.203557014465332],["▁ici",-12.203557968139648],["▁वाली",-12.203572273254396],["▁છો",-12.20358180999756],["ξη",-12.203582763671877],["▁candidat",-12.203591346740724],["▁neid",-12.203596115112305],["ød",-12.203641891479492],["▁Ami",-12.20370101928711],["ვს",-12.203702926635742],["▁lib",-12.203747749328612],["▁මු",-12.203753471374512],["▁Kop",-12.203774452209473],["▁vlastní",-12.203781127929688],["▁گھ",-12.20386028289795],["▁현재",-12.203946113586426],["▁inuu",-12.203993797302246],["▁Tổng",-12.20400619506836],["▁könnte",-12.204014778137209],["▁dolar",-12.204015731811523],["▁הכל",-12.20403289794922],["▁ла",-12.20404052734375],["ھا",-12.20411491394043],["tei",-12.20412540435791],["▁فی",-12.2041597366333],["ట్టు",-12.20419979095459],["izado",-12.20430850982666],["בא",-12.20443058013916],["کش",-12.204431533813477],["▁మంచి",-12.204434394836426],["▁अर्थ",-12.204489707946776],["тр",-12.204582214355469],["▁Ihren",-12.204605102539062],["▁Lin",-12.204635620117188],["▁علمی",-12.204703330993652],["▁2007.",-12.204737663269045],["tain",-12.20477294921875],["email",-12.204791069030762],["▁فعال",-12.204834938049316],["▁ար",-12.204863548278809],["زار",-12.204874038696287],["〜",-12.204943656921388],["▁ਵਾਲੇ",-12.2049560546875],["▁kanilang",-12.20502471923828],["▁আজ",-12.205029487609863],["▁minuti",-12.205060005187988],["▁течение",-12.205095291137695],["▁služby",-12.205097198486328],["▁Date",-12.205117225646973],["ሜ",-12.205187797546388],["▁среди",-12.205228805541992],["▁Act",-12.205246925354004],["▁ран",-12.20525074005127],["ív",-12.20533275604248],["ভাবে",-12.20533561706543],["再次",-12.205342292785645],["مز",-12.205349922180176],["stelling",-12.20544147491455],["滑",-12.205479621887209],["▁invece",-12.205488204956056],["▁नव",-12.205520629882812],["▁penuh",-12.205581665039062],["▁basi",-12.205608367919922],["rav",-12.205646514892578],["▁ل
ینک",-12.205732345581056],["▁රිය",-12.205753326416016],["jde",-12.20581340789795],["▁sidste",-12.205827713012695],["▁redan",-12.205894470214844],["ادة",-12.205910682678224],["▁charge",-12.20596408843994],["電腦",-12.2060546875],["ijn",-12.206095695495604],["▁बाल",-12.206113815307615],["▁verdens",-12.206131935119627],["iņu",-12.20625114440918],["▁ahal",-12.206269264221191],["▁arvo",-12.20633029937744],["طر",-12.206345558166504],["▁лв",-12.20638370513916],["▁görev",-12.20643138885498],["▁rapid",-12.206451416015623],["▁Guru",-12.20645809173584],["령",-12.20647430419922],["▁kurios",-12.206517219543455],["εκ",-12.206555366516112],["▁train",-12.206578254699709],["▁máte",-12.2066068649292],["▁hely",-12.20665454864502],["cons",-12.206686973571776],["▁Ketua",-12.206747055053713],["lík",-12.206780433654783],["ሴ",-12.206799507141112],["▁علیه",-12.20682430267334],["▁realiza",-12.20684814453125],["בה",-12.2068510055542],["▁komun",-12.206868171691896],["gram",-12.206910133361816],["▁мая",-12.20691967010498],["sinə",-12.206921577453612],["▁месец",-12.206979751586914],["▁csapat",-12.206990242004396],["▁مج",-12.20699405670166],["▁baino",-12.206997871398926],["▁1,2",-12.207056045532228],["ėje",-12.207056999206545],["▁وروسته",-12.207097053527832],["▁prisi",-12.207118034362791],["随",-12.207130432128906],["▁hartu",-12.20716953277588],["▁다음",-12.207262992858888],["▁maal",-12.207361221313477],["▁quibus",-12.207377433776855],["▁бяха",-12.207406044006348],["▁tien",-12.207457542419434],["▁مالی",-12.20749282836914],["▁mënyrë",-12.207504272460938],["▁foc",-12.207603454589844],["AJ",-12.207640647888184],["了一",-12.207649230957031],["▁siebie",-12.20765209197998],["INE",-12.207682609558104],["▁ity",-12.207789421081545],["▁novos",-12.207805633544922],["อื่น",-12.207819938659668],["▁âm",-12.207825660705566],["MM",-12.20784854888916],["▁خواب",-12.207862854003906],["▁caz",-12.207865715026855],["owana",-12.207866668701172],["गु",-12.207901000976562],["▁tare",-12.207958221435549],["▁nena",-12.20799446105957],["▁کوشش",-12.208014488220217],["ეთი",-12.208024978637695],["▁המת",-12.208052635192873],["▁poker",-12.208080291748049],["▁בח",-12.208105087280272],["ෙන",-12.20811367034912],["וע",-12.208168029785156],["▁ពេល",-12.20818042755127],["צות",-12.20823097229004],["nikov",-12.208322525024414],["online",-12.20832633972168],["▁ils",-12.208346366882324],["nze",-12.20837116241455],["▁تحریک",-12.208531379699709],["▁Москва",-12.208532333374023],["hard",-12.20855712890625],["ীয়",-12.208560943603516],["▁hỗ",-12.208563804626465],["▁gilt",-12.20857048034668],["קר",-12.20861530303955],["west",-12.208754539489746],["ઈ",-12.208782196044922],[":21",-12.20893383026123],["▁februar",-12.209101676940918],["执行",-12.209104537963867],["▁pengar",-12.209110260009766],["▁Eta",-12.209144592285156],["óc",-12.209172248840332],["▁онда",-12.209199905395508],["▁;-)",-12.209203720092772],["▁Sad",-12.209227561950684],["ussa",-12.209261894226074],["▁ficar",-12.209282875061035],["的重要",-12.209296226501465],["քը",-12.20930290222168],["▁following",-12.209310531616213],["਼",-12.209321975708008],[".......",-12.209415435791016],["არი",-12.209418296813965],["▁tok",-12.209460258483888],["乱",-12.209487915039062],["带来",-12.20949935913086],["▁mümkün",-12.209542274475098],["▁πάνω",-12.209550857543944],["▁ارتباط",-12.209552764892578],["▁particolare",-12.20955753326416],["zett",-12.209587097167969],["▁slag",-12.209653854370115],["▁PI",-12.209688186645508],["iņa",-12.209728240966797],["▁ideas",-12.209810256958008],["▁midagi",-12.209869384765623],["сынын",-12.209891319274902],["ண்",-1
2.209895133972168],["θηκε",-12.209920883178713],["കാര",-12.209920883178713],["rela",-12.209925651550291],["▁proje",-12.209939002990724],["席",-12.209942817687988],["RM",-12.209957122802734],["vista",-12.20996379852295],["▁sull",-12.209976196289062],["▁tepat",-12.210013389587402],["huis",-12.210018157958984],["▁алды",-12.21003246307373],["台湾",-12.210039138793944],["״",-12.21005916595459],["▁ഡ്",-12.210165023803713],["vih",-12.210187911987305],["▁jste",-12.210198402404783],["cular",-12.21021556854248],["▁ಜನ",-12.210227966308594],["▁tamam",-12.210230827331545],["業者",-12.210278511047363],["▁март",-12.210291862487791],["▁adott",-12.210341453552246],[":05",-12.210390090942385],["▁Royal",-12.210489273071287],["モ",-12.210495948791504],["▁سپ",-12.210521697998049],["▁milioni",-12.21052360534668],["▁۲۰",-12.210545539855955],["▁книги",-12.2105712890625],["▁ڏينهن",-12.21057415008545],["ringer",-12.210579872131348],["▁жилийн",-12.210631370544434],["வ்",-12.21064281463623],["▁greu",-12.21071720123291],["▁කියා",-12.210721015930176],["▁miest",-12.210817337036133],["jpg",-12.21081829071045],["ālu",-12.210841178894045],["▁Thank",-12.210858345031738],["ূ",-12.210904121398926],["▁durum",-12.210970878601074],["▁ಮಾ",-12.211010932922363],["▁menunjukkan",-12.21108341217041],["▁الخميس",-12.21108341217041],["▁1978",-12.211090087890623],["▁لیا",-12.211101531982422],["จับ",-12.211149215698242],["ようです",-12.211176872253418],["▁பட",-12.211183547973633],["▁تائين",-12.211190223693848],["กลุ่ม",-12.21121597290039],["▁보기",-12.21121883392334],["▁regi",-12.21129322052002],["elen",-12.211333274841309],["▁കണ്ട",-12.21135711669922],["▁Gun",-12.211426734924316],["ข้อ",-12.211463928222656],["ibu",-12.211472511291504],["▁pó",-12.211570739746094],["妈妈",-12.21157932281494],["私は",-12.211647033691406],["▁ଦ",-12.211651802062988],["▁сын",-12.211821556091309],["▁VII",-12.211834907531738],["▁ث",-12.211895942687988],["mog",-12.211943626403809],["ແມ່ນ",-12.211956977844238],["▁달",-12.211986541748049],["▁marrë",-12.212043762207031],["lies",-12.212048530578612],["▁દ્વારા",-12.212109565734863],["述",-12.212176322937012],["rena",-12.212203979492188],["full",-12.212206840515137],["▁сайд",-12.212210655212402],["课",-12.212241172790527],["▁ligula",-12.21225643157959],["▁vaig",-12.212324142456056],["課",-12.21235179901123],["▁Canada",-12.21238613128662],["दर",-12.212517738342283],[":24",-12.212539672851562],["如今",-12.212576866149902],["чер",-12.212589263916016],["下面",-12.212668418884276],["пей",-12.212750434875488],["▁left",-12.212762832641602],["▁đội",-12.212796211242676],["重大",-12.212815284729004],["RC",-12.212846755981444],["▁deň",-12.212864875793455],["▁modeli",-12.212871551513672],["▁बारेमा",-12.212947845458984],["都会",-12.21297550201416],["ුව",-12.212990760803224],["▁నీ",-12.213094711303713],["▁Vilniaus",-12.213126182556152],["▁сразу",-12.213136672973633],["ones",-12.213141441345217],["▁ସବୁ",-12.213141441345217],["▁verkligen",-12.213149070739746],["awan",-12.213191032409668],["身上",-12.213191986083984],["▁gospodar",-12.213216781616213],["▁Dec",-12.213224411010742],["KAN",-12.213253021240234],["▁kie",-12.213279724121094],["toi",-12.213356971740724],["arum",-12.213371276855469],["γε",-12.21340560913086],["▁୩",-12.213436126708984],["▁były",-12.213438987731934],["▁운영",-12.213491439819336],["經驗",-12.213531494140623],["▁Rp",-12.2135648727417],["▁Pap",-12.213574409484863],["inci",-12.213621139526367],["▁запис",-12.213623046875],["อุ",-12.213634490966797],["▁memilih",-12.21364974975586],["ลูกค้า",-12.21365451812744],["▁എന്",-12.213655471801758],["▁junto",-12.2
13659286499023],["stig",-12.213789939880373],["îya",-12.21388816833496],["▁बो",-12.21395778656006],["▁درد",-12.214025497436523],["▁ludzi",-12.214056015014648],["▁måtte",-12.214075088500977],["▁אלא",-12.214092254638672],["▁ด้วย",-12.214117050170898],["▁práva",-12.214126586914062],["చ్చ",-12.214179039001465],["ंस",-12.214194297790527],["▁kubwa",-12.214210510253906],["征",-12.214276313781738],["ച്ച്",-12.21430206298828],["▁viata",-12.214320182800291],["▁цей",-12.214320182800291],["▁اصل",-12.214384078979492],["шло",-12.21444320678711],["▁র",-12.214459419250488],["▁prop",-12.214486122131348],["▁eduka",-12.214559555053713],["NING",-12.21457862854004],["ēšanas",-12.214609146118164],["▁وقتی",-12.214640617370604],["▁ДА",-12.214667320251465],["▁Eğitim",-12.214677810668944],["▁අධ්",-12.214678764343262],["▁गर्नुहोस्",-12.214686393737791],["โล",-12.214728355407717],["amaan",-12.214776039123535],["دون",-12.2147798538208],["рь",-12.21479606628418],["▁тал",-12.21483325958252],["▁هنا",-12.214838981628418],["▁kerk",-12.21487808227539],["▁односно",-12.21487808227539],["ála",-12.214925765991213],["終",-12.214929580688477],["▁lauk",-12.214953422546388],["▁move",-12.215003967285156],["/2014",-12.215015411376951],["▁puedes",-12.215017318725586],["сте",-12.215031623840332],["વામાં",-12.215035438537598],["»-",-12.215039253234863],["▁Waxaa",-12.215044975280762],["ail",-12.215062141418455],["соб",-12.215072631835938],["▁maliit",-12.21507453918457],["▁fólk",-12.215075492858888],["ል።",-12.215181350708008],["▁lâu",-12.215210914611816],["vision",-12.215217590332031],["▁توانید",-12.215254783630373],["نامه",-12.215287208557127],["▁Προ",-12.21535587310791],["▁საქართველოში",-12.215356826782228],["▁müəyyən",-12.215388298034668],["miseen",-12.215429306030272],["يار",-12.215439796447754],["โลก",-12.215452194213867],["നേ",-12.215482711791992],["ացի",-12.215521812438965],["▁112",-12.215534210205078],["標",-12.215627670288086],["βά",-12.21566677093506],["东西",-12.215669631958008],["▁CS",-12.215686798095703],["▁together",-12.21568775177002],["▁çalışma",-12.215699195861816],["▁Dam",-12.21571159362793],["▁Már",-12.21571445465088],["lă",-12.215771675109863],["▁файл",-12.215789794921877],["ັກ",-12.2158784866333],["▁moni",-12.21599292755127],["الت",-12.216012001037598],["્યું",-12.216018676757812],["▁Bus",-12.21605110168457],["ുവ",-12.216172218322754],["▁scris",-12.216203689575195],["▁berke",-12.21623420715332],["▁வா",-12.216289520263672],["▁благо",-12.21631145477295],["▁olnud",-12.21633529663086],["ො",-12.21635627746582],["▁palvelu",-12.216370582580566],["네요",-12.216387748718262],["▁счет",-12.216419219970703],["ються",-12.21642017364502],["▁phá",-12.216532707214355],["▁મને",-12.216550827026367],["▁počas",-12.21656894683838],["▁bloggen",-12.216593742370604],["▁பிர",-12.216679573059082],["itur",-12.216829299926758],["ಸಾ",-12.216848373413086],["tului",-12.21688175201416],["▁знам",-12.216882705688477],["▁фактор",-12.216910362243652],["▁mely",-12.216919898986816],["▁VA",-12.216927528381348],["wood",-12.21696949005127],["▁Dor",-12.21699047088623],["▁پرداخت",-12.21710205078125],["▁luat",-12.217138290405272],["▁ነበር",-12.217196464538574],["欲",-12.217199325561523],["潮",-12.21723175048828],["ဏ",-12.21725082397461],["▁நி",-12.217326164245604],["ග්",-12.217331886291504],["▁bietet",-12.217354774475098],["्यु",-12.217358589172363],["▁dưỡng",-12.217394828796388],["हु",-12.217442512512209],["▁זמן",-12.217445373535156],["▁siyasi",-12.21754264831543],["λογ",-12.217562675476074],["प्",-12.21757984161377],["літ",-12.217597007751465],["ðir",-12.21764087677002],
["▁культуры",-12.217644691467283],["岛",-12.217674255371094],["нок",-12.2177095413208],["▁prek",-12.217743873596191],["井",-12.217743873596191],["▁beynəlxalq",-12.217767715454102],["▁estima",-12.217788696289062],["▁місце",-12.217833518981934],["лды",-12.2178373336792],["▁преко",-12.217851638793944],["▁desa",-12.217896461486816],["▁eder",-12.217896461486816],["▁Zen",-12.21790599822998],["▁Ça",-12.2179536819458],["伯",-12.218011856079102],["kast",-12.21806526184082],["본",-12.218070030212402],["ออกมา",-12.21809196472168],["เจ",-12.21816349029541],["▁Informa",-12.218170166015623],["▁Say",-12.218207359313965],["глед",-12.218284606933594],["▁Slovenije",-12.218290328979492],["▁Mail",-12.218476295471191],["šanai",-12.218477249145508],["▁students",-12.21849250793457],["▁Bola",-12.21851634979248],["▁mic",-12.218517303466797],["▁Management",-12.218544960021973],["老師",-12.2186279296875],["▁انسانی",-12.218647956848145],["▁spin",-12.218677520751951],["سٹ",-12.218679428100586],["▁ਸਰਕਾਰ",-12.218735694885254],["και",-12.218749046325684],["勒",-12.218772888183594],["৷",-12.218799591064451],["▁וא",-12.218878746032717],["โรคสะเก็ดเงิน",-12.21890354156494],["▁अन",-12.218920707702637],["▁كنت",-12.218966484069824],["ላቸው",-12.21898078918457],["▁Lebens",-12.219008445739746],["▁cuối",-12.219011306762695],["हरुले",-12.219050407409668],["▁Youtube",-12.21906852722168],["▁Kannada",-12.219090461730955],["єр",-12.219133377075195],["▁push",-12.21920680999756],["▁پست",-12.219209671020508],["▁Museum",-12.219239234924316],["خر",-12.219353675842283],["▁ผู้",-12.219433784484863],["▁програми",-12.219449996948242],["ட்டை",-12.21945571899414],["▁През",-12.21947193145752],["▁grav",-12.219510078430176],["人类",-12.219554901123049],["ηση",-12.219679832458496],["bə",-12.219730377197266],["mēr",-12.219731330871582],["▁meiner",-12.219779014587402],["ally",-12.219792366027832],["ண்ட",-12.219881057739258],["力量",-12.219892501831056],["▁Singh",-12.219910621643066],["▁ponad",-12.220011711120604],["теп",-12.220044136047363],["▁raj",-12.22012424468994],["mend",-12.220169067382812],["▁13-",-12.22018337249756],["σκε",-12.22018814086914],["▁läsa",-12.220189094543455],["▁dieta",-12.220209121704102],["▁Yan",-12.22021198272705],["▁Cali",-12.220230102539062],["▁शर्मा",-12.220244407653809],["ică",-12.220258712768556],["▁proxecto",-12.22029972076416],["▁gradu",-12.220301628112791],["▁ប",-12.220304489135742],["▁тр",-12.22033977508545],["층",-12.22035026550293],["▁xüsusi",-12.22035312652588],["▁noin",-12.220366477966309],["нө",-12.220402717590332],["▁ат",-12.220403671264648],["yet",-12.22047233581543],["▁jouw",-12.220473289489746],["صب",-12.220488548278809],["▁třeba",-12.22056007385254],["▁ہوتی",-12.22061824798584],["▁kwanza",-12.220625877380373],["▁gute",-12.220626831054688],["▁nan",-12.220629692077637],["ណា",-12.220635414123535],["產業",-12.22068691253662],["kių",-12.220694541931152],["▁olsa",-12.220727920532228],["usz",-12.22082233428955],["Ме",-12.2208251953125],["▁koffie",-12.220869064331056],["本身",-12.220873832702637],["날",-12.220873832702637],["▁sensibil",-12.22091579437256],["▁აი",-12.220965385437012],["▁nyom",-12.221029281616213],["▁ସୁ",-12.221091270446776],["▁kolo",-12.221099853515623],["▁ጠ",-12.221144676208496],["商业",-12.221196174621582],["▁skakel",-12.221230506896973],["▁message",-12.221237182617188],["▁vorm",-12.221253395080566],["κι",-12.221274375915527],["▁ڇا",-12.221275329589844],["▁анализ",-12.221280097961426],["▁어떤",-12.22138500213623],["▁Nokia",-12.22143840789795],["bag",-12.221455574035645],["వీ",-12.221480369567873],["▁концерт",-12.2215785980
2246],["▁livro",-12.221600532531738],["ชุด",-12.221609115600586],["▁গ",-12.221640586853027],["ดิ",-12.22164821624756],["▁پاکستانی",-12.221698760986328],["шин",-12.221745491027832],["ฉ",-12.221749305725098],["ځ",-12.22178077697754],["pli",-12.221847534179688],["姆",-12.22184944152832],["เปลี่ยน",-12.221899032592772],["▁omnes",-12.221996307373049],["เพราะ",-12.22202205657959],["▁buena",-12.222026824951172],["消费",-12.222026824951172],["था",-12.2220458984375],["▁Сер",-12.222058296203612],["▁masu",-12.222084999084473],["hib",-12.22209930419922],["官方",-12.222123146057127],["▁gave",-12.222134590148926],["▁ولم",-12.222162246704102],["▁Find",-12.222201347351074],["▁asu",-12.22220230102539],["ffen",-12.222245216369627],["nų",-12.22230339050293],["▁Duis",-12.22230339050293],["▁yao",-12.222396850585938],["▁आम",-12.22240924835205],["▁üzerinde",-12.222417831420898],["انہ",-12.222440719604492],["▁zad",-12.22244644165039],["dini",-12.222451210021973],["czenie",-12.22246265411377],["▁dile",-12.222463607788086],["▁wajah",-12.222489356994627],["мя",-12.222567558288574],["▁dare",-12.222586631774902],["ických",-12.22264289855957],["ავს",-12.222674369812012],["fika",-12.222685813903809],["جمع",-12.222729682922363],["พื้นที่",-12.222763061523438],["কি",-12.222782135009766],["Мо",-12.222820281982422],["tiin",-12.222860336303713],["▁прев",-12.222898483276367],["город",-12.222972869873049],["▁minutter",-12.223036766052246],["▁בש",-12.223116874694824],["ráð",-12.223201751708984],["ທ່ານ",-12.22320556640625],["▁UAB",-12.223226547241213],["۴",-12.223262786865234],["okra",-12.223310470581056],["▁signal",-12.22332763671875],["▁اپنا",-12.223396301269531],["ाउन",-12.223410606384276],["▁Tv",-12.223453521728516],["▁nghị",-12.22346305847168],["▁ज्यादा",-12.223467826843262],["oak",-12.223509788513184],["▁algún",-12.22354793548584],["ង់",-12.223557472229004],["▁veci",-12.223560333251951],["▁Фа",-12.223599433898926],["▁사진",-12.223599433898926],["▁Stu",-12.223600387573242],["gren",-12.223617553710938],["▁метод",-12.223650932312012],["ಅ",-12.22365951538086],["ชั้น",-12.223665237426758],["▁hana",-12.223705291748049],["rva",-12.223715782165527],["tatud",-12.223715782165527],["▁เ",-12.223729133605955],["▁hôm",-12.223773956298828],["▁szem",-12.223895072937012],["ಂಬ",-12.223898887634276],["ския",-12.223918914794922],["▁SR",-12.22392463684082],["▁продукт",-12.22392463684082],["maksu",-12.2239408493042],["気持ち",-12.223971366882324],["▁legjobb",-12.223983764648438],["▁artıq",-12.22398853302002],["▁децата",-12.223998069763184],["▁බවට",-12.224082946777344],["말",-12.224088668823242],["▁hü",-12.224111557006836],["▁၊",-12.224122047424316],["เช",-12.224167823791504],["▁neke",-12.224258422851562],["itor",-12.224435806274414],["КО",-12.22449016571045],["▁බොහෝ",-12.224495887756348],["▁vál",-12.224516868591309],["चं",-12.224541664123535],["▁mihi",-12.22461223602295],["▁ві",-12.224663734436035],["▁Qarabağ",-12.224709510803224],["UD",-12.224769592285156],["▁زور",-12.224778175354004],["حة",-12.224824905395508],["▁маалымат",-12.224857330322266],["▁eigenen",-12.22486400604248],["▁легко",-12.224885940551758],["▁Kay",-12.224902153015137],["قي",-12.224905014038086],["▁αφ",-12.224943161010742],["eerde",-12.2250394821167],["ไม้",-12.225110054016112],["▁неза",-12.225153923034668],["ტურ",-12.225168228149414],["▁хууль",-12.225172996520996],["▁Vaše",-12.225271224975586],["▁70%",-12.225290298461914],["аг",-12.225324630737305],["cial",-12.225339889526367],["▁misma",-12.225359916687012],["▁partea",-12.22536563873291],["▁mooie",-12.2253999710083],["▁luna",-12.225529670
715332],["효",-12.225531578063965],["▁متر",-12.225580215454102],["▁mulighed",-12.2255859375],["▁عرض",-12.225618362426758],["ጻ",-12.225621223449709],["▁الوقت",-12.225631713867188],["article",-12.225666046142578],["▁giữ",-12.225826263427734],["φορ",-12.225830078125],["จึง",-12.225836753845217],["印象",-12.225836753845217],["▁ئۇيغۇر",-12.225874900817873],["▁сад",-12.225885391235352],["▁страница",-12.225914001464844],["▁Δια",-12.22592067718506],["▁tea",-12.22593116760254],["▁제품",-12.22598361968994],["▁pouvoir",-12.226059913635254],["▁کیفیت",-12.226073265075684],["▁انصاف",-12.22608757019043],["ож",-12.226096153259276],["ալի",-12.226107597351074],["valg",-12.226146697998049],["▁کھیل",-12.226152420043944],["▁รับ",-12.226153373718262],["▁cực",-12.226171493530272],["tü",-12.226202011108398],["▁formula",-12.226248741149902],["▁реклам",-12.226311683654783],["เร",-12.226387977600098],["▁propose",-12.22645092010498],["▁দু",-12.22646141052246],["惠",-12.226506233215332],["▁sisi",-12.226520538330078],["ブログ",-12.226520538330078],["▁Make",-12.22656536102295],["▁hip",-12.226574897766112],["▁ਕੋਈ",-12.22657871246338],["▁dejting",-12.22659969329834],["▁tung",-12.226699829101562],["kurs",-12.226741790771484],["▁dî",-12.226773262023926],["作業",-12.226781845092772],["▁කියන්න",-12.226799964904783],["▁halda",-12.22689437866211],["ολ",-12.226909637451172],["▁감",-12.226968765258787],["這次",-12.227095603942873],["ДА",-12.227124214172363],["ነው",-12.227148056030272],["▁화",-12.227149963378906],["的事",-12.227165222167969],["▁TER",-12.227197647094728],["▁ବର୍ଷ",-12.227206230163574],["အတွက်",-12.227248191833496],["▁могли",-12.22728443145752],["сту",-12.227309226989746],["▁pdf",-12.227344512939451],["▁केन्द्र",-12.227389335632324],["▁বড়",-12.227422714233398],["▁EP",-12.227436065673828],["▁امن",-12.227436065673828],["rije",-12.227499961853027],["▁σκ",-12.227499961853027],["▁(14",-12.227519989013672],["teria",-12.227540016174316],["zî",-12.22755241394043],["參與",-12.227609634399414],["јќи",-12.227614402770996],["▁حوزه",-12.227618217468262],["กรรม",-12.22766399383545],["ਮੀ",-12.22768497467041],["▁mening",-12.22769546508789],["PU",-12.22770881652832],["設",-12.22770881652832],["▁café",-12.227787017822266],["גו",-12.227802276611328],["ዬ",-12.22781467437744],["▁Face",-12.227849960327148],["صح",-12.227850914001465],["セ",-12.227853775024414],["▁regler",-12.227865219116213],["▁dru",-12.227867126464844],["过去",-12.227896690368652],["वत",-12.22792148590088],["▁decat",-12.22793197631836],["აზ",-12.227937698364258],["eka",-12.227953910827637],["▁Cô",-12.22797679901123],["▁hubungan",-12.227998733520508],["▁разговор",-12.228004455566406],["▁lyd",-12.228012084960938],["taria",-12.22801685333252],["වූ",-12.228113174438477],["▁секс",-12.228118896484377],["~~",-12.22812557220459],["▁كې",-12.228172302246094],["▁ខេត្ត",-12.228208541870115],["▁یہاں",-12.228233337402344],["透",-12.228260040283203],["ರೇ",-12.22826862335205],["▁prese",-12.22827434539795],["žin",-12.228282928466797],["%)",-12.228285789489746],["▁viện",-12.228346824645996],["ۇش",-12.228412628173828],["uva",-12.228447914123535],["▁verib",-12.22854995727539],["▁animal",-12.228556632995604],["により",-12.228617668151855],["▁xHamster",-12.228659629821776],["▁शुरू",-12.22866153717041],["laus",-12.228707313537598],["▁bazı",-12.228708267211914],["gers",-12.228811264038086],["▁Majlis",-12.22882080078125],["ଧ",-12.228867530822754],["loga",-12.228886604309082],["▁yen",-12.228934288024902],["▁ቅዱስ",-12.228971481323242],["▁አሁን",-12.22898006439209],["▁imza",-12.229025840759276],["▁Að",-12.229029655456545],["注册",-
12.229124069213867],["▁Call",-12.229186058044434],["▁ઈ",-12.22919750213623],["▁nice",-12.229199409484863],["▁nummer",-12.229290962219238],["▁جه",-12.22934341430664],["ოდ",-12.22941780090332],["මේ",-12.229475021362305],["思考",-12.229487419128418],["▁dod",-12.22956085205078],["▁വാര്",-12.229574203491213],["▁всіх",-12.229597091674805],["ούνται",-12.229632377624512],["▁institu",-12.229650497436523],["mber",-12.229665756225586],["▁knapp",-12.229730606079102],["▁هغو",-12.229763984680176],["▁Dokument",-12.229784965515137],["ຫນ້າ",-12.22978687286377],["చే",-12.229809761047363],["უმ",-12.229818344116213],["ίζουν",-12.229828834533691],["yum",-12.229852676391602],["▁труда",-12.229877471923828],["资",-12.22991180419922],["▁MAN",-12.22994327545166],["▁أس",-12.229971885681152],["▁тих",-12.230021476745604],["▁fill",-12.230061531066896],["▁зас",-12.230062484741213],["▁គឺ",-12.230072975158691],["許",-12.23015594482422],["kunnan",-12.230157852172852],["胡",-12.230180740356444],["▁jí",-12.230196952819824],["şı",-12.230207443237305],["▁willen",-12.23020839691162],["even",-12.230230331420898],["▁Haf",-12.230264663696287],["žiūr",-12.230284690856934],["สุ",-12.230295181274414],["stwo",-12.23031234741211],["▁Pokud",-12.23031997680664],["▁ଉପରେ",-12.23031997680664],["▁ត",-12.23036766052246],["է",-12.230435371398926],["тів",-12.230475425720217],["▁projekti",-12.230531692504885],["▁lima",-12.230563163757324],["这次",-12.230591773986816],["ૃ",-12.23065185546875],["▁Cri",-12.230673789978027],["enden",-12.230732917785645],["او",-12.23073673248291],["▁порядке",-12.230764389038086],["▁ቤተ",-12.230767250061035],["▁Korea",-12.230815887451172],["tën",-12.230934143066406],["lagt",-12.230950355529783],["▁میڈیا",-12.23096752166748],["ēju",-12.230987548828123],["▁വരെ",-12.230987548828123],["zil",-12.231019973754885],["ичен",-12.231039047241213],["▁Simon",-12.231075286865234],["անք",-12.2310791015625],["šči",-12.231093406677246],["▁nisl",-12.231130599975586],["ở",-12.231155395507812],["ایا",-12.231156349182127],["▁bildir",-12.231172561645508],["興",-12.231228828430176],["▁Gau",-12.231266021728516],["▁ਮੰਤਰੀ",-12.231270790100098],["peli",-12.23127269744873],["▁ainult",-12.231281280517578],["လုပ္",-12.231322288513184],["▁speciali",-12.23141860961914],["▁îi",-12.231440544128418],["elser",-12.231462478637695],["pren",-12.231523513793944],["ਆ",-12.231528282165527],["▁druga",-12.23166561126709],["ಮ್ಮ",-12.23171043395996],["▁մենք",-12.231800079345703],["▁city",-12.231849670410156],["▁taki",-12.231863021850586],["▁أول",-12.23187255859375],["▁άλλο",-12.23191738128662],["▁väli",-12.231931686401367],["▁millor",-12.231963157653809],["▁hart",-12.231999397277832],["▁poter",-12.232008934020996],["වීම",-12.232022285461426],["▁ζωή",-12.232057571411133],["▁avis",-12.232115745544434],["▁laiko",-12.232183456420898],["耐",-12.232216835021973],["▁dock",-12.23223114013672],["▁niños",-12.23224925994873],["▁hir",-12.232275009155272],["▁између",-12.232316970825195],["▁figure",-12.23233127593994],["▁laptop",-12.232340812683104],["▁Radi",-12.232354164123535],["人物",-12.23236846923828],["▁Hil",-12.232382774353027],["στε",-12.23238468170166],["rani",-12.232385635375977],["▁знае",-12.23240852355957],["▁تبدیل",-12.232508659362791],["indən",-12.232515335083008],["yw",-12.232526779174805],["▁trenger",-12.23257541656494],["▁pom",-12.23257827758789],["感觉",-12.232600212097168],["usti",-12.232604026794434],["ovým",-12.232623100280762],["▁ér",-12.232735633850098],["▁dikin",-12.232787132263184],["▁хэрэг",-12.232791900634766],["▁berkenaan",-12.232843399047852],["▁deklar",-12.23291
1109924316],["▁Bang",-12.232975006103516],["ați",-12.233010292053224],["▁interpreta",-12.233017921447754],["भा",-12.233072280883787],["stol",-12.233141899108888],["▁yayın",-12.23318862915039],["▁හිත",-12.233245849609377],["צע",-12.23326301574707],["ቂ",-12.23340129852295],["▁Свет",-12.233431816101074],["少し",-12.233464241027832],["负责",-12.233525276184082],["▁Πα",-12.233526229858398],["▁parla",-12.23353099822998],["чев",-12.23354721069336],["tê",-12.233559608459473],["ਸਾ",-12.233564376831056],["spar",-12.233576774597168],["▁kepala",-12.233576774597168],["這種",-12.233580589294434],["▁uporablja",-12.233599662780762],["વે",-12.233622550964355],["နှင့်",-12.233641624450684],["ਿਕ",-12.233667373657228],["のに",-12.23367977142334],["▁koristi",-12.2337646484375],["້າ",-12.233781814575195],["▁ê",-12.233872413635254],["ப்பட",-12.233905792236328],["▁беларускай",-12.233931541442873],["നായ",-12.233936309814451],["зь",-12.233957290649414],["lərdən",-12.23395824432373],["eed",-12.233963966369627],["יית",-12.234015464782717],["▁danh",-12.234046936035156],["▁జా",-12.234098434448242],["ilgan",-12.234105110168455],["▁וש",-12.234121322631836],["▁والتي",-12.234210014343262],["▁faiz",-12.234224319458008],["▁restaur",-12.234237670898438],["▁qur",-12.234246253967283],["ئة",-12.234313011169434],["▁erste",-12.234336853027344],["俄罗斯",-12.234373092651367],["éis",-12.234390258789062],["渡",-12.234394073486328],["дет",-12.234397888183594],["▁odbor",-12.23442840576172],["ான",-12.2344388961792],["اته",-12.234450340270996],["labora",-12.234457969665527],["▁Монголын",-12.23448371887207],["ească",-12.234573364257812],["▁Iyo",-12.23460865020752],["ാവ",-12.234678268432615],["정보",-12.234789848327637],["▁орон",-12.23480987548828],["▁Bereich",-12.234875679016112],["看着",-12.234895706176758],["ৌ",-12.234936714172363],["印度",-12.234943389892578],["▁vienas",-12.234979629516602],[":59",-12.23498249053955],["▁تازه",-12.234989166259766],["కూ",-12.235014915466309],["legi",-12.235039710998535],["Ve",-12.235041618347168],["▁bekerja",-12.235087394714355],["▁možno",-12.23509407043457],["▁ask",-12.23513889312744],["▁þau",-12.235166549682615],["▁यू",-12.235189437866213],["▁коментар",-12.235199928283691],["▁basta",-12.235328674316406],["holm",-12.235334396362305],["вания",-12.235387802124023],["tivos",-12.235445022583008],["Ī",-12.235462188720703],["▁Satu",-12.235502243041992],["▁۱۳",-12.235520362854004],["▁Така",-12.235568046569824],["▁kroner",-12.235624313354492],["刚",-12.235672950744627],["▁ff",-12.235688209533691],["▁ພວກ",-12.235835075378418],["頂",-12.235858917236328],["▁Pam",-12.235861778259276],["▁овог",-12.23587703704834],["▁वाढ",-12.235957145690918],["သို႔",-12.235987663269045],["▁၃",-12.236004829406738],["pang",-12.236018180847168],["ပါတယ္",-12.236019134521484],["hk",-12.236037254333496],["▁неће",-12.236076354980469],["▁google",-12.236077308654783],["▁Waa",-12.236088752746582],["▁paz",-12.236138343811035],["▁Бел",-12.236141204833984],["▁hd",-12.236153602600098],["▁екі",-12.236177444458008],["rza",-12.23618221282959],["▁során",-12.236198425292969],["azioni",-12.236224174499512],["▁эки",-12.236230850219728],["▁sza",-12.23623752593994],["▁коју",-12.2362642288208],["igkeit",-12.23633098602295],["▁formas",-12.236350059509276],["▁heart",-12.23636531829834],["▁script",-12.236379623413086],["გო",-12.236405372619627],["จังหวัด",-12.23643970489502],["▁शि",-12.2364501953125],["หัว",-12.236492156982422],["လုံး",-12.236516952514648],["▁krist",-12.23654079437256],["cii",-12.236549377441406],["▁resposta",-12.236576080322266],["▁sikre",-12.236618995666504],["
幕",-12.236651420593262],["▁lors",-12.236664772033691],["ημα",-12.236810684204102],["▁самом",-12.236839294433594],["quen",-12.236886978149414],["▁Ανα",-12.236963272094728],["teik",-12.23698902130127],["▁Pet",-12.2369966506958],["▁العامة",-12.23708438873291],["ating",-12.23736572265625],["▁dialog",-12.237372398376465],["▁Shar",-12.237441062927246],["▁Premier",-12.237442016601562],["▁neće",-12.237462997436523],["▁Programa",-12.23747730255127],["یاں",-12.237502098083496],["▁Զ",-12.237555503845217],["▁tillsammans",-12.23756504058838],["▁1975",-12.237567901611328],["▁որպես",-12.237590789794922],["▁अधिकारी",-12.237618446350098],["▁Spe",-12.23763656616211],["cess",-12.237642288208008],["ến",-12.237656593322754],["▁아니라",-12.23766803741455],["nete",-12.23768138885498],["ndung",-12.237689018249512],["▁sic",-12.237733840942385],["▁unser",-12.237842559814451],["▁akci",-12.23785400390625],["afi",-12.237870216369627],["▁байланысты",-12.237874984741213],["motor",-12.237909317016602],["▁maji",-12.237911224365234],["▁šal",-12.237961769104004],["▁чет",-12.23805809020996],["ಭ",-12.238059043884276],["圣",-12.23806095123291],["♪",-12.23808765411377],["▁against",-12.238089561462402],["poj",-12.238103866577148],["▁CI",-12.238173484802246],["phone",-12.238236427307127],["▁हामी",-12.238321304321287],["jumi",-12.23841667175293],["▁bermain",-12.238430976867676],["▁حزب",-12.238454818725586],["▁ასე",-12.238521575927734],["▁নতুন",-12.238617897033691],["▁funciona",-12.238688468933104],["▁ľudí",-12.238701820373535],["▁128",-12.238710403442385],["dela",-12.238714218139648],["▁purus",-12.23871898651123],["สาร",-12.23878002166748],["ঙ্গ",-12.238787651062012],["▁ті",-12.238890647888184],["▁adı",-12.238896369934082],["▁ngành",-12.238898277282717],["▁કામ",-12.238911628723145],["▁toon",-12.238935470581056],["▁جيڪي",-12.2389497756958],["ық",-12.238985061645508],["▁trouver",-12.239053726196287],["wad",-12.2391357421875],["▁никогда",-12.239151000976562],["▁روان",-12.239168167114258],["▁صف",-12.239169120788574],["▁ក្រុម",-12.239171028137209],["童",-12.239190101623535],["rás",-12.239192008972168],["▁cla",-12.239265441894531],["τικές",-12.239370346069336],["▁symbol",-12.239409446716309],["graf",-12.239558219909668],["▁فقد",-12.239595413208008],["▁naszej",-12.239608764648438],["暖",-12.239635467529297],["訪",-12.239635467529297],["▁três",-12.23967742919922],["▁volte",-12.23974323272705],["іў",-12.23974609375],["▁siin",-12.239768981933594],["作者",-12.239832878112791],["▁Tartu",-12.239863395690918],["▁mikä",-12.239916801452637],["层",-12.239946365356444],["▁সি",-12.239949226379396],["алт",-12.23995304107666],["JO",-12.239956855773926],["▁mahal",-12.23996925354004],["▁nunha",-12.24010944366455],["سىز",-12.240164756774902],["▁lager",-12.240185737609863],["▁graf",-12.240198135375977],["▁trạng",-12.240199089050291],["▁खर्च",-12.24020290374756],["ತು",-12.240243911743164],["▁cilat",-12.240262985229492],["▁сайн",-12.24026584625244],["मन",-12.240279197692873],["ارة",-12.240314483642578],["▁sjá",-12.240314483642578],["אַ",-12.24033260345459],["ички",-12.240336418151855],["▁شيء",-12.240367889404297],["▁이번",-12.240395545959473],["▁denen",-12.24040699005127],["વાર",-12.24041748046875],["ガ",-12.240436553955078],["▁comune",-12.240439414978027],["▁dados",-12.240453720092772],["▁მეორე",-12.240523338317873],["uši",-12.240533828735352],["▁товар",-12.240543365478516],["至少",-12.24057388305664],["▁olib",-12.240580558776855],["報告",-12.240666389465332],["▁sterk",-12.240703582763672],["▁ماڻهو",-12.240727424621582],["▁lựa",-12.240728378295898],["lár",-12.24076461791992
2],["▁ajuda",-12.240882873535156],["אט",-12.240911483764648],["心理",-12.24095058441162],["smo",-12.240968704223633],["▁ਸ਼",-12.240975379943848],["▁мі",-12.240976333618164],["▁kereta",-12.240994453430176],["▁força",-12.24100399017334],["եռ",-12.24105739593506],["▁võimalik",-12.241110801696776],["▁പോലെ",-12.241175651550291],["▁zákon",-12.241209983825684],["▁Հայ",-12.241233825683594],["别人",-12.241267204284668],["▁findet",-12.241283416748049],["兼",-12.241291999816896],["▁รุ่น",-12.241332054138184],["项",-12.241334915161133],["సం",-12.241357803344728],["▁Tuhan",-12.241389274597168],["▁segir",-12.241398811340332],["gte",-12.241406440734863],["▁없",-12.241409301757812],["▁dakika",-12.24142837524414],["▁Upp",-12.241442680358888],["மும்",-12.241466522216797],["▁Todo",-12.24147129058838],["kina",-12.241547584533691],["ורה",-12.241551399230955],["ಂಡ",-12.241554260253906],["କ୍ଷ",-12.241580963134766],["▁отыр",-12.241589546203612],["ियो",-12.241652488708496],["▁وخت",-12.241665840148926],["дүү",-12.241669654846191],["▁nemo",-12.241690635681152],["▁школе",-12.241695404052734],["▁TOP",-12.241744041442873],["▁રહી",-12.241787910461426],["▁govori",-12.241804122924805],["▁surat",-12.241827011108398],["▁Sultan",-12.241838455200195],["▁probleem",-12.24184799194336],["▁varias",-12.24193286895752],["▁символ",-12.241984367370604],["▁Reise",-12.242039680480955],["▁akár",-12.24208927154541],["▁stro",-12.242127418518066],["zir",-12.242133140563965],["яг",-12.2422513961792],["▁ráð",-12.242276191711426],["▁aldri",-12.242316246032717],["adó",-12.242406845092772],["▁وژل",-12.242416381835938],["▁parasites",-12.242473602294922],["vite",-12.24249267578125],["▁kv",-12.242530822753906],["▁ڇڏي",-12.24253749847412],["▁inge",-12.242616653442385],["atura",-12.24262809753418],["ఫ్",-12.242667198181152],["ائن",-12.242693901062012],["▁programme",-12.242705345153809],["▁10:00",-12.242741584777832],["▁Նա",-12.24279499053955],["▁Student",-12.242812156677246],["▁liberal",-12.242852210998535],["▁include",-12.242859840393066],["ቹ",-12.242862701416016],["論",-12.242887496948242],["▁Europos",-12.242911338806152],["ဖို့",-12.242921829223633],["▁rigtig",-12.242947578430176],["კე",-12.242980003356934],["▁آخرین",-12.243027687072754],["▁रंग",-12.243061065673828],["үл",-12.24307632446289],["▁hub",-12.243102073669434],["记",-12.243134498596191],["აძე",-12.243149757385254],["ήσουν",-12.243189811706545],["▁vragen",-12.2432279586792],["▁kant",-12.243231773376465],["lho",-12.243353843688965],["pide",-12.24336051940918],["руу",-12.243383407592772],["▁دعا",-12.243409156799316],["制作",-12.243426322937012],["▁including",-12.243436813354492],["โก",-12.24347972869873],["ොත්",-12.243497848510742],["agu",-12.243504524230955],["▁päeva",-12.24351978302002],["▁Lig",-12.243525505065918],["▁disso",-12.243534088134766],["▁маш",-12.243534088134766],["▁нив",-12.243538856506348],["ங்",-12.24356746673584],["▁Sig",-12.243585586547852],["ников",-12.243610382080078],["อ่าน",-12.243616104125977],["іць",-12.243704795837402],["▁стр",-12.243711471557615],["▁അവ",-12.243721961975098],["jeni",-12.243725776672363],["ायला",-12.243775367736816],["ହା",-12.243789672851562],["5,000",-12.243830680847168],["ेका",-12.243996620178224],["▁جيڪو",-12.244017601013184],["▁жай",-12.244091033935549],["น้อง",-12.24413776397705],["▁Imam",-12.244154930114746],["▁mawalan",-12.244227409362791],["公司的",-12.244240760803224],["▁quen",-12.24424171447754],["▁figur",-12.244245529174805],["▁1944",-12.244290351867676],["▁Осы",-12.244325637817385],["ието",-12.244336128234863],["▁కథ",-12.244359970092772],["▁Нај",-12.24
4376182556152],["Ք",-12.244378089904783],["▁театр",-12.24441909790039],["ajā",-12.244421005249023],["基础",-12.24443244934082],["▁walaupun",-12.244434356689451],["▁अधिकार",-12.244446754455566],["▁rip",-12.24445343017578],["زن",-12.244455337524414],["vare",-12.244468688964844],["▁ең",-12.24454402923584],["▁Merci",-12.244556427001951],["▁große",-12.24460506439209],["▁ಗ",-12.244688034057615],["tius",-12.2446928024292],["▁enkel",-12.244704246520996],["▁izbor",-12.244710922241213],["іст",-12.244726181030272],["mund",-12.244728088378906],["▁1981",-12.244755744934082],["▁обов",-12.244769096374512],["чив",-12.24478530883789],["▁gaya",-12.244797706604004],["lp",-12.244888305664062],["▁බලන්න",-12.244890213012695],["▁gerek",-12.24505615234375],["íonn",-12.24509620666504],["▁berat",-12.24510383605957],["▁kö",-12.245153427124023],["ండ్",-12.245160102844238],["পা",-12.245189666748049],["pping",-12.245214462280272],["▁אנו",-12.245237350463867],["▁correct",-12.245241165161133],["▁אנחנו",-12.245251655578612],["▁prem",-12.245257377624512],["▁azal",-12.24534797668457],["voi",-12.245361328125],["▁دیکھ",-12.245379447937012],["▁unserem",-12.245383262634276],["▁olmuş",-12.245407104492188],["▁davon",-12.245448112487791],["▁influ",-12.245457649230955],["説明",-12.24548625946045],["▁област",-12.245491027832031],["▁Алматы",-12.245497703552246],["ตั้งแต่",-12.245513916015623],["苏",-12.24555778503418],["మో",-12.245600700378418],["▁उच्च",-12.245604515075684],["▁pakai",-12.245633125305176],["▁Kuu",-12.245655059814451],["費用",-12.245665550231934],["മേ",-12.245762825012209],["üs",-12.2457857131958],["igas",-12.245820999145508],["▁expert",-12.245894432067873],["ація",-12.245904922485352],["▁talál",-12.245933532714844],["面对",-12.24594497680664],["ņi",-12.245945930480955],["вели",-12.245945930480955],["▁slab",-12.24596118927002],["幫助",-12.24599552154541],["袋",-12.246005058288574],["ページ",-12.24600601196289],["▁everything",-12.246024131774902],["▁1983",-12.2460298538208],["▁କିଛି",-12.246030807495115],["自我",-12.24604320526123],["pm",-12.246068954467772],["▁врати",-12.246113777160645],["▁Key",-12.246135711669922],["▁Θα",-12.24623203277588],["▁Pinterest",-12.246232986450195],["▁ស្",-12.246238708496094],["▁Back",-12.246259689331056],["▁imaju",-12.24626922607422],["osan",-12.246297836303713],["▁membeli",-12.246310234069824],["▁Sus",-12.246330261230469],["ybės",-12.246332168579102],["▁consumo",-12.246431350708008],["▁सामाजिक",-12.24655055999756],["▁fontos",-12.246576309204102],["▁holder",-12.246582984924316],["▁برنامج",-12.246603965759276],["డా",-12.246615409851074],["▁bune",-12.2466459274292],["▁■",-12.246659278869627],["▁Mega",-12.24666976928711],["▁ello",-12.246753692626951],["▁सांग",-12.246809005737305],["▁voce",-12.246837615966797],["▁termék",-12.246851921081545],["▁points",-12.2468843460083],["▁علیہ",-12.246973991394045],["▁منه",-12.247007369995115],["dura",-12.247035026550291],["▁Қазақ",-12.247051239013672],["▁വഴി",-12.247062683105469],["▁yüksək",-12.247081756591797],["▁Bos",-12.247103691101074],["▁ဘာ",-12.247137069702148],["ರೂ",-12.247203826904297],["ซิ",-12.247278213500977],["▁pui",-12.247283935546877],["▁started",-12.247325897216797],["▁dvije",-12.24742031097412],["▁шүү",-12.247429847717283],["▁නමුත්",-12.24748992919922],["ruk",-12.247492790222168],["▁langs",-12.24758529663086],["iant",-12.24760913848877],["▁matter",-12.247710227966309],["rū",-12.247716903686523],["▁invent",-12.247733116149902],["▁Ona",-12.24776840209961],["병",-12.24779224395752],["独",-12.247828483581545],["▁Bütün",-12.247878074645996],["ெ",-12.247879981994627],
["▁chyba",-12.24794101715088],["▁literatura",-12.247973442077637],["▁жи",-12.248000144958496],["▁бага",-12.248004913330078],["▁shkak",-12.248027801513672],["▁17-",-12.248052597045898],["貸款",-12.248056411743164],["▁anggota",-12.248059272766112],["▁кызмат",-12.24806785583496],["▁plak",-12.248122215270996],["▁valmis",-12.248122215270996],["pende",-12.248138427734377],["klas",-12.248163223266602],["▁huyo",-12.248177528381348],["▁৩",-12.248208045959473],["▁اعظم",-12.248208999633787],["urus",-12.248228073120115],["Ő",-12.248230934143066],["מון",-12.248249053955078],["▁ozna",-12.248271942138672],["قت",-12.248296737670898],["▁vamos",-12.248311042785645],["imeve",-12.24837875366211],["▁diferencia",-12.248443603515623],["▁हाल",-12.248472213745115],["stilling",-12.248506546020508],["▁ರಾಜ್ಯ",-12.248537063598633],["▁дагы",-12.248575210571287],["▁Angel",-12.248600006103516],["ĝa",-12.248611450195312],["▁perkara",-12.24862575531006],["nü",-12.248716354370115],["▁TI",-12.24881362915039],["లతో",-12.248822212219238],["▁community",-12.248851776123049],["arinnar",-12.248862266540527],["▁لاس",-12.248893737792969],["開發",-12.248895645141602],["▁бидат",-12.248919486999512],["▁salut",-12.248941421508787],["ಳು",-12.248967170715332],["▁Кі",-12.249031066894531],["dez",-12.24917697906494],["09.",-12.249202728271484],["▁favorit",-12.249222755432127],["▁ura",-12.249226570129396],["▁kiếm",-12.24923324584961],["歡迎",-12.249276161193848],["ราย",-12.24927806854248],["τών",-12.249307632446287],["ιμ",-12.249313354492188],["ङ्ग",-12.24932861328125],["feri",-12.249361991882324],["รอบ",-12.249439239501951],["▁21-",-12.24945068359375],["ось",-12.249454498291016],["▁dekat",-12.249488830566406],["購買",-12.24956226348877],["rich",-12.24957275390625],["▁statt",-12.249574661254885],["▁користи",-12.24961280822754],["▁ხ",-12.249619483947754],["▁koordin",-12.249625205993652],["▁asko",-12.24966812133789],["ива",-12.249698638916016],["EX",-12.24970531463623],["ніх",-12.249714851379396],["ጸ",-12.249746322631836],["anne",-12.24977207183838],["LC",-12.249795913696287],["▁canal",-12.24982452392578],["▁ट्र",-12.249835014343262],["▁accept",-12.24984073638916],["YS",-12.249862670898438],["▁minus",-12.249899864196776],["▁ava",-12.249967575073242],["▁means",-12.249975204467772],["Ru",-12.249985694885254],["koz",-12.249996185302734],["▁sigui",-12.250001907348633],["ಬಹುದು",-12.250033378601074],["▁լ",-12.25005054473877],["▁बता",-12.25005340576172],["▁meus",-12.2500638961792],["вно",-12.25007152557373],["▁RAM",-12.250079154968262],["▁katalog",-12.250102996826172],["▁nešto",-12.250166893005373],["▁Japon",-12.250201225280762],["▁kalian",-12.25022792816162],["▁ከተማ",-12.250232696533203],["ขา",-12.250235557556152],["▁Khu",-12.250244140625],["▁könyv",-12.250258445739746],["▁ისე",-12.250262260437012],["▁осіб",-12.250313758850098],["ില",-12.250314712524414],["編",-12.250316619873049],["▁praktik",-12.250330924987791],["remo",-12.25034236907959],["စီ",-12.250343322753906],["▁ume",-12.250359535217283],["ery",-12.250384330749512],["▁Domini",-12.250486373901367],["телно",-12.25056266784668],["▁batu",-12.250588417053224],["▁ке",-12.250687599182127],["ūk",-12.250696182250977],["▁약",-12.250767707824709],["▁travel",-12.250837326049805],["▁köpa",-12.250846862792969],["▁dini",-12.250848770141602],["▁cạnh",-12.250852584838867],["▁kê",-12.25087547302246],["euse",-12.25094985961914],["logo",-12.250998497009276],["▁جهانی",-12.250999450683594],["▁Norges",-12.251041412353516],["pru",-12.251044273376465],["▁ayat",-12.251133918762209],["▁lisa",-12.25115966796875],["ood",-12.25116062
1643066],["ĉ",-12.251198768615724],["▁റ",-12.251238822937012],["▁Господ",-12.251300811767578],["▁Bitcoin",-12.25133991241455],["-30",-12.251359939575195],["▁이런",-12.251380920410156],["持續",-12.251435279846191],["▁اولین",-12.251482009887695],["▁använder",-12.251504898071287],["▁երկու",-12.251540184020996],["▁grond",-12.251567840576172],["ոտ",-12.251593589782717],["ZA",-12.25161361694336],["▁полу",-12.251680374145508],["TOR",-12.251699447631836],["▁маг",-12.251758575439451],["xon",-12.25178337097168],["దే",-12.251827239990234],["现场",-12.251850128173828],["ģi",-12.25185203552246],["▁ראש",-12.251872062683104],["▁uống",-12.251927375793455],["▁कविता",-12.25197696685791],["▁menor",-12.252018928527832],["▁големи",-12.252039909362791],["ื",-12.252053260803224],["▁jun",-12.252053260803224],["חו",-12.252074241638184],["▁produc",-12.252156257629396],["ટા",-12.252235412597656],["LT",-12.252274513244627],["▁Интернет",-12.252311706542969],["валі",-12.252361297607422],["ുമ്പോള്",-12.252391815185549],["▁राष्ट्रपति",-12.252410888671877],["▁جبکہ",-12.252419471740724],["kart",-12.252445220947266],["▁полі",-12.252445220947266],["camp",-12.252447128295898],["▁либо",-12.252447128295898],["由於",-12.252477645874023],["▁pt",-12.25251579284668],["▁sembra",-12.252530097961426],["▁lectus",-12.252537727355955],["▁сезон",-12.252546310424805],["aye",-12.25254726409912],["кры",-12.252577781677246],["加上",-12.252662658691406],["▁cod",-12.252760887145996],["▁fotografi",-12.25285816192627],["▁мэт",-12.252862930297852],["▁legisla",-12.252875328063965],["ഞ്ഞ",-12.252889633178713],["組織",-12.25289535522461],["▁អាច",-12.252924919128418],["존",-12.25292682647705],["▁dabar",-12.252936363220217],["เต็ม",-12.252939224243164],["skou",-12.252948760986328],["рей",-12.252964973449709],["▁صبح",-12.253005027770996],["▁dagar",-12.253008842468262],["ovima",-12.25309944152832],["▁vaak",-12.25312042236328],["▁others",-12.253246307373049],["▁Κα",-12.253332138061523],["tiba",-12.25341510772705],["▁संविधान",-12.2534761428833],["▁Además",-12.253484725952148],["长期",-12.253487586975098],["ўся",-12.25350284576416],["▁wata",-12.253512382507324],["▁името",-12.25355625152588],["ֆ",-12.253600120544434],["▁bibliotek",-12.25360870361328],["▁게임",-12.25364589691162],["▁stark",-12.253646850585938],["▁భ",-12.253652572631836],["جار",-12.253694534301758],["▁پن",-12.253695487976074],["▁cinema",-12.25372314453125],["llisen",-12.253772735595703],["نڈ",-12.253798484802246],["▁metri",-12.25392246246338],["薄",-12.253954887390137],["▁urban",-12.253955841064451],["창",-12.253968238830566],["▁między",-12.254011154174805],["▁освіти",-12.254011154174805],["▁फेसबुक",-12.254011154174805],["▁egiteko",-12.25401210784912],["▁tertentu",-12.25401210784912],["▁đoàn",-12.25401210784912],["LET",-12.254023551940918],["▁шо",-12.254039764404297],["▁дней",-12.254064559936523],["積",-12.254079818725586],["▁నాకు",-12.254083633422852],["便利",-12.254131317138672],["▁spl",-12.254161834716797],["ÁN",-12.254223823547363],["kok",-12.254234313964844],["ලේ",-12.25423526763916],["kül",-12.254261016845703],["▁жолу",-12.254264831542969],["arbeit",-12.254279136657717],["hora",-12.254326820373535],["ਗਾ",-12.254331588745115],["▁रह",-12.254347801208496],["מד",-12.25441551208496],["ết",-12.25448989868164],["▁conto",-12.254491806030272],["เว็บไซต์",-12.25454807281494],["▁प्रतिक्रिया",-12.254549026489258],["▁speciale",-12.254608154296877],["kura",-12.254612922668455],["IDA",-12.254619598388672],["द्य",-12.254706382751465],["មក",-12.254733085632324],["▁даде",-12.25475788116455],["▁turut",-12.254782676696776],["▁nee
ds",-12.254828453063965],["▁луѓе",-12.254865646362305],["ומים",-12.254878044128418],["커",-12.254968643188477],["ဆုံး",-12.25499153137207],["वाद",-12.255012512207031],["▁Hospital",-12.255071640014648],["▁ყ",-12.255077362060549],["▁konusunda",-12.255099296569824],["▁dhi",-12.255133628845217],["tiko",-12.255221366882324],["свет",-12.255264282226562],["▁mať",-12.255284309387209],["▁درمان",-12.255291938781738],["▁Control",-12.25529956817627],["▁eftersom",-12.255338668823242],["▁hä",-12.255348205566406],["idagi",-12.255430221557615],["▁состав",-12.2554349899292],["▁экс",-12.255446434020996],["בים",-12.255459785461426],["▁ڪن",-12.255494117736816],["▁nimic",-12.255552291870115],["▁complex",-12.255553245544434],["▁기업",-12.255562782287598],["либо",-12.255563735961914],["TY",-12.25556468963623],[":35",-12.255600929260254],["мена",-12.255607604980469],["▁ڪجهه",-12.255619049072266],["▁gyermek",-12.255645751953123],["▁spar",-12.255680084228516],["係",-12.255714416503906],["▁young",-12.255735397338867],["जि",-12.255767822265623],["▁alın",-12.2557954788208],["▁посла",-12.255815505981444],["▁nomina",-12.25587558746338],["män",-12.255934715270996],["VIDEO",-12.255941390991213],["▁anim",-12.255941390991213],["▁artigo",-12.255964279174805],["fə",-12.255990982055664],["mera",-12.25599193572998],["dare",-12.256043434143066],["▁ange",-12.256056785583496],["ห้อง",-12.256162643432615],["▁OG",-12.25620174407959],["▁law",-12.256213188171388],["▁орта",-12.256214141845703],["▁price",-12.25622272491455],["▁tiada",-12.256234169006348],["▁elimina",-12.256271362304688],["▁קשר",-12.25627613067627],["丹",-12.256305694580078],["kien",-12.25633716583252],["iano",-12.256357192993164],["▁Abs",-12.256397247314451],["鞋",-12.256509780883787],["ప్ప",-12.256611824035645],["toon",-12.256620407104492],["uara",-12.256621360778809],["▁infrastruktur",-12.25669288635254],["▁lok",-12.256731986999512],["▁conocer",-12.25680160522461],["ノ",-12.25680160522461],["ление",-12.25682544708252],["ført",-12.256828308105469],["▁செய்து",-12.256918907165527],["▁gaba",-12.25692653656006],["▁ЕС",-12.256966590881348],["▁Final",-12.257030487060549],["เต",-12.257129669189451],["NH",-12.257162094116213],["▁गर्नु",-12.257176399230955],["▁elektr",-12.25719165802002],["▁1977",-12.257196426391602],["▁gammel",-12.257200241088867],["▁געווען",-12.257227897644045],["▁ऐसे",-12.257230758666992],["▁hoor",-12.257247924804688],["▁transform",-12.257314682006836],["gje",-12.257325172424316],["յալ",-12.257383346557615],["dian",-12.257423400878906],["ická",-12.257434844970703],["▁هند",-12.25744342803955],["HU",-12.257450103759766],["▁destaca",-12.257468223571776],["▁хор",-12.25747299194336],["▁keres",-12.257485389709473],["▁bieten",-12.257487297058104],["▁기술",-12.25754165649414],["▁Menurut",-12.25754451751709],["▁edit",-12.257577896118164],["เดียว",-12.257655143737791],["▁pontos",-12.257660865783691],["▁mesti",-12.25766944885254],["cimento",-12.25767421722412],["maj",-12.257678985595703],["▁ከመ",-12.25775146484375],["▁ആര്",-12.25776195526123],["▁acuerdo",-12.257765769958496],["▁katso",-12.257781982421877],["实际",-12.257852554321287],["초",-12.257890701293944],["▁건",-12.257891654968262],["овать",-12.257930755615234],["▁eur",-12.257946014404297],["▁low",-12.25795078277588],["quer",-12.2579984664917],["▁दूर",-12.258004188537598],["▁চ",-12.258004188537598],["▁Sko",-12.258062362670898],["▁ብሎ",-12.25812530517578],["dua",-12.258142471313477],["▁Minden",-12.25814437866211],["▁రోజు",-12.258151054382324],["▁채",-12.25818157196045],["keskus",-12.258193969726562],["▁කොට",-12.25819492340088],["ใ
ด",-12.258260726928713],["ření",-12.258271217346191],["顯示",-12.258280754089355],["▁ଯେ",-12.258313179016112],["έλ",-12.258320808410645],["nieku",-12.258349418640137],["▁dizajn",-12.258378028869627],["ald",-12.25839614868164],["道路",-12.258408546447754],["▁თქვენი",-12.258442878723145],["іла",-12.25844955444336],["▁правило",-12.258450508117676],["▁części",-12.258556365966797],["▁Ні",-12.25861358642578],["▁(11",-12.25862979888916],["âm",-12.258645057678224],["stran",-12.258647918701172],["▁akin",-12.25866985321045],["▁пайда",-12.258718490600586],["▁informe",-12.258777618408203],["бал",-12.258798599243164],["▁akhir",-12.258808135986328],["宣布",-12.25882625579834],["▁gärna",-12.25883674621582],["▁Sla",-12.258852005004885],["▁فرهنگی",-12.25887393951416],["有很多",-12.258892059326172],["▁مسلمان",-12.258978843688965],["▁nëse",-12.258988380432127],["ที่สุด",-12.25905418395996],["▁čak",-12.25906467437744],["▁Spor",-12.259090423583984],["▁рок",-12.259187698364258],["▁Box",-12.2592191696167],["πέ",-12.259317398071287],["了一个",-12.259347915649414],["▁Lí",-12.259363174438477],["▁млрд",-12.259376525878906],["▁وڌيڪ",-12.259379386901855],["▁dhu",-12.259387969970703],["▁oğlu",-12.259403228759766],["дог",-12.25940990447998],["▁сая",-12.25942325592041],["næ",-12.259437561035156],["▁شاید",-12.259437561035156],["而言",-12.25947093963623],["▁اصول",-12.259477615356444],["зор",-12.259527206420898],["Î",-12.25955581665039],["lite",-12.259565353393556],["лез",-12.259571075439451],["▁conditions",-12.259605407714844],["▁neko",-12.259705543518066],["ေျပာ",-12.259708404541016],["▁labi",-12.259801864624023],["▁İran",-12.25987148284912],["▁gat",-12.259925842285156],["ીને",-12.259989738464355],["▁suport",-12.260007858276367],["насці",-12.26006317138672],["▁ლა",-12.260181427001951],["过程中",-12.260226249694824],["▁teisės",-12.260235786437988],["▁نوم",-12.260246276855469],["ktor",-12.260323524475098],["▁Erik",-12.260326385498049],["▁уг",-12.260369300842283],["sai",-12.260396003723145],["狗",-12.260403633117676],["▁τώρα",-12.260453224182127],["penda",-12.26046085357666],["▁ข่าว",-12.260512351989746],["ائية",-12.260519981384276],["ىل",-12.260555267333984],["කම්",-12.260557174682615],["▁тое",-12.260586738586426],["kovi",-12.260642051696776],["cura",-12.260711669921877],["▁Thanh",-12.260714530944824],["/2016",-12.260717391967772],["igis",-12.260869979858398],["име",-12.260890007019045],["ાઇ",-12.260891914367676],[":31",-12.260904312133787],["刀",-12.260939598083496],["意外",-12.26096248626709],["гот",-12.261053085327148],["▁Schul",-12.261090278625488],["▁membaca",-12.261114120483398],["▁Београд",-12.261213302612305],["▁адамдар",-12.26123046875],["他の",-12.261237144470217],["ୃ",-12.261239051818848],["▁Project",-12.261253356933594],["jala",-12.261256217956545],["▁fran",-12.261285781860352],["лал",-12.261305809020996],["owanych",-12.261310577392578],["nuo",-12.26131534576416],["iseen",-12.261335372924805],["ველ",-12.26142692565918],["ጦ",-12.261526107788086],["▁difficile",-12.26152801513672],["▁tashkil",-12.261531829833984],["▁برگزار",-12.261555671691896],["ਿਤ",-12.26155948638916],["▁ذکر",-12.26159381866455],["▁svensk",-12.26163101196289],["▁živ",-12.261642456054688],["▁Videos",-12.26166820526123],["▁famille",-12.26169776916504],["▁nû",-12.26173496246338],["ộ",-12.261848449707031],["տի",-12.261945724487305],["forma",-12.26196575164795],["一人",-12.262008666992188],["▁determina",-12.262033462524414],["▁совсем",-12.26206874847412],["მატ",-12.262121200561523],["▁Leg",-12.262194633483888],["ះ",-12.26223373413086],["seb",-12.262249946594238],["▁חשוב",-12.2
[model/sentence-transformer/unigram.json — raw dump elided. The file is a SentencePiece unigram vocabulary: one long JSON array of ["piece", log-probability] pairs covering dozens of scripts and languages, e.g. ["▁enligt", -12.2623], ["公里", -12.2623], ["▁Prozent", -12.2659]. Only a mid-file fragment of the array appeared here; the full file is the multilingual token inventory used by the embedding model.]
737670898],["▁thuê",-12.326566696166992],["4)",-12.326568603515623],["▁пита",-12.326582908630373],["ຄົນ",-12.326598167419434],["moqda",-12.3266019821167],["到底",-12.3266019821167],["HR",-12.32661247253418],["ğu",-12.326624870300291],["▁rộng",-12.326659202575684],["▁નામ",-12.32668113708496],["▁שא",-12.326684951782228],["▁भन्दै",-12.32669448852539],["▁true",-12.326702117919922],["▁image",-12.326708793640137],["▁Tage",-12.32671356201172],["▁مادر",-12.326719284057615],["▁ביי",-12.326722145080566],["▁Шу",-12.326757431030272],["▁educa",-12.326855659484863],["▁бий",-12.326882362365724],["▁ფ",-12.32688331604004],["日前",-12.326976776123049],["ология",-12.32701015472412],["▁plán",-12.327095985412598],["საც",-12.32713794708252],["▁SS",-12.327177047729492],["▁droit",-12.32717990875244],["wane",-12.327276229858398],["▁sebelumnya",-12.327290534973145],["▁însă",-12.327295303344728],["▁priekš",-12.32732105255127],["▁통",-12.327374458312988],["▁තියෙනවා",-12.327417373657228],["▁பற்றி",-12.327442169189451],["ighed",-12.327448844909668],["czek",-12.3274507522583],["АС",-12.327451705932615],["▁बातम्या",-12.327470779418944],["▁kaasa",-12.32748317718506],["тни",-12.327528953552246],["▁بچ",-12.327564239501951],["ዎ",-12.3275728225708],["Is",-12.3275785446167],["▁სახელმწიფო",-12.327661514282228],["▁ٽ",-12.327679634094238],["▁검색",-12.32769775390625],["جز",-12.327702522277832],["▁інші",-12.327710151672363],["kseen",-12.327733039855955],["easca",-12.327740669250488],["掌握",-12.327752113342283],["▁წარმო",-12.32780933380127],["▁đạt",-12.327815055847168],["▁tillbaka",-12.327862739562988],["▁kh",-12.327966690063477],["▁города",-12.328011512756348],["我們的",-12.32805347442627],["της",-12.328075408935549],["тып",-12.328205108642578],["▁ಚ",-12.328246116638184],["▁Mind",-12.328259468078612],["▁sicer",-12.328269004821776],["xar",-12.328285217285156],["tora",-12.32829761505127],["▁26,",-12.328313827514648],["xta",-12.328320503234863],["łow",-12.328405380249023],["▁måde",-12.328410148620604],["▁ସଂ",-12.328421592712402],["▁എന്നാല്",-12.328438758850098],["зя",-12.328479766845703],["сіз",-12.32850456237793],["▁geç",-12.328511238098145],["▁Jur",-12.328530311584473],["▁Dansk",-12.328535079956056],["טור",-12.3285493850708],["▁мод",-12.3285493850708],["اص",-12.328551292419434],["leb",-12.32855224609375],["清楚",-12.32855987548828],["াল",-12.328706741333008],["▁Cookies",-12.328736305236816],["Men",-12.328807830810549],["▁normale",-12.328812599182127],["itta",-12.328817367553713],["▁Rosa",-12.328855514526367],["▁Boy",-12.328866958618164],["ჰ",-12.32890510559082],["ALE",-12.328948974609377],["逃",-12.328954696655272],["▁փոխ",-12.329042434692385],["いました",-12.329069137573242],["lę",-12.329092979431152],["рийн",-12.329092979431152],["ปรสิต",-12.329181671142578],["óir",-12.329193115234377],["หมด",-12.329195022583008],["▁valami",-12.329208374023438],["ኪ",-12.329216957092283],["▁ورود",-12.329248428344728],["เกม",-12.329249382019045],["双方",-12.329261779785156],["▁карт",-12.329313278198242],["שט",-12.32936668395996],["▁Trend",-12.32947826385498],["▁lini",-12.329516410827637],["▁لاہور",-12.32953929901123],["▁dijo",-12.329578399658203],["▁المع",-12.329585075378418],["▁있을",-12.32961654663086],["▁gym",-12.329619407653809],["されています",-12.329622268676758],["лған",-12.329633712768556],["pik",-12.329639434814451],["▁ajuns",-12.329648971557615],["vim",-12.329656600952148],["ště",-12.329687118530272],["yna",-12.329691886901855],["されて",-12.32973861694336],["▁الخاصة",-12.329741477966309],["jit",-12.329744338989258],["▁គេ",-12.329751014709473],["▁Maz",-12.32977104187011
5],["▁século",-12.329779624938965],["ány",-12.329822540283203],["▁sports",-12.32986545562744],["▁जोड",-12.329904556274414],["▁రి",-12.329919815063477],["▁ബാ",-12.329925537109377],["▁court",-12.329936981201172],["▁stru",-12.329959869384766],["tige",-12.32997703552246],["▁ژوند",-12.330123901367188],["▁مسجد",-12.330127716064451],["▁Rela",-12.330182075500488],["▁posi",-12.33026885986328],["ктор",-12.330344200134276],["▁College",-12.330350875854492],["做到",-12.33043384552002],["▁blo",-12.330439567565918],["ۇپ",-12.33045768737793],["▁każdy",-12.33048152923584],["▁rekening",-12.330509185791016],["蒙",-12.33062744140625],["▁oil",-12.330638885498049],["▁thënë",-12.33070182800293],["▁بلکہ",-12.330714225769045],["▁giống",-12.330721855163574],["登録",-12.330825805664062],["▁lebo",-12.33084487915039],["身體",-12.330852508544922],["انے",-12.330903053283691],["kio",-12.330938339233398],["ที่ดี",-12.33094882965088],["▁తెలంగాణ",-12.330955505371094],["ਕੇ",-12.330975532531738],["▁benefici",-12.331015586853027],["doc",-12.331031799316406],["コン",-12.331031799316406],["ciąg",-12.331036567687988],["陽",-12.33104419708252],["крат",-12.331106185913086],["▁narra",-12.33110809326172],["▁yardım",-12.331138610839844],["▁beslut",-12.331158638000488],["▁sain",-12.331167221069336],["၉",-12.331199645996094],["▁faha",-12.331232070922852],["▁respecto",-12.33126449584961],["▁которого",-12.331295013427734],["անում",-12.33132266998291],["▁lyst",-12.331329345703123],["▁muncul",-12.331329345703123],["ហើយ",-12.331340789794922],["▁Lie",-12.331364631652832],["csi",-12.331398010253906],["▁дерев",-12.331401824951172],["▁1973",-12.331405639648438],["▁биш",-12.331411361694336],["TK",-12.331427574157717],["уге",-12.331463813781738],["vány",-12.331572532653809],["兴",-12.331626892089844],["▁został",-12.33163070678711],["แก่",-12.331636428833008],["▁sue",-12.331657409667969],["କାର",-12.331686973571776],["▁mê",-12.331692695617676],["▁город",-12.331734657287598],["搞",-12.331744194030762],["▁તેના",-12.331830978393556],["▁jäi",-12.331832885742188],["▁మరో",-12.331870079040527],["▁slav",-12.331905364990234],["▁käi",-12.331992149353027],["телей",-12.332100868225098],["▁mission",-12.332148551940918],["▁testi",-12.332185745239258],["iae",-12.332198143005373],["▁Ost",-12.33224105834961],["▁lisä",-12.332255363464355],["ლების",-12.332258224487305],["▁iddia",-12.33229160308838],["护",-12.332319259643556],["▁airson",-12.332385063171388],["▁kraft",-12.332444190979004],["▁kasa",-12.332469940185549],["▁dobra",-12.332518577575684],["tien",-12.332549095153809],["▁leið",-12.332559585571287],["אָר",-12.332579612731934],["▁spr",-12.332663536071776],["▁ኮ",-12.332730293273926],["▁মু",-12.332745552062988],["▁로",-12.332791328430176],["אָ",-12.332805633544922],["▁свету",-12.332859992980955],["قف",-12.332871437072754],["▁Finland",-12.33287239074707],["बल",-12.332914352416992],["▁найти",-12.332929611206056],["▁sisse",-12.332930564880373],["▁людини",-12.332941055297852],["宁",-12.332951545715332],["▁maanta",-12.332962989807127],["▁Kategori",-12.333012580871582],["ことで",-12.333033561706545],["▁Ի",-12.333072662353516],["ুন",-12.333086967468262],["ozás",-12.33316421508789],["clu",-12.333215713500977],["很好",-12.333298683166504],["▁része",-12.333329200744627],["עי",-12.333348274230955],["▁Share",-12.333354949951172],["іння",-12.33337116241455],["ЫН",-12.3333740234375],["▁Сол",-12.333409309387209],["▁دنيا",-12.333481788635254],["waar",-12.333558082580566],["▁මාධ්",-12.333587646484377],["▁докато",-12.333588600158691],["▁phủ",-12.333593368530272],["mę",-12.33360767364502],["▁줄",-12.33361
2442016602],["▁quidem",-12.333687782287598],["▁глас",-12.333696365356444],["▁kooli",-12.33371639251709],["лаш",-12.333744049072266],["▁View",-12.33379364013672],["▁solution",-12.333809852600098],["▁Ey",-12.33381175994873],["uca",-12.33388328552246],["▁faptul",-12.333898544311523],["▁prim",-12.333959579467772],["がない",-12.334017753601074],["μή",-12.334020614624023],["trop",-12.334025382995604],["abh",-12.334028244018556],["бла",-12.334111213684082],["▁Tad",-12.334115028381348],["▁зор",-12.334121704101562],["혜",-12.334165573120115],["▁konferenci",-12.334174156188965],["▁μεταξύ",-12.33417797088623],["▁declarat",-12.33418083190918],["file",-12.33420181274414],["▁address",-12.334216117858888],["мун",-12.334217071533203],["▁परीक्षा",-12.334236145019531],["lasi",-12.33423900604248],["oth",-12.334267616271973],["▁فتح",-12.334272384643556],["▁hedef",-12.33430290222168],["▁bajo",-12.334315299987791],["▁ፍ",-12.334358215332031],["▁buon",-12.334360122680664],["▁Inform",-12.33439826965332],["採用",-12.334406852722168],["▁28,",-12.334449768066406],["IF",-12.334515571594238],["овски",-12.334521293640137],["rage",-12.334561347961426],["][",-12.33457851409912],["kogu",-12.334586143493652],["ರಲ್ಲಿ",-12.33459186553955],["▁kin",-12.334592819213867],["కుండా",-12.334640502929688],["ของเรา",-12.334664344787598],["▁ความ",-12.334710121154783],["▁пыта",-12.334720611572266],["▁Heydər",-12.334745407104492],["▁elektrik",-12.33481216430664],["▁laget",-12.334888458251951],["適合",-12.334909439086914],["▁gikk",-12.334939002990724],["▁PRI",-12.334941864013672],["▁baka",-12.334994316101074],["مین",-12.335025787353516],["▁Sek",-12.335047721862791],["राम",-12.335168838500977],["▁koke",-12.335169792175291],["nali",-12.33518409729004],["چه",-12.335189819335938],["hap",-12.335206031799316],["▁muž",-12.335244178771973],["▁yaxshi",-12.335325241088867],["▁როცა",-12.335330963134766],["готов",-12.335345268249512],["▁debate",-12.335371971130373],["▁trois",-12.335418701171877],["မည်",-12.335436820983888],["▁finnes",-12.335472106933594],["ทา",-12.335503578186035],["lino",-12.3355712890625],["▁حرف",-12.33558464050293],["یف",-12.335606575012209],["▁футбол",-12.335684776306152],["▁medical",-12.335694313049316],["▁види",-12.335713386535645],["▁siinä",-12.335759162902832],["IB",-12.335777282714844],["▁vivi",-12.335790634155272],["▁rappresenta",-12.335847854614258],["資源",-12.335867881774902],["▁জ",-12.335871696472168],["처럼",-12.335895538330078],["▁எனக்கு",-12.33590030670166],["▁produkty",-12.335952758789062],["οκ",-12.335956573486328],["ද්ද",-12.33602523803711],["ndri",-12.336082458496094],["sive",-12.336087226867676],["ВА",-12.33611011505127],["โรงแรม",-12.336124420166016],["▁einnig",-12.336132049560549],["▁rap",-12.336187362670898],["ená",-12.336188316345217],["imme",-12.336189270019531],["▁seni",-12.336244583129885],["▁qualidade",-12.336277961730955],["skole",-12.33628749847412],["▁университет",-12.336288452148438],["▁Acc",-12.336302757263184],["▁bör",-12.33632755279541],["▁katta",-12.336343765258787],[":32",-12.336344718933104],["▁pagamento",-12.336474418640137],["▁mieli",-12.33655071258545],["▁olivat",-12.336555480957031],["销",-12.3366060256958],["hall",-12.33663558959961],["▁Linux",-12.33666706085205],["▁acolo",-12.336671829223633],["▁ჰ",-12.33669090270996],["szą",-12.33672046661377],["ויות",-12.336812019348145],["ेश",-12.336816787719728],["دين",-12.336833953857422],["▁ağır",-12.33687686920166],["이라는",-12.336880683898926],["ფი",-12.336901664733888],["▁isti",-12.336915969848633],["ilmiş",-12.33694839477539],["తం",-12.336970329284668],["यर",-12
.3369722366333],["▁ettei",-12.336977005004885],["▁طبق",-12.336994171142578],["▁Sommer",-12.337006568908691],["意味",-12.337010383605955],["▁жара",-12.337026596069336],["▁Fla",-12.337061882019045],["▁طالب",-12.33707046508789],["▁بودند",-12.337113380432127],["▁tunne",-12.337165832519531],["▁rhan",-12.33717918395996],["▁Jūsų",-12.33720588684082],["стве",-12.33722972869873],["▁okoli",-12.33724880218506],["▁egentlig",-12.337263107299805],["▁دهند",-12.337278366088867],["bá",-12.337292671203612],["ნათ",-12.33730125427246],["lita",-12.337305068969728],["▁dhan",-12.33733367919922],["▁ਮੁ",-12.337390899658203],["▁cewa",-12.337403297424316],["▁rend",-12.337416648864746],["▁šest",-12.337579727172852],["▁datingside",-12.33761215209961],["LAN",-12.337623596191406],["▁எப்படி",-12.337651252746582],["▁ରହିଛି",-12.337654113769531],["ivas",-12.337657928466797],["▁माझ्या",-12.337658882141112],["往往",-12.337739944458008],["▁ПРО",-12.337780952453612],["▁කරලා",-12.33780288696289],["▁step",-12.33782958984375],["▁papir",-12.337883949279783],["▁pessoa",-12.338032722473145],["▁ਮੈਂ",-12.338074684143066],["府",-12.338134765625],["obi",-12.338156700134276],["艾",-12.338191032409668],["락",-12.338210105895996],["ੱਕ",-12.33821964263916],["▁lep",-12.33821964263916],["ੌ",-12.338229179382324],["▁Rhannu",-12.338231086730955],["▁attend",-12.33824062347412],["▁इतर",-12.338251113891602],["▁mate",-12.33827018737793],["▁사업",-12.338274002075195],["▁ambaye",-12.338335990905762],["▁GR",-12.338348388671877],["▁ampak",-12.338398933410645],["美元",-12.33842945098877],["합",-12.338449478149414],["▁batera",-12.338479042053224],["▁aikaa",-12.33851718902588],["▁pirm",-12.33852481842041],["аються",-12.338541030883787],["混",-12.338579177856444],["ег",-12.338589668273926],["Վ",-12.338603973388672],["▁দিন",-12.338690757751465],["▁sabab",-12.338716506958008],["ชัย",-12.33872127532959],["▁starp",-12.338730812072754],["ીઓ",-12.338739395141602],["tras",-12.33875846862793],["លា",-12.338808059692385],["сал",-12.338824272155762],["▁estan",-12.338837623596191],["▁sipas",-12.338892936706545],["▁днес",-12.33889389038086],["ขนาด",-12.338913917541504],["▁têm",-12.3389892578125],["▁Histori",-12.338991165161133],["▁arbeids",-12.339009284973145],["▁nyelv",-12.339014053344728],["ىدا",-12.339038848876951],["▁организира",-12.339101791381836],["быз",-12.339143753051758],["▁голов",-12.339155197143556],["ალური",-12.339180946350098],["▁spiller",-12.339320182800291],["kru",-12.339337348937988],["▁Ibrahim",-12.339394569396973],["▁Gai",-12.339404106140137],["▁উপ",-12.339406967163086],["來說",-12.339422225952148],["▁θέση",-12.339435577392578],["দা",-12.339462280273438],["ström",-12.339475631713867],["▁ତା",-12.339522361755373],["्वा",-12.339608192443848],["▁korun",-12.339637756347656],["ovaná",-12.339693069458008],["▁phù",-12.33970546722412],["▁baten",-12.3397216796875],["FE",-12.33978271484375],["tip",-12.339805603027344],["隨",-12.339838027954102],["就在",-12.339842796325684],["▁fix",-12.339862823486328],["▁καλ",-12.339877128601074],["▁παιδιά",-12.339977264404297],["rész",-12.33997917175293],["▁polisi",-12.33999252319336],["▁ĉar",-12.340062141418455],["▁ғана",-12.340106010437012],["▁ទ",-12.340136528015137],["zda",-12.340150833129885],["▁лек",-12.3402099609375],["шке",-12.340213775634766],["קים",-12.340229034423828],["▁الدولة",-12.340269088745115],["▁bora",-12.34038257598877],["พร",-12.34039306640625],["▁дав",-12.340500831604004],["▁blod",-12.340539932250977],["▁كبير",-12.34056568145752],["▁дейін",-12.340585708618164],["▁inci",-12.340593338012695],["▁محمود",-12.340595245361328],["▁ఉండ"
,-12.340655326843262],["熟",-12.340709686279297],["кли",-12.340713500976562],["▁eg",-12.34073543548584],["▁شی",-12.340778350830078],["▁بإ",-12.340785026550291],["▁పె",-12.340886116027832],["▁neces",-12.340889930725098],["▁nostres",-12.340899467468262],["mağa",-12.34092140197754],["▁گے۔",-12.34093189239502],["▁थप",-12.34094524383545],["ukka",-12.340991020202637],["ճ",-12.341002464294434],["▁deriva",-12.341029167175291],["llet",-12.341109275817873],["▁Zanzibar",-12.341143608093262],["即可",-12.341150283813477],["▁बड़ा",-12.341161727905272],["▁мета",-12.34119701385498],["▁იგი",-12.341221809387209],["不斷",-12.341261863708496],["ील",-12.341278076171877],["▁mówi",-12.341313362121582],["▁menurut",-12.341317176818848],["▁вар",-12.341333389282228],["lū",-12.341357231140137],["關係",-12.341381072998049],["維",-12.341448783874512],["Λ",-12.34147834777832],["ვენ",-12.341507911682127],["▁Acest",-12.341534614562988],["ოლ",-12.34154224395752],["ገል",-12.34160041809082],["▁Fol",-12.341641426086426],["▁kete",-12.341681480407717],["уда",-12.341702461242676],["nais",-12.3417387008667],["▁adlı",-12.34179973602295],["kana",-12.341888427734377],["▁делу",-12.341896057128906],["gata",-12.341901779174805],["фу",-12.341906547546388],["▁класс",-12.341912269592283],["ക്കുന്ന",-12.341914176940918],["▁اهي",-12.341999053955078],["▁Brazil",-12.342140197753906],["center",-12.342191696166992],["▁iza",-12.34221076965332],["▁coisa",-12.342212677001951],["”)",-12.342246055603027],["库",-12.342256546020508],["acağı",-12.342273712158203],["ERE",-12.342308044433594],["▁Nei",-12.342413902282717],["▁save",-12.342424392700195],["▁حمل",-12.342445373535156],["▁quoi",-12.34253978729248],["eran",-12.342558860778809],["вари",-12.34257698059082],["▁šis",-12.342595100402832],["ებელი",-12.342596054077148],["▁atat",-12.342605590820312],["ASA",-12.342621803283691],["▁քաղաքական",-12.34262752532959],["bera",-12.34263801574707],["▁קר",-12.342639923095703],["FF",-12.342686653137209],["▁नीति",-12.342687606811523],["စီး",-12.342856407165527],["▁آقای",-12.342859268188477],["庫",-12.34286403656006],["▁mjög",-12.342896461486816],["▁Amerik",-12.342913627624512],["▁داسې",-12.34291934967041],["▁nüüd",-12.34292984008789],["iniz",-12.342939376831056],["▁ಡಾ",-12.34304428100586],["RL",-12.34306812286377],["▁muitas",-12.343070030212402],["▁Urdu",-12.343071937561035],["▁नई",-12.343113899230955],["nant",-12.343152046203612],["▁ملڪ",-12.343246459960938],["faq",-12.34328842163086],["▁flash",-12.343321800231934],["▁rendez",-12.34333610534668],["▁उपचार",-12.343369483947754],["▁Inte",-12.343378067016602],["▁automobil",-12.343403816223145],["▁ikut",-12.34345245361328],["▁මත",-12.343457221984863],["ญี่ปุ่น",-12.343473434448242],["▁oziroma",-12.343481063842772],["айте",-12.343501091003418],["來自",-12.343517303466797],["▁шмат",-12.34353256225586],["▁мер",-12.343560218811035],["▁പൊ",-12.34359359741211],["ΙΑ",-12.343605041503906],["सो",-12.343626976013184],["menu",-12.343653678894045],["trans",-12.343660354614258],["▁geta",-12.343719482421877],["▁......",-12.343720436096191],["▁hei",-12.343815803527832],["▁كيف",-12.343850135803224],["beste",-12.343860626220703],["ຂ",-12.34386920928955],["▁Geo",-12.34388256072998],["ıyla",-12.343910217285156],["Fe",-12.343932151794434],["▁hasi",-12.343957901000977],["里面",-12.34396743774414],["îr",-12.343982696533203],["που",-12.344034194946287],["jedno",-12.344066619873049],["▁surtout",-12.344069480895996],["▁статус",-12.344104766845703],["▁гэр",-12.34413242340088],["uke",-12.344139099121094],["▁ancak",-12.344156265258787],["Би",-12.3441801071167],["
▁Etiam",-12.344188690185549],["જા",-12.34420394897461],["理论",-12.344215393066406],["हो",-12.34426975250244],["▁devi",-12.34427261352539],["▁இருக்கும்",-12.344282150268556],["▁malah",-12.34429931640625],["▁આજે",-12.34433937072754],["RAN",-12.344439506530762],["▁ακ",-12.34446144104004],["۰۰",-12.344468116760254],["▁støtte",-12.34449863433838],["▁proced",-12.344500541687012],["▁የእ",-12.344541549682615],["▁цр",-12.3445463180542],["ଡା",-12.344558715820312],["▁ئې",-12.34458827972412],["▁240",-12.344661712646484],["узе",-12.344674110412598],["ids",-12.344680786132812],["▁fisk",-12.344681739807127],["▁možnost",-12.344687461853027],["itar",-12.344690322875977],["კითხ",-12.344696044921877],["▁Sigur",-12.344752311706545],["▁Meh",-12.34475803375244],["▁hemû",-12.344765663146973],["▁ຕໍ່",-12.344792366027832],["▁болду",-12.34480094909668],["▁Manager",-12.344876289367676],["▁ընտր",-12.344927787780762],[":34",-12.344937324523926],["nout",-12.34494972229004],["應用",-12.344963073730469],["гин",-12.344978332519531],["▁ជាមួយ",-12.34499168395996],["▁oral",-12.345006942749023],["▁gem",-12.345043182373049],["▁месеца",-12.345057487487791],["▁bolig",-12.345067977905272],["共有",-12.345077514648438],["ınız",-12.345105171203612],["▁पुरुष",-12.34512424468994],["きます",-12.345149040222168],["▁esp",-12.34516429901123],["▁ئا",-12.345169067382812],["▁veren",-12.34519863128662],["▁vùng",-12.34520149230957],["▁γίνει",-12.345239639282228],["नगर",-12.34525203704834],["▁ਹੋਏ",-12.345253944396973],["▁Vás",-12.345256805419922],["sök",-12.34528350830078],["▁IL",-12.345327377319336],["▁норм",-12.34533977508545],["▁gece",-12.34542465209961],["ərək",-12.34545612335205],["VEN",-12.345513343811035],["▁Ј",-12.345513343811035],[":37",-12.345551490783691],["医疗",-12.34555721282959],["▁руски",-12.34556484222412],["▁ච",-12.345575332641602],["就像",-12.345616340637209],["▁karto",-12.345684051513672],["بند",-12.345733642578123],["欧洲",-12.345768928527832],["որդ",-12.345772743225098],["▁কাজ",-12.34579086303711],["agit",-12.345806121826172],["▁использовать",-12.345824241638184],["▁उसके",-12.345845222473145],["▁diskut",-12.345870018005373],["melding",-12.345871925354004],["▁நல்ல",-12.346025466918944],["itsi",-12.346041679382324],["▁Тя",-12.34604263305664],["▁lectura",-12.346135139465332],["▁яна",-12.346158981323242],["määrä",-12.346160888671877],["▁крај",-12.346177101135254],["▁Ekonomi",-12.34619426727295],["Mer",-12.346233367919922],["atlan",-12.346308708190918],["▁повер",-12.34639835357666],["▁വളരെ",-12.346415519714355],["▁gönder",-12.346424102783203],["▁Sana",-12.346427917480469],["▁stór",-12.34643268585205],["sip",-12.346463203430176],["▁zorg",-12.346471786499023],["▁hör",-12.346485137939451],["▁соң",-12.3464994430542],["▁тұр",-12.34652328491211],["▁poet",-12.34657859802246],["Ր",-12.346583366394045],["▁BN",-12.34671115875244],["延",-12.346735000610352],["▁ሥራ",-12.346771240234377],["▁ځای",-12.346774101257324],["టర్",-12.346794128417969],["▁Kang",-12.346843719482422],["/2012",-12.346850395202637],["ош",-12.346867561340332],["▁vent",-12.34690284729004],["пыт",-12.34692096710205],["▁нав",-12.34695053100586],["姿",-12.346972465515137],["▁тоо",-12.347001075744627],["▁έτσι",-12.347002983093262],["▁tiện",-12.347050666809082],["ισμός",-12.347058296203612],["▁prodotto",-12.347067832946776],["▁الأمر",-12.347081184387209],["▁solicita",-12.347084999084473],["▁шест",-12.34709930419922],["▁آواز",-12.347124099731444],["sional",-12.347132682800291],["▁27,",-12.34716510772705],["▁Dha",-12.34716510772705],["ମି",-12.347168922424316],["▁bác",-12.347189903259276],["▁besök"
,-12.347201347351074],["сыл",-12.347237586975098],["▁tanggal",-12.347296714782717],["rbi",-12.34730052947998],["▁대표",-12.347333908081056],["лардың",-12.347357749938965],["ához",-12.347363471984863],["导",-12.347406387329102],["▁страны",-12.34743595123291],["▁이어",-12.347451210021973],["冬",-12.347453117370604],["▁крас",-12.347456932067873],["▁муж",-12.34747314453125],["▁الحق",-12.347489356994627],["ှ",-12.347517013549805],["ปลา",-12.34752368927002],["▁buscar",-12.347525596618652],["▁genere",-12.34753131866455],["▁vital",-12.34753131866455],["▁بازدید",-12.347590446472168],["▁kommentaar",-12.347611427307127],["▁everyone",-12.347644805908203],["▁pride",-12.347658157348633],["してください",-12.347686767578123],["▁protected",-12.347719192504885],["明确",-12.347732543945312],["▁आए",-12.347759246826172],["כים",-12.3477783203125],["▁brak",-12.347808837890623],["မ်",-12.347843170166016],["▁Market",-12.347845077514648],["lling",-12.347880363464355],["▁vakar",-12.347883224487305],["▁Суд",-12.347902297973633],["soo",-12.347909927368164],["люб",-12.347943305969238],["▁metais",-12.347952842712402],["▁medios",-12.347963333129885],["▁Excel",-12.348017692565918],["▁මෙ",-12.348034858703612],["办法",-12.348044395446776],["tsiooni",-12.348071098327637],["▁Ili",-12.348074913024902],["لارنىڭ",-12.348085403442385],["▁davvero",-12.348172187805176],["▁gửi",-12.348172187805176],["▁tran",-12.34821891784668],["▁krijgen",-12.348225593566896],["울",-12.348251342773438],["▁Vene",-12.34826374053955],["▁besi",-12.348333358764648],["արան",-12.348384857177734],["▁môi",-12.348387718200684],["日に",-12.348397254943848],["ứ",-12.348421096801758],["ходить",-12.348441123962402],["щата",-12.348469734191896],["▁tortor",-12.348470687866213],["▁தலை",-12.348505020141602],["Det",-12.348511695861816],["oč",-12.348518371582031],["▁transporte",-12.348527908325195],["ratu",-12.348590850830078],["▁kou",-12.34861946105957],["▁оби",-12.348657608032228],["ਨੇ",-12.348678588867188],["ერთი",-12.348742485046388],["▁socio",-12.348746299743652],["ประมาณ",-12.348753929138184],["▁JU",-12.348763465881348],["▁ehk",-12.348790168762209],["▁filtr",-12.348803520202637],["▁இரு",-12.34880542755127],["▁federal",-12.348892211914062],["▁simples",-12.348909378051758],["дум",-12.348990440368652],["တစ္",-12.349016189575195],["kê",-12.34906005859375],["▁aç",-12.3490629196167],["▁ilgi",-12.34907341003418],["▁hep",-12.349112510681152],["▁sür",-12.349187850952148],["的一个",-12.349213600158691],["▁usług",-12.34926986694336],["▁pamamagitan",-12.349348068237305],["▁besonders",-12.34934902191162],["govor",-12.349377632141112],["ឡ",-12.34941291809082],["▁Kaikki",-12.349478721618652],["ปาก",-12.349491119384766],["▁projecte",-12.349510192871094],["స్ట్",-12.349514961242676],["▁등을",-12.3495454788208],["▁เช่น",-12.349562644958496],["▁Jana",-12.349565505981444],["lanma",-12.349596977233888],["▁ră",-12.349602699279783],["▁masing",-12.349605560302734],["▁ඕන",-12.34960651397705],["ช่วง",-12.34964084625244],["▁target",-12.34969711303711],["ആ",-12.349730491638184],["▁møte",-12.34975814819336],["▁editor",-12.34976291656494],["▁jaren",-12.34977912902832],["เข้ามา",-12.349783897399902],["till",-12.349787712097168],["▁medie",-12.349790573120115],["▁devo",-12.349821090698242],["▁Kamp",-12.349845886230469],["▁дар",-12.349872589111328],["来看",-12.349897384643556],["yp",-12.349918365478516],["▁kredi",-12.349919319152832],["▁सम्पन्न",-12.34993839263916],["▁pelbagai",-12.349945068359377],["Har",-12.349949836730955],["ရဲ",-12.349994659423828],["▁დამ",-12.350031852722168],["▁sogar",-12.350040435791016],["▁camere",
-12.350078582763672],["قل",-12.350106239318848],["lingen",-12.35014820098877],["▁vino",-12.35019588470459],["▁tengo",-12.350210189819336],["inā",-12.350241661071776],["▁জা",-12.350257873535156],["gå",-12.350296974182127],["2004",-12.350322723388672],["▁vyp",-12.35037326812744],["damente",-12.350391387939451],["▁கி",-12.350492477416992],["▁kiri",-12.350525856018066],["▁شاعر",-12.350529670715332],["▁thiếu",-12.350534439086914],["พอ",-12.350584030151367],["ปรับ",-12.350591659545898],["PER",-12.35065746307373],["rato",-12.350703239440918],["▁kya",-12.35071086883545],["▁Εν",-12.35071086883545],["▁simbol",-12.350878715515137],["▁alku",-12.351072311401367],["欧",-12.351089477539062],["务",-12.351106643676758],["▁Verfügung",-12.351115226745604],["hong",-12.351120948791504],["▁किंवा",-12.351123809814451],["▁naši",-12.35116481781006],["▁Бирок",-12.351166725158691],["▁Cab",-12.351192474365234],["▁DW",-12.351224899291992],["achta",-12.351255416870115],["ever",-12.351285934448242],["oru",-12.35129165649414],["коў",-12.351301193237305],["▁artean",-12.35135555267334],["र्ड",-12.35137176513672],["▁држава",-12.351401329040527],["▁ਵ",-12.351401329040527],["▁Chan",-12.351433753967283],["ását",-12.35148811340332],["aktiv",-12.351495742797852],["▁kənd",-12.351496696472168],["යෙක්",-12.35153865814209],["▁سوم",-12.351550102233888],["▁सोच",-12.35167407989502],["▁lap",-12.351682662963867],["▁లా",-12.351685523986816],["ควร",-12.351701736450195],["దీ",-12.35171127319336],["▁noastre",-12.351714134216309],["▁veik",-12.351714134216309],["▁propia",-12.351744651794434],["дем",-12.351758003234863],["▁750",-12.351758003234863],["mov",-12.351761817932127],["▁पत्रकार",-12.351768493652344],["jera",-12.351795196533203],["▁visitar",-12.351863861083984],["▁imam",-12.351865768432615],["▁eneste",-12.351889610290527],["yta",-12.351890563964844],["此外",-12.3519287109375],["▁estou",-12.351953506469728],["זי",-12.35196304321289],["Web",-12.352058410644531],["▁كتاب",-12.352068901062012],["▁ആണ്",-12.352105140686035],["核心",-12.352194786071776],["គ្នា",-12.352213859558104],["тау",-12.352248191833496],["▁gelek",-12.352252006530762],["▁кел",-12.352252006530762],["▁makes",-12.352262496948242],["cita",-12.352272987365724],["poru",-12.352276802062988],["▁କଲେ",-12.35230541229248],["▁गर्दा",-12.352317810058594],["TAL",-12.352327346801758],["▁Heb",-12.352336883544922],["бат",-12.352346420288086],["▁PD",-12.352402687072754],["▁sợ",-12.352411270141602],["▁Jacob",-12.352415084838867],["lji",-12.352499961853027],["ration",-12.35251522064209],["我就",-12.35259246826172],["▁maksu",-12.3526029586792],["▁pirms",-12.352632522583008],["ත්ත",-12.35265064239502],["▁neem",-12.35267448425293],["▁одговор",-12.352690696716309],["▁presse",-12.352699279785156],["▁говорить",-12.352734565734863],["zī",-12.352739334106444],["▁भन्दा",-12.352761268615724],["▁kartą",-12.352792739868164],["▁احتجاج",-12.352886199951172],["બી",-12.352912902832031],["SL",-12.352913856506348],["ಕಾರ",-12.352941513061523],["▁divi",-12.352944374084473],["▁সব",-12.352962493896484],["ஜ",-12.352970123291016],["Hu",-12.352993965148926],["anı",-12.35299587249756],["▁amach",-12.353025436401367],["▁دونوں",-12.353041648864746],["▁varētu",-12.353116989135742],["▁фильм",-12.353190422058104],["▁lank",-12.353195190429688],["▁Rob",-12.353257179260254],["▁▲",-12.353257179260254],["▁مدينة",-12.353273391723633],["జా",-12.353307723999023],["termin",-12.353341102600098],["yah",-12.353346824645996],["olt",-12.35335922241211],["jón",-12.35339069366455],["アップ",-12.353403091430664],["sina",-12.353408813476562],["结构",-12.3
53410720825195],["ရင်",-12.353415489196776],["ală",-12.353416442871094],["асы",-12.353436470031738],["▁gratuit",-12.353447914123535],["▁230",-12.353475570678713],["▁արդեն",-12.353475570678713],["▁ಕಾಂಗ್ರೆಸ್",-12.353475570678713],["ража",-12.35354232788086],["vale",-12.353549003601074],["นอน",-12.353556632995604],["rst",-12.353618621826172],["sider",-12.353687286376951],["▁Bene",-12.353767395019531],["real",-12.353801727294922],["を見",-12.353808403015137],["▁prazo",-12.353830337524414],["meri",-12.353870391845703],["▁trochu",-12.353875160217283],["▁৫",-12.353891372680664],["মি",-12.353970527648926],["忙",-12.35400390625],["▁सरकारी",-12.354004859924316],["▁напред",-12.354022979736328],["망",-12.354039192199709],["فه",-12.354056358337402],["▁автобус",-12.354080200195312],["▁Մի",-12.354137420654297],["лэх",-12.354151725769045],["ئن",-12.354171752929688],["▁документи",-12.354251861572266],["▁активно",-12.354254722595217],["▁ofta",-12.354270935058594],["OO",-12.35428524017334],["▁jawab",-12.35433864593506],["礼",-12.354373931884766],["▁oppi",-12.354434967041016],["露",-12.354440689086914],["▁kil",-12.354445457458496],["▁овај",-12.354479789733888],["මත්",-12.354491233825684],["▁страната",-12.354507446289062],["ىدىن",-12.354534149169922],["goo",-12.35457992553711],["▁hund",-12.35459804534912],["▁Story",-12.354612350463867],["ૈ",-12.354631423950195],["чик",-12.354655265808104],["▁برخه",-12.354676246643066],["▁айтып",-12.354677200317385],["▁використання",-12.354805946350098],["▁aktif",-12.35484504699707],["▁хэрэгтэй",-12.354985237121582],["太多",-12.355030059814451],["▁60%",-12.35507106781006],["▁pozitiv",-12.355084419250488],["▁გი",-12.355111122131348],["ește",-12.355117797851562],["互",-12.355158805847168],["实践",-12.355167388916016],["sque",-12.355173110961914],["ots",-12.355183601379396],["放在",-12.355220794677734],["▁keputusan",-12.355250358581545],["tuta",-12.355262756347656],["▁томе",-12.355271339416504],["▁riêng",-12.35530948638916],["▁mundial",-12.355406761169434],["eč",-12.35543727874756],["ợ",-12.355438232421877],["▁fh",-12.355450630187988],["▁mambo",-12.35546588897705],["▁telefonu",-12.355467796325684],["ikki",-12.355618476867676],["▁weit",-12.355623245239258],["interno",-12.355629920959473],["▁Mhe",-12.3556547164917],["▁Please",-12.35588836669922],["▁جای",-12.35590362548828],["▁berjalan",-12.355937004089355],["idea",-12.355969429016112],["γγ",-12.356122016906738],["▁збор",-12.35621452331543],["▁Kada",-12.356261253356934],["lant",-12.356295585632324],["ದ್",-12.356295585632324],["▁باغ",-12.356313705444336],["ился",-12.356322288513184],["div",-12.356364250183104],["▁дан",-12.356411933898926],["▁maat",-12.35643196105957],["etes",-12.3564453125],["▁gabe",-12.35647964477539],["▁vé",-12.356526374816896],["vant",-12.35653591156006],["▁ಕಿ",-12.356555938720703],["▁Szer",-12.356573104858398],["▁Energie",-12.356578826904297],[":09",-12.356611251831056],["▁wi",-12.356657028198242],["jang",-12.356734275817873],["ัล",-12.356760025024414],["▁necesario",-12.356783866882324],["xia",-12.356829643249512],["sá",-12.356833457946776],["▁ዛሬ",-12.35694980621338],["▁diferente",-12.356969833374023],["logi",-12.35697078704834],["泡",-12.356999397277832],["▁aujourd",-12.357000350952148],["▁ζ",-12.357014656066896],["▁combat",-12.35706901550293],["▁hår",-12.357097625732422],["ķi",-12.357100486755373],["▁tradici",-12.357107162475586],["՞",-12.357163429260254],["▁return",-12.35725212097168],["upa",-12.35726547241211],["▁rank",-12.357308387756348],["نٹ",-12.35738754272461],["▁pă",-12.357393264770508],["▁filma",-12.35745620727539],["
▁өт",-12.357457160949709],["突破",-12.35757541656494],["▁מב",-12.35761547088623],["▁хуулийн",-12.357620239257812],["▁ਕਿਹਾ",-12.35763931274414],["ۋې",-12.357641220092772],["స్త",-12.357696533203123],["付け",-12.357738494873049],["ಿಕ",-12.357743263244627],["łe",-12.35775089263916],["▁wissen",-12.357793807983398],["をお",-12.35780906677246],["dhë",-12.35784149169922],["▁Много",-12.357851028442385],["▁adopt",-12.357891082763672],["▁yerine",-12.35789394378662],["ైన",-12.357916831970217],["▁102",-12.357921600341797],["ჯ",-12.357988357543944],["▁hemma",-12.35802173614502],["ง่าย",-12.358036041259766],["▁mænd",-12.35810375213623],["そうです",-12.35812759399414],["ory",-12.358165740966797],["▁hacia",-12.35818576812744],["▁कर्मचारी",-12.358213424682615],["▁ನಂತರ",-12.358213424682615],["곡",-12.358213424682615],["дөө",-12.358246803283691],["▁органи",-12.358247756958008],["יהם",-12.358291625976562],["▁Kut",-12.3583345413208],["▁LU",-12.3583345413208],["▁acestea",-12.358349800109863],["▁année",-12.358355522155762],["▁хочу",-12.358356475830078],["▁તેને",-12.35842990875244],["▁мяне",-12.358430862426758],["裏",-12.358474731445312],["▁θ",-12.358521461486816],["▁vận",-12.358525276184082],["LD",-12.358545303344728],["чий",-12.358552932739258],["▁බවත්",-12.358555793762209],["▁sifat",-12.358556747436523],["وض",-12.35860824584961],["▁learn",-12.358617782592772],["▁associa",-12.358648300170898],["▁alternative",-12.35871410369873],["▁soy",-12.358776092529297],["▁රට",-12.358811378479004],["▁vesel",-12.358838081359863],["पर्यंत",-12.358839988708496],["▁voidaan",-12.358866691589355],["▁Сва",-12.358881950378418],["цом",-12.358898162841797],["▁début",-12.358916282653809],["парт",-12.358951568603516],["▁pozor",-12.358966827392578],["▁kaup",-12.358967781066896],["▁dut",-12.35898780822754],["▁calor",-12.35902214050293],["▁wao",-12.359055519104004],["▁England",-12.359058380126951],["▁लिया",-12.359137535095217],["▁Milano",-12.359145164489746],["▁जल",-12.359210968017578],["gaard",-12.359278678894045],["▁Travel",-12.359320640563965],["ורים",-12.359329223632812],["▁selain",-12.359350204467772],["▁بيان",-12.359363555908203],["▁территории",-12.35940170288086],["▁الأربعاء",-12.35940170288086],["▁tipi",-12.359434127807615],["▁यदि",-12.35947036743164],["طة",-12.35948085784912],["▁sevi",-12.359489440917969],["ტან",-12.35953140258789],["▁setmana",-12.359533309936523],["быт",-12.359661102294922],["▁Gala",-12.3596830368042],["▁hod",-12.359692573547363],["▁Америка",-12.35972499847412],["▁യാത്ര",-12.359736442565918],["ေကာင္း",-12.359756469726562],["parte",-12.359807014465332],["▁2006.",-12.359822273254396],["ymi",-12.359833717346191],["כנ",-12.359893798828123],["▁KB",-12.359895706176758],["▁skladu",-12.359923362731934],["▁Зем",-12.359940528869627],["▁told",-12.359962463378906],["販売",-12.359979629516602],["▁হচ্ছে",-12.35999870300293],["▁ତ",-12.36000633239746],["▁Manchester",-12.36002254486084],["בע",-12.360030174255373],["宗",-12.360085487365724],["激",-12.360122680664062],["▁rug",-12.360304832458496],["▁χώρα",-12.360312461853027],["gá",-12.360440254211426],["▁partes",-12.36044216156006],["▁bilen",-12.36045265197754],["ಕೊಳ್ಳ",-12.360466003417969],["त्व",-12.360468864440918],["一點",-12.360478401184082],["ໆ",-12.360583305358888],["▁компьютер",-12.360590934753418],["▁نړۍ",-12.360604286193848],["bez",-12.360641479492188],["▁וועט",-12.360700607299805],["lande",-12.360708236694336],["▁काठमाडौँ",-12.36073398590088],["መን",-12.360750198364258],["▁verzi",-12.360770225524902],["▁fora",-12.36077117919922],["شم",-12.360803604125977],["δη",-12.36081314086914],["▁genre
",-12.36090850830078],["िने",-12.36091423034668],["▁iu",-12.36093807220459],["avu",-12.36098289489746],["▁29,",-12.36103630065918],["▁بسیاری",-12.361075401306152],["αχ",-12.36109733581543],["▁arriva",-12.361124038696287],["▁voli",-12.36113452911377],["▁ээ",-12.361172676086426],["▁fear",-12.361196517944336],["▁చిత్రం",-12.361227989196776],["一種",-12.361227989196776],["tini",-12.361308097839355],["▁метал",-12.361336708068848],["▁Möglichkeit",-12.361419677734377],["īja",-12.361441612243652],["já",-12.361465454101562],["▁legyen",-12.361472129821776],["集中",-12.361502647399902],["▁assist",-12.361534118652344],["rody",-12.361586570739746],["▁דרך",-12.361601829528809],["زى",-12.361620903015137],["tyn",-12.36167049407959],["▁támogatás",-12.361681938171388],["▁див",-12.36168384552002],["▁стана",-12.361742973327637],["forum",-12.36176300048828],["▁වෙනුවෙන්",-12.36178207397461],["ዎቹ",-12.361785888671877],["하면서",-12.361799240112305],["ങ്ങളെ",-12.361823081970217],["▁bă",-12.361824989318848],["魅力",-12.361841201782228],["▁rozhod",-12.361845970153809],["▁תו",-12.361854553222656],["جب",-12.361894607543944],["▁នាក់",-12.361910820007324],["判断",-12.361920356750488],[">>",-12.361944198608398],["▁ڈ",-12.36198616027832],["刻",-12.362010955810549],["person",-12.362049102783203],["▁polit",-12.362055778503418],["ப்பி",-12.362080574035645],["▁kasva",-12.362168312072754],["▁સમય",-12.362188339233398],["ევ",-12.362190246582031],["வர்கள்",-12.362217903137209],["▁sac",-12.362217903137209],["świad",-12.362236976623535],["▁Aenean",-12.36224365234375],["▁мере",-12.36226749420166],["▁dame",-12.36232566833496],["敬",-12.36233139038086],["▁берген",-12.36235809326172],["▁зэрэг",-12.362377166748049],["▁झाली",-12.362401962280272],["减",-12.36244010925293],["dine",-12.362448692321776],["ଳା",-12.362464904785156],["ály",-12.362467765808104],["▁الض",-12.362472534179688],["рады",-12.362529754638672],["▁მიმართ",-12.362547874450684],["▁German",-12.362558364868164],["▁проекта",-12.362605094909668],["യായ",-12.362630844116213],["▁מידע",-12.36264991760254],["▁cau",-12.36272430419922],["▁واضح",-12.36276149749756],["特殊",-12.362770080566406],["感謝",-12.362842559814451],["▁svojih",-12.36288833618164],["▁Kaya",-12.362940788269045],["▁կապ",-12.362948417663574],["र्ट",-12.362953186035156],["▁đấu",-12.362974166870115],["▁pagkawala",-12.362977981567385],["▁कार",-12.363000869750977],["core",-12.363011360168455],["▁активности",-12.363045692443848],["ठा",-12.363078117370604],["。「",-12.36310863494873],["вся",-12.36314582824707],["▁õigus",-12.363182067871094],["▁tiba",-12.36321258544922],["▁보고",-12.363245010375977],["编",-12.363250732421877],["▁zbor",-12.363295555114746],["当地",-12.363301277160645],["Ми",-12.36332893371582],["ovú",-12.363346099853516],["yat",-12.363353729248049],["luar",-12.363372802734377],["רון",-12.36343479156494],["▁Winter",-12.36345672607422],["uq",-12.363472938537598],["▁όσο",-12.363494873046877],["වර",-12.363521575927734],["ისთვის",-12.363527297973633],["▁khỏi",-12.363570213317873],["▁بڑی",-12.363632202148438],["ُّ",-12.363664627075195],["jsko",-12.363802909851074],["▁ljud",-12.363809585571287],["नं",-12.363815307617188],["▁ચ",-12.363824844360352],["▁Kosten",-12.363835334777832],["▁Mario",-12.363837242126465],["یده",-12.363847732543944],["boy",-12.363855361938477],["▁prezidenti",-12.363936424255373],["▁obsah",-12.363972663879396],["▁Gali",-12.363978385925291],["อัน",-12.364043235778809],["▁Musa",-12.364128112792969],["ures",-12.364134788513184],["скага",-12.364144325256348],["setning",-12.364228248596191],["▁tercih",-12.364228248596191],["
▁баг",-12.364248275756836],["▁lav",-12.364296913146973],["▁جمهور",-12.364314079284668],["▁contacto",-12.364315032958984],["▁completamente",-12.36432647705078],["▁старо",-12.364333152770996],["凡",-12.364349365234377],["эк",-12.364381790161133],["関係",-12.364391326904297],["经常",-12.364412307739258],["અ",-12.364456176757812],["▁blond",-12.36447525024414],["农",-12.364484786987305],["▁Rumah",-12.364553451538086],["▁großen",-12.364612579345703],["▁org",-12.364612579345703],["▁smag",-12.364645957946776],["▁lepo",-12.364686012268066],["fé",-12.364696502685549],["▁바로",-12.364706993103027],["叶",-12.364727973937988],["▁પરંતુ",-12.364765167236328],["▁trời",-12.364770889282228],["▁उनी",-12.364784240722656],["▁paylaş",-12.364800453186035],["راب",-12.36489486694336],["▁destina",-12.364901542663574],["quo",-12.364919662475586],["▁preparat",-12.36492156982422],["spill",-12.364952087402344],["ТУ",-12.36500644683838],["包含",-12.365039825439451],["ாமல்",-12.365066528320312],["گا",-12.365166664123535],["▁poolt",-12.36520004272461],["▁izin",-12.36523151397705],["իվ",-12.365232467651367],["▁بلند",-12.365242004394531],["▁أجل",-12.365249633789062],["▁skriva",-12.365251541137695],["▁gått",-12.365253448486328],["▁ያስ",-12.36527156829834],["▁гости",-12.365274429321287],["▁jana",-12.365291595458984],["▁وڏي",-12.365301132202148],["džio",-12.365342140197754],["ICA",-12.365344047546388],["▁అన",-12.365378379821776],["пита",-12.365384101867676],["ět",-12.365388870239258],["િયા",-12.365410804748535],["חל",-12.365421295166016],["▁Từ",-12.36546516418457],["▁Light",-12.36548900604248],["իայի",-12.365489959716797],["▁öl",-12.365507125854492],["లోని",-12.365509033203123],["▁molti",-12.365516662597656],["жения",-12.365528106689451],["уючи",-12.365558624267578],["ത്തിനു",-12.365575790405272],["▁рух",-12.365614891052246],["▁enter",-12.365615844726562],["▁fad",-12.365638732910156],["ิ",-12.365647315979004],["▁tenho",-12.365696907043455],["»،",-12.36573886871338],["gran",-12.36577606201172],["▁одним",-12.365808486938477],["▁Comme",-12.365889549255373],["▁svih",-12.36590576171875],["恶",-12.365924835205078],["ETA",-12.365958213806152],["▁çalışan",-12.366046905517578],["жно",-12.366106986999512],["▁الأولى",-12.366111755371094],["▁jaroj",-12.36614227294922],["ळा",-12.366150856018066],["▁ezért",-12.36616039276123],["▁Αυτό",-12.36618423461914],["▁kojoj",-12.366186141967772],["▁RI",-12.36624240875244],["▁ώστε",-12.366327285766602],["ТИ",-12.366422653198242],["דים",-12.366429328918455],["▁þó",-12.36646842956543],["▁تعلق",-12.366474151611328],["▁Wel",-12.36649227142334],["嘉",-12.366494178771973],["▁выше",-12.3665189743042],["▁تست",-12.366522789001465],["ឲ្យ",-12.36653995513916],["vak",-12.366541862487791],["▁ପି",-12.366547584533691],["▁parole",-12.366579055786133],["▁पत्र",-12.36660099029541],["▁період",-12.366625785827637],["TF",-12.366641998291016],["▁അടുത്ത",-12.366641998291016],["▁104",-12.366692543029783],["▁vieta",-12.366737365722656],["▁میرے",-12.366744041442873],["▁počet",-12.366772651672363],["ශ්",-12.366820335388184],["▁ហ៊ុន",-12.366827964782717],["ിരുന്ന",-12.366830825805664],["▁barne",-12.36693000793457],["鐵",-12.366962432861328],["▁firme",-12.367107391357422],["dors",-12.367127418518066],["▁während",-12.367157936096191],["▁ତେବେ",-12.367157936096191],["న్స్",-12.367172241210938],["\"،",-12.367197036743164],["▁انتقال",-12.367213249206545],["▁dijî",-12.3672513961792],["▁iawn",-12.367277145385742],["nuti",-12.367284774780272],["▁किन",-12.367284774780272],["▁pretium",-12.367440223693848],["हर",-12.36744785308838],["▁Party",-12.36744785308
838],["▁1965",-12.367464065551758],["τυχ",-12.367473602294922],["▁fredag",-12.36751937866211],["ୀୟ",-12.367589950561523],["▁אמר",-12.367646217346191],["φέρ",-12.36766242980957],["卷",-12.367717742919922],["aju",-12.367737770080566],["철",-12.36774444580078],["▁управление",-12.367757797241213],["▁Hver",-12.36777687072754],["▁Apakah",-12.367796897888184],["hani",-12.36785888671875],["▁દિવસ",-12.367986679077148],["▁Ole",-12.368019104003906],["▁امنیت",-12.368020057678224],["▁Dos",-12.36809539794922],["▁сатып",-12.36810302734375],["▁جديدة",-12.368119239807127],["よりも",-12.368234634399414],["▁ích",-12.368263244628906],["▁kuul",-12.368282318115234],["▁انتظار",-12.368372917175291],["▁catre",-12.368417739868164],["▁بچوں",-12.36844539642334],["▁새",-12.368465423583984],["-22",-12.368474960327148],["▁праве",-12.36847972869873],["▁azy",-12.368492126464844],["आई",-12.368555068969728],["his",-12.368581771850586],["tığı",-12.368670463562012],["ۋى",-12.368677139282228],["▁відповідно",-12.368738174438477],["▁Tout",-12.3687744140625],["▁sellest",-12.368806838989258],["▁դեպքում",-12.368806838989258],["ംഗ",-12.368905067443848],["▁langis",-12.36890697479248],["▁প্রথম",-12.368956565856934],["▁dapibus",-12.36895751953125],["▁sos",-12.368999481201172],["二十",-12.369017601013184],["▁1969",-12.369061470031738],["иса",-12.369091987609863],["ہی",-12.369157791137695],["▁ਅੱਜ",-12.36922836303711],["▁stránka",-12.36926555633545],["▁hemos",-12.369293212890623],["▁byen",-12.36933422088623],["ண்டு",-12.369335174560549],["▁Director",-12.36936378479004],["čić",-12.369400024414062],["▁منزل",-12.369441032409668],["▁naszych",-12.369513511657717],["▁tuyệt",-12.369559288024902],["▁Disney",-12.369601249694824],["ण्या",-12.369610786437988],["ities",-12.369620323181152],["REN",-12.369626998901367],["емо",-12.36965274810791],["ИС",-12.369658470153809],["noma",-12.369709014892578],["▁nhw",-12.369742393493652],["▁ಬೆ",-12.36984920501709],["▁Ermənistan",-12.369890213012695],["▁tedy",-12.369892120361328],["▁опыт",-12.369952201843262],["ஆ",-12.36998462677002],["▁izmantot",-12.369991302490234],["rvi",-12.370024681091309],["िय",-12.370041847229004],["▁issue",-12.370054244995115],["▁paro",-12.370059967041016],["▁statistik",-12.37014865875244],["▁प्रदर्शन",-12.37016487121582],["vada",-12.370182037353516],["▁gana",-12.370195388793944],["▁manager",-12.370285034179688],["▁реа",-12.370333671569824],["▁wichtig",-12.370343208312988],["սկ",-12.370357513427734],["▁numa",-12.37053394317627],["▁1948",-12.370534896850586],["▁manten",-12.370628356933594],["▁ໄປ",-12.370633125305176],["▁타",-12.37065315246582],["▁быстро",-12.37067413330078],["▁образование",-12.3707275390625],["نک",-12.37074089050293],["▁இந்திய",-12.370757102966309],["▁ଆରମ୍ଭ",-12.370758056640623],["▁kormány",-12.370777130126951],["▁obli",-12.370805740356444],["▁প্রযুক্তি",-12.37082290649414],["▁ince",-12.37082576751709],["ഗ്",-12.37086296081543],["▁अशी",-12.370864868164062],["કે",-12.370874404907228],["▁apoio",-12.370946884155272],["núť",-12.370949745178224],["മില്ല",-12.370957374572754],["▁gef",-12.371000289916992],["獨",-12.371063232421877],["▁kuka",-12.37108325958252],["▁раздел",-12.371097564697266],["malı",-12.371150016784668],["▁:(",-12.371159553527832],["ід",-12.37116527557373],["сия",-12.371166229248049],["▁Prí",-12.37119960784912],["▁sää",-12.37121295928955],["▁dags",-12.371227264404297],["▁sjálf",-12.37123203277588],["▁особенно",-12.371273040771484],["然後",-12.371329307556152],["ಣಿ",-12.37134838104248],["▁نتیجه",-12.371359825134276],["बहादुर",-12.371363639831545],["▁mejores",-12.3713712692260
74],["調整",-12.371373176574709],["▁ଚା",-12.371416091918944],["स्थ",-12.371500015258787],["▁15%",-12.371500968933104],["▁خەلق",-12.371512413024902],["▁இருந்த",-12.3715181350708],["▁Fire",-12.371543884277344],["нува",-12.371576309204102],["▁Ά",-12.37160301208496],["scen",-12.37160873413086],["ຫຼາຍ",-12.371611595153809],["чных",-12.37161636352539],["完善",-12.371620178222656],["▁month",-12.371652603149414],["▁پیر",-12.371668815612791],["ونی",-12.371676445007324],["させて",-12.371681213378906],["▁budete",-12.371695518493652],["คุณภาพ",-12.371739387512209],["▁Сайт",-12.371752738952637],["................",-12.37175464630127],["▁mutat",-12.371789932250977],["šiem",-12.371826171875],["▁एस",-12.371838569641112],["▁plena",-12.371909141540527],["▁फोन",-12.37193202972412],["เพิ่มเติม",-12.371935844421388],["▁système",-12.37196159362793],["▁ಅಥವಾ",-12.37196159362793],["▁milyen",-12.372066497802734],["▁ಕೂಡ",-12.372082710266112],["maður",-12.372100830078123],["▁primit",-12.37214469909668],["浮",-12.372159004211426],["ក្",-12.372175216674805],["▁لباس",-12.372183799743652],["▁onlar",-12.372214317321776],["ګر",-12.37224578857422],["▁զ",-12.372281074523926],["ること",-12.372282028198242],["רח",-12.372355461120604],["lder",-12.372357368469238],["lez",-12.372441291809082],["ბის",-12.372482299804688],["▁වෙ",-12.372503280639648],["tier",-12.372518539428713],["挺",-12.372525215148926],["▁levar",-12.37252712249756],["▁þessu",-12.372536659240724],["▁canto",-12.372540473937988],["▁διά",-12.372562408447266],["▁υπάρχουν",-12.372565269470217],["ически",-12.37257957458496],["▁यहां",-12.372614860534668],[":33",-12.372617721557615],["▁obe",-12.372637748718262],["๋",-12.37265682220459],["▁шанс",-12.372692108154297],["xes",-12.372725486755373],["ността",-12.372742652893066],["ılır",-12.372773170471191],["▁이야기",-12.372777938842772],["gga",-12.372807502746582],["▁giác",-12.37289333343506],["▁água",-12.372901916503906],["▁puntos",-12.372934341430664],["▁(14)",-12.372944831848145],["ỉ",-12.372947692871094],["▁تون",-12.372982025146484],["▁hav",-12.372983932495115],["şə",-12.372992515563965],["▁పరి",-12.373005867004396],["diri",-12.373028755187988],["ttää",-12.37311840057373],["व्या",-12.373122215270996],["וך",-12.373159408569336],["balik",-12.37319278717041],["による",-12.373209953308104],["▁dere",-12.373212814331056],["۔۔۔",-12.37322998046875],["▁povo",-12.37330436706543],["▁आधार",-12.373306274414062],["тү",-12.373310089111328],["ટર",-12.3733549118042],["▁bóng",-12.373364448547363],["▁hiszen",-12.373394012451172],["▁۶",-12.373401641845703],["лем",-12.373420715332031],["mittel",-12.373537063598633],["ļā",-12.373567581176758],["▁אדם",-12.373598098754885],["▁ከዚህ",-12.373653411865234],["▁نشست",-12.373703002929688],["ポイント",-12.373760223388672],["▁Mira",-12.37377643585205],["▁ඒවා",-12.373784065246582],["▁seves",-12.373799324035645],["▁instala",-12.373818397521973],["رف",-12.37394905090332],["▁doz",-12.373961448669434],["pura",-12.37396240234375],["гн",-12.37399673461914],["▁vetë",-12.37399673461914],["▁ndërsa",-12.3740873336792],["▁اطلاع",-12.374098777770996],["▁नेपालको",-12.374105453491213],["▁lees",-12.374107360839844],["▁ET",-12.374151229858398],["▁سنة",-12.374155044555664],["DM",-12.374164581298828],["▁sole",-12.374180793762209],["▁együtt",-12.37423038482666],["של",-12.374237060546877],["ரே",-12.374255180358888],["head",-12.374279022216797],["▁Nad",-12.374281883239746],["有關",-12.374297142028809],["رون",-12.37430477142334],["▁export",-12.374351501464844],["▁Política",-12.374394416809082],["▁luctus",-12.374404907226562],["▁sidan",-12.374423980712
89],["另一",-12.374429702758787],["యో",-12.374502182006836],["ONE",-12.37456226348877],["వర",-12.374563217163086],["ცია",-12.374591827392578],["▁непри",-12.374606132507324],["▁viņu",-12.3746337890625],["ตก",-12.374685287475586],["▁Even",-12.374692916870115],["▁наред",-12.374709129333496],["cyjne",-12.374749183654783],["▁termo",-12.374752044677734],["窗",-12.374754905700684],["▁producto",-12.374774932861328],["TES",-12.37479019165039],["▁Etter",-12.37480354309082],["▁inkişaf",-12.374804496765137],["gadh",-12.374815940856934],["最好",-12.374878883361816],["ਸਟ",-12.374902725219728],["基礎",-12.374930381774902],["▁vendar",-12.37496280670166],["▁chạy",-12.374975204467772],["▁lớp",-12.374975204467772],["▁הצ",-12.374975204467772],["▁fjalë",-12.37497615814209],["ûr",-12.374979972839355],["宜",-12.374988555908203],["ុ",-12.375030517578123],["▁Andhra",-12.37503147125244],["▁çıxar",-12.375069618225098],["▁trud",-12.375076293945312],["ろ",-12.37508773803711],["culo",-12.37521266937256],["ගම",-12.375213623046877],["▁дода",-12.37524700164795],["▁بح",-12.375251770019531],["られ",-12.375299453735352],["▁živi",-12.37533473968506],["▁konstru",-12.375354766845703],["erin",-12.375364303588867],["び",-12.37538719177246],["tató",-12.37540340423584],["▁forza",-12.375423431396484],["▁Фонд",-12.375432968139648],["sign",-12.37546157836914],["skich",-12.375500679016112],["▁сур",-12.375553131103516],["middel",-12.375555992126465],["▁multa",-12.375555992126465],["▁mercato",-12.375561714172363],["▁Бишкек",-12.375591278076172],["▁cú",-12.375614166259766],["▁avem",-12.375624656677246],["▁situa",-12.375676155090332],["lde",-12.37576961517334],["▁všechny",-12.375779151916504],["તિ",-12.375800132751465],["liselt",-12.375940322875977],["▁voca",-12.375947952270508],["ตอบ",-12.37595272064209],["▁ulike",-12.375959396362305],["ខ្មែរ",-12.3759765625],["▁ĝin",-12.375981330871582],["▁مشاهده",-12.376005172729492],["ங்களில்",-12.376075744628906],["▁škola",-12.37611961364746],["sjoner",-12.376138687133787],["▁Sveriges",-12.376140594482422],["▁másik",-12.376164436340332],["▁бесплатно",-12.37618350982666],["▁letter",-12.376200675964355],["▁sukces",-12.376212120056152],["▁Eco",-12.376222610473633],["تىپ",-12.37623405456543],["▁entrevista",-12.376276016235352],["▁سور",-12.376294136047363],["▁ទាំង",-12.376294136047363],["▁omnia",-12.376322746276855],["พล",-12.37632656097412],["ന്നു",-12.376331329345703],["▁tikrai",-12.376436233520508],["२०",-12.376472473144531],["tuz",-12.376482009887695],["IG",-12.376508712768556],["▁source",-12.376514434814451],["Read",-12.376593589782717],["▁проф",-12.376599311828612],["▁master",-12.376638412475586],["▁totalmente",-12.376660346984863],["▁maand",-12.376663208007812],["หนัง",-12.376680374145508],["▁rid",-12.376681327819824],["ења",-12.376738548278809],["lud",-12.376778602600098],["▁इतिहास",-12.376789093017578],["▁nit",-12.37679672241211],["▁үед",-12.37686252593994],["▁UM",-12.376973152160645],["▁leave",-12.376980781555176],["▁תק",-12.377060890197754],["▁idé",-12.37707805633545],["icamente",-12.377082824707031],["aciones",-12.377084732055664],["▁අඩු",-12.377118110656738],["ਾਉਣ",-12.377155303955078],["▁Core",-12.377172470092772],["▁plass",-12.377190589904783],["▁світ",-12.377306938171388],["黨",-12.377362251281738],["▁너무",-12.377374649047852],["▁дахь",-12.377397537231444],["▁реши",-12.37740707397461],["▁okaz",-12.37741470336914],["▁Rah",-12.377416610717772],["show",-12.377429962158203],["▁گیری",-12.377439498901367],["üz",-12.377450942993164],["ակի",-12.37746524810791],["पे",-12.377490997314451],["▁Új",-12.377504348754885
],["▁gh",-12.377508163452148],["EI",-12.37753200531006],["▁Ас",-12.377543449401855],["▁Türkiyə",-12.377575874328612],["▁Some",-12.377609252929688],["pte",-12.377642631530762],["▁ئۆز",-12.377645492553713],["▁edilen",-12.377670288085938],["▁جز",-12.37774658203125],["▁murah",-12.377747535705566],["tumia",-12.377813339233398],["▁கொ",-12.37787914276123],["▁եղել",-12.378000259399414],["▁ਹੋਈ",-12.378012657165527],["▁sosyal",-12.378016471862791],["▁Resort",-12.378079414367676],["pă",-12.378087043762209],["регул",-12.378101348876951],["▁ház",-12.378169059753418],["varo",-12.378214836120604],["лга",-12.378286361694336],["opa",-12.378345489501951],["不了",-12.378345489501951],["▁започна",-12.37840175628662],["▁شنبه",-12.37845516204834],["▁νέο",-12.37850284576416],["▁ό",-12.378583908081056],["▁Segundo",-12.378604888916016],["▁tungkol",-12.378642082214355],["лів",-12.378676414489746],["▁materi",-12.378731727600098],["▁sponsor",-12.37878704071045],["建築",-12.378806114196776],["▁Praha",-12.378881454467772],["▁بج",-12.378917694091797],["ሰራ",-12.378944396972656],["▁xeito",-12.378989219665527],["我也",-12.37903881072998],["▁dzie",-12.379100799560549],["ımız",-12.379108428955078],["▁Бай",-12.379143714904783],["୬",-12.379154205322266],["पु",-12.379156112670898],["▁vrai",-12.379230499267578],["تۇر",-12.379237174987791],["▁nature",-12.379321098327637],["gj",-12.379350662231444],["▁වෙත",-12.37936305999756],["▁Καλ",-12.37938404083252],["▁vur",-12.379441261291504],["▁dám",-12.379465103149414],["ném",-12.379474639892578],["▁Град",-12.379515647888184],["巨",-12.37953758239746],["krit",-12.37959098815918],["-12-",-12.379631042480469],["▁COM",-12.379640579223633],["企",-12.379648208618164],["▁фр",-12.379684448242188],["▁kilometr",-12.379693984985352],["оце",-12.379711151123049],["nale",-12.379779815673828],["▁дараа",-12.379813194274902],["▁pagpapalaki",-12.379820823669434],["▁पटक",-12.37982177734375],["▁येथे",-12.37982940673828],["▁Aliquam",-12.37987232208252],["jela",-12.379900932312012],["▁евра",-12.379907608032228],["▁korte",-12.3800048828125],["▁ailə",-12.38005256652832],["jazd",-12.380093574523926],["汽車",-12.380151748657228],["▁bayi",-12.380187034606934],["手段",-12.380220413208008],["▁DC",-12.380379676818848],["ılmış",-12.380400657653809],["▁mencapai",-12.380423545837402],["▁начал",-12.380426406860352],["▁трудно",-12.380427360534668],["jav",-12.38046169281006],["▁factor",-12.380512237548828],["▁xét",-12.380518913269045],["eye",-12.38058376312256],["▁Texas",-12.38062572479248],["▁Шта",-12.380632400512695],["▁tibi",-12.380638122558594],["ּ",-12.380661964416504],["manda",-12.380715370178224],["San",-12.380728721618652],["▁api",-12.380735397338867],["▁Index",-12.38074779510498],["tk",-12.38075351715088],["▁төв",-12.38076114654541],["ဳ",-12.380818367004396],["릴",-12.38092041015625],["ປີ",-12.380988121032717],["大きな",-12.381017684936523],["▁قۇر",-12.381035804748535],["▁СССР",-12.381056785583496],["раш",-12.38109302520752],["▁אותה",-12.381109237670898],["▁часов",-12.38115406036377],["েই",-12.3811674118042],["▁sende",-12.3811674118042],["▁Victoria",-12.38119125366211],["▁role",-12.38122272491455],["kredit",-12.38124179840088],["▁taal",-12.381300926208496],["のように",-12.381385803222656],["▁கருத்து",-12.3814115524292],["તું",-12.381434440612791],["▁ਹ",-12.38144874572754],["вих",-12.381482124328612],["motiv",-12.381507873535156],["κου",-12.381522178649902],["▁tě",-12.381556510925291],["▁свобод",-12.38162899017334],["▁жүзеге",-12.38163948059082],["▁Дан",-12.381664276123049],["▁मेरी",-12.381702423095703],["▁Extra",-12.381726264953612],["
tük",-12.38172721862793],["▁Disse",-12.381747245788574],["▁جمله",-12.38178253173828],["意見",-12.38178539276123],["ولي",-12.381789207458496],["මය",-12.381797790527344],["▁buen",-12.381811141967772],["些",-12.381843566894531],["、1",-12.381860733032228],["ított",-12.381879806518556],["▁vindt",-12.381879806518556],["▁lihat",-12.381881713867188],["мей",-12.38189697265625],["▁Svi",-12.38190746307373],["▁helg",-12.381964683532717],["▁شاہ",-12.382001876831056],["▁شعب",-12.38205909729004],["cana",-12.382081031799316],["information",-12.382123947143556],["zis",-12.382140159606934],["▁verdade",-12.382184982299805],["mali",-12.382214546203612],["屆",-12.38221836090088],["ongo",-12.382222175598145],["▁patri",-12.382224082946776],["▁কে",-12.38227367401123],["▁bûn",-12.382333755493164],["ימה",-12.382359504699709],["▁sebanyak",-12.382359504699709],["ፎ",-12.382403373718262],["▁juo",-12.382452964782717],["▁Kanak",-12.382476806640623],["▁mega",-12.382486343383787],["▁grava",-12.382607460021973],["лардын",-12.382649421691896],["▁තියෙන",-12.38267421722412],["ဝင်",-12.382678031921388],["▁její",-12.382692337036133],["▁savā",-12.382699012756348],["ários",-12.382710456848145],["▁versi",-12.382750511169434],["фік",-12.382792472839355],["▁senaste",-12.382801055908203],["▁choix",-12.382861137390137],["▁ਗਏ",-12.382862091064451],["▁Στην",-12.382889747619627],["▁Փ",-12.382912635803224],["isasi",-12.382939338684082],["töl",-12.38296127319336],["▁ро",-12.382976531982422],["wag",-12.38304328918457],["▁εκεί",-12.383050918579102],["▁preferi",-12.383085250854492],["kay",-12.38308811187744],["收到",-12.38312530517578],["देव",-12.383126258850098],["muz",-12.38314723968506],["▁mln",-12.383172035217283],["ティ",-12.383176803588867],["óg",-12.383191108703612],["害",-12.383249282836914],["▁solu",-12.38328456878662],["▁jogo",-12.383289337158203],["▁yüzde",-12.383346557617188],["▁Skriv",-12.383377075195312],["aith",-12.38340663909912],["▁ಪ್ರತಿ",-12.38343620300293],["ంటి",-12.383451461791992],["tische",-12.383452415466309],["édi",-12.383509635925291],["pom",-12.383564949035645],["эв",-12.383590698242188],["▁yanı",-12.3836030960083],["lmaz",-12.38364028930664],["▁spring",-12.38365650177002],["▁trying",-12.383676528930664],["▁උප",-12.383708953857422],["డీ",-12.383746147155762],["eto",-12.383808135986328],["חשב",-12.383829116821287],["แ",-12.383867263793944],["Qu",-12.383950233459473],["▁meh",-12.383960723876951],["刺",-12.384021759033203],["terna",-12.384037017822266],["▁Vita",-12.38408374786377],["▁Kinh",-12.384099960327148],["▁mire",-12.384136199951172],["▁góð",-12.384174346923828],["məyə",-12.384187698364258],["▁(15)",-12.384242057800291],["▁fəaliyyət",-12.384273529052734],["▁минути",-12.384349822998049],["词",-12.384427070617676],["tín",-12.38444709777832],["ало",-12.384469985961914],["एम",-12.384483337402344],["▁트",-12.384551048278809],["▁Salah",-12.384556770324709],["asjon",-12.384570121765137],["▁hægt",-12.384577751159668],["举行",-12.384600639343262],["▁நாள்",-12.384628295898438],["▁Jums",-12.3846435546875],["ภ",-12.384653091430664],["tisch",-12.384655952453612],["▁NEWS",-12.384674072265623],["▁ԱՄՆ",-12.38468074798584],["▁معرفی",-12.38468074798584],["ror",-12.384702682495115],["▁ಕೈ",-12.38470458984375],["αξ",-12.38480281829834],["▁család",-12.384828567504885],["▁doel",-12.38486385345459],["何か",-12.384876251220703],["▁wanao",-12.384879112243652],["cet",-12.384880065917969],["▁riktig",-12.384918212890623],["kada",-12.384932518005373],["zug",-12.38497257232666],["▁ٹیم",-12.38500690460205],["יצ",-12.38502311706543],["▁kinders",-12.38504314422607
4],["▁Judi",-12.385066032409668],["OM",-12.38510513305664],["тэн",-12.385107040405272],["▁کړ",-12.385111808776855],["▁မြန်မာ",-12.385138511657717],["▁मै",-12.38514804840088],["▁Gia",-12.38516330718994],["▁bunun",-12.385190963745115],["▁dadi",-12.385196685791016],["czę",-12.385220527648926],["▁reason",-12.385224342346191],["▁আগে",-12.385255813598633],["▁तपाईं",-12.38527011871338],["▁birçok",-12.38529109954834],["▁pesquisa",-12.38529109954834],["营",-12.38529109954834],["▁सडक",-12.385294914245604],["یشن",-12.385321617126465],["کہ",-12.385327339172363],["▁eorum",-12.385332107543944],["ወጣ",-12.385351181030272],["▁институт",-12.385366439819336],["のある",-12.385375022888184],["mers",-12.385454177856444],["▁Bou",-12.385489463806152],["عيد",-12.385562896728516],["택",-12.38556671142578],["▁Bah",-12.385583877563477],["mpel",-12.385592460632324],["▁نوجوان",-12.38560962677002],["iah",-12.385634422302246],["ข้าง",-12.385652542114258],["▁edəcək",-12.385662078857422],["prava",-12.385672569274902],["▁ezer",-12.385689735412598],["▁ടി",-12.38571548461914],["▁Franc",-12.385743141174316],["紧",-12.38579559326172],["şê",-12.385912895202637],["ໄ",-12.385941505432127],["▁benda",-12.38599681854248],["तं",-12.386026382446287],["tory",-12.386070251464844],["▁fotografie",-12.386091232299805],["rios",-12.38609218597412],["▁Vår",-12.38609218597412],["▁disputa",-12.386093139648438],["txa",-12.386096000671388],["ອ",-12.386096954345703],["▁investigación",-12.38609790802002],["krát",-12.386103630065918],["mok",-12.386113166809082],["atkan",-12.386211395263672],["▁Emp",-12.386220932006836],["گەن",-12.386244773864746],["dau",-12.386287689208984],["▁Ray",-12.386337280273438],["▁azken",-12.38634967803955],["čkog",-12.38636302947998],["нүн",-12.386367797851562],["stå",-12.386381149291992],["sü",-12.386399269104004],["ذر",-12.386486053466797],["rec",-12.386494636535645],["▁vue",-12.386504173278809],[":53",-12.386507034301758],["▁እንጂ",-12.386518478393556],["▁Pasi",-12.386531829833984],["▁प्रत्येक",-12.386556625366213],["▁pies",-12.38656234741211],["bala",-12.38658618927002],["হি",-12.38662338256836],["引起",-12.386638641357422],["บริการ",-12.386662483215332],["ปัญหา",-12.386690139770508],["นาน",-12.386703491210938],["▁پيش",-12.386703491210938],["▁મો",-12.386720657348633],["nzi",-12.386730194091797],["আ",-12.38673210144043],["ома",-12.386798858642578],["batan",-12.386802673339844],["▁sprawie",-12.386826515197754],["िल",-12.38685131072998],["▁jelent",-12.386874198913574],["创造",-12.386921882629396],["▁možnosti",-12.386963844299316],["時期",-12.386969566345217],["▁Belediye",-12.387005805969238],["moc",-12.387017250061035],["랑",-12.38705348968506],["▁ກ",-12.387100219726562],["▁kysy",-12.387110710144045],["▁þér",-12.387176513671877],["OV",-12.387226104736328],["▁բաց",-12.38724422454834],["mobil",-12.387262344360352],["ICE",-12.38727855682373],["仅",-12.387310981750488],["▁nový",-12.387374877929688],["дап",-12.387381553649902],["▁мобил",-12.38742733001709],["▁సీ",-12.387484550476074],["дать",-12.38754653930664],["たら",-12.387551307678224],["▁दिल",-12.387555122375488],["bach",-12.38755702972412],["▁ország",-12.387653350830078],["녀",-12.387666702270508],["▁krok",-12.387699127197266],["▁Չ",-12.387725830078123],["▁кат",-12.387752532958984],["txo",-12.387770652770996],["eten",-12.38780117034912],["kannya",-12.387807846069336],["▁ministra",-12.387825965881348],["都在",-12.387860298156738],["ställning",-12.387874603271484],["▁صحت",-12.38792610168457],["▁banget",-12.387951850891112],["▁Chris",-12.387956619262695],["▁Leon",-12.388007164001465],["▁teknoloji"
,-12.388035774230955],["航空",-12.388052940368652],["▁дур",-12.388081550598145],["时代",-12.388081550598145],["▁معلوم",-12.388163566589355],["▁nuair",-12.388169288635254],["သို့",-12.388171195983888],["▁Angeles",-12.388176918029783],["गर",-12.388283729553224],["சை",-12.38830852508545],["tório",-12.388320922851562],["▁cynnwys",-12.388344764709473],["很多人",-12.388344764709473],["▁давно",-12.388415336608888],["dli",-12.388442039489746],["▁hodin",-12.388468742370604],["好好",-12.388471603393556],["▁iliyo",-12.388504028320312],["▁سمجھ",-12.388623237609863],["▁OF",-12.388631820678713],["ผ",-12.388647079467772],["▁Жан",-12.38869857788086],["reich",-12.388707160949709],["▁blogi",-12.388712882995604],["शि",-12.388799667358398],["联合",-12.388815879821776],["fas",-12.388856887817385],["ىگە",-12.388867378234863],["ገር",-12.388891220092772],["▁መንግስት",-12.388896942138672],["▁käyttö",-12.388904571533203],["▁szeret",-12.388911247253418],["ซึ่ง",-12.38891315460205],["▁окуп",-12.388914108276367],["ਉ",-12.38891887664795],["စေ",-12.388944625854492],["▁tomonidan",-12.388957023620604],["▁μεγάλη",-12.388957023620604],["▁tes",-12.388976097106934],["▁adam",-12.388978004455566],["ítő",-12.38907527923584],["طلب",-12.389135360717772],["▁coi",-12.389189720153809],["โครงการ",-12.38921356201172],["▁դեմ",-12.38922119140625],["▁mannen",-12.38925838470459],["▁dugo",-12.38934326171875],["രെ",-12.389416694641112],["▁nehmen",-12.389430046081545],["壁",-12.389524459838867],["স্ট",-12.38952922821045],["▁قم",-12.389559745788574],["bele",-12.38957405090332],["ဘာ",-12.389592170715332],["▁Frauen",-12.389633178710938],["▁helse",-12.389660835266112],["▁üst",-12.389731407165527],["bey",-12.389814376831056],["▁resulta",-12.389946937561035],["▁موجب",-12.389972686767578],["▁בעל",-12.389976501464844],["iyey",-12.39001750946045],["artha",-12.39002799987793],["▁título",-12.3900785446167],["алы",-12.39010238647461],["▁UV",-12.39013957977295],["▁घट",-12.390146255493164],["橋",-12.39015293121338],["▁daily",-12.390164375305176],["▁cou",-12.3901948928833],["ತ್ತು",-12.390210151672363],["▁пункт",-12.390239715576172],["媒體",-12.390249252319336],["▁가격",-12.390276908874512],["titi",-12.390336036682127],["▁کافی",-12.390392303466797],["ுக்கு",-12.390413284301758],["▁والع",-12.390419960021973],["ической",-12.390446662902832],["ပါး",-12.390448570251465],["ไอ",-12.390484809875488],["tegi",-12.390531539916992],["▁ishin",-12.390549659729004],["▁ਜਿਸ",-12.390563011169434],["վեր",-12.390597343444824],["▁korrekt",-12.390612602233888],["击",-12.390618324279783],["▁кош",-12.390649795532228],["▁болса",-12.390706062316896],["▁laatste",-12.390732765197754],["▁stock",-12.390741348266602],["zení",-12.390746116638184],["▁هزینه",-12.390795707702637],["நா",-12.390828132629396],["▁Пан",-12.39083194732666],["▁browser",-12.39083766937256],["▁лидер",-12.390839576721191],["ың",-12.390929222106934],["УР",-12.390968322753906],["శ్",-12.391034126281738],["tinen",-12.39105224609375],["▁виконання",-12.391057014465332],["▁यी",-12.39116382598877],["стри",-12.39118194580078],["▁kutoa",-12.391182899475098],["My",-12.391199111938477],["▁Eestis",-12.391231536865234],["లలో",-12.3912353515625],["▁hvað",-12.391237258911133],["▁semula",-12.391263008117676],["▁käyttä",-12.391279220581056],["hta",-12.391283988952637],["पद",-12.391350746154783],["druž",-12.391404151916504],["▁ಇರ",-12.3914155960083],["▁URL",-12.391456604003906],["▁pensi",-12.391509056091309],["▁없다",-12.3916015625],["▁lev",-12.391650199890137],["▁Све",-12.391698837280272],["▁መስ",-12.391715049743652],["szerű",-12.391717910766602],["නී",-12.391
741752624512],["▁rasmi",-12.391805648803713],["▁пакет",-12.391838073730469],["▁ڳ",-12.391911506652832],["షన్",-12.391912460327148],["区域",-12.391921043395996],["RK",-12.392009735107422],["ઃ",-12.392023086547852],["ющий",-12.392040252685549],["▁áo",-12.39207363128662],["▁ගැනීමට",-12.392098426818848],["▁Mere",-12.39215087890625],["rý",-12.392215728759766],["bezpeč",-12.39228343963623],["سٽ",-12.392340660095217],["▁చిన్న",-12.392340660095217],["เพ",-12.392356872558594],["▁ਰਿਹਾ",-12.39235782623291],["▁گرم",-12.392369270324709],["したり",-12.392375946044922],["elm",-12.39247703552246],["열",-12.392486572265623],["▁Адам",-12.39252471923828],["▁pobl",-12.392539978027344],["▁спас",-12.392581939697266],["希",-12.392596244812012],["▁probably",-12.392643928527832],["iais",-12.392666816711426],["▁quality",-12.392690658569336],["▁compara",-12.3927001953125],["ihii",-12.392729759216309],["▁ningún",-12.392729759216309],["▁Have",-12.392759323120115],["▁ຫ",-12.392780303955078],["cijos",-12.392783164978027],["атын",-12.392815589904783],["лова",-12.392842292785645],["ටම",-12.39285945892334],["▁នោះ",-12.392874717712402],["▁език",-12.39291763305664],["лін",-12.392919540405272],["▁تح",-12.392936706542969],["czny",-12.392937660217283],["▁ହେଲେ",-12.39296531677246],["▁location",-12.392982482910156],["िम",-12.392988204956056],["▁команд",-12.393043518066406],["fis",-12.393049240112305],["▁tol",-12.39307689666748],["▁kasta",-12.393094062805176],["▁mim",-12.393120765686035],["▁დაი",-12.39313507080078],["▁iespēja",-12.393145561218262],["MER",-12.393181800842283],["Sp",-12.393194198608398],["▁Bunun",-12.393282890319824],["▁образования",-12.393282890319824],["▁presenza",-12.393305778503418],["▁છું",-12.393306732177734],["πλ",-12.393390655517578],["▁celle",-12.393505096435549],["なる",-12.393510818481444],["▁karne",-12.393539428710938],["ρώ",-12.393560409545898],["▁Luft",-12.393595695495604],["деп",-12.39360809326172],["нија",-12.39365005493164],["ემ",-12.39365291595459],["▁Vien",-12.393706321716309],["নের",-12.393709182739258],["▁áreas",-12.393743515014648],["volle",-12.393747329711914],["ერი",-12.39376163482666],["▁Ee",-12.393820762634276],["▁identi",-12.393839836120604],["เท่านั้น",-12.393879890441896],["▁Gri",-12.393879890441896],["▁informácie",-12.39388942718506],["sey",-12.393895149230955],["sker",-12.393895149230955],["▁yourself",-12.39393711090088],["불",-12.39395523071289],["▁Regi",-12.393962860107422],["▁Iar",-12.394061088562012],["▁து",-12.394099235534668],["▁студ",-12.394119262695312],["▁jaoks",-12.394207954406738],["▁našich",-12.39421558380127],["▁ਐ",-12.39421844482422],["holder",-12.394227981567385],["營",-12.394229888916016],["▁spôsob",-12.39423370361328],["▁محبت",-12.394267082214355],["oita",-12.394278526306152],["▁suk",-12.394285202026367],["▁gin",-12.394292831420898],["公开",-12.394339561462402],["ಕ್ಕ",-12.394347190856934],["磨",-12.394431114196776],["资料",-12.39444637298584],["▁результат",-12.394464492797852],["▁تعالى",-12.394484519958496],["òr",-12.39449405670166],["ຖ",-12.394521713256836],["▁nettet",-12.394603729248049],["▁danes",-12.394619941711426],["▁دین",-12.394739151000977],["▁gravida",-12.394770622253418],["kkaa",-12.394780158996582],["▁عشر",-12.394801139831545],["ază",-12.394818305969238],["▁dzīvo",-12.394903182983398],["lha",-12.394941329956056],[":41",-12.394948959350586],["本人",-12.394954681396484],["▁Kort",-12.394962310791016],["▁elektri",-12.394963264465332],["▁Qur",-12.394967079162598],["vola",-12.395058631896973],["▁projeto",-12.395071983337402],["gay",-12.395079612731934],["▁زموږ",-12.395097732543944],
["▁शरीर",-12.395097732543944],["▁навчання",-12.395103454589844],["位於",-12.395130157470703],["vény",-12.395136833190918],["▁styl",-12.39515781402588],["bong",-12.395164489746094],["脸",-12.395191192626951],["veg",-12.395207405090332],["้น",-12.395257949829102],["綠",-12.395288467407228],["ront",-12.395293235778809],["▁духов",-12.395352363586426],["▁kü",-12.395357131958008],["广",-12.3954439163208],["eľ",-12.395477294921877],["Allah",-12.395496368408203],["▁Information",-12.395499229431152],["▁0,5",-12.395501136779783],["atzeko",-12.39552402496338],["▁lamang",-12.395547866821287],["▁क्षेत्रमा",-12.395547866821287],["▁ovom",-12.395630836486816],["累",-12.39564609527588],["ፕ",-12.395692825317385],["能源",-12.39570426940918],["ਹਾ",-12.39571475982666],["▁prevede",-12.395731925964355],["јер",-12.395753860473633],["vla",-12.395761489868164],["▁CCM",-12.39577579498291],["ળી",-12.395782470703123],["ลอง",-12.395819664001465],["▁paĝo",-12.395831108093262],["▁वै",-12.395837783813477],["slo",-12.39584732055664],["▁لگا",-12.39588451385498],["▁kohtu",-12.395901679992676],["▁아니",-12.395926475524902],["▁نزدیک",-12.395929336547852],["▁laser",-12.395962715148926],["寒",-12.395962715148926],["script",-12.395990371704102],["Por",-12.396021842956545],["▁KON",-12.396026611328123],["ஞ்ச",-12.396029472351074],["▁List",-12.396066665649414],["▁salva",-12.396081924438477],["roč",-12.396117210388184],["ชิ",-12.396241188049316],["祖",-12.396242141723633],["fia",-12.396268844604492],["ობას",-12.396357536315918],["nten",-12.396395683288574],["▁despois",-12.396440505981444],["ovania",-12.39645290374756],["sú",-12.39651107788086],["▁sóc",-12.396528244018556],["əsi",-12.396600723266602],["▁كبيرة",-12.396614074707031],["ору",-12.396620750427246],["stos",-12.396629333496094],["▁Bhí",-12.39663314819336],["▁måneder",-12.39664077758789],["▁פר",-12.396688461303713],["arre",-12.39675521850586],["yya",-12.396827697753906],["ácii",-12.39684009552002],["▁excelente",-12.396842956542969],["▁ný",-12.39687728881836],["▁lim",-12.396900177001951],["▁Vì",-12.396915435791016],["▁кроз",-12.396931648254396],["▁maisha",-12.396943092346191],["▁žmonių",-12.396950721740724],["▁laoreet",-12.396952629089355],["mjer",-12.396982192993164],["osh",-12.39698600769043],["▁پشت",-12.39700698852539],["▁सभा",-12.397011756896973],["▁age",-12.397012710571287],["граф",-12.39705753326416],["現代",-12.39707374572754],["talet",-12.397089958190918],["▁noc",-12.397125244140623],["▁Universitat",-12.397171020507812],["▁Dopo",-12.397174835205078],["▁passé",-12.397187232971191],["▁കൊ",-12.397193908691406],["యూ",-12.397196769714355],["▁журнал",-12.397231101989746],["▁marad",-12.397234916687012],["rava",-12.397253036499023],["уж",-12.397253036499023],["ρες",-12.39727020263672],["▁වෙනවා",-12.397296905517578],["ทาน",-12.397309303283691],["free",-12.397323608398438],["▁پیام",-12.397350311279297],["▁klima",-12.39737033843994],["▁nech",-12.3974027633667],["▁ବେଳେ",-12.397412300109863],["ekê",-12.397453308105469],["杰",-12.397497177124023],["▁ਹੋਣ",-12.397500038146973],["中文",-12.397505760192873],["2005",-12.397518157958984],["יף",-12.397528648376465],["▁запо",-12.397546768188477],["▁исти",-12.397564888000488],["кот",-12.397570610046388],["ษ",-12.39763069152832],["hub",-12.39771842956543],["▁Wochen",-12.39772891998291],["сил",-12.397760391235352],["▁ஆண்டு",-12.39777660369873],["▁życie",-12.397777557373049],["ंना",-12.397794723510742],["plus",-12.397817611694336],["yty",-12.397826194763184],["▁dúas",-12.397832870483398],["ckt",-12.397854804992676],["▁predmet",-12.397863388061523],["dv",-12.39787
8646850586],["kuntza",-12.397943496704102],["টে",-12.39795970916748],["guard",-12.398064613342283],["儿童",-12.398123741149902],["▁göster",-12.398157119750977],["さらに",-12.398180961608888],["▁πάντα",-12.39819049835205],["avan",-12.398202896118164],["كون",-12.398215293884276],["▁Zer",-12.398263931274414],["ကား",-12.39826488494873],["menta",-12.398289680480955],["▁السبت",-12.398297309875488],["▁kaas",-12.398303985595703],["mele",-12.39836597442627],["▁Moi",-12.3983736038208],["▁stronie",-12.39844799041748],["▁pogled",-12.39849853515625],["▁Blick",-12.398520469665527],["lato",-12.398564338684082],["▁Audio",-12.398571968078612],["oya",-12.398765563964844],["▁ولایت",-12.398775100708008],["▁त्यांच्या",-12.398812294006348],["▁‬",-12.398821830749512],["გრ",-12.39884090423584],["▁klubu",-12.398900985717772],["did",-12.398931503295898],["▁preciso",-12.398940086364746],["ପ୍",-12.398955345153809],["гд",-12.398984909057615],["▁مخالف",-12.398991584777832],["ësi",-12.398999214172363],["▁Rö",-12.399028778076172],["▁Hava",-12.399042129516602],[")))",-12.399051666259766],["▁Hej",-12.399053573608398],["▁XXI",-12.399054527282717],["etara",-12.399066925048828],["▁focus",-12.399087905883787],["ीन",-12.399105072021484],["бед",-12.39911651611328],["สด",-12.39918327331543],["▁निर्",-12.399192810058594],["▁1971",-12.399210929870604],["写真",-12.399338722229004],["▁ئو",-12.399357795715332],["▁წარ",-12.399388313293455],["▁kreativ",-12.39939308166504],["技能",-12.399407386779783],["卫生",-12.39941692352295],["▁യു",-12.399418830871582],["term",-12.399422645568848],["▁କରିଥିଲେ",-12.399434089660645],["best",-12.399491310119627],["▁گڏ",-12.399516105651855],["▁Republike",-12.399518013000488],["arkan",-12.399529457092283],["▁settore",-12.399553298950195],["▁Brun",-12.39957046508789],["▁Polo",-12.399580001831056],["acije",-12.39959716796875],["▁Photo",-12.399603843688965],["▁сигнал",-12.399651527404783],["▁Chrome",-12.399652481079102],["ız",-12.399669647216797],["▁ଜଣେ",-12.399670600891112],["อาการ",-12.39967155456543],["▁sanat",-12.399672508239746],["▁తమ",-12.399675369262695],["חה",-12.399805068969728],["ép",-12.399806022644045],["思い",-12.399837493896484],["▁мира",-12.39993667602539],["ျပဳ",-12.399965286254885],["yz",-12.40001392364502],["一个人",-12.400029182434082],["▁ərzində",-12.40003776550293],["▁გიორგი",-12.40003776550293],["aali",-12.400038719177246],["ोर",-12.40007209777832],["▁wonder",-12.400116920471191],["2019",-12.400117874145508],["raya",-12.400138854980469],["teh",-12.400151252746582],["гээр",-12.400191307067873],["▁posla",-12.400240898132324],["ボ",-12.40025520324707],["▁laba",-12.400270462036133],["లోకి",-12.400301933288574],["▁plastic",-12.400412559509276],["ผ้า",-12.400439262390137],["רס",-12.400500297546388],["▁beiden",-12.400501251220703],["▁Beh",-12.400534629821776],["▁skade",-12.400546073913574],["гч",-12.40061378479004],["▁Vr",-12.400616645812988],["▁batzuk",-12.400691032409668],["NYA",-12.400732040405272],["进行了",-12.400789260864258],["▁жени",-12.400906562805176],["▁tank",-12.400908470153809],["кови",-12.400933265686035],["也会",-12.400948524475098],["评",-12.40095329284668],["▁prend",-12.40096664428711],["▁Cas",-12.400970458984377],["ීම",-12.40098762512207],["ierte",-12.401026725769045],["ांचे",-12.401044845581056],["สวน",-12.401071548461914],["▁præ",-12.401080131530762],["▁személy",-12.401089668273926],["▁lộ",-12.40109157562256],["ተኛ",-12.401124000549316],["判",-12.401134490966797],["▁mila",-12.401137351989746],["▁Tallinna",-12.40115451812744],["▁quite",-12.401166915893556],["很大",-12.401168823242188],["ışı",-12.4012079
2388916],["▁جنوب",-12.401214599609377],["▁slobod",-12.401219367980955],["▁Tho",-12.40125846862793],["▁ਇ",-12.401263236999512],["▁stá",-12.401280403137209],["▁최근",-12.401283264160156],["▁puu",-12.40129566192627],["▁कह",-12.401314735412598],["▁ապա",-12.401351928710938],["hkan",-12.401371002197266],["prat",-12.401374816894531],["▁សែន",-12.40139389038086],["▁mica",-12.401460647583008],["▁KM",-12.40147876739502],["aquesta",-12.401525497436523],["หนอน",-12.40153694152832],["▁bahay",-12.401543617248535],["گه",-12.401573181152344],["voz",-12.401601791381836],["▁These",-12.401620864868164],["YO",-12.40162754058838],["▁вис",-12.40163230895996],["czu",-12.401640892028809],["ově",-12.401721954345703],["▁جام",-12.40183925628662],["▁pp",-12.401861190795898],["bula",-12.401920318603516],["struk",-12.401934623718262],["ىدىكى",-12.40194606781006],["▁게",-12.402017593383787],["▁roka",-12.402022361755373],["▁spam",-12.402070045471191],["ajat",-12.402093887329102],["кле",-12.402095794677734],["▁svět",-12.402101516723633],["cord",-12.402217864990234],["KY",-12.402299880981444],["▁Jei",-12.402362823486328],["gericht",-12.402398109436035],["▁Запад",-12.402421951293944],["ہر",-12.402427673339844],["▁услови",-12.402458190917969],["σή",-12.402469635009766],["ນາ",-12.402472496032717],["แดง",-12.402474403381348],["▁hatin",-12.40247631072998],["署",-12.402482986450195],["独立",-12.402491569519045],["▁دہشت",-12.40251350402832],["▁720",-12.402523040771484],["▁پوسٹ",-12.402530670166016],["▁ఫోన్",-12.402541160583496],["真是",-12.402545928955078],["▁dvs",-12.402554512023926],["tw",-12.40255641937256],["▁voda",-12.402600288391112],["及时",-12.402674674987791],["jme",-12.402749061584473],["▁ठ",-12.40275764465332],["▁wek",-12.402766227722168],["esto",-12.4027738571167],["▁మి",-12.402814865112305],["hri",-12.40283203125],["購入",-12.402904510498049],["▁Rīgas",-12.402905464172363],["▁Cast",-12.402945518493652],["ирует",-12.402955055236816],["下来",-12.402959823608398],["tuur",-12.402974128723145],["ուշ",-12.403045654296877],["wid",-12.403072357177734],["ifa",-12.403076171875],["純",-12.40318489074707],["מצא",-12.403196334838867],["资产",-12.403228759765623],["splan",-12.403264999389648],["gaz",-12.403322219848633],["▁жаса",-12.403350830078123],["facebook",-12.40335178375244],["ຢ",-12.403380393981934],["▁exist",-12.403413772583008],["▁получи",-12.403433799743652],["▁mom",-12.403485298156738],["▁Αγ",-12.403497695922852],["hia",-12.403509140014648],["íodh",-12.403526306152344],["စဥ္",-12.403552055358888],["▁posts",-12.403565406799316],["▁corri",-12.40357494354248],["▁एव",-12.403582572937012],["▁책",-12.403593063354492],["▁Nap",-12.4036226272583],["▁plen",-12.403626441955566],["arti",-12.403631210327148],["▁ramach",-12.403643608093262],["▁أنها",-12.403731346130373],["▁quien",-12.403743743896484],["そんな",-12.403751373291016],["▁wneud",-12.40376091003418],["кључ",-12.40377426147461],["▁vul",-12.403833389282228],["▁miljö",-12.40385627746582],["UB",-12.40388011932373],["▁kari",-12.403885841369627],["▁Ani",-12.403953552246094],["住宅",-12.40396213531494],["▁đôi",-12.403976440429688],["▁ತನ್ನ",-12.403989791870115],["▁möchten",-12.404006004333496],["кав",-12.404013633728027],["ავ",-12.404061317443848],["▁рус",-12.404061317443848],["▁fonction",-12.404090881347656],["ေဆာင္",-12.404094696044922],["كان",-12.40412139892578],["▁gebied",-12.404169082641602],["▁பொ",-12.404191970825195],["▁хот",-12.404213905334473],["▁function",-12.40422534942627],["▁засаг",-12.404260635375977],["ວັນ",-12.404273986816406],["▁Personen",-12.404312133789062],["ಿಸಿದ",-12.404314041137695],
["/7",-12.404342651367188],["澳门",-12.404353141784668],["ujejo",-12.40435791015625],["ање",-12.404366493225098],["▁glob",-12.40437126159668],["ltä",-12.404373168945312],["▁yoxdur",-12.40440845489502],["▁нее",-12.404424667358398],["aggio",-12.404443740844728],["▁করার",-12.404495239257812],["ятся",-12.404508590698242],["ಲಾಗಿದೆ",-12.40453815460205],["chádza",-12.404570579528809],["▁afge",-12.40459156036377],["歳",-12.404611587524414],["▁토",-12.404638290405272],["▁ගේ",-12.404662132263184],["▁endnu",-12.404683113098145],["▁kuulu",-12.40479564666748],["ādā",-12.404807090759276],["ống",-12.40481948852539],["▁deci",-12.404829978942873],["▁දි",-12.404840469360352],["▁wind",-12.4048490524292],["хов",-12.404857635498049],["▁Кам",-12.404861450195312],["▁ඔබේ",-12.40488338470459],["▁lung",-12.404906272888184],["咨询",-12.404953956604004],["▁azad",-12.404990196228027],["¥",-12.405001640319824],["▁Để",-12.405020713806152],["▁valo",-12.40502643585205],["▁zie",-12.405029296875],["▁lisäksi",-12.405033111572266],["чну",-12.405065536499023],["fér",-12.405068397521973],["ుతుంది",-12.405080795288086],["都會",-12.405081748962402],["mí",-12.405106544494627],["corre",-12.40518283843994],["წე",-12.405221939086914],["▁Ош",-12.405228614807127],["जर",-12.405245780944824],["δέ",-12.40524673461914],["▁ทําให้",-12.405247688293455],["ece",-12.40525245666504],["viet",-12.405298233032228],["▁lån",-12.405298233032228],["▁бие",-12.405308723449709],["ани",-12.405329704284668],["RIA",-12.40541172027588],["आर",-12.405420303344728],["wią",-12.40546989440918],["tijd",-12.405492782592772],["▁suami",-12.40550422668457],["树",-12.40554141998291],["दु",-12.405582427978516],["lane",-12.405610084533691],["▁pika",-12.40561294555664],["▁müxtəlif",-12.405624389648438],["နည္း",-12.40562915802002],["▁تعالی",-12.40562915802002],["▁مکمل",-12.40563678741455],["قات",-12.405652046203612],["จํา",-12.40566062927246],["▁۱۰",-12.40567111968994],["క్క",-12.405682563781738],["▁২০",-12.405689239501951],["▁패",-12.405757904052734],["▁continuar",-12.40579891204834],["▁исп",-12.405808448791504],["억",-12.405844688415527],["tøy",-12.405864715576172],["ତ୍",-12.405869483947754],["▁isteri",-12.405920028686523],["▁sabi",-12.405929565429688],["▁ከፍተኛ",-12.405933380126951],["▁tugas",-12.405946731567385],["▁관",-12.405948638916016],["▁pria",-12.406051635742188],["▁ок",-12.406092643737791],["Լ",-12.406116485595703],["िङ",-12.406139373779297],["гаар",-12.406145095825195],["лися",-12.40614891052246],["▁sod",-12.406157493591309],["▁भू",-12.406171798706056],["▁тог",-12.40617847442627],["ۇل",-12.406182289123535],["▁dzień",-12.406211853027344],["財",-12.40622329711914],["鎮",-12.40623664855957],["▁varme",-12.406256675720217],["▁ёсць",-12.406259536743164],["▁մեզ",-12.40626621246338],["▁ху",-12.40633773803711],["өөр",-12.406379699707031],["▁mh",-12.406394958496094],["▁šī",-12.406464576721191],["▁ezek",-12.406501770019531],["ေတာင္",-12.406508445739746],["살",-12.40655517578125],["ONG",-12.406665802001951],["的話",-12.406756401062012],["▁ເມືອງ",-12.406758308410645],["ናት",-12.406770706176758],["čila",-12.406780242919922],["▁olar",-12.4068021774292],["▁વર્ષ",-12.4068021774292],["▁kyn",-12.406820297241213],["შე",-12.406829833984377],["▁amalga",-12.406837463378906],["☆",-12.406867980957031],["▁बड़े",-12.40688133239746],["കളെ",-12.406888008117676],["▁времена",-12.406902313232422],["▁රටේ",-12.406925201416016],["的朋友",-12.406993865966797],["▁byli",-12.406997680664062],["▁यु",-12.407071113586426],["▁договора",-12.40708827972412],["▁demanda",-12.407099723815918],["भव",-12.407135009765623],["ymas",-12.4
07233238220217],["▁ਖ",-12.407307624816896],["▁journalist",-12.40736198425293],["▁ٻ",-12.407402992248535],["▁Stand",-12.407414436340332],["▁шлях",-12.407419204711914],["迅速",-12.407442092895508],["▁pem",-12.407455444335938],["rock",-12.407487869262695],["▁đấy",-12.407493591308594],["▁ಕೇಂದ್ರ",-12.407499313354492],["▁webwerf",-12.407510757446287],["▁кем",-12.40761375427246],["▁مجموعه",-12.407665252685549],["▁comes",-12.407666206359863],["▁termine",-12.40770149230957],["zit",-12.40772819519043],["▁oop",-12.407757759094238],["▁tiun",-12.407812118530272],["gled",-12.407840728759766],["▁yox",-12.407849311828612],["▁ପ୍ରତି",-12.407877922058104],["wą",-12.407898902893066],["▁Happy",-12.407936096191406],["▁nori",-12.40794849395752],["▁prati",-12.407952308654783],["▁съвет",-12.407994270324709],["▁လ",-12.407999992370604],["кая",-12.408051490783691],["rice",-12.40808391571045],["▁մինչեւ",-12.408119201660156],["▁अनुभव",-12.408122062683104],["▁Rang",-12.40812873840332],["▁politiko",-12.408140182495115],["▁ovog",-12.408148765563965],["▁espaço",-12.408153533935549],["ິ",-12.40817928314209],["▁utile",-12.408202171325684],["▁pomo",-12.408235549926758],["ວາ",-12.408270835876465],["結構",-12.408285140991213],["▁lệ",-12.408305168151855],["▁vahel",-12.40830898284912],["▁ខ្មែរ",-12.408329963684082],["guer",-12.408336639404297],["▁Union",-12.408366203308104],["▁вместе",-12.408380508422852],["▁ლე",-12.408388137817385],["kide",-12.40844440460205],["▁מצ",-12.40848159790039],["фон",-12.40849494934082],["▁सात",-12.408570289611816],["方が",-12.408613204956056],["osos",-12.408620834350586],["laşma",-12.408638954162598],["确定",-12.408674240112305],["mete",-12.408686637878418],["▁kannst",-12.40868854522705],["▁euismod",-12.408740997314451],["▁Pis",-12.408791542053224],["ijas",-12.408820152282717],["▁Angli",-12.408843994140623],["pont",-12.408860206604004],["▁Capital",-12.40892505645752],["▁River",-12.40892505645752],["ੋਂ",-12.408939361572266],["比赛",-12.408974647521973],["▁segue",-12.408987998962402],["▁meeste",-12.409003257751465],["▁Мир",-12.409038543701172],["▁Solo",-12.409071922302246],["world",-12.409124374389648],["▁salg",-12.40912628173828],["▁måske",-12.409162521362305],["MC",-12.409193992614746],["▁hoop",-12.40920352935791],["▁:))",-12.409208297729492],["неш",-12.409213066101074],["вес",-12.409224510192873],["▁writing",-12.409235954284668],["នាក់",-12.409261703491213],["簡單",-12.409268379211426],["▁informations",-12.40931797027588],["GP",-12.409348487854004],["寺",-12.409357070922852],["নো",-12.409375190734863],["కం",-12.409468650817873],["▁Hoa",-12.409480094909668],["▁natin",-12.409485816955566],["שות",-12.409502029418944],["▁בד",-12.409553527832031],["▁piir",-12.40958023071289],["ρυ",-12.409590721130373],["▁חדש",-12.409616470336914],["▁hamda",-12.40964412689209],["▁всю",-12.409659385681152],["ிக்க",-12.409717559814451],["мян",-12.409743309020996],["▁dasar",-12.409771919250488],["tür",-12.409783363342283],["▁pemerintah",-12.409783363342283],["▁Rusia",-12.409788131713867],["undang",-12.409819602966309],["▁pogod",-12.409832000732422],["▁कल",-12.409867286682127],["▁suku",-12.409873962402344],["▁чалавек",-12.409873962402344],["蒂",-12.40993595123291],["nē",-12.409939765930176],["▁tę",-12.40996551513672],["▁коме",-12.410028457641602],["▁strand",-12.410038948059082],["ार्थ",-12.410042762756348],["▁donner",-12.410066604614258],["▁napa",-12.41010570526123],["▁feb",-12.410117149353027],["▁wish",-12.410122871398926],["mię",-12.41014003753662],["▁mód",-12.410238265991213],["▁века",-12.410260200500488],["▁investiga",-12.41035270690918],
["▁сана",-12.410359382629396],["▁sə",-12.410391807556152],["ĩ",-12.410441398620604],["銀",-12.41044807434082],["▁αυτο",-12.410476684570312],["▁عزیز",-12.41049861907959],["▁moder",-12.410562515258787],["▁אפ",-12.410569190979004],["tunut",-12.410626411437988],["▁mendi",-12.41066551208496],["▁Pode",-12.410675048828123],["ደር",-12.410686492919922],["OU",-12.41069221496582],["▁사람",-12.410746574401855],["eşti",-12.410758018493652],["个月",-12.41078758239746],["▁ጥያቄ",-12.410789489746094],["▁Cabdi",-12.4108247756958],["▁прием",-12.410846710205078],["▁honom",-12.410855293273926],["vani",-12.410893440246582],["▁뒤",-12.410895347595217],["ም፡፡",-12.410930633544922],["оор",-12.410943031311035],["▁space",-12.410964012145996],["▁Ober",-12.410970687866213],["▁begge",-12.410983085632324],["ес",-12.41100025177002],["▁ډول",-12.411043167114258],["РУ",-12.411066055297852],["مس",-12.411067008972168],["▁Yes",-12.411128997802734],["kere",-12.41115665435791],["▁एन",-12.411174774169922],["بى",-12.411191940307615],["▁haec",-12.41119384765625],["▁hvilket",-12.411215782165527],["үр",-12.411236763000488],["▁قوانین",-12.411242485046388],["▁turva",-12.411293983459473],["▁igre",-12.411336898803713],["▁conversa",-12.411381721496582],["▁senere",-12.411405563354492],["▁povez",-12.411417007446287],["▁бид",-12.411454200744627],["▁principalmente",-12.411463737487791],["▁شىنجاڭ",-12.411504745483398],["ukai",-12.411508560180664],["▁minyak",-12.411510467529297],["КУ",-12.41152572631836],["rga",-12.411561965942385],["ಿಂದ",-12.411564826965332],["▁totale",-12.411602020263672],["것",-12.411654472351074],["とき",-12.411657333374023],["▁υπό",-12.4116792678833],["▁tund",-12.411686897277832],["▁מג",-12.41169548034668],["▁bekommen",-12.41170597076416],["▁техника",-12.411723136901855],["▁Job",-12.411748886108398],["▁ሳ",-12.411755561828612],["紹介",-12.41181182861328],["▁apakah",-12.411822319030762],["▁fen",-12.411827087402344],["▁priestor",-12.41184425354004],["ציה",-12.41185474395752],["▁ਵਲੋਂ",-12.41186809539795],["널",-12.41186809539795],["nski",-12.411870956420898],["wij",-12.411884307861328],["▁posuere",-12.411907196044922],["▁onderzoek",-12.4119291305542],["ЕР",-12.411931991577148],["เรือ",-12.41196060180664],["šče",-12.411967277526855],["▁მეტი",-12.411968231201172],["▁cauza",-12.412010192871094],[":07",-12.412017822265623],["▁உலக",-12.412049293518066],["trze",-12.412064552307127],["기를",-12.41207790374756],["▁tee",-12.412151336669922],["lų",-12.412212371826172],["▁billig",-12.412251472473145],["klaus",-12.412257194519045],["ี",-12.412281036376951],["▁Boleh",-12.412282943725586],["▁history",-12.412352561950684],["所有的",-12.412363052368164],["yak",-12.412384033203123],["ሰማ",-12.412450790405272],["瓶",-12.412450790405272],["▁procura",-12.412483215332031],["ائم",-12.41250991821289],["▁kaç",-12.412521362304688],["त्त",-12.412525177001951],["▁artık",-12.41256046295166],["روز",-12.412565231323242],["▁വേണ്ടി",-12.412567138671877],["搭",-12.412579536437988],[":44",-12.412586212158203],["▁hry",-12.412593841552734],["misi",-12.41259765625],["▁citu",-12.412605285644531],["leik",-12.412642478942873],["▁jakości",-12.412681579589844],["ارا",-12.41268825531006],["▁koha",-12.41270637512207],["කම",-12.412732124328612],["▁Гр",-12.41282844543457],["▁ଖ",-12.41285228729248],["diyi",-12.412853240966797],["▁koncentr",-12.41288948059082],["▁vilja",-12.41292667388916],["راء",-12.4129638671875],["▁සිය",-12.412997245788574],["rud",-12.413002014160156],["▁హైదరాబాద్",-12.413013458251951],["adii",-12.413044929504396],["▁proprie",-12.413061141967772],["同じ",-12.413074493408203],["สุ
ขภาพ",-12.413077354431152],["▁egestas",-12.413125991821287],["▁ardından",-12.413126945495604],["▁journée",-12.413134574890137],["▁അത",-12.413135528564451],["▁ਦੋ",-12.413199424743652],["iyada",-12.413226127624512],["ಗು",-12.413235664367676],["最好的",-12.413274765014648],["▁დაუ",-12.413285255432127],["nari",-12.413290977478027],["▁maio",-12.413314819335938],["ança",-12.41334629058838],["▁новых",-12.413387298583984],["▁manžel",-12.413403511047363],["▁տարբեր",-12.413432121276855],["heng",-12.413434982299805],["▁Bien",-12.413450241088867],["iston",-12.413487434387209],["mið",-12.413500785827637],["▁Köz",-12.413509368896484],["mies",-12.413548469543455],["▁гре",-12.413549423217772],["▁രണ്ടു",-12.413586616516112],["కర్",-12.413588523864746],["▁значение",-12.413606643676758],["bela",-12.41364574432373],["πρ",-12.41364574432373],["▁perjalanan",-12.41374969482422],["▁ଜାଣନ୍ତୁ",-12.41374969482422],["▁tarvitse",-12.413775444030762],["▁кейін",-12.413788795471191],["ปา",-12.413800239562988],["Kar",-12.413801193237305],["▁പിന്നെ",-12.413860321044922],["ære",-12.413861274719238],["▁recens",-12.413896560668944],["ትና",-12.413905143737791],["藝",-12.413912773132324],["▁біл",-12.413970947265623],["▁Lyrics",-12.414011001586914],["مىز",-12.414013862609863],["▁Lisa",-12.414045333862305],["▁निकाल",-12.414107322692873],["kond",-12.414108276367188],["dab",-12.41411304473877],["▁youtube",-12.414125442504885],["ồng",-12.414133071899414],["▁Great",-12.414133071899414],["sari",-12.414162635803224],["पो",-12.414188385009766],["全体",-12.414196968078612],["tres",-12.414217948913574],["հան",-12.414220809936523],["▁члан",-12.414220809936523],["▁mesi",-12.414228439331056],["▁façon",-12.414266586303713],["▁Vale",-12.414281845092772],["lege",-12.414295196533203],["効果",-12.414356231689451],["▁member",-12.414359092712402],["▁گرفتار",-12.4143648147583],["▁Vestibulum",-12.414377212524414],["iona",-12.414388656616213],["▁nemlig",-12.414436340332031],["▁pren",-12.41444969177246],["ליך",-12.414510726928713],["mbah",-12.414512634277344],["▁ਦਿਨ",-12.414525032043455],["▁கூட",-12.414615631103516],["▁aktu",-12.414636611938477],["▁takim",-12.414642333984377],["ັງ",-12.414661407470703],["重要的",-12.414729118347168],["▁خیال",-12.41473388671875],["итися",-12.414742469787598],["▁فون",-12.414761543273926],["▁כולל",-12.414931297302246],["▁vesti",-12.414977073669434],["▁beide",-12.4149808883667],["Qué",-12.414996147155762],["▁Foundation",-12.415011405944824],["▁درج",-12.415083885192873],["ề",-12.415085792541504],["▁пасля",-12.415131568908691],["LV",-12.415156364440918],["▁rekao",-12.415156364440918],["▁universal",-12.415159225463867],["同意",-12.41516399383545],["ყი",-12.415207862854004],["▁пазар",-12.415263175964355],["▁labāk",-12.415298461914062],["▁Rück",-12.415319442749023],["đu",-12.4153470993042],["▁საქმე",-12.415369033813477],["▁ответ",-12.415399551391602],["еви",-12.415432929992676],["ہم",-12.415447235107422],["ించిన",-12.415451049804688],["▁kupata",-12.415609359741213],["▁malesuada",-12.415634155273438],["▁cambia",-12.415641784667969],["яўляецца",-12.41564655303955],["אני",-12.41565990447998],["▁děti",-12.41566276550293],["ど",-12.41572093963623],["serie",-12.415769577026367],["agrada",-12.415783882141112],["▁beg",-12.41580581665039],["▁stille",-12.415842056274414],["هد",-12.415916442871094],["▁demonstra",-12.415934562683104],["lc",-12.41598129272461],["ησε",-12.416015625],["▁СА",-12.416035652160645],["нт",-12.416128158569336],["▁ясно",-12.416150093078612],["▁finder",-12.416157722473145],["舉辦",-12.416226387023926],["▁bran",-12.416253089904783],["▁m
ẫu",-12.41626262664795],["▁vielleicht",-12.41626262664795],["قام",-12.41627025604248],["▁رفتار",-12.41627597808838],["håll",-12.416305541992188],["യൂ",-12.416316032409668],["▁kamen",-12.4163236618042],["РО",-12.416326522827148],["schau",-12.416359901428224],["▁rezultate",-12.416382789611816],["ач",-12.41645336151123],["▁Кур",-12.416496276855469],["▁պ",-12.416510581970217],["▁فيما",-12.416510581970217],["గల",-12.416521072387695],["वास",-12.416624069213867],["▁سە",-12.416626930236816],["▁IR",-12.416641235351562],["اوي",-12.416735649108888],["zur",-12.416749954223633],["▁اینجا",-12.416755676269531],["ապես",-12.416812896728516],["emia",-12.416837692260742],["▁ଜ",-12.416877746582031],["几乎",-12.41688060760498],["▁овде",-12.416940689086914],["▁institut",-12.417036056518556],["▁dieną",-12.417061805725098],["▁રે",-12.417069435119627],["▁Kauno",-12.417071342468262],["ốc",-12.417101860046388],["▁Anu",-12.417106628417969],["സം",-12.417213439941406],["▁کور",-12.417231559753418],["▁škole",-12.417251586914062],["▁bone",-12.41726779937744],["........",-12.417292594909668],["にて",-12.41732120513916],["ହି",-12.417376518249512],["ಇ",-12.417383193969728],["ਓ",-12.417386054992676],["▁טע",-12.417388916015623],["saa",-12.417393684387209],["лка",-12.417421340942385],["以外",-12.417447090148926],["▁رفت",-12.417468070983888],["ляр",-12.41751194000244],["กําหนด",-12.417520523071287],["▁मुख्यमंत्री",-12.417524337768556],["▁ఆర్",-12.417526245117188],["▁VIII",-12.417543411254885],["▁Fund",-12.417562484741213],["็",-12.417620658874512],["▁פא",-12.417625427246094],["▁saranno",-12.417665481567385],["▁tata",-12.41769313812256],["▁gebruikt",-12.417698860168455],["amiento",-12.417736053466797],["▁quelli",-12.41775608062744],["▁wengi",-12.417794227600098],["ಪಾ",-12.417806625366213],["пусти",-12.417814254760742],["▁milion",-12.417823791503906],[":46",-12.417826652526855],["▁middel",-12.417841911315918],["▁tanda",-12.41786003112793],["ätt",-12.417923927307127],["иште",-12.418008804321287],["변",-12.418044090270996],["▁нев",-12.41807460784912],["enzi",-12.418085098266602],["▁кеткен",-12.418161392211914],["▁στιγμή",-12.418167114257812],["قی",-12.418211936950684],["iff",-12.418258666992188],["▁vinna",-12.41826629638672],["▁იყოს",-12.418274879455566],["都能",-12.418302536010742],["▁speel",-12.418305397033691],["ળા",-12.418309211730955],["▁patron",-12.41835594177246],["▁dié",-12.418365478515623],["شو",-12.418450355529783],["zzi",-12.418463706970217],["ılı",-12.418484687805176],["途",-12.418492317199709],["▁ομάδα",-12.418499946594238],["ตัวเอง",-12.418505668640137],["โร",-12.418549537658691],["▁vys",-12.418581008911133],["▁Автор",-12.418590545654297],["▁گیا۔",-12.418607711791992],["ISI",-12.41867446899414],["▁baixa",-12.418705940246582],["▁جماعت",-12.418730735778809],["таки",-12.418749809265137],["ают",-12.418778419494627],["▁insieme",-12.418787956237791],[":36",-12.41880702972412],["▁Marka",-12.418816566467283],["在一起",-12.418856620788574],["▁Ст",-12.418905258178713],["▁φιλ",-12.418914794921877],["iOS",-12.419028282165527],["štu",-12.419060707092283],["▁लाग",-12.419129371643066],["▁معروف",-12.419142723083496],["▁პრო",-12.419142723083496],["тва",-12.419143676757812],["חי",-12.419272422790527],["▁(„",-12.419307708740234],["niti",-12.4193115234375],["rzu",-12.41932201385498],["▁هوا",-12.41933822631836],["▁pizza",-12.419361114501951],["▁šajā",-12.419390678405762],["▁жоғары",-12.419414520263672],["どんな",-12.419415473937988],["▁Two",-12.419487953186035],["▁طی",-12.4194917678833],["▁ደረጃ",-12.41950798034668],["▁charakter",-12.41951847076416],["▁turis
ti",-12.419628143310549],["▁Peru",-12.419637680053713],["▁slike",-12.419651985168455],["▁թվականի",-12.41970157623291],["▁Mange",-12.419772148132324],["▁wal",-12.419776916503906],["▁XVI",-12.419787406921388],["▁ocen",-12.419808387756348],["big",-12.419881820678713],["▁soft",-12.419906616210938],["▁annen",-12.419910430908203],["бел",-12.419913291931152],["▁kurā",-12.419919967651367],["租",-12.419937133789062],["▁පි",-12.419962882995604],["▁FI",-12.419974327087402],["▁esimerkiksi",-12.420044898986816],["あります",-12.420092582702637],["▁trò",-12.420104026794434],["ራት",-12.420130729675291],["▁často",-12.42018699645996],["▁Però",-12.420196533203123],["ász",-12.420217514038086],["▁ман",-12.42022705078125],["▁الرئيسية",-12.420265197753906],["▁vann",-12.420281410217283],["Foto",-12.420299530029297],["mála",-12.420331001281738],["▁prepara",-12.42035675048828],["▁abandon",-12.420384407043455],["▁játék",-12.42042350769043],["星期",-12.42043399810791],["=1",-12.42046356201172],["燈",-12.420472145080566],["твер",-12.42048454284668],["roga",-12.420488357543944],["▁메",-12.420488357543944],["▁segít",-12.42049503326416],["णार",-12.420537948608398],["▁pues",-12.42058563232422],["kona",-12.420588493347168],["jut",-12.42064094543457],["▁moito",-12.420641899108888],["弄",-12.420642852783203],["▁ప్రత్యేక",-12.420676231384276],["▁άλλα",-12.420677185058594],["▁néhány",-12.420680046081545],["▁Trường",-12.420687675476074],["ಿತ್ತು",-12.420815467834473],["▁सू",-12.420859336853027],["▁الأحد",-12.420883178710938],["ুর",-12.420927047729492],["▁Thailand",-12.420927047729492],["▁했다",-12.420953750610352],["▁write",-12.420966148376465],["▁υπ",-12.420988082885742],["▁Messi",-12.421000480651855],["▁በእ",-12.421016693115234],["stavlja",-12.421124458312988],["ματα",-12.421130180358888],["ICO",-12.421142578125],["規定",-12.421159744262695],["▁drob",-12.421260833740234],["altro",-12.421268463134766],["▁atmosfer",-12.421302795410156],["▁vulputate",-12.421308517456056],["▁науки",-12.421317100524902],["▁شهرستان",-12.421360969543455],["လုပ်",-12.42138385772705],["īva",-12.421401023864746],["διο",-12.42142391204834],["▁കേന്ദ്ര",-12.421427726745604],["▁『",-12.421448707580566],["▁Ran",-12.42145824432373],["▁podob",-12.421459197998049],["פתח",-12.42147731781006],["▁vastaan",-12.421512603759766],["売",-12.42158317565918],["▁Гра",-12.421584129333496],["▁okres",-12.421598434448242],["券",-12.42161750793457],["telli",-12.421629905700684],["▁japon",-12.421655654907228],["dım",-12.421701431274414],["▁қажет",-12.421737670898438],["▁ځان",-12.421759605407717],["액",-12.4218111038208],["buch",-12.421884536743164],["טים",-12.42189121246338],["тира",-12.421894073486328],["▁Castell",-12.42190933227539],["hing",-12.421916961669922],["▁milioane",-12.421957015991213],["▁vær",-12.421972274780272],["▁भि",-12.422003746032717],["▁кафе",-12.422035217285156],["▁hampir",-12.422046661376951],["▁खोज",-12.422101974487305],["рч",-12.42211627960205],["фик",-12.422121047973633],["▁Bandar",-12.422163963317873],["pora",-12.42216968536377],["топ",-12.422185897827148],["šil",-12.422196388244627],["▁fenomen",-12.42222785949707],["沿",-12.422271728515623],["▁المر",-12.42227840423584],["ξι",-12.422333717346191],["▁toliko",-12.422368049621582],["▁(“",-12.422372817993164],["▁vina",-12.422403335571287],["▁->",-12.422409057617188],["ఎస్",-12.422420501708984],["ENA",-12.42242431640625],["گي",-12.422460556030272],["žas",-12.422471046447754],["ləş",-12.422527313232422],["▁publi",-12.42253875732422],["▁Search",-12.422579765319824],["μον",-12.422582626342772],["▁रुपये",-12.422582626342772],["距離",-1
2.422589302062988],["▁naszym",-12.422599792480469],["ბე",-12.422624588012695],["laid",-12.42264175415039],["▁internetu",-12.422672271728516],["▁geval",-12.42271614074707],["ವರ್",-12.422771453857422],["第四",-12.422805786132812],["davo",-12.422821998596191],["тия",-12.422858238220217],["தே",-12.422860145568848],["▁Moto",-12.422913551330566],["▁Nh",-12.42294216156006],["xis",-12.422995567321776],["▁готов",-12.423008918762209],["ונ",-12.42307472229004],["▁சொல்ல",-12.423086166381836],["▁PL",-12.423135757446287],["АП",-12.423137664794922],["규",-12.423137664794922],["▁आयोग",-12.423138618469238],["顺",-12.423144340515137],["▁ситуации",-12.42320728302002],["▁Hamburg",-12.423223495483398],["▁ചെ",-12.423235893249512],["作出",-12.423303604125977],["▁основ",-12.42331314086914],["▁속",-12.423338890075684],["▁rambut",-12.423365592956545],["▁moderna",-12.42337131500244],["▁Arm",-12.423439025878906],["▁network",-12.423477172851562],["ప్ర",-12.423508644104004],["дах",-12.42350959777832],["▁locul",-12.423556327819824],["▁Hab",-12.423598289489746],["ως",-12.42362117767334],["áll",-12.423725128173828],["qat",-12.423739433288574],["▁loppu",-12.423775672912598],["▁skoro",-12.423779487609863],["▁þarf",-12.423818588256836],["▁ngo",-12.42383098602295],["もっと",-12.423831939697266],["▁máximo",-12.42384147644043],["▁biển",-12.42385482788086],["Italia",-12.423863410949709],["rych",-12.42390251159668],["▁කු",-12.423906326293944],["▁Norske",-12.423938751220703],["▁cl",-12.4239501953125],["▁کئی",-12.42397403717041],["▁академ",-12.423993110656738],["▁आर",-12.424041748046877],["無料",-12.42408847808838],["esso",-12.424089431762695],["棒",-12.42410945892334],["▁عامل",-12.424195289611816],["цыю",-12.424237251281738],["▁Ngh",-12.424243927001951],["▁كا",-12.424254417419434],["iller",-12.424262046813965],["园",-12.424311637878418],["▁συμ",-12.424324035644531],["▁opinion",-12.42432975769043],["เดือน",-12.424346923828123],["aş",-12.42442798614502],["▁pata",-12.424450874328612],["nei",-12.424474716186523],["▁сьогодні",-12.424474716186523],["▁гишүүн",-12.42447566986084],["▁cuba",-12.424519538879396],["▁tą",-12.42453384399414],["▁fii",-12.424569129943848],["Pri",-12.42457675933838],["▁spørsmål",-12.424649238586426],["ālo",-12.424654006958008],["▁नाव",-12.42465591430664],["▁असून",-12.424687385559082],["▁kust",-12.424720764160156],["kili",-12.424764633178713],[":06",-12.42477035522461],["▁यही",-12.424772262573242],["▁OR",-12.424798011779783],["ovu",-12.424821853637695],["لس",-12.42485237121582],["▁продаж",-12.424918174743652],["normal",-12.424955368041992],["Cu",-12.424989700317385],["▁işçi",-12.425063133239746],["kter",-12.42508029937744],["▁segment",-12.425100326538086],["▁Ông",-12.425175666809082],["▁seguito",-12.425190925598145],["▁تار",-12.425198554992676],["▁கே",-12.425198554992676],["▁Wanita",-12.42520523071289],["พัฒนา",-12.425216674804688],["▁resurs",-12.425237655639648],["ינים",-12.425250053405762],["Ber",-12.42528247833252],["▁interessa",-12.425283432006836],["dno",-12.425372123718262],["让人",-12.42540168762207],["vide",-12.42544651031494],["▁pieni",-12.425447463989258],["▁хүний",-12.425451278686523],["ස්ස",-12.425480842590332],["▁ස්",-12.425522804260254],["▁Cla",-12.425538063049316],["▁پزشکی",-12.425542831420898],["ંડ",-12.42555332183838],["▁പു",-12.42556095123291],["▁جوړ",-12.425580024719238],["قان",-12.425581932067873],["▁elə",-12.425636291503906],["JI",-12.425651550292969],["插",-12.425657272338867],["イベント",-12.425731658935549],["▁Видео",-12.425743103027344],["mple",-12.425751686096191],[":43",-12.425776481628418],["▁ڈاکٹر",-12.42581
3674926758],["▁خطاب",-12.42581844329834],["▁kaks",-12.425823211669922],["▁Sv",-12.425840377807615],["deg",-12.425907135009766],["▁رشد",-12.425944328308104],["▁telo",-12.4259614944458],["▁भाव",-12.42606258392334],["بات",-12.426071166992188],["▁Honda",-12.426076889038086],["▁sklep",-12.426166534423828],["▁заб",-12.42617130279541],["藍",-12.426173210144045],["gare",-12.426177978515623],["פן",-12.426243782043455],["bali",-12.426258087158203],["▁guida",-12.426268577575684],["tola",-12.426276206970217],["▁naš",-12.42635726928711],["ією",-12.426437377929688],["▁عالی",-12.42650032043457],["▁nakup",-12.426505088806152],["▁along",-12.426512718200684],["ுள்ளது",-12.42653465270996],["▁teatro",-12.426556587219238],["飯",-12.426629066467283],["▁কোন",-12.426654815673828],["▁جھ",-12.426660537719728],["▁creo",-12.426673889160156],["▁Велико",-12.426697731018066],["자가",-12.426727294921877],["తీ",-12.426732063293455],["ГА",-12.426764488220217],["▁LCD",-12.42677402496338],["▁dev",-12.426793098449709],["▁ნი",-12.426820755004885],["▁говорит",-12.4268217086792],["▁उपयोग",-12.426836967468262],["୭",-12.426850318908691],["▁ເຈົ້າ",-12.42685604095459],["λύ",-12.426912307739258],["tiden",-12.426965713500977],["下午",-12.426983833312988],["▁ör",-12.427000999450684],["VER",-12.427008628845217],["▁gồm",-12.42701530456543],["▁chỗ",-12.427041053771973],["ΟΥ",-12.427051544189451],["мж",-12.427062034606934],["▁Tekst",-12.427062034606934],["NU",-12.427063941955566],["▁සම",-12.427123069763184],["▁Anton",-12.427139282226562],["▁کرے",-12.427153587341309],["shwa",-12.42726230621338],["▁Lees",-12.427286148071287],["▁Mest",-12.427292823791504],["▁πρώτο",-12.427297592163086],["▁freuen",-12.427315711975098],["▁jäsen",-12.427342414855955],["čius",-12.427376747131348],["跨",-12.42738914489746],["▁cé",-12.427444458007812],["ണ്ണ",-12.427451133728027],["▁artır",-12.427453994750977],["▁Ron",-12.427461624145508],["▁ਸਿੱਖ",-12.427560806274414],["▁тор",-12.427565574645996],["▁asocia",-12.427584648132324],["▁словами",-12.427607536315918],["▁regn",-12.427621841430664],["▁ਦਿੱਤਾ",-12.427651405334473],["▁وویل",-12.427688598632812],["▁join",-12.427725791931152],["▁용",-12.427738189697266],["▁виз",-12.427838325500488],["▁Glas",-12.427857398986816],["ಂಗ",-12.427897453308104],["päivä",-12.427911758422852],["▁સી",-12.428021430969238],["դր",-12.428025245666504],["▁welke",-12.428030014038086],["お店",-12.4280424118042],["▁phản",-12.428045272827148],["悲",-12.428098678588867],[":04",-12.428101539611816],["▁percent",-12.428141593933104],["▁dünyada",-12.428155899047852],["▁strana",-12.428155899047852],["▁obec",-12.428168296813965],["ခင္",-12.428183555603027],["runt",-12.428213119506836],["бит",-12.428217887878418],["!!!!!",-12.428242683410645],["ဆံုး",-12.428247451782228],["▁However",-12.428299903869627],["▁مشر",-12.428327560424805],["▁obr",-12.4283447265625],["▁занима",-12.428359031677246],["trin",-12.428400993347168],["▁polos",-12.42842960357666],["leid",-12.428443908691406],["บี",-12.428444862365724],["сло",-12.428457260131836],["▁قطر",-12.428457260131836],["өх",-12.42848014831543],["▁Ново",-12.42848300933838],["▁нищо",-12.42848777770996],["zol",-12.428515434265137],["ിലും",-12.428540229797363],["нку",-12.42862319946289],["▁пап",-12.4286527633667],["▁kesan",-12.428672790527344],["▁Θ",-12.428688049316406],["▁круг",-12.428804397583008],["▁Anal",-12.428810119628906],["環",-12.428824424743652],["viv",-12.428874969482422],["▁enten",-12.428878784179688],["▁отношение",-12.428925514221191],["▁וגם",-12.428955078125],["▁කරන්නේ",-12.428959846496582],["▁කරනවා",-12.42899417877197
3],["▁веќе",-12.42910099029541],["lala",-12.429165840148926],["ілген",-12.42916774749756],["ତ୍ର",-12.429173469543455],["▁আছে",-12.42919921875],["▁месца",-12.42924690246582],["ệt",-12.42926025390625],["▁کيس",-12.42930793762207],["▁looks",-12.429328918457031],["▁أما",-12.429350852966309],["čných",-12.429380416870115],["ڻي",-12.429423332214355],["▁Marg",-12.429437637329102],["▁१८",-12.429441452026367],["มอง",-12.429483413696287],["▁واپس",-12.429497718811035],["▁bilde",-12.429532051086426],["▁Gay",-12.429560661315918],["▁maksimal",-12.429567337036133],["ואר",-12.42960262298584],["▁क्लिक",-12.429609298706056],["კის",-12.429627418518066],["jū",-12.429738998413086],["kosten",-12.429758071899414],["teet",-12.429824829101562],["▁opere",-12.429862022399902],["▁mirov",-12.429889678955078],["▁عل",-12.42990493774414],["▁cargo",-12.42994499206543],["▁வீ",-12.429953575134276],["дут",-12.429980278015137],["▁Spi",-12.430058479309082],["ест",-12.430098533630373],["▁الإنسان",-12.43010139465332],["မ္း",-12.430120468139648],["▁keç",-12.430124282836914],["成了",-12.430143356323242],["례",-12.430183410644531],["▁нового",-12.430195808410645],["▁ଭୁବନେଶ୍ୱର",-12.430200576782228],["▁kont",-12.430231094360352],["▁ضروری",-12.430235862731934],["▁toiminta",-12.430243492126465],["២០",-12.430253028869627],["kend",-12.430289268493652],["▁hansı",-12.430313110351562],["▁งาน",-12.430323600769045],["▁pau",-12.430365562438965],["▁kleur",-12.430368423461914],["▁посети",-12.430404663085938],["▁Wild",-12.430435180664062],["hî",-12.430444717407228],["▁неща",-12.430464744567873],["▁presa",-12.43046760559082],["որեն",-12.430529594421388],["حي",-12.43055248260498],["ongan",-12.430574417114258],["▁համ",-12.430611610412598],["▁oxu",-12.43062973022461],["Φ",-12.43063259124756],["хож",-12.430768966674805],["тым",-12.430770874023438],["ilia",-12.430784225463867],["▁თ",-12.430821418762209],["▁Београду",-12.430829048156738],["▁esetén",-12.430842399597168],["▁pengalaman",-12.43084716796875],["團隊",-12.430858612060549],["▁Την",-12.430866241455078],["▁dienas",-12.430871963500977],["ട്ടെ",-12.43088436126709],["▁کش",-12.430901527404783],["替",-12.430978775024414],["▁patent",-12.430986404418944],["▁popul",-12.431010246276855],["/2011",-12.431093215942385],["ासाठी",-12.431097030639648],["ranje",-12.431112289428713],["▁ansatte",-12.431132316589355],["เริ่ม",-12.43115234375],["▁חו",-12.43116855621338],["▁Platz",-12.431192398071287],["tale",-12.431193351745604],["▁Typ",-12.431218147277832],["▁1900",-12.431238174438477],["▁mellor",-12.431299209594728],["▁schön",-12.431337356567385],["кт",-12.43139934539795],["▁dernier",-12.431467056274414],["▁hæ",-12.431490898132324],["▁Beat",-12.43150806427002],["▁cập",-12.431560516357422],["buka",-12.431612968444824],["▁ಕರ್ನಾಟಕ",-12.431623458862305],["▁inaugura",-12.431632041931152],["▁ping",-12.43167495727539],["▁самых",-12.43171215057373],["▁herri",-12.43177890777588],["มหา",-12.43181610107422],["▁경제",-12.43183135986328],["▁значит",-12.431882858276367],["дъл",-12.43189525604248],["▁Durch",-12.431913375854492],["▁inspira",-12.43193817138672],["пові",-12.431941032409668],["▁gust",-12.431991577148438],["争",-12.432029724121094],["▁हु",-12.43203353881836],["▁centru",-12.432039260864258],["▁Jis",-12.432056427001951],["▁relat",-12.4320707321167],["▁지금",-12.43207550048828],["vka",-12.432076454162598],["▁бери",-12.432103157043455],["▁dau",-12.43211269378662],["үнө",-12.432120323181152],["▁حرکت",-12.432121276855469],["▁Lund",-12.432125091552734],["▁уби",-12.432135581970217],["▁मेरो",-12.432148933410645],["▁ह्या",-12.43216323852539]
,["▁개발",-12.4321928024292],["▁jestem",-12.432199478149414],["UI",-12.432214736938477],["▁дизайн",-12.432236671447754],["▁konkur",-12.432259559631348],["▁زیات",-12.432271003723145],["▁දැන",-12.432283401489258],["▁fro",-12.432369232177734],["▁Mehr",-12.43237590789795],["kais",-12.43241024017334],["ڪر",-12.4324369430542],["▁Tribunal",-12.432438850402832],["ētu",-12.432439804077148],["diği",-12.432443618774414],["▁מכ",-12.432472229003906],["▁ਨਿ",-12.43251132965088],["▁ngu",-12.432539939880373],["ωση",-12.432544708251951],["▁Нет",-12.432619094848633],["▁Kurdistan",-12.432661056518556],["อยาก",-12.432673454284668],["萨",-12.432699203491213],["▁۱۹",-12.432729721069336],["cate",-12.432753562927246],["载",-12.432806015014648],["ković",-12.432873725891112],["▁pravice",-12.43288516998291],["йды",-12.432905197143556],["▁ඊට",-12.432906150817873],["▁Além",-12.432942390441896],["Nya",-12.433013916015623],["▁өр",-12.433042526245115],["aż",-12.43305206298828],["tief",-12.433087348937988],["masing",-12.43309497833252],["▁एमाले",-12.433135986328123],["▁Allt",-12.433149337768556],["▁شکست",-12.433157920837402],["▁facilita",-12.4331636428833],["▁vr",-12.433235168457031],["דת",-12.433272361755373],["▁éve",-12.433302879333496],["▁neno",-12.43331527709961],["▁kaffe",-12.433362007141112],["▁KH",-12.433366775512695],["▁Bell",-12.433465003967283],["▁కాంగ్రెస్",-12.433494567871094],["▁होगी",-12.433515548706056],["-24",-12.433516502380373],["φαν",-12.43351936340332],["▁زده",-12.433571815490724],["▁trái",-12.433581352233888],["▁avy",-12.433592796325684],["खे",-12.43362522125244],["විය",-12.433627128601074],["شى",-12.433677673339844],["▁ίδιο",-12.433797836303713],["iyle",-12.433815956115724],["llisesti",-12.43382453918457],["▁دختر",-12.433879852294922],["▁sku",-12.43388557434082],["dende",-12.433899879455566],["▁સમાચાર",-12.433913230895996],["▁sûr",-12.433917999267578],["▁jaunu",-12.433919906616213],["▁направлен",-12.433979988098145],["▁تمامی",-12.434009552001951],["▁അദ്ദേഹം",-12.434051513671877],["▁države",-12.434087753295898],["▁Няма",-12.434110641479492],["▁آمد",-12.434144020080566],["▁محصولات",-12.434268951416016],["考试",-12.43431282043457],["ရွင္",-12.434378623962402],[":42",-12.434502601623535],["ପୁର",-12.43453311920166],["属于",-12.434550285339355],["▁lands",-12.434569358825684],["▁Spre",-12.434575080871582],["בת",-12.434578895568848],["意識",-12.434617042541504],["杨",-12.434621810913086],["▁vàng",-12.434633255004885],["甘",-12.434647560119627],["▁Derfor",-12.434666633605955],["▁دوسرے",-12.43467617034912],["采用",-12.434707641601562],["是一种",-12.434709548950195],["φρ",-12.434736251831056],["цю",-12.43474292755127],["▁фи",-12.43476390838623],["avad",-12.434764862060549],["ئا",-12.43476676940918],["emp",-12.434767723083496],["▁Faz",-12.434816360473633],["▁शिव",-12.434876441955566],["▁គ",-12.434942245483398],["▁modu",-12.434958457946776],["tett",-12.434980392456056],["pop",-12.435030937194824],["▁ਸਿ",-12.435067176818848],["▁추가",-12.43507194519043],["▁Мал",-12.43510627746582],["tering",-12.4351224899292],["▁ajudar",-12.435123443603516],["ধা",-12.435124397277832],["▁जाती",-12.435131072998049],["▁qadın",-12.435140609741213],["▁filmu",-12.435151100158691],["▁شمار",-12.435154914855955],["▁hatten",-12.435160636901855],["ЕТ",-12.435233116149902],["▁депутат",-12.435233116149902],["chilik",-12.435272216796877],["▁зар",-12.435293197631836],["価格",-12.435306549072266],["▁امریکہ",-12.435317039489746],["▁यात्रा",-12.43532943725586],[":47",-12.435494422912598],["ിരിക്കുന്നു",-12.435508728027344],["▁चाह",-12.43552017211914],["▁usko",-12.4355697
63183594],["上了",-12.43559741973877],["ലു",-12.435646057128906],["▁[2]",-12.435698509216309],["لات",-12.435763359069824],["rep",-12.435795783996582],["▁وئي",-12.435800552368164],["▁далеко",-12.435866355895996],["Ко",-12.435879707336426],["▁2016-",-12.435906410217283],["▁کولو",-12.43591022491455],["▁feliz",-12.435955047607422],["▁všetkých",-12.435957908630373],["傳統",-12.435962677001951],["▁société",-12.43596363067627],["чная",-12.435973167419434],["▁budu",-12.4359769821167],[":58",-12.435994148254396],["▁жылғы",-12.43600845336914],["▁punct",-12.43602180480957],["▁shk",-12.436091423034668],["▁شک",-12.43612003326416],["▁Oscar",-12.436128616333008],["ບັນດາ",-12.436214447021484],["▁hell",-12.436304092407228],["wet",-12.436373710632324],["izo",-12.436383247375488],["▁ਲੈ",-12.436395645141602],["eeritud",-12.436418533325195],["chter",-12.436476707458496],["▁muitos",-12.436514854431152],["▁zullen",-12.436514854431152],["▁барлық",-12.436524391174316],["ቢያ",-12.43657112121582],["▁המש",-12.43659210205078],["ради",-12.43659496307373],["▁unrhyw",-12.436599731445312],["ဆီ",-12.436604499816896],["▁त्यामुळे",-12.43660831451416],["▁ఉంటుంది",-12.43661117553711],["▁رسانه",-12.436641693115234],["▁høre",-12.43664836883545],["▁blandt",-12.436659812927246],["▁করেন",-12.43668270111084],["▁lång",-12.436684608459473],["▁lakukan",-12.43669891357422],["ktion",-12.436738967895508],["ដល់",-12.436739921569824],["▁хи",-12.436802864074709],["யம்",-12.436803817749023],["▁روح",-12.436823844909668],["▁mr",-12.436833381652832],["采取",-12.436834335327148],["▁mesin",-12.43687343597412],["dź",-12.43688678741455],["ન્ટ",-12.436896324157717],["ife",-12.436917304992676],["▁Tren",-12.436933517456056],["有所",-12.43694305419922],["зд",-12.436968803405762],["▁represent",-12.436982154846191],["▁Mot",-12.437016487121582],["▁hús",-12.437024116516112],["บอล",-12.43703556060791],["▁steder",-12.437047958374023],["▁сло",-12.43709659576416],["▁kelas",-12.437114715576172],["誠",-12.437137603759766],["ljeni",-12.437171936035156],["ಯಾಗಿ",-12.437171936035156],["lić",-12.437176704406738],["cine",-12.437192916870115],["ुक",-12.437203407287598],["▁Oct",-12.43720817565918],["▁ફિલ્મ",-12.437226295471191],["▁gata",-12.437232971191406],["▁shopping",-12.43726921081543],["gård",-12.437274932861328],["▁الحكومة",-12.437292098999023],["čil",-12.437321662902832],["ishga",-12.437339782714844],["sert",-12.43734073638916],["▁rij",-12.437498092651367],["▁ነ",-12.43753719329834],["▁elementum",-12.43756103515625],["zt",-12.437561988830566],["▁soon",-12.437575340270996],["▁aivan",-12.437589645385742],["▁ది",-12.437596321105955],["ोऽ",-12.437612533569336],["的好",-12.437630653381348],["▁୨୦",-12.43769645690918],["២",-12.43772029876709],["▁гэх",-12.437722206115724],["▁adat",-12.437725067138672],["▁fir",-12.437743186950684],["▁Bull",-12.437755584716797],["▁elas",-12.437779426574709],["▁inicia",-12.437837600708008],["▁करू",-12.437841415405272],["▁பதி",-12.437858581542969],["MET",-12.437884330749512],["▁betyder",-12.43788719177246],["▁ਬਾਰੇ",-12.43789005279541],["▁fili",-12.43790054321289],["плат",-12.437907218933104],["hina",-12.437909126281738],["▁interview",-12.43795394897461],["▁2561",-12.43806266784668],["▁roh",-12.438093185424805],["amat",-12.43812370300293],["▁حفظ",-12.438132286071776],["fah",-12.438138961791992],["▁проблемы",-12.438165664672852],["▁нашей",-12.438173294067385],["▁עבודה",-12.43818187713623],["cri",-12.438190460205078],["▁đáp",-12.438234329223633],["▁ಕೋ",-12.438252449035645],["▁Sele",-12.438278198242188],["्छ",-12.43830394744873],["тардың",-12.43830680847168],[
"▁ณ",-12.438345909118652],["▁bobl",-12.438361167907717],["▁tarihi",-12.438441276550291],["▁lét",-12.438443183898926],["吹",-12.438461303710938],["▁ထို",-12.438469886779783],["协议",-12.438488960266112],["ต่อไป",-12.43849754333496],["让我",-12.438498497009276],["▁viszont",-12.438529014587402],["▁meisten",-12.438535690307615],["សា",-12.43856430053711],["▁شدند",-12.438570022583008],["▁страда",-12.438570976257324],["igheid",-12.438634872436523],["čná",-12.43865966796875],["რად",-12.438693046569824],["▁poh",-12.438739776611328],["רט",-12.438783645629885],["▁दक्षिण",-12.438817977905272],["▁טו",-12.43887710571289],["▁मंत्री",-12.438912391662598],["▁nothing",-12.438925743103027],["valla",-12.43892765045166],["▁forse",-12.438993453979492],["ជាតិ",-12.43899631500244],["urre",-12.43901824951172],["нымі",-12.43906593322754],["കം",-12.439093589782717],["贵",-12.43910789489746],["刺激",-12.439139366149902],["antar",-12.439148902893066],["ager",-12.439154624938965],["才是",-12.43915557861328],["培养",-12.439159393310549],["อื่นๆ",-12.439189910888672],["▁Company",-12.439212799072266],["載",-12.43929672241211],["▁ngồi",-12.43931007385254],["стап",-12.43931770324707],["woord",-12.439346313476562],["▁определен",-12.43935203552246],["used",-12.43938159942627],["▁खास",-12.439408302307127],["▁Rep",-12.439496040344238],["▁delega",-12.4395170211792],["▁Minister",-12.439518928527832],["▁ايران",-12.439545631408691],["▁الماضي",-12.439547538757324],["III",-12.439581871032717],["▁ក៏",-12.43961238861084],["owaniu",-12.43964385986328],["ык",-12.439704895019531],["оста",-12.439800262451172],["antis",-12.439828872680664],["▁बड़ी",-12.439833641052246],["ლება",-12.43984317779541],["▁šv",-12.439844131469728],["▁Chúng",-12.439860343933104],["▁precio",-12.439860343933104],["▁Kina",-12.439916610717772],["▁tager",-12.439921379089355],["rv",-12.439963340759276],["▁secundum",-12.439979553222656],["▁нео",-12.439984321594238],["▁котором",-12.439990043640137],["▁jule",-12.440009117126465],["prze",-12.440011024475098],["▁دکتر",-12.440017700195312],["▁يك",-12.440062522888184],["ush",-12.440068244934082],["اڻ",-12.4401216506958],["nding",-12.440166473388672],["rida",-12.440177917480469],["τερ",-12.440195083618164],["տա",-12.440196990966797],["▁اعتماد",-12.440207481384276],["同样",-12.44022274017334],["一条",-12.440237998962402],["БА",-12.440247535705566],["▁letu",-12.440253257751465],["pje",-12.440265655517578],["▁tatu",-12.44028091430664],["geni",-12.440309524536133],["▁৪",-12.440324783325195],["▁Kommune",-12.440390586853027],["烟",-12.44040870666504],["腿",-12.440410614013672],["▁menja",-12.440421104431152],["mester",-12.440472602844238],["▁berdasarkan",-12.440475463867188],["▁найбільш",-12.440486907958984],["▁piscina",-12.4404935836792],["▁styr",-12.440511703491213],["န်း",-12.440550804138184],["▁nâng",-12.440553665161133],["▁ډ",-12.440563201904297],["фр",-12.440589904785156],["▁militar",-12.440607070922852],["▁अशा",-12.44061279296875],["まだ",-12.440616607666016],["▁сыр",-12.440622329711914],["andin",-12.440631866455078],["▁lạnh",-12.440672874450684],["▁چهار",-12.440675735473633],["▁vje",-12.440690994262695],["ќ",-12.440699577331545],["▁መሆኑን",-12.440706253051758],["shan",-12.44072151184082],["▁चौ",-12.440728187561035],["決",-12.440738677978516],["וּ",-12.44074821472168],["шан",-12.440775871276855],["予",-12.440858840942385],["たり",-12.44088363647461],["▁hərbi",-12.440892219543455],["ledning",-12.44093418121338],["▁Kamu",-12.440979957580566],["▁road",-12.441000938415527],["อย",-12.441007614135742],["▁පාර",-12.44102382659912],["tky",-12.441069602966309],["die
nst",-12.441080093383787],["▁інформації",-12.441104888916016],["▁పవన్",-12.44110870361328],["▁hac",-12.441117286682127],["weis",-12.441131591796877],["тру",-12.441134452819824],["ၾကား",-12.441139221191406],["MENT",-12.441140174865724],["▁राह",-12.441190719604492],["▁gek",-12.441194534301758],["နီ",-12.441200256347656],["保险",-12.44120979309082],["▁kommet",-12.4412202835083],["案件",-12.441229820251465],["тис",-12.44126319885254],["▁gau",-12.4412841796875],["▁σπ",-12.441384315490724],["rwydd",-12.441446304321287],["▁SZ",-12.441451072692873],["▁helyzet",-12.441451072692873],["▁verme",-12.441476821899414],["它们",-12.441520690917969],["▁birinci",-12.441555976867676],["iers",-12.441561698913574],["▁reco",-12.44156551361084],["▁قلب",-12.44157886505127],["▁начала",-12.44161891937256],["▁ኢ",-12.441682815551758],["是什么",-12.441686630249023],["kake",-12.441727638244627],["folk",-12.44173812866211],["แจ้ง",-12.441742897033691],["ລາຍການວິທະຍຸ",-12.441749572753906],["▁Sloveniji",-12.441757202148438],["αμε",-12.441766738891602],["▁vanskelig",-12.441826820373535],["▁ordine",-12.441832542419434],["▁ponto",-12.441853523254396],["▁करोड़",-12.441859245300291],["▁қайта",-12.441862106323242],["▁Tah",-12.441895484924316],["▁Jin",-12.441908836364746],["▁90-",-12.441912651062012],["legen",-12.441926956176758],["▁rutin",-12.442055702209473],["аб",-12.44209098815918],["▁stroj",-12.442096710205078],["▁Total",-12.44212818145752],["▁действительно",-12.442190170288086],["luğu",-12.44219970703125],["final",-12.4422025680542],["▁fos",-12.442220687866213],["▁Chí",-12.44228172302246],["▁tiel",-12.442344665527344],["cup",-12.442371368408203],["老人",-12.442378044128418],["▁Joseph",-12.442391395568848],["▁ျမန္မာ",-12.44241428375244],["ėja",-12.442459106445312],["eho",-12.44247055053711],["året",-12.442477226257324],["▁VIP",-12.442520141601562],["ುತ್ತ",-12.442535400390623],["》(",-12.442549705505373],["▁stanie",-12.442554473876951],["▁ಎಸ್",-12.44257640838623],["▁nalazi",-12.44259548187256],["ಷ್",-12.442607879638672],["чак",-12.442622184753418],["▁доби",-12.442639350891112],["▁Onun",-12.44264316558838],["тора",-12.442660331726074],["▁여성",-12.442710876464844],["іх",-12.44272804260254],["▁imajo",-12.44282341003418],["▁pronto",-12.442848205566406],["▁Oleh",-12.44285774230957],["▁correo",-12.4429349899292],["▁книга",-12.442981719970703],["依然",-12.44299030303955],["课程",-12.442995071411133],["езд",-12.442996978759766],["▁aset",-12.443004608154297],["様々な",-12.443017959594728],["▁dovolj",-12.44303035736084],["教學",-12.443041801452637],["▁rog",-12.44306755065918],["▁известно",-12.44306755065918],["▁statu",-12.44308090209961],["گى",-12.44309902191162],["▁planeta",-12.44310474395752],["▁swoim",-12.44311237335205],["▁recente",-12.443119049072266],["స్తా",-12.44312572479248],["sanız",-12.443138122558594],[":52",-12.443145751953123],["▁språk",-12.443315505981444],["▁mwy",-12.443328857421877],["▁dành",-12.44338035583496],["▁mond",-12.443394660949709],["发展的",-12.443411827087402],["▁своя",-12.443419456481934],["▁dhacay",-12.44344425201416],["▁වන්නේ",-12.44350242614746],["tės",-12.44350528717041],["▁piger",-12.443507194519045],["driv",-12.44353485107422],["▁그런",-12.443604469299316],["▁gazte",-12.443608283996582],["备",-12.44362449645996],["▁мм",-12.443645477294922],["พี่",-12.443682670593262],["ፌ",-12.443687438964844],["▁Angebot",-12.443696022033691],["▁Zdrav",-12.443727493286133],["▁האתר",-12.44372844696045],["▁завод",-12.443729400634766],["▁nằm",-12.443733215332031],["▁search",-12.443747520446776],["▁olema",-12.443751335144045],["ော",-12.443756103515623
],["▁geçen",-12.443758010864258],["▁ارز",-12.44377613067627],["▁овом",-12.443870544433594],["նել",-12.443883895874023],["이며",-12.44391918182373],["▁interest",-12.443967819213867],["▁موت",-12.443973541259766],["▁फे",-12.443975448608398],["▁terminal",-12.444013595581056],["材",-12.444014549255373],["лыг",-12.444059371948242],["▁tug",-12.444063186645508],["▁entender",-12.444101333618164],["▁Pře",-12.444141387939451],["▁coisas",-12.444147109985352],["ड़ी",-12.444169998168944],["nz",-12.44419288635254],["tjie",-12.44420337677002],["▁taču",-12.444241523742676],["▁גי",-12.444241523742676],["rilor",-12.444268226623535],["ała",-12.444296836853027],["чек",-12.444314002990724],["▁سکتے",-12.444348335266112],["▁Sale",-12.444409370422363],["▁pista",-12.444424629211426],["▁اڳ",-12.44449234008789],["АМ",-12.444499015808104],["▁силу",-12.444499969482422],["bec",-12.444535255432127],["aient",-12.44456386566162],["▁gjerne",-12.444584846496582],["▁казва",-12.444585800170898],["▁Ketika",-12.44459342956543],["ार",-12.444629669189451],["▁building",-12.444659233093262],["▁fli",-12.444690704345703],["▁kush",-12.44469928741455],["▁കാര്യ",-12.444713592529297],["▁equipe",-12.44471549987793],["▁сайте",-12.444774627685549],["▁fortsatt",-12.444801330566406],["▁հա",-12.444849967956545],["▁bald",-12.444876670837402],["▁கொண்டு",-12.444910049438477],["wiktionary",-12.4449462890625],["▁vizit",-12.44495677947998],["▁வேலை",-12.444995880126951],["▁मौत",-12.444998741149902],["mila",-12.445019721984863],["▁jullie",-12.445030212402344],["lings",-12.445054054260254],["nicy",-12.445058822631836],["팅",-12.445082664489746],["nod",-12.44508934020996],["ANT",-12.445120811462402],["▁pouze",-12.44515895843506],["▁Mie",-12.445324897766112],["▁veido",-12.445393562316896],["▁ନିଜ",-12.445394515991213],["hag",-12.445432662963867],["junk",-12.445453643798828],["▁season",-12.44545841217041],["▁ĉiuj",-12.445459365844728],["غر",-12.445487022399902],["ંદ",-12.445489883422852],["▁edad",-12.445564270019531],["tên",-12.44557285308838],["▁ھەر",-12.445574760437012],["▁nhé",-12.445586204528809],["خي",-12.445594787597656],["숙",-12.445627212524414],["ujete",-12.44563102722168],["AE",-12.44564437866211],["շտ",-12.44565486907959],["▁Obat",-12.44566249847412],["▁terlihat",-12.445674896240234],["▁fez",-12.445683479309082],["▁دشمن",-12.445686340332031],["▁käyttää",-12.445723533630373],["▁propor",-12.44572925567627],["ፀ",-12.445818901062012],["▁Bí",-12.445819854736328],["ημ",-12.4458646774292],["ნების",-12.4458646774292],["ხა",-12.445874214172363],["ဘဲ",-12.445884704589844],["uhu",-12.445889472961426],["bê",-12.445899963378906],["cami",-12.445937156677246],["ปิด",-12.445940017700195],["▁എന്നിവ",-12.445944786071776],["dava",-12.4459867477417],["yy",-12.4459867477417],["▁pokud",-12.44601821899414],["▁структур",-12.44604778289795],["▁мај",-12.44607639312744],["▁нават",-12.446104049682615],["iú",-12.446107864379885],["ပံု",-12.446110725402832],["▁Kama",-12.446123123168944],["▁4000",-12.44613552093506],["▁אנשים",-12.446146965026855],["ச்சு",-12.44615364074707],["具体",-12.446154594421388],["▁qanun",-12.446189880371094],["▁अंक",-12.446203231811523],["ଳି",-12.446206092834473],["nila",-12.446229934692385],["▁удар",-12.446246147155762],["▁abge",-12.44625186920166],["ੱਲ",-12.446266174316406],["phy",-12.446276664733888],["▁համաձայն",-12.44627857208252],["月份",-12.446287155151367],["രോ",-12.446290016174316],["▁আইন",-12.44634246826172],["xer",-12.446343421936035],["νος",-12.446343421936035],["▁comunica",-12.446345329284668],["իմ",-12.446435928344728],["▁국내",-12.446455001831056],[
"▁elég",-12.446492195129396],["▁وضعیت",-12.446524620056152],["vétel",-12.446525573730469],["alkan",-12.446537017822266],["▁mici",-12.446565628051758],["▁cyf",-12.446582794189451],["▁aplicar",-12.446595191955566],["▁×",-12.446617126464844],["▁берүү",-12.44662094116211],["▁בלי",-12.446642875671388],["уються",-12.446660041809082],["▁edasi",-12.446704864501951],["▁Woche",-12.44671630859375],["▁גל",-12.44674301147461],["幫",-12.44678783416748],["▁секретар",-12.446828842163086],["טו",-12.44685173034668],["▁alde",-12.446855545043944],["▁العربي",-12.446861267089844],["▁వై",-12.446893692016602],["schutz",-12.446919441223145],["▁വീണ്ടും",-12.44692611694336],["үм",-12.446931838989258],["▁රධාන",-12.446932792663574],["zə",-12.44693374633789],["▁nächsten",-12.446969032287598],["▁обра",-12.447004318237305],["德国",-12.44700527191162],["вага",-12.447031021118164],["▁keçir",-12.447103500366213],["▁misy",-12.44711971282959],["▁तयारी",-12.447173118591309],["rząd",-12.44727897644043],["▁മുന്",-12.447299003601074],["协会",-12.447338104248049],["▁пес",-12.447359085083008],["୮",-12.447388648986816],["зір",-12.447397232055664],["豪",-12.447466850280762],["մա",-12.447484970092772],["gah",-12.447505950927734],["рс",-12.447555541992188],["▁Gymraeg",-12.447575569152832],["▁potrivit",-12.447576522827148],["ऽ",-12.447585105895996],["▁22-",-12.44763469696045],["▁אויך",-12.447638511657717],["▁Roh",-12.44765281677246],["」「",-12.44765853881836],["▁Może",-12.44766330718994],["▁mnogo",-12.447677612304688],["etaan",-12.447696685791016],["▁rámci",-12.447704315185549],["▁enää",-12.447712898254396],["▁کیوں",-12.447784423828123],["▁Liebe",-12.447794914245604],["Dan",-12.4478120803833],["▁sommes",-12.447827339172363],["▁정도",-12.447840690612791],["▁aliqua",-12.447856903076172],["Ве",-12.44790267944336],["▁злочин",-12.447906494140623],["izma",-12.447908401489258],["▁leicht",-12.447940826416016],["ઇ",-12.448017120361328],["▁inf",-12.44802474975586],["ांचा",-12.448030471801758],["▁ವರ್ಷ",-12.448041915893556],["365",-12.448054313659668],["kunde",-12.44809341430664],["▁теми",-12.448140144348145],["针对",-12.448161125183104],["▁سک",-12.44820499420166],["ട്",-12.448220252990724],["▁lyk",-12.448259353637695],["▁اڄ",-12.4483060836792],["Em",-12.448308944702148],["▁овие",-12.448308944702148],["ഞ്ഞു",-12.44831657409668],["▁ይህን",-12.448333740234377],["▁dvi",-12.44835376739502],["▁поч",-12.448363304138184],["bd",-12.448370933532717],["▁કહે",-12.448396682739258],["▁autora",-12.448419570922852],["▁மூ",-12.448442459106444],["▁περί",-12.448480606079102],["▁Produk",-12.448554039001465],["▁keni",-12.448590278625488],["vah",-12.448606491088867],["▁words",-12.448638916015623],["ப்போ",-12.448718070983888],["▁வரை",-12.448719024658203],["▁Cras",-12.448725700378418],["кава",-12.44877815246582],["▁Administra",-12.448816299438477],["慢慢",-12.448834419250488],["▁šta",-12.448835372924805],["▁случај",-12.448845863342283],["GS",-12.448874473571776],["”",-12.448874473571776],["бут",-12.44888401031494],["stone",-12.448898315429688],["▁pure",-12.448918342590332],["masi",-12.448979377746582],["κρι",-12.449013710021973],["дөн",-12.449015617370604],["▁Argentina",-12.449027061462402],["▁поне",-12.449042320251465],["▁OM",-12.449051856994627],["ंच",-12.449063301086426],["ยก",-12.449067115783691],["ített",-12.44912052154541],["nade",-12.449127197265623],["▁эмне",-12.449129104614258],["▁bö",-12.449143409729004],[":38",-12.449172973632812],["▁случи",-12.449193954467772],["rón",-12.449363708496094],["нието",-12.449485778808594],["см",-12.449488639831545],["菌",-12.44948959350586],["ւ",-
12.449507713317873],["မ်ိဳး",-12.44952392578125],["▁নিহত",-12.4495267868042],["▁منابع",-12.449527740478516],["ηλ",-12.449543952941896],["▁helemaal",-12.449566841125488],["šia",-12.449585914611816],["ֶ",-12.44958782196045],["▁tuta",-12.449590682983398],["▁formato",-12.449599266052246],["公告",-12.449649810791016],["▁clip",-12.44968318939209],["▁werde",-12.449684143066406],["design",-12.449691772460938],["▁მათი",-12.44971752166748],["▁chy",-12.44972324371338],["лица",-12.44973087310791],["▁President",-12.449731826782228],["žno",-12.44974136352539],["dies",-12.449764251708984],["▁humana",-12.449766159057615],["lda",-12.44976806640625],["▁પે",-12.449813842773438],["وى",-12.44984245300293],["▁бири",-12.449867248535156],["▁Wars",-12.449891090393066],["မြို့",-12.449926376342772],["لارنى",-12.449963569641112],["▁belli",-12.449974060058594],["▁Buna",-12.450016021728516],["▁malu",-12.45001983642578],["▁18:00",-12.450060844421388],["▁Más",-12.450063705444336],["▁ón",-12.450078964233398],["რების",-12.450153350830078],["▁людзей",-12.450175285339355],["▁समूह",-12.450175285339355],["▁उद्योग",-12.450176239013672],["▁xarici",-12.450204849243164],["temp",-12.450222969055176],["▁थ",-12.450291633605955],["▁gereken",-12.450304985046388],["▁Ју",-12.450347900390623],["өт",-12.450353622436523],["▁jednej",-12.450387954711914],["▁YANG",-12.450390815734863],["tiem",-12.450450897216797],["రె",-12.450494766235352],["▁эдийн",-12.450499534606934],["▁vrij",-12.450514793395996],["ाम",-12.450568199157717],["▁Taifa",-12.450569152832031],["▁VER",-12.450589179992676],["▁iDNES",-12.450657844543455],["កម្ពុជា",-12.450658798217772],["▁نئی",-12.45069408416748],["ppo",-12.45072078704834],["▁Games",-12.45073699951172],["▁жаш",-12.450746536254885],["လံုး",-12.450775146484377],["▁غذا",-12.450800895690918],["अर",-12.450803756713867],["▁جنرل",-12.450836181640623],["▁питань",-12.45083999633789],["lova",-12.45094108581543],["เขียน",-12.450973510742188],["▁aps",-12.451008796691896],["lighet",-12.45103645324707],["▁ജീവിത",-12.451045989990234],["เพลง",-12.451069831848145],["▁necesita",-12.45110321044922],["▁vergi",-12.45110321044922],["▁Тем",-12.45112133026123],["вало",-12.451126098632812],["یش",-12.451144218444824],["▁baju",-12.451148986816406],["▁sədri",-12.451183319091797],["▁sicut",-12.451197624206545],["▁preocupa",-12.451239585876465],["▁slim",-12.451308250427246],["โดน",-12.451324462890623],["▁qay",-12.451370239257812],["ႏွစ္",-12.451376914978027],["τί",-12.45137882232666],["▁stöd",-12.45138931274414],["▁keçirilib",-12.451480865478516],["vine",-12.451488494873049],["▁ажлын",-12.451489448547363],["▁چنین",-12.451552391052246],["OST",-12.451558113098145],["かな",-12.45155906677246],["▁καλά",-12.451560974121094],["▁cursos",-12.451581001281738],["/10/",-12.451582908630373],["▁priča",-12.451611518859863],["▁점",-12.451611518859863],["cera",-12.451615333557127],["hó",-12.451626777648926],["▁tohto",-12.451644897460938],["อม",-12.451661109924316],["下降",-12.451745986938477],["▁verseny",-12.451763153076172],["▁VR",-12.45177173614502],["▁Wahl",-12.451799392700195],["▁Vær",-12.451824188232422],["terra",-12.451828002929688],["▁ចំនួន",-12.45182991027832],["▁value",-12.45186996459961],["llan",-12.45187759399414],["saky",-12.45188045501709],["▁уж",-12.45189094543457],["ໍ",-12.451934814453123],["▁কম",-12.451956748962402],["ombo",-12.451991081237791],["▁yine",-12.45201301574707],["▁својим",-12.452040672302246],["▁gwe",-12.45209503173828],["鬼",-12.452113151550291],["ਜਾ",-12.45212745666504],["▁սակայն",-12.452129364013672],["▁đoạn",-12.452130317687988],["▁ovdje
",-12.45213508605957],["▁Банк",-12.452136993408203],["▁ગયા",-12.452142715454102],["▁scri",-12.452143669128418],["nko",-12.452178955078123],["▁nikdy",-12.452178955078123],["ઢ",-12.452204704284668],["▁супер",-12.45224666595459],["▁oleva",-12.452316284179688],["▁trovare",-12.45237636566162],["วันนี้",-12.452437400817873],["」、「",-12.452494621276855],["ड़ा",-12.452495574951172],["nám",-12.452502250671388],["เห",-12.452521324157717],["▁ამო",-12.452561378479004],["אם",-12.452570915222168],["ኛው",-12.452637672424316],["KER",-12.452651023864746],["▁rady",-12.45266056060791],["▁தர",-12.452693939208984],["发挥",-12.4526948928833],["污染",-12.452702522277832],["嘴",-12.45272731781006],["▁сәйкес",-12.452781677246094],["▁mobiele",-12.45278263092041],["▁چگونه",-12.452786445617676],["▁tracta",-12.452788352966309],["fle",-12.45279598236084],["ùng",-12.452844619750977],["▁aeg",-12.45288372039795],["▁mieć",-12.452948570251465],["▁ਟ",-12.452958106994627],["ОТ",-12.452970504760742],["/06",-12.452999114990234],["ନ୍ଦ",-12.453022956848145],["σει",-12.453124046325684],["▁ఇప్పుడు",-12.453143119812012],["丽",-12.4531888961792],["▁المست",-12.453189849853516],["▁dage",-12.453205108642578],["▁termos",-12.45322036743164],["สาม",-12.45323371887207],["▁cunha",-12.45333480834961],["ුණු",-12.453344345092772],["ረው",-12.45343780517578],["▁특히",-12.453445434570312],["▁genoeg",-12.45345973968506],["рил",-12.453460693359377],["▁dim",-12.453463554382324],["▁wiki",-12.453479766845703],["ケ",-12.453510284423828],["ское",-12.453511238098145],["いたします",-12.453536033630373],["▁കണ്ണ",-12.45353889465332],["▁అన్ని",-12.45355224609375],["頃",-12.453569412231444],["となります",-12.453574180603027],["pila",-12.45358657836914],["וצ",-12.453625679016112],["▁լավ",-12.45366096496582],["dział",-12.45366668701172],["rys",-12.4537353515625],["可能性",-12.453742980957031],["▁vlast",-12.453813552856444],["▁Werk",-12.453821182250977],["技",-12.453821182250977],["haa",-12.453828811645508],["▁Toate",-12.453892707824709],["kimi",-12.453901290893556],["▁kids",-12.453991889953612],["▁debut",-12.4540376663208],["▁brat",-12.454050064086914],["腰",-12.454052925109863],["▁informacij",-12.454078674316406],["॒",-12.454087257385254],["各位",-12.454087257385254],["▁මහින්ද",-12.45408821105957],["UND",-12.454090118408203],["▁مراسم",-12.454100608825684],["ھر",-12.454107284545898],["▁pone",-12.45412540435791],["RY",-12.454151153564451],["ությունն",-12.454182624816896],["▁reduce",-12.45418643951416],["▁friend",-12.454215049743652],["出版",-12.454267501831056],["હા",-12.454288482666016],["ūd",-12.454326629638672],["ಜ್",-12.454338073730469],["▁ජය",-12.454371452331545],["ені",-12.45443630218506],["▁ervaring",-12.454456329345703],["dzī",-12.454460144042969],["▁въпрос",-12.454551696777344],["▁ശേഷം",-12.45455265045166],["ացել",-12.45457363128662],["▁Pun",-12.454599380493164],["ेज",-12.45462131500244],["▁مناطق",-12.45462417602539],["就會",-12.454627990722656],["▁elementos",-12.454655647277832],["▁sembla",-12.454697608947754],["▁suficiente",-12.454699516296388],["▁Petro",-12.454710006713867],["禮",-12.45474910736084],["▁mestu",-12.454764366149902],["日常",-12.454782485961914],["hane",-12.45479965209961],["..!!",-12.45485496520996],["arios",-12.454869270324709],[":54",-12.454890251159668],["▁selon",-12.45494270324707],["▁Journal",-12.454998970031738],["だろう",-12.455029487609863],["基地",-12.455048561096191],["▁jaki",-12.455083847045898],["иф",-12.455089569091797],["౦",-12.455090522766112],["▁lẽ",-12.455097198486328],["endra",-12.455117225646973],["PD",-12.455161094665527],["京",-12.455196380615234],["▁ವ",-12.45
528507232666],["▁szer",-12.455303192138672],["厅",-12.455327033996582],["kau",-12.45533561706543],["완",-12.455347061157228],["kak",-12.45534896850586],["ที่นี่",-12.455363273620604],["賞",-12.455374717712402],["▁તમારા",-12.455395698547363],["hrad",-12.455399513244627],["▁poole",-12.455425262451172],["PN",-12.455431938171388],["▁yüz",-12.455453872680664],["غۇ",-12.45549488067627],["▁ટ",-12.455516815185549],["სახ",-12.455549240112305],["ोप",-12.45564079284668],["lando",-12.455656051635742],["hör",-12.455707550048828],["▁puhu",-12.455718994140623],["▁partida",-12.45572566986084],["意识",-12.45578670501709],["の中で",-12.455792427062988],["іль",-12.455881118774414],["▁տար",-12.455899238586426],["▁Սա",-12.45590114593506],["ОК",-12.455918312072754],["ៈ",-12.455933570861816],["▁победи",-12.455947875976562],["▁Tsy",-12.455963134765623],["▁стари",-12.455982208251951],["鱼",-12.456005096435549],["▁osallistu",-12.456049919128418],["बद्दल",-12.456052780151367],["▁tard",-12.45606517791748],["lərindən",-12.45608139038086],["RES",-12.456090927124023],["legg",-12.456144332885742],["вата",-12.456337928771973],["ORA",-12.456343650817873],["▁Urban",-12.456344604492188],["▁emberek",-12.456363677978516],["▁percaya",-12.456415176391602],["▁ทํา",-12.456417083740234],["джа",-12.45644187927246],["годи",-12.456498146057127],["▁given",-12.456549644470217],["主义",-12.456558227539062],["ಸೆ",-12.456625938415527],["стей",-12.45663356781006],["001",-12.45665454864502],["ଓ",-12.45669651031494],["▁trắng",-12.456704139709473],["▁אור",-12.45671558380127],["ক্ত",-12.45671844482422],["尚",-12.456746101379396],["▁wollte",-12.456754684448242],["▁يعني",-12.456759452819824],["▁informacji",-12.456778526306152],["▁histori",-12.456802368164062],["porta",-12.45680332183838],["▁Abdullah",-12.456814765930176],["▁Hav",-12.4568510055542],["mda",-12.456872940063477],["▁ர",-12.45688533782959],["▁sukker",-12.456891059875488],["ИТ",-12.45697021484375],["чит",-12.457019805908203],["▁jopa",-12.457050323486328],["2003",-12.45706558227539],["atti",-12.457090377807615],["▁ສົ່ງ",-12.457173347473145],["iĝo",-12.457233428955078],["観",-12.457316398620604],["▁ሲሆን",-12.45732307434082],["▁Хи",-12.457353591918944],["▁spoločnosti",-12.45736026763916],["ခု",-12.457377433776855],["▁добри",-12.457387924194336],["▁موږ",-12.457392692565918],["ନ୍ତୁ",-12.457478523254396],["্ব",-12.457493782043455],["▁menneske",-12.457515716552734],["▁поль",-12.457562446594238],["▁2.1",-12.457586288452148],["skriv",-12.457587242126465],["▁rate",-12.45760726928711],["▁balat",-12.457634925842283],["流行",-12.457669258117676],["▁clienti",-12.457685470581056],["▁ама",-12.457708358764648],["jske",-12.457775115966797],["uun",-12.45779800415039],["▁להם",-12.457803726196287],["টো",-12.457807540893556],["δυ",-12.4578275680542],["▁Original",-12.457828521728516],["▁вос",-12.45786476135254],["▁гара",-12.457890510559082],["▁aveva",-12.457935333251951],["töm",-12.457942008972168],["vila",-12.458003044128418],["▁ultricies",-12.458015441894531],["▁manj",-12.458105087280272],["יסט",-12.458120346069336],["ਸ਼ਾ",-12.458120346069336],["tawa",-12.45814323425293],["▁ਪੰਜਾਬੀ",-12.45814323425293],["▁בס",-12.458147048950195],["୯",-12.458158493041992],[":39",-12.458196640014648],["ећи",-12.458196640014648],["▁prijs",-12.458255767822266],["▁치",-12.458256721496582],["▁pêk",-12.458274841308594],["ników",-12.458298683166504],["▁হতে",-12.458332061767578],[":56",-12.458340644836426],["▁በኢትዮጵያ",-12.45835018157959],["▁زا",-12.458355903625488],["ရိ",-12.458374977111816],["▁baita",-12.45838451385498],["တ်",-12.45843505859375],["ри
ца",-12.45846462249756],["▁Ե",-12.458490371704102],["▁बजे",-12.458500862121582],["ūr",-12.45850944519043],["ąż",-12.45862865447998],["אב",-12.458651542663574],["▁સ્",-12.458662986755373],["▁ଟଙ୍କା",-12.45871639251709],["▁лечение",-12.458728790283203],["▁dgn",-12.458745956420898],["▁Food",-12.458770751953123],["▁érték",-12.45877170562744],["▁percep",-12.458820343017578],["▁눈",-12.458833694458008],["▁alusta",-12.45883560180664],["ଲୁ",-12.458902359008787],["▁కల",-12.45890998840332],["高度",-12.458922386169434],["▁сэтгэл",-12.458974838256836],["▁роль",-12.458978652954102],["työ",-12.459020614624023],["▁norte",-12.459043502807615],["演出",-12.459061622619627],["סטר",-12.459067344665527],["னு",-12.45907211303711],["▁väri",-12.459088325500488],["▁1939",-12.459110260009766],["▁milit",-12.459115028381348],["▁vody",-12.459150314331056],["▁хувь",-12.459150314331056],["严重",-12.459158897399902],["လေး",-12.459223747253418],["ಘ",-12.459321022033691],["የት",-12.459330558776855],["▁רוצה",-12.459346771240234],["▁آینده",-12.459393501281738],["พูด",-12.45939826965332],["▁jasno",-12.45941162109375],["reje",-12.459424018859863],["▁კ",-12.45942497253418],["▁millioner",-12.459426879882812],["▁कं",-12.459427833557127],["цију",-12.459464073181152],["▁نی",-12.45947551727295],["▁पुणे",-12.459484100341797],["▁лиц",-12.459498405456545],["これは",-12.459568977355955],["පත්",-12.45957374572754],["▁Таким",-12.459576606750488],["scribe",-12.459625244140623],["▁مربوط",-12.459625244140623],["телен",-12.459644317626951],["地域",-12.45969009399414],[":48",-12.459697723388672],["申",-12.459707260131836],["світ",-12.459718704223633],["▁23-",-12.459759712219238],["dens",-12.459782600402832],["chem",-12.459837913513184],["tvr",-12.459877967834473],["▁මි",-12.459888458251951],["▁lug",-12.459917068481444],["▁sne",-12.45992374420166],["▁ගෙ",-12.45992374420166],["冲",-12.45993423461914],["洞",-12.459942817687988],["▁Markt",-12.459959983825684],["স্ত",-12.45998477935791],["сү",-12.46001148223877],["▁ነበር።",-12.460028648376465],["▁помощью",-12.460055351257324],["ดัง",-12.460088729858398],["ไหม",-12.460090637207031],["▁സെ",-12.460097312927246],["ώνει",-12.460100173950195],["▁atom",-12.460126876831056],["▁кі",-12.460137367248535],["▁Dương",-12.460139274597168],["▁վերջին",-12.460162162780762],["▁لهم",-12.46017360687256],["нести",-12.460189819335938],["▁שלכם",-12.460213661193848],["ସ୍ତ",-12.460220336914062],["▁1.2",-12.46022891998291],["优势",-12.4602632522583],["vz",-12.460302352905272],["озна",-12.460339546203612],["our",-12.460345268249512],["АК",-12.460350036621094],["eleza",-12.460371017456056],["deli",-12.460400581359863],["附近",-12.46043586730957],["就要",-12.460443496704102],["llään",-12.460457801818848],["عه",-12.460487365722656],["গা",-12.46049690246582],["▁instant",-12.460508346557615],["▁друге",-12.460508346557615],["▁kesk",-12.46051025390625],["leti",-12.460565567016602],["يلي",-12.460565567016602],["গুলো",-12.46058177947998],["▁רי",-12.46058750152588],["▁mại",-12.460644721984863],["ເຂົ້າ",-12.460663795471191],["▁카",-12.460695266723633],["▁held",-12.460731506347656],["တူ",-12.460790634155272],["▁demokrati",-12.46086597442627],["▁ішінде",-12.460875511169434],["▁મારી",-12.46088695526123],["miştir",-12.460887908935549],["ального",-12.460920333862305],["хі",-12.46092128753662],["▁essay",-12.460999488830566],["▁ड",-12.461008071899414],["▁тара",-12.461067199707031],["abe",-12.461106300354004],["深圳",-12.461112022399902],["▁גדול",-12.46113109588623],["IAN",-12.461159706115724],["ég",-12.461198806762695],["▁lugares",-12.461204528808594],["ños",-12.461219787
597656],["▁Nobel",-12.461221694946287],["lean",-12.461246490478516],["▁جہاں",-12.46130084991455],["ències",-12.461308479309082],["cell",-12.461312294006348],["▁ஒ",-12.46131706237793],["▁정말",-12.46131992340088],["[/",-12.46139144897461],["▁ବେ",-12.461421012878418],["▁évi",-12.461453437805176],["▁അറിയ",-12.461474418640137],["▁OD",-12.461525917053224],["dja",-12.46155071258545],["ēta",-12.461555480957031],["ዋል",-12.46158504486084],["kto",-12.461600303649902],["▁permette",-12.46161651611328],["▁мама",-12.46165943145752],["▁অনেক",-12.461687088012695],["down",-12.46169090270996],["▁편",-12.461715698242188],["▁trad",-12.461727142333984],["▁feeling",-12.461746215820312],["▁ure",-12.461831092834473],["▁اج",-12.461836814880373],["▁pocit",-12.461837768554688],["▁Sche",-12.461853981018066],["▁område",-12.461864471435549],["▁activa",-12.461870193481444],["▁gamla",-12.461874961853027],["▁האו",-12.46187686920166],["▁kitu",-12.461899757385254],["▁مكان",-12.461915016174316],["צל",-12.461942672729492],["▁района",-12.461956024169922],["▁Dzięki",-12.461958885192873],["▁Selangor",-12.461958885192873],["▁ዜና",-12.461960792541504],["สา",-12.46201515197754],["wend",-12.462026596069336],["loos",-12.462035179138184],["▁tutaj",-12.462043762207031],["▁จํากัด",-12.462048530578612],["▁parto",-12.46205997467041],["นะครับ",-12.46207046508789],["▁ambapo",-12.46208667755127],["▁нийт",-12.462113380432127],["өргө",-12.46213436126709],["▁Saudi",-12.462187767028809],["仕事",-12.46219539642334],["▁୪",-12.462240219116213],["▁পি",-12.462270736694336],["▁közel",-12.462303161621094],["лері",-12.462324142456056],["▁ذهن",-12.462367057800291],["minu",-12.462505340576172],["▁venta",-12.46254825592041],["ഹ്",-12.46255874633789],["▁Cra",-12.462579727172852],["▁clear",-12.462587356567385],["絲",-12.462596893310549],["▁мисли",-12.462614059448242],["တဲ႔",-12.462621688842772],["▁took",-12.46262264251709],["polis",-12.462640762329102],["▁geworden",-12.462645530700684],["efni",-12.46265983581543],["▁Бир",-12.462690353393556],["歷史",-12.462721824645996],["▁seinem",-12.462725639343262],["▁ორი",-12.462725639343262],["▁əl",-12.462757110595703],["σί",-12.46277904510498],["▁Conta",-12.46281909942627],["▁Fond",-12.46281909942627],["▁Rok",-12.4628324508667],["ybos",-12.462844848632812],["▁yetu",-12.462852478027344],["▁development",-12.462854385375977],["▁КО",-12.462854385375977],["戶",-12.462956428527832],["▁triste",-12.463007926940918],["inho",-12.463021278381348],["iler",-12.463109970092772],["▁kích",-12.463210105895996],["▁سزا",-12.463211059570312],["▁þjóð",-12.463239669799805],["▁ಶ",-12.463241577148438],["▁וואָס",-12.46327781677246],["▁işlər",-12.463278770446776],["▁سرمایه",-12.463279724121094],["කෝ",-12.46329116821289],["pam",-12.463297843933104],["▁شدید",-12.463315963745115],["▁isu",-12.463332176208496],["▁sería",-12.463335037231444],["▁ώρα",-12.463394165039062],["autre",-12.46343231201172],["▁pär",-12.463438987731934],["iseksi",-12.463445663452148],["▁twitter",-12.463452339172363],["▁pill",-12.463455200195312],["▁tenen",-12.463462829589844],["هار",-12.463485717773438],["▁pels",-12.463504791259766],["▁올",-12.463582992553713],["▁ټولو",-12.463607788085938],["理想",-12.463650703430176],["▁arrest",-12.463709831237791],["▁Никола",-12.463751792907717],["▁əldə",-12.463757514953612],["chá",-12.463768005371094],["▁식",-12.46376895904541],["haj",-12.463805198669434],["▁무료",-12.463809967041016],["zott",-12.46384620666504],["▁nettsteder",-12.46387004852295],["▁Museo",-12.463906288146973],["▁mấy",-12.463936805725098],["▁એટલે",-12.463936805725098],["▁Moldova",-12.463941
57409668],["கர்",-12.463945388793944],["▁Report",-12.463950157165527],["ацыі",-12.463961601257324],["▁протест",-12.464012145996094],["koj",-12.464034080505373],["гийг",-12.464040756225586],["の方",-12.464078903198242],["▁ياد",-12.464162826538086],["▁lage",-12.464163780212402],["▁middag",-12.464171409606934],["▁ҚР",-12.464173316955566],["▁sut",-12.464189529418944],["๊",-12.46420192718506],["คอ",-12.464207649230955],["▁aš",-12.464214324951172],["▁kupitia",-12.464225769042969],["▁مليون",-12.46424388885498],["cere",-12.46426773071289],["ograf",-12.464350700378418],["dę",-12.464385986328123],["ሆን",-12.46445655822754],["ნახ",-12.464482307434082],["▁вопросы",-12.464548110961914],["▁практика",-12.46456813812256],["▁október",-12.46459674835205],["tavat",-12.464609146118164],["▁някой",-12.464635848999023],["▁Presidente",-12.464654922485352],["کان",-12.464683532714844],["▁iska",-12.464696884155272],["▁êtes",-12.464698791503906],["▁maiores",-12.464778900146484],["طن",-12.464798927307127],["ັບ",-12.464844703674316],["▁handle",-12.464863777160645],["άρι",-12.464936256408691],["bid",-12.46495246887207],["മല്ല",-12.464964866638184],["▁ති",-12.464970588684082],["▁fred",-12.464982986450195],["iol",-12.46507453918457],["▁удаа",-12.465079307556152],["OZ",-12.465115547180176],["stas",-12.465166091918944],["/2010",-12.465174674987791],["रूप",-12.465208053588867],["▁Sr",-12.465219497680664],["▁difficult",-12.465219497680664],["▁säga",-12.465238571166992],["▁feugiat",-12.46525764465332],["▁وړاندې",-12.46525764465332],["ఇ",-12.465264320373535],["▁oras",-12.465298652648926],["▁manfaat",-12.46532917022705],["ถ้า",-12.465339660644531],[":08",-12.46536922454834],["cati",-12.465423583984377],["▁Hong",-12.465471267700195],["▁बढ",-12.465472221374512],["кин",-12.465482711791992],["fung",-12.465509414672852],["isuus",-12.465516090393066],["なんて",-12.465531349182127],["ANDA",-12.465534210205078],["▁اتحاد",-12.46554946899414],["nija",-12.46555519104004],["ക്കാന്",-12.465581893920898],["ούσε",-12.465590476989746],["▁маса",-12.465609550476074],["▁hind",-12.465619087219238],["簡",-12.46563720703125],["▁grein",-12.465660095214844],["ikal",-12.465712547302246],["▁Beli",-12.465777397155762],["えて",-12.465795516967772],["▁vare",-12.465838432312012],["▁ਭਾਰਤ",-12.46585464477539],["laget",-12.465863227844238],["▁школи",-12.465890884399414],["▁taarifa",-12.46591854095459],["▁خواتین",-12.465920448303224],["▁ಬೆಂಗಳೂರು",-12.465922355651855],["▁ду",-12.465924263000488],["▁financi",-12.465962409973145],["kket",-12.46597385406494],["אק",-12.46599006652832],["▁Pää",-12.46603012084961],["vili",-12.466035842895508],["▁Ew",-12.466062545776367],["▁देने",-12.466111183166504],["зон",-12.46612548828125],["รุ่น",-12.466158866882324],["▁आय",-12.466240882873535],["▁Marc",-12.466256141662598],["▁日本",-12.466257095336914],["estre",-12.466268539428713],["創新",-12.466270446777344],["▁sele",-12.466300010681152],["▁رسید",-12.466322898864746],["▁Nunc",-12.46633243560791],["چر",-12.466339111328123],["▁месте",-12.466385841369627],["▁hili",-12.466407775878906],["ьных",-12.466431617736816],["▁ጽ",-12.466552734375],["ለም",-12.466554641723633],["▁प्रवेश",-12.466588020324709],["▁kérdés",-12.466590881347656],["▁viet",-12.466593742370604],["▁החברה",-12.466617584228516],["▁그러나",-12.466629981994627],["▁dahulu",-12.466630935668944],["▁производство",-12.466678619384766],["्व",-12.46671199798584],["▁безопасности",-12.466712951660156],["▁teme",-12.466757774353027],["▁vér",-12.466912269592283],["▁propon",-12.466924667358398],["ngga",-12.466941833496094],["▁টি",-12.46694278717041],["yas
i",-12.466963768005373],["RAT",-12.467013359069824],["▁données",-12.467022895812988],["▁siap",-12.467082977294922],["աք",-12.467192649841309],["styrelsen",-12.467202186584473],["▁beautiful",-12.467242240905762],["▁हिंदी",-12.467244148254396],["roni",-12.467294692993164],["MG",-12.467342376708984],["▁температура",-12.467351913452148],["raad",-12.467446327209473],["دام",-12.467498779296877],["▁వారు",-12.467529296875],["▁പ്രവര്",-12.467589378356934],["żo",-12.46759796142578],["しかし",-12.46759796142578],["▁bergen",-12.467608451843262],["وند",-12.467612266540527],["▁मन्त्री",-12.467620849609377],["cl",-12.467638969421388],["▁पाकिस्तान",-12.467731475830078],["▁teste",-12.467799186706545],["▁komment",-12.467809677124023],["繁",-12.467853546142578],["stov",-12.467877388000488],["Ą",-12.467904090881348],["▁plaisir",-12.467904090881348],["▁предприятия",-12.467904090881348],["▁keçirilən",-12.467906951904297],["▁અહીં",-12.467923164367676],["▁rapide",-12.467928886413574],["szor",-12.467947006225586],["▁сала",-12.467978477478027],["ตลาด",-12.468034744262695],["vado",-12.468073844909668],["万円",-12.46807861328125],["があり",-12.468110084533691],["▁apart",-12.468147277832031],["▁nghiên",-12.46815299987793],["▁پور",-12.468178749084473],["gian",-12.46818733215332],["бре",-12.46823787689209],["顿",-12.468305587768556],["ຸ",-12.468324661254885],["表演",-12.468358993530272],["▁ánh",-12.468374252319336],["▁وٽ",-12.468545913696287],["▁duo",-12.468551635742188],["▁žal",-12.468564987182615],["▁kola",-12.46859359741211],["▁ਲੋਕਾਂ",-12.468600273132324],["▁berhasil",-12.468605995178224],["▁akiwa",-12.46861171722412],["tingu",-12.468616485595703],["▁maaaring",-12.46867847442627],["▁distin",-12.468684196472168],["▁జిల్లా",-12.46872615814209],["▁2011,",-12.468804359436035],["зни",-12.468817710876465],["▁Тер",-12.46882438659668],["▁ჩამო",-12.46882438659668],["実際に",-12.46883773803711],["▁başa",-12.468887329101562],["嫌",-12.46889305114746],["▁schwer",-12.468901634216309],["▁registra",-12.468950271606444],["૬",-12.468955993652344],["▁côté",-12.468976020812988],["▁svojo",-12.469008445739746],["ரம்",-12.469011306762695],["▁개인",-12.46906280517578],["▁ആദ്യ",-12.46908473968506],["▁kvalit",-12.469120025634766],["nahme",-12.469133377075195],["▁зда",-12.46914291381836],["唱",-12.46916961669922],["▁MAS",-12.469202995300291],["▁ছবি",-12.469212532043455],["▁outubro",-12.469229698181152],["▁Fair",-12.469237327575684],["▁assez",-12.469278335571287],["すれば",-12.469307899475098],["▁مجموعة",-12.469346046447754],["▁izol",-12.469351768493652],["▁szervezet",-12.469396591186523],["бя",-12.469420433044434],["▁проста",-12.469426155090332],["プロ",-12.469429969787598],["▁scar",-12.469440460205078],["▁Petr",-12.469465255737305],["▁үнэ",-12.469473838806152],["▁خا",-12.469478607177734],["გე",-12.469489097595217],["▁white",-12.469528198242188],["▁온",-12.469557762145996],["दै",-12.46957302093506],["▁Class",-12.469598770141602],["▁хат",-12.469612121582031],["▁znači",-12.469630241394045],["meg",-12.469636917114258],["ยาก",-12.46970272064209],["мек",-12.469714164733888],["▁plante",-12.469744682312012],["níci",-12.46981430053711],["髮",-12.469839096069336],["爸爸",-12.469844818115234],["▁क्यों",-12.469887733459473],["▁επίσης",-12.469903945922852],["▁іншых",-12.4699125289917],["提醒",-12.469929695129396],["▁жерде",-12.46994972229004],["▁සිටින",-12.47000503540039],["РИ",-12.470013618469238],["▁Liv",-12.470032691955566],["ringen",-12.470041275024414],["▁المللی",-12.47004222869873],["▁نت",-12.470053672790527],["▁коло",-12.470157623291016],["▁لس",-12.470163345336914],["ייַ",-12.4
70168113708496],["绝",-12.470173835754396],["ھى",-12.47019100189209],["▁Мас",-12.470245361328123],["▁unua",-12.470277786254885],["????",-12.470288276672363],["нікі",-12.470303535461426],["סל",-12.470311164855955],["▁verða",-12.47036838531494],["101",-12.47044277191162],["ваме",-12.470451354980469],["出來",-12.47046184539795],["шат",-12.470467567443848],["警察",-12.470498085021973],["宇",-12.470519065856934],["もある",-12.470528602600098],["乾",-12.470531463623049],["tando",-12.47053337097168],["asse",-12.470562934875488],["▁उनकी",-12.470571517944336],["意见",-12.47072410583496],["▁මැ",-12.470763206481934],["спа",-12.47078800201416],["▁consiste",-12.47082805633545],["▁Sloven",-12.470831871032717],["▁plano",-12.470840454101562],["女子",-12.470884323120115],["▁below",-12.470951080322266],["enge",-12.470952033996582],["ents",-12.470965385437012],["▁Kerja",-12.470985412597656],["▁appel",-12.471002578735352],["▁سوچ",-12.471027374267578],["ybė",-12.471070289611816],["▁Då",-12.47110080718994],["kost",-12.471114158630373],["背景",-12.471132278442385],["beh",-12.4711332321167],["规范",-12.471172332763672],["猫",-12.47117805480957],["มากกว่า",-12.471216201782228],["▁어떻게",-12.471221923828123],["▁еңбек",-12.47123908996582],["Dit",-12.471240043640137],["леу",-12.471240043640137],["sloven",-12.471242904663086],["▁vietas",-12.471294403076172],["ían",-12.47131633758545],["ေတြကို",-12.471322059631348],["▁kitap",-12.471343994140623],["김",-12.471370697021484],["nutí",-12.471403121948242],["▁Zagreb",-12.471433639526367],["Facebook",-12.471436500549316],["lą",-12.471437454223633],["▁senso",-12.47143840789795],["mite",-12.47148609161377],["▁trú",-12.47149658203125],["▁citi",-12.471577644348145],["heen",-12.471595764160156],["ுள்ள",-12.47164249420166],["▁časa",-12.471644401550291],["istan",-12.471646308898926],["▁gje",-12.471656799316406],["▁svim",-12.471678733825684],["▁πολλά",-12.47169303894043],["▁leider",-12.471694946289062],["roq",-12.471747398376465],["ማር",-12.471755027770996],["НУ",-12.471776008605955],["▁discussion",-12.471792221069336],["▁fini",-12.471835136413574],["▁ngoại",-12.47188663482666],["▁grupa",-12.471887588500977],["▁uyğun",-12.471890449523926],["▁حوالے",-12.471905708312988],["▁diyar",-12.47190761566162],["稳定",-12.47194004058838],["免费",-12.471952438354492],["▁Đông",-12.471963882446287],["۹",-12.47197437286377],["▁ô",-12.471989631652832],["πο",-12.47199249267578],["經過",-12.472010612487791],["fly",-12.472107887268066],["elige",-12.472119331359863],["▁neçə",-12.472161293029783],["ktan",-12.472163200378418],["▁jar",-12.47216510772705],["lân",-12.472198486328123],["▁bisnis",-12.47221565246582],["文明",-12.472216606140137],["▁ofrece",-12.472232818603516],["▁ска",-12.472234725952148],["ốt",-12.472238540649414],["vės",-12.472253799438477],["rong",-12.472259521484377],["▁består",-12.47227668762207],["▁petits",-12.472317695617676],["▁deyib",-12.472333908081056],["▁drugo",-12.472350120544434],["▁Tere",-12.47240161895752],["购",-12.472406387329102],["энд",-12.472423553466797],["душ",-12.472442626953123],["▁deal",-12.472471237182615],["kkan",-12.472481727600098],["▁turizm",-12.472526550292969],["▁oldukça",-12.472551345825195],["▁Sekolah",-12.472556114196776],["▁wiem",-12.47260856628418],["▁אותם",-12.472628593444824],["बू",-12.472631454467772],["▁живее",-12.472644805908203],["▁izle",-12.472702026367188],["▁мүмкін",-12.472712516784668],["erit",-12.47272491455078],["▁Ton",-12.472806930541992],["ယာ",-12.472825050354004],["▁Tieto",-12.472919464111328],["安心",-12.47293472290039],["безпеч",-12.47296142578125],["lug",-12.47300910949707],
["▁అంద",-12.473014831542969],["iec",-12.473105430603027],["ชน",-12.473111152648926],["▁byd",-12.473111152648926],["▁можем",-12.473122596740724],["ΡΑ",-12.47312831878662],["▁answer",-12.473179817199709],[":49",-12.473185539245604],["哦",-12.473210334777832],["▁وزیراعظم",-12.473217964172363],["▁egingo",-12.473259925842283],["▁posibil",-12.473310470581056],["ище",-12.473315238952637],["▁Nya",-12.4733304977417],["▁문제",-12.47333526611328],["▁आठ",-12.473343849182127],["larımız",-12.473370552062988],["▁Ces",-12.473397254943848],["กะ",-12.473413467407228],["▁nagyobb",-12.473432540893556],["▁نشر",-12.473433494567873],["ំ",-12.473437309265137],["查看",-12.47344207763672],["▁ciclo",-12.473533630371094],["되고",-12.473590850830078],["▁марта",-12.47360134124756],["றை",-12.47360610961914],["▁study",-12.473630905151367],["prim",-12.473636627197266],["דער",-12.473639488220217],["▁କୁ",-12.47364902496338],["▁Tư",-12.473685264587402],["▁laik",-12.473694801330566],["എം",-12.473715782165527],["報導",-12.473718643188477],["▁50-",-12.473721504211426],["tli",-12.473725318908691],["论",-12.473732948303224],["用的",-12.473841667175291],["▁සම්බන්ධ",-12.473873138427734],["ВИДЕО",-12.473883628845217],["▁взгляд",-12.473883628845217],["▁۱۳۹۷",-12.473884582519531],["▁දේවල්",-12.473895072937012],["研发",-12.47392749786377],["ჯერ",-12.473974227905272],["▁police",-12.474011421203612],["▁десет",-12.47401523590088],["gesteld",-12.474048614501951],["▁jūsų",-12.474058151245115],["▁دهید",-12.474058151245115],["▁Sei",-12.474223136901855],["्म",-12.474227905273438],["TION",-12.47422981262207],["▁icra",-12.474263191223145],["ಯೋ",-12.47429370880127],["▁ಸರ್ಕಾರ",-12.474295616149902],["▁hrani",-12.474303245544434],["▁கை",-12.474313735961914],["▁tev",-12.474346160888672],["▁Male",-12.474371910095217],["tano",-12.474397659301758],["νι",-12.474403381347656],["▁pjes",-12.474419593811035],["hali",-12.47445297241211],["เจอ",-12.474509239196776],["▁vrea",-12.474530220031738],["▁अस्पताल",-12.47454833984375],["▁menemukan",-12.4745512008667],["▁ndryshme",-12.474554061889648],["▁ёстой",-12.474555015563965],["▁предо",-12.474562644958496],["▁حين",-12.474590301513672],["▁जग",-12.47465705871582],["▁oikea",-12.474699020385742],["▁வரும்",-12.47470474243164],["▁kiire",-12.47471046447754],["mıştır",-12.474737167358398],["至今",-12.474749565124512],["një",-12.47482681274414],["▁нужда",-12.47483730316162],["▁ലോക",-12.474864959716797],["▁ಸ್",-12.474881172180176],["▁പൂ",-12.474905967712402],["▁nesu",-12.474933624267578],["▁څو",-12.47494888305664],["▁правда",-12.475018501281738],["▁bundan",-12.475028038024902],["▁جۇڭگو",-12.475030899047852],["ρια",-12.475035667419434],["୍ୟା",-12.47508716583252],["kazi",-12.47510051727295],["▁Oneindia",-12.475224494934082],["▁практически",-12.475237846374512],["▁joh",-12.475255012512209],["▁terakhir",-12.475257873535156],["▁خوبی",-12.475261688232422],["jeve",-12.475322723388672],["lè",-12.47533130645752],["vec",-12.475348472595217],["▁پای",-12.475384712219238],["▁tenemos",-12.475406646728516],["▁Prie",-12.47542953491211],["vald",-12.475455284118652],["▁qara",-12.475458145141602],["▁१३",-12.47547721862793],["บท",-12.475503921508787],["pele",-12.475613594055176],["etin",-12.475625991821287],["▁ھو",-12.475632667541504],["fta",-12.475672721862791],["▁Nagar",-12.475703239440918],["ช่อง",-12.47570514678955],["meta",-12.475733757019045],["▁dużo",-12.47573947906494],["さんの",-12.475746154785156],["▁مە",-12.475789070129396],["▁houden",-12.475797653198242],["▁uning",-12.47580337524414],["▁měl",-12.475812911987305],["laan",-12.475831985473633],["рије",-1
2.475852012634276],["ಿಕೆ",-12.475876808166504],["▁zarówno",-12.475885391235352],["▁ଏବେ",-12.47590446472168],["▁Banyak",-12.475912094116213],["▁പെ",-12.47596836090088],["▁Kosova",-12.475987434387209],["एन",-12.476001739501951],["▁רבי",-12.476004600524902],["▁секој",-12.4760160446167],["▁ଘ",-12.476019859313965],["位于",-12.476033210754396],["▁храни",-12.476059913635254],["▁želite",-12.47607707977295],["兩個",-12.476114273071287],["▁quatre",-12.476167678833008],["rico",-12.476168632507324],["▁bën",-12.47616958618164],["▁shall",-12.476179122924805],["ण्यासाठी",-12.476208686828612],["▁Salam",-12.476214408874512],["▁Nicht",-12.476222038269045],["计",-12.476228713989258],["▁biraz",-12.47623348236084],["▁ефект",-12.47626495361328],["▁උන්",-12.47629737854004],["▁необхідно",-12.476330757141112],["вел",-12.47634506225586],["vom",-12.47635269165039],["пы",-12.47636604309082],["game",-12.476381301879885],["াই",-12.47640323638916],["▁العديد",-12.476487159729004],["etek",-12.47654151916504],[":57",-12.47661018371582],["▁المجتمع",-12.47662353515625],["▁komplett",-12.476703643798828],["▁liye",-12.47670841217041],["▁Dua",-12.47673225402832],["สังคม",-12.476752281188965],["▁საქართველო",-12.476752281188965],["եյ",-12.47675323486328],["íl",-12.476761817932127],["▁ലി",-12.47678565979004],["一部",-12.47678565979004],["▁bred",-12.476795196533203],["▁giản",-12.476821899414062],["▁predstav",-12.476829528808594],["૨",-12.476859092712402],["▁marcha",-12.476913452148438],["ező",-12.476919174194336],["▁ahogy",-12.476951599121094],["полн",-12.47697925567627],["هو",-12.47703456878662],["▁pelas",-12.47712230682373],["مع",-12.477194786071776],["▁өөр",-12.477213859558104],["▁famiglia",-12.477224349975586],["举办",-12.4772367477417],["▁Sid",-12.47726058959961],["ിട",-12.477266311645508],["izare",-12.47727870941162],["▁боль",-12.477296829223633],["ाल",-12.47730827331543],["▁가능",-12.477337837219238],["▁nyheter",-12.47734832763672],["▁emot",-12.47736644744873],["研",-12.477378845214844],["cinta",-12.477409362792969],["▁अप",-12.477437019348145],["▁св",-12.47744083404541],["▁rasti",-12.477452278137209],["чук",-12.477468490600586],["▁څ",-12.477494239807127],["▁Μπ",-12.477582931518556],["дарды",-12.477594375610352],["▁urbo",-12.47763442993164],["▁gradi",-12.477649688720703],["без",-12.477669715881348],["lil",-12.477697372436523],["▁Barat",-12.477745056152344],["▁فوت",-12.477782249450684],["珠",-12.477810859680176],["▁hynny",-12.477840423583984],["▁elever",-12.477865219116213],["▁دوستان",-12.477892875671388],["▁újra",-12.477910041809082],["镇",-12.477935791015623],["主任",-12.47797679901123],["▁ասել",-12.477984428405762],["▁sebi",-12.478025436401367],["▁seul",-12.47804069519043],["leni",-12.478075981140137],["▁amelyek",-12.478105545043944],["遇",-12.478147506713867],["▁میزان",-12.47819709777832],["▁características",-12.478209495544434],["▁Hello",-12.478222846984863],["▁Ord",-12.478240966796877],["จีน",-12.478256225585938],["वन",-12.47826862335205],["punt",-12.478322982788086],["▁dager",-12.478350639343262],["bita",-12.478381156921388],["альна",-12.478410720825195],["tettiin",-12.478449821472168],["ໃຈ",-12.47846794128418],["▁impli",-12.478482246398926],["▁103",-12.478487014770508],["▁penger",-12.478495597839355],["▁моя",-12.478503227233888],["▁نگاه",-12.478538513183594],["▁produs",-12.478550910949709],["▁تحقيق",-12.478555679321287],["▁댓글",-12.478560447692873],["ించారు",-12.47856616973877],["▁море",-12.4785795211792],["mentos",-12.478615760803224],["mama",-12.478620529174805],["▁предвид",-12.47862434387207],["zing",-12.47865390777588],["מח",-12.47869
7776794434],["kje",-12.478710174560549],["▁hadlay",-12.478765487670898],["undan",-12.478789329528809],["▁ച്ച",-12.47879123687744],["boek",-12.47881317138672],["▁którego",-12.478824615478516],["tamine",-12.478841781616213],["ಳೆ",-12.478903770446776],["šas",-12.478910446166992],["hes",-12.478914260864258],["▁категор",-12.478922843933104],["watch",-12.478954315185549],["лки",-12.478960990905762],["▁كۆپ",-12.47896671295166],["▁judi",-12.478968620300291],["योग",-12.478971481323242],["▁त्यस",-12.478994369506836],["▁olid",-12.479045867919922],["▁раб",-12.479056358337402],["haha",-12.479120254516602],["▁encontro",-12.479134559631348],["ரிய",-12.47915267944336],["▁reti",-12.479153633117676],["ässä",-12.479175567626951],["▁قىلىپ",-12.479243278503418],["▁ಸಿನಿಮಾ",-12.479275703430176],["SAN",-12.47937297821045],["▁टी",-12.479402542114258],["▁condition",-12.479411125183104],["yv",-12.479429244995115],["رغ",-12.479436874389648],["▁Zum",-12.479450225830078],["riti",-12.479461669921877],["電視",-12.479520797729492],["▁جاري",-12.4795503616333],["លោក",-12.479551315307615],["▁impo",-12.479597091674805],["zini",-12.47969913482666],["▁gebruiken",-12.479703903198242],["maha",-12.479755401611328],["▁Vu",-12.479758262634276],["▁pasur",-12.479779243469238],["λες",-12.479793548583984],["▁ځ",-12.47984790802002],["íocht",-12.4798583984375],["Ъ",-12.479924201965332],["publik",-12.479978561401367],["▁razem",-12.480029106140137],["guru",-12.480055809020996],["结合",-12.480085372924805],["لب",-12.480101585388184],["▁Vers",-12.480124473571776],["सर",-12.480144500732422],["▁دەپ",-12.480181694030762],["чини",-12.480213165283203],["беж",-12.480228424072266],["▁Đà",-12.480242729187012],["▁Lietuva",-12.480253219604492],["town",-12.480263710021973],["▁domina",-12.480271339416504],["▁hate",-12.480307579040527],["нга",-12.480429649353027],["דן",-12.480453491210938],["▁kutokana",-12.480511665344238],["▁종",-12.480541229248049],["▁studenti",-12.48055934906006],["▁woon",-12.480560302734377],["▁کشمیر",-12.480573654174805],["ацију",-12.48058032989502],["▁principais",-12.480642318725586],["▁Deutsche",-12.480745315551758],["▁меч",-12.48074722290039],["▁documento",-12.480770111083984],["që",-12.480772018432615],["▁Sabi",-12.480780601501465],["នី",-12.480820655822754],["▁hehe",-12.48082160949707],["討論",-12.480823516845703],["cens",-12.48084831237793],["sınız",-12.480879783630373],["▁standart",-12.480914115905762],["Ter",-12.480926513671877],["minister",-12.481109619140623],["ziv",-12.481157302856444],["เฟ",-12.481161117553713],["▁хүмүүс",-12.481161117553713],["反映",-12.481164932250977],["▁мөн",-12.48116683959961],["ымен",-12.481185913085938],["▁kyl",-12.481201171875],["▁قطع",-12.481220245361328],["▁ձեռք",-12.481241226196287],["▁omnibus",-12.481261253356934],["▁chồng",-12.481271743774414],["▁nemá",-12.481294631958008],["گل",-12.481304168701172],["esha",-12.481310844421388],["回到",-12.481311798095703],["▁داستان",-12.48132038116455],["fum",-12.481403350830078],["▁bela",-12.481440544128418],["施工",-12.481443405151367],["▁baga",-12.481471061706545],["▁ሀ",-12.48147678375244],["तात",-12.481477737426758],["▁الوطني",-12.481510162353516],["▁based",-12.48151397705078],["▁muaj",-12.48153591156006],["▁જા",-12.481559753417969],["▁Ann",-12.481569290161133],["agan",-12.48157024383545],["လီ",-12.481633186340332],["liai",-12.48166561126709],["գր",-12.481693267822266],["▁کردم",-12.48173713684082],["▁hoi",-12.481740951538086],["▁Bud",-12.481794357299805],["biera",-12.48188018798828],["▁Toshkent",-12.481913566589355],["ITI",-12.48192310333252],["▁meinen",-12.4819364547
7295],["▁Бор",-12.481937408447266],["▁defensa",-12.481978416442873],["▁Fir",-12.481996536254885],["▁ženy",-12.482019424438477],["társ",-12.482048034667969],["▁Тут",-12.48210334777832],["▁produto",-12.482147216796877],["▁ඔබට",-12.482170104980469],["▁כתב",-12.48222541809082],["▁તેની",-12.48223114013672],["วาง",-12.482298851013184],["tøj",-12.482364654541016],["▁yalnız",-12.482406616210938],["ਹਿ",-12.482436180114746],["有一个",-12.482460021972656],["ចិត្ត",-12.482477188110352],["είο",-12.482501029968262],["อี",-12.48252773284912],["▁Dies",-12.482549667358398],["▁ေျပာ",-12.48256778717041],["ふ",-12.482573509216309],["▁ජනාධිපති",-12.482580184936523],["▁kenyataan",-12.482584953308104],["▁væri",-12.482588768005373],["tende",-12.482606887817385],["変更",-12.482606887817385],["umą",-12.482610702514648],["▁арасында",-12.482611656188965],["▁ș",-12.48266315460205],["ුම්",-12.482666969299316],["▁క్",-12.482688903808594],["चर",-12.482711791992188],["▁документа",-12.482725143432615],["▁Св",-12.482772827148438],["ร้อน",-12.482789039611816],["會議",-12.48281192779541],["▁如果",-12.482826232910156],["ديد",-12.482940673828123],["áč",-12.482954025268556],["kowe",-12.482966423034668],["▁گیرد",-12.482973098754885],["saka",-12.483013153076172],["ົດ",-12.483030319213867],["样",-12.483031272888184],["скіх",-12.483078956604004],["nö",-12.483081817626951],["▁vidare",-12.483081817626951],["મે",-12.483087539672852],["mac",-12.483129501342772],["▁geeft",-12.483149528503418],["▁pénz",-12.483149528503418],["▁Ние",-12.483194351196287],["▁மே",-12.483238220214844],["▁دسته",-12.48325538635254],["▁روابط",-12.483258247375488],["▁ភ្នំពេញ",-12.483281135559082],["▁κόσμο",-12.483302116394045],["▁qayb",-12.483314514160156],["▁რო",-12.483330726623535],["▁Anders",-12.483348846435549],["▁Suomi",-12.483415603637695],["នៃ",-12.483440399169922],["▁kosten",-12.483489990234377],["▁ዕ",-12.483497619628906],["▁wilayah",-12.48351001739502],["اند",-12.483543395996094],["mpre",-12.48358154296875],["īvā",-12.4835844039917],["ჟ",-12.48362922668457],["here",-12.483661651611328],["▁trener",-12.483671188354492],["▁keras",-12.483710289001465],["ענט",-12.483717918395996],["▁विरोध",-12.483774185180664],["▁utk",-12.483887672424316],["ziona",-12.483904838562012],["薬",-12.483916282653809],["▁Wert",-12.483939170837402],["IST",-12.48394775390625],["▁කිය",-12.483953475952148],["▁valid",-12.483972549438477],["тори",-12.483989715576172],["▁ля",-12.483993530273438],["▁versch",-12.484041213989258],["▁yun",-12.484052658081056],["ՈՒ",-12.484076499938965],["▁poderá",-12.48407745361328],["bris",-12.484115600585938],["ळे",-12.48412799835205],["now",-12.48419189453125],["▁muligt",-12.484230995178224],["▁zeigt",-12.484249114990234],["वल",-12.484262466430664],["teil",-12.484309196472168],["ستر",-12.484333038330078],["▁edək",-12.484390258789062],["腳",-12.48443603515625],["మని",-12.484461784362791],["ลด",-12.484491348266602],["▁એમ",-12.484529495239258],["amy",-12.484556198120115],["zuje",-12.48458480834961],["想到",-12.484593391418455],["▁rai",-12.48465061187744],["vica",-12.484683990478516],["ੱਤ",-12.484724044799805],["▁dink",-12.48473072052002],["▁Bach",-12.484774589538574],["▁شى",-12.484787940979004],["▁Chúa",-12.484857559204102],["oza",-12.484885215759276],["▁کلی",-12.484907150268556],["უფ",-12.48491096496582],["▁महत्व",-12.484922409057615],["▁живе",-12.4849271774292],["▁menit",-12.484975814819336],["tke",-12.484976768493652],["▁quru",-12.484980583190918],["▁kawan",-12.4849853515625],["atul",-12.484991073608398],["▁respond",-12.485023498535156],["版本",-12.48503303527832],["પા",-12.48
5047340393066],["▁bomba",-12.485084533691406],["▁ብር",-12.485098838806152],["▁cert",-12.485102653503418],["hlu",-12.48516845703125],["▁ք",-12.485219955444336],["▁2005.",-12.485231399536133],["biri",-12.485268592834473],["ぜひ",-12.485272407531738],["▁କରନ୍ତୁ",-12.48528003692627],["สมาชิก",-12.485283851623535],["və",-12.485285758972168],["▁ច",-12.48529052734375],["▁عورت",-12.485321044921877],["▁algunas",-12.485404014587402],["工作的",-12.48541259765625],["print",-12.485419273376465],["tiu",-12.485469818115234],["visa",-12.485471725463867],["trum",-12.485475540161133],["▁fum",-12.48548984527588],["یلی",-12.485499382019045],["θούν",-12.485512733459473],["ились",-12.485515594482422],["πε",-12.48552417755127],["加工",-12.4855318069458],["▁anyone",-12.485539436340332],["டா",-12.485550880432127],["ັນ",-12.485576629638672],["▁गर",-12.485580444335938],["高校",-12.485590934753418],["▁حس",-12.485615730285645],["راد",-12.485652923583984],["有点",-12.485671997070312],["▁भु",-12.485688209533691],["▁రెడ్డి",-12.485719680786133],["иг",-12.485729217529297],["คง",-12.48578643798828],["▁לקבל",-12.485797882080078],["æt",-12.485848426818848],["▁serial",-12.485852241516112],["一段",-12.48586654663086],["дро",-12.485876083374023],["▁शो",-12.485892295837402],["▁ამის",-12.485902786254885],["▁walk",-12.48594570159912],["▁допомогою",-12.485952377319336],["▁تشکیل",-12.485952377319336],["▁شاهد",-12.485992431640623],["արար",-12.486029624938965],["▁homoj",-12.48603057861328],["zár",-12.486047744750977],["ક્",-12.486065864562988],["▁য",-12.486090660095217],["ไม่ใช่",-12.486127853393556],["麵",-12.486127853393556],["▁नियम",-12.48613452911377],["▁ceann",-12.486163139343262],["ەر",-12.486165046691896],["▁dö",-12.486217498779297],["▁pubblica",-12.486248016357422],["▁appro",-12.486250877380373],["▁رفع",-12.48627758026123],["▁hé",-12.486294746398926],["wart",-12.486297607421877],["шо",-12.48630142211914],["▁ਸੰ",-12.486334800720217],["▁αι",-12.486336708068848],["▁među",-12.486343383789062],["▁bhith",-12.486360549926758],["▁مەن",-12.486361503601074],["▁الط",-12.48637580871582],["▁sisä",-12.486403465270996],["biti",-12.486416816711426],["▁ನೋಡ",-12.486430168151855],["በር",-12.486459732055664],["▁село",-12.486465454101562],["nicu",-12.486482620239258],["broj",-12.486506462097168],["퍼",-12.486527442932127],["旧",-12.48657512664795],["مند",-12.486580848693848],["ారని",-12.48662567138672],["▁আমরা",-12.486629486083984],["▁lässt",-12.486634254455566],["angka",-12.486653327941896],["zno",-12.486680030822754],["▁Đồng",-12.486699104309082],["▁этому",-12.48671817779541],["▁item",-12.486756324768066],["▁управления",-12.486788749694824],["▁meminta",-12.486815452575684],["lagen",-12.486865997314451],["▁آس",-12.486908912658691],["메",-12.486913681030272],["њ",-12.487051010131836],["▁Produkte",-12.487053871154783],["▁attention",-12.487080574035645],["▁좀",-12.487153053283691],["▁pago",-12.487183570861816],["ATE",-12.487217903137209],["brá",-12.487225532531738],["4-",-12.48723030090332],["▁ûnder",-12.487256050109863],["esen",-12.487266540527344],["istus",-12.48729419708252],["▁kontraŭ",-12.487312316894531],["طل",-12.487313270568848],["幸",-12.48733139038086],["ște",-12.487339973449709],["▁kutsu",-12.487343788146973],["▁ਮਨ",-12.487363815307615],["▁Kry",-12.487371444702148],["ვას",-12.487380981445312],["▁Oer",-12.487391471862791],["વ્ય",-12.487394332885742],["▁dạng",-12.48739528656006],["▁പേര്",-12.48740005493164],["▁huvi",-12.487401008605955],["ึ",-12.487401962280272],["音乐",-12.487446784973145],["קבל",-12.487475395202637],["▁personnel",-12.487479209899902],["▁duk",-12.
48748779296875],["ছ",-12.48753547668457],["거나",-12.487548828125],["በረ",-12.48755931854248],["▁libra",-12.48758602142334],["▁štát",-12.487604141235352],["▁rég",-12.487619400024414],["▁agak",-12.487688064575195],["攻",-12.48769760131836],["▁critic",-12.48772430419922],["بية",-12.487735748291016],["nung",-12.487780570983888],["▁barre",-12.487792015075684],["▁fail",-12.487893104553224],["▁شمال",-12.48790454864502],["▁реал",-12.487909317016602],["ському",-12.487934112548828],["▁чым",-12.487994194030762],["▁۷",-12.488027572631836],["iselle",-12.488032341003418],["шим",-12.488057136535645],["▁kadın",-12.488067626953123],["ամյա",-12.48806858062744],["▁Tras",-12.488092422485352],["的一",-12.488107681274414],["น์",-12.488112449645996],["▁aprender",-12.488128662109377],["หี",-12.488158226013184],["▁успешно",-12.488201141357422],["歐",-12.488204002380373],["▁εισ",-12.488245964050291],["▁الدولي",-12.48824691772461],["ymo",-12.488247871398926],["▁жүз",-12.48827075958252],["чні",-12.488360404968262],["▁fito",-12.488364219665527],["▁Kft",-12.488420486450195],["လှ",-12.488428115844728],["有着",-12.488430976867676],["▁podpor",-12.488432884216309],["▁Menge",-12.488460540771484],["titude",-12.488465309143066],["▁koncept",-12.488497734069824],["RAM",-12.48861312866211],["▁hiểm",-12.488654136657717],["пі",-12.488656044006348],["xin",-12.48865795135498],["成长",-12.488685607910156],["рач",-12.488689422607422],["▁fizi",-12.488696098327637],["餐廳",-12.4887056350708],["▁dez",-12.4887113571167],["song",-12.488733291625977],["▁cavab",-12.48873519897461],["▁សូម",-12.488747596740724],["haf",-12.488751411437988],["▁이후",-12.488757133483888],["▁135",-12.488765716552734],["γη",-12.488770484924316],["öld",-12.48877239227295],["▁option",-12.488776206970217],["▁אף",-12.488844871520996],["항",-12.488872528076172],["▁સો",-12.4888916015625],["▁रन",-12.48892307281494],["▁ፈ",-12.488961219787598],["▁talle",-12.488974571228027],["▁dtí",-12.488977432250977],["▁teny",-12.489080429077148],["rink",-12.489086151123049],["▁called",-12.489086151123049],[".«",-12.489123344421388],["▁Té",-12.48913860321045],["شار",-12.489140510559082],["▁Pola",-12.489184379577637],["▁ಮೇ",-12.489190101623535],["▁relative",-12.489229202270508],["யு",-12.489237785339355],["▁ఇక్కడ",-12.489264488220217],["▁HI",-12.489302635192873],["لال",-12.48930835723877],["ოდა",-12.489313125610352],["ዜ",-12.489315032958984],["▁hagy",-12.4893217086792],["▁автомобил",-12.489375114440918],["▁joulu",-12.489383697509766],["valitsus",-12.489421844482422],["▁marché",-12.489425659179688],["▁청",-12.489435195922852],["zija",-12.489437103271484],["ΕΙ",-12.489508628845217],["▁Tul",-12.489542961120604],["▁ରା",-12.489574432373049],["اخ",-12.489584922790527],["▁handler",-12.489612579345703],["▁membres",-12.489614486694336],["▁capacidade",-12.489617347717283],["кси",-12.489643096923828],["ற்று",-12.489668846130373],["іт",-12.489693641662598],["kách",-12.489700317382812],["▁जु",-12.48970890045166],["▁دیگری",-12.489715576171877],["▁kwaliteit",-12.489727020263672],["పో",-12.489733695983888],["▁poly",-12.489782333374023],["▁hug",-12.489788055419922],["认真",-12.48981761932373],["▁fant",-12.489822387695312],["▁efek",-12.489832878112791],["▁ಮಾತ್ರ",-12.489837646484377],["▁disponible",-12.4898681640625],["▁Review",-12.489909172058104],["▁vesi",-12.48993968963623],["mio",-12.48998737335205],["ոք",-12.489989280700684],["หนังสือ",-12.49000358581543],["▁ماڻهن",-12.490007400512695],["▁پروگرام",-12.490010261535645],["▁सकती",-12.490023612976074],["上市",-12.490023612976074],["erie",-12.4900484085083],["▁kudu",-12.49005508
4228516],["sav",-12.490081787109377],["vő",-12.490117073059082],["▁महिना",-12.490141868591309],["ể",-12.490159034729004],["dato",-12.490214347839355],["▁поли",-12.49024772644043],["žení",-12.49026870727539],["វា",-12.490283012390137],["kust",-12.490306854248049],["▁ажиллагаа",-12.490344047546388],["▁simul",-12.490362167358398],["▁داری",-12.490368843078612],["▁ອ",-12.490449905395508],["ուկ",-12.490500450134276],["انت",-12.490581512451172],["▁lak",-12.490609169006348],["▁Ос",-12.49061393737793],["▁බ්ලොග්",-12.49068832397461],["▁ministr",-12.490713119506836],["▁chini",-12.49073600769043],["▁hoofd",-12.490772247314451],["▁айыл",-12.490830421447754],["▁6)",-12.490873336791992],["aṃ",-12.490882873535156],["▁uprav",-12.49089527130127],["▁Castro",-12.490920066833496],["ઘ",-12.49094581604004],["▁Seit",-12.49096965789795],["▁takip",-12.490983963012695],["▁пора",-12.491011619567873],["醒",-12.491036415100098],["▁Barn",-12.491049766540527],["jno",-12.491060256958008],["▁журналіст",-12.491086959838867],["▁vesz",-12.491087913513184],["▁realidade",-12.491119384765623],["tuan",-12.491129875183104],["ድር",-12.491141319274902],["VR",-12.49115753173828],["nato",-12.491175651550291],["ağa",-12.49117660522461],["iPhone",-12.491206169128418],["صد",-12.49125862121582],["▁Bul",-12.491288185119627],["▁tür",-12.491290092468262],["ยิ่ง",-12.491302490234377],["іцца",-12.491338729858398],["集團",-12.49134349822998],["▁сайті",-12.491354942321776],["▁sicurezza",-12.491363525390623],["บริษัท",-12.491393089294434],["▁hoch",-12.491408348083496],["ería",-12.491409301757812],["iros",-12.491421699523926],["▁النظام",-12.49147129058838],["▁تحقیق",-12.49147605895996],["▁previsto",-12.491511344909668],["نان",-12.491517066955566],["gub",-12.49152374267578],["▁जात",-12.49152374267578],["nsä",-12.491539001464844],["kid",-12.491551399230955],["σεων",-12.491552352905272],["▁රාජ්",-12.491552352905272],["▁ني",-12.491581916809082],["рыс",-12.491583824157717],["▁Ultra",-12.491647720336914],["idir",-12.49164867401123],["▁confe",-12.491666793823242],["ோம்",-12.491687774658203],["▁ტა",-12.491705894470217],["nąć",-12.491711616516112],["▁teema",-12.491751670837402],["▁enkele",-12.491787910461426],["gama",-12.491790771484377],["ვთ",-12.49186897277832],["▁demand",-12.491897583007812],["▁جاتی",-12.491900444030762],["▁ating",-12.492013931274414],["▁taking",-12.49202823638916],["dzē",-12.49204158782959],["博士",-12.492047309875488],["▁mawala",-12.49204921722412],["ular",-12.492064476013184],["камі",-12.492064476013184],["iranja",-12.492130279541016],["▁myself",-12.492138862609863],["AGA",-12.492158889770508],["▁кот",-12.492181777954102],["胸",-12.492194175720217],["▁اهڙي",-12.492226600646973],["hald",-12.49223804473877],["iño",-12.492259979248049],["▁այսօր",-12.49232578277588],["اۋ",-12.492341995239258],["▁biztosít",-12.492344856262209],["▁OB",-12.492376327514648],["▁ഇത",-12.492443084716797],["▁გარდა",-12.492451667785645],["goed",-12.492453575134276],["▁fans",-12.492478370666504],["▁शे",-12.492499351501465],["endus",-12.492609024047852],["لەش",-12.492650985717772],["itudine",-12.492681503295898],["▁ସରକାର",-12.49271011352539],["혼",-12.49271011352539],["иялық",-12.492714881896973],["▁ullamcorper",-12.492720603942873],["▁venenatis",-12.492720603942873],["▁Холбоо",-12.492720603942873],["▁мүмкүн",-12.492720603942873],["▁चे",-12.4927339553833],["▁වා",-12.49278163909912],["▁සී",-12.492788314819336],["▁miejsca",-12.492790222167969],["▁asian",-12.492834091186523],["▁pobo",-12.492835998535156],["▁buka",-12.492867469787598],["пада",-12.492871284484863],["bane",-12.49
2938041687012],["▁kho",-12.492938041687012],["▁teñen",-12.492944717407228],["▁اجرا",-12.492952346801758],["เคย",-12.492984771728516],["ijoje",-12.49302101135254],["▁emocion",-12.493035316467283],["hend",-12.493047714233398],["чина",-12.493091583251951],["▁kilku",-12.49310302734375],["մի",-12.493104934692385],["държа",-12.493123054504396],["▁Mode",-12.4931640625],["likult",-12.493199348449709],["▁cucina",-12.49321746826172],["▁двете",-12.493236541748049],["▁effekt",-12.493252754211426],["▁дипломат",-12.493303298950195],["lagan",-12.493327140808104],["▁ري",-12.4933443069458],["▁२५",-12.493351936340332],["广告",-12.493351936340332],["権",-12.493362426757812],["▁vorba",-12.493377685546877],["▁ngoku",-12.493401527404783],["▁직접",-12.493402481079102],["τρα",-12.493490219116213],["▁จังหวัด",-12.493494033813477],["▁разлика",-12.493496894836426],["▁Yr",-12.493504524230955],["െന്ന്",-12.493547439575195],["▁لإ",-12.493573188781738],["▁chega",-12.49357795715332],["▁vendi",-12.493592262268066],["▁창",-12.49368953704834],["▁отново",-12.493703842163086],["▁रोग",-12.493720054626465],["▁production",-12.493751525878906],["▁aplicación",-12.493760108947754],["ліз",-12.49377727508545],["▁majlis",-12.49379062652588],["110",-12.493831634521484],["▁svart",-12.493902206420898],["▁környezet",-12.493945121765137],["vyo",-12.493962287902832],["▁отчет",-12.493978500366213],["▁حڪومت",-12.49407958984375],["▁በጣም",-12.49407958984375],["▁Lé",-12.494081497192385],["▁Vladimir",-12.494094848632812],["ирования",-12.494098663330078],["▁vezi",-12.494099617004396],["ਧ",-12.494132041931152],["центр",-12.494141578674316],["▁üret",-12.494147300720217],["▁volgens",-12.494155883789062],["▁bahagian",-12.49428367614746],["▁drop",-12.494287490844728],["▁Rahmen",-12.494335174560549],["rimin",-12.494339942932127],["േണ്ട",-12.494345664978027],["▁දෙන",-12.49441909790039],["џ",-12.494437217712402],["▁Svar",-12.494457244873049],["7)",-12.494468688964844],["▁cca",-12.494547843933104],["▁ခ",-12.494584083557127],["cident",-12.494588851928713],["▁двор",-12.49462604522705],["▁Seks",-12.494633674621582],["▁مرة",-12.49464511871338],["規劃",-12.494709968566896],["▁190",-12.494715690612791],["ຈ",-12.494722366333008],["▁mart",-12.494728088378906],["▁slov",-12.494747161865234],["▁ግ",-12.494757652282717],["▁condimentum",-12.49476146697998],["▁ਤੱਕ",-12.494766235351562],["▁없습니다",-12.494769096374512],["▁môžu",-12.494780540466309],["▁düş",-12.494790077209473],["▁rẻ",-12.49482250213623],["▁geçir",-12.494832038879396],["▁passion",-12.494847297668455],["▁viva",-12.494847297668455],["▁торгов",-12.494882583618164],["-11-",-12.494898796081545],["▁duduk",-12.494902610778809],["▁olevan",-12.494937896728516],["▁1963",-12.494942665100098],["kret",-12.494996070861816],["වරයා",-12.495000839233398],["▁inkább",-12.495041847229004],["▁rossz",-12.49507999420166],["cki",-12.495088577270508],["danie",-12.495187759399414],["▁સામે",-12.495187759399414],["▁мя",-12.495201110839844],["్య",-12.495243072509766],["ப்படும்",-12.49527072906494],["▁trou",-12.495342254638672],["▁تورى",-12.49535083770752],["CN",-12.495367050170898],["isku",-12.495370864868164],["▁miljø",-12.495370864868164],["ುತ್ತಾರೆ",-12.495417594909668],["Wi",-12.495424270629885],["▁müraciət",-12.495440483093262],["likum",-12.495442390441896],["▁اجلاس",-12.49545192718506],["óra",-12.495622634887695],["いない",-12.495631217956545],["▁കാല",-12.495633125305176],["enter",-12.49565315246582],["▁löytyy",-12.495659828186035],["▁Maak",-12.495660781860352],["▁данных",-12.495677947998049],["▁bebas",-12.495723724365234],["▁español",-12.495748
51989746],["ustus",-12.495750427246094],["▁նրան",-12.49576187133789],["麗",-12.495776176452637],["▁bogat",-12.495807647705078],["shq",-12.49582576751709],["▁semi",-12.495826721191406],["▁pide",-12.495889663696287],["સ્થ",-12.49590301513672],["▁resten",-12.49593734741211],["▁udział",-12.495988845825195],["glie",-12.495999336242676],["▁oprav",-12.496030807495115],["яти",-12.496057510375977],["▁પી",-12.496058464050291],["▁setembro",-12.49612522125244],["▁ধ",-12.496177673339844],["▁minn",-12.496302604675291],["便宜",-12.496302604675291],["یون",-12.496317863464355],["▁Night",-12.496336936950684],["▁району",-12.496360778808594],["પ્ર",-12.496397018432615],["▁feu",-12.496471405029297],["waan",-12.49647331237793],["▁එකට",-12.496498107910156],["▁Flu",-12.49652862548828],["▁кут",-12.496541023254396],["▁Metro",-12.49655818939209],["▁Franco",-12.496607780456545],["▁azok",-12.496642112731934],["▁festiv",-12.496648788452148],["▁העיר",-12.496649742126465],["tiya",-12.496659278869627],["▁անց",-12.49666690826416],["▁Sé",-12.496736526489258],["істю",-12.49677562713623],["▁միջազգային",-12.496803283691406],["▁Aby",-12.496855735778809],["▁केंद्र",-12.496862411499023],["▁medlemmer",-12.496875762939451],["▁नेपालमा",-12.496899604797363],["▁سياسي",-12.496907234191896],["irim",-12.49696922302246],["рг",-12.497004508972168],["▁Gha",-12.49709701538086],["▁odnosno",-12.49714183807373],["▁propri",-12.49714183807373],["▁bedrijf",-12.49725341796875],["ેશ",-12.49726676940918],["mpia",-12.497336387634276],["▁куда",-12.497342109680176],["ાવ",-12.497349739074709],["باد",-12.49742317199707],["▁yapmak",-12.497428894042969],["rine",-12.497480392456056],["▁ինձ",-12.49748706817627],["▁società",-12.497488021850586],["▁କଥା",-12.497492790222168],["▁Rakyat",-12.497495651245115],["購物",-12.497583389282228],["▁민",-12.497603416442873],["▁nadzor",-12.497654914855955],["ají",-12.497709274291992],["TAS",-12.49771499633789],["▁ਹਾਂ",-12.497727394104004],["ჯი",-12.497762680053713],["▁ді",-12.49777603149414],["▁yakin",-12.497790336608888],["▁συνεχ",-12.497794151306152],["ৱ",-12.497815132141112],["istik",-12.497867584228516],["▁olona",-12.497876167297363],["นอก",-12.497878074645996],["ilib",-12.497894287109377],["▁плати",-12.497967720031738],["nij",-12.497980117797852],["▁بك",-12.498089790344238],["▁ravi",-12.498092651367188],["▁volum",-12.498098373413086],["ලී",-12.498106002807615],["▁neka",-12.498106956481934],["▁175",-12.498109817504885],["▁そして",-12.498116493225098],["雞",-12.498139381408691],["▁Victor",-12.498162269592283],["това",-12.49818229675293],["▁dlatego",-12.498196601867676],["▁ہماری",-12.49820041656494],["▁Santo",-12.49820613861084],["ത്തില",-12.498208999633787],["Ğ",-12.49821949005127],["▁berjaya",-12.498223304748535],["dza",-12.498250961303713],["▁107",-12.498319625854492],["皇",-12.498339653015137],["▁диск",-12.498353004455566],["thar",-12.498372077941896],["liye",-12.498393058776855],["▁TRA",-12.498455047607422],["▁โรงแรม",-12.49851894378662],["står",-12.49857234954834],["▁mollis",-12.498580932617188],["▁ա",-12.498608589172363],["▁taxa",-12.498617172241213],["▁1966",-12.498624801635742],["▁chứ",-12.498658180236816],["▁Ajo",-12.498690605163574],["▁៤",-12.498726844787598],["థ",-12.498733520507812],["臉",-12.498754501342772],["ங்கு",-12.498766899108888],["▁valla",-12.498791694641112],["▁دادن",-12.4988374710083],["តា",-12.498842239379885],["Via",-12.49885368347168],["一旦",-12.498865127563477],["ورو",-12.498927116394045],["cini",-12.498971939086914],["▁ලද",-12.49897289276123],["▁kezd",-12.498987197875977],["ຄໍາ",-12.499027252197266],["NC",-1
2.499040603637695],["▁Vall",-12.499067306518556],["ај",-12.499115943908691],["ISA",-12.499116897583008],["▁celé",-12.499122619628906],["מס",-12.499139785766602],["յուն",-12.499155044555664],["应当",-12.499170303344728],["▁زمانی",-12.499204635620115],["▁dija",-12.499207496643066],["▁IST",-12.499277114868164],["кло",-12.499303817749023],["▁қатар",-12.499309539794922],["▁کول",-12.499321937561035],["▁slags",-12.499342918395996],["▁dedica",-12.499395370483398],["ovaní",-12.499402046203612],["▁gand",-12.499404907226562],["▁ሳይ",-12.499427795410156],["▁success",-12.499494552612305],["▁свої",-12.49951457977295],["ирование",-12.499530792236328],["▁ਵਿਖੇ",-12.499536514282228],["عرض",-12.499573707580566],["šić",-12.499594688415527],["▁Sebagai",-12.499601364135742],["▁machine",-12.499642372131348],["ဟု",-12.499655723571776],["▁thú",-12.499698638916016],["▁kanak",-12.49974250793457],["▁যায়",-12.49977970123291],["შო",-12.49979019165039],["▁espe",-12.499818801879885],["ОЛ",-12.499832153320312],["ocht",-12.499847412109377],["的需求",-12.49986743927002],["▁pesar",-12.499874114990234],["prop",-12.499885559082031],["ындағы",-12.499906539916992],["ಗಾ",-12.499916076660156],["▁Law",-12.499916076660156],["dej",-12.499923706054688],["▁кө",-12.49992847442627],["ód",-12.5],["lanması",-12.50004768371582],["▁eye",-12.5000581741333],["韩国",-12.500059127807615],["embre",-12.50006103515625],["อิ",-12.50007438659668],["laha",-12.500154495239258],["▁เครื่อง",-12.50016975402832],["▁calon",-12.500174522399902],["ศาสตร์",-12.500214576721191],["▁pubblico",-12.500218391418455],["▁पाँच",-12.500218391418455],["▁suomalais",-12.500238418579102],["แฟน",-12.500256538391112],["▁públicos",-12.500266075134276],["▁wet",-12.500307083129885],["▁Lage",-12.500336647033691],["▁tota",-12.500441551208496],["ନ୍ତି",-12.500444412231444],["最低",-12.500449180603027],["▁svetu",-12.500469207763672],["වෙන්",-12.500508308410645],["ЕВ",-12.500533103942873],["▁সম্পাদক",-12.500571250915527],["▁اعتبار",-12.50057601928711],["NES",-12.500618934631348],["▁tac",-12.500618934631348],["▁Hasan",-12.50068473815918],["▁אַז",-12.500689506530762],["减少",-12.500703811645508],["שע",-12.500714302062988],["▁1956",-12.500715255737305],["ण्याची",-12.50072956085205],["▁половин",-12.500777244567873],["▁বছর",-12.50080108642578],["quet",-12.500802993774414],["细",-12.500813484191896],["探索",-12.50084114074707],["▁задач",-12.500852584838867],["▁Sum",-12.500890731811523],["▁λίγο",-12.500905990600586],["لىرىنى",-12.500933647155762],["▁poner",-12.500944137573242],["했던",-12.500968933105469],["uci",-12.500978469848633],["益",-12.50099754333496],["▁кабыл",-12.501023292541504],["▁ورزش",-12.501029014587402],["▁ruta",-12.501039505004885],["▁jijini",-12.50104808807373],["શન",-12.501065254211426],["rên",-12.501116752624512],["▁račun",-12.501132011413574],["ખા",-12.501175880432127],["▁podría",-12.50118350982666],["▁fedha",-12.501184463500977],["аюць",-12.501205444335938],["cych",-12.501216888427734],["BY",-12.501224517822266],["▁usuarios",-12.501237869262695],["▁break",-12.501286506652832],["▁Bosh",-12.50129222869873],["ন্ড",-12.50132656097412],["計畫",-12.501364707946776],["렌",-12.501373291015623],["fli",-12.501380920410156],["▁serviços",-12.50141143798828],["унда",-12.501421928405762],["ادر",-12.501421928405762],["▁njohur",-12.501439094543455],["▁इसे",-12.50146770477295],["▁आला",-12.501504898071287],["▁følgende",-12.501507759094238],["▁Esse",-12.501543998718262],["▁verdad",-12.501544952392578],["ူး",-12.501547813415527],["▁백",-12.501558303833008],["▁dispar",-12.501605033874512],["قول",-12.5016193389892
58],["حق",-12.501622200012209],["ndet",-12.501627922058104],["lighed",-12.501654624938965],["tella",-12.50165557861328],["▁БиХ",-12.501665115356444],["ท่า",-12.501686096191406],["FS",-12.501696586608888],["▁nhu",-12.501702308654783],["▁ព",-12.501714706420898],["▁складу",-12.501788139343262],["ประกอบ",-12.501815795898438],["тас",-12.501861572265623],["▁sled",-12.501870155334473],["posto",-12.501896858215332],["▁lamp",-12.501917839050291],["▁ataupun",-12.501982688903809],["▁začne",-12.501999855041504],["▁perspektiv",-12.502030372619627],["▁професор",-12.502052307128906],["▁Selle",-12.502065658569336],["▁collega",-12.502070426940918],["مبر",-12.502071380615234],["ılıb",-12.502093315124512],["▁communication",-12.502134323120115],["▁Hotell",-12.502157211303713],["opo",-12.502171516418455],["لر",-12.502205848693848],["▁Chỉ",-12.502220153808594],["▁درمیان",-12.502275466918944],["▁2010,",-12.502288818359377],["▁spet",-12.502328872680664],["изации",-12.502330780029297],["唯一",-12.50234317779541],["▁Vé",-12.502349853515623],["▁пък",-12.502354621887209],["▁mə",-12.502364158630373],["жылдын",-12.502394676208496],["▁juda",-12.502431869506836],["男子",-12.502461433410645],["▁своју",-12.502470016479492],["ዎችን",-12.502477645874023],["▁OL",-12.502495765686035],["villa",-12.50251293182373],["▁stem",-12.50262451171875],["סע",-12.502649307250977],["პი",-12.502687454223633],["зв",-12.502717971801758],["▁trag",-12.50274658203125],["▁ақпарат",-12.502764701843262],["▁показа",-12.50279712677002],["▁gelê",-12.50282382965088],["▁edildi",-12.502840995788574],["ەك",-12.502867698669434],["▁Hadi",-12.502952575683594],["▁kasuta",-12.502952575683594],["▁możliwość",-12.502959251403809],["义",-12.50298309326172],["▁Ён",-12.50301456451416],["▁lære",-12.503080368041992],["▁գլխավոր",-12.503081321716309],["الة",-12.503178596496582],["slov",-12.50318717956543],["cata",-12.503210067749023],["▁semaine",-12.503226280212402],["▁route",-12.503257751464844],["▁legger",-12.503270149230955],["については",-12.503273963928224],["▁aid",-12.503321647644045],["▁Frei",-12.503325462341309],["▁recep",-12.503400802612305],["▁acara",-12.503451347351074],["▁nebe",-12.50350570678711],["urg",-12.50351905822754],["water",-12.50355052947998],["ώσει",-12.503560066223145],["▁Poker",-12.503568649291992],["▁vitit",-12.503592491149902],["vira",-12.503632545471191],["▁Tetapi",-12.503642082214355],["ңыз",-12.503692626953123],["▁تعلیم",-12.5037841796875],["▁admit",-12.50379467010498],["▁active",-12.503808975219728],["▁wajib",-12.503829002380373],["▁mukana",-12.503862380981444],["▁118",-12.503868103027344],["▁tack",-12.503888130187988],["▁працы",-12.503896713256836],["▁uče",-12.503941535949709],["▁चाल",-12.503954887390137],["قدر",-12.503983497619627],["▁иде",-12.50399684906006],["rank",-12.504002571105955],["▁স্ব",-12.50400447845459],["▁projek",-12.504006385803224],["▁niej",-12.50400733947754],["félag",-12.504027366638184],["▁Komentar",-12.504037857055664],["圓",-12.50408172607422],["SB",-12.504085540771484],["лада",-12.504117965698242],["▁ಇದೆ",-12.504122734069824],["▁وسي",-12.504228591918944],["▁merah",-12.504247665405272],["杜",-12.504279136657717],["灯",-12.504281044006348],["лери",-12.50428581237793],["▁ល",-12.50428867340088],["菲",-12.504289627075195],["חים",-12.504294395446776],["那麼",-12.504300117492676],["▁ured",-12.504321098327637],["▁swoich",-12.504339218139648],["▁dvo",-12.504382133483888],["ulis",-12.504383087158203],["▁Baz",-12.504386901855469],["χο",-12.504388809204102],["癌",-12.50439739227295],["▁سائٹ",-12.504424095153809],["асан",-12.504432678222656],["▁WiFi"
,-12.50448513031006],["▁Lle",-12.50450038909912],["IDE",-12.504514694213867],["CM",-12.504520416259766],["就有",-12.504524230957031],["▁الدول",-12.504542350769045],["▁Itu",-12.504551887512209],["▁eraill",-12.504581451416016],["▁část",-12.50460720062256],["原本",-12.50461483001709],["ላት",-12.504618644714355],["▁Õ",-12.504687309265137],["▁школ",-12.504693984985352],["ശി",-12.504768371582031],["▁bella",-12.504769325256348],["▁express",-12.504791259765623],["▁гэты",-12.504828453063965],["▁amesema",-12.504853248596191],["▁contoh",-12.504874229431152],["▁орны",-12.504886627197266],["▁suy",-12.504890441894531],["bilidade",-12.504918098449709],["эт",-12.504935264587402],["▁भनेर",-12.504947662353516],["▁edes",-12.50498867034912],["cant",-12.504993438720703],["▁कंपनी",-12.505020141601562],["▁kuras",-12.505022048950195],["▁ફ",-12.505033493041992],["maak",-12.505047798156738],["▁қай",-12.505061149597168],["现实",-12.505069732666016],["▁금",-12.505105018615724],["▁kemur",-12.505138397216797],["▁dünyanın",-12.50514316558838],["пал",-12.505189895629885],["▁diu",-12.505200386047363],["oista",-12.505236625671388],["▁intervju",-12.50533962249756],["▁ayam",-12.505351066589355],["▁पड",-12.505374908447266],["▁ముందు",-12.50538444519043],["bouw",-12.50538730621338],["riu",-12.50540828704834],["ৃ",-12.505438804626465],["▁olin",-12.50548267364502],["▁stage",-12.505508422851562],["在线",-12.505517959594728],["▁светот",-12.505520820617676],["ലെ",-12.505578994750977],["▁ဆို",-12.50558376312256],["行業",-12.50560474395752],["▁طالبان",-12.505606651306152],["बीच",-12.505613327026367],["▁series",-12.505634307861328],["ew",-12.505645751953123],["鲁",-12.505654335021973],["▁회원",-12.505656242370604],["▁бед",-12.50566864013672],["伦",-12.505684852600098],["▁bijvoorbeeld",-12.505707740783691],["▁mientras",-12.505709648132324],["amma",-12.50577163696289],["▁Teknik",-12.50578784942627],["▁guzti",-12.505802154541016],["▁эм",-12.505834579467772],["ливи",-12.505895614624023],["аю",-12.505913734436035],["当前",-12.505940437316896],["족",-12.50595474243164],["▁gól",-12.506013870239258],["▁hvorfor",-12.506027221679688],["ías",-12.506048202514648],["ordning",-12.506078720092772],["▁dinami",-12.506093978881836],["在此",-12.506118774414062],["▁пан",-12.50612735748291],["▁रात",-12.506136894226074],["▁Jó",-12.50615692138672],["▁میدان",-12.506227493286133],["▁Kaa",-12.506234169006348],["▁egal",-12.506239891052246],["▁ਸਨ",-12.50637149810791],["iskās",-12.50638484954834],["▁τότε",-12.506397247314451],["▁१६",-12.506400108337402],["▁piedāvā",-12.506409645080566],["▁jaso",-12.506410598754885],["▁þær",-12.506425857543944],["ibili",-12.506487846374512],["▁tällä",-12.506503105163574],["ስተ",-12.50650691986084],["▁CU",-12.506509780883787],["ustu",-12.506525993347168],["▁kiinni",-12.506548881530762],["shu",-12.50656032562256],["▁lucht",-12.506599426269531],["▁Ahmed",-12.506654739379885],["▁wenye",-12.506654739379885],["經營",-12.506665229797363],["首次",-12.506665229797363],["чите",-12.506672859191896],["илась",-12.506728172302246],["▁qiladi",-12.50674819946289],["HP",-12.50676727294922],["ाया",-12.5067720413208],["▁gehad",-12.5067720413208],["宣",-12.50685691833496],["▁・",-12.506874084472656],["▁fia",-12.506880760192873],["3000",-12.506929397583008],["▁Кол",-12.506978034973145],["මට",-12.506985664367676],["▁exempel",-12.507040977478027],["וט",-12.50704860687256],["▁կարեւոր",-12.507085800170898],["▁Mano",-12.507086753845217],["▁experiência",-12.507088661193848],["ഖ",-12.507097244262695],["▁Quisque",-12.507105827331545],["▁şəkildə",-12.507107734680176],["អា",-12.507150650024
414],["▁şəhər",-12.507203102111816],["madan",-12.507206916809082],["▁fermentum",-12.507207870483398],["▁каля",-12.507210731506348],["lsen",-12.507229804992676],["▁çocuk",-12.50723648071289],["▁дунд",-12.507241249084473],["mpo",-12.507247924804688],["▁xả",-12.507247924804688],["开放",-12.50727653503418],["Со",-12.507312774658203],["เว็บ",-12.507331848144531],["▁دارم",-12.507360458374023],["▁Tech",-12.507390022277832],["▁Ради",-12.50739288330078],["▁आली",-12.507431030273438],["总统",-12.507439613342283],["▁diventa",-12.507526397705078],["Vo",-12.507548332214355],["▁असं",-12.507647514343262],["ميل",-12.507670402526855],["▁pratica",-12.50770664215088],["ORD",-12.507744789123535],["▁መካከል",-12.507773399353027],["▁винаги",-12.507789611816406],["▁iespējams",-12.507805824279783],["ດີ",-12.507813453674316],["▁จึง",-12.507820129394531],["શ્",-12.507855415344238],["▁দুই",-12.507864952087402],["ėjimo",-12.507885932922363],["uwe",-12.507915496826172],["▁quer",-12.507915496826172],["▁jord",-12.507933616638184],["▁ขอ",-12.50795555114746],["▁vài",-12.507972717285156],["▁tayo",-12.50798797607422],["▁zło",-12.508010864257812],["mpf",-12.508049964904783],["▁kvinde",-12.508060455322266],["šť",-12.508069038391112],["▁Dub",-12.50808334350586],["gging",-12.508115768432615],["▁truc",-12.508118629455566],["vast",-12.508126258850098],["liwa",-12.508132934570312],["▁wanted",-12.508138656616213],["ылды",-12.508140563964844],["▁nasi",-12.508185386657717],["▁کوم",-12.508203506469728],["▁사이트",-12.50821304321289],["后来",-12.5082426071167],["▁sten",-12.508278846740724],["veik",-12.508308410644531],["बे",-12.508315086364746],["▁punika",-12.508341789245604],["▁posisi",-12.5083589553833],["▁Буд",-12.508367538452148],["봉",-12.508405685424805],["▁Cruz",-12.5084228515625],["יפה",-12.508437156677246],["▁वीडियो",-12.508463859558104],["▁mans",-12.50849437713623],["ił",-12.508536338806152],["▁האבן",-12.508539199829102],["▁111",-12.50856590270996],["▁عشق",-12.508573532104492],["▁Logo",-12.508621215820312],["▁resmi",-12.50866413116455],["воль",-12.5086669921875],["▁έχουμε",-12.508679389953612],["▁tweede",-12.50868320465088],["щих",-12.508734703063965],["▁ТА",-12.508739471435549],["を見て",-12.508752822875977],["sdag",-12.508784294128418],["ությունները",-12.50880241394043],["αζ",-12.508808135986328],["▁przeciw",-12.508886337280272],["▁ideologi",-12.508906364440918],["кус",-12.508919715881348],["▁detal",-12.50892448425293],["▁شاهه",-12.508929252624512],["▁puoli",-12.509004592895508],["▁acabar",-12.509057998657228],["监管",-12.50908374786377],["▁Rasulullah",-12.509156227111816],["▁kultura",-12.50915813446045],["▁stara",-12.509163856506348],["▁enne",-12.509207725524902],["▁bár",-12.509218215942385],["▁кер",-12.509220123291016],["▁livs",-12.509228706359863],["▁rajon",-12.50924301147461],["lette",-12.509248733520508],["blick",-12.50925064086914],["▁кас",-12.50930881500244],["ești",-12.509381294250488],["▁сон",-12.509406089782717],["struct",-12.509421348571776],["▁причина",-12.509428977966309],["▁FB",-12.509451866149902],["▁pc",-12.509490966796877],["▁yhteis",-12.509495735168455],["▁rete",-12.50952434539795],["▁درس",-12.50953197479248],["צא",-12.509538650512695],["cookies",-12.509561538696287],["▁இருக்க",-12.509572982788086],["ρού",-12.50959300994873],["惊",-12.509608268737791],["▁내용",-12.509624481201172],["▁Polisi",-12.509631156921388],["▁bygge",-12.509647369384766],["▁celebrar",-12.509666442871094],["▁pohod",-12.509669303894045],["▁तुम",-12.50969409942627],["现代",-12.509725570678713],["indo",-12.509737014770508],["▁ඇය",-12.509758949279783],["ppel",-12.
509803771972656],["▁identifik",-12.509833335876465],["▁bazar",-12.509865760803224],["▁26-",-12.509879112243652],["▁ଅଧିକ",-12.509881019592283],["icu",-12.509902954101562],["ety",-12.509963989257812],["abu",-12.509965896606444],["▁szükséges",-12.509991645812988],["▁object",-12.510004997253418],["▁אתר",-12.510075569152832],["▁саны",-12.510091781616213],["▁رپورٹ",-12.510098457336426],["lki",-12.51015281677246],["▁изјави",-12.510223388671877],["gående",-12.51024055480957],["▁валют",-12.510260581970217],["द्र",-12.510272979736328],["▁uči",-12.510275840759276],["cznego",-12.51028537750244],["▁эч",-12.510308265686035],["şık",-12.510331153869627],["▁Uu",-12.51039218902588],["ڏ",-12.510395050048828],["▁ອາ",-12.510405540466309],["归",-12.510440826416016],["nsko",-12.510443687438965],["▁კვ",-12.510483741760254],["餘",-12.510504722595217],["▁whether",-12.51053524017334],["▁पक्राउ",-12.51053524017334],["▁асуудал",-12.510540008544922],["ონი",-12.510580062866213],["▁former",-12.510601043701172],["▁hạ",-12.51063346862793],["▁பெண்",-12.510640144348145],["▁hieman",-12.510679244995115],["▁يىل",-12.510770797729492],["তার",-12.510790824890137],["isma",-12.51079559326172],["▁semble",-12.510798454284668],["▁හොඳ",-12.510838508605955],["▁విడుదల",-12.510847091674805],["▁Instituto",-12.510857582092283],["ług",-12.510921478271484],["▁publice",-12.510936737060549],["▁санаа",-12.51099681854248],["▁Kosovë",-12.511014938354492],["▁цін",-12.511018753051758],["▁सर्",-12.511022567749023],["ccia",-12.5110502243042],["▁контроль",-12.511100769042969],["▁adapta",-12.511138916015623],["▁prosent",-12.511165618896484],["job",-12.51119613647461],["сад",-12.511211395263672],["▁پښتو",-12.511229515075684],["▁වලින්",-12.511232376098633],["▁Uku",-12.51123332977295],["▁teve",-12.511238098144531],["▁alimentos",-12.511247634887695],["▁olunmuş",-12.51125717163086],["▁borg",-12.511280059814451],["sant",-12.511333465576172],["▁controle",-12.51134204864502],["▁hear",-12.511371612548828],["▁सुरू",-12.51137924194336],["▁ismer",-12.511422157287598],["▁sieht",-12.511436462402344],["tiques",-12.511441230773926],["▁έως",-12.511520385742188],["▁megfelelő",-12.511566162109377],["▁kapit",-12.51156997680664],["▁benne",-12.511603355407717],["▁وعلى",-12.511611938476562],["▁MT",-12.511652946472168],["▁llevar",-12.511685371398926],["клуч",-12.511711120605469],["чын",-12.511720657348633],["剛",-12.51175594329834],["ىڭ",-12.511756896972656],["变得",-12.511832237243652],["▁บ้าน",-12.511838912963867],["竟然",-12.511858940124512],["ሳት",-12.511859893798828],["▁visar",-12.511868476867676],["▁sette",-12.511871337890623],["ష్ట",-12.511874198913574],["іш",-12.511903762817385],["tinės",-12.511905670166016],["▁আপনি",-12.511919021606444],["▁ପୃଷ୍ଠା",-12.511919021606444],["▁რადგან",-12.511919021606444],["▁çfarë",-12.511920928955078],["▁සමඟ",-12.511924743652344],["jeg",-12.511935234069824],["шир",-12.511982917785645],["stof",-12.512018203735352],["▁hatua",-12.512060165405272],["▁Use",-12.512131690979004],["▁සිත",-12.51214599609375],["ési",-12.5121488571167],["議",-12.512149810791016],["rey",-12.512166023254396],["▁tead",-12.512170791625977],["▁söyledi",-12.512208938598633],["▁momentos",-12.512212753295898],["ícia",-12.512277603149414],["▁מקום",-12.512428283691406],["mbol",-12.512456893920898],["▁raak",-12.512479782104492],["▁ажиллаж",-12.512479782104492],["▁select",-12.512513160705566],["▁wilt",-12.51252269744873],["▁Aki",-12.512533187866213],["羊",-12.51254940032959],["遺",-12.51257038116455],["▁Folk",-12.512578964233398],["▁ultrices",-12.512611389160156],["▁რომელსაც",-12.51261138916
0156],["▁ህዝብ",-12.512611389160156],["▁указан",-12.512636184692385],["▁əsasən",-12.512638092041016],["ंज",-12.512653350830078],["▁ഒന്ന",-12.512675285339355],["▁ചാ",-12.51272964477539],["તર",-12.51274871826172],["▁אלו",-12.5127534866333],["ेन्द्र",-12.512755393981934],["動物",-12.512777328491213],["tron",-12.51278305053711],["▁humano",-12.51279067993164],["οσ",-12.512797355651855],["▁ମଧ୍ୟରେ",-12.512810707092283],["ლია",-12.512862205505373],["olás",-12.512869834899902],["لاپ",-12.51287078857422],["وك",-12.512913703918455],["▁evident",-12.51292324066162],["▁eher",-12.512953758239746],["▁niiden",-12.51295566558838],["सहित",-12.512975692749023],["▁ditugu",-12.512975692749023],["▁across",-12.51297664642334],["▁آزادی",-12.513001441955566],["▁teksto",-12.513054847717283],["über",-12.51307487487793],["▁schimb",-12.513100624084473],["▁terutama",-12.51310920715332],["▁khusus",-12.513111114501951],["फ़",-12.513113975524902],["ຕໍ່",-12.513168334960938],["िट",-12.513237953186035],["▁التى",-12.513259887695312],["狂",-12.513265609741213],["▁maging",-12.51327419281006],["燒",-12.51327896118164],["märk",-12.513296127319336],["挑战",-12.513299942016602],["▁Mundial",-12.513312339782717],["▁olet",-12.513313293457031],["▁dry",-12.513343811035156],["▁stof",-12.513400077819824],["▁ہمیں",-12.513416290283203],["aron",-12.51343822479248],["▁sayfa",-12.51346206665039],["▁fået",-12.513469696044922],["mind",-12.51347541809082],["▁kasu",-12.513525009155272],["▁жаз",-12.513544082641602],["ilah",-12.513592720031738],["ವಾಗಿದೆ",-12.513594627380373],["פֿ",-12.513596534729004],["▁ainakin",-12.5136079788208],["▁դ",-12.513636589050291],["ளை",-12.51364517211914],["▁चु",-12.513646125793455],["▁мектеп",-12.513654708862305],["rą",-12.513676643371582],["▁део",-12.51369857788086],["▁above",-12.513713836669922],["▁ആവശ്യ",-12.513747215270996],["▁revela",-12.513786315917969],["▁oud",-12.513798713684082],["τοπ",-12.513864517211914],["eringen",-12.513909339904783],["шчы",-12.51392936706543],["▁താ",-12.513931274414062],["▁מקור",-12.51393222808838],["御",-12.513971328735352],["¬",-12.513998031616213],["▁éxito",-12.51400089263916],["▁адвокат",-12.51400089263916],["▁termen",-12.514009475708008],["▁suna",-12.514019966125488],["▁හෙ",-12.51402473449707],["▁Кроме",-12.514032363891602],["ตร",-12.514062881469728],["▁terbaru",-12.514079093933104],["တီ",-12.514102935791016],["க்கம்",-12.51410961151123],["ടാ",-12.51412296295166],["▁Meta",-12.514189720153809],["▁Dkt",-12.514204025268556],["▁Africa",-12.514211654663086],["ший",-12.514214515686035],["▁Event",-12.514220237731934],["기업",-12.51422119140625],["▁Nasional",-12.514238357543944],["角色",-12.514328002929688],["▁Stefan",-12.514336585998535],["ピ",-12.514354705810549],["▁මො",-12.514366149902344],["▁näh",-12.514386177062988],["دور",-12.514416694641112],["otu",-12.514481544494627],["гін",-12.514498710632324],["▁комитет",-12.51453685760498],["ัส",-12.514538764953612],["▁vlasti",-12.51455307006836],["rave",-12.514643669128418],["▁Cad",-12.514668464660645],["▁taong",-12.514671325683594],["▁vọng",-12.514692306518556],["▁მაინც",-12.514692306518556],["▁stuk",-12.514695167541504],["▁sâu",-12.51469612121582],["ənə",-12.514727592468262],["այն",-12.514792442321776],["සී",-12.51479721069336],["сак",-12.514815330505373],["дым",-12.514841079711914],["▁พระ",-12.514856338500977],["անգ",-12.514883995056152],["azio",-12.514917373657228],["▁भाजपा",-12.514946937561035],["建設",-12.514965057373049],["▁bos",-12.514986991882324],["тий",-12.51499080657959],["▁spoj",-12.51499843597412],["żenia",-12.515052795410156],["▁кыз",-12.5150747299
19434],["赤",-12.515096664428713],["▁вес",-12.515207290649414],["رح",-12.515228271484377],["حن",-12.515260696411133],["▁சினிமா",-12.515286445617676],["▁Мон",-12.515297889709473],["работен",-12.515325546264648],["▁Dato",-12.515339851379396],["▁оба",-12.515341758728027],["уреди",-12.515371322631836],["ூர்",-12.515377044677734],["▁cộng",-12.515386581420898],["▁त्यांनी",-12.515396118164062],["▁бидејќи",-12.515402793884276],["õl",-12.51540756225586],["▁забезпечення",-12.515416145324709],["युक्त",-12.515438079833984],["▁pomeni",-12.515450477600098],["▁visite",-12.515450477600098],["▁unique",-12.515451431274414],["damas",-12.51551914215088],["ۇق",-12.515542984008787],["▁Спорт",-12.51555061340332],["▁İn",-12.51564121246338],["▁Bisa",-12.515642166137695],["▁principio",-12.515677452087402],["raba",-12.51569652557373],["▁szív",-12.515707015991213],["dığını",-12.51571559906006],["▁Strategi",-12.515766143798828],["▁ಭಾರತ",-12.51578140258789],["▁పొ",-12.515827178955078],["▁parola",-12.515829086303713],["▁kalba",-12.515844345092772],["ИН",-12.515920639038086],["▁послуг",-12.515935897827148],["▁liền",-12.515957832336426],["עק",-12.515979766845703],["▁sitter",-12.515979766845703],["▁näin",-12.516008377075195],["tzaile",-12.516027450561523],["ធ្វើ",-12.516039848327637],["▁אויס",-12.516045570373535],["▁sentiment",-12.516064643859863],["ホ",-12.51606559753418],["▁ڳالهه",-12.516080856323242],["満",-12.516088485717772],["AAN",-12.516096115112305],["▁اد",-12.51609706878662],["▁ຄ",-12.51609992980957],["▁Kin",-12.516133308410645],["▁असेल",-12.516135215759276],["▁նման",-12.516143798828123],["itin",-12.516162872314451],["▁maklumat",-12.516191482543944],["▁relação",-12.516226768493652],["pole",-12.516231536865234],["▁interese",-12.51629638671875],["лош",-12.516310691833496],["▁bumi",-12.516315460205078],["isella",-12.516359329223633],["shay",-12.516392707824709],["éri",-12.516397476196287],["▁դրամ",-12.516407012939451],["ჩი",-12.51640796661377],["тим",-12.51641845703125],["▁aktivnosti",-12.516435623168944],["▁acestui",-12.516546249389648],["▁meste",-12.51656436920166],["▁kleinen",-12.516592979431152],["▁mujeres",-12.516599655151367],["▁Uit",-12.5166015625],["ród",-12.516618728637695],["▁hätte",-12.516655921936035],["dú",-12.51667022705078],["თავ",-12.516671180725098],["虎",-12.516685485839844],["▁tiuj",-12.516704559326172],["▁동안",-12.51670742034912],["▁сім",-12.51671028137207],["▁olvas",-12.51672077178955],["▁Cela",-12.516743659973145],["▁Frankfurt",-12.51677703857422],["▁організації",-12.516782760620115],["úd",-12.516786575317385],["empi",-12.516887664794922],["▁interdum",-12.516897201538086],["UE",-12.516924858093262],["▁connect",-12.51695156097412],["▁kumpulan",-12.516996383666992],["▁sopra",-12.517032623291016],["selskap",-12.517040252685549],["▁inför",-12.51722240447998],["通り",-12.51723289489746],["même",-12.51723575592041],["▁Nullam",-12.51723575592041],["עוד",-12.51724338531494],["▁else",-12.517245292663574],["▁поред",-12.517284393310549],["▁партия",-12.517293930053713],["ပြော",-12.517319679260254],["ապետ",-12.517353057861328],["ヤ",-12.517358779907228],["TRA",-12.517363548278809],["ций",-12.517399787902832],["犬",-12.517437934875488],["简单",-12.51744270324707],["蘇",-12.51744270324707],["▁аймгийн",-12.51747226715088],["▁ούτε",-12.517476081848145],["▁жатқан",-12.517480850219728],["说明",-12.517480850219728],["▁៥",-12.517574310302734],["▁rendah",-12.517595291137695],["▁המשפט",-12.517600059509276],["▁മാര്",-12.51760482788086],["løp",-12.517622947692873],["ാനും",-12.517643928527832],["ಚಿ",-12.51764678955078],["▁Oto",-12.5176
57279968262],["letni",-12.517663955688477],["ttua",-12.517680168151855],["ВО",-12.51771640777588],["zona",-12.517735481262209],["tutako",-12.517805099487305],["▁పేరు",-12.517836570739746],["▁lead",-12.517858505249023],["出席",-12.517887115478516],["▁पुस्तक",-12.517906188964844],["▁gamot",-12.517950057983398],["▁wang",-12.517952919006348],["▁mobili",-12.517970085144045],["▁애",-12.517977714538574],["▁көрсету",-12.517991065979004],["▁rende",-12.518033981323242],["▁معا",-12.518074989318848],["▁svijetu",-12.518075942993164],["▁tudom",-12.518086433410645],["ีย",-12.518117904663086],["▁utrolig",-12.518131256103516],["율",-12.518149375915527],["ผลิตภัณฑ์",-12.518166542053224],["ிருந்த",-12.51817798614502],["▁argi",-12.518221855163574],["▁Pus",-12.5182466506958],["രം",-12.518309593200684],["▁възможност",-12.518310546875],["▁دم",-12.51834201812744],["▁đông",-12.51838207244873],["▁comida",-12.518383026123049],["搭配",-12.518388748168944],["▁sõ",-12.518415451049805],["▁новинар",-12.518438339233398],["▁pô",-12.518446922302246],["▁ഇന്",-12.518447875976562],["оф",-12.518452644348145],["▁loi",-12.518487930297852],["▁heures",-12.518508911132812],["વર",-12.51851749420166],["mem",-12.51856517791748],["što",-12.518589973449709],["▁ordu",-12.518599510192873],["▁ତାଙ୍କ",-12.51860523223877],["TAR",-12.518670082092283],["▁szól",-12.51867961883545],["▁gaur",-12.51870059967041],["▁certains",-12.518712997436523],["ไ",-12.518738746643066],["័",-12.518744468688965],["метр",-12.518753051757812],["pote",-12.51876163482666],["LM",-12.51877498626709],["-03-",-12.518781661987305],["cyjnych",-12.518823623657228],["ാനുള്ള",-12.518858909606934],["▁ଘଟଣା",-12.518865585327148],["▁תגובה",-12.518876075744627],["▁север",-12.51891040802002],["状態",-12.518919944763184],["թի",-12.518930435180664],["▁stjórn",-12.518956184387209],["▁عملية",-12.518991470336914],["atı",-12.519010543823242],["ligini",-12.519030570983888],["▁vremena",-12.519035339355469],["ให้กับ",-12.519105911254885],["metro",-12.519109725952148],["▁serveis",-12.519129753112791],["ကြောင်း",-12.519134521484377],["▁onge",-12.519139289855955],["▁مدیر",-12.51931095123291],["▁asked",-12.519330978393556],["▁relevante",-12.519352912902832],["ိတ္",-12.519362449645996],["▁ಪಿ",-12.519404411315918],["ទៀត",-12.519493103027344],["旗",-12.51951789855957],["ieri",-12.519536018371582],["dada",-12.519559860229492],["▁गांधी",-12.51956272125244],["▁Мемлекеттік",-12.519567489624023],["▁მაშინ",-12.519567489624023],["▁Magyarország",-12.519570350646973],["▁swoją",-12.519577026367188],["▁naziri",-12.519577980041504],["daş",-12.519580841064451],["▁peaks",-12.51963710784912],["▁චිත්",-12.51964282989502],["▁matin",-12.51965045928955],["تھ",-12.51967430114746],["▁גו",-12.519733428955078],["▁оптим",-12.519737243652344],["▁idir",-12.519750595092772],["komen",-12.519783973693848],["▁rú",-12.519818305969238],["▁einge",-12.519834518432615],["▁six",-12.519858360290527],["▁öğren",-12.519879341125488],["▁Popular",-12.519886016845703],["ங்கி",-12.519892692565918],["iền",-12.519957542419434],["▁wymaga",-12.51995849609375],["ਲੋ",-12.51996612548828],["▁seems",-12.519989013671877],["بال",-12.519990921020508],["▁dest",-12.52003002166748],["▁fasci",-12.520036697387695],["aris",-12.520073890686035],["ukat",-12.52013874053955],["▁madre",-12.52013874053955],["利率",-12.520150184631348],["holde",-12.520171165466309],["inum",-12.520188331604004],["šā",-12.520196914672852],["czenia",-12.520217895507812],["rand",-12.520258903503418],["▁रुपैयाँ",-12.52026081085205],["上面",-12.520270347595217],["ukas",-12.520272254943848],["▁Qualität",
-12.52027416229248],["stell",-12.520309448242188],["负",-12.5203218460083],["нути",-12.520330429077148],["ವೂ",-12.520336151123049],["++",-12.520343780517578],["▁Бал",-12.52037239074707],["▁ਰਾਜ",-12.52037239074707],["▁Nan",-12.520437240600586],["زر",-12.520458221435549],["чным",-12.520604133605955],["▁dobré",-12.5206298828125],["▁hlad",-12.520666122436523],["▁나는",-12.520673751831056],["▁niste",-12.5206880569458],["adresse",-12.520758628845217],["ପ୍ର",-12.520791053771973],["еро",-12.520796775817873],["▁başarı",-12.52081298828125],["▁सेना",-12.520893096923828],["ivu",-12.520899772644045],["▁dön",-12.52091121673584],["縮",-12.520915031433104],["▁문화",-12.520950317382812],["▁probabil",-12.52095890045166],["▁табылады",-12.520962715148926],["▁יהיה",-12.52096939086914],["문화",-12.52097225189209],["▁põhi",-12.520988464355469],["▁bih",-12.521000862121582],["▁pokoj",-12.52100658416748],["▁anual",-12.521110534667969],["▁aurrera",-12.521116256713867],["▁skrev",-12.521181106567385],["▁zina",-12.521198272705078],["▁indlæg",-12.52120304107666],["▁legend",-12.52122402191162],["▁டி",-12.521244049072266],["▁ashtu",-12.521245956420898],["▁hại",-12.521270751953123],["▁zon",-12.52127456665039],["nieks",-12.521276473999023],["▁новини",-12.52133083343506],["▁گفتگو",-12.52134609222412],["▁ਮਿ",-12.521361351013184],["cznych",-12.521397590637209],["▁alter",-12.521414756774902],["▁Օ",-12.521465301513672],["▁kurdî",-12.521472930908203],["ṣ",-12.52149772644043],["▁Šv",-12.52150058746338],["ിട്ട",-12.52151584625244],["▁ilaa",-12.521530151367188],["„",-12.521538734436035],["kni",-12.5216064453125],["інен",-12.521636962890623],["机会",-12.521722793579102],["▁belül",-12.521758079528809],["▁adresse",-12.52182960510254],["▁Raha",-12.521836280822754],["ುತ್ತಿದೆ",-12.521841049194336],["광",-12.521852493286133],["▁печат",-12.521855354309082],["wd",-12.521895408630373],["▁sibi",-12.52193832397461],["▁дэ",-12.52198314666748],["▁derece",-12.521992683410645],["▁ಸೆ",-12.52200698852539],["▁Mix",-12.522022247314451],["eil",-12.522050857543944],["СИ",-12.522130966186523],["муш",-12.522138595581056],["MT",-12.522144317626951],["▁ఇలా",-12.522174835205078],["โซ",-12.52219581604004],["▁felé",-12.522223472595217],["▁hòa",-12.522260665893556],["▁पश्चिम",-12.522271156311035],["iten",-12.522276878356934],["▁च्या",-12.522276878356934],["都要",-12.522293090820312],["▁ändå",-12.52236270904541],["▁Lö",-12.522366523742676],["▁newsletter",-12.522367477416992],["roka",-12.522374153137209],["▁Puntland",-12.522383689880373],["▁tredje",-12.522385597229004],["▁நீங்கள்",-12.52238655090332],["▁током",-12.522440910339355],["▁Did",-12.522473335266112],["რთ",-12.522478103637695],["▁teksti",-12.52249813079834],["аца",-12.522517204284668],["▁Raya",-12.522608757019045],["▁skuld",-12.522616386413574],["▁terasa",-12.522663116455078],["▁ახალ",-12.522668838500977],["GT",-12.522679328918455],["sua",-12.522690773010254],["▁travers",-12.522704124450684],["kārt",-12.522708892822266],["▁träffa",-12.522722244262695],["感覺",-12.522749900817873],["160",-12.522768020629885],["ací",-12.522769927978516],["▁زیبا",-12.522820472717283],["됩니다",-12.52292251586914],["стре",-12.522936820983888],["тура",-12.522941589355469],["ለን",-12.52295970916748],["▁brīv",-12.523004531860352],["чита",-12.523008346557615],["▁δημιουργ",-12.523011207580566],["▁qualsiasi",-12.523056983947754],["▁നമ്മുടെ",-12.523056983947754],["▁негізгі",-12.523062705993652],["ற்",-12.523063659667969],["▁längre",-12.523077964782717],["▁???",-12.523133277893066],["访",-12.523134231567385],["お客様",-12.523143768310549],["▁ڪار",-12.523153
305053713],["▁Album",-12.523154258728027],["▁گونه",-12.523158073425291],["▁kolej",-12.523159980773926],["▁kurš",-12.523201942443848],["▁panas",-12.523212432861328],["ווע",-12.5232515335083],["الی",-12.523287773132324],["▁Thái",-12.52329158782959],["▁issues",-12.523369789123535],["rebbe",-12.523386001586914],["▁bry",-12.523398399353027],["▁аны",-12.523422241210938],["LP",-12.523431777954102],["ছেন",-12.52343463897705],["▁dizer",-12.52345848083496],["▁ਏ",-12.52353572845459],["lett",-12.523561477661133],["▁Skal",-12.523606300354004],["dê",-12.52360725402832],["▁waarde",-12.523621559143066],["▁Други",-12.523641586303713],["臺",-12.523641586303713],["ロー",-12.523646354675291],["▁apare",-12.523651123046877],["▁aukšt",-12.52366065979004],["mbak",-12.523663520812988],["▁kojeg",-12.523666381835938],["▁პა",-12.52366828918457],["லில்",-12.523681640625],["▁dara",-12.523690223693848],["▁hónap",-12.523760795593262],["▁Հայաստան",-12.523765563964844],["▁progres",-12.523804664611816],["▁همکاری",-12.52380657196045],[":51",-12.523815155029297],["हरुलाई",-12.523816108703612],["▁ఒక్క",-12.523850440979004],["▁consul",-12.523900032043455],["▁ψ",-12.52392292022705],["dent",-12.523951530456545],["ির",-12.523956298828123],["機構",-12.523961067199709],["base",-12.524012565612791],["▁2018:",-12.524027824401855],["مە",-12.524109840393066],["▁plate",-12.524123191833496],["▁ນ",-12.52415370941162],["▁tornar",-12.524182319641112],["▁Vitamin",-12.52419090270996],["最終",-12.52419376373291],["▁publiko",-12.524219512939451],["▁फा",-12.524224281311035],["।।",-12.524236679077148],["▁Legg",-12.524249076843262],["植物",-12.524272918701172],["كن",-12.524340629577637],["▁ڪندي",-12.5243501663208],["▁compar",-12.524405479431152],["trag",-12.52443027496338],["▁alisin",-12.524435997009276],["‭",-12.524457931518556],["▁allerdings",-12.524457931518556],["▁කරමින්",-12.524500846862791],["▁insanların",-12.524518966674805],["itve",-12.524531364440918],["▁Louis",-12.524576187133787],["▁међу",-12.524609565734863],["▁looga",-12.524613380432127],["ために",-12.524619102478027],["▁Tây",-12.524641036987305],["▁Gut",-12.524642944335938],["▁akibat",-12.524642944335938],["ander",-12.524698257446287],["zioa",-12.524723052978516],["کي",-12.52472686767578],["民主",-12.524744987487791],["▁رخ",-12.52479076385498],["ក្រ",-12.52479362487793],["न्द",-12.52480697631836],["▁aún",-12.524823188781738],["▁stig",-12.524831771850586],["edo",-12.524888038635254],["رين",-12.524924278259276],["▁mimi",-12.524925231933594],["▁mohli",-12.524975776672363],["bă",-12.52500820159912],["▁فأ",-12.525009155273438],["法院",-12.525012969970703],["费用",-12.525014877319336],["etus",-12.525068283081056],["▁vostra",-12.525113105773926],["JU",-12.525146484375],["зан",-12.525153160095217],["▁ਕਿਸੇ",-12.52517318725586],["tici",-12.525188446044922],["▁vợ",-12.525200843811035],["▁முன்",-12.525243759155272],["▁BAR",-12.525260925292969],["▁moč",-12.525309562683104],["υμ",-12.525335311889648],["mea",-12.525341033935549],["▁всяко",-12.52535343170166],["▁utveckla",-12.52536392211914],["▁ergo",-12.525420188903809],["▁시장",-12.525453567504885],["美容",-12.525458335876465],["▁Temp",-12.525477409362791],["竹",-12.525479316711426],["のような",-12.525490760803224],["iņas",-12.525514602661133],["▁aura",-12.525546073913574],["▁separa",-12.525579452514648],["▁yay",-12.525609016418455],["▁arbejds",-12.525670051574709],["每次",-12.525694847106934],["St",-12.525707244873049],["▁Spar",-12.525718688964844],["▁range",-12.525738716125488],["球队",-12.525738716125488],["饭",-12.52577781677246],["שם",-12.525778770446776],["▁zlo",-12.52578067
779541],["弹",-12.525824546813965],["ಿನಲ್ಲಿ",-12.525848388671877],["▁विद्यार्थी",-12.525860786437988],["▁վերաբերյալ",-12.525861740112305],["rki",-12.525900840759276],["нс",-12.525908470153809],["МО",-12.525912284851074],["uži",-12.525949478149414],["▁siguiente",-12.525971412658691],["▁rūp",-12.52600383758545],["εμ",-12.526020050048828],["▁leng",-12.52603244781494],["▁весь",-12.526076316833496],["ார்கள்",-12.526079177856444],["▁decidi",-12.526117324829102],["▁Politika",-12.526124000549316],["니다",-12.526141166687012],["▁имам",-12.52615737915039],["▁усе",-12.526163101196287],["▁bilet",-12.526171684265137],["▁nafasi",-12.52623462677002],["πη",-12.526247024536133],["▁ທາງ",-12.526269912719728],["राव",-12.526280403137209],["CL",-12.52629852294922],["ੰਦ",-12.526333808898926],["▁сүй",-12.52635097503662],["ಎ",-12.52638816833496],["▁ayında",-12.52639102935791],["▁endast",-12.526422500610352],["იმ",-12.526427268981934],["▁თამაში",-12.526440620422363],["▁výstav",-12.526442527770996],["ость",-12.526458740234377],["▁môn",-12.526494026184082],["▁stred",-12.52649974822998],["▁levi",-12.526521682739258],["sters",-12.52653980255127],["තර",-12.526554107666016],["▁bho",-12.526558876037598],["▁достъп",-12.526568412780762],["▁referendum",-12.526577949523926],["▁प्रक्रिया",-12.52657985687256],["▁række",-12.52661418914795],["▁रहने",-12.52664852142334],["都可以",-12.526695251464844],["▁gelir",-12.52673053741455],["0.00",-12.526771545410156],["тку",-12.52680778503418],["▁aero",-12.526832580566406],["▁අත්",-12.526869773864746],["ობ",-12.526895523071287],["ଞ୍ଚ",-12.52690601348877],["▁strong",-12.526979446411133],["▁Ше",-12.527029991149902],["▁voy",-12.527056694030762],["▁ชุด",-12.527061462402344],["▁sagte",-12.527063369750977],["▁шиг",-12.527063369750977],["ਵਰ",-12.527074813842772],["▁þessa",-12.527090072631836],["▁Quy",-12.527093887329102],["▁nő",-12.527098655700684],["жна",-12.527118682861328],["▁seconda",-12.527143478393556],["▁Fru",-12.52716827392578],["▁wegen",-12.527180671691896],["▁වැනි",-12.527206420898438],["齐",-12.527210235595703],["▁odo",-12.527216911315918],["▁permanente",-12.527240753173828],["ଥାଏ",-12.52724838256836],["ced",-12.527255058288574],["▁मानव",-12.527259826660156],["▁ďalšie",-12.527266502380373],["▁México",-12.527268409729004],["▁lip",-12.527281761169434],["▁cuerpo",-12.527283668518066],["▁Đây",-12.527286529541016],["วน",-12.527311325073242],["▁jur",-12.52735424041748],["▁Xalq",-12.527429580688477],["antur",-12.527436256408691],["cun",-12.527440071105955],["▁destino",-12.52744960784912],["▁belirle",-12.527458190917969],["▁බි",-12.527502059936523],["▁opet",-12.52761936187744],["іңіз",-12.5276460647583],["ភ",-12.527688026428224],["▁අපිට",-12.527713775634766],["▁hodnot",-12.527727127075195],["家长",-12.527735710144045],["▁বে",-12.527765274047852],["▁Día",-12.527782440185549],["으면",-12.527786254882812],["سته",-12.527802467346191],["▁ചെയ്യുന്ന",-12.52781581878662],["ərə",-12.527840614318848],["തിര",-12.527872085571287],["▁Suspendisse",-12.527969360351562],["▁মি",-12.52799129486084],["▁নাম",-12.527992248535156],["ač",-12.528046607971191],["▁Teg",-12.52822494506836],["▁óra",-12.52823543548584],["▁lög",-12.52828311920166],["ρία",-12.528311729431152],["만원",-12.528334617614746],["▁esas",-12.528367042541504],["niji",-12.528470993041992],["▁spise",-12.528521537780762],["үүл",-12.528589248657228],["▁palabras",-12.52860164642334],["▁jazyk",-12.528602600097656],["▁صلی",-12.528624534606934],["▁diploma",-12.52863883972168],["▁зато",-12.528648376464844],["▁అన్న",-12.52865219116211],["لل",-12.528682708740234],["արդ",-12
.528711318969728],["十二",-12.52871322631836],["▁любой",-12.528731346130373],["▁giro",-12.528733253479004],["医生",-12.528735160827637],["ettu",-12.528759956359863],["▁வழங்க",-12.528837203979492],["▁ബോ",-12.528857231140137],["جة",-12.528910636901855],["▁qərar",-12.528932571411133],["房屋",-12.528959274291992],["ൊക്കെ",-12.528964042663574],["▁fent",-12.528977394104004],["▁ideale",-12.529061317443848],["▁περιοχή",-12.529094696044922],["▁ép",-12.52919101715088],["▁рак",-12.529196739196776],["rno",-12.52921199798584],["ічні",-12.52933120727539],["悪",-12.529348373413086],["▁perubahan",-12.529378890991213],["▁פעם",-12.529391288757324],["vum",-12.529394149780272],["ження",-12.529399871826172],["ျပန္",-12.529419898986816],["తే",-12.52947998046875],["▁Mehmet",-12.529480934143066],["При",-12.529587745666504],["▁помогне",-12.529610633850098],["▁Morbi",-12.52963924407959],["uči",-12.529712677001951],["▁ඇත්තේ",-12.529827117919922],["▁помага",-12.529851913452148],["▁සමාජ",-12.529912948608398],["▁ihnen",-12.529958724975586],["▁tisíc",-12.52997589111328],["▁بھ",-12.529979705810549],["▁자료",-12.529983520507812],["-23",-12.530016899108888],["诺",-12.530016899108888],["pili",-12.530056953430176],["disk",-12.53006362915039],["▁celebr",-12.530072212219238],["ပို",-12.530080795288086],["▁Helsinki",-12.53008270263672],["ਪੀ",-12.53009033203125],["▁αυτές",-12.530095100402832],["▁remember",-12.530097007751465],["▁lacinia",-12.530101776123049],["quel",-12.530107498168944],["▁социал",-12.530116081237791],["▁هنوز",-12.530125617980955],["▁різних",-12.530136108398438],["ntur",-12.530147552490234],["▁الحياة",-12.530147552490234],["elés",-12.53014850616455],["▁123",-12.530166625976562],["▁Hill",-12.530292510986328],["▁פו",-12.530356407165527],["▁පිළිබඳ",-12.53036403656006],["▁demokrat",-12.530375480651855],["▁المس",-12.530383110046388],["wek",-12.530439376831056],["ែ",-12.53044319152832],["ića",-12.530460357666016],["▁Mora",-12.530510902404783],["▁ແ",-12.530525207519531],["▁omrop",-12.530580520629885],["σμα",-12.53058910369873],["års",-12.530596733093262],["▁kitų",-12.530600547790527],["ferð",-12.530609130859377],["▁доллар",-12.530624389648438],["▁игры",-12.530692100524902],["维护",-12.530713081359863],["▁jį",-12.530716896057127],["klop",-12.530721664428713],["祝",-12.530742645263672],["孔",-12.530746459960938],["▁bổ",-12.530754089355469],["ゲーム",-12.530776023864746],["▁olup",-12.530781745910645],["raan",-12.53081226348877],["▁बताया",-12.530828475952148],["▁stellt",-12.530838966369627],["▁fitness",-12.530840873718262],["▁법",-12.530853271484377],["▁helyi",-12.530854225158691],["kate",-12.530863761901855],["▁FIFA",-12.530864715576172],["ACH",-12.530890464782717],["ীর",-12.530891418457031],["دت",-12.53090000152588],["жил",-12.530901908874512],["verse",-12.530948638916016],["▁nuovi",-12.53095817565918],["▁अघि",-12.531006813049316],["▁őket",-12.531027793884276],["▁miks",-12.53105354309082],["▁звер",-12.531070709228516],["▁100-",-12.531112670898438],["ിക്കുന്നു",-12.531120300292969],["▁၁",-12.531155586242676],["逆",-12.531164169311523],["ں",-12.531194686889648],["▁مشخص",-12.531203269958496],["ቸ",-12.531244277954102],["ične",-12.531296730041504],["▁프로그램",-12.531310081481934],["▁Sistema",-12.531353950500488],["▁anyag",-12.5314359664917],["hlas",-12.531460762023926],["移动",-12.53146266937256],["▁Рус",-12.531481742858888],["农业",-12.531487464904783],["▁electrónico",-12.531493186950684],["▁प्रस्ताव",-12.531493186950684],["▁අදහස්",-12.53150463104248],["▁Smith",-12.53155517578125],["▁карта",-12.531556129455566],["▁cast",-12.531579971313477],["RR",-12.
531582832336426],["▁Cookie",-12.5316162109375],["▁يقول",-12.531624794006348],["▁carrer",-12.531631469726562],["▁Dabei",-12.531652450561523],["▁kje",-12.531660079956056],["▁되는",-12.531705856323242],["alin",-12.531736373901367],["▁školy",-12.53173828125],["visi",-12.531747817993164],["▁mende",-12.53175163269043],["slut",-12.531805038452148],["▁якого",-12.531821250915527],["▁ადამიანი",-12.531831741333008],["食物",-12.53183937072754],["▁чувства",-12.531842231750488],["▁1942",-12.5318603515625],["essen",-12.531878471374512],["▁puhe",-12.531903266906738],["zte",-12.53194522857666],["korra",-12.531983375549316],["▁Япон",-12.531993865966797],["виж",-12.532022476196287],["ियां",-12.53209114074707],["整合",-12.532094955444336],["lico",-12.5321044921875],["▁דורך",-12.532156944274902],["จะได้",-12.532167434692385],["▁ету",-12.532177925109863],["▁ദിവസം",-12.532196998596191],["▁länge",-12.532198905944824],["▁ምክንያት",-12.532203674316406],["Gi",-12.53221035003662],["▁található",-12.53221321105957],["▁Lebih",-12.532244682312012],["schrift",-12.53225040435791],["▁skjer",-12.53225326538086],["▁tais",-12.532261848449709],["それを",-12.532285690307615],["▁слово",-12.532303810119627],["ары",-12.532308578491213],["▁problème",-12.532320976257324],["人間",-12.532333374023438],["▁mặc",-12.532343864440918],["▁여러",-12.532344818115234],["stö",-12.532350540161133],["▁medier",-12.532365798950195],["▁មនុស្ស",-12.532376289367676],["▁ቅ",-12.532397270202637],["▁seri",-12.532401084899902],["▁хувьд",-12.532404899597168],["▁spod",-12.532415390014648],["elin",-12.532425880432127],["となる",-12.532430648803713],["▁Finans",-12.532447814941406],["聞",-12.532453536987305],["midler",-12.532459259033203],["▁vooral",-12.532586097717283],["用品",-12.532587051391602],["▁Закона",-12.532588005065918],["▁చే",-12.53259563446045],["▁прив",-12.532641410827637],["▁мат",-12.532689094543455],["▁annonser",-12.532696723937988],["庭",-12.532716751098633],["▁Bé",-12.532724380493164],["▁groupe",-12.532732009887695],["▁רבים",-12.532769203186035],["▁2009,",-12.532791137695312],["▁force",-12.532803535461426],["▁flot",-12.53280544281006],["పీ",-12.532819747924805],["▁ekip",-12.53283977508545],["▁venir",-12.532880783081056],["دھ",-12.532881736755373],["▁struttura",-12.53288745880127],["лген",-12.532901763916016],["▁ಉತ್ತರ",-12.532907485961914],["▁آیت",-12.532929420471191],["▁එන",-12.53293800354004],["▁৬",-12.532971382141112],["ฮ",-12.53298282623291],["voja",-12.53299045562744],["▁ශ",-12.53302764892578],["割",-12.533036231994627],["▁سبز",-12.533047676086426],["▁özellikle",-12.533075332641602],["eda",-12.533092498779297],["▁nua",-12.533101081848145],["ліч",-12.533110618591309],["▁выход",-12.533111572265623],["▁precios",-12.5331449508667],["▁verði",-12.533158302307127],["ucht",-12.533177375793455],["іка",-12.533183097839355],["▁canvi",-12.533235549926758],["▁tonë",-12.533247947692873],["▁سود",-12.533282279968262],["▁sya",-12.533287048339844],["Ь",-12.533318519592283],["ఆ",-12.533318519592283],["▁vaba",-12.533344268798828],["▁28-",-12.53334903717041],["▁sug",-12.53346824645996],["▁माओवादी",-12.53347110748291],["▁WC",-12.533480644226074],["▁nossos",-12.533482551574709],["ема",-12.53355884552002],["حال",-12.533597946166992],["▁menyatakan",-12.533613204956056],["▁ເທດ",-12.533613204956056],["▁Ari",-12.533615112304688],["σμ",-12.533620834350586],["▁Toto",-12.533634185791016],["▁neuro",-12.533634185791016],["መር",-12.533641815185549],["mány",-12.533649444580078],["▁Superior",-12.533693313598633],["▁горе",-12.533708572387695],["▁Colo",-12.53371810913086],["▁ဘ",-12.533738136291504],["kvi
",-12.53373908996582],["າ",-12.533825874328612],["▁האָט",-12.533845901489258],["jezd",-12.533873558044434],["▁किस",-12.533873558044434],["▁blijven",-12.533889770507812],["ኤ",-12.533894538879396],["▁vision",-12.533897399902344],["gim",-12.533916473388672],["▁وړ",-12.53393268585205],["▁храна",-12.533935546875],["▁دلار",-12.533947944641112],["...»",-12.533950805664062],["▁Mill",-12.533957481384276],["tely",-12.533965110778809],["And",-12.53403663635254],["даў",-12.53404426574707],["▁мин",-12.534133911132812],["▁tried",-12.534160614013672],["uba",-12.534171104431152],["异",-12.534178733825684],["▁писа",-12.534218788146973],["▁federa",-12.534228324890137],["▁കോടതി",-12.534273147583008],["犯罪",-12.53427791595459],["zional",-12.534282684326172],["▁współ",-12.53431797027588],["▁ስም",-12.534331321716309],["▁pey",-12.534335136413574],["▁rynku",-12.534350395202637],["azi",-12.534378051757812],["▁වීම",-12.53438663482666],["Յ",-12.534400939941406],["▁gevolg",-12.534412384033203],["ooyinka",-12.53442096710205],["▁Ion",-12.534428596496582],["▁stessa",-12.534449577331545],["૭",-12.534512519836426],["hak",-12.534523010253906],["▁franc",-12.534592628479004],["ပြု",-12.534602165222168],["ருக்கு",-12.534610748291016],["性能",-12.53464412689209],["▁맞",-12.534647941589355],["▁rev",-12.534658432006836],["▁وار",-12.534693717956545],["դի",-12.534710884094238],["ພ",-12.534722328186035],["▁sabor",-12.534734725952148],["body",-12.534774780273438],["▁Nike",-12.534784317016602],["▁multaj",-12.534804344177246],["▁tenta",-12.534826278686523],["ൂര്",-12.534839630126951],["Gra",-12.534883499145508],["rve",-12.534899711608888],["高中",-12.534906387329102],["каза",-12.534918785095217],["乎",-12.534932136535645],["▁Pří",-12.53494644165039],["▁kādu",-12.534955978393556],["باط",-12.534963607788086],["哭",-12.534969329833984],["のも",-12.535011291503906],["▁gula",-12.535030364990234],["▁کبھی",-12.535033226013184],["▁ሕዝብ",-12.535040855407717],["tuak",-12.535058975219728],["▁ماشین",-12.535059928894045],["овима",-12.53508758544922],["也能",-12.535127639770508],["ภาค",-12.535130500793455],["▁patient",-12.535152435302734],["▁Pai",-12.535155296325684],["▁relativ",-12.53516960144043],["▁choses",-12.535189628601074],["imizin",-12.535200119018556],["▁இட",-12.53521728515625],["▁ከአ",-12.535225868225098],["nesi",-12.53523063659668],["▁نشده",-12.535239219665527],["▁Ela",-12.535264015197754],["▁tits",-12.535264015197754],["Sİ",-12.53528118133545],["▁ця",-12.535295486450195],["льні",-12.535383224487305],["rce",-12.53541660308838],["▁oblig",-12.535483360290527],["जु",-12.53550148010254],["▁హీరో",-12.535507202148438],["というのは",-12.535541534423828],["▁nguy",-12.535573959350586],["isu",-12.535662651062012],["▁বিজ্ঞান",-12.535677909851074],["▁tuyến",-12.535741806030272],["▁γίνεται",-12.535741806030272],["기도",-12.535795211791992],["▁bölge",-12.535816192626951],["▁עמ",-12.535819053649902],["▁barcha",-12.535826683044434],["რდა",-12.53582763671875],["▁وزن",-12.535894393920898],["image",-12.53591251373291],["▁polu",-12.53591251373291],["曾经",-12.535998344421388],["бележ",-12.536026000976562],["▁தீ",-12.53611660003662],["▁참여",-12.536148071289062],["▁porod",-12.536189079284668],["▁Asi",-12.536224365234377],["▁menjaga",-12.536246299743652],["▁зо",-12.53628635406494],["▁болуы",-12.536287307739258],["▁چیف",-12.536349296569824],["▁amen",-12.536407470703123],["摸",-12.536412239074709],["韓",-12.536418914794922],["▁carro",-12.536422729492188],["▁асоб",-12.536437034606934],["▁ذم",-12.536447525024414],["▁바랍니다",-12.53644847869873],["▁zasad",-12.536449432373049],["▁Tev",-12.53645229
3395996],["▁ხოლო",-12.536468505859377],["▁ан",-12.536480903625488],["▁molestie",-12.536492347717283],["相比",-12.536532402038574],["fana",-12.536535263061523],["▁منا",-12.536611557006836],["就业",-12.536625862121582],["▁приема",-12.536629676818848],["રણ",-12.536667823791504],["▁zápas",-12.536707878112791],["获",-12.53671646118164],["▁Kate",-12.53679084777832],["バー",-12.53679656982422],["▁dní",-12.536820411682127],["ாய்",-12.53683376312256],["▁Pou",-12.536839485168455],["汉",-12.536904335021973],["விட",-12.536919593811035],["kiisa",-12.53693389892578],["cta",-12.536942481994627],["▁pages",-12.53696346282959],["▁Dob",-12.536988258361816],["às",-12.537006378173828],["▁շ",-12.537015914916992],["łę",-12.537031173706056],["▁panta",-12.537060737609863],["tasi",-12.537094116210938],["贡献",-12.537114143371582],["Book",-12.537132263183594],["▁демократ",-12.53714656829834],["▁ನಾವು",-12.537176132202148],["сю",-12.537191390991213],["lna",-12.53722858428955],["nění",-12.537263870239258],["根據",-12.537296295166016],["শা",-12.537308692932127],["▁oyunu",-12.537321090698242],["▁финал",-12.53732967376709],["▁خصوص",-12.537373542785645],["地球",-12.537392616271973],["階",-12.537445068359377],["設施",-12.537463188171388],["оро",-12.53750705718994],["▁رابطه",-12.537529945373535],["女孩",-12.53754997253418],["▁Micro",-12.53756618499756],["▁hjælp",-12.53757667541504],["▁betul",-12.53758144378662],["อิน",-12.537583351135254],["▁필요",-12.537657737731934],["▁کردیا",-12.53765869140625],["iens",-12.537686347961426],["▁දේශපාලන",-12.537712097167969],["૫",-12.537718772888184],["ssing",-12.537732124328612],["▁ویب",-12.53773307800293],["隆",-12.537757873535156],["▁HIV",-12.537778854370115],["čiť",-12.537800788879396],["лия",-12.537801742553713],["taminen",-12.537826538085938],["▁fotoğraf",-12.537866592407228],["▁ਸ੍ਰੀ",-12.537866592407228],["▁සඳහන්",-12.537866592407228],["▁biblioteca",-12.537867546081545],["▁Všetky",-12.53786849975586],["▁ternyata",-12.537871360778809],["▁scelta",-12.537885665893556],["bhair",-12.537938117980955],["▁institution",-12.537962913513184],["▁llibre",-12.537973403930664],["¿",-12.53800106048584],["▁ભારત",-12.538002014160156],["▁SV",-12.538007736206056],["bana",-12.538016319274902],["▁devas",-12.538041114807127],["▁такая",-12.538067817687988],["▁crise",-12.538071632385254],["קת",-12.538081169128418],["▁Spring",-12.538084983825684],["ωμα",-12.538097381591797],["記者",-12.538104057312012],["túr",-12.538108825683594],["edel",-12.538116455078123],["▁पर्ने",-12.538192749023438],["▁чака",-12.538200378417969],["ज्",-12.538228034973145],["ců",-12.538254737854004],["▁quedar",-12.53825855255127],["اة",-12.53829860687256],["▁trots",-12.538305282592772],["▁région",-12.538330078125],["▁ບ",-12.538341522216797],["▁enorm",-12.538387298583984],["▁წინა",-12.538394927978516],["▁veniam",-12.538454055786133],["▁living",-12.538488388061523],["情況",-12.538498878479004],["▁સરકાર",-12.538506507873535],["语言",-12.538519859313965],["▁kriti",-12.538551330566406],["นั่ง",-12.538578033447266],["▁రాజకీయ",-12.538578033447266],["▁imperdiet",-12.538579940795898],["▁Huawei",-12.53858470916748],["▁kär",-12.53859043121338],["kontakt",-12.538602828979492],["רד",-12.538602828979492],["▁gruppo",-12.538602828979492],["▁аралык",-12.538604736328123],["法国",-12.538613319396973],["产生",-12.53862476348877],["توب",-12.538626670837402],["нак",-12.53865909576416],["šanos",-12.538666725158691],["▁artisti",-12.538679122924805],["ljivo",-12.538683891296388],["▁zelen",-12.5386962890625],["aldia",-12.538704872131348],["jka",-12.538705825805664],["oku",-12.53873062133789],["ớ"
,-12.538740158081056],["荷",-12.538741111755373],["▁செய்தி",-12.538801193237305],["多个",-12.5388822555542],["▁وکړي",-12.538928985595703],["ள்ள",-12.538931846618652],["▁uh",-12.538932800292969],["▁awak",-12.538952827453612],["▁kahit",-12.538952827453612],["不安",-12.538952827453612],["▁perfekte",-12.53896141052246],["கள",-12.538982391357422],["пуска",-12.539013862609863],["jati",-12.539030075073242],["▁veta",-12.53909969329834],["▁Bundan",-12.539111137390137],["খা",-12.539139747619627],["твар",-12.539189338684082],["▁определя",-12.539209365844728],["▁שכ",-12.539230346679688],["мене",-12.539276123046877],["▁mengikuti",-12.539287567138672],["▁Giardia",-12.539289474487305],["▁syarikat",-12.539289474487305],["▁pina",-12.53929042816162],["▁ගෙදර",-12.53929042816162],["▁escola",-12.539353370666504],["▁намира",-12.539353370666504],["▁ретінде",-12.539423942565918],["▁મારા",-12.539437294006348],["▁deben",-12.539498329162598],["▁جذب",-12.539514541625977],["▁naturale",-12.539539337158203],["BR",-12.539541244506836],["담",-12.53954792022705],["lare",-12.539552688598633],["хе",-12.539575576782228],["▁สามารถ",-12.53957748413086],["▁единствен",-12.539605140686035],["產生",-12.5396089553833],["▁aşa",-12.539632797241213],["▁rights",-12.539671897888184],["▁concentra",-12.539695739746094],["ിനു",-12.539735794067385],["ଜା",-12.53975772857666],["võ",-12.53982162475586],["測",-12.539841651916504],["Auto",-12.539843559265137],["▁waard",-12.539843559265137],["▁млади",-12.539864540100098],["-09-",-12.539884567260742],["▁terima",-12.53990650177002],["ڪو",-12.539974212646484],["▁қамтамасыз",-12.540000915527344],["▁Erdoğan",-12.54001808166504],["μου",-12.540040016174316],["emu",-12.540044784545898],["▁ბო",-12.540057182312012],["▁وزارة",-12.5401029586792],["▁arah",-12.540160179138184],["▁Cultural",-12.540194511413574],["нската",-12.54023265838623],["▁بابت",-12.540239334106444],["HS",-12.54029941558838],["▁maximum",-12.540306091308594],["ηρ",-12.54030704498291],["▁wide",-12.54031753540039],["▁sân",-12.540331840515137],["▁galite",-12.54034423828125],["▁tul",-12.540410041809082],["▁tienes",-12.54045295715332],["▁Tir",-12.540470123291016],["цо",-12.540496826171877],["▁arme",-12.540512084960938],["▁shume",-12.540514945983888],["กิจกรรม",-12.540526390075684],["രുടെ",-12.540552139282228],["ിട്ട്",-12.540570259094238],["fak",-12.540578842163086],["▁sarebbe",-12.540594100952148],["▁Lanka",-12.54059600830078],["lein",-12.540626525878906],["▁Privat",-12.54063892364502],["ПК",-12.540641784667969],["之中",-12.540684700012209],["▁ઘ",-12.540704727172852],["틀",-12.5407133102417],["▁juurde",-12.540727615356444],["▁투",-12.540727615356444],["▁Edward",-12.540732383728027],["▁pueda",-12.540741920471191],["▁अभी",-12.540742874145508],["▁peux",-12.54075527191162],["▁λόγω",-12.540822982788086],["▁coming",-12.54084587097168],["▁මහා",-12.54084587097168],["dw",-12.540892601013184],["▁yük",-12.54092025756836],["▁والوں",-12.541020393371582],["цем",-12.54102611541748],["ัม",-12.541049003601074],["▁сом",-12.541069984436035],["zig",-12.541092872619627],["leva",-12.541102409362791],["▁poslan",-12.54110622406006],["oil",-12.541133880615234],["▁mema",-12.54114818572998],["▁Балкан",-12.541231155395508],["▁kahe",-12.54126262664795],["undi",-12.541298866271973],["▁této",-12.541308403015137],["ર્સ",-12.541332244873049],["▁يست",-12.541340827941896],["沟通",-12.541348457336426],["▁seba",-12.54139232635498],["ОР",-12.541396141052246],["shirt",-12.541419982910156],["▁обычно",-12.541441917419434],["yć",-12.541461944580078],["▁බැරි",-12.541461944580078],["▁drift",-12.5415401458
74023],["ქმ",-12.54155445098877],["▁Siya",-12.541574478149414],["▁ilustra",-12.541614532470703],["使得",-12.541626930236816],["ԵՐ",-12.541627883911133],["▁tätä",-12.541651725769045],["优",-12.541658401489258],["ประตู",-12.541662216186523],["▁سيد",-12.541662216186523],["▁катары",-12.54167366027832],["jø",-12.54167652130127],["▁varius",-12.54167938232422],["adka",-12.5416898727417],["▁χρόνο",-12.54176425933838],["▁микро",-12.541773796081545],["iúil",-12.541791915893556],["▁१४",-12.54180145263672],["▁Өз",-12.541910171508787],["▁පිළි",-12.541916847229004],["▁خير",-12.54192066192627],["UF",-12.54192352294922],["bė",-12.541942596435549],["▁הז",-12.541942596435549],["▁ruang",-12.54196071624756],["▁Scr",-12.54197883605957],["หญิง",-12.541986465454102],["▁drwy",-12.541997909545898],["徐",-12.542024612426758],["▁seront",-12.542055130004885],["umiem",-12.542078971862791],["▁машина",-12.54208278656006],["▁Ida",-12.542132377624512],["▁diqqət",-12.54213809967041],["▁1917",-12.54214859008789],["▁келип",-12.542153358459473],["▁migliori",-12.542160987854004],["▁szép",-12.542163848876951],["tym",-12.54216766357422],["▁ນັ້ນ",-12.542197227478027],["orden",-12.54221248626709],["لىقى",-12.542218208312988],["360",-12.542237281799316],["▁daran",-12.542265892028809],["onder",-12.542333602905272],["▁игри",-12.542340278625488],["ങ്ങി",-12.542372703552246],["▁боку",-12.542376518249512],["бир",-12.54239273071289],["▁çe",-12.542396545410156],["机制",-12.542428016662598],["脑",-12.542428016662598],["▁geni",-12.542457580566406],["▁وري",-12.542488098144531],["gru",-12.542508125305176],["▁Kiel",-12.542570114135742],["▁محصول",-12.542596817016602],["▁1.1",-12.542606353759766],["▁haven",-12.542613983154297],["▁gondol",-12.542622566223145],["▁cest",-12.542651176452637],["קן",-12.542665481567385],["satz",-12.54271125793457],["▁අහ",-12.542715072631836],["▁Трамп",-12.542744636535645],["bereich",-12.542750358581545],["▁ত",-12.54277801513672],["ذهب",-12.5427827835083],["▁چارو",-12.542784690856934],["dáva",-12.542794227600098],["တယ်",-12.542794227600098],["▁Срби",-12.542802810668944],["▁amigo",-12.54281997680664],["▁पै",-12.542838096618652],["▁Bắc",-12.542852401733398],["▁परिवर्तन",-12.542852401733398],["▁106",-12.54290008544922],["▁persi",-12.542960166931152],["▁защита",-12.542978286743164],["▁bau",-12.54298210144043],["カード",-12.543007850646973],["ganda",-12.543024063110352],["Ә",-12.543044090270996],["▁introdu",-12.543087005615234],["▁01.",-12.54310131072998],["▁опит",-12.54310417175293],["BL",-12.543118476867676],["不僅",-12.543124198913574],["▁είχαν",-12.543182373046877],["▁بهره",-12.543217658996582],["▁kaca",-12.543298721313477],["▁veg",-12.543306350708008],["▁pros",-12.54331111907959],["rons",-12.54333782196045],["▁respectiv",-12.543349266052246],["▁Laat",-12.543390274047852],["лак",-12.543506622314451],["ტუ",-12.543519973754885],["▁tevékenység",-12.543566703796388],["▁الدكتور",-12.543570518493652],["▁በፊት",-12.543604850769045],["▁музей",-12.543609619140623],["կա",-12.543671607971191],["染",-12.543700218200684],["▁penye",-12.54370403289795],["▁ڈال",-12.543721199035645],["▁служби",-12.543757438659668],["▁allemaal",-12.54376983642578],["OB",-12.543790817260742],["▁μεγάλο",-12.543834686279297],["▁vinnu",-12.54383659362793],["▁нього",-12.543840408325195],["▁10.000",-12.543842315673828],["ಜಾ",-12.543951034545898],["ਵੀ",-12.54399871826172],["kuma",-12.54401683807373],["sikan",-12.544028282165527],["▁gyd",-12.544038772583008],["▁самого",-12.544050216674805],["日の",-12.54405117034912],["xir",-12.544063568115234],["▁בק",-12.544063568115234],["▁sepa
rat",-12.544108390808104],["▁ارزش",-12.544111251831056],["ନ୍ତ",-12.544163703918455],["sioni",-12.544175148010254],["▁ಇಲ್ಲಿ",-12.544194221496582],["что",-12.544219017028809],["mán",-12.54423713684082],["不管",-12.544254302978516],["▁Kanada",-12.544257164001465],["▁меньше",-12.544265747070312],["▁lãnh",-12.544281959533691],["▁अभियान",-12.544283866882324],["▁ਹੁਣ",-12.544341087341309],["خبر",-12.544352531433104],["ந்தி",-12.5443754196167],["heter",-12.544378280639648],["baran",-12.54438304901123],["āja",-12.54438591003418],["▁చేసే",-12.544395446777344],["씨",-12.544416427612305],["lám",-12.54447078704834],[".9.",-12.544475555419922],["▁stvar",-12.544520378112791],["▁férfi",-12.544535636901855],["▁Gott",-12.544567108154297],["▁фотограф",-12.544621467590332],["▁налази",-12.544628143310549],["▁കട",-12.544686317443848],["▁सामान्य",-12.544705390930176],["kų",-12.544722557067873],["lék",-12.544734001159668],["▁Cyf",-12.544737815856934],["▁reprezenta",-12.54476547241211],["ധി",-12.544775009155272],["cado",-12.544776916503906],["اتها",-12.54478931427002],["▁avion",-12.544878005981444],["▁번",-12.54489040374756],["▁Toyota",-12.544910430908203],["領域",-12.544928550720217],["臺灣",-12.544947624206545],["▁तू",-12.54496955871582],["▁speed",-12.544997215270996],["▁तुम्ही",-12.544997215270996],["▁Kerajaan",-12.544998168945312],["ээд",-12.54509162902832],["▁Hoàng",-12.545098304748535],["▁спе",-12.545101165771484],["▁بنانے",-12.545101165771484],["▁iam",-12.545210838317873],["weni",-12.545214653015137],["yra",-12.545220375061035],["tık",-12.545246124267578],["▁asunto",-12.545316696166992],["ÉS",-12.54535675048828],["ixi",-12.545371055603027],["▁możemy",-12.54537582397461],["▁Britani",-12.545406341552734],["▁cate",-12.545422554016112],["▁разум",-12.54542636871338],["imos",-12.54548454284668],["▁navê",-12.545491218566896],["raha",-12.545499801635742],["总是",-12.545499801635742],["▁raken",-12.54557991027832],["▁langkah",-12.545620918273926],["umus",-12.545621871948242],["ኅ",-12.5457124710083],["Ế",-12.5457124710083],["▁ନୂଆ",-12.545713424682615],["▁जैसे",-12.545717239379885],["▁lú",-12.545721054077148],["તાં",-12.545844078063965],["▁gama",-12.545858383178713],["wei",-12.545860290527344],["rés",-12.54587745666504],["nç",-12.545916557312012],["播",-12.545918464660645],["▁mach",-12.545936584472656],["elta",-12.545948028564451],["▁भिडियो",-12.545979499816896],["突",-12.54599380493164],["▁programas",-12.546025276184082],["▁производи",-12.546046257019045],["▁Premi",-12.546055793762209],["▁denuncia",-12.546152114868164],["▁Today",-12.546167373657228],["▁santé",-12.546181678771973],["▁Νέα",-12.5462064743042],["▁ajuta",-12.54625415802002],["ovací",-12.546257972717283],["конференц",-12.546270370483398],["▁чек",-12.546303749084473],["▁toteut",-12.546320915222168],["เสริม",-12.546356201171877],["±",-12.546428680419922],["SF",-12.54644775390625],["▁fyr",-12.546485900878906],["نډ",-12.546518325805664],["LED",-12.546536445617676],["▁financ",-12.546571731567385],["▁klasse",-12.546605110168455],["ttava",-12.546630859375],["vii",-12.54672908782959],["ացման",-12.546772956848145],["▁application",-12.546802520751951],["uvchi",-12.546822547912598],["rete",-12.546876907348633],["enka",-12.546894073486328],["▁آئے",-12.546935081481934],["▁Arti",-12.5469388961792],["▁nép",-12.546998023986816],["елі",-12.547005653381348],["ніка",-12.54701042175293],["weza",-12.547024726867676],["▁1964",-12.547066688537598],["困难",-12.547080039978027],["▁دفع",-12.547115325927734],["tanda",-12.54711627960205],["皮膚",-12.547117233276367],["▁eleifend",-12.54714584350586],["▁
gwneud",-12.54714584350586],["▁sagittis",-12.54714584350586],["▁სხვადასხვა",-12.54714584350586],["自信",-12.547165870666504],["▁huduma",-12.547179222106934],["▁Hollywood",-12.547197341918944],["▁Sosial",-12.547226905822754],["告诉",-12.547226905822754],["▁alumnos",-12.547232627868652],["▁articles",-12.547308921813965],["chung",-12.547337532043455],["▁oman",-12.547344207763672],["ђи",-12.547348976135254],["▁حم",-12.547370910644531],["我在",-12.547374725341797],["radio",-12.547386169433594],["▁tutki",-12.54738712310791],["不想",-12.547441482543944],["ضاف",-12.547497749328612],["વાની",-12.54753303527832],["▁retour",-12.54759120941162],["▁pomoč",-12.54759693145752],["ЕМ",-12.547636985778809],["рш",-12.547636985778809],["▁ዓለም",-12.547639846801758],["▁envie",-12.547663688659668],["aram",-12.547680854797363],["▁Må",-12.547687530517578],["chia",-12.547691345214844],["▁encuentra",-12.547740936279297],["ימות",-12.547791481018066],["▁Seg",-12.54780387878418],["▁الجديد",-12.547812461853027],["▁możliwości",-12.547863960266112],["▁Thiết",-12.54787254333496],["ട്ടു",-12.54787826538086],["▁pano",-12.547884941101074],["愿",-12.5479097366333],["▁Thế",-12.54793643951416],["▁Ок",-12.54794216156006],["▁שאני",-12.547954559326172],["ild",-12.54796028137207],["óirí",-12.547965049743652],["▁pied",-12.547974586486816],["พนักงาน",-12.547977447509766],["▁בג",-12.548001289367676],["ঘ",-12.548009872436523],["▁intern",-12.54802703857422],["▁oluyor",-12.548027992248535],["▁გამ",-12.548028945922852],["娘",-12.548073768615724],["лено",-12.548080444335938],["▁ខ្ញុំ",-12.548081398010254],["▁Room",-12.548088073730469],["▁MC",-12.54818630218506],["hav",-12.548190116882324],["ısı",-12.548210144042969],["▁Station",-12.548221588134766],["ตะ",-12.548227310180664],["▁dhidi",-12.548235893249512],["ИЛ",-12.548246383666992],["▁राख",-12.548270225524902],["▁dob",-12.548285484313965],["▁Před",-12.54832649230957],["bant",-12.548384666442873],["point",-12.5483980178833],["▁ვიდეო",-12.548436164855955],["▁pä",-12.548483848571776],["भूत",-12.548501014709473],["似",-12.548505783081056],["▁naših",-12.548516273498535],["خص",-12.548526763916016],["▁абсолютно",-12.548561096191406],["usan",-12.54857063293457],["▁маршрут",-12.548583030700684],["▁gehört",-12.548583984375],["dog",-12.548585891723633],["ગા",-12.54861068725586],["▁тілі",-12.54861068725586],["▁લો",-12.54862117767334],["▁жу",-12.548636436462402],["marka",-12.548656463623049],["▁loogu",-12.548657417297363],["▁thoughts",-12.54865837097168],["saf",-12.548690795898438],["▁potential",-12.548707962036133],["長期",-12.548721313476562],["▁ప్రేమ",-12.54873275756836],["▁сильно",-12.548744201660156],["▁விட",-12.548757553100586],["▁niður",-12.54876708984375],["▁görün",-12.548775672912598],["▁حسب",-12.54877758026123],["jaar",-12.548778533935549],["▁soha",-12.548837661743164],["bly",-12.548869132995604],["▁szóló",-12.548873901367188],["spek",-12.548877716064451],["eign",-12.54888153076172],["▁árið",-12.548900604248049],["ражен",-12.54890251159668],["▁កំពុង",-12.548935890197754],["▁প্রকাশ",-12.548968315124512],["▁spletne",-12.548995018005373],["▁сцена",-12.549001693725586],["▁души",-12.549027442932127],["▁hidro",-12.549036979675291],["▁sortu",-12.54905605316162],["▁συγ",-12.549092292785645],["স্থ",-12.549128532409668],["▁tagad",-12.549165725708008],["彈",-12.549190521240234],["ying",-12.5491943359375],["cznej",-12.549238204956056],["сне",-12.549248695373535],["▁letto",-12.549277305603027],["維持",-12.54928207397461],["▁joita",-12.549283027648926],["▁solar",-12.549334526062012],["と思って",-12.549339294433594],["▁fragment",-
12.54935073852539],["▁slå",-12.5493745803833],["▁Стан",-12.549384117126465],["▁Esto",-12.54938793182373],["endur",-12.549388885498049],["▁राजनीतिक",-12.54948616027832],["tzi",-12.549505233764648],["▁শিক্ষা",-12.549510955810549],["▁iela",-12.549511909484863],["ानि",-12.549513816833496],["▁geografi",-12.549559593200684],["των",-12.549590110778809],["jenje",-12.549627304077148],["ٽن",-12.549635887145996],["▁тепло",-12.549641609191896],["いつも",-12.54965591430664],["▁ਰਾ",-12.549657821655272],["larining",-12.549663543701172],["ባት",-12.549713134765623],["▁paš",-12.549723625183104],["▁آدم",-12.549723625183104],["▁chan",-12.5497407913208],["感染",-12.549786567687988],["בות",-12.549796104431152],["bær",-12.54983615875244],["▁1962",-12.549842834472656],["jmë",-12.549880981445312],["▁њен",-12.549911499023438],["ತ್ರ",-12.549914360046388],["▁Gül",-12.549939155578612],["▁خپلو",-12.549943923950195],["▁ڇڏيو",-12.55001449584961],["▁Улаанбаатар",-12.550018310546877],["▁последните",-12.550019264221191],["▁vengono",-12.550042152404783],["TAN",-12.550065994262695],["▁všech",-12.55010223388672],["▁வந்து",-12.550129890441896],["▁کیا۔",-12.550137519836426],["▁lud",-12.550146102905272],["▁Marco",-12.550209045410156],["▁мире",-12.550210952758787],["▁licht",-12.55027198791504],["▁uygulama",-12.550278663635254],["▁pac",-12.550291061401367],["שק",-12.550315856933594],["▁sinu",-12.550390243530272],["უხ",-12.550395011901855],["uak",-12.550397872924805],["レー",-12.550411224365234],["▁Muh",-12.550422668457031],["▁տես",-12.550458908081056],["表现",-12.550484657287598],["這裡",-12.55051040649414],["过来",-12.550552368164062],["改變",-12.55056381225586],["ិត",-12.550567626953123],["▁миллион",-12.550603866577148],["កម្ម",-12.550633430480955],["▁aliyê",-12.550667762756348],["俗",-12.550667762756348],["▁valores",-12.55068016052246],["ແ",-12.55069065093994],["▁acción",-12.550698280334473],["▁Гол",-12.550700187683104],["▁ସୂଚନା",-12.550738334655762],["▁naprawdę",-12.550739288330078],["▁gioco",-12.55075454711914],["▁منهنجي",-12.55075454711914],["lån",-12.550843238830566],["▁avanza",-12.5508451461792],["▁બાદ",-12.550857543945312],["▁femei",-12.550865173339844],["IH",-12.550908088684082],["prast",-12.550908088684082],["▁многие",-12.550908088684082],["▁plans",-12.550910949707031],["▁realizado",-12.55091667175293],["▁$1",-12.550920486450195],["▁gid",-12.550951957702637],["ਿੰਗ",-12.550971031188965],["▁ווען",-12.550982475280762],["▁Kore",-12.550985336303713],["μένη",-12.551018714904783],["▁Học",-12.551019668579102],["▁gerçekleştir",-12.551070213317873],["▁inget",-12.551081657409668],["▁borde",-12.551090240478516],["ייט",-12.551156997680664],["ுவது",-12.55116844177246],["قه",-12.551177024841309],["▁Estas",-12.551192283630373],["▁altra",-12.551193237304688],["nker",-12.5512056350708],["▁Rem",-12.5512056350708],["▁zv",-12.551210403442385],["황",-12.551237106323242],["bour",-12.551239013671877],["▁Denn",-12.551246643066406],["▁necesidades",-12.551279067993164],["▁kabi",-12.551291465759276],["мова",-12.551299095153809],["kava",-12.551314353942873],["▁đêm",-12.551321029663086],["▁사실",-12.551356315612791],["qq",-12.55136775970459],["omat",-12.551409721374512],["οποίηση",-12.551433563232422],["全國",-12.551456451416016],["▁Wind",-12.551457405090332],["▁También",-12.551458358764648],["uki",-12.551494598388672],["▁Bonus",-12.551519393920898],["глав",-12.55154037475586],["ছি",-12.551568984985352],["▁Garden",-12.551568984985352],["飾",-12.551570892333984],["▁එන්න",-12.551600456237791],["efter",-12.55160427093506],["æk",-12.55161476135254],["▁dodat",-12.55165672302246]
,["brug",-12.551671028137209],["▁اپ",-12.551687240600586],["▁aap",-12.551714897155762],["päeva",-12.551740646362305],["▁музика",-12.551740646362305],["AKA",-12.551753044128418],["ֵ",-12.551772117614746],["▁Check",-12.551775932312012],["▁magam",-12.551837921142578],["▁никак",-12.551838874816896],["КИ",-12.551849365234377],["▁lähi",-12.55186367034912],["▁సె",-12.55189323425293],["doo",-12.551901817321776],["▁crianças",-12.55191135406494],["▁होइन",-12.551931381225586],["oren",-12.551950454711914],["▁lidhje",-12.551972389221191],["цеп",-12.55199146270752],["মে",-12.552003860473633],["▁Thor",-12.552038192749023],["▁ელ",-12.55205249786377],["▁quest",-12.552108764648438],["▁bók",-12.552131652832031],["▁organi",-12.552165985107422],["▁खिलाफ",-12.552178382873535],["▁ዝ",-12.552186012268066],["▁Flo",-12.552199363708496],["▁الحر",-12.552204132080078],["ästä",-12.552223205566406],["▁oplysninger",-12.55226993560791],["jeva",-12.552297592163086],["▁3.5",-12.552337646484377],["LAT",-12.552379608154297],["津",-12.552385330200195],["sila",-12.552386283874512],["▁mondta",-12.552397727966309],["留下",-12.552407264709473],["▁кез",-12.552411079406738],["困",-12.552437782287598],["▁والإ",-12.552438735961914],["▁rette",-12.552502632141112],["atok",-12.552519798278809],["▁verte",-12.55252170562744],["迫",-12.552542686462402],["眼睛",-12.552610397338867],["არე",-12.552614212036133],["▁spá",-12.552741050720217],["▁Ves",-12.55274486541748],["▁mladi",-12.55279541015625],["▁fór",-12.552820205688477],["▁susu",-12.552855491638184],["vete",-12.55288028717041],["▁accumsan",-12.552899360656738],["რუ",-12.552918434143066],["▁ভিডিও",-12.552922248840332],["▁Slovensku",-12.552935600280762],["▁yaşam",-12.552943229675291],["▁AI",-12.552967071533203],["▁डी",-12.552977561950684],["▁বাংলা",-12.552996635437012],["▁неговата",-12.553004264831545],["▁ہ",-12.553010940551758],["▁kese",-12.55303192138672],["어요",-12.55303955078125],["rde",-12.553075790405272],["▁Quan",-12.553077697753906],["페",-12.553078651428224],["▁ematen",-12.553091049194336],["▁merci",-12.553092956542969],["▁products",-12.553107261657717],["んです",-12.553129196166992],["▁tuh",-12.553187370300291],["ወን",-12.553193092346191],["ây",-12.553217887878418],["▁garantir",-12.553224563598633],["▁ເກມ",-12.553226470947266],["▁डो",-12.553230285644531],["▁cell",-12.553306579589844],["पूर्ण",-12.553312301635742],["▁κά",-12.5533447265625],["dzenie",-12.553389549255373],["▁विश्वास",-12.553397178649902],["жо",-12.553495407104492],["ирана",-12.55349826812744],["înin",-12.553542137145996],["▁finna",-12.553558349609377],["▁spis",-12.55356216430664],["ໃດ",-12.553618431091309],["▁küçük",-12.553625106811523],["▁स्ट",-12.553666114807127],["▁lazım",-12.553675651550291],["들에게",-12.55367946624756],["▁ligi",-12.553691864013672],["ရီ",-12.553712844848633],["▁aega",-12.553728103637695],["▁şo",-12.553728103637695],["▁ہوتے",-12.553750991821287],["▁rerum",-12.553756713867188],["▁IX",-12.553767204284668],["zela",-12.55378532409668],["Бо",-12.553791046142578],["▁(30",-12.553791999816896],["03.",-12.553800582885742],["θα",-12.553831100463867],["▁ڇو",-12.55385684967041],["ໃຊ້",-12.553902626037598],["▁వీ",-12.553914070129396],["ध्या",-12.553915023803713],["▁fier",-12.553916931152344],["uman",-12.55391788482666],["▁constru",-12.553937911987305],["であり",-12.553948402404783],["കു",-12.553969383239746],["▁ling",-12.553997039794922],["▁Hr",-12.554003715515137],["▁dårlig",-12.554085731506348],["▁Sok",-12.554153442382812],["▁месеци",-12.554153442382812],["▁70-",-12.55417251586914],["nok",-12.55419635772705],["▁Besar",-12.5542
06848144531],["ipu",-12.554241180419922],["४",-12.554315567016602],["RP",-12.554330825805664],["ונית",-12.55433464050293],["▁калды",-12.55434513092041],["▁Messenger",-12.554347038269045],["▁являются",-12.554350852966309],["▁αφού",-12.5543794631958],["▁Oyun",-12.554391860961914],["▁گردد",-12.554428100585938],["▁Haber",-12.554462432861328],["▁руки",-12.554484367370604],["verket",-12.554526329040527],["▁niha",-12.554567337036133],["ენტ",-12.55457592010498],["meyi",-12.554579734802246],["▁želi",-12.55459976196289],["тир",-12.554611206054688],["▁konce",-12.554641723632812],["nare",-12.554643630981444],["▁проекти",-12.554643630981444],["անկ",-12.55466079711914],["ज्ञ",-12.55468463897705],["သီး",-12.5546875],["▁italiano",-12.554693222045898],["▁kamay",-12.554713249206545],["lian",-12.554779052734377],["TN",-12.554780006408691],["▁sant",-12.55478572845459],["uno",-12.55478858947754],["2001",-12.554821968078612],["▁ઉપર",-12.554862022399902],["▁ఇక",-12.55487060546875],["▁Kil",-12.554872512817385],["▁Бра",-12.55488109588623],["めて",-12.554892539978027],["ხე",-12.554917335510254],["采",-12.554948806762695],["şt",-12.554960250854492],["▁Тра",-12.55496597290039],["▁План",-12.55498218536377],["있는",-12.5549955368042],["ensi",-12.555012702941896],["▁yaptı",-12.555021286010742],["▁centros",-12.555026054382324],["▁անդամ",-12.555033683776855],["स्तु",-12.555059432983398],["ৎ",-12.555065155029297],["▁امید",-12.555092811584473],["\"?",-12.555099487304688],["▁passo",-12.55510425567627],["▁médica",-12.55510711669922],["▁Бол",-12.55512523651123],["дения",-12.555129051208496],["вије",-12.555153846740724],["дох",-12.555205345153809],["▁населения",-12.55520725250244],["新しい",-12.55521297454834],["jön",-12.555228233337402],["ksen",-12.555231094360352],["▁уверен",-12.555238723754885],["▁Nic",-12.555248260498049],["▁fueron",-12.55526065826416],["▁reto",-12.555326461791992],["▁adó",-12.555331230163574],["▁وکړ",-12.555355072021484],["▁procesu",-12.555356979370115],["▁seguinte",-12.55536937713623],["▁Christus",-12.555377960205078],["▁изда",-12.55540370941162],["▁цвет",-12.555418014526367],["▁گوشی",-12.555419921875],["ණි",-12.555447578430176],["მართ",-12.555455207824709],["▁osv",-12.555580139160156],["glo",-12.555584907531738],["▁hö",-12.555587768554688],["▁მარ",-12.555599212646484],["▁చేర",-12.555633544921877],["▁chodzi",-12.555649757385254],["ాం",-12.555671691894531],["▁manipul",-12.55569839477539],["こう",-12.555715560913086],["ิง",-12.555723190307615],["契約",-12.55576992034912],["▁സ്ത്രീ",-12.555770874023438],["▁batang",-12.555777549743652],["▁Mwenyekiti",-12.55578899383545],["Kom",-12.555801391601562],["▁caption",-12.5558443069458],["یٹ",-12.55587387084961],["▁ਰ",-12.555906295776367],["ਜੀਤ",-12.55591106414795],["▁soc",-12.555913925170898],["是我",-12.555936813354492],["▁diena",-12.55600643157959],["ētā",-12.556025505065918],["SR",-12.55604362487793],["形象",-12.556059837341309],["inc",-12.55607795715332],["▁tiro",-12.556081771850586],["▁lop",-12.55612850189209],["▁oyuncu",-12.556138038635254],["▁කැ",-12.556145668029783],["kota",-12.556154251098633],["vaja",-12.556157112121582],["▁Jau",-12.556159973144531],["AO",-12.556167602539062],["▁Aquesta",-12.556180953979492],["感受",-12.556203842163086],["stry",-12.556204795837402],["▁والح",-12.556233406066896],["▁täna",-12.55624008178711],["▁роки",-12.556270599365234],["▁sayang",-12.556275367736816],["зив",-12.556306838989258],["यः",-12.556306838989258],["зда",-12.556313514709473],["▁gros",-12.55634593963623],["▁dub",-12.556355476379396],["lele",-12.556452751159668],["▁Fransa",-12.5564785003
66213],["▁feladat",-12.556488990783691],["▁ისინი",-12.556522369384766],["▁လက္",-12.556556701660156],["▁نصب",-12.556578636169434],["сов",-12.556581497192385],["cente",-12.55659294128418],["▁comunque",-12.55659294128418],["▁uporabi",-12.556610107421877],["ível",-12.556671142578123],["▁scho",-12.556682586669922],["یق",-12.556686401367188],["▁Rights",-12.55670166015625],["mî",-12.556724548339844],["執行",-12.556726455688477],["▁پانی",-12.556803703308104],["card",-12.556807518005373],["▁محیط",-12.556811332702637],["▁yar",-12.55683708190918],["ตลอด",-12.556861877441406],["telefon",-12.556917190551758],["▁پہنچ",-12.557028770446776],["▁user",-12.557040214538574],["cano",-12.557056427001951],["ಕು",-12.557058334350586],["▁esca",-12.557058334350586],["的故事",-12.557113647460938],["産",-12.557161331176758],["▁satış",-12.557173728942873],["▁quelque",-12.557209014892578],["gile",-12.557233810424805],["ВЕ",-12.557242393493652],["▁nhẹ",-12.557245254516602],["▁riguarda",-12.557245254516602],["firm",-12.557269096374512],["▁توپ",-12.557269096374512],["▁keinen",-12.55727767944336],["▁возможности",-12.557293891906738],["problem",-12.557330131530762],["▁свих",-12.557334899902344],["▁rå",-12.55739402770996],["▁zakup",-12.557425498962402],["的方法",-12.55742645263672],["▁زد",-12.557432174682615],["▁অব",-12.557433128356934],["▁ganze",-12.557449340820312],["çar",-12.557465553283691],["лото",-12.557482719421388],["▁kehit",-12.557497024536133],["нный",-12.557518005371094],["voc",-12.557544708251951],["ώσεις",-12.557544708251951],["▁aceea",-12.557558059692385],["▁pochi",-12.55756378173828],["WI",-12.55759048461914],["ader",-12.557592391967772],["▁Marina",-12.557605743408203],["▁blanc",-12.55761432647705],["ໃນການ",-12.557637214660645],["▁лу",-12.557637214660645],["▁μά",-12.557666778564451],["ସା",-12.557671546936035],["▁каз",-12.557717323303224],["▁jelen",-12.5577392578125],["ίδα",-12.557743072509766],["▁cab",-12.557750701904297],["▁kaina",-12.557753562927246],["patr",-12.55776023864746],["gehen",-12.557766914367676],["▁vidu",-12.557796478271484],["▁später",-12.5577974319458],["▁Federa",-12.557815551757812],["성을",-12.557821273803713],["pk",-12.557852745056152],["▁kuasa",-12.557855606079102],["▁wiha",-12.557861328125],["føre",-12.557878494262695],["ยาว",-12.557889938354492],["▁acontece",-12.557904243469238],["υσ",-12.557920455932615],["▁екен",-12.55792999267578],["▁Над",-12.557958602905272],["DF",-12.557969093322754],["▁Wikimedia",-12.55797004699707],["ktu",-12.557975769042969],["▁ହୋଇଛି",-12.557976722717283],["hlo",-12.557977676391602],["▁हुनेछ",-12.557982444763184],["▁setembre",-12.557984352111816],["▁തന്റെ",-12.558000564575195],["▁Giáo",-12.55801010131836],["▁iedereen",-12.55801773071289],["▁ഇന്ന്",-12.55805206298828],["▁Secret",-12.558101654052734],["▁समिति",-12.558109283447266],["▁구매",-12.55812168121338],["緊",-12.558156967163086],["▁kilogram",-12.558157920837402],["▁Cuando",-12.55817413330078],["▁Çe",-12.558197021484377],["END",-12.558237075805664],["▁Hoo",-12.558260917663574],["▁Ջ",-12.558262825012209],["GEN",-12.55833625793457],["kati",-12.558354377746582],["▁заг",-12.558432579040527],["ेड",-12.558477401733398],["څ",-12.558480262756348],["jedni",-12.558524131774902],["▁syö",-12.558530807495115],["▁내가",-12.558534622192385],["▁준비",-12.558538436889648],["▁rex",-12.55854320526123],["prāt",-12.558599472045898],["Op",-12.55860424041748],["ஷ்",-12.558622360229492],["忠",-12.558639526367188],["inä",-12.558735847473145],["▁subito",-12.55874729156494],["▁eigene",-12.558761596679688],["கோ",-12.558773040771484],["bhi",-12.5587739944458]
,["ILA",-12.558778762817385],["ičky",-12.558783531188965],["▁enerji",-12.558785438537598],["▁سایر",-12.558831214904783],["▁exclu",-12.558850288391112],["▁حالة",-12.558887481689451],["▁origine",-12.558914184570312],["▁políticas",-12.558919906616213],["lið",-12.558945655822754],["træ",-12.558979034423828],["らず",-12.558988571166992],["쪽",-12.55900764465332],["лять",-12.559022903442385],["▁зай",-12.559024810791016],["▁meinem",-12.559077262878418],["▁Pani",-12.559165954589844],["っています",-12.559184074401855],["▁اهل",-12.559211730957031],["▁Ash",-12.559263229370115],["合理",-12.559268951416016],["▁담",-12.559301376342772],["ਟਾ",-12.559391975402832],["▁آموزشی",-12.559409141540527],["ener",-12.559410095214844],["▁первый",-12.559412002563477],["▁interesant",-12.559412956237791],["▁kilometer",-12.55942440032959],["▁만들어",-12.559426307678224],["емся",-12.559459686279297],["▁दोनों",-12.559494972229004],["▁beni",-12.55950164794922],["υφ",-12.559541702270508],["电视",-12.559577941894531],["▁İnsan",-12.55965805053711],["▁känner",-12.55967617034912],["▁могла",-12.559682846069336],["▁XI",-12.559739112854004],["▁uri",-12.559755325317385],["▁مجال",-12.55978012084961],["▁Wood",-12.559826850891112],["らない",-12.559832572937012],["▁dejar",-12.559852600097656],["lendir",-12.559859275817873],["▁риск",-12.55986499786377],["То",-12.559884071350098],["ակից",-12.5599365234375],["אָל",-12.559962272644045],["▁программ",-12.55996322631836],["▁تعليم",-12.559971809387209],["žan",-12.559976577758787],["▁Dem",-12.559988021850586],["ювання",-12.559991836547852],["▁mieszka",-12.560096740722656],["▁Salon",-12.560108184814451],["ሺ",-12.560136795043944],["▁Після",-12.560138702392578],["▁मीडिया",-12.560142517089844],["▁پڑھ",-12.560155868530272],["มากมาย",-12.560172080993652],["▁Dani",-12.560208320617676],["▁мың",-12.560224533081056],["▁dose",-12.560233116149902],["arki",-12.560235977172852],["▁profond",-12.560264587402344],["▁باد",-12.56028938293457],["NL",-12.560291290283203],["▁weniger",-12.560294151306152],["оза",-12.560322761535645],["▁baadhi",-12.56032657623291],["चार",-12.560338973999023],["ಾದ",-12.560344696044922],["▁electric",-12.560534477233888],["▁aff",-12.56054973602295],["хир",-12.560566902160645],["kne",-12.560574531555176],["-2016",-12.560582160949709],["гран",-12.560583114624023],["▁ខ្លួន",-12.560587882995604],["lerimiz",-12.56061553955078],["read",-12.560641288757324],["▁၄",-12.560653686523438],["vene",-12.560670852661133],["▁hinter",-12.560779571533203],["▁confort",-12.56080436706543],["▁Sü",-12.560812950134276],["onis",-12.56082248687744],["▁participación",-12.560846328735352],["▁дүйнө",-12.56086254119873],["▁କାର୍ଯ୍ୟ",-12.56086540222168],["▁foron",-12.560873031616213],["▁발생",-12.560891151428224],["▁Ayrıca",-12.560893058776855],["дол",-12.560900688171388],["СТ",-12.560903549194336],["preis",-12.560979843139648],["▁concurs",-12.561017036437988],["ceni",-12.561031341552734],["心里",-12.561073303222656],["▁tenía",-12.561138153076172],["▁ziua",-12.561161994934082],["▁util",-12.561223030090332],["▁теңге",-12.561271667480469],["▁хр",-12.561283111572266],["чет",-12.56129264831543],["▁أبو",-12.56130313873291],["▁ทั้ง",-12.561326026916504],["▁proiect",-12.561347007751465],["▁ਦ",-12.56135082244873],["▁балдар",-12.561352729797363],["ਠ",-12.561402320861816],["▁Оп",-12.561406135559082],["▁fand",-12.561407089233398],["▁koa",-12.561412811279297],["消費者",-12.561418533325195],["的东西",-12.561436653137209],["略",-12.561441421508787],["▁Мес",-12.561448097229004],["არს",-12.56147003173828],["广泛",-12.561530113220217],["陣",-12.56155490875244],["▁ibilb
ide",-12.561555862426758],["▁mềm",-12.56159210205078],["ธรรมชาติ",-12.561595916748049],["▁sahifa",-12.561605453491213],["ଣା",-12.561607360839844],["▁Matt",-12.561620712280272],["▁урок",-12.56162166595459],["liya",-12.56162929534912],["▁లేదా",-12.561643600463867],["▁Rau",-12.5616455078125],["的原因",-12.561667442321776],["▁skupaj",-12.561702728271484],["țele",-12.561786651611328],["▁Sports",-12.561797142028809],["mynd",-12.561800956726074],["ຫມ",-12.561829566955566],["წი",-12.561891555786133],["▁zaj",-12.561891555786133],["脱",-12.561920166015623],["▁Alter",-12.561996459960938],["▁kord",-12.562004089355469],["ੱਟ",-12.562016487121582],["App",-12.562049865722656],["▁χρ",-12.562061309814451],["tual",-12.562076568603516],["▁mű",-12.56208324432373],["▁Ihrem",-12.56208610534668],["▁keq",-12.56208610534668],["▁дух",-12.56209945678711],["ismus",-12.562108993530272],["げ",-12.56210994720459],["داری",-12.56217098236084],["▁cama",-12.562172889709473],["主題",-12.562211036682127],["탄",-12.562240600585938],["▁bril",-12.562304496765137],["ใต้",-12.56230640411377],["▁Cine",-12.562307357788086],["▁Malayalam",-12.562325477600098],["▁różnych",-12.562337875366213],["辛",-12.562338829040527],["▁XII",-12.562339782714844],["稳",-12.562345504760742],["dna",-12.562368392944336],["仕",-12.562376022338867],["vī",-12.56240463256836],["▁৯",-12.56240463256836],["妹",-12.56240940093994],["ຮ",-12.562481880187988],["▁comuni",-12.56248664855957],["yda",-12.562506675720217],["ယ်",-12.562506675720217],["sian",-12.562524795532228],["▁common",-12.562536239624023],["▁coraz",-12.5625638961792],["▁ohi",-12.562602043151855],["▁рет",-12.562621116638184],["मेल",-12.562623023986816],["▁Cons",-12.562651634216309],["niž",-12.56266975402832],["ଣି",-12.562682151794434],["▁работать",-12.56277561187744],["▁sara",-12.562801361083984],["▁permis",-12.562936782836914],["▁თავი",-12.562941551208496],["תר",-12.562968254089355],["ذكر",-12.562973976135254],["▁comun",-12.563023567199709],["▁posterior",-12.56302547454834],["sjons",-12.563028335571287],["▁Pēc",-12.563048362731934],["▁giấy",-12.563048362731934],["مد",-12.5630521774292],["▁فارس",-12.563053131103516],["▁dignissim",-12.563055038452148],["شير",-12.56305694580078],["▁csoport",-12.56308937072754],["▁чини",-12.563092231750488],["▁poezi",-12.56309986114502],["▁hiçbir",-12.56313705444336],["▁feit",-12.56314182281494],["▁होत",-12.563146591186523],["тина",-12.563153266906738],["▁اختيار",-12.563178062438965],["શો",-12.56321907043457],["ием",-12.563228607177734],["▁않",-12.563260078430176],["▁ไป",-12.5632905960083],["▁будуть",-12.56338119506836],["▁profesi",-12.563494682312012],["▁मार्ग",-12.56352710723877],["כר",-12.563535690307615],["▁پيدا",-12.56355094909668],["▁пута",-12.563575744628906],["▁kring",-12.563576698303224],["▁країн",-12.563604354858398],["▁цих",-12.563628196716309],["▁jezik",-12.563629150390623],["ического",-12.563651084899902],["ましょう",-12.563663482666016],["▁ओर",-12.563764572143556],["čně",-12.563809394836426],["ღა",-12.563814163208008],["▁sali",-12.56381893157959],["ിട്ടില്ല",-12.563833236694336],["▁dó",-12.563834190368652],["▁gesch",-12.5638427734375],["▁usando",-12.563851356506348],["ဆိုတာ",-12.563859939575195],["ότητας",-12.563884735107422],["▁নয়",-12.563895225524902],["шев",-12.563993453979492],["ндар",-12.56400203704834],["▁ჯერ",-12.56401824951172],["jf",-12.564019203186035],["▁Vietnam",-12.564027786254885],["ႏ",-12.564064979553224],["▁melhores",-12.564090728759766],["▁فعل",-12.56413745880127],["▁puh",-12.564197540283203],["ацыя",-12.56423282623291],["▁врз",-12.56424045562744],["▁እኔ",-12
.56424617767334],["▁qytet",-12.564269065856934],["▁stiu",-12.564291000366213],["▁27-",-12.56429386138916],["▁categorie",-12.564306259155272],["etud",-12.564361572265623],["send",-12.56436824798584],["▁Ling",-12.564438819885254],["▁Palm",-12.56447982788086],["▁berita",-12.56447982788086],["▁måned",-12.564486503601074],["வர",-12.564498901367188],["▁చంద్రబాబు",-12.56450653076172],["▁ситуация",-12.564508438110352],["זע",-12.564512252807615],["▁بینی",-12.564512252807615],["jed",-12.56452751159668],["ປະເທດ",-12.564538955688477],["▁Maga",-12.56454086303711],["▁ښار",-12.564541816711426],["▁сигурност",-12.564550399780272],["прави",-12.564555168151855],["▁szybko",-12.564567565917969],["▁pretože",-12.564592361450195],["LG",-12.564655303955078],["▁gjë",-12.564668655395508],["▁gwa",-12.564689636230469],["▁dostęp",-12.564720153808594],["स्व",-12.564749717712402],["▁Stunden",-12.564813613891602],["▁uk",-12.564886093139648],["▁برد",-12.56495189666748],["▁கரு",-12.565008163452148],["awi",-12.565017700195312],["لك",-12.565081596374512],["▁tengok",-12.56509780883789],["▁voisi",-12.565114974975586],["▁ابتدا",-12.56512451171875],["▁rodzin",-12.565146446228027],["▁sø",-12.565155029296877],["▁պաշտոն",-12.565184593200684],["ുടെ",-12.565186500549316],["▁teď",-12.565216064453123],["▁također",-12.56523609161377],["▁jornada",-12.565237045288086],["нія",-12.5652494430542],["▁laporan",-12.565267562866213],["ยู",-12.565268516540527],["▁administr",-12.565326690673828],["▁talde",-12.565346717834473],["▁actor",-12.565374374389648],["imen",-12.565375328063965],["▁представлен",-12.565404891967772],["▁теж",-12.565407752990724],["▁ministri",-12.565447807312012],["richt",-12.56546401977539],["▁lavora",-12.56548023223877],["▁форми",-12.56550121307373],["▁kupi",-12.565509796142578],["▁ਜੇ",-12.56551456451416],["▁цэ",-12.565522193908691],["קע",-12.565532684326172],["тара",-12.565553665161133],["čnih",-12.56562328338623],["▁आदि",-12.565651893615724],["▁Probleme",-12.565655708312988],["സ്റ്റ",-12.565661430358888],["▁niekto",-12.565731048583984],["▁სე",-12.56576919555664],["profit",-12.565825462341309],["duğu",-12.565844535827637],["akt",-12.565876960754396],["宫",-12.565917015075684],["czyć",-12.565932273864746],["漢",-12.565956115722656],["▁زر",-12.565959930419922],["▁apresenta",-12.565964698791504],["▁vestibulum",-12.565966606140137],["▁როდესაც",-12.565966606140137],["▁άλλη",-12.565977096557615],["주의",-12.565985679626465],["пом",-12.565990447998049],["▁közül",-12.566035270690918],["▁تجارت",-12.566082000732422],["ליי",-12.566086769104004],["ประจํา",-12.566089630126951],["讓人",-12.566146850585938],["ellen",-12.566155433654783],["duc",-12.566164016723633],["▁technik",-12.566168785095217],["▁mb",-12.566211700439451],["▁reng",-12.566248893737791],["▁اسلامي",-12.566265106201172],["▁acerca",-12.566290855407717],["▁곳",-12.566365242004396],["▁luke",-12.566398620605469],["▁hapo",-12.566433906555176],["▁pregunta",-12.566457748413086],["enje",-12.566502571105955],["▁rink",-12.566508293151855],["self",-12.566509246826172],["▁ሕ",-12.566574096679688],["▁መንግሥት",-12.566636085510254],["នា",-12.566650390625],["cej",-12.56666088104248],["▁עי",-12.56667137145996],["▁hicho",-12.566675186157228],["明显",-12.566683769226074],["الله",-12.566690444946287],["▁території",-12.56669807434082],["▁რომლებიც",-12.56669807434082],["▁voksne",-12.566699981689451],["▁Hard",-12.56670093536377],["▁goeie",-12.566731452941896],["ziale",-12.56677532196045],["▁ჩვენს",-12.566800117492676],["▁early",-12.566816329956056],["▁mājas",-12.566821098327637],["▁présent",-12.566858291625977]
,["▁पता",-12.566877365112305],["igita",-12.566946029663086],["▁Tempat",-12.56694793701172],["▁babes",-12.56696891784668],["gini",-12.566969871520996],["▁nava",-12.56700038909912],["ыі",-12.567002296447754],["▁Əliyevin",-12.56702995300293],["▁Hir",-12.567056655883787],["▁drev",-12.567070960998535],["ēšana",-12.567093849182127],["ljub",-12.567106246948242],["擊",-12.567130088806152],["▁magnet",-12.567143440246582],["▁فشار",-12.567145347595217],["ondo",-12.56715202331543],["▁ван",-12.56715488433838],["▁функц",-12.567194938659668],["内部",-12.567209243774414],["ഞ്ച",-12.567217826843262],["▁Wiki",-12.567228317260742],["▁جل",-12.567241668701172],["▁laste",-12.567243576049805],["нце",-12.567305564880373],["▁അതു",-12.567306518554688],["▁måte",-12.567309379577637],["▁Dj",-12.567317962646484],["180",-12.567319869995115],["▁aparece",-12.567334175109863],["不再",-12.567338943481444],["owski",-12.567352294921877],["▁thiên",-12.567422866821287],["▁fakta",-12.567423820495604],["츠",-12.567428588867188],["▁කෙනෙක්",-12.567429542541504],["▁שוין",-12.567447662353516],["▁cilvēku",-12.56745147705078],["▁serait",-12.56745433807373],["▁vodo",-12.567456245422363],["針對",-12.567513465881348],["னம்",-12.567530632019045],["▁ទេ",-12.56761074066162],["▁njegove",-12.567615509033203],["▁Asa",-12.567617416381836],["mpar",-12.567639350891112],["▁Երեւանի",-12.567644119262695],["▁semmi",-12.567660331726074],["▁blivit",-12.5676851272583],["دۇ",-12.56772518157959],["▁Mé",-12.56772804260254],["▁حجم",-12.567749977111816],["området",-12.567784309387209],["Ac",-12.567787170410156],["▁gyerek",-12.567832946777344],["▁subven",-12.567853927612305],["码",-12.567863464355469],["▁የማይ",-12.56790542602539],["▁പരി",-12.567922592163086],["ML",-12.56799602508545],["▁successo",-12.568002700805664],["▁лесно",-12.568012237548828],["tið",-12.568028450012209],["▁Republik",-12.56808090209961],["▁dirinya",-12.56808376312256],["▁posso",-12.568086624145508],["DB",-12.568097114562988],["йшла",-12.568138122558594],["เตรียม",-12.568160057067873],["▁Лондон",-12.568161010742188],["▁మూవీ",-12.568163871765137],["に関する",-12.56816577911377],["▁thiện",-12.568169593811035],["ủ",-12.568172454833984],["▁rəsmi",-12.568194389343262],["apan",-12.568225860595703],["fast",-12.568306922912598],["tön",-12.568378448486328],["▁ዶ",-12.568436622619627],["▁নির্বাচন",-12.568438529968262],["▁एम",-12.56847095489502],["גול",-12.568480491638184],["▁hadisə",-12.568503379821776],["▁vehicula",-12.56850814819336],["▁رود",-12.568533897399902],["▁bırak",-12.568543434143066],["▁ruimte",-12.568553924560549],["▁האב",-12.568617820739746],["▁Cuba",-12.568633079528809],["ىدە",-12.568660736083984],["乘",-12.568663597106934],["▁nudi",-12.568683624267578],["รู",-12.56869411468506],["૦",-12.568696022033691],["▁musica",-12.568717956542969],["▁తప్ప",-12.568767547607422],["▁අනු",-12.56886100769043],["▁რამდენიმე",-12.568907737731934],["▁4-5",-12.568910598754885],["▁salle",-12.568913459777832],["▁typer",-12.568942070007324],["▁realtà",-12.56898593902588],["mato",-12.569003105163574],["▁genel",-12.569045066833496],["▁situat",-12.569096565246582],["▁висок",-12.569100379943848],["တု",-12.569103240966797],["язку",-12.569220542907717],["▁leben",-12.569225311279297],["▁имају",-12.569262504577637],["▁ஹ",-12.569286346435549],["▁zdaj",-12.569324493408203],["▁estrutura",-12.56944465637207],["masını",-12.569507598876951],["йна",-12.569514274597168],["▁جاء",-12.569523811340332],["▁дзень",-12.56955623626709],["▁novih",-12.56961441040039],["▁неколико",-12.569628715515137],["開催",-12.569656372070312],["ünün",-12.56966590881347
7],["ткі",-12.569726943969728],["▁convoca",-12.569740295410156],["յում",-12.569743156433104],["▁الحل",-12.569781303405762],["▁öt",-12.569794654846191],["▁بتایا",-12.569804191589355],["▁ezi",-12.569814682006836],["▁వచ్చే",-12.569816589355469],["jena",-12.569819450378418],["▁Help",-12.56987762451172],["▁aż",-12.56990909576416],["glu",-12.569927215576172],["рез",-12.56997299194336],["▁rever",-12.570013046264648],["▁ផ្",-12.570013999938965],["vezető",-12.570068359375],["cznym",-12.570123672485352],["▁ଗୋ",-12.570131301879885],["▁tulisan",-12.570155143737791],["▁ମେ",-12.570173263549805],["▁bete",-12.570210456848145],["▁hjelp",-12.570212364196776],["▁въ",-12.570213317871094],["代理",-12.570222854614258],["ที่เป็น",-12.57023811340332],["ዳን",-12.570239067077637],["▁watch",-12.570239067077637],["▁huku",-12.57030200958252],["▁Тому",-12.570303916931152],["舊",-12.570317268371582],["пры",-12.57033348083496],["ShareAlike",-12.570361137390137],["▁tuần",-12.570361137390137],["▁μπορούν",-12.570361137390137],["▁thuế",-12.570375442504885],["▁enkelte",-12.570393562316896],["こんな",-12.570393562316896],["▁almost",-12.570396423339844],["[2]",-12.57041072845459],["pf",-12.570414543151855],["ğı",-12.570426940917969],["▁concurso",-12.570456504821776],["▁ទៀត",-12.57047176361084],["▁velika",-12.57048511505127],["▁tash",-12.570489883422852],["▁confirm",-12.570490837097168],["つけ",-12.570494651794434],["Ak",-12.570566177368164],["ovati",-12.570599555969238],["出去",-12.5706148147583],["▁pedido",-12.57068157196045],["▁బి",-12.570682525634766],["▁silah",-12.570741653442385],["anê",-12.57076930999756],["▁đóng",-12.57079792022705],["▁прочита",-12.570801734924316],["▁Dios",-12.57085132598877],["▁gl",-12.570852279663086],["dő",-12.570914268493652],["нах",-12.570930480957031],["bija",-12.570944786071776],["提供了",-12.571006774902344],["▁pick",-12.571037292480469],["论坛",-12.571052551269531],["仙",-12.571063995361328],["▁kaluar",-12.57107925415039],["▁settimana",-12.571096420288086],["▁hores",-12.571100234985352],["▁teho",-12.571101188659668],["▁istinadən",-12.571107864379885],["▁आइ",-12.571159362792969],["tyd",-12.571165084838867],["▁मग",-12.5712251663208],["▁Bush",-12.571236610412598],["▁frisk",-12.571266174316406],["कू",-12.571270942687988],["▁lyg",-12.571332931518556],["▁самым",-12.571412086486816],["χε",-12.57143497467041],["ъз",-12.571450233459473],["▁kera",-12.571499824523926],["▁masz",-12.571539878845217],["▁قىلغان",-12.571568489074709],["ディ",-12.571606636047363],["▁atbild",-12.57168197631836],["▁ტ",-12.571682929992676],["vää",-12.571684837341309],["▁PSD",-12.571694374084473],["▁innhold",-12.571701049804688],["ænd",-12.571714401245115],["hua",-12.571732521057127],["čkom",-12.57176685333252],["ਸ਼ਨ",-12.571784973144531],["亡",-12.57178783416748],["▁ауыл",-12.571791648864746],["瓜",-12.571818351745604],["бег",-12.571826934814451],["▁საუკეთესო",-12.571829795837402],["映画",-12.57183074951172],["▁encontra",-12.571881294250488],[".11.20",-12.571882247924805],["▁ekonomik",-12.571894645690918],["▁حالات",-12.571908950805664],["▁سائنس",-12.571928024291992],["症状",-12.57194709777832],["▁artista",-12.571991920471191],["▁politici",-12.571992874145508],["▁शा",-12.572023391723633],["直到",-12.572023391723633],["▁reda",-12.572051048278809],["▁ndje",-12.572071075439451],["рата",-12.572080612182615],["▁större",-12.572099685668944],["▁gá",-12.572129249572754],["▁படம்",-12.57213020324707],["φων",-12.572197914123535],["▁meira",-12.5722074508667],["▁برا",-12.572230339050291],["▁časti",-12.572242736816406],["оль",-12.572284698486328],["ngan",-12.57232761383056
6],["ூ",-12.572340965270996],["▁loka",-12.572346687316896],["kaup",-12.572352409362791],["server",-12.572381019592283],["▁Rand",-12.57248306274414],["▁jazz",-12.572487831115724],["tulo",-12.57250690460205],["損",-12.572508811950684],["వ్",-12.572517395019531],["ключа",-12.572553634643556],["▁있어",-12.572553634643556],["▁compañía",-12.572565078735352],["▁müvafiq",-12.572565078735352],["▁sollicitudin",-12.572566032409668],["▁découvrir",-12.572566986083984],["▁وع",-12.572579383850098],["▁bist",-12.572598457336426],["цкі",-12.572626113891602],["ៃ",-12.572641372680664],["stes",-12.57272243499756],["dex",-12.572723388671877],["▁disfrutar",-12.572750091552734],["▁अलग",-12.572778701782228],["▁niego",-12.572792053222656],["▁Ши",-12.572799682617188],["▁údaje",-12.57280445098877],["▁watoto",-12.572811126708984],["▁migra",-12.57281494140625],["дзей",-12.572848320007324],["▁Rin",-12.572870254516602],["▁dagens",-12.572920799255373],["ndro",-12.572925567626951],["▁سچ",-12.572936058044434],["ბო",-12.572969436645508],["зван",-12.572973251342772],["ior",-12.572985649108888],["Ре",-12.572988510131836],["▁ກັນ",-12.573046684265137],["kový",-12.57312297821045],["τές",-12.573149681091309],["▁viti",-12.57321834564209],["▁৮",-12.573248863220217],["顶",-12.573250770568848],["ライ",-12.573282241821287],["▁സംസ്ഥാന",-12.573290824890137],["▁Pendidikan",-12.573301315307615],["▁πλέον",-12.573301315307615],["ቸውን",-12.573302268981934],["轻",-12.57330322265625],["▁bantuan",-12.57334041595459],["▁никто",-12.57334041595459],["▁مركز",-12.57338047027588],["૯",-12.573381423950195],["▁trochę",-12.573420524597168],["چی",-12.573423385620115],["▁کلیک",-12.573430061340332],["▁(25",-12.573455810546877],["▁antal",-12.573471069335938],["bati",-12.57347297668457],["ເມືອງ",-12.573553085327148],["▁SAM",-12.573569297790527],["▁maana",-12.573569297790527],["▁Lov",-12.57359504699707],["▁پک",-12.57359504699707],["▁kyk",-12.57362937927246],["յուր",-12.57363986968994],["Θ",-12.573684692382812],["▁Latvija",-12.57370662689209],["ينا",-12.573766708374023],["ಸ್ಟ್",-12.573769569396973],["KB",-12.573798179626465],["▁Çok",-12.573806762695312],["▁møde",-12.57383155822754],["▁movimento",-12.57383632659912],["▁technolog",-12.57383918762207],["▁பார்க்க",-12.573866844177246],["щен",-12.573867797851562],["▁kök",-12.573899269104004],["▁որի",-12.573899269104004],["▁HS",-12.573933601379396],["scar",-12.574007987976074],["▁جم",-12.574024200439451],["▁Liverpool",-12.574037551879885],["돼",-12.574037551879885],["▁όλες",-12.57404327392578],["▁listopada",-12.574044227600098],["▁ಪುಸ್ತಕ",-12.574045181274414],["▁Praesent",-12.574055671691896],["ទេ",-12.574076652526855],["▁المنطقة",-12.574092864990234],["▁ສາ",-12.5740966796875],["дали",-12.574156761169434],["▁Кыргызстан",-12.57419204711914],["ranta",-12.574231147766112],["▁据",-12.57423210144043],["▁treatment",-12.574234008789062],["▁акча",-12.57425594329834],["▁резултати",-12.574256896972656],["stęp",-12.57427978515625],["▁weyn",-12.574289321899414],["▁אויב",-12.57429313659668],["▁олардың",-12.574294090270996],["▁ಮೆ",-12.57430362701416],["▁बनाउन",-12.574304580688477],["▁stori",-12.574312210083008],["購",-12.574316024780272],["▁ಉಪ",-12.574334144592283],["▁resist",-12.574356079101562],["」(",-12.574400901794434],["▁موارد",-12.57448959350586],["▁Россия",-12.57450008392334],["▁ក្រោយ",-12.574511528015137],["กิ",-12.57454776763916],["▁menang",-12.574552536010742],["▁informace",-12.57459545135498],["▁Print",-12.574620246887209],["▁cai",-12.57462215423584],["▁април",-12.574657440185549],["▁mpya",-12.574685096740724],["▁ਲਾ",-12.5746927261
35254],["▁ایمیل",-12.574694633483888],["▁Kum",-12.574695587158203],["zom",-12.574711799621582],["訓練",-12.57473373413086],["▁ասաց",-12.57474136352539],["▁semanas",-12.574742317199709],["協助",-12.574766159057615],["▁шийдвэр",-12.574769973754885],["bą",-12.574772834777832],["▁происходит",-12.57477569580078],["▁कोण",-12.574784278869627],["▁cantik",-12.574894905090332],["▁maxim",-12.574914932250977],["▁ofereix",-12.57491970062256],["рыя",-12.574936866760254],["ಕರ್",-12.57497215270996],["▁приз",-12.574984550476074],["▁شیر",-12.575032234191896],["кур",-12.575058937072754],["▁operasyon",-12.575060844421388],["▁пал",-12.575080871582031],["▁պար",-12.575164794921877],["数量",-12.575215339660645],["▁ბ",-12.575247764587402],["▁ਪਾ",-12.575287818908691],["▁பே",-12.575292587280272],["▁అమ్మ",-12.575362205505373],["▁ért",-12.575372695922852],["лім",-12.575459480285645],["▁கொள்ள",-12.5755033493042],["▁ಅಧ್ಯಕ್ಷ",-12.575511932373049],["▁ලොකු",-12.575511932373049],["▁ಬೆಲೆ",-12.575523376464844],["▁सामने",-12.575555801391602],["έν",-12.575601577758787],["▁reise",-12.575615882873535],["▁నిర్",-12.575676918029783],["▁lata",-12.575698852539062],["sec",-12.575701713562012],["liter",-12.57573127746582],["versi",-12.575742721557615],["▁Kok",-12.575749397277832],["▁илүү",-12.57576847076416],["▁ਗੁਰ",-12.57578468322754],["▁Teatr",-12.575828552246094],["eros",-12.575862884521484],["▁dhex",-12.575862884521484],["▁mpi",-12.575872421264648],["▁koop",-12.575879096984863],["▁1080",-12.575895309448242],["▁problemer",-12.575900077819824],["ética",-12.575901985168455],["▁próxima",-12.575916290283203],["▁बल",-12.575916290283203],["▁aire",-12.575934410095217],["യര്",-12.575945854187012],["▁планира",-12.57595157623291],["▁сказала",-12.575997352600098],["бата",-12.576000213623049],["▁අපට",-12.576007843017578],["▁skrevet",-12.57601261138916],["ก็ได้",-12.576019287109377],["▁rä",-12.576021194458008],["▁Namen",-12.576057434082031],["ಕ್ಕೂ",-12.576059341430664],["▁producten",-12.57606315612793],["tré",-12.576080322265623],["▁بشر",-12.576094627380373],["ಮೆ",-12.57611846923828],["▁داخلی",-12.576130867004396],["▁వచ్చిన",-12.576131820678713],["▁Erfolg",-12.576147079467772],["▁փորձ",-12.576159477233888],["かもしれません",-12.576168060302734],["▁KR",-12.576170921325684],["చు",-12.576183319091797],["▁alcune",-12.57618522644043],["▁soi",-12.576186180114746],["特朗普",-12.576186180114746],["τικών",-12.576190948486328],["άς",-12.57619857788086],["tojen",-12.576201438903809],["▁मूल्य",-12.576214790344238],["▁صد",-12.576253890991213],["قە",-12.57625675201416],["▁Thị",-12.576258659362791],["šča",-12.576272010803224],["곳",-12.576273918151855],["МУ",-12.576278686523438],["▁stabili",-12.57629108428955],["tning",-12.57634162902832],["▁parametr",-12.576353073120115],["▁gange",-12.576363563537598],["▁berse",-12.576430320739746],["▁vän",-12.576448440551758],["hús",-12.576502799987791],["γο",-12.576515197753906],["▁taxi",-12.576516151428224],["▁ίδια",-12.57655143737793],["ország",-12.576556205749512],["▁கட",-12.57662582397461],["▁oikein",-12.576637268066406],["▁locali",-12.576642036437988],["zwe",-12.576659202575684],["▁جد",-12.576715469360352],["▁introduc",-12.576726913452148],["▁فرض",-12.576790809631348],["▁შემთხვევაში",-12.576804161071776],["ĉa",-12.576811790466309],["кү",-12.576823234558104],["▁Jose",-12.576851844787598],["▁zodat",-12.576863288879396],["▁hazırla",-12.576906204223633],["ລິ",-12.576910018920898],["liber",-12.576930046081545],["▁پيو",-12.576943397521973],["▁Сред",-12.576956748962402],["04.",-12.576976776123049],["▁اصلاح",-12.577001571655272],["▁aliquet",-12
.5770902633667],["▁Zato",-12.577118873596191],["ขาว",-12.577178955078123],["訊",-12.57717990875244],["úc",-12.577219009399414],["▁titre",-12.577289581298828],["cce",-12.577316284179688],["▁груп",-12.577327728271484],["material",-12.577345848083496],["න්ගේ",-12.577378273010254],["▁joku",-12.577385902404783],["▁kuinka",-12.577430725097656],["▁систему",-12.577439308166504],["පි",-12.57748031616211],["tű",-12.577488899230955],["▁ਸਭ",-12.57750415802002],["▁മാറ്റ",-12.5775146484375],["▁इं",-12.577543258666992],["▁वाच",-12.577567100524902],["▁California",-12.577577590942385],["▁vole",-12.577597618103027],["shte",-12.577617645263672],["▁مشروع",-12.577624320983888],["ثر",-12.577627182006836],["▁ბე",-12.57763671875],["▁lå",-12.577669143676758],["俄",-12.577677726745604],["▁እንዴት",-12.577682495117188],["για",-12.577767372131348],["ત્",-12.577776908874512],["▁adapt",-12.577776908874512],["▁өл",-12.577777862548828],["▁миң",-12.57781219482422],["▁ଶ୍ରୀ",-12.5778226852417],["▁ولي",-12.577844619750977],["▁oare",-12.577900886535645],["▁Bà",-12.577921867370604],["▁مور",-12.577924728393556],["læ",-12.577926635742188],["क्रम",-12.57794189453125],["ಿಯ",-12.577957153320312],["▁Gir",-12.578001976013184],["▁maig",-12.578020095825195],["▁Anche",-12.578022003173828],["▁vòng",-12.578025817871094],["▁Asta",-12.578032493591309],["▁duda",-12.578036308288574],["skor",-12.578167915344238],["▁മന്ത്രി",-12.578169822692873],["▁deyə",-12.57819938659668],["▁зап",-12.578211784362791],["▁වෙයි",-12.578213691711426],["▁بلد",-12.578252792358398],["જો",-12.578269004821776],["БУ",-12.578288078308104],["▁operator",-12.57829475402832],["лиш",-12.57829761505127],["เข้าไป",-12.57833194732666],["hdy",-12.578347206115724],["▁склада",-12.578431129455566],["▁Waar",-12.57844352722168],["▁Рэспублікі",-12.57846736907959],["▁ಸಚಿವ",-12.578468322753906],["▁објави",-12.578471183776855],["▁сябе",-12.57847499847412],["დეთ",-12.578486442565918],["▁mulle",-12.578505516052246],["▁sentit",-12.578513145446776],["▁sund",-12.57854175567627],["ታዊ",-12.578546524047852],["▁sian",-12.57859992980957],["▁šiuo",-12.57859992980957],["▁البحث",-12.578619003295898],["йтесь",-12.578726768493652],["▁veiks",-12.57874584197998],["▁Свети",-12.578747749328612],["▁gjelder",-12.578755378723145],["▁80-",-12.578774452209473],["▁ואת",-12.578777313232422],["▁Ана",-12.578807830810549],["▁taken",-12.57883071899414],["▁особи",-12.578843116760254],["krzy",-12.578944206237791],["čiti",-12.57897663116455],["oties",-12.579054832458496],["SKI",-12.57906436920166],["bahan",-12.57906436920166],["вна",-12.57906723022461],["јата",-12.579079627990724],["▁Christi",-12.579119682312012],["▁fana",-12.579141616821287],["ONA",-12.579179763793944],["leyen",-12.579181671142578],["quí",-12.579191207885742],["спор",-12.579204559326172],["▁dấu",-12.57920742034912],["▁hendrerit",-12.57920742034912],["▁پیشنهاد",-12.57920742034912],["▁titular",-12.57926082611084],["risti",-12.579265594482422],["▁الشيخ",-12.57934284210205],["一场",-12.579345703125],["▁төлөө",-12.579352378845217],["▁Sind",-12.57939624786377],["▁უფლება",-12.579404830932615],["▁leuke",-12.579411506652832],["ੰਡ",-12.57941436767578],["▁ຢ່າງ",-12.57946491241455],["ಶ್",-12.579479217529297],["uge",-12.579503059387209],["qin",-12.579504013061523],["trac",-12.579612731933594],["▁කි",-12.579615592956545],["▁връзка",-12.579634666442873],["もあります",-12.579663276672363],["λια",-12.579669952392578],["্র",-12.579696655273438],["▁بہتر",-12.579774856567385],["刷",-12.57978057861328],["他人",-12.579812049865724],["liza",-12.579845428466797],["ชั่น",-12.57986545562744],[
"▁adına",-12.579870223999023],["▁เปิด",-12.579872131347656],["▁chcete",-12.579885482788086],["▁hvem",-12.579920768737791],["價值",-12.579943656921388],["gården",-12.579948425292969],["▁додека",-12.579948425292969],["▁මුදල්",-12.5799560546875],["▁12:00",-12.579967498779297],["▁pasangan",-12.579985618591309],["▁министр",-12.580002784729004],["▁aplikasi",-12.58000946044922],["▁név",-12.5800142288208],["▁prób",-12.580080032348633],["▁Cách",-12.580096244812012],["▁различ",-12.580102920532228],["βολ",-12.580122947692873],["▁vilka",-12.580127716064451],["zobraz",-12.58013916015625],["操",-12.580151557922363],["brani",-12.580156326293944],["▁kare",-12.580158233642578],["▁चित्र",-12.580194473266602],["▁guna",-12.580238342285156],["▁Arena",-12.58024787902832],["▁Шар",-12.580252647399902],["▁פרי",-12.580259323120115],["▁വൈ",-12.5802640914917],["زاد",-12.580284118652344],["▁məsələ",-12.580294609069824],["▁modell",-12.580350875854492],["ાન",-12.580371856689451],["▁മറ",-12.58037281036377],["▁кола",-12.580392837524414],["▁Kab",-12.580418586730955],["▁Koko",-12.580451011657717],["▁Shqipëri",-12.580453872680664],["▁Ung",-12.580474853515623],["აქ",-12.580519676208496],["▁=)",-12.58057689666748],["▁μιας",-12.580592155456545],["▁db",-12.580621719360352],["gruppen",-12.58064079284668],["rare",-12.580670356750488],["▁viongozi",-12.580689430236816],["▁فوتبال",-12.580693244934082],["βε",-12.580694198608398],["保存",-12.580744743347168],["НС",-12.5807466506958],["明白",-12.580764770507812],["▁سولې",-12.580782890319824],["▁ਲੋਕ",-12.58079433441162],["▁அவ",-12.580812454223633],["▁Русия",-12.580849647521973],["あまり",-12.580873489379885],["▁Тэр",-12.580878257751465],["न्ति",-12.58087921142578],["kset",-12.580896377563477],["看了",-12.580957412719728],["▁section",-12.581101417541504],["▁lartë",-12.581120491027832],["3/",-12.581132888793944],["née",-12.581135749816896],["uyor",-12.581165313720703],["▁своим",-12.58122730255127],["▁හැ",-12.58130931854248],["▁אלה",-12.581326484680176],["▁Trin",-12.581355094909668],["▁هېواد",-12.581409454345703],["▁campaña",-12.581436157226562],["▁maintenant",-12.581440925598145],["▁rápido",-12.581440925598145],["▁facilisis",-12.581448554992676],["▁Lady",-12.581450462341309],["र्म",-12.581473350524902],["▁dekor",-12.58147430419922],["▁neiz",-12.581483840942385],["allah",-12.581491470336914],["▁grada",-12.581523895263672],["āts",-12.581552505493164],["løb",-12.58155632019043],["▁వల్ల",-12.5816011428833],["▁отырып",-12.581616401672363],["▁vě",-12.581624984741213],["▁câ",-12.581640243530272],["trof",-12.58164119720459],["▁dada",-12.581659317016602],["▁NASA",-12.581663131713867],["▁Isto",-12.58168125152588],["▁hír",-12.58168125152588],["▁świat",-12.581759452819824],["ارات",-12.58178997039795],["ിംഗ്",-12.58184051513672],["▁tanta",-12.58184814453125],["▁мнение",-12.581862449645996],["βλ",-12.581873893737791],["אַט",-12.581911087036133],["▁роботу",-12.58191204071045],["organisation",-12.58191967010498],["mble",-12.581953048706056],["▁altyd",-12.5819730758667],["▁either",-12.581981658935549],["▁omnis",-12.58202838897705],["▁efni",-12.582038879394531],["▁wcześniej",-12.582173347473145],["▁געשריבן",-12.582173347473145],["▁मोबाइल",-12.582173347473145],["▁ವಿರುದ್ಧ",-12.582173347473145],["▁САЩ",-12.58217716217041],["▁찾아",-12.58220386505127],["▁کون",-12.582219123840332],["▁Christmas",-12.582223892211914],["hesh",-12.582252502441406],["▁Aktual",-12.582257270812988],["▁coas",-12.582260131835938],["▁Nú",-12.58228588104248],["ศรี",-12.582311630249023],["▁Cristo",-12.582314491271973],["▁engelsk",-12.582331657409668],["in
ici",-12.582356452941896],["▁បើ",-12.582359313964844],["sicht",-12.582408905029297],["iar",-12.582441329956056],["▁نسل",-12.582441329956056],["▁profession",-12.582448959350586],["▁చి",-12.582462310791016],["▁cateva",-12.58248519897461],["▁jemand",-12.582517623901367],["teiden",-12.582549095153809],["▁rais",-12.582549095153809],["▁gik",-12.582565307617188],["стен",-12.58256721496582],["▁mies",-12.58257293701172],["mė",-12.582585334777832],["▁pesan",-12.582601547241213],["▁referencia",-12.582655906677246],["▁mattis",-12.582663536071776],["▁Ola",-12.58266544342041],["▁جنوبی",-12.582683563232422],["▁Computer",-12.582734107971191],["▁speci",-12.582743644714355],["▁zelfs",-12.582749366760254],["klu",-12.582763671875],["อก",-12.582780838012695],["чення",-12.582799911499023],["▁máu",-12.582822799682615],["怀",-12.58283805847168],["休息",-12.582871437072754],["/2009",-12.58287239074707],["▁اروپا",-12.58290195465088],["▁популярн",-12.58290672302246],["▁ହେଲା",-12.582924842834473],["ဇာ",-12.582940101623535],["▁изменения",-12.582971572875977],["▁dalej",-12.582988739013672],["▁Là",-12.582992553710938],["▁праці",-12.582998275756836],["▁Parc",-12.583003044128418],["▁grip",-12.583006858825684],["Й",-12.58303165435791],["▁अवस्था",-12.58304214477539],["▁köp",-12.583063125610352],["▁ቦታ",-12.583072662353516],["▁görül",-12.583099365234377],["▁huset",-12.583126068115234],["biy",-12.583135604858398],["▁ponúka",-12.583135604858398],["malar",-12.58319854736328],["▁Ave",-12.583274841308594],["范围",-12.583294868469238],["ювати",-12.58331298828125],["碼",-12.583353996276855],["▁buurt",-12.583398818969728],["चित",-12.583468437194824],["៤",-12.583481788635254],["same",-12.583556175231934],["-35",-12.58355712890625],["▁Kurulu",-12.583625793457031],["▁ጎ",-12.583642959594728],["做了",-12.583654403686523],["▁Batu",-12.583659172058104],["▁Tài",-12.583680152893066],["▁nombreux",-12.58370590209961],["农村",-12.583732604980469],["ਗਰ",-12.583754539489746],["▁Janeiro",-12.58376121520996],["▁1%",-12.58381175994873],["้ง",-12.583819389343262],["▁препарат",-12.58382511138916],["▁ട്ര",-12.583842277526855],["▁одну",-12.583866119384766],["▁1947",-12.583880424499512],["।”",-12.5839262008667],["しております",-12.583966255187988],["ძი",-12.583977699279783],["гам",-12.584041595458984],["▁dado",-12.584046363830566],["▁balance",-12.584063529968262],["▁милиона",-12.584064483642578],["▁bertan",-12.584089279174805],["ЭЛ",-12.58409309387207],["▁namn",-12.584096908569336],["ньня",-12.584097862243652],["▁તા",-12.58412265777588],["rlar",-12.58413028717041],["▁подход",-12.58415699005127],["стоян",-12.584168434143066],["▁Joe",-12.584169387817385],["▁سات",-12.584174156188965],["్లో",-12.584181785583496],["▁funktion",-12.584259033203123],["▁Hag",-12.584263801574709],["▁gleda",-12.584271430969238],["▁hjælpe",-12.584298133850098],["▁світу",-12.584346771240234],["บุคคล",-12.584402084350586],["▁lobortis",-12.58440399169922],["▁буюу",-12.58440399169922],["▁Brown",-12.584421157836914],["不可能",-12.58448600769043],["യന്",-12.584534645080566],["ਪੁਰ",-12.584542274475098],["▁colori",-12.58454704284668],["▁матч",-12.584568977355955],["▁Még",-12.584585189819336],["▁forward",-12.584687232971191],["▁simula",-12.584746360778809],["λλ",-12.584762573242188],["▁ஜ",-12.584781646728516],["рите",-12.584860801696776],["▁гэтай",-12.584898948669434],["确保",-12.584904670715332],["ולה",-12.58491325378418],["فاق",-12.58494472503662],["वर्त",-12.584945678710938],["dong",-12.584978103637695],["▁Fara",-12.585015296936035],["▁හිමි",-12.585042953491213],["▁условиях",-12.58505630493164],["▁Regel",-12.58
5065841674805],["▁primeru",-12.585065841674805],["ိတ်",-12.585102081298828],["やって",-12.585105895996094],["莱",-12.58511447906494],["죠",-12.585115432739258],["▁بكل",-12.585128784179688],["ૌ",-12.585134506225586],["保険",-12.585144996643066],["▁වශයෙන්",-12.585148811340332],["▁повод",-12.585150718688965],["▁tradition",-12.585155487060549],["elim",-12.585156440734863],["▁மக்கள்",-12.585163116455078],["▁είπε",-12.585168838500977],["▁የም",-12.585171699523926],["igne",-12.585185050964355],["▁তাদের",-12.585185050964355],["ন্ট",-12.58518886566162],["▁의미",-12.585193634033203],["▁bandar",-12.58524227142334],["▁Ό",-12.585253715515137],["ंगा",-12.585260391235352],["▁miał",-12.585261344909668],["▁Alles",-12.58527374267578],["▁அவர்கள்",-12.585277557373049],["▁Pontevedra",-12.585280418395996],["▁Access",-12.58528995513916],["▁усіх",-12.58530330657959],["▁lief",-12.585314750671388],["▁турист",-12.585314750671388],["دية",-12.585334777832031],["mbung",-12.585336685180664],["lægge",-12.58534049987793],["жива",-12.585365295410156],["▁Abend",-12.585384368896484],["▁prinsip",-12.585390090942385],["▁carga",-12.58541774749756],["spil",-12.585427284240724],["ழி",-12.585448265075684],["sidan",-12.585450172424316],["▁roky",-12.585487365722656],["▁PKK",-12.585521697998049],["▁indir",-12.585529327392578],["ということ",-12.585590362548828],["szik",-12.58566951751709],["boli",-12.58570384979248],["ولا",-12.58574676513672],["ಪ್ರ",-12.585747718811035],["▁SIM",-12.58575439453125],["loh",-12.585760116577148],["▁jeunes",-12.585785865783691],["žky",-12.585801124572754],["jis",-12.585833549499512],["汗",-12.585861206054688],["host",-12.585892677307127],["ထြက္",-12.585892677307127],["ခ်ဳပ္",-12.585893630981444],["▁యొక్క",-12.585894584655762],["▁goberno",-12.585896492004396],["▁viaje",-12.585930824279783],["baz",-12.585941314697266],["▁жылдың",-12.585956573486328],["▁berg",-12.585959434509276],["ਾਇ",-12.585967063903809],["▁favorite",-12.585972785949709],["ფერ",-12.586007118225098],["▁þessum",-12.58607578277588],["bbe",-12.586088180541992],["▁ø",-12.586097717285156],["▁Minuten",-12.586146354675291],["▁ձեր",-12.58615493774414],["▁tratamento",-12.586196899414062],["▁ლო",-12.58619785308838],["ัญ",-12.586228370666504],["輕鬆",-12.586244583129885],["▁Gaz",-12.586294174194336],["▁Chor",-12.58630657196045],["▁Dev",-12.586323738098145],["▁کسان",-12.586508750915527],["टो",-12.586524963378906],["▁ś",-12.58653450012207],["▁วิธี",-12.586626052856444],["position",-12.586639404296877],["▁कृषि",-12.586639404296877],["調查",-12.586644172668455],["▁nəticəsində",-12.58664894104004],["▁вчера",-12.586678504943848],["▁ሆኖ",-12.586821556091309],["ക്കോ",-12.58686351776123],["ذي",-12.586908340454102],["▁njega",-12.58692455291748],["▁Александр",-12.58698272705078],["▁Oni",-12.586984634399414],["▁desembre",-12.586995124816896],["▁1.000",-12.587028503417969],["天然",-12.58705711364746],["jna",-12.587078094482422],["rott",-12.587080955505373],["策",-12.587101936340332],["سې",-12.58710765838623],["▁soldat",-12.587121963500977],["▁адреса",-12.587124824523926],["енные",-12.587136268615724],["▁국가",-12.587142944335938],["▁sollen",-12.587175369262695],["ivos",-12.58717918395996],["▁ម",-12.587198257446287],["бин",-12.587203025817873],["inig",-12.58721923828125],["런",-12.587221145629885],["▁ඉතා",-12.58732795715332],["능",-12.587359428405762],["▁Jamhuri",-12.587425231933594],["tych",-12.587479591369627],["▁ova",-12.587482452392578],["▁Kuch",-12.587512969970703],["▁છ",-12.587590217590332],["▁Planet",-12.587591171264648],["▁rekke",-12.587606430053713],["▁niz",-12.587654113769531],["▁yaşlı
",-12.587671279907228],["▁clara",-12.58768081665039],["૮",-12.587698936462402],["▁роб",-12.587701797485352],["gung",-12.5877103805542],["▁ျဖစ္",-12.58775520324707],["▁현",-12.587807655334473],["▁गत",-12.58781909942627],["▁several",-12.587828636169434],["نام",-12.5878324508667],["▁deposit",-12.587846755981444],["▁менш",-12.587848663330078],["▁Burada",-12.587868690490724],["СП",-12.587907791137695],["خا",-12.587913513183594],["▁escrito",-12.587955474853516],["▁vähe",-12.588022232055664],["כס",-12.588048934936523],["▁закуп",-12.588056564331056],["池",-12.588089942932127],["būt",-12.588098526000977],["ગી",-12.588127136230469],["▁alami",-12.588130950927734],["ify",-12.588141441345217],["ેટ",-12.588152885437012],["▁засгийн",-12.588157653808594],["▁Gem",-12.588177680969238],["▁kek",-12.588196754455566],["▁టీ",-12.588202476501465],["jave",-12.58820343017578],["▁있고",-12.588233947753906],["▁thái",-12.5882568359375],["▁Zin",-12.58826732635498],["пера",-12.588311195373535],["▁potrebbe",-12.588337898254396],["▁diseño",-12.588339805603027],["igung",-12.5884428024292],["ଡ଼ି",-12.588448524475098],["并没有",-12.588501930236816],["▁бат",-12.588534355163574],["dėti",-12.588552474975586],["▁4.0",-12.588579177856444],["рија",-12.588615417480469],["вля",-12.588668823242188],["face",-12.588688850402832],["горе",-12.588708877563477],["akat",-12.588740348815918],["▁satisf",-12.58878231048584],["媽媽",-12.58879566192627],["出生",-12.588817596435549],["ván",-12.588866233825684],["дите",-12.588873863220217],["▁इंडिया",-12.588881492614746],["▁பூ",-12.588881492614746],["▁Pela",-12.588907241821287],["ប់",-12.588987350463867],["▁цени",-12.588987350463867],["▁баланс",-12.58905792236328],["▁1961",-12.589103698730469],["兒童",-12.58912181854248],["▁بھارتی",-12.58916187286377],["▁Sö",-12.58920192718506],["ುತ್ತಿರುವ",-12.58921718597412],["▁ഡോ",-12.589221000671388],["▁Dla",-12.589241027832031],["იდ",-12.58924961090088],["ทิ",-12.589265823364258],["lede",-12.589278221130373],["也要",-12.589282035827637],["▁байдал",-12.589298248291016],["▁gestione",-12.58930492401123],["▁ທ",-12.589330673217772],["▁ಚಿ",-12.589372634887695],["▁piln",-12.589426040649414],["▁사랑",-12.58942699432373],["下載",-12.58944606781006],["ポ",-12.589518547058104],["čaj",-12.589540481567385],["でしょうか",-12.58956527709961],["▁kode",-12.589593887329102],["▁echipa",-12.589614868164062],["трым",-12.589617729187012],["തായി",-12.589625358581545],["▁tịch",-12.5896635055542],["▁disponibile",-12.589704513549805],["րել",-12.58975601196289],["▁Також",-12.589761734008787],["իա",-12.5897798538208],["āti",-12.589780807495115],["▁পারে",-12.589801788330078],["టే",-12.58981704711914],["itzen",-12.589835166931152],["▁kız",-12.589841842651367],["CIA",-12.589856147766112],["▁chá",-12.589900016784668],["lera",-12.589922904968262],["রী",-12.589938163757324],["把握",-12.589958190917969],["meti",-12.59003448486328],["▁Sap",-12.59003734588623],["ИМ",-12.590041160583496],["察",-12.590124130249023],["▁kapag",-12.590130805969238],["lado",-12.590142250061035],["▁1943",-12.59022045135498],["画面",-12.590221405029297],["▁college",-12.590272903442385],["ଥି",-12.590291976928713],["▁pirmo",-12.59031867980957],["▁Who",-12.590333938598633],["च्च",-12.590376853942873],["▁Miguel",-12.590381622314451],["▁foli",-12.590383529663086],["▁которую",-12.590384483337402],["▁centrale",-12.590394020080566],["▁vah",-12.590397834777832],["▁دولار",-12.590410232543944],["ძალ",-12.590411186218262],["▁Williams",-12.590420722961426],["▁gwaith",-12.590478897094728],["ครั้ง",-12.59049129486084],["▁spazio",-12.59052848815918],["▁Schi",-12.5906
06689453123],["ร่างกาย",-12.590627670288086],["▁plein",-12.590628623962402],["▁wait",-12.590638160705566],["▁ناهي",-12.590651512145996],["▁možnosť",-12.590655326843262],["▁നില",-12.59066390991211],["эж",-12.590672492980955],["▁naszego",-12.590675354003906],["tone",-12.590713500976562],["▁enviar",-12.590713500976562],["▁реч",-12.590740203857422],["▁except",-12.590770721435549],["▁Zam",-12.590835571289062],["▁аял",-12.590840339660645],["▁Kak",-12.590860366821287],["ီး",-12.5908784866333],["زه",-12.590888023376465],["▁sih",-12.59090805053711],["▁medal",-12.59091854095459],["träd",-12.590923309326172],["stwa",-12.59093189239502],["▁ವಾ",-12.590933799743652],["גור",-12.590971946716309],["్యం",-12.590988159179688],["银",-12.590994834899902],["Ң",-12.59102725982666],["రూ",-12.591039657592772],["дардын",-12.591050148010254],["rija",-12.591068267822266],["加拿大",-12.591070175170898],["训练",-12.591076850891112],["ison",-12.591082572937012],["▁Wan",-12.59108543395996],["搜尋",-12.59108829498291],["▁lingkungan",-12.59112548828125],["▁ਬਹੁਤ",-12.59112548828125],["нца",-12.591129302978516],["▁hơi",-12.59115219116211],["ുമ",-12.591172218322754],["ույթ",-12.591194152832031],["▁positiv",-12.591196060180664],["敏",-12.591215133666992],["▁mafi",-12.591230392456056],["ສາມາດ",-12.59123420715332],["णं",-12.59128475189209],["▁인터넷",-12.591294288635254],["ਕੀ",-12.591306686401367],["▁Lands",-12.591314315795898],["▁ली",-12.591325759887695],["▁khám",-12.591351509094238],["▁ກ່າວ",-12.59136962890625],["ător",-12.591408729553224],["дь",-12.591421127319336],["▁sty",-12.591421127319336],["관리",-12.59145736694336],["▁jambo",-12.591465950012209],["結合",-12.591470718383787],["▁voljo",-12.59147834777832],["▁buru",-12.59150505065918],["волю",-12.591569900512695],["▁Jedno",-12.59160327911377],["▁jafn",-12.591635704040527],["▁مارس",-12.591680526733398],["▁quarta",-12.591691970825195],["絕",-12.591711044311523],["▁minä",-12.59172821044922],["க்கள்",-12.591756820678713],["▁Mall",-12.59177589416504],["莉",-12.591838836669922],["▁Nghymru",-12.591875076293944],["▁поскольку",-12.591875076293944],["勉強",-12.591875076293944],["▁Suomessa",-12.591876029968262],["▁ഇല്ല",-12.591876983642578],["▁ਆਪਣੀ",-12.591878890991213],["▁ਹੋਇਆ",-12.591878890991213],["▁développement",-12.591913223266602],["্ম",-12.591937065124512],["▁בדרך",-12.591938972473145],["▁Svet",-12.591958045959473],["▁तब",-12.592000007629396],["▁Fakt",-12.592008590698242],["ડે",-12.592019081115724],["▁320",-12.592049598693848],["▁nationale",-12.592073440551758],["▁ଆସି",-12.59209442138672],["मल",-12.592119216918944],["▁jõu",-12.592151641845703],["ान्",-12.59216022491455],["▁Cinema",-12.592240333557127],["▁კომ",-12.592270851135254],["▁personen",-12.592293739318848],["▁sales",-12.592296600341797],["ტრა",-12.592334747314451],["ტყ",-12.592342376708984],["bida",-12.592368125915527],["▁humanos",-12.592448234558104],["尽管",-12.592449188232422],["▁creación",-12.592473030090332],["▁கால",-12.592485427856444],["▁Services",-12.592498779296877],["▁servei",-12.592537879943848],["▁კარგი",-12.592581748962402],["احة",-12.592591285705566],["▁ਮੁੱਖ",-12.592629432678224],["▁२०७५",-12.59263038635254],["てしまう",-12.59264850616455],["isesta",-12.59265422821045],["▁खुद",-12.592684745788574],["▁trav",-12.592707633972168],["▁estará",-12.592710494995115],["▁заяви",-12.592718124389648],["▁Mitä",-12.592721939086914],["▁Mati",-12.592723846435549],["▁немає",-12.59274959564209],["▁suv",-12.592782020568848],["▁הרי",-12.592782020568848],["▁למה",-12.592814445495604],["ゴ",-12.5928955078125],["هایی",-12.59290885925293],["▁fí",-12.5929212
57019045],["Saya",-12.592945098876951],["ئى",-12.592971801757812],["▁Direkt",-12.592977523803713],["▁Groß",-12.593015670776367],["▁mingi",-12.593017578125],["▁clase",-12.593092918395996],["きました",-12.593095779418944],["▁אום",-12.593213081359863],["▁Agri",-12.593215942382812],["▁tarihinde",-12.593294143676758],["▁Rechts",-12.593310356140137],["พัก",-12.593328475952148],["mita",-12.593335151672363],["▁farm",-12.593371391296388],["▁pulvinar",-12.593376159667969],["▁жас",-12.593389511108398],["▁tähän",-12.59339427947998],["▁física",-12.59340763092041],["▁بڑے",-12.593446731567385],["▁барих",-12.593466758728027],["▁existen",-12.593570709228516],["▁байж",-12.593578338623049],["kým",-12.593579292297363],["▁сынып",-12.593585014343262],["क्त",-12.593607902526855],["흥",-12.593639373779297],["sberg",-12.59364891052246],["iladi",-12.59367561340332],["ايل",-12.59373378753662],["ורי",-12.59375],["vija",-12.593764305114746],["ତୁ",-12.593780517578123],["նա",-12.593791007995604],["▁облысы",-12.593843460083008],["另",-12.593899726867676],["MAS",-12.59390640258789],["▁коментари",-12.59392547607422],["▁Dou",-12.593949317932127],["tegn",-12.593957901000977],["▁sempat",-12.593969345092772],["▁підприємства",-12.593971252441406],["▁любов",-12.59398365020752],["惡",-12.594034194946287],["▁Version",-12.594057083129885],["▁faktiskt",-12.59407901763916],["ģe",-12.594104766845703],["▁회사",-12.59411907196045],["szk",-12.594128608703612],["▁znak",-12.594157218933104],["▁איבער",-12.594162940979004],["ڄ",-12.594204902648926],["▁electrònic",-12.594221115112305],["▁istinad",-12.594228744506836],["▁Kõik",-12.594258308410645],["Ci",-12.594268798828123],["ዋል።",-12.59430694580078],["▁Когато",-12.594310760498049],["ట్ట",-12.59431266784668],["▁taraf",-12.594330787658691],["▁ασ",-12.594337463378906],["▁Mauris",-12.5943603515625],["▁идея",-12.594472885131836],["▁denken",-12.594473838806152],["▁милиони",-12.59451675415039],["▁creare",-12.594532012939451],["▁angen",-12.594545364379885],["ugi",-12.59455394744873],["つの",-12.594581604003906],["مين",-12.594592094421388],["හු",-12.594612121582031],["cá",-12.594621658325195],["▁የሚል",-12.594635009765623],["scher",-12.594666481018066],["▁పు",-12.594698905944824],["立即",-12.594714164733888],["kasta",-12.594720840454102],["APP",-12.594731330871582],["▁pha",-12.5947847366333],["▁demais",-12.594788551330566],["etatik",-12.594801902770996],["▁सुधार",-12.594803810119627],["▁semplice",-12.594812393188477],["▁ಹೊರ",-12.594839096069336],["▁помощи",-12.594843864440918],["움",-12.594868659973145],["▁Zukunft",-12.594879150390623],["▁mjesta",-12.594883918762209],["▁Chương",-12.594901084899902],["▁አገር",-12.594963073730469],["ñas",-12.594985961914062],["▁инструмент",-12.595014572143556],["▁berarti",-12.595026016235352],["▁कुल",-12.595027923583984],["ישע",-12.595044136047363],["里的",-12.595064163208008],["ข้าว",-12.595075607299805],["压力",-12.595085144042969],["▁hipo",-12.595090866088867],["▁मित्र",-12.59510326385498],["▁delta",-12.595118522644045],["▁վար",-12.595134735107422],["itia",-12.595144271850586],["▁करे",-12.595148086547852],["▁Ting",-12.59519386291504],["▁Cari",-12.595251083374023],["្រ",-12.59525203704834],["▁arv",-12.595291137695312],["VN",-12.595294952392578],["จิต",-12.595294952392578],["ION",-12.5953369140625],["שו",-12.595343589782717],["▁less",-12.595396041870115],["と言う",-12.595444679260254],["tartás",-12.595498085021973],["TRO",-12.59556484222412],["的情况",-12.59559154510498],["▁Compostela",-12.59563159942627],["▁notamment",-12.595633506774902],["गार",-12.59565258026123],["▁वर्ग",-12.59565258026123],["
ฺ",-12.595657348632812],["▁condiciones",-12.595662117004396],["▁추천",-12.595674514770508],["各地",-12.59567642211914],["saan",-12.595690727233888],["ища",-12.595693588256836],["▁hainbat",-12.595703125],["▁ós",-12.595704078674316],["▁እንዲህ",-12.595706939697266],["▁siyasət",-12.59571647644043],["▁articol",-12.59578800201416],["▁crede",-12.595802307128906],["▁თბილისი",-12.59580421447754],["วิธี",-12.595832824707031],["▁Fas",-12.595833778381348],["패",-12.59584140777588],["tzat",-12.595855712890623],["▁Rag",-12.595869064331056],["衛",-12.595885276794434],["سون",-12.59591007232666],["ezh",-12.595917701721191],["álás",-12.595932960510254],["▁BB",-12.595955848693848],["▁оку",-12.59595775604248],["ницу",-12.59596824645996],["▁pekerjaan",-12.595980644226074],["▁میری",-12.596004486083984],["holt",-12.59601593017578],["pela",-12.596016883850098],["ئين",-12.59602165222168],["ソ",-12.59604835510254],["▁Beynəlxalq",-12.596129417419434],["RD",-12.59617519378662],["▁നന്ദി",-12.59618091583252],["▁Mera",-12.596183776855469],["μένων",-12.596199989318848],["მან",-12.596205711364746],["開放",-12.59627628326416],["▁ännu",-12.596294403076172],["▁manter",-12.59631633758545],["成分",-12.596319198608398],["▁memor",-12.59632682800293],["gom",-12.596349716186523],["娜",-12.596358299255373],["▁dwóch",-12.59638500213623],["ៀ",-12.596397399902344],["óz",-12.596407890319824],["리는",-12.596426963806152],["bring",-12.59646224975586],["ებისა",-12.59648609161377],["▁phút",-12.596487998962402],["▁मेरा",-12.596490859985352],["▁Thiên",-12.596508979797363],["sole",-12.596516609191896],["▁fuori",-12.596522331237791],["▁Ary",-12.59652614593506],["▁stabilit",-12.596543312072754],["▁lunch",-12.59657859802246],["▁annars",-12.596585273742676],["▁נגד",-12.596611976623535],["ási",-12.596632957458496],["ಿತು",-12.596636772155762],["▁خارجی",-12.59669589996338],["▁Bitte",-12.596715927124023],["▁Font",-12.59675407409668],["▁capaz",-12.596769332885742],["সে",-12.596794128417969],["勇",-12.596808433532717],["روس",-12.596817016601562],["ู",-12.59683322906494],["ർത്ത",-12.59691333770752],["ମାନଙ୍କ",-12.596920013427734],["hada",-12.59695053100586],["▁Thủ",-12.596982955932615],["Sha",-12.596988677978516],["yli",-12.596993446350098],["vidi",-12.597025871276855],["▁kriminal",-12.597047805786133],["ίνη",-12.59704875946045],["▁ఎంత",-12.59705924987793],["指导",-12.597073554992676],["▁kopā",-12.597091674804688],["横",-12.597110748291016],["vido",-12.597132682800291],["▁efficace",-12.597138404846191],["▁kejadian",-12.597138404846191],["▁بلوچستان",-12.59714412689209],["鮮",-12.597168922424316],["ജ്",-12.59721565246582],["លើ",-12.597233772277832],["▁hero",-12.597234725952148],["▁diskusi",-12.597243309020996],["▁قانونی",-12.59725570678711],["▁тъй",-12.597264289855955],["▁dönem",-12.597286224365234],["ಟ್ಟು",-12.597312927246094],["▁lán",-12.59732151031494],["▁“",-12.597322463989258],["▁xanh",-12.59732437133789],["▁kabla",-12.597335815429688],["▁amplia",-12.5973539352417],["▁전체",-12.597373962402344],["رق",-12.597387313842772],["ٹا",-12.597400665283203],["▁диплом",-12.597403526306152],["ктер",-12.597441673278809],["▁investor",-12.59750747680664],["▁दिएको",-12.59750747680664],["▁etmiş",-12.59752082824707],["▁gəlir",-12.59754467010498],["практ",-12.597596168518066],["かなり",-12.59762954711914],["▁surf",-12.597630500793455],["▁tanı",-12.597641944885254],["دۇر",-12.597685813903809],["ീയ",-12.597686767578123],["ಹಾ",-12.597753524780272],["▁совершенно",-12.597763061523438],["ஸ",-12.59779167175293],["▁ပါ",-12.597795486450195],["▁Оста",-12.597797393798828],["▁व्हा",-12.597800254821776],["▁со
времен",-12.597804069519045],["dangan",-12.597847938537598],["▁ന്ന",-12.597851753234863],["плата",-12.597943305969238],["▁பணி",-12.597966194152832],["▁Körper",-12.597967147827148],["ðið",-12.597977638244627],["skr",-12.59801197052002],["▁otur",-12.598030090332031],["軟體",-12.598036766052246],["▁komunist",-12.598050117492676],["▁Xbox",-12.598081588745115],["▁Try",-12.598102569580078],["▁weeks",-12.598109245300291],["▁දා",-12.598114013671877],["▁generale",-12.598115921020508],["vern",-12.598164558410645],["onto",-12.598196029663086],["drift",-12.598271369934082],["▁classifica",-12.598291397094728],["-04-",-12.598315238952637],["เดินทาง",-12.598326683044434],["/11/",-12.598400115966797],["▁собира",-12.59841537475586],["以为",-12.598465919494627],["▁novinar",-12.598472595214844],["بين",-12.598562240600586],["▁vzor",-12.598573684692385],["▁بڑھ",-12.598578453063965],["əyə",-12.59860134124756],["стру",-12.598612785339355],["▁pusat",-12.598623275756836],["▁fuck",-12.598630905151367],["▁رہ",-12.598644256591797],["rito",-12.59864902496338],["▁excellent",-12.59865379333496],["▁ජන",-12.598666191101074],["zos",-12.5986909866333],["ardi",-12.59873867034912],["līdz",-12.598742485046388],["▁Kirja",-12.598804473876951],["▁fato",-12.598819732666016],["Sh",-12.598821640014648],["▁چیزی",-12.598822593688965],["▁altura",-12.598828315734863],["▁Ligi",-12.598832130432127],["tust",-12.598889350891112],["▁(16)",-12.598896026611328],["▁intensiv",-12.598896026611328],["▁등록",-12.598913192749023],["వచ్చు",-12.59891414642334],["api",-12.598966598510742],["▁nyky",-12.59897232055664],["code",-12.5989990234375],["▁inayo",-12.599019050598145],["▁varm",-12.59902000427246],["▁Familien",-12.59910011291504],["тип",-12.599102973937988],["のために",-12.59914207458496],["Во",-12.599153518676758],["▁xeral",-12.599164009094238],["▁negative",-12.599190711975098],["▁poster",-12.59923267364502],["жі",-12.599251747131348],["kva",-12.59925365447998],["▁facial",-12.599276542663574],["ბილ",-12.59933853149414],["▁ćemo",-12.599356651306152],["▁Cin",-12.599367141723633],["▁alder",-12.599367141723633],["▁Sementara",-12.599403381347656],["▁న్యూస్",-12.599421501159668],["▁mengikut",-12.599434852600098],["▁ਸਾਲ",-12.599468231201172],["כט",-12.599469184875488],["ērt",-12.5994873046875],["▁koppie",-12.59950828552246],["周年",-12.59951114654541],["поль",-12.599555015563965],["▁beş",-12.599557876586914],["▁ille",-12.599568367004396],["яна",-12.599581718444824],["ցի",-12.599589347839355],["أت",-12.599649429321287],["pendi",-12.599677085876465],["κιν",-12.599750518798828],["▁Gin",-12.59975528717041],["▁Viktor",-12.599763870239258],["pto",-12.599771499633787],["kwe",-12.599811553955078],["▁گی۔",-12.599891662597656],["уур",-12.59994125366211],["▁ಕೊಡ",-12.599961280822754],["多年",-12.599969863891602],["ിത",-12.599974632263184],["▁mesiac",-12.599998474121094],["▁fråga",-12.600010871887209],["▁lingvoj",-12.600016593933104],["▁jel",-12.60004711151123],["ణం",-12.600067138671877],["style",-12.600105285644531],["▁Seiten",-12.600115776062012],["▁ನೋಡಿ",-12.600117683410645],["tās",-12.600130081176758],["ጂ",-12.60015106201172],["▁βρίσκεται",-12.60015869140625],["▁teles",-12.600163459777832],["▁کېږي",-12.600165367126465],["▁Ovo",-12.600189208984377],["▁тү",-12.600214004516602],["▁loop",-12.600228309631348],["▁ଭାରତ",-12.600231170654297],["▁ಅದು",-12.60028076171875],["нас",-12.60028839111328],["▁Dad",-12.60030746459961],["बारे",-12.600327491760254],["▁රය",-12.600345611572266],["дүн",-12.600359916687012],["▁renova",-12.60036277770996],["▁Люб",-12.60036849975586],["।’",-12.6003961563
11035],["tím",-12.600427627563477],["だけで",-12.600485801696776],["▁វិទ្យុ",-12.600489616394045],["(2)",-12.600491523742676],["▁lut",-12.60049533843994],["EV",-12.60049819946289],["یج",-12.600508689880373],["àn",-12.600545883178713],["▁ет",-12.600578308105469],["▁XVIII",-12.60058307647705],["▁помоћ",-12.600607872009276],["强调",-12.600607872009276],["▁κυβέρνηση",-12.600637435913086],["ере",-12.600655555725098],["▁veni",-12.60067081451416],["▁ሐ",-12.600699424743652],["மோ",-12.600700378417969],["ulat",-12.600753784179688],["▁سول",-12.600754737854004],["▁لگ",-12.600780487060549],["볼",-12.600789070129396],["▁spri",-12.600800514221191],["▁dress",-12.600802421569824],["英语",-12.600848197937012],["сія",-12.600852012634276],["▁jente",-12.600872993469238],["ضم",-12.600900650024414],["рва",-12.600908279418944],["လက္",-12.600908279418944],["ائد",-12.600915908813477],["▁მომ",-12.600922584533691],["ਗੀ",-12.600927352905272],["ाना",-12.600930213928224],["▁demi",-12.600946426391602],["▁educación",-12.601027488708496],["▁wetu",-12.601080894470217],["میں",-12.60109519958496],["audi",-12.601097106933594],["ením",-12.601119041442873],["▁klare",-12.601189613342283],["▁reik",-12.60119915008545],["▁ذكر",-12.60120677947998],["▁utveckling",-12.601258277893066],["▁19:00",-12.601283073425291],["通信",-12.601323127746582],["相談",-12.601337432861328],["▁común",-12.601338386535645],["▁njegov",-12.601374626159668],["գի",-12.601383209228516],["▁17:00",-12.60138702392578],["▁сти",-12.601395606994627],["▁operation",-12.601405143737791],["▁ऑ",-12.601411819458008],["ADI",-12.601470947265623],["▁People",-12.601483345031738],["▁جهاز",-12.60149383544922],["▁ថ្មី",-12.601497650146484],["pera",-12.601511001586914],["cē",-12.60151195526123],["த்துக்கு",-12.601518630981444],["▁Kauf",-12.601534843444824],["enca",-12.601577758789062],["习惯",-12.601609230041504],["欠",-12.601622581481934],["▁tři",-12.601659774780272],["пут",-12.60166358947754],["εξ",-12.60166835784912],["▁Например",-12.60167121887207],["▁iaculis",-12.601686477661133],["▁infla",-12.601724624633787],["▁جسم",-12.601755142211914],["▁ưu",-12.60175609588623],["ctu",-12.60180950164795],["이고",-12.601847648620604],["ايي",-12.601862907409668],["▁prostu",-12.601895332336426],["▁Muut",-12.60193920135498],["wig",-12.60194206237793],["ტრი",-12.60195255279541],["▁Проект",-12.60197639465332],["DVD",-12.602161407470703],["▁Departament",-12.602163314819336],["物流",-12.602168083190918],["ół",-12.602190971374512],["徒",-12.602301597595217],["haya",-12.602328300476074],["▁vezető",-12.602355003356934],["cía",-12.60236930847168],["▁voyage",-12.602395057678224],["▁debido",-12.60240364074707],["yası",-12.602428436279297],["រា",-12.602431297302246],["▁चलचित्र",-12.60243320465088],["▁पूरी",-12.60244369506836],["▁contrat",-12.602446556091309],["strategi",-12.60245418548584],["कृत",-12.602455139160156],["▁אבער",-12.602474212646484],["älle",-12.602476119995115],["arin",-12.6024808883667],["▁otsi",-12.602505683898926],["თქ",-12.602535247802734],["从而",-12.602542877197266],["▁negali",-12.602547645568848],["▁хүнд",-12.60255527496338],["DT",-12.602584838867188],["ēji",-12.60261344909668],["▁Ś",-12.602652549743652],["▁обязательно",-12.602681159973145],["▁Place",-12.602689743041992],["rias",-12.602705955505373],["▁ඇත්ත",-12.602716445922852],["vise",-12.60277271270752],["▁والس",-12.602774620056152],["ाइएको",-12.602776527404783],["▁nəzərdə",-12.602791786193848],["▁tələb",-12.602837562561035],["下的",-12.602860450744627],["▁niyang",-12.60287380218506],["dud",-12.60290241241455],["▁adresu",-12.602947235107422],["რეს",-12.6
02968215942385],["▁Tol",-12.602975845336914],["警方",-12.602978706359863],["▁шы",-12.602989196777344],["lema",-12.603021621704102],["微信",-12.60302734375],["▁ఎస్",-12.603053092956545],["టో",-12.60306167602539],["АН",-12.603067398071287],["▁Point",-12.603105545043944],["لن",-12.603107452392578],["лок",-12.603147506713867],["▁tuyển",-12.603187561035156],["▁кількість",-12.603187561035156],["▁Seperti",-12.603189468383787],["▁לג",-12.60321044921875],["▁Mina",-12.603219985961914],["ଜ୍",-12.603262901306152],["ՈՐ",-12.603275299072266],["ගත්",-12.603320121765137],["▁उनका",-12.60334587097168],["▁Musta",-12.603354454040527],["▁varie",-12.603370666503906],["▁обл",-12.603445053100586],["ρος",-12.603471755981444],["ick",-12.60355281829834],["EJ",-12.60358715057373],["▁Shan",-12.60361385345459],["RET",-12.60365390777588],["▁Cul",-12.603654861450195],["▁gerçek",-12.603656768798828],["▁měla",-12.603739738464355],["rona",-12.60374641418457],["▁profesion",-12.603782653808594],["▁ప్రభుత్వం",-12.603793144226074],["wil",-12.603899955749512],["▁otrok",-12.603907585144045],["▁thuis",-12.603915214538574],["▁समझ",-12.60391902923584],["قيم",-12.603931427001951],["gust",-12.603933334350586],["▁Dog",-12.603934288024902],["▁zároveň",-12.6039457321167],["▁ब्लॉग",-12.6039457321167],["▁آثار",-12.603960037231444],["▁त्यांना",-12.60396671295166],["▁vilken",-12.604022026062012],["▁dinge",-12.604063987731934],["▁partit",-12.604111671447754],["чето",-12.604117393493652],["▁ABS",-12.604187965393066],["စရာ",-12.604198455810549],["▁سگهي",-12.604223251342772],["▁najbolje",-12.60426902770996],["ainen",-12.604275703430176],["▁Ideal",-12.604280471801758],["▁hivi",-12.604290962219238],["▁модели",-12.604300498962402],["▁بسته",-12.604321479797363],["▁stap",-12.604352951049805],["ικός",-12.604385375976562],["▁Fro",-12.604414939880373],["▁lehetőség",-12.604424476623535],["czki",-12.604439735412598],["club",-12.60444164276123],["Bir",-12.604454040527344],["▁예약",-12.604461669921877],["▁ход",-12.604474067687988],["岁的",-12.604497909545898],["ській",-12.604562759399414],["▁Dh",-12.604567527770996],["ability",-12.604594230651855],["▁بخ",-12.60461711883545],["▁تق",-12.604633331298828],["▁बै",-12.604643821716309],["▁ڪل",-12.604663848876951],["ობს",-12.6046724319458],["เย็น",-12.604703903198242],["▁септември",-12.604705810546877],["▁ਬਾਅਦ",-12.604705810546877],["ака",-12.604710578918455],["▁qualcosa",-12.604710578918455],["▁amount",-12.604744911193848],["▁مقصد",-12.60474967956543],["▁venda",-12.604765892028809],["નિ",-12.604774475097656],["рман",-12.604787826538086],["५",-12.60479736328125],["▁mês",-12.604843139648438],["okon",-12.604857444763184],["特别是",-12.604857444763184],["ٹھ",-12.60486888885498],["▁фільм",-12.604878425598145],["▁penggunaan",-12.604891777038574],["▁천",-12.604987144470217],["라이",-12.60501480102539],["▁109",-12.605022430419922],["sept",-12.60504150390625],["▁reuni",-12.605096817016602],["▁2.2",-12.605109214782717],["▁jota",-12.605109214782717],["inėje",-12.605113983154297],["▁Proin",-12.60512924194336],["σημ",-12.60515594482422],["čiu",-12.605181694030762],["▁Laura",-12.605188369750977],["ovaných",-12.605192184448242],["VP",-12.605224609375],["երեն",-12.605226516723633],["▁умов",-12.605257987976074],["▁coma",-12.605266571044922],["▁bolesti",-12.605280876159668],["▁Lugo",-12.605301856994627],["tips",-12.605318069458008],["tzer",-12.60531997680664],["▁wag",-12.605355262756348],["sels",-12.605392456054688],["▁spiritual",-12.605406761169434],["▁expect",-12.60541534423828],["▁rare",-12.605425834655762],["ोल",-12.60544490814209],["▁विद्याल
य",-12.60546588897705],["▁brin",-12.605484008789062],["وث",-12.605509757995604],["ഷി",-12.605514526367188],["▁zrobić",-12.60552215576172],["▁tulla",-12.605523109436035],["予定",-12.60553741455078],["▁മലയാളം",-12.605572700500488],["▁timu",-12.605576515197754],["tetty",-12.605603218078612],["tyki",-12.605652809143066],["▁технологии",-12.605664253234863],["ури",-12.605692863464355],["▁filmer",-12.60570240020752],["▁куб",-12.605705261230469],["▁освен",-12.605717658996582],["bantu",-12.605748176574709],["▁prise",-12.605789184570312],["ଁ",-12.605798721313477],["чески",-12.605823516845703],["ાર",-12.605823516845703],["▁vendos",-12.605838775634766],["▁Rid",-12.605849266052246],["▁човека",-12.605896949768066],["▁Oro",-12.60593032836914],["▁država",-12.605932235717772],["زد",-12.605938911437988],["▁పడ",-12.605944633483888],["▁ddi",-12.605958938598633],["pore",-12.605960845947266],["▁የሰ",-12.605969429016112],["▁കൂട്ട",-12.605995178222656],["▁bons",-12.6060152053833],["▁î",-12.606021881103516],["▁Lä",-12.60610008239746],["глас",-12.606138229370115],["aidd",-12.60614013671875],["朱",-12.606144905090332],["宽",-12.60615348815918],["نية",-12.606156349182127],["▁дамыту",-12.606189727783203],["შა",-12.606196403503418],["▁τέλος",-12.606225967407228],["▁înainte",-12.60623264312744],["▁स्थापना",-12.606246948242188],["▁байгууллага",-12.606253623962402],["ែក",-12.606257438659668],["▁tránh",-12.606258392333984],["▁konusu",-12.606263160705566],["▁edirəm",-12.60627555847168],["▁20:00",-12.60630989074707],["▁magazine",-12.606329917907717],["▁ged",-12.606331825256348],["▁februari",-12.606406211853027],["▁circula",-12.606416702270508],["▁Երեւան",-12.606548309326172],["PF",-12.606563568115234],["▁telefoni",-12.60662078857422],["▁Belgi",-12.606666564941406],["▁история",-12.606680870056152],["ёт",-12.606708526611328],["skú",-12.606722831726074],["▁operativ",-12.606752395629885],["▁Bart",-12.606764793395996],["▁РЕ",-12.606805801391602],["ification",-12.606855392456056],["▁købe",-12.606891632080078],["ရပ်",-12.606904983520508],["อาจจะ",-12.606922149658203],["скія",-12.60695457458496],["pana",-12.606980323791504],["▁περίπτωση",-12.606987953186035],["▁bērnu",-12.606989860534668],["▁можливість",-12.606989860534668],["▁ಹೋಗ",-12.60702133178711],["固定",-12.607074737548828],["▁Դա",-12.60707664489746],["▁vieš",-12.607085227966309],["▁Гер",-12.607100486755373],["▁καρ",-12.607169151306152],["▁bibe",-12.607172012329102],["▁feil",-12.60720157623291],["lūk",-12.607208251953123],["▁klasik",-12.607216835021973],["▁ким",-12.607217788696287],["相互",-12.607254028320312],["መራ",-12.60730266571045],["▁туку",-12.607324600219728],["▁tendrá",-12.607394218444824],["▁доход",-12.60740089416504],["▁bied",-12.607414245605469],["ngar",-12.60741901397705],["▁povedal",-12.60741901397705],["▁ມ",-12.607423782348633],["қан",-12.607431411743164],["▁Merkezi",-12.607460021972656],["үндө",-12.607481002807615],["▁türk",-12.607502937316896],["▁الذ",-12.607549667358398],["▁arribar",-12.607551574707031],["方面的",-12.60760498046875],["▁1914",-12.607613563537598],["station",-12.607640266418455],["になり",-12.607641220092772],["▁redo",-12.607666015625],["▁okt",-12.607673645019531],["▁টাকা",-12.607674598693848],["▁Hilfe",-12.607678413391112],["UU",-12.607718467712402],["泉",-12.60771942138672],["ฟัง",-12.607751846313477],["เก่า",-12.607755661010742],["▁மேலும்",-12.60775661468506],["▁программы",-12.607765197753906],["▁případě",-12.607768058776855],["cking",-12.607807159423828],["▁Reuters",-12.607810020446776],["▁euskal",-12.607820510864258],["▁yaşayan",-12.607827186584473],["大力",-1
2.607848167419434],["▁aumenta",-12.607866287231444],["▁ସ୍",-12.607884407043455],["inos",-12.60789966583252],["▁הפר",-12.607914924621582],["скоп",-12.607938766479492],["▁سواء",-12.607942581176758],["▁suuri",-12.607959747314451],["ikh",-12.607982635498049],["▁rozp",-12.607999801635742],["ტერ",-12.608052253723145],["▁పాటు",-12.608097076416016],["▁ezzel",-12.608115196228027],["печат",-12.608162879943848],["▁చూడ",-12.608231544494627],["▁indul",-12.60824966430664],["▁υπερ",-12.608250617980955],["用戶",-12.608256340026855],["▁однако",-12.608277320861816],["fred",-12.60827922821045],["aanka",-12.60829734802246],["▁lễ",-12.60829734802246],["▁مقرر",-12.608344078063965],["▁Fab",-12.608356475830078],["లోనే",-12.608386039733888],["duse",-12.608391761779783],["قۇ",-12.608409881591797],["▁reclama",-12.608428955078123],["올",-12.60843276977539],["▁شوید",-12.608444213867188],["රෝ",-12.608449935913086],["▁Maecenas",-12.608510971069336],["▁राम्रो",-12.608515739440918],["▁practice",-12.60851764678955],["▁полностью",-12.608519554138184],["▁qanday",-12.608528137207031],["▁puternic",-12.608570098876951],["▁सुख",-12.60858154296875],["銷售",-12.60858154296875],["▁гээд",-12.608610153198242],["▁ஓ",-12.608627319335938],["▁kesä",-12.608634948730469],["▁gerai",-12.608647346496582],["▁independent",-12.60866928100586],["▁раён",-12.608712196350098],["▁собствен",-12.60871410369873],["▁американски",-12.60871696472168],["dés",-12.608731269836426],["हार",-12.60873317718506],["▁ٻئي",-12.60875415802002],["▁stie",-12.60876750946045],["▁sumar",-12.608774185180664],["认识",-12.608781814575195],["قبل",-12.608811378479004],["graph",-12.608824729919434],["tünk",-12.60890007019043],["▁gestion",-12.60890293121338],["▁обе",-12.608942985534668],["▁Харин",-12.608980178833008],["שוב",-12.609003067016602],["ADO",-12.609020233154297],["îna",-12.609036445617676],["ጓ",-12.60904312133789],["▁lagun",-12.609048843383787],["▁mention",-12.609068870544434],["▁کد",-12.60907745361328],["뉴스",-12.609088897705078],["▁egyes",-12.609128952026367],["▁ஸ்",-12.60913372039795],["▁खोल",-12.609139442443848],["▁κρατ",-12.609166145324709],["ाप",-12.60916805267334],["錯",-12.609211921691896],["▁Badan",-12.60924243927002],["▁limite",-12.609264373779297],["說明",-12.60927391052246],["▁ámbito",-12.609277725219728],["▁geriausi",-12.609286308288574],["▁udvikling",-12.609313011169434],["▁hlas",-12.60932445526123],["▁espero",-12.609341621398926],["▁Hü",-12.60935115814209],["▁کیس",-12.609378814697266],["▁pian",-12.609380722045898],["▁semasa",-12.609387397766112],["asis",-12.609393119812012],["▁عالمی",-12.60939598083496],["運用",-12.609418869018556],["बु",-12.609432220458984],["лија",-12.609467506408691],["verti",-12.609477996826172],["▁Rie",-12.60951328277588],["तन",-12.609522819519045],["▁cross",-12.60954475402832],["▁futhi",-12.60955047607422],["ంట",-12.6095552444458],["▁cere",-12.609585762023926],["科學",-12.609585762023926],["ingin",-12.609624862670898],["skydd",-12.609641075134276],["忍",-12.60964584350586],["raja",-12.609668731689451],["▁кадр",-12.609679222106934],["▁niez",-12.609681129455566],["▁کودک",-12.6096830368042],["▁juny",-12.609689712524414],["ndal",-12.609691619873049],["ууга",-12.609691619873049],["分别",-12.609695434570312],["entia",-12.609737396240234],["▁गृह",-12.609737396240234],["▁posse",-12.60974407196045],["ទឹក",-12.609745979309082],["▁koi",-12.609798431396484],["▁ključ",-12.609810829162598],["▁сабақ",-12.609850883483888],["▁fakultet",-12.609935760498049],["TL",-12.609960556030272],["▁هٿ",-12.609972953796388],["▁Rim",-12.609992027282717],["দি",-12.609996795654297],["
▁Gur",-12.610026359558104],["▁করেছে",-12.610044479370115],["▁ktoś",-12.610093116760254],["▁föld",-12.61014175415039],["Æ",-12.610146522521973],["നോ",-12.610212326049805],["▁ilang",-12.610218048095703],["▁된",-12.61022663116455],["दान",-12.610244750976562],["▁vaš",-12.610304832458496],["ნია",-12.610370635986328],["нул",-12.61040210723877],["▁ფოტო",-12.610440254211426],["язан",-12.610445976257324],["▁obro",-12.610466957092283],["täjä",-12.61048412322998],["▁វា",-12.610498428344728],["хүү",-12.610541343688965],["ತನ",-12.610556602478027],["يال",-12.610562324523926],["▁flott",-12.61056423187256],["▁ದೊಡ್ಡ",-12.610572814941406],["▁posa",-12.610591888427734],["▁dô",-12.610645294189451],["熊",-12.610678672790527],["▁Rap",-12.610748291015623],["▁simt",-12.610770225524902],["انگ",-12.610800743103027],["▁própria",-12.610800743103027],["▁UMNO",-12.610816955566406],["▁جنسی",-12.610821723937988],["在这里",-12.61083698272705],["醫",-12.610838890075684],["▁တစ္",-12.610852241516112],["▁Escola",-12.610865592956545],["갈",-12.610879898071287],["▁minna",-12.610880851745604],["▁బాబు",-12.610918045043944],["▁vlada",-12.610934257507324],["რები",-12.610973358154297],["▁बनाने",-12.610981941223145],["▁dildo",-12.61098575592041],["▁1.3",-12.61099910736084],["▁xente",-12.611005783081056],["▁hoteller",-12.611028671264648],["▁bæ",-12.611059188842772],["▁Šis",-12.611129760742188],["▁dituen",-12.611169815063477],["▁Vet",-12.61122989654541],["革命",-12.611276626586914],["▁retra",-12.61128044128418],["▁خیر",-12.61133098602295],["ではなく",-12.611397743225098],["quot",-12.611416816711426],["irin",-12.611425399780272],["▁১৯",-12.611452102661133],["▁Bey",-12.611470222473145],["补",-12.61150360107422],["גים",-12.611530303955078],["աբար",-12.611539840698242],["धार",-12.611552238464355],["▁umat",-12.611564636230469],["▁predvsem",-12.611565589904783],["▁spela",-12.611571311950684],["▁دیدگاه",-12.611571311950684],["▁Thời",-12.611572265625],["▁қандай",-12.61157512664795],["▁2%",-12.61167049407959],["▁下一篇",-12.611680030822754],["▁३०",-12.611681938171388],["REA",-12.611698150634766],["▁అవ",-12.61171531677246],["▁pomysł",-12.611719131469728],["▁pulang",-12.611760139465332],["▁алады",-12.61176586151123],["cek",-12.611838340759276],["▁වගේම",-12.611845016479492],["한국",-12.611884117126465],["rman",-12.61189079284668],["▁ასეთი",-12.611899375915527],["novi",-12.611952781677246],["▁پدر",-12.611957550048828],["られて",-12.611977577209473],["▁inta",-12.6119966506958],["ئر",-12.612005233764648],["хим",-12.612014770507812],["rash",-12.612021446228027],["▁ښه",-12.612042427062988],["rick",-12.61209201812744],["▁اجرای",-12.61209774017334],["ေပ",-12.612112045288086],["فض",-12.612136840820312],["əş",-12.612183570861816],["rige",-12.612190246582031],["ccions",-12.612265586853027],["▁organizira",-12.612277030944824],["▁מלא",-12.612277030944824],["▁familje",-12.612278938293455],["赞",-12.61229133605957],["▁iné",-12.612299919128418],["▁ას",-12.61230182647705],["▁Steve",-12.612318992614746],["▁נוספים",-12.612330436706545],["▁കൂടുതല്",-12.612330436706545],["▁завжди",-12.61233139038086],["▁superficie",-12.612334251403809],["▁Hitler",-12.612346649169922],["▁heti",-12.612363815307615],["нного",-12.612380027770996],["▁במקום",-12.612380027770996],["niej",-12.612383842468262],["▁chăm",-12.612412452697754],["▁Asociación",-12.612420082092283],["▁yleis",-12.612476348876951],["▁toi",-12.612519264221191],["팀",-12.612523078918455],["ىم",-12.612531661987305],["llow",-12.612552642822266],["aide",-12.61258029937744],["ர்கள்",-12.61259937286377],["▁Komis",-12.612611770629885],["গ্",-12.61266
7083740234],["▁vient",-12.612678527832031],["ولة",-12.612689018249512],["▁troppo",-12.612709999084473],["对方",-12.612710952758787],["gangs",-12.612716674804688],["▁Mike",-12.612751960754396],["mper",-12.61276626586914],["liko",-12.612812042236328],["▁Jaun",-12.612826347351074],["▁২০১৭",-12.612943649291992],["each",-12.612948417663574],["▁eneo",-12.612950325012209],["janja",-12.612953186035156],["廣告",-12.612975120544434],["▁រឿង",-12.613061904907228],["▁ibang",-12.613089561462402],["▁próprio",-12.613096237182615],["▁αποτελεί",-12.613100051879885],["되",-12.613107681274414],["पाल",-12.613129615783691],["▁assume",-12.613130569458008],["papa",-12.613139152526855],["▁Ռ",-12.613153457641602],["ИЙН",-12.613165855407717],["ינט",-12.613165855407717],["▁qilib",-12.613165855407717],["นี",-12.61320972442627],["▁bazi",-12.613215446472168],["▁temel",-12.613218307495115],["(1",-12.61325740814209],["▁deles",-12.61329746246338],["พี",-12.613306999206545],["▁monet",-12.613313674926758],["还要",-12.613336563110352],["यां",-12.613399505615234],["ழ்",-12.6134033203125],["▁דף",-12.613442420959473],["▁Grupo",-12.613447189331056],["ります",-12.613462448120115],["▁Vidu",-12.613547325134276],["靜",-12.613712310791016],["▁फ़",-12.613722801208496],["▁ଆମ",-12.613755226135254],["▁камер",-12.61376667022705],["හා",-12.613768577575684],["▁dè",-12.613779067993164],["ಯಾದ",-12.61379051208496],["terne",-12.6138277053833],["▁Gegen",-12.613840103149414],["▁тело",-12.613861083984377],["▁كېرەك",-12.613863945007324],["▁Ата",-12.613873481750488],["▁сервис",-12.613873481750488],["▁calitate",-12.613885879516602],["▁copia",-12.613927841186523],["▁Friday",-12.613934516906738],["amas",-12.613946914672852],["ഐ",-12.613951683044434],["▁قادر",-12.613968849182127],["okban",-12.61399269104004],["▁чалавека",-12.614006042480469],["2002",-12.614006996154783],["පෙ",-12.614014625549316],["▁ბოლო",-12.614055633544922],["▁බො",-12.614109992980955],["昨天",-12.614121437072754],["tics",-12.614129066467283],["▁mater",-12.61413860321045],["▁usia",-12.614173889160156],["▁2019.",-12.614182472229004],["ール",-12.614192008972168],["ਵੇ",-12.614221572875977],["סו",-12.614227294921877],["misest",-12.614256858825684],["▁miejscu",-12.614256858825684],["▁MM",-12.614320755004885],["▁noche",-12.614336013793944],["കളില്",-12.61434555053711],["▁Cru",-12.61435317993164],["▁encanta",-12.61435317993164],["จริงๆ",-12.614372253417969],["ētas",-12.614391326904297],["▁details",-12.614446640014648],["чката",-12.61445426940918],["vri",-12.61453628540039],["▁Hold",-12.614540100097656],["▁जीत",-12.61459255218506],["кет",-12.614617347717283],["ችሁ",-12.614628791809082],["▁व्यक्त",-12.614631652832031],["▁многих",-12.614720344543455],["▁ipse",-12.614754676818848],["کھ",-12.614757537841797],["▁science",-12.614778518676758],["▁कथा",-12.614781379699709],["▁renov",-12.614789962768556],["▁ठेव",-12.614861488342283],["▁Siri",-12.614863395690918],["▁직",-12.614877700805664],["▁Filip",-12.614883422851562],["▁eiro",-12.61488437652588],["▁raste",-12.614898681640623],["ຂອງທ່ານ",-12.614952087402344],["კუ",-12.61497974395752],["▁български",-12.615012168884276],["▁ແຂວງ",-12.615023612976074],["▁lungo",-12.615031242370604],["▁ана",-12.615032196044922],["รอย",-12.61506462097168],["etu",-12.615072250366213],["▁quase",-12.615077018737791],["▁ເຂົ້າ",-12.615097999572754],["▁sporta",-12.61512279510498],["drom",-12.615141868591309],["▁ravno",-12.615172386169434],["แนว",-12.615180015563965],["enek",-12.615217208862305],["▁ард",-12.615219116210938],["▁lämna",-12.615245819091797],["ений",-12.615246772766112],["ēl",-12.61524
772644043],["▁pertsona",-12.615288734436035],["▁еді",-12.615289688110352],["▁öllum",-12.615338325500488],["σουν",-12.615352630615234],["▁mutu",-12.615371704101562],["▁الاس",-12.615376472473145],["พิมพ์",-12.615391731262209],["▁tiểu",-12.615396499633787],["▁کنترل",-12.615396499633787],["▁február",-12.615397453308104],["ణి",-12.615405082702637],["▁Onder",-12.61540699005127],["▁Januari",-12.61543083190918],["▁அதை",-12.61547565460205],["ਤਿ",-12.615478515625],["premi",-12.61551284790039],["Թ",-12.615540504455566],["▁heldur",-12.615558624267578],["▁winter",-12.61558723449707],["▁noor",-12.615591049194336],["▁манай",-12.615592002868652],["พ่อ",-12.615608215332031],["▁meeting",-12.615629196166992],["वान",-12.6156587600708],["▁네",-12.615739822387695],["▁వరకు",-12.61574649810791],["▁cart",-12.615753173828123],["▁beraber",-12.615774154663086],["▁oseb",-12.615815162658691],["▁Refer",-12.615821838378906],["▁1959",-12.61583137512207],["นัด",-12.615836143493652],["▁almeno",-12.61585521697998],["▁blu",-12.615889549255373],["▁можат",-12.615897178649902],["ferm",-12.615906715393066],["werking",-12.615907669067385],["人に",-12.615922927856444],["ቀር",-12.615943908691406],["וד",-12.615974426269531],["Que",-12.615997314453123],["prend",-12.616003036499023],["-40",-12.616006851196287],["▁tuz",-12.616069793701172],["▁слу",-12.616098403930664],["▁buruzko",-12.616128921508787],["ترین",-12.61614990234375],["xwe",-12.616192817687988],["整体",-12.616192817687988],["▁darüber",-12.616254806518556],["▁крв",-12.61625862121582],["▁معظم",-12.616280555725098],["▁зүйн",-12.616310119628906],["tensi",-12.616312980651855],["▁ipsa",-12.616345405578612],["ее",-12.616405487060549],["▁Gambar",-12.61641788482666],["تۇ",-12.61642074584961],["بین",-12.61643409729004],["▁sửa",-12.616437911987305],["▁usein",-12.61645793914795],["ано",-12.616485595703123],["raka",-12.616503715515137],["وني",-12.61650562286377],["▁нито",-12.61656093597412],["բա",-12.61657428741455],["ব্য",-12.6165771484375],["РЕ",-12.616601943969728],["▁دستور",-12.616633415222168],["▁Phone",-12.616639137268066],["▁کړه",-12.616654396057127],["alia",-12.616666793823242],["▁פּ",-12.616671562194824],["bygg",-12.61667823791504],["▁#1",-12.616684913635254],["▁divers",-12.616694450378418],["jaga",-12.616739273071287],["▁الخاص",-12.616765975952148],["▁عباس",-12.616765975952148],["されます",-12.616785049438477],["vār",-12.616803169250488],["▁njihove",-12.616809844970703],["自主",-12.616829872131348],["▁Forma",-12.616830825805664],["▁(18)",-12.616847038269045],["masa",-12.61685562133789],["шь",-12.616861343383787],["是个",-12.616865158081056],["▁bago",-12.616876602172852],["亿美元",-12.616876602172852],["错",-12.616896629333496],["▁gobierno",-12.61693286895752],["▁പ്രധാന",-12.616935729980469],["▁ყველაფერი",-12.616943359375],["▁발표",-12.616949081420898],["▁сен",-12.617006301879885],["▁чисто",-12.617022514343262],["czyn",-12.617025375366213],["創作",-12.617035865783691],["▁کورٹ",-12.61713409423828],["あり",-12.617244720458984],["lyg",-12.61725902557373],["nța",-12.61725902557373],["▁опис",-12.617266654968262],["▁региона",-12.617317199707031],["▁liker",-12.61732578277588],["▁handi",-12.617334365844728],["▁වාර්තා",-12.617359161376951],["▁biaya",-12.61737060546875],["▁keli",-12.617389678955078],["▁Cr",-12.617403030395508],["umā",-12.617425918579102],["▁Să",-12.617433547973633],["▁Пу",-12.61745834350586],["▁tempi",-12.617547035217283],["▁복",-12.617559432983398],["▁ಮನೆ",-12.617595672607422],["▁nuove",-12.617598533630373],["▁gim",-12.617673873901367],["▁correu",-12.617698669433594],["போ",-12.61772632598877],["▁계
",-12.617754936218262],["ङ्",-12.617759704589844],["▁것입니다",-12.61777687072754],["▁applica",-12.617806434631348],["тко",-12.61783218383789],["stoff",-12.617887496948242],["▁নেই",-12.617895126342772],["sci",-12.617925643920898],["▁ага",-12.617966651916504],["▁служба",-12.617984771728516],["帮",-12.618014335632324],["▁necessario",-12.61807918548584],["▁нашу",-12.618082046508787],["▁cuid",-12.618098258972168],["▁လုပ္",-12.618114471435549],["▁scurt",-12.618115425109863],["ገለ",-12.618122100830078],["visning",-12.61815071105957],["ົນ",-12.618194580078123],["▁ljós",-12.618204116821287],["▁nouă",-12.618220329284668],["▁undan",-12.618223190307615],["▁svijeta",-12.618253707885742],["leið",-12.618280410766602],["▁vstup",-12.618348121643066],["▁ډیر",-12.618363380432127],["слов",-12.618367195129396],["▁неко",-12.618382453918455],["▁результате",-12.618383407592772],["Ал",-12.618426322937012],["▁интересно",-12.618426322937012],["足球",-12.618427276611328],["ဥ",-12.618460655212402],["▁ਪ੍ਰਧਾਨ",-12.6184720993042],["▁ಯಾವುದೇ",-12.618477821350098],["球员",-12.618480682373049],["一致",-12.61849880218506],["Де",-12.618504524230955],["▁Tea",-12.618513107299805],["▁börjar",-12.61858367919922],["阅读",-12.618595123291016],["▁حالی",-12.61865234375],["▁βρ",-12.618656158447266],["irani",-12.618688583374023],["▁организација",-12.6187105178833],["Ви",-12.6187162399292],["▁alkaa",-12.618719100952148],["▁kennen",-12.618739128112791],["▁Akan",-12.618785858154297],["▁bū",-12.618844985961914],["▁고객",-12.61884593963623],["တစ်",-12.618864059448242],["▁krijgt",-12.618942260742188],["▁especially",-12.618977546691896],["▁Антон",-12.618983268737791],["ИТЕ",-12.619011878967283],["▁DS",-12.619011878967283],["सू",-12.619057655334473],["▁Guð",-12.619070053100586],["\\",-12.61913776397705],["zky",-12.61916160583496],["▁salam",-12.619163513183594],["▁бірі",-12.619165420532228],["▁потребно",-12.619196891784668],["GH",-12.61923122406006],["▁మాట్లాడ",-12.619240760803224],["▁pellentesque",-12.619242668151855],["▁bibendum",-12.619250297546388],["▁որտեղ",-12.61927890777588],["لين",-12.619373321533203],["помни",-12.619383811950684],["▁stuff",-12.619433403015137],["▁comer",-12.61944580078125],["▁ئاي",-12.619452476501465],["▁Başkan",-12.61949348449707],["ntang",-12.619507789611816],["FOTO",-12.619508743286133],["舉",-12.619508743286133],["▁düşün",-12.61954116821289],["мени",-12.619544982910156],["楽",-12.619577407836914],["▁বল",-12.61961841583252],["▁(@",-12.619622230529783],["▁fruit",-12.619633674621582],["▁boven",-12.619680404663086],["ička",-12.619729042053224],["▁dêr",-12.619731903076172],["▁ayudar",-12.619738578796388],["khulu",-12.61976432800293],["▁üz",-12.619766235351562],["▁pretende",-12.61977195739746],["化的",-12.619779586791992],["▁sağlık",-12.619831085205078],["▁1946",-12.61986255645752],["skai",-12.61988925933838],["▁maç",-12.619898796081545],["▁матери",-12.619903564453123],["hej",-12.619908332824709],["▁relation",-12.619911193847656],["召开",-12.619915008544922],["лё",-12.619929313659668],["▁փ",-12.619969367980955],["ंचा",-12.619983673095703],["ૉ",-12.619997024536133],["müş",-12.62000846862793],["kaya",-12.620009422302246],["▁Pellentesque",-12.620014190673828],["▁fringilla",-12.620014190673828],["▁Junior",-12.62001609802246],["▁dicembre",-12.620017051696776],["▁ሳይሆን",-12.62001895904541],["ثق",-12.62003231048584],["▁Java",-12.620043754577637],["▁téma",-12.620088577270508],["tiek",-12.620122909545898],["▁prior",-12.620142936706545],["▁double",-12.620182037353516],["▁ornare",-12.620196342468262],["▁liquid",-12.62021827697754],["▁Après",-12.6202335357
66602],["▁1938",-12.620248794555664],["▁eus",-12.620327949523926],["▁CP",-12.62034034729004],["▁haka",-12.620394706726074],["เร็ว",-12.620410919189451],["▁ത്തി",-12.620413780212402],["▁suger",-12.620417594909668],["bom",-12.620441436767578],["ಫ್",-12.620450019836426],["пас",-12.620548248291016],["haal",-12.620549201965332],["tiche",-12.620570182800291],["▁Dream",-12.62060832977295],["ernes",-12.62062168121338],["နှ",-12.62062168121338],["▁Recht",-12.620630264282228],["πά",-12.620659828186035],["▁участ",-12.620685577392578],["▁ایل",-12.62069606781006],["tryk",-12.62073040008545],["三个",-12.620747566223145],["▁दौरान",-12.620787620544434],["ttet",-12.6207914352417],["લિ",-12.620809555053713],["▁зробити",-12.620853424072266],["▁تېر",-12.62086296081543],["▁Hér",-12.62088680267334],["▁пъти",-12.620916366577148],["▁Turn",-12.620926856994627],["가지",-12.620928764343262],["線上",-12.620945930480955],["ීය",-12.620973587036133],["▁worth",-12.621002197265623],["▁alasan",-12.621010780334473],["친",-12.621055603027344],["▁৭",-12.62106990814209],["▁تهیه",-12.621081352233888],["▁لكل",-12.62109088897705],["▁સિ",-12.621134757995604],["телна",-12.62129020690918],["▁komponent",-12.62130069732666],["▁Drag",-12.621322631835938],["yama",-12.621357917785645],["▁глава",-12.621367454528809],["dalen",-12.621421813964844],["呀",-12.621533393859863],["рог",-12.621537208557127],["▁ਜਾਂ",-12.621537208557127],["тек",-12.621546745300291],["數據",-12.62155055999756],["▁podmienky",-12.621557235717772],["▁ребенка",-12.621557235717772],["ຕ່າງໆ",-12.62155818939209],["▁واحدة",-12.62156105041504],["▁Bagaimana",-12.621597290039062],["▁direito",-12.621625900268556],["消費",-12.621662139892578],["▁ຊ",-12.62168025970459],["вка",-12.621692657470703],["gong",-12.621698379516602],["fald",-12.621734619140623],["▁Jim",-12.621752738952637],["▁អត្ថបទ",-12.6217679977417],["▁Valsts",-12.621783256530762],["arvo",-12.621788024902344],["▁Tässä",-12.621806144714355],["ājiem",-12.62190055847168],["ყა",-12.621919631958008],["ierten",-12.621939659118652],["ständig",-12.621984481811523],["▁raske",-12.622003555297852],["▁trygg",-12.6220121383667],["▁לר",-12.622071266174316],["BAN",-12.62210750579834],["лина",-12.622108459472656],["ātu",-12.622125625610352],["▁flam",-12.622152328491213],["မ္",-12.622159004211426],["dung",-12.622173309326172],["จะเป็น",-12.622288703918455],["▁nasze",-12.622296333312988],["ିଆ",-12.62230396270752],["▁Mut",-12.622326850891112],["̃",-12.62233066558838],["▁convallis",-12.62233066558838],["▁Πριν",-12.622333526611328],["кти",-12.622336387634276],["▁кажется",-12.622350692749023],["ניה",-12.622355461120604],["▁نمونه",-12.622355461120604],["הן",-12.622356414794922],["ապահ",-12.62236785888672],["کم",-12.62236785888672],["厂",-12.622380256652832],["▁الموضوع",-12.62238597869873],["▁bəzi",-12.622437477111816],["glav",-12.622453689575195],["なくて",-12.622453689575195],["сун",-12.622454643249512],["▁нап",-12.622458457946776],["▁skupina",-12.622474670410156],["▁желание",-12.622517585754396],["▁пл",-12.62253761291504],["щий",-12.62255573272705],["关键",-12.622570037841797],["▁Premium",-12.622612953186035],["ထိ",-12.622642517089844],["▁масло",-12.62270450592041],["▁Luc",-12.622718811035156],["իտ",-12.622807502746582],["▁관리",-12.622817039489746],["▁बन्द",-12.62283706665039],["▁Aziz",-12.622838973999023],["▁თა",-12.622840881347656],["ပင္",-12.622843742370604],["ෂ්",-12.622857093811035],["▁læse",-12.622857093811035],["ხუ",-12.622870445251465],["▁ដូច",-12.622923851013184],["▁стру",-12.622931480407717],["▁Apr",-12.622952461242676],["цаў",-12.622962951660156],
["κλ",-12.622974395751951],["ода",-12.62299346923828],["経",-12.623028755187988],["സ്റ്റ്",-12.623041152954102],["▁उल्लेख",-12.623104095458984],["▁näiteks",-12.623106956481934],["▁మాత్రం",-12.62310791015625],["▁regoor",-12.623138427734377],["▁zakresie",-12.623149871826172],["▁subjekt",-12.62315273284912],["سأل",-12.623165130615234],["▁2030",-12.623244285583496],["▁olmaz",-12.623295783996582],["▁altera",-12.623296737670898],["Ұ",-12.62331771850586],["▁đem",-12.623321533203123],["▁există",-12.62332820892334],["china",-12.623330116271973],["чког",-12.623353958129885],["不知",-12.623361587524414],["▁asfalt",-12.623400688171388],["▁lapse",-12.623406410217283],["gard",-12.62342357635498],["▁dobře",-12.623448371887209],["▁Edit",-12.623486518859863],["▁Antoni",-12.623490333557127],["uddin",-12.623494148254396],["anam",-12.623531341552734],["ايو",-12.623536109924316],["▁लो",-12.623558044433594],["▁kú",-12.62356662750244],["መው",-12.623574256896973],["▁өте",-12.623604774475098],["▁എന്നെ",-12.623613357543944],["统一",-12.623615264892578],["테",-12.623640060424805],["▁popis",-12.623663902282717],["▁Surat",-12.62367820739746],["▁función",-12.623701095581056],["dyti",-12.623709678649902],["▁bulundu",-12.623712539672852],["รุ",-12.62374210357666],["▁mân",-12.623753547668455],["şu",-12.623785972595217],["▁мені",-12.623788833618164],["讨论",-12.623811721801758],["USA",-12.623827934265137],["▁उठ",-12.62382984161377],["▁Kredit",-12.623831748962402],["▁nghề",-12.62387752532959],["ációs",-12.623883247375488],["▁nhóm",-12.62388515472412],["▁නිදහස්",-12.62389087677002],["bler",-12.623903274536133],["▁பெ",-12.623909950256348],["▁hvilke",-12.623957633972168],["▁ئۇنىڭ",-12.623957633972168],["frei",-12.624021530151367],["▁зміни",-12.624043464660645],["ۇن",-12.624066352844238],["▁נת",-12.624095916748049],["▁sehat",-12.624152183532717],["▁आगे",-12.62420654296875],["gate",-12.624207496643066],["laiset",-12.624213218688965],["क्क",-12.624229431152344],["شهر",-12.624250411987305],["▁אותי",-12.62426471710205],["چا",-12.624267578125],["фат",-12.624306678771973],["бап",-12.62434196472168],["▁bună",-12.624361038208008],["nê",-12.624371528625488],["ісі",-12.624411582946776],["ความรู้",-12.624422073364258],["ცნობ",-12.624436378479004],["▁‹",-12.624484062194824],["▁komp",-12.624550819396973],["▁학",-12.62456226348877],["дума",-12.624584197998049],["▁SL",-12.624595642089844],["▁национал",-12.624639511108398],["▁kebutuhan",-12.624652862548828],["▁thẩm",-12.624652862548828],["意义",-12.624744415283203],["▁احتمال",-12.62476921081543],["▁morning",-12.624773025512695],["ירות",-12.624818801879885],["▁எம்",-12.624826431274414],["län",-12.624858856201172],["▁kostenlos",-12.624865531921388],["šlo",-12.624926567077637],["sigur",-12.624934196472168],["▁bringen",-12.624977111816406],["аючи",-12.625012397766112],["ھي",-12.625017166137695],["▁Centrum",-12.625018119812012],["ଫ",-12.625048637390137],["▁५०",-12.625056266784668],["▁circ",-12.62507438659668],["▁sorg",-12.625078201293944],["ebilirsiniz",-12.62508773803711],["▁групп",-12.62510871887207],["▁paikka",-12.625141143798828],["ბუ",-12.625176429748535],["的要求",-12.62520980834961],["პო",-12.62521743774414],["▁używa",-12.625245094299316],["▁užsi",-12.625279426574709],["だと思います",-12.62533473968506],["▁gö",-12.625337600708008],["新加坡",-12.62538242340088],["자의",-12.625399589538574],["▁काँग्रेस",-12.62542724609375],["▁insurance",-12.625429153442385],["▁Bardzo",-12.62544059753418],["▁સૌથી",-12.625444412231444],["▁Pau",-12.625460624694824],["▁Bata",-12.62546730041504],["ੜਾ",-12.625475883483888],["▁୬",-12.625486373
901367],["ുമായി",-12.625492095947266],["▁imagina",-12.625502586364746],["іна",-12.62551212310791],["▁author",-12.625532150268556],["ציע",-12.625550270080566],["幅",-12.625564575195312],["sons",-12.625567436218262],["▁místo",-12.625567436218262],["▁знаете",-12.625579833984377],["ОВ",-12.625580787658691],["اھ",-12.625584602355955],["▁Ils",-12.625606536865234],["▁eble",-12.62562656402588],["▁práve",-12.625687599182127],["▁kaa",-12.625723838806152],["▁khas",-12.625723838806152],["▁።",-12.625781059265137],["록",-12.625792503356934],["▁योग",-12.625813484191896],["▁ota",-12.625819206237791],["प्रति",-12.62583827972412],["ట్టి",-12.6258544921875],["這樣的",-12.62594223022461],["ेण",-12.62597370147705],["ಲಿದೆ",-12.625999450683594],["▁bith",-12.626001358032228],["▁giriş",-12.626020431518556],["▁diberi",-12.62603759765625],["▁გავ",-12.62604522705078],["ائين",-12.626052856445312],["ЕС",-12.62606143951416],["想像",-12.626066207885742],["▁ettiği",-12.626139640808104],["▁ପୂର୍ବ",-12.62618923187256],["▁mið",-12.626195907592772],["▁Muungano",-12.626203536987305],["▁selvfølgelig",-12.626203536987305],["▁Massasje",-12.626212120056152],["▁Vikipedio",-12.626221656799316],["▁comparti",-12.626229286193848],["▁região",-12.626240730285645],["终于",-12.626253128051758],["▁tuleva",-12.626276016235352],["▁testimoni",-12.62628173828125],["▁linh",-12.626301765441896],["-50",-12.626337051391602],["▁colaboración",-12.62633991241455],["สู่",-12.62634563446045],["▁čini",-12.626348495483398],["▁Lou",-12.626379013061523],["się",-12.626426696777344],["oper",-12.62644863128662],["náš",-12.626489639282228],["▁Рос",-12.626504898071287],["▁anne",-12.626524925231934],["▁Nähe",-12.62656593322754],["invest",-12.626608848571776],["▁Strand",-12.626652717590332],["ຫຼ",-12.626653671264648],["ालय",-12.626659393310549],["▁demonstr",-12.626664161682127],["likuwa",-12.626679420471191],["▁reproduc",-12.626715660095217],["กด",-12.62674045562744],["▁برید",-12.626747131347656],["▁hjelpe",-12.626802444458008],["ამდე",-12.626803398132324],["сний",-12.626824378967283],["▁Kart",-12.626864433288574],["▁arbete",-12.626896858215332],["Os",-12.62693977355957],["脫",-12.626945495605469],["計劃",-12.626953125],["▁મળી",-12.62697410583496],["▁Madaxweynaha",-12.62697982788086],["▁pharetra",-12.62697982788086],["▁Նիկոլ",-12.62697982788086],["▁മ്മ",-12.627020835876465],["▁hậu",-12.627022743225098],["▁zanima",-12.62702751159668],["▁uli",-12.627033233642578],["υπ",-12.627036094665527],["▁(20)",-12.627038955688477],["▁ఇం",-12.627042770385742],["▁helfen",-12.627044677734377],["▁Way",-12.627105712890623],["▁እንዲሁም",-12.627111434936523],["涉及",-12.627111434936523],["世纪",-12.627177238464355],["▁hita",-12.62721061706543],["▁оз",-12.62723445892334],["یست",-12.627312660217283],["▁ఒ",-12.627323150634766],["ази",-12.627326011657717],["סת",-12.627330780029297],["達到",-12.627371788024902],["Val",-12.62738800048828],["രിക്ക",-12.62740993499756],["▁sistemas",-12.627429008483888],["▁abans",-12.627484321594238],["▁exam",-12.62760066986084],["灵",-12.62766456604004],["क्र",-12.627676963806152],["▁minta",-12.627695083618164],["azo",-12.627703666687012],["وله",-12.62772274017334],["▁chấp",-12.62775707244873],["▁peluang",-12.627758026123049],["▁Stop",-12.627763748168944],["▁vreemdelinge",-12.627767562866213],["▁serán",-12.627812385559082],["▁deutlich",-12.627833366394045],["اله",-12.627842903137209],["▁hyggelig",-12.627861976623535],["UZ",-12.627862930297852],["่น",-12.627862930297852],["▁પડ",-12.627904891967772],["▁آء",-12.627927780151367],["thro",-12.62796115875244],["mañ",-12.62797737121582],["ž
il",-12.627982139587402],["▁참",-12.627988815307615],["更好的",-12.627999305725098],["▁žele",-12.628003120422363],["▁Eltern",-12.628011703491213],["tibus",-12.628022193908691],["▁tiem",-12.628022193908691],["▁innehåll",-12.628059387207031],["▁ගමන්",-12.628087997436523],["▁cesta",-12.628090858459473],["▁сп",-12.62810230255127],["▁yllä",-12.628119468688965],["▁등의",-12.62812042236328],["nejšie",-12.628151893615724],["▁защо",-12.62816333770752],["▁misi",-12.628177642822266],["grafi",-12.62818431854248],["▁Está",-12.62820816040039],["rasa",-12.62822437286377],["изация",-12.628251075744627],["▁مثلا",-12.62828540802002],["amin",-12.62831211090088],["▁کیے",-12.628323554992676],["▁dobu",-12.62834930419922],["व्य",-12.62835693359375],["▁фі",-12.62835693359375],["АД",-12.628382682800291],["▁viu",-12.628393173217772],["▁없이",-12.62840747833252],["зија",-12.628440856933594],["堡",-12.628440856933594],["يى",-12.628457069396973],["小孩",-12.628514289855955],["單位",-12.628515243530272],["▁كال",-12.628536224365234],["▁ياخشى",-12.628539085388184],["rait",-12.628595352172852],["▁zat",-12.628628730773926],["▁امیر",-12.628637313842772],["▁Want",-12.62864875793457],["▁കര",-12.628687858581545],["▁ລະ",-12.628721237182615],["▁blandit",-12.62872314453125],["skrif",-12.628742218017578],["tás",-12.62878704071045],["▁Yi",-12.628866195678713],["ští",-12.628904342651367],["▁izvor",-12.628912925720217],["άκια",-12.62891960144043],["IRA",-12.628969192504885],["▁zapis",-12.62899684906006],["amour",-12.628998756408691],["▁gua",-12.62901496887207],["رے",-12.629053115844728],["▁maksa",-12.629088401794434],["▁arbeid",-12.629205703735352],["▁panahon",-12.629232406616213],["ண்ண",-12.62923526763916],["▁voa",-12.629287719726562],["▁דין",-12.629304885864258],["▁okviru",-12.629310607910156],["စဉ်",-12.629312515258787],["▁کیونکہ",-12.629312515258787],["▁मामले",-12.629312515258787],["▁Dominus",-12.62932586669922],["▁سنوات",-12.629340171813965],["tium",-12.629376411437988],["▁ihrem",-12.629383087158203],["▁إليه",-12.629396438598633],["сп",-12.629438400268556],["▁cale",-12.62947940826416],["▁vrh",-12.629497528076172],["▁Visos",-12.629549980163574],["▁tête",-12.629552841186523],["▁trakt",-12.629582405090332],["▁foly",-12.629598617553713],["▁드",-12.62963581085205],["sø",-12.629647254943848],["▁එපා",-12.629655838012695],["▁yeri",-12.629694938659668],["▁mínimo",-12.629701614379885],["šku",-12.629716873168944],["Sta",-12.629770278930664],["▁almak",-12.629789352416992],["ince",-12.62979221343994],["▁язык",-12.62981414794922],["ಿದರು",-12.629834175109863],["▁باور",-12.62986183166504],["사회",-12.629873275756836],["ារ",-12.629880905151367],["OG",-12.629887580871582],["fá",-12.629894256591797],["▁krye",-12.629900932312012],["▁(24",-12.629920959472656],["▁ganger",-12.629932403564451],["▁njegovo",-12.629977226257324],["▁ўсе",-12.629987716674805],["▁произ",-12.629996299743652],["▁प्रकाश",-12.629999160766602],["сель",-12.630000114440918],["बर",-12.630040168762209],["▁apre",-12.630043029785156],["▁flotte",-12.630043029785156],["▁உண்மை",-12.630091667175291],["▁innebär",-12.630096435546877],["▁giovani",-12.630099296569824],["▁иначе",-12.630101203918455],["适合",-12.63010311126709],["刊",-12.630108833312988],["ったり",-12.63011646270752],["▁áður",-12.630125045776367],["▁поста",-12.630134582519531],["並且",-12.630136489868164],["▁מתוך",-12.630142211914062],["פק",-12.630170822143556],["▁söker",-12.630200386047363],["▁czym",-12.63023281097412],["▁Sound",-12.630244255065918],["▁produit",-12.630268096923828],["▁ስራ",-12.630327224731444],["шни",-12.630403518676758],["വ്യ",-12.6304
20684814451],["mysl",-12.630457878112791],["▁ಎಂ",-12.63047695159912],["▁избира",-12.630539894104004],["נון",-12.63060188293457],["▁jaro",-12.630606651306152],["▁Én",-12.630611419677734],["made",-12.630633354187012],["toka",-12.630684852600098],["პა",-12.63068675994873],["▁bac",-12.630695343017578],["▁swojej",-12.630729675292969],["经历",-12.630759239196776],["▁Hər",-12.63077163696289],["베",-12.630772590637209],["朗",-12.630814552307127],["評",-12.630819320678713],["▁ਰਹੀ",-12.630860328674316],["▁societat",-12.630929946899414],["▁الأن",-12.630971908569336],["обр",-12.630973815917969],["atay",-12.631011009216309],["▁Vam",-12.631019592285156],["▁viverra",-12.631027221679688],["erung",-12.63105583190918],["▁Berita",-12.631074905395508],["▁имаше",-12.631097793579102],["群众",-12.631105422973633],["▁తా",-12.631120681762695],["▁civile",-12.631121635437012],["▁seulement",-12.631121635437012],["▁Mb",-12.631150245666504],["▁каш",-12.63115119934082],["▁copil",-12.631210327148438],["ữ",-12.631211280822754],["▁låg",-12.631224632263184],["▁آنے",-12.631235122680664],["plin",-12.63127326965332],["నూ",-12.631277084350586],["ΑΝ",-12.631314277648926],["▁necə",-12.631340980529783],["lls",-12.631369590759276],["▁suggest",-12.63138484954834],["dila",-12.6314058303833],["▁detaljer",-12.631441116333008],["▁ხელ",-12.63144874572754],["▁fungsi",-12.631481170654297],["tojnë",-12.63150405883789],["于是",-12.631540298461914],["▁Merkel",-12.631573677062988],["misega",-12.631630897521973],["▁ιστορία",-12.631650924682615],["▁слишком",-12.631650924682615],["▁тодорхой",-12.631650924682615],["▁आवेदन",-12.631650924682615],["▁मंदिर",-12.631650924682615],["▁Plu",-12.631656646728516],["▁kijken",-12.631669998168944],["▁modus",-12.631685256958008],["▁CAR",-12.631691932678224],["▁Research",-12.631698608398438],["▁sodales",-12.631702423095703],["ново",-12.631749153137209],["υτ",-12.63178253173828],["▁aldı",-12.631795883178713],["▁끝",-12.631820678710938],["rano",-12.631841659545898],["▁inicio",-12.631848335266112],["identifi",-12.631879806518556],["▁collection",-12.631884574890137],["ujo",-12.631887435913086],["Ý",-12.631892204284668],["▁Nó",-12.631940841674805],["تار",-12.63196849822998],["prem",-12.63200569152832],["пр",-12.632012367248535],["▁ניט",-12.632027626037598],["нията",-12.632046699523926],["▁שימוש",-12.632064819335938],["移民",-12.63209056854248],["صول",-12.632125854492188],["▁نش",-12.63215160369873],["▁piel",-12.632164001464844],["▁årets",-12.63217544555664],["▁totul",-12.632179260253906],["▁ihana",-12.63218116760254],["վելու",-12.632193565368652],["gave",-12.632198333740234],["上述",-12.632220268249512],["▁Demo",-12.63222599029541],["ୟା",-12.632240295410156],["▁nutzen",-12.632240295410156],["tinis",-12.632246971130373],["刑",-12.6322660446167],["▁શ્રી",-12.632287979125977],["▁qabul",-12.632291793823242],["▁ਮਾ",-12.632349967956545],["lleen",-12.632351875305176],["яне",-12.632369041442873],["▁સ્વ",-12.632421493530272],["▁eerder",-12.632437705993652],["▁correspond",-12.632444381713867],["tră",-12.632452964782717],["หนุ่ม",-12.63245964050293],["▁हमें",-12.632473945617676],["▁Haut",-12.632476806640623],["▁nosaltres",-12.632508277893066],["▁laban",-12.632590293884276],["▁πολυ",-12.632612228393556],["ებთ",-12.632637977600098],["▁різ",-12.63264274597168],["araha",-12.632664680480955],["லு",-12.632667541503906],["ผลิต",-12.632668495178224],["ۇ",-12.632678031921388],["▁بى",-12.632678985595703],["роч",-12.632684707641602],["▁плод",-12.632701873779297],["▁veľa",-12.63272762298584],["▁explicar",-12.632750511169434],["▁ਬੀ",-12.632774353027344],["фо
то",-12.632776260375977],["stant",-12.632781982421877],["減少",-12.632821083068848],["▁начина",-12.632827758789062],["▁קען",-12.632838249206545],["▁dades",-12.632844924926758],["ÁS",-12.632866859436035],["раж",-12.632916450500488],["时期",-12.632940292358398],["raga",-12.63294506072998],["▁depende",-12.632983207702637],["χω",-12.633020401000977],["သံ",-12.633033752441406],["▁honest",-12.63305950164795],["▁Year",-12.633111953735352],["给我",-12.633113861083984],["над",-12.633142471313477],["▁mum",-12.63319206237793],["▁ottaa",-12.633209228515623],["▁Fram",-12.633252143859863],["▁అన్నారు",-12.633260726928713],["網友",-12.63326358795166],["ဝင္",-12.633268356323242],["ਬਾ",-12.633333206176758],["ington",-12.633339881896973],["LIK",-12.633343696594238],["▁merge",-12.633365631103516],["▁туура",-12.633382797241213],["док",-12.633404731750488],["▁kapu",-12.63342571258545],["्यू",-12.633430480957031],["私が",-12.63343620300293],["▁confirma",-12.633442878723145],["▁صور",-12.63344669342041],["▁פֿ",-12.633472442626951],["ючы",-12.633512496948242],["▁તેઓ",-12.633545875549316],["▁ngân",-12.6336030960083],["tetés",-12.633621215820312],["imis",-12.633639335632324],["▁nieuw",-12.63366413116455],["▁সং",-12.63369846343994],["zah",-12.63371753692627],["像是",-12.6337308883667],["▁गुरु",-12.63375186920166],["▁구성",-12.633773803710938],["震",-12.633773803710938],["으며",-12.633797645568848],["šel",-12.633804321289062],["രീ",-12.633811950683594],["▁ró",-12.633824348449709],["▁န",-12.633843421936035],["他在",-12.633865356445312],["當時",-12.63393783569336],["▁linea",-12.63394546508789],["▁suc",-12.633956909179688],["▁Bluetooth",-12.633994102478027],["Ek",-12.634005546569824],["▁América",-12.634007453918455],["避",-12.634008407592772],["objectiu",-12.63401222229004],["▁ئەڭ",-12.63401222229004],["▁حقیقت",-12.634013175964355],["▁त्रि",-12.634023666381836],["▁공개",-12.63403606414795],["ENS",-12.634037971496582],["▁dues",-12.634039878845217],["takan",-12.63405990600586],["▁största",-12.63406753540039],["nemu",-12.634078025817873],["閱讀",-12.634079933166504],["zenie",-12.634084701538086],["▁1958",-12.634110450744627],["知名",-12.63412857055664],["▁Onlar",-12.634156227111816],["▁својата",-12.63418674468994],["▁verd",-12.634236335754396],["▁religion",-12.634275436401367],["▁grib",-12.634278297424316],["শ্",-12.63428020477295],["▁DIN",-12.634297370910645],["▁radikal",-12.63430881500244],["▁Jou",-12.634313583374023],["▁Германия",-12.634340286254885],["อย่างไร",-12.6343412399292],["▁đào",-12.634344100952148],["cciones",-12.634352684020996],["ותו",-12.634368896484377],["▁lhe",-12.634369850158691],["▁вся",-12.634371757507324],["▁petrol",-12.634393692016602],["dub",-12.634403228759766],["վեն",-12.63442611694336],["网上",-12.634437561035156],["▁pace",-12.634449005126951],["▁werkt",-12.634455680847168],["▁aktiviteter",-12.6344633102417],["▁उपाय",-12.63447093963623],["ありません",-12.634566307067873],["腦",-12.634577751159668],["▁baja",-12.634608268737791],["▁serão",-12.634645462036133],["කො",-12.634655952453612],["▁hatiye",-12.634692192077637],["▁တရား",-12.634716033935549],["策略",-12.634722709655762],["▁boca",-12.634740829467772],["jál",-12.634743690490724],["โปรแกรม",-12.634775161743164],["▁Göteborg",-12.634777069091797],["▁fald",-12.634777069091797],["▁második",-12.63477897644043],["▁Стефан",-12.634780883789062],["▁penduduk",-12.63478183746338],["一系列",-12.634791374206545],["▁कसरी",-12.634800910949709],["▁Belə",-12.634807586669922],["лым",-12.63483715057373],["вању",-12.634843826293944],["രിയ",-12.634921073913574],["▁progress",-12.634929656982422],["▁beat",-12.63
4934425354004],["ክር",-12.634965896606444],["▁mikið",-12.635004043579102],["▁положение",-12.635013580322266],["▁fotografia",-12.63502025604248],["عظم",-12.63503646850586],["ண்டி",-12.63504409790039],["▁Direct",-12.635075569152832],["هدف",-12.635099411010742],["KL",-12.635112762451172],["▁Бил",-12.635140419006348],["▁العراق",-12.635154724121094],["Tur",-12.635165214538574],["▁alcalde",-12.63519287109375],["▁Oxford",-12.635196685791016],["цэн",-12.635212898254396],["情報を",-12.63521957397461],["▁delen",-12.635271072387695],["▁obriga",-12.635272026062012],["нсульт",-12.635296821594238],["的心",-12.63530731201172],["قد",-12.635340690612791],["▁niks",-12.635355949401855],["ոց",-12.635400772094728],["▁serà",-12.63544464111328],["ылып",-12.635483741760254],["ทั่วไป",-12.635526657104492],["▁মোবাইল",-12.635538101196287],["sher",-12.635544776916504],["ሂ",-12.635560035705566],["▁информацию",-12.635560035705566],["▁դուրս",-12.635560035705566],["▁പ്രത്യേക",-12.635560035705566],["عادة",-12.635567665100098],["▁tristique",-12.635568618774414],["▁όλους",-12.635577201843262],["חור",-12.635595321655272],["▁svin",-12.635601043701172],["zku",-12.63560676574707],["▁60-",-12.635639190673828],["▁wpis",-12.635644912719728],["三年",-12.635649681091309],["▁Цар",-12.63565444946289],["▁Zimmer",-12.635655403137209],["▁ინ",-12.635698318481444],["▁korda",-12.635724067687988],["agar",-12.63575839996338],["specta",-12.635760307312012],["▁tanke",-12.635760307312012],["க்கா",-12.635774612426758],["规",-12.63577938079834],["овано",-12.635781288146973],["▁persoa",-12.635844230651855],["▁german",-12.635883331298828],["▁erfaring",-12.635887145996094],["▁kasvu",-12.635991096496582],["ดํา",-12.636019706726074],["ITO",-12.636078834533691],["▁ថ្ងៃទី",-12.636088371276855],["▁ຮັບ",-12.636093139648438],["▁Map",-12.636096000671388],["也很",-12.636128425598145],["πει",-12.636149406433104],["zná",-12.636155128479004],["ද්",-12.636208534240724],["▁File",-12.636218070983888],["▁2015-",-12.636226654052734],["成就",-12.63623046875],["läng",-12.63623332977295],["▁tendo",-12.63625431060791],["nist",-12.636255264282228],["▁август",-12.636255264282228],["υγ",-12.636281967163086],["▁mother",-12.636284828186035],["יפ",-12.636295318603516],["▁dl",-12.636301040649414],["▁හැම",-12.63630199432373],["▁ನಡೆದ",-12.636334419250488],["▁Inggris",-12.636343955993652],["▁ችግር",-12.636343955993652],["▁enquanto",-12.636346817016602],["▁غلام",-12.6363525390625],["▁قریب",-12.63635540008545],["▁오늘",-12.636356353759766],["▁რომლის",-12.636363983154297],["▁italiana",-12.63637924194336],["ंदा",-12.636404991149902],["▁الشركة",-12.636411666870115],["▁интер",-12.6364164352417],["▁Tempo",-12.636475563049316],["ம்ப",-12.636540412902832],["记录",-12.636550903320312],["▁ഫ",-12.63656520843506],["▁Απο",-12.636585235595703],["eru",-12.636590957641602],["анти",-12.636629104614258],["▁Стра",-12.636670112609863],["▁மன",-12.636786460876465],["▁teras",-12.63679313659668],["▁forint",-12.636846542358398],["▁amici",-12.63687229156494],["ىشى",-12.636881828308104],["မျိုး",-12.63691234588623],["▁monument",-12.63692855834961],["▁Schu",-12.636932373046877],["▁главни",-12.63697624206543],["▁వీడియో",-12.637017250061035],["▁pinta",-12.637042045593262],["▁Leute",-12.637057304382324],["leika",-12.637060165405272],["▁њих",-12.637062072753906],["父亲",-12.637096405029297],["▁പോക",-12.637097358703612],["ايا",-12.637106895446776],["▁цієї",-12.637127876281738],["▁Curabitur",-12.637128829956056],["▁प्रधानमंत्री",-12.637128829956056],["য়ার",-12.637137413024902],["▁трети",-12.637165069580078],["▁քան",-12.63717555999756],
["culos",-12.63719081878662],["nehmen",-12.637224197387695],["▁սա",-12.637271881103516],["ekben",-12.637280464172363],["ację",-12.637293815612791],["▁дагуу",-12.637300491333008],["▁Eskorte",-12.637325286865234],["概念",-12.637331008911133],["ಲೂ",-12.637362480163574],["linn",-12.63738250732422],["tuna",-12.637408256530762],["oki",-12.637434005737305],["▁tjejer",-12.637435913085938],["▁verb",-12.637439727783203],["▁rhai",-12.637502670288086],["▁mindenki",-12.637503623962402],["శా",-12.637507438659668],["paz",-12.63755226135254],["OLA",-12.637567520141602],["добре",-12.63762092590332],["рем",-12.637632369995115],["▁primje",-12.6376953125],["▁fabrica",-12.63769817352295],["▁meest",-12.637727737426758],["▁основе",-12.637774467468262],["▁emis",-12.63782024383545],["▁entry",-12.63782787322998],["▁Азия",-12.637845039367676],["▁masak",-12.637873649597168],["緩",-12.637885093688965],["ၿပီ",-12.637903213500977],["▁mesec",-12.637911796569824],["ኬ",-12.63791275024414],["▁Ngày",-12.63791275024414],["▁uwagę",-12.63791275024414],["▁Alexander",-12.637919425964355],["pero",-12.637924194335938],["▁истории",-12.637927055358888],["੍ਰ",-12.637954711914062],["▁којој",-12.637995719909668],["▁workshop",-12.638029098510742],["▁Anwar",-12.63807201385498],["▁oba",-12.638073921203612],["fold",-12.63808250427246],["▁मैले",-12.638101577758787],["▁Какво",-12.638113021850586],["▁करत",-12.638139724731444],["▁Klo",-12.63816738128662],["▁dòng",-12.638190269470217],["▁soal",-12.638218879699709],["同学",-12.638223648071287],["▁650",-12.638239860534668],["▁menuju",-12.638246536254885],["▁රැ",-12.638256072998049],["լի",-12.638258934020996],["▁Plaza",-12.638279914855955],["▁στ",-12.638283729553224],["▁प्रकाशित",-12.638350486755373],["▁කාලය",-12.638398170471191],["۵",-12.638404846191406],["▁pró",-12.638407707214355],["▁books",-12.63844871520996],["▁klinik",-12.638489723205566],["köz",-12.6384916305542],["robi",-12.638495445251465],["wers",-12.638534545898438],["▁Latin",-12.63853931427002],["ମାନ",-12.638555526733398],["рым",-12.63857364654541],["▁kalori",-12.638575553894045],["ຂຶ້ນ",-12.638612747192385],["▁latest",-12.638614654541016],["域",-12.638654708862305],["▁JAV",-12.638660430908203],["vå",-12.638667106628418],["▁এখন",-12.638679504394531],["▁äldre",-12.638699531555176],["▁simply",-12.638708114624023],["▁පුවත්",-12.638714790344238],["▁vertel",-12.638726234436035],["▁اداره",-12.6387357711792],["▁બધા",-12.638745307922363],["▁veliki",-12.638749122619627],["öz",-12.638774871826172],["图片",-12.63878059387207],["ագույն",-12.638790130615234],["ikas",-12.638805389404297],["▁oamenii",-12.63882541656494],["▁vila",-12.6388521194458],["▁ਮਿਲ",-12.63889217376709],["▁Galego",-12.63893222808838],["стат",-12.638964653015137],["▁Bursa",-12.63898468017578],["ói",-12.639002799987791],["▁Luna",-12.63900375366211],["▁pump",-12.63903522491455],["tynyt",-12.639039039611816],["1.5",-12.639039993286133],["liau",-12.639043807983398],["▁Indian",-12.639063835144045],["десет",-12.639070510864258],["ଶା",-12.63907527923584],["▁kül",-12.639132499694824],["▁daglig",-12.639168739318848],["▁miasta",-12.639195442199709],["▁ব্যবহার",-12.6392240524292],["▁народу",-12.63925552368164],["▁Íslandi",-12.639264106750488],["▁GI",-12.639328002929688],["▁Şa",-12.639348983764648],["▁ಭಾಗ",-12.639410972595217],["▁αντί",-12.639423370361328],["▁kurių",-12.6394624710083],["記憶",-12.639469146728516],["ską",-12.639479637145996],["▁porttitor",-12.639484405517578],["▁ඉදිරිපත්",-12.639486312866213],["▁veľké",-12.639488220214844],["▁nhật",-12.63949203491211],["▁përdor",-12.639498710632324],["▁p
itäisi",-12.639525413513184],["▁bëhet",-12.639531135559082],["▁eenvoudig",-12.639531135559082],["▁Activ",-12.639532089233398],["लेल्या",-12.639540672302246],["▁असते",-12.63954257965088],["▁ограничен",-12.639561653137209],["▁Giá",-12.639570236206056],["▁texte",-12.6395845413208],["▁alergi",-12.639591217041016],["ությանը",-12.639593124389648],["gör",-12.639610290527344],["▁Bron",-12.639629364013672],["▁міжнародн",-12.639629364013672],["reis",-12.639639854431152],["▁siete",-12.63966178894043],["▁tovar",-12.639692306518556],["jien",-12.63970184326172],["teks",-12.639752388000488],["XX",-12.639800071716309],["ንና",-12.639815330505373],["▁Petra",-12.639827728271484],["▁सफल",-12.639883995056152],["▁amal",-12.639979362487791],["▁Seminar",-12.64004135131836],["▁граф",-12.640118598937988],["носи",-12.640124320983888],["▁Rud",-12.640143394470217],["bhar",-12.64016056060791],["▁berish",-12.640182495117188],["▁ખા",-12.64020824432373],["做的",-12.640217781066896],["plaats",-12.640250205993652],["▁σύμφωνα",-12.640271186828612],["▁તમને",-12.640271186828612],["▁kjæreste",-12.640294075012209],["▁zituen",-12.640317916870115],["▁Berikut",-12.64035987854004],["▁alternativa",-12.640378952026367],["▁participat",-12.64039134979248],["▁យក",-12.640405654907228],["\"...",-12.640422821044922],["▁osobe",-12.640423774719238],["▁некој",-12.640440940856934],["pula",-12.640460968017578],["خرج",-12.640467643737791],["स्म",-12.64047145843506],["",-12.66839599609375],["▁pagpapa",-12.668404579162598],["▁raison",-12.668429374694824],["▁Saj",-12.66843605041504],["▁artículo",-12.668445587158203],["▁víc",-12.668463706970217],["▁фил",-12.66846752166748],["ቀው",-12.668567657470703],["wę",-12.668570518493652],["muo",-12.66858959197998],["▁Əli",-12.668594360351562],["dici",-12.668618202209473],["▁storie",-12.668620109558104],["▁rencontre",-12.668643951416016],["▁pote",-12.668652534484863],["શી",-12.668656349182127],["ကောင်း",-12.66868782043457],["nî",-12.668706893920898],["ໂດຍ",-12.668712615966797],["יקה",-12.66871738433838],["ชนิด",-12.668723106384276],["คะ",-12.668736457824709],["нати",-12.668761253356934],["▁yorum",-12.668794631958008],["▁cały",-12.66879653930664],["▁Mang",-12.668806076049805],["▁dạy",-12.668831825256348],["▁personali",-12.668845176696776],["หนัก",-12.668855667114258],["taire",-12.66889476776123],["δρο",-12.668903350830078],["▁ktorých",-12.66891098022461],["▁Young",-12.668927192687988],["▁tram",-12.668931007385254],["▁одоо",-12.66894245147705],["▁села",-12.668943405151367],["կի",-12.669002532958984],["▁pierwszy",-12.669008255004885],["huone",-12.669011116027832],["▁اظہار",-12.669015884399414],["▁අවුරුදු",-12.669015884399414],["▁ಶಿಕ್ಷಣ",-12.66901683807373],["ஷ",-12.669023513793944],["▁lec",-12.669054985046388],["▁Integer",-12.669099807739258],["▁problems",-12.669142723083496],["▁Kamera",-12.669178009033203],["gą",-12.669197082519531],["▁сре",-12.669207572937012],["平成",-12.669235229492188],["бур",-12.66926097869873],["▁있었다",-12.669270515441896],["▁својот",-12.669306755065918],["▁ОД",-12.669318199157717],["ését",-12.669325828552246],["▁jauna",-12.669360160827637],["ryn",-12.669363021850586],["▁آمده",-12.669363021850586],["ični",-12.669377326965332],["▁pomoć",-12.669384002685549],["aigua",-12.66945457458496],["ڪار",-12.669466972351074],["▁аймаг",-12.669473648071287],["team",-12.669528007507324],["▁Kül",-12.66957187652588],["fik",-12.669591903686523],["▁jarri",-12.669642448425291],["▁varat",-12.669649124145508],["ნებ",-12.66965103149414],["▁pengguna",-12.669665336608888],["icht",-12.669730186462402],["▁দল",-12.6697454452514
65],["хва",-12.669757843017578],["áló",-12.66976833343506],["▁ОБ",-12.66981029510498],["بير",-12.669822692871094],["▁lắm",-12.66982650756836],["▁اجازه",-12.66982650756836],["ظهر",-12.669848442077637],["▁производства",-12.66987419128418],["▁buk",-12.669879913330078],["▁אבי",-12.669885635375977],["▁estes",-12.669903755187988],["▁богат",-12.66998291015625],["▁প্রধানমন্ত্রী",-12.67001247406006],["วันที่",-12.67002010345459],["▁páxina",-12.67007541656494],["▁ED",-12.670120239257812],["▁stát",-12.670129776000977],["▁sma",-12.670132637023926],["طور",-12.670174598693848],["▁qad",-12.670188903808594],["паз",-12.67018985748291],["據",-12.67018985748291],["vizi",-12.670228004455566],["▁delo",-12.670228958129885],["▁ბევრი",-12.670278549194336],["ився",-12.670364379882812],["▁نفسه",-12.67037296295166],["Dis",-12.670392990112305],["▁field",-12.67041015625],["▁אנ",-12.670418739318848],["▁прем",-12.670425415039062],["접",-12.67045783996582],["▁kjærlighet",-12.670461654663086],["hood",-12.670527458190918],["▁გვ",-12.670551300048828],["üş",-12.670554161071776],["▁مہ",-12.670555114746094],["▁Kris",-12.670568466186523],["▁againn",-12.670576095581056],["▁fény",-12.670578956604004],["แต่ง",-12.670583724975586],["▁kemampuan",-12.670637130737305],["▁ત્યાં",-12.67064094543457],["▁Schritt",-12.670646667480469],["ăng",-12.67068862915039],["เบอร์",-12.670699119567873],["▁الاتحاد",-12.670700073242188],["하세요",-12.670702934265137],["▁ترک",-12.670705795288086],["לער",-12.670719146728516],["віч",-12.670759201049805],["ుకుని",-12.67076587677002],["▁కే",-12.670777320861816],["協",-12.670816421508787],["gê",-12.670844078063965],["▁నే",-12.670880317687988],["危",-12.67091464996338],["▁레",-12.67091941833496],["▁બની",-12.670924186706545],["▁près",-12.670926094055176],["▁bilik",-12.670950889587402],["شق",-12.671024322509766],["ота",-12.671072006225586],["▁Alta",-12.671095848083496],["ოც",-12.671146392822266],["ьного",-12.671207427978516],["家里",-12.671212196350098],["頁",-12.671212196350098],["▁Myanmar",-12.671216011047363],["pida",-12.671221733093262],["▁аш",-12.671222686767578],["▁1936",-12.671241760253906],["▁edilən",-12.671268463134766],["هل",-12.671273231506348],["marin",-12.671292304992676],["송",-12.671308517456056],["ΕΛ",-12.671329498291016],["▁Prim",-12.67133331298828],["纸",-12.671334266662598],["资本",-12.671334266662598],["▁తి",-12.671339988708496],["ište",-12.671390533447266],["▁Като",-12.671398162841797],["▁кет",-12.671432495117188],["หน่อย",-12.671449661254885],["▁tầng",-12.671449661254885],["▁очередь",-12.671449661254885],["▁જ્યારે",-12.671449661254885],["▁Chelsea",-12.6714506149292],["▁Natal",-12.67146110534668],["tyy",-12.67147445678711],["▁କରିବାକୁ",-12.67155647277832],["▁аль",-12.671564102172852],["۷",-12.671589851379396],["▁നിയമ",-12.671595573425291],["▁киши",-12.671623229980469],["▁મિ",-12.671648979187012],["prima",-12.671649932861328],["すぐに",-12.67165184020996],["āta",-12.671652793884276],["▁dokter",-12.671669960021973],["▁financier",-12.671692848205566],["可能会",-12.671692848205566],["▁Jordan",-12.671698570251465],["▁daarvan",-12.671740531921388],["evo",-12.671748161315918],["▁bheil",-12.671751976013184],["▁Europsk",-12.671794891357422],["ଟେ",-12.671795845031738],["▁لائن",-12.671815872192385],["▁studia",-12.6718168258667],["▁hands",-12.671828269958496],["忘",-12.67184829711914],["dık",-12.67185401916504],["▁mujer",-12.671878814697266],["íu",-12.671911239624023],["▁muze",-12.671924591064451],["▁baha",-12.6719970703125],["▁flug",-12.672067642211914],["чете",-12.672080993652344],["dning",-12.672091484069824],["پل",-12.6720
9529876709],["▁beau",-12.672101974487305],["▁алған",-12.672101974487305],["▁እኛ",-12.672163009643556],["日本の",-12.672164916992188],["tile",-12.672171592712402],["▁homes",-12.672171592712402],["▁продава",-12.672175407409668],["提到",-12.672183990478516],["怒",-12.672211647033691],["▁ហ",-12.672212600708008],["στή",-12.672219276428224],["වෙ",-12.672235488891602],["▁lascia",-12.672250747680664],["▁berkaitan",-12.672261238098145],["▁nền",-12.67226219177246],["貓",-12.67226219177246],["jící",-12.672270774841309],["▁առանց",-12.672271728515623],["▁ලක්",-12.672290802001951],["▁ڊي",-12.672295570373535],["fed",-12.672304153442385],["▁గు",-12.672308921813965],["نب",-12.672341346740724],["▁Гл",-12.672395706176758],["▁Биз",-12.672405242919922],["▁Alan",-12.6724214553833],["▁kéo",-12.672422409057615],["▁oy",-12.672444343566896],["کٹ",-12.67247486114502],["▁मैंने",-12.672479629516602],["▁Essen",-12.67248249053955],["▁Sarah",-12.672492980957031],["▁Guide",-12.672505378723145],["▁gətir",-12.672505378723145],["▁XXX",-12.672527313232422],["/12/",-12.672561645507812],["illas",-12.672572135925291],["▁जिस",-12.672578811645508],["▁رحم",-12.672582626342772],["▁lidí",-12.67258644104004],["▁нэгэн",-12.672592163085938],["▁Jong",-12.672647476196287],["OA",-12.672690391540527],["ကြား",-12.672754287719728],["▁لان",-12.672771453857422],["▁(21",-12.672789573669434],["мед",-12.672809600830078],["torul",-12.672839164733888],["yur",-12.672845840454102],["▁ნუ",-12.672897338867188],["гол",-12.672904014587402],["▁ලෝක",-12.67296314239502],["▁carbon",-12.672968864440918],["▁març",-12.672988891601562],["▁finalmente",-12.672993659973145],["▁puesto",-12.673004150390623],["▁continuare",-12.6730375289917],["ၿမိဳ႕",-12.673072814941406],["rı",-12.673081398010254],["环保",-12.673112869262695],["▁tiyan",-12.673123359680176],["▁została",-12.67313003540039],["▁amazing",-12.673133850097656],["▁가족",-12.673134803771973],["▁materiál",-12.673139572143556],["ury",-12.673199653625488],["ڍ",-12.67320156097412],["▁około",-12.67324924468994],["▁มัน",-12.673254013061523],["▁mijë",-12.673294067382812],["▁സ്വ",-12.673299789428713],["▁valida",-12.673303604125977],["른",-12.673315048217772],["мах",-12.673340797424316],["▁fui",-12.67334270477295],["▁ши",-12.67336082458496],["▁камен",-12.673365592956545],["லே",-12.673397064208984],["εσ",-12.673416137695312],["▁bestemt",-12.673423767089844],["ಎಂ",-12.673432350158691],["▁listen",-12.673481941223145],["เนื้อ",-12.673505783081056],["▁lengkap",-12.673534393310549],["oknak",-12.673535346984863],["▁ආණ්ඩුව",-12.673583030700684],["ፈር",-12.673601150512695],["▁acce",-12.673635482788086],["▁vsi",-12.673636436462402],["▁hisob",-12.673645973205566],["เตอร์",-12.673666000366213],["▁αρχ",-12.673669815063477],["ujące",-12.673687934875488],["MD",-12.67368984222412],["haft",-12.673709869384766],["▁quay",-12.67372703552246],["▁Мат",-12.67374038696289],["leriyle",-12.673751831054688],["生活中",-12.673751831054688],["очки",-12.673837661743164],["viti",-12.673855781555176],["ipi",-12.67385959625244],["щие",-12.673860549926758],["▁aspira",-12.673860549926758],["▁ठूलो",-12.673888206481934],["χι",-12.67388916015625],["▁amháin",-12.67388916015625],["▁sớm",-12.67388916015625],["▁wtedy",-12.673895835876465],["年に",-12.673905372619627],["▁해당",-12.673910140991213],["ΤΕ",-12.67391872406006],["▁mängi",-12.673938751220703],["▁запад",-12.673979759216309],["▁njihovo",-12.673998832702637],["▁Bun",-12.674002647399902],["ሠ",-12.674005508422852],["が多い",-12.6740140914917],["ieron",-12.674036979675291],["▁Katibu",-12.674044609069824],["▁થ",-12.67404556274414],["
ত্ত",-12.67405605316162],["дър",-12.674071311950684],["సారి",-12.674090385437012],["▁vsem",-12.674111366271973],["▁järgi",-12.674139976501465],["▁places",-12.674142837524414],["▁răng",-12.674200057983398],["▁scop",-12.674217224121094],["ширен",-12.67421817779541],["▁camping",-12.674224853515623],["▁எந்த",-12.674234390258787],["▁Совет",-12.674266815185549],["▁жақсы",-12.674288749694824],["▁ćete",-12.674299240112305],["▁ٻين",-12.674321174621582],["unge",-12.67433738708496],["▁verən",-12.674341201782228],["spi",-12.674368858337402],["▁început",-12.674384117126465],["▁wrong",-12.674413681030272],["ໃຫມ່",-12.67445945739746],["ביל",-12.674508094787598],["taki",-12.674607276916504],["▁heraus",-12.674607276916504],["▁сям",-12.674660682678224],["▁desenvolvimento",-12.674702644348145],["▁έγινε",-12.67470359802246],["▁କିନ୍ତୁ",-12.674711227416992],["▁Oficial",-12.67471694946289],["▁Beitrag",-12.674721717834473],["ტური",-12.674731254577637],["وط",-12.674745559692385],["▁Gent",-12.674748420715332],["変",-12.674756050109863],["oissa",-12.674759864807127],["ימים",-12.674786567687988],["▁каршы",-12.67479133605957],["ಚ್ಚ",-12.674823760986328],["▁yatırım",-12.674850463867188],["▁câteva",-12.674851417541504],["емых",-12.674872398376465],["action",-12.674911499023438],["▁HER",-12.674919128417969],["监督",-12.674951553344728],["Biz",-12.674966812133787],["▁اندازه",-12.674973487854004],["▁انه",-12.674986839294434],["рот",-12.674996376037598],["rudi",-12.675004005432127],["ङ्क",-12.675004005432127],[".[2]",-12.67502212524414],["อั",-12.67503547668457],["▁الزام",-12.675045013427734],["良好",-12.675052642822266],["wick",-12.675071716308594],["-07-",-12.675106048583984],["йтын",-12.675111770629885],["▁चला",-12.67516040802002],["▁bukur",-12.67517375946045],["▁üzerinden",-12.675177574157717],["ນ້ໍາ",-12.675214767456056],["ंची",-12.675217628479004],["冠",-12.675246238708496],["▁ალ",-12.67525577545166],["โด",-12.67525863647461],["cios",-12.67526149749756],["клас",-12.675314903259276],["▁постанов",-12.675353050231934],["▁priprav",-12.675389289855955],["ДИ",-12.675406455993652],["ലും",-12.675426483154297],["不太",-12.675442695617676],["▁Gobierno",-12.675517082214355],["▁خبرگزاری",-12.675517082214355],["▁olmaq",-12.675519943237305],["▁разом",-12.675521850585938],["▁уровне",-12.675531387329102],["顔",-12.67555332183838],["▁ଦଳ",-12.675569534301758],["▁prática",-12.675606727600098],["▁lux",-12.675621032714844],["▁lär",-12.675621032714844],["▁teljesen",-12.675736427307127],["▁viol",-12.675743103027344],["▁EK",-12.675752639770508],["áveis",-12.675810813903809],["ања",-12.675819396972656],["▁title",-12.675838470458984],["kamer",-12.675878524780272],["▁Penga",-12.675884246826172],["ರಿಯ",-12.675891876220703],["▁luogo",-12.675921440124512],["quant",-12.67596435546875],["▁Дали",-12.67597770690918],["▁kwam",-12.676015853881836],["▁Michel",-12.676033973693848],["▁skicka",-12.676061630249023],["eann",-12.676068305969238],["▁activitats",-12.676095008850098],["▁mori",-12.676095008850098],["れて",-12.676102638244627],["▁egon",-12.676137924194336],["▁nola",-12.676155090332031],["inizi",-12.676169395446776],["▁druži",-12.676177024841309],["ujących",-12.676206588745115],["cir",-12.676209449768066],["לעך",-12.676223754882812],["išče",-12.676236152648926],["خط",-12.67624855041504],["▁ہزار",-12.676255226135254],["▁ครีม",-12.67625904083252],["धन",-12.67626667022705],["аваны",-12.676275253295898],["եզ",-12.676275253295898],["szem",-12.676299095153809],["▁فضای",-12.676308631896973],["檔",-12.676311492919922],["১",-12.67631721496582],["▁4,5",-12.676319122314451
],["ตําแหน่ง",-12.676331520080566],["▁Kraj",-12.67635440826416],["▁prostitu",-12.676356315612791],["pê",-12.676376342773438],["詩",-12.67638111114502],["▁όλοι",-12.676387786865234],["▁ପାଇ",-12.676396369934082],["▁korištenja",-12.676400184631348],["reise",-12.676406860351562],["▁რას",-12.67642307281494],["▁навед",-12.676444053649902],["▁juttu",-12.67649269104004],["shik",-12.676521301269531],["▁calcul",-12.67657470703125],["кого",-12.67658805847168],["atoare",-12.676612854003906],["fekt",-12.67661952972412],["▁ഇത്തര",-12.676639556884766],["▁pratique",-12.67668342590332],["▁yngre",-12.67669677734375],["▁პო",-12.676712989807127],["▁Camera",-12.676716804504396],["▁yapma",-12.676752090454102],["你可以",-12.676763534545898],["▁Head",-12.676764488220217],["▁створення",-12.676777839660645],["как",-12.6768159866333],["ajte",-12.676827430725098],["▁colo",-12.67684555053711],["дение",-12.676846504211426],["▁Ав",-12.676847457885742],["▁говорят",-12.676892280578612],["อันดับ",-12.67692756652832],["▁sauna",-12.676959991455078],["▁Cluj",-12.676986694335938],["bûn",-12.676989555358888],["▁passen",-12.677043914794922],["拖",-12.677099227905272],["▁statut",-12.67712116241455],["▁τελευταία",-12.677149772644045],["▁საერთაშორისო",-12.677149772644045],["▁Hãy",-12.67715072631836],["▁Bucuresti",-12.677153587341309],["γά",-12.677189826965332],["▁definitiv",-12.677189826965332],["▁정치",-12.677199363708496],["▁вяр",-12.677217483520508],["ใกล้",-12.677234649658203],["сем",-12.677252769470217],["▁needed",-12.677271842956545],["deel",-12.67727279663086],["▁Belediyesi",-12.67730712890625],["zë",-12.677374839782717],["▁stron",-12.677392959594728],["jil",-12.677414894104004],["▁Bož",-12.67742156982422],["▁interne",-12.677425384521484],["бави",-12.677450180053713],["临",-12.677450180053713],["Inter",-12.677465438842772],["รายการ",-12.677465438842772],["▁തുടങ്ങിയ",-12.677474975585938],["▁invit",-12.677485466003418],["▁מן",-12.67750358581543],["▁soir",-12.677508354187012],["▁svaki",-12.67751693725586],["Kur",-12.677563667297363],["▁aroma",-12.677566528320312],["skat",-12.677627563476562],["▁تنظيم",-12.677674293518066],["▁особо",-12.677679061889648],["▁ക്കു",-12.677682876586914],["▁ايم",-12.677692413330078],["▁служи",-12.67772102355957],["/15",-12.677721977233888],["▁dermed",-12.677724838256836],["▁jashtë",-12.677760124206545],["▁једна",-12.677767753601074],["▁корист",-12.677776336669922],["ευση",-12.677785873413086],["юк",-12.67782974243164],["桥",-12.677931785583496],["▁världen",-12.67794132232666],["рук",-12.677943229675291],["мө",-12.67796230316162],["▁perkembangan",-12.677966117858888],["▁podjetja",-12.677966117858888],["▁درآمد",-12.67796802520752],["▁있도록",-12.677969932556152],["cena",-12.67797565460205],["▁ade",-12.677978515625],["▁يوليو",-12.677988052368164],["▁бусад",-12.678007125854492],["cione",-12.678009033203123],["▁രണ്ട്",-12.67802619934082],["amerika",-12.678034782409668],["▁уме",-12.6780424118042],["ással",-12.678050994873049],["रिया",-12.678055763244627],["▁Introdu",-12.678095817565918],["▁wengine",-12.678128242492676],["▁विदेश",-12.678129196166992],["▁ਘਰ",-12.678130149841309],["相当",-12.678133010864258],["▁기본",-12.67813491821289],["▁dejta",-12.678217887878418],["▁መብት",-12.678217887878418],["ەۋ",-12.678224563598633],["sigt",-12.678232192993164],["mpu",-12.678236961364746],["▁سنت",-12.678237915039062],["▁joj",-12.67823886871338],["▁obair",-12.678272247314451],["▁siendo",-12.678281784057615],["-2015",-12.678319931030272],["▁diris",-12.678336143493652],["ରୋ",-12.678380966186523],["▁114",-12.678426742553713],["zani",-12.678
427696228027],["▁ডি",-12.67843246459961],["▁округ",-12.678448677062988],["muka",-12.678452491760254],["▁poput",-12.678472518920898],["▁tipos",-12.678481101989746],["▁jaz",-12.67850112915039],["▁נא",-12.678518295288086],["▁parler",-12.678526878356934],["рки",-12.678555488586426],["จอง",-12.678582191467283],["▁interneta",-12.67864227294922],["▁পুলিশ",-12.678696632385254],["▁usque",-12.678702354431152],["cino",-12.678712844848633],["柔",-12.678750038146973],["▁ТВ",-12.678751945495604],["▁чека",-12.678780555725098],["▁밝혔다",-12.678807258605955],["▁Shqipërisë",-12.678812980651855],["▁policy",-12.678813934326172],["ंनी",-12.678821563720703],["▁CM",-12.67884349822998],["▁etiket",-12.678850173950195],["ตาย",-12.678874969482422],["▁عصر",-12.678903579711914],["▁પોતાના",-12.678906440734863],["fare",-12.678925514221191],["▁eventos",-12.678926467895508],["дес",-12.678936958312988],["▁pintu",-12.678975105285645],["ಸ್ತ",-12.678994178771973],["▁පමණ",-12.678995132446287],["▁भूमिका",-12.678997039794922],["▁FO",-12.679006576538086],["laste",-12.679028511047363],["▁Interesse",-12.679043769836426],["vera",-12.679078102111816],["▁blind",-12.67909336090088],["били",-12.679096221923828],["ందని",-12.679171562194824],["▁politic",-12.679191589355469],["▁toks",-12.679203987121582],["പ്പോ",-12.679235458374023],["▁scan",-12.67933177947998],["νή",-12.679333686828612],["éad",-12.679349899291992],["tamaan",-12.679381370544434],["▁hålla",-12.67940902709961],["▁виду",-12.679469108581545],["舉行",-12.679469108581545],["▁reflect",-12.679471969604492],["trakt",-12.679491996765137],["daya",-12.6795072555542],["及其",-12.679508209228516],["▁Edition",-12.679516792297363],["tapa",-12.679542541503906],["ись",-12.679545402526855],["扬",-12.67955207824707],["учи",-12.67956256866455],["▁центра",-12.67956256866455],["▁elev",-12.67957592010498],["▁pozici",-12.679577827453612],["▁kafe",-12.679582595825195],["plex",-12.679591178894045],["▁Hulle",-12.679607391357422],["▁duket",-12.679620742797852],["▁सुविधा",-12.679640769958496],["▁Base",-12.679643630981444],["▁escolar",-12.679643630981444],["▁Lý",-12.679647445678713],["प्त",-12.679668426513672],["inq",-12.679677963256836],["llos",-12.679698944091797],["▁қорғау",-12.67972469329834],["डि",-12.67973804473877],["▁পে",-12.67973804473877],["▁pede",-12.679739952087402],["▁программа",-12.679803848266602],["▁tarih",-12.679810523986816],["▁nowe",-12.679829597473145],["▁ÉS",-12.679845809936523],["▁ತಾ",-12.679875373840332],["অ",-12.679986953735352],["ભાઈ",-12.68000316619873],["ésére",-12.680015563964844],["▁igény",-12.68002223968506],["▁پڻ",-12.680025100708008],["λλη",-12.680036544799805],["atud",-12.680059432983398],["▁congue",-12.680088996887209],["▁беа",-12.680102348327637],["國內",-12.680115699768066],["ႈ",-12.68015480041504],["ქი",-12.680176734924316],["▁XP",-12.68021297454834],["▁Çin",-12.680224418640137],["dék",-12.680267333984377],["▁lojë",-12.680294036865234],["▁Cina",-12.68029499053955],["▁Indien",-12.680304527282717],["▁болести",-12.680330276489258],["qə",-12.680350303649902],["▁minuter",-12.6803560256958],["lost",-12.680368423461914],["ペ",-12.68039321899414],["▁manje",-12.68041706085205],["▁σχέση",-12.680421829223633],["يلة",-12.680445671081545],["▁غلط",-12.68048858642578],["ēti",-12.680489540100098],["ttävä",-12.680557250976562],["▁diferite",-12.680564880371094],["ative",-12.680580139160156],["▁Ezt",-12.680588722229004],["▁Gara",-12.680596351623535],["▁୧",-12.68061637878418],["しても",-12.680644035339355],["▁akademik",-12.6806640625],["AND",-12.680673599243164],["▁नवीन",-12.680676460266112],["になりました"
,-12.680693626403809],["▁strany",-12.680706024169922],["viš",-12.680723190307615],["itati",-12.68079948425293],["ิต",-12.680829048156738],["▁Osa",-12.680859565734863],["▁Elektro",-12.680898666381836],["▁tempore",-12.68092441558838],["ndis",-12.680935859680176],["▁Lokal",-12.680957794189451],["▁χρήση",-12.68095874786377],["▁જોવા",-12.6809663772583],["▁resep",-12.680973052978516],["▁нашем",-12.680988311767578],["▁نیا",-12.680999755859377],["▁mkuu",-12.68100643157959],["álny",-12.68105125427246],["▁ምንም",-12.681072235107422],["的主要",-12.68108367919922],["▁zonas",-12.681097030639648],["ÁL",-12.681116104125977],["▁विजय",-12.681147575378418],["▁GE",-12.681160926818848],["izat",-12.68117332458496],["gani",-12.681204795837402],["▁təsdiq",-12.681241035461426],["▁වෛද්",-12.681241035461426],["▁собі",-12.681244850158691],["▁статті",-12.681251525878906],["▁Рэ",-12.681262969970703],["▁сматра",-12.681262969970703],["▁boyunca",-12.681292533874512],["になっている",-12.681294441223145],["▁handling",-12.681302070617676],["▁නම",-12.681349754333496],["▁gesta",-12.681402206420898],["▁ил",-12.681410789489746],["▁ლი",-12.681418418884276],["▁እንዳ",-12.681429862976074],["▁포함",-12.68146514892578],["cium",-12.68149757385254],["▁joan",-12.681557655334473],["дени",-12.681595802307127],["نف",-12.681612968444824],["โย",-12.681657791137695],["▁marg",-12.68167781829834],["RON",-12.681684494018556],["▁אתרים",-12.681693077087402],["တွေကို",-12.681745529174805],["สอน",-12.68175220489502],["לית",-12.681757926940918],["edit",-12.681779861450195],["▁dili",-12.681859970092772],["▁შეი",-12.681865692138672],["▁Hau",-12.681884765625],["▁сторон",-12.681896209716797],["严格",-12.681994438171388],["简",-12.682001113891602],["حسن",-12.682002067565918],["▁ympäristö",-12.682005882263184],["tune",-12.68201732635498],["zanie",-12.68203830718994],["нис",-12.682045936584473],["▁අත",-12.682056427001951],["▁Erdogan",-12.682062149047852],["▁прокурор",-12.682062149047852],["▁ವಿಶೇಷ",-12.682062149047852],["▁ሁለት",-12.6820650100708],["▁krát",-12.682096481323242],["▁қарсы",-12.682111740112305],["▁ಲಕ್ಷ",-12.68213176727295],["gegeven",-12.682150840759276],["▁nomine",-12.682186126708984],["oby",-12.682202339172363],["▁вступ",-12.682208061218262],["▁ቃል",-12.682217597961426],["تال",-12.682241439819336],["▁बने",-12.682269096374512],["公布",-12.682271003723145],["ավար",-12.682272911071776],["▁cég",-12.682291030883787],["▁научи",-12.682318687438965],["ونکي",-12.682321548461914],["ására",-12.682326316833496],["▁roda",-12.68233871459961],["νων",-12.682358741760254],["▁Kuva",-12.682373046875],["onin",-12.68238925933838],["наас",-12.682390213012695],["▁वन",-12.682405471801758],["ibo",-12.682435035705566],["▁Lars",-12.682458877563477],["修改",-12.682504653930664],["▁اخ",-12.682507514953612],["受け",-12.682628631591797],["δω",-12.682683944702148],["خذ",-12.682686805725098],["搬",-12.682711601257324],["▁zarar",-12.682714462280272],["▁Поль",-12.682722091674805],["чор",-12.682734489440918],["issima",-12.682768821716309],["▁starfs",-12.682806968688965],["LING",-12.68280792236328],["▁cambiar",-12.682808876037598],["▁njia",-12.682819366455078],["▁latin",-12.682825088500977],["亂",-12.68282699584961],["洪",-12.682838439941406],["▁scelerisque",-12.68288230895996],["킹",-12.682883262634276],["palli",-12.68288516998291],["▁zdarma",-12.682897567749023],["▁അനു",-12.682899475097656],["▁lihtsalt",-12.682909965515137],["▁panna",-12.6829195022583],["▁круп",-12.68294906616211],["▁ਡਾ",-12.682953834533691],["▁കാരണം",-12.682961463928224],["▁প্ৰ",-12.682989120483398],["▁3.1",-12.683003425598145],["▁Ott",-12.
683015823364258],["ały",-12.68302059173584],["▁defin",-12.683030128479004],["ຕົວ",-12.683042526245115],["▁katse",-12.6830472946167],["真正的",-12.683048248291016],["▁συ",-12.683049201965332],["rovna",-12.683055877685549],["350",-12.683061599731444],["大変",-12.683065414428713],["▁αντ",-12.683104515075684],["▁tih",-12.683116912841797],["ofer",-12.68312168121338],["▁skrib",-12.683131217956545],["ახ",-12.683134078979492],["▁regres",-12.683167457580566],["طو",-12.683170318603516],["भाग",-12.68317699432373],["▁храм",-12.683191299438477],["▁transmis",-12.683202743530272],["▁አው",-12.683212280273438],["▁කලා",-12.683256149291992],["EMA",-12.683263778686523],["▁vendit",-12.683295249938965],["RB",-12.68332862854004],["▁перад",-12.683345794677734],["やすい",-12.683345794677734],["▁kovo",-12.683354377746582],["▁ທີ",-12.683394432067873],["▁poslední",-12.68341064453125],["때",-12.683473587036133],["▁Lim",-12.683480262756348],["wanie",-12.68350601196289],["▁Muz",-12.683576583862305],["ڪا",-12.68357753753662],["ਡੀ",-12.68358039855957],["miss",-12.68364143371582],["zysk",-12.683661460876465],["blik",-12.68366527557373],["ljeno",-12.683688163757324],["广大",-12.683690071105955],["▁අලුත්",-12.683704376220703],["▁හිටපු",-12.683706283569336],["▁પાસે",-12.683716773986816],["▁ድረስ",-12.683728218078612],["▁samtidigt",-12.683752059936523],["رئ",-12.683805465698242],["tzera",-12.683843612670898],["▁Arma",-12.683846473693848],["▁ful",-12.68388843536377],["徵",-12.683917999267578],["ብር",-12.68393325805664],["▁řekl",-12.683989524841309],["▁originale",-12.68401050567627],["▁moitos",-12.684045791625977],["mile",-12.684056282043455],["ોને",-12.684083938598633],["ეც",-12.68409824371338],["▁mall",-12.6841459274292],["sman",-12.684165954589844],["hva",-12.684236526489258],["▁paša",-12.684256553649902],["KG",-12.68427848815918],["status",-12.68431568145752],["ेश्वर",-12.684334754943848],["▁iray",-12.684366226196287],["andu",-12.684370040893556],["▁guten",-12.684405326843262],["jące",-12.684423446655272],["▁publicado",-12.684423446655272],["eeyay",-12.684429168701172],["▁העל",-12.684443473815918],["šanā",-12.684459686279297],["شكل",-12.684473991394045],["pid",-12.684475898742676],["▁GmbH",-12.684526443481444],["▁Když",-12.684526443481444],["▁그래서",-12.684528350830078],["▁ұлттық",-12.684530258178713],["▁tratamiento",-12.68454647064209],["vun",-12.684552192687988],["▁Henry",-12.684564590454102],["▁zero",-12.68458366394043],["▁vecka",-12.684590339660645],["▁आदेश",-12.684590339660645],["▁ஊ",-12.684615135192873],["▁شهید",-12.684622764587402],["▁בלבד",-12.68463897705078],["icky",-12.684673309326172],["▁سلمان",-12.684697151184082],["▁erotisk",-12.684754371643066],["▁vërtetë",-12.684799194335938],["患",-12.684815406799316],["▁byly",-12.684823989868164],["▁सक्छ",-12.684823989868164],["▁Dim",-12.684825897216797],["▁විශේෂ",-12.684849739074709],["▁ervaar",-12.684854507446287],["▁zilizo",-12.68494987487793],["▁esimese",-12.684988021850586],["▁discuss",-12.685003280639648],["印刷",-12.68503475189209],["ተር",-12.685038566589355],["▁incendi",-12.685077667236328],["▁переход",-12.68510913848877],["▁ජ",-12.685179710388184],["▁manipula",-12.685182571411133],["וֹ",-12.685185432434082],["ულად",-12.685206413269045],["вяр",-12.685214042663574],["▁хол",-12.68521785736084],["▁ülke",-12.685232162475586],["▁සෙ",-12.685239791870115],["/18",-12.685242652893066],["eoirí",-12.685246467590332],["жни",-12.685285568237305],["▁pinag",-12.685290336608888],["東西",-12.685320854187012],["配置",-12.685322761535645],["fera",-12.685332298278809],["▁român",-12.685340881347656],["ۀ",-12.68534
9464416504],["▁kesempatan",-12.685349464416504],["▁články",-12.685349464416504],["▁वेबसाइट",-12.685349464416504],["▁କଂଗ୍ରେସ",-12.685349464416504],["▁संपर्क",-12.68535327911377],["▁Wizara",-12.685361862182615],["▁importancia",-12.685367584228516],["▁مولانا",-12.685382843017578],["▁izena",-12.685410499572754],["راه",-12.685415267944336],["▁mỹ",-12.68542194366455],["▁ort",-12.685425758361816],["▁Canon",-12.685426712036133],["▁مثبت",-12.68543815612793],["▁först",-12.685502052307127],["▁талап",-12.685508728027344],["оло",-12.685517311096191],["▁Beth",-12.685628890991213],["▁минута",-12.68563175201416],["▁Ես",-12.685635566711426],["去了",-12.68568229675293],["ičnih",-12.685739517211914],["mare",-12.68574047088623],["▁führen",-12.685779571533203],["▁alguma",-12.685836791992188],["▁blot",-12.685837745666504],["▁hlav",-12.685866355895996],["валася",-12.685914039611816],["▁kutu",-12.685992240905762],["▁மிக",-12.68602466583252],["မီ",-12.686058044433594],["▁ලිය",-12.68607234954834],["ание",-12.686076164245604],["ચા",-12.686079978942873],["▁свого",-12.68608856201172],["斗",-12.686115264892578],["kong",-12.686127662658691],["▁واست",-12.686148643493652],["▁dầu",-12.68617343902588],["▁Улуттук",-12.68617343902588],["▁lod",-12.686182022094728],["поряд",-12.686223030090332],["▁lesen",-12.686238288879396],["▁numele",-12.686240196228027],["ीको",-12.68628978729248],["έμ",-12.686299324035645],["▁constante",-12.686360359191896],["▁населення",-12.686361312866213],["ლებს",-12.686373710632324],["ಮೀ",-12.686394691467283],["▁Rolle",-12.686429023742676],["▁Kli",-12.686452865600586],["gång",-12.686460494995115],["▁Girl",-12.686470985412598],["قب",-12.686535835266112],["lci",-12.686593055725098],["▁IZ",-12.686604499816896],["▁Amerikan",-12.68660831451416],["sika",-12.68661880493164],["▁cusub",-12.686620712280272],["▁550",-12.68663215637207],["▁bhe",-12.68663501739502],["აღ",-12.68665885925293],["өөс",-12.68667221069336],["▁מנת",-12.686697959899902],["▁ion",-12.686702728271484],["ूल",-12.68671703338623],["otti",-12.686721801757812],["ພັກ",-12.686745643615724],["▁Nadal",-12.686762809753418],["agra",-12.686786651611328],["▁SAN",-12.686796188354492],["▁cep",-12.686797142028809],["▁ĉiu",-12.686833381652832],["▁vieną",-12.68687629699707],["shish",-12.686899185180664],["承担",-12.686930656433104],["▁pourquoi",-12.68699836730957],["▁प्रयास",-12.68699836730957],["▁протягом",-12.687003135681152],["▁නොව",-12.687013626098633],["▁cykl",-12.687047958374023],["▁לדעת",-12.687056541442873],["依法",-12.687056541442873],["▁بىز",-12.68706226348877],["ប្រទេស",-12.687076568603516],["▁حصہ",-12.687085151672363],["▁kunye",-12.687142372131348],["に出",-12.687164306640623],["▁tlak",-12.687178611755373],["▁cup",-12.68718147277832],["▁jasa",-12.687214851379396],["فو",-12.687236785888672],["▁build",-12.687254905700684],["▁Mkoa",-12.68725872039795],["▁granda",-12.687328338623049],["уул",-12.687357902526855],["▁ovu",-12.687382698059082],["▁ચો",-12.687438011169434],["▁Günü",-12.6874418258667],["▁svijet",-12.687487602233888],["▁anledning",-12.6875],["▁경기",-12.687515258789062],["▁збір",-12.687530517578123],["▁Serie",-12.687541007995604],["▁traz",-12.687545776367188],["▁Contra",-12.68755054473877],["萬元",-12.687576293945312],["အရ",-12.68760871887207],["ൊന്നും",-12.6876802444458],["▁Sua",-12.687682151794434],["új",-12.687695503234863],["▁dışında",-12.687744140625],["▁informatii",-12.687780380249023],["soci",-12.687784194946287],["▁भविष्य",-12.687822341918944],["▁Вучић",-12.687826156616213],["bba",-12.687868118286133],["▁صفحہ",-12.687870025634766],["▁आमच्या",-12.68787
0979309082],["jima",-12.687875747680664],["ально",-12.68788719177246],["ლით",-12.687909126281738],["▁কোনো",-12.68792724609375],["▁เวลา",-12.687932014465332],["▁Ż",-12.687932968139648],["▁dekora",-12.68793773651123],["好きな",-12.687945365905762],["▁Ezek",-12.687954902648926],["▁число",-12.68799877166748],["▁прогноз",-12.688000679016112],["▁abu",-12.688018798828123],["▁Hala",-12.68801975250244],["oce",-12.688024520874023],["▁disini",-12.688034057617188],["▁রা",-12.68803596496582],["▁Unsere",-12.688159942626951],["katan",-12.688167572021484],["きた",-12.688196182250977],["льна",-12.688300132751465],["paikka",-12.688321113586426],["▁neben",-12.688325881958008],["μεν",-12.68836498260498],["ిత",-12.688369750976562],["ያን",-12.68837070465088],["lacht",-12.68838882446289],["▁něm",-12.688394546508787],["▁Syd",-12.688444137573242],["ថ្ងៃទី",-12.688446044921877],["malıdır",-12.6884765625],["ఘ",-12.688484191894531],["统计",-12.688488006591797],["рост",-12.688528060913086],["lko",-12.6885347366333],["▁велики",-12.6885347366333],["wania",-12.68855094909668],["atto",-12.688640594482422],["▁muun",-12.688645362854004],["▁видим",-12.68864631652832],["▁poprzez",-12.688648223876951],["▁Expo",-12.688671112060549],["▁ezen",-12.688672065734863],["▁सत्य",-12.688700675964355],["生态",-12.688702583312988],["▁sov",-12.688711166381836],["ம்பு",-12.688807487487791],["▁Ев",-12.688820838928224],["▁miesta",-12.68882942199707],["ิน",-12.688835144042969],["▁board",-12.688838005065918],["ന്ന്",-12.688879013061523],["▁rö",-12.688902854919434],["mö",-12.688908576965332],["▁gé",-12.688929557800291],["▁rü",-12.688970565795898],["အဖွဲ့",-12.68898105621338],["▁kolekt",-12.689040184020996],["▁ер",-12.68905544281006],["▁సో",-12.689074516296388],["▁بنیاد",-12.689154624938965],["▁نيوز",-12.689202308654783],["پي",-12.689212799072266],["▁trovi",-12.689216613769531],["lili",-12.689233779907228],["▁thanks",-12.689269065856934],["和平",-12.689287185668944],["▁tinka",-12.689316749572754],["DK",-12.68934440612793],["gruppe",-12.689382553100586],["▁председник",-12.689437866210938],["▁civ",-12.689447402954102],["データ",-12.689452171325684],["▁στοιχεία",-12.689475059509276],["▁Daerah",-12.689478874206545],["▁СО",-12.689478874206545],["▁матеріалів",-12.689534187316896],["▁Led",-12.689539909362791],["▁Hou",-12.68954372406006],["▁Украины",-12.689562797546388],["ుతున్న",-12.689563751220703],["зия",-12.689583778381348],["Av",-12.689658164978027],["▁очаква",-12.68968105316162],["▁koš",-12.689708709716797],["▁készült",-12.68973159790039],["éhez",-12.689741134643556],["ητ",-12.689764976501465],["nett",-12.6898193359375],["▁технология",-12.6898193359375],["▁despe",-12.68992042541504],["▁namen",-12.689940452575684],["などを",-12.689964294433594],["lion",-12.690025329589844],["▁जय",-12.690052032470703],["嚴",-12.690056800842283],["▁சரி",-12.69008731842041],["确实",-12.690092086791992],["▁alba",-12.690112113952637],["▁Wolf",-12.690174102783203],["▁például",-12.69020175933838],["ابة",-12.690237998962402],["▁סו",-12.690239906311035],["▁takut",-12.690256118774414],["ថ្មី",-12.690301895141602],["▁ktorú",-12.690325736999512],["▁فرما",-12.690325736999512],["▁جریان",-12.690326690673828],["▁Tara",-12.690329551696776],["▁Росії",-12.690335273742676],["süsteemi",-12.69033908843994],["▁않고",-12.690383911132812],["▁가지고",-12.690396308898926],["▁verschiedene",-12.690410614013672],["▁დ",-12.690423965454102],["▁peserta",-12.690427780151367],["▁cierto",-12.690430641174316],["▁खु",-12.690448760986328],["zane",-12.690459251403809],["▁Kem",-12.69049835205078],["▁følger",-12.690510749816896],["▁تۇ
ر",-12.690528869628906],["▁savas",-12.69053077697754],["таў",-12.690581321716309],["▁kweli",-12.690584182739258],["▁eldre",-12.690613746643066],["▁occupa",-12.690618515014648],["▁diatas",-12.690632820129396],["▁פע",-12.690665245056152],["▁משרד",-12.690678596496582],["▁شخصیت",-12.690680503845217],["lumotlar",-12.690710067749023],["▁Ֆ",-12.690720558166504],["多次",-12.690733909606934],["lägg",-12.690775871276855],["▁команда",-12.69080924987793],["laista",-12.690828323364258],["deling",-12.69083023071289],["众",-12.690848350524902],["ിരിക്കുന്നത്",-12.6908540725708],["▁Pit",-12.690888404846191],["整個",-12.69089412689209],["မင္း",-12.690910339355469],["▁മനുഷ്യ",-12.690911293029783],["▁esper",-12.690940856933594],["طف",-12.690954208374023],["▁przede",-12.6909761428833],["ьная",-12.691021919250488],["ука",-12.69103717803955],["▁decisión",-12.69104290008545],["▁খ",-12.691045761108398],["▁balans",-12.691067695617676],["▁karya",-12.691086769104004],["▁tö",-12.691109657287598],["耗",-12.691110610961914],["▁Roo",-12.69113063812256],["▁bunlar",-12.691133499145508],["海洋",-12.691144943237305],["▁тариф",-12.69115161895752],["▁kep",-12.691162109375],["▁Dewan",-12.691182136535645],["大概",-12.691184043884276],["▁pige",-12.691241264343262],["▁commune",-12.691286087036133],["▁Keď",-12.69128704071045],["▁sako",-12.691356658935549],["▁incident",-12.691362380981444],["▁actividad",-12.691398620605469],["තාව",-12.691420555114746],["▁остави",-12.69142723083496],["▁Bere",-12.691429138183594],["мол",-12.69145393371582],["▁inceput",-12.691473960876465],["▁asnjë",-12.691508293151855],["▁Tid",-12.691513061523438],["යක",-12.691533088684082],["нут",-12.691551208496094],["▁ир",-12.691580772399902],["чист",-12.69158172607422],["ሰው",-12.69160270690918],["hadi",-12.691642761230469],["▁Teis",-12.691643714904783],["aasta",-12.69167137145996],["▁түз",-12.691678047180176],["▁Konu",-12.691762924194336],["loven",-12.691768646240234],["聊",-12.691798210144045],["пак",-12.691810607910156],["▁kerro",-12.691816329956056],["▁awam",-12.691861152648926],["ଙ୍କର",-12.691864967346191],["ួ",-12.691908836364746],["스타",-12.69191837310791],["▁edificio",-12.69192600250244],["toare",-12.691953659057615],["ថ្ងៃ",-12.691956520080566],["lendi",-12.6919584274292],["▁využíva",-12.6919584274292],["▁mùa",-12.691962242126465],["▁शादी",-12.69196891784668],["URA",-12.691993713378906],["▁уреди",-12.69199562072754],["▁لما",-12.691998481750488],["▁spokoj",-12.692028999328612],["шил",-12.692036628723145],["▁sige",-12.692036628723145],["▁kirjuta",-12.692044258117676],["▁négy",-12.692044258117676],["▁liefde",-12.692083358764648],["nique",-12.692092895507812],["sö",-12.692093849182127],["▁1949",-12.692121505737305],["▁zitten",-12.692138671875],["▁residen",-12.692152976989746],["гов",-12.692157745361328],["▁ફે",-12.692173957824709],["▁ley",-12.692191123962402],["▁건강",-12.6922025680542],["െന്നും",-12.692205429077148],["▁Waxa",-12.692231178283691],["ρύ",-12.692249298095703],["loge",-12.692293167114258],["وح",-12.6923189163208],["pse",-12.69232177734375],["▁Selbst",-12.692330360412598],["▁présente",-12.69239616394043],["יני",-12.692401885986328],["ንግ",-12.692402839660645],["lno",-12.692412376403809],["移動",-12.692434310913086],["▁betale",-12.69245147705078],["tla",-12.692461967468262],["▁රස",-12.692522048950195],["миш",-12.692535400390623],["માન",-12.692553520202637],["▁premio",-12.692622184753418],["רא",-12.69265842437744],["dui",-12.692673683166504],["יץ",-12.6926908493042],["鏡",-12.692713737487791],["▁етап",-12.69271755218506],["▁bhf",-12.692740440368652],["▁obxectivo",-12.
692788124084473],["▁میلیارد",-12.692789077758787],["▁зла",-12.692803382873535],["▁című",-12.692813873291016],["credit",-12.69282341003418],["ტები",-12.692825317382812],["ଶ୍",-12.692832946777344],["▁predsjednik",-12.692865371704102],["▁سرعت",-12.69288444519043],["▁Hip",-12.692901611328123],["▁trì",-12.69290542602539],["▁structure",-12.692913055419922],["ัต",-12.69298267364502],["wê",-12.693011283874512],["rendszer",-12.69301414489746],["▁bidez",-12.69303035736084],["違う",-12.693042755126951],["▁нерв",-12.69309902191162],["▁Ган",-12.693114280700684],["لے",-12.693137168884276],["жит",-12.69318389892578],["▁veic",-12.693184852600098],["▁actividade",-12.693199157714844],["ât",-12.693249702453612],["▁জেলা",-12.693262100219728],["mant",-12.693306922912598],["▁bø",-12.693319320678713],["альных",-12.69333553314209],["▁šir",-12.693361282348633],["▁abon",-12.693367958068848],["▁पेश",-12.693379402160645],["▁месяц",-12.693413734436035],["▁erken",-12.693426132202148],["▁យ៉ាង",-12.693428993225098],["-01-",-12.693438529968262],["zele",-12.693439483642578],["地址",-12.6934814453125],["డే",-12.69349479675293],["eter",-12.69351291656494],["▁دیں",-12.693523406982422],["▁sexa",-12.693533897399902],["njak",-12.693541526794434],["เย็ด",-12.69355297088623],["▁seuraava",-12.693605422973633],["րան",-12.693607330322266],["ФОТО",-12.69362735748291],["選手",-12.693643569946287],["▁družbe",-12.693648338317873],["▁eyni",-12.69367218017578],["▁trīs",-12.693673133850098],["trá",-12.693702697753906],["▁heye",-12.69371223449707],["▁шаар",-12.693717956542969],["uota",-12.69372272491455],["Ph",-12.693734169006348],["▁一",-12.693747520446776],["▁ideia",-12.693755149841309],["▁findes",-12.693792343139648],["ပွဲ",-12.69387149810791],["▁ሞ",-12.69388198852539],["▁boş",-12.693902969360352],["▁Усе",-12.69390869140625],["▁американ",-12.693957328796388],["▁dando",-12.693965911865234],["ujemo",-12.69396686553955],["▁estudio",-12.693989753723145],["fro",-12.69400119781494],["▁वेळ",-12.694008827209473],["telu",-12.694148063659668],["▁памет",-12.694150924682615],["▁yakni",-12.694188117980955],["վար",-12.694231033325195],["IJA",-12.69425106048584],["▁penso",-12.69427490234375],["▁Mou",-12.694280624389648],["cea",-12.694332122802734],["燃",-12.694385528564451],["▁race",-12.694400787353516],["▁تاثیر",-12.694414138793944],["▁остава",-12.694416999816896],["▁రాజ",-12.694437980651855],["▁чыккан",-12.69444751739502],["▁למצוא",-12.694448471069336],["▁हूं",-12.694452285766602],["ഘ",-12.69446086883545],["ຫນຶ່ງ",-12.69447135925293],["▁duidelijk",-12.694500923156738],["πλη",-12.694519996643066],["▁имеют",-12.694520950317385],["ติดตาม",-12.69452953338623],["zania",-12.69455337524414],["мес",-12.694555282592772],["▁മോ",-12.69456672668457],["▁போல",-12.694578170776367],["▁שונים",-12.69459342956543],["▁sound",-12.694598197937012],["▁debería",-12.694608688354492],["ባል",-12.69463062286377],["▁CEO",-12.694634437561035],["ब्",-12.694672584533691],["ாள்",-12.694674491882324],["▁efekty",-12.69468593597412],["▁adet",-12.694690704345703],["come",-12.694700241088867],["▁responde",-12.694716453552246],["ucci",-12.694746017456056],["▁வெளி",-12.694757461547852],["בית",-12.694785118103027],["ẹ",-12.694786071777344],["▁reden",-12.69481372833252],["▁posar",-12.69485092163086],["▁respira",-12.69485569000244],["ovom",-12.694887161254885],["dova",-12.694903373718262],["面前",-12.69493293762207],["▁Hata",-12.694939613342283],["▁kluba",-12.694960594177246],["▁rey",-12.694973945617676],["ಟಾ",-12.694984436035156],["kene",-12.695021629333496],["▁dif",-12.695070266723633],["кција",-12.6950
77896118164],["onym",-12.695080757141112],["mendi",-12.695100784301758],["▁баз",-12.695119857788086],["▁tieši",-12.695164680480955],["▁ಜಿ",-12.69520092010498],["▁Aika",-12.69520664215088],["нээ",-12.695210456848145],["▁Merc",-12.695231437683104],["▁ಮಹಾ",-12.695240020751951],["тари",-12.69526195526123],["▁cant",-12.69526195526123],["▁magas",-12.695269584655762],["▁Filmibeat",-12.695279121398926],["▁Çünkü",-12.695279121398926],["▁Олександр",-12.695279121398926],["▁politisk",-12.695280075073242],["▁116",-12.695290565490724],["▁Ourense",-12.695290565490724],["▁кабинет",-12.69529151916504],["มากขึ้น",-12.695294380187988],["laat",-12.695310592651367],["▁نبود",-12.695310592651367],["uksessa",-12.695354461669922],["▁rzecz",-12.695366859436035],["ывать",-12.695379257202148],["▁Aja",-12.695394515991213],["▁telefono",-12.69540786743164],["▁jardin",-12.695416450500488],["▁anime",-12.69543170928955],["▁schw",-12.695474624633787],["▁تطبيق",-12.695484161376951],["واز",-12.69550323486328],["▁rab",-12.695511817932127],["နိုင်ငံ",-12.69551944732666],["iyyat",-12.695539474487305],["2/",-12.695542335510254],["lwa",-12.695574760437012],["▁Απ",-12.695575714111328],["▁təsir",-12.695581436157228],["▁تقدم",-12.695627212524414],["▁KAR",-12.69564437866211],["導致",-12.69571018218994],["▁пожар",-12.695749282836914],["▁τρ",-12.69575023651123],["▁කෝ",-12.695771217346191],["تش",-12.695792198181152],["ilin",-12.695815086364746],["store",-12.695837020874023],["▁Топ",-12.695852279663086],["▁maailman",-12.695853233337402],["▁departe",-12.695916175842283],["▁инф",-12.695950508117676],["▁lỗi",-12.695961952209473],["ことに",-12.695963859558104],["▁orde",-12.696002006530762],["▁خراب",-12.69603157043457],["▁držav",-12.696038246154783],["▁dagoen",-12.69604778289795],["oči",-12.696054458618164],["▁הד",-12.696063995361328],["日起",-12.696078300476074],["体験",-12.696099281311035],["illi",-12.696110725402832],["▁застосування",-12.696111679077148],["▁अदालत",-12.696111679077148],["▁స్వ",-12.696151733398438],["祭",-12.696240425109863],["▁kaka",-12.696252822875977],["▁घ्या",-12.696261405944824],["▁תש",-12.696280479431152],["▁titolo",-12.696297645568848],["/01/",-12.696333885192873],["▁Helsingin",-12.696359634399414],["▁Vei",-12.696385383605955],["▁shows",-12.696404457092283],["riak",-12.696412086486816],["delt",-12.696426391601562],["όταν",-12.696476936340332],["▁ligt",-12.696544647216797],["fico",-12.69654655456543],["erde",-12.696560859680176],["▁какви",-12.69656753540039],["▁ഹാ",-12.69661808013916],["▁якому",-12.696619987487791],["▁среща",-12.696653366088867],["escola",-12.696660995483398],["လမ်း",-12.696667671203612],["джи",-12.696675300598145],["baan",-12.696698188781738],["слаб",-12.6967134475708],["હી",-12.696720123291016],["íš",-12.696732521057127],["▁flor",-12.696759223937988],["自分が",-12.696763038635254],["▁tvoj",-12.696880340576172],["▁मांग",-12.696931838989258],["▁miễn",-12.696945190429688],["DEN",-12.696950912475586],["▁شريف",-12.696981430053713],["▁każdego",-12.696982383728027],["eca",-12.69698715209961],["▁паспорт",-12.697025299072266],["ుకున్న",-12.697061538696287],["stroj",-12.69707202911377],["άμε",-12.697072982788086],["ifu",-12.697086334228516],["▁használ",-12.697100639343262],["▁тен",-12.697111129760742],["▁axborot",-12.697115898132324],["▁möjlighet",-12.697120666503906],["ಖ",-12.697161674499512],["扩大",-12.697163581848145],["▁لقد",-12.697184562683104],["▁godinu",-12.69721221923828],["离开",-12.697225570678713],["▁реформ",-12.697232246398926],["▁Rusi",-12.697240829467772],["▁عقد",-12.697254180908203],["ീസ്",-12.69725513458252],["н
ьне",-12.697260856628418],["製品",-12.69727611541748],["olle",-12.69728183746338],["▁Jed",-12.697283744812012],["▁bid",-12.69730281829834],["-100",-12.6973237991333],["sap",-12.697410583496094],["▁εξα",-12.69742202758789],["kuja",-12.697443962097168],["sats",-12.697484970092772],["▁quanh",-12.697542190551758],["▁regionale",-12.69755744934082],["ిస్తున్న",-12.697591781616213],["▁ગુ",-12.697612762451172],["▁suspend",-12.697624206542969],["▁посе",-12.697626113891602],["▁muid",-12.697690963745115],["▁mengge",-12.69770622253418],["▁müzakirə",-12.697766304016112],["afa",-12.697771072387695],["動作",-12.697775840759276],["▁nghỉ",-12.69777774810791],["kolo",-12.697782516479492],["▁نکل",-12.69778537750244],["▁bleibt",-12.697790145874023],["Ben",-12.697793960571287],["▁Terbaru",-12.69780158996582],["▁آنان",-12.6978120803833],["kini",-12.697821617126465],["िन्",-12.697851181030272],["高雄",-12.697855949401855],["▁komu",-12.697857856750488],["▁verschiedenen",-12.697932243347168],["▁raka",-12.697948455810549],["ĝoj",-12.698010444641112],["▁vaj",-12.698064804077148],["رية",-12.698075294494627],["▁1919",-12.698121070861816],["сум",-12.698145866394045],["▁ihany",-12.698176383972168],["жим",-12.698214530944824],["▁parça",-12.698220252990724],["▁роди",-12.698226928710938],["▁dagli",-12.698232650756836],["ყოფ",-12.698264122009276],["▁ບັນດາ",-12.69827651977539],["▁આપી",-12.69834327697754],["れた",-12.698347091674805],["▁saken",-12.698420524597168],["▁Ör",-12.698423385620115],["ców",-12.698443412780762],["ліва",-12.69845199584961],["▁musim",-12.698508262634276],["▁व्यापार",-12.69853401184082],["▁రె",-12.698564529418944],["▁રહ્યા",-12.698586463928224],["lichkeit",-12.698598861694336],["ृ",-12.698603630065918],["อากาศ",-12.698610305786133],["▁zdjęcia",-12.69861125946045],["▁ممالک",-12.698614120483398],["▁Idag",-12.698643684387209],["ampi",-12.698678970336914],["реди",-12.698678970336914],["▁ngờ",-12.698684692382812],["▁Hải",-12.69873046875],["营销",-12.698735237121582],["дели",-12.698792457580566],["从事",-12.6987943649292],["相对",-12.698797225952148],["jani",-12.698803901672363],["▁وج",-12.698830604553224],["▁профил",-12.698838233947754],["▁כלל",-12.698858261108398],["▁Mia",-12.69886875152588],["लु",-12.69887638092041],["▁tirsan",-12.698885917663574],["▁Ș",-12.698932647705078],["▁díky",-12.698965072631836],[".5.",-12.698973655700684],["RIS",-12.69900894165039],["を持って",-12.699050903320312],["čuje",-12.699051856994627],["▁femme",-12.699051856994627],["议",-12.699172019958496],["СС",-12.69920253753662],["UTC",-12.699209213256836],["[5]",-12.699212074279783],["cient",-12.699212074279783],["▁llarg",-12.699252128601074],["bic",-12.69926643371582],["περ",-12.6992826461792],["εια",-12.699288368225098],["旁",-12.699334144592283],["នោះ",-12.699347496032717],["альний",-12.69936752319336],["▁sağla",-12.69937515258789],["ést",-12.699395179748535],["电脑",-12.699405670166016],["値",-12.699417114257812],["ヒ",-12.699430465698242],["ন্দ",-12.699432373046877],["▁ಮಾಡಿದ",-12.699440002441406],["▁특",-12.699442863464355],["▁आम्ही",-12.699445724487305],["덕",-12.699445724487305],["▁klaar",-12.699460983276367],["▁marco",-12.699467658996582],["▁otthon",-12.69949436187744],["▁▼",-12.699499130249023],["▁обучение",-12.699518203735352],["ಟರ್",-12.699522018432615],["▁Soci",-12.699529647827148],["▁daude",-12.69953727722168],["▁hiru",-12.699552536010742],["દિ",-12.699581146240234],["▁سری",-12.69963264465332],["▁йому",-12.699668884277344],["说道",-12.699694633483888],["attu",-12.699719429016112],["▁키",-12.69973373413086],["వర్",-12.699735641479492],["▁České",-12.699
7652053833],["weise",-12.699769973754885],["▁Damit",-12.69979476928711],["zwi",-12.699809074401855],["▁safe",-12.699825286865234],["▁Ő",-12.699905395507812],["рып",-12.699943542480469],["▁टे",-12.699974060058594],["▁molts",-12.700037002563477],["บู",-12.700127601623535],["▁základ",-12.700169563293455],["шку",-12.700191497802734],["عاد",-12.70020580291748],["▁navig",-12.700240135192873],["▁risc",-12.700244903564451],["엄",-12.700277328491213],["▁milhões",-12.700281143188477],["▁UNESCO",-12.700286865234377],["▁매우",-12.700312614440918],["▁dostop",-12.700322151184082],["▁தூ",-12.70032787322998],["qul",-12.700334548950195],["▁близько",-12.700340270996094],["▁Educación",-12.700361251831056],["пъл",-12.700366020202637],["▁кр",-12.700373649597168],["▁ଜିଲ୍ଲା",-12.700389862060549],["ంబ",-12.70041275024414],["▁сметка",-12.700428009033203],["▁(19)",-12.700435638427734],["▁omgeving",-12.700498580932615],["mico",-12.70050811767578],["▁рис",-12.70053005218506],["ciem",-12.700539588928224],["▁хө",-12.700565338134766],["▁aldığı",-12.70057201385498],["▁membri",-12.700589179992676],["▁کشورهای",-12.700626373291016],["דות",-12.700645446777344],["▁Rot",-12.700645446777344],["zung",-12.70064640045166],["▁안내",-12.700650215148926],["▁Maja",-12.700681686401367],["यं",-12.700708389282228],["▁ljus",-12.700748443603516],["▁կոչ",-12.700769424438477],["都市",-12.700806617736816],["▁Jahres",-12.700831413269045],["術",-12.700841903686523],["▁ירושלים",-12.700867652893066],["ssy",-12.700873374938965],["英國",-12.700922966003418],["▁קרי",-12.700968742370604],["▁компани",-12.700994491577148],["▁znám",-12.701048851013184],["סן",-12.701072692871094],["▁імя",-12.70110321044922],["กล้อง",-12.701116561889648],["▁Christ",-12.701123237609863],["տեսանյութ",-12.70112419128418],["▁ตาม",-12.701138496398926],["▁Cá",-12.70114803314209],["oitu",-12.701210021972656],["▁നടത്തിയ",-12.701212882995604],["▁Svenska",-12.701224327087402],["▁Pār",-12.701236724853516],["ことも",-12.701252937316896],["пір",-12.701273918151855],["រី",-12.701276779174805],["ילים",-12.701282501220703],["▁Temple",-12.70128345489502],["nav",-12.701286315917969],["▁میر",-12.701306343078612],["▁жауап",-12.701319694519045],["▁podnik",-12.70132541656494],["၏။",-12.701346397399902],["káž",-12.701379776000977],["▁Avec",-12.701393127441406],["▁dagaal",-12.70139503479004],["▁zein",-12.701404571533203],["▁Baca",-12.701444625854492],["▁besluit",-12.701449394226074],["bron",-12.701452255249023],["▁XV",-12.701468467712402],["más",-12.701472282409668],["▁ည",-12.701481819152832],["にして",-12.701485633850098],["দেশ",-12.701493263244627],["lann",-12.701520919799805],["▁দেখা",-12.701544761657717],["▁сый",-12.701568603515623],["▁poble",-12.701570510864258],["▁Augen",-12.701584815979004],["▁ඔයා",-12.70160961151123],["rufen",-12.701642036437988],["加快",-12.701690673828123],["▁nuevas",-12.70169734954834],["▁cikk",-12.701698303222656],["နေ့",-12.701723098754885],["ctum",-12.701742172241213],["hami",-12.701749801635742],["▁efa",-12.70175075531006],["▁publicat",-12.701752662658691],["ट्",-12.701814651489258],["სურ",-12.701848030090332],["žel",-12.701857566833496],["בד",-12.701869010925291],["▁περισσότερο",-12.701899528503418],["▁колико",-12.701905250549316],["▁научно",-12.701910972595217],["▁psych",-12.701913833618164],["感谢",-12.701923370361328],["▁சென்னை",-12.70193099975586],["▁ګ",-12.701933860778809],["▁punti",-12.701937675476074],["▁шығар",-12.70195198059082],["▁الإثنين",-12.70195484161377],["▁ಸಾಹಿತ್ಯ",-12.70195484161377],["▁बेला",-12.70195770263672],["rimi",-12.701959609985352],["▁تلفن",-12.7019624710083
],["▁वृ",-12.701972007751465],["▁семинар",-12.701988220214844],["▁cumshot",-12.701993942260742],["▁типа",-12.702017784118652],["▁Quran",-12.70205020904541],["▁чудо",-12.702059745788574],["მონ",-12.702072143554688],["韩",-12.70207405090332],["ලිය",-12.702077865600586],["▁žensk",-12.702157020568848],["▁njegova",-12.70215892791748],["แผน",-12.70217514038086],["▁senang",-12.702190399169922],["▁inside",-12.702211380004885],["color",-12.702229499816896],["tūra",-12.702248573303224],["▁السل",-12.70229148864746],["▁Onze",-12.702366828918455],["▁kimse",-12.70241928100586],["▁учурда",-12.702435493469238],["▁TIL",-12.702442169189451],["▁प्रथम",-12.7024564743042],["▁يمكنك",-12.702479362487791],["▁registro",-12.70253086090088],["▁мовы",-12.702594757080078],["ilio",-12.702649116516112],["▁డి",-12.702651977539062],["▁otvoren",-12.70265769958496],["وو",-12.702712059020996],["▁დავ",-12.702717781066896],["platz",-12.702747344970703],["ឺ",-12.70274829864502],["▁పే",-12.70276927947998],["ኃ",-12.70279026031494],["▁początku",-12.702792167663574],["▁Александар",-12.702792167663574],["▁одговара",-12.702792167663574],["▁ਪੋਰਨ",-12.702792167663574],["셔",-12.702792167663574],["▁Ngoài",-12.70279598236084],["ေလာက္",-12.702800750732422],["▁klaus",-12.702801704406738],["▁ազգային",-12.702808380126951],["isessa",-12.702811241149902],["▁daher",-12.702815055847168],["ليل",-12.702851295471191],["▁Bán",-12.702866554260254],["▁សុ",-12.702876091003418],["zadə",-12.702878952026367],["▁Osman",-12.702898025512695],["▁färg",-12.702898979187012],["▁அடி",-12.702903747558594],["ោះ",-12.702909469604492],["▁toppen",-12.702947616577148],["▁Glo",-12.70296573638916],["▁ಶಾ",-12.70297908782959],["▁tải",-12.703022003173828],["২",-12.703057289123535],["گز",-12.703065872192385],["▁sebagian",-12.70307159423828],["▁ИЗ",-12.703080177307127],["▁қор",-12.703118324279783],["spir",-12.703131675720217],["diti",-12.703134536743164],["оли",-12.703174591064451],["▁Jako",-12.703190803527832],["▁kishi",-12.703217506408691],["▁kufi",-12.70322322845459],["bine",-12.70324993133545],["▁interno",-12.703265190124512],["භා",-12.703288078308104],["մամբ",-12.703307151794434],["▁etxe",-12.703313827514648],["▁알려",-12.703333854675291],["▁Dro",-12.70334815979004],["▁tranquil",-12.703351020812988],["▁moderno",-12.70337200164795],["▁Ahmet",-12.703384399414062],["zaj",-12.70340061187744],["φη",-12.703429222106934],["▁óta",-12.70345973968506],["nili",-12.703496932983398],["▁خاندان",-12.703505516052246],["الا",-12.703508377075195],["▁niemand",-12.703519821166992],["arne",-12.70354175567627],["运营",-12.70354175567627],["▁بری",-12.7035551071167],["ክስ",-12.703607559204102],["▁परिणाम",-12.703607559204102],["流程",-12.703617095947266],["ൌ",-12.703630447387695],["▁particulier",-12.703630447387695],["▁множество",-12.703630447387695],["▁Phasellus",-12.703631401062012],["▁اسمبلی",-12.703632354736328],["▁आनंद",-12.703632354736328],["▁감사합니다",-12.703639030456545],["▁ktorej",-12.703645706176758],["linna",-12.703670501708984],["niť",-12.7036714553833],["信用",-12.703675270080566],["нения",-12.703720092773438],["敢",-12.7037353515625],["▁eyes",-12.703746795654297],["▁دوه",-12.703764915466309],["стран",-12.703783988952637],["▁вір",-12.703786849975586],["แนะนํา",-12.703803062438965],["▁chóng",-12.703803062438965],["▁ભાગ",-12.70380973815918],["▁պատ",-12.703813552856444],["▁Stad",-12.703864097595217],["ေၾကာင္း",-12.703871726989746],["▁LV",-12.703892707824709],["ান",-12.703898429870604],["верен",-12.703909873962402],["▁sexta",-12.703940391540527],["laden",-12.70394229888916],["▁Sipas",-12.70394420623779
1],["▁даље",-12.703969955444336],["magan",-12.703972816467283],["▁نما",-12.704011917114258],["▁ജയ",-12.70404052734375],["▁ઊ",-12.704059600830078],["הר",-12.704110145568848],["年轻",-12.704113960266112],["▁這",-12.704116821289062],["▁сајт",-12.704123497009276],["▁χώρο",-12.704129219055176],["▁работает",-12.704157829284668],["ահար",-12.704160690307615],["ທຸກ",-12.704181671142578],["▁araştırma",-12.704191207885742],["落实",-12.704193115234377],["novo",-12.704197883605955],["▁šīs",-12.704208374023438],["▁andare",-12.704218864440918],["▁kaart",-12.70422077178955],["rando",-12.704253196716309],["地位",-12.704266548156738],["▁කළා",-12.704280853271484],["СКИ",-12.704323768615724],["ālās",-12.704355239868164],["▁contrario",-12.704378128051758],["▁eerst",-12.70438289642334],["००",-12.704404830932615],["stelle",-12.704407691955566],["ได้อย่าง",-12.704428672790527],["練習",-12.704444885253906],["▁ჩ",-12.70444679260254],["▁budeme",-12.704455375671388],["ጁ",-12.704466819763184],["▁۱۳۹۶",-12.70447063446045],["▁ūdens",-12.704471588134766],["▁დიდ",-12.704474449157717],["▁তবে",-12.70447826385498],["៨",-12.70449447631836],["▁Einsatz",-12.704503059387209],["ാലും",-12.7045259475708],["▁бастап",-12.704530715942385],["typ",-12.70455837249756],["▁ডা",-12.704559326171877],["▁Primer",-12.704581260681152],["▁gemaak",-12.704586029052734],["▁29-",-12.70458984375],["▁терен",-12.704612731933594],["▁Így",-12.704623222351074],["▁أهل",-12.704635620117188],["/07/",-12.704636573791504],["haan",-12.704668998718262],["FR",-12.70469570159912],["▁ଲ",-12.704707145690918],["larıyla",-12.704740524291992],["лө",-12.704789161682127],["▁Gang",-12.70479965209961],["dist",-12.704821586608888],["帶來",-12.704843521118164],["टन",-12.70484447479248],["▁MW",-12.70486068725586],["rinta",-12.704870223999023],["ここ",-12.704874992370604],["▁fiske",-12.704875946044922],["іміз",-12.704879760742188],["▁Selamat",-12.704925537109377],["▁кір",-12.704951286315918],["▁순",-12.704965591430664],["▁ziņas",-12.705005645751951],["▁کردی",-12.70510482788086],["▁جعل",-12.70512580871582],["▁konsert",-12.7051362991333],["وٹ",-12.705141067504885],["តិ",-12.705163955688477],["▁filter",-12.705183029174805],["หน",-12.70521640777588],["▁takım",-12.70521640777588],["प्प",-12.70522403717041],["▁jaunā",-12.705254554748535],["▁coba",-12.705266952514648],["妳",-12.705288887023926],["▁देखा",-12.70529842376709],["bilis",-12.705304145812988],["▁bisogno",-12.70530891418457],["▁življenje",-12.705312728881836],["▁CL",-12.705313682556152],["دق",-12.705336570739746],["▁täällä",-12.705339431762695],["▁صدا",-12.705345153808594],["fter",-12.70534610748291],["▁صادق",-12.705352783203123],["▁colocar",-12.705362319946287],["▁Copa",-12.705377578735352],["▁tướng",-12.705377578735352],["▁amp",-12.705464363098145],["▁тисяч",-12.70548152923584],["inform",-12.705484390258787],["▁doba",-12.705487251281738],["kcja",-12.705568313598633],["▁manga",-12.70557689666748],["▁Terima",-12.705580711364746],["資金",-12.705594062805176],["юр",-12.70561981201172],["▁χα",-12.705626487731934],["нкі",-12.705647468566896],["▁документов",-12.705647468566896],["[...]",-12.705670356750488],["▁imun",-12.705717086791992],["▁годину",-12.705718994140623],["▁Condi",-12.705729484558104],["▁Corre",-12.705754280090332],["개의",-12.705771446228027],["ЦИ",-12.705780029296877],["wagen",-12.705816268920898],["חיל",-12.70582675933838],["▁honum",-12.705833435058594],["version",-12.705856323242188],["▁Nëse",-12.705864906311035],["▁mejorar",-12.705896377563477],["шілік",-12.70590114593506],["iltä",-12.70593547821045],["▁guarda",-12.705942153930664]
,["▁половина",-12.705951690673828],["पूर",-12.705962181091309],["▁ایف",-12.705968856811523],["pali",-12.705974578857422],["순",-12.705978393554688],["▁તેમ",-12.705987930297852],["▁كرد",-12.706000328063965],["forening",-12.706063270568848],["▁devem",-12.706072807312012],["▁መር",-12.706089973449709],["▁промо",-12.706120491027832],["änder",-12.706121444702148],["肩",-12.706130027770996],["▁دوسری",-12.706149101257324],["▁അങ്ങനെ",-12.706170082092283],["▁nesa",-12.70619297027588],["增强",-12.706199645996094],["▁ത്ഥ",-12.70622444152832],["jame",-12.706271171569824],["▁والو",-12.706306457519531],["▁terdiri",-12.706311225891112],["▁disponibles",-12.706326484680176],["▁ساختمان",-12.706438064575195],["▁objav",-12.706439018249512],["keen",-12.70646858215332],["wać",-12.706503868103027],["▁χαρ",-12.70650577545166],["▁Narod",-12.706515312194824],["▁tenha",-12.706543922424316],["▁block",-12.706607818603516],["▁krig",-12.706615447998049],["▁agro",-12.706616401672363],["▁жасау",-12.70663833618164],["λος",-12.706677436828612],["▁butik",-12.706720352172852],["zuk",-12.706722259521484],["完整",-12.706744194030762],["ომ",-12.706748962402344],["กลับมา",-12.706768035888672],["▁Gesellschaft",-12.706769943237305],["ોમાં",-12.706781387329102],["ttle",-12.706825256347656],["കാല",-12.706830024719238],["▁novin",-12.706833839416504],["ກະ",-12.70683479309082],["ップ",-12.70683479309082],["▁Drama",-12.706852912902832],["▁cerc",-12.706856727600098],["▁وإن",-12.706931114196776],["▁atrast",-12.706932067871094],["尖",-12.706954956054688],["韓國",-12.706961631774902],["ゆ",-12.706975936889648],["検索",-12.706979751586914],["руп",-12.70698356628418],["訪問",-12.706986427307127],["▁القانون",-12.706988334655762],["▁calquera",-12.706995964050291],["UMA",-12.707026481628418],["▁kalan",-12.707029342651367],["navn",-12.707037925720217],["▁orain",-12.707039833068848],["▁Germania",-12.707050323486328],["سار",-12.707101821899414],["лим",-12.707112312316896],["таш",-12.707167625427246],["▁contribui",-12.707170486450195],["▁hvort",-12.707170486450195],["▁sorun",-12.707178115844728],["ater",-12.707182884216309],["icas",-12.707301139831545],["다고",-12.707322120666504],["▁అలా",-12.707340240478516],["▁voltak",-12.70736026763916],["▁uue",-12.707380294799805],["ρή",-12.707403182983398],["▁حج",-12.70742130279541],["tuva",-12.707441329956056],["蓝",-12.707465171813965],["▁Höhe",-12.707499504089355],["▁שפּיל",-12.707514762878418],["нички",-12.707548141479492],["▁voel",-12.707573890686035],["▁allergi",-12.70760726928711],["▁яку",-12.707633018493652],["YE",-12.707684516906738],["▁клиент",-12.707688331604004],["▁prodej",-12.707697868347168],["▁helppo",-12.707712173461914],["quia",-12.70774269104004],["됐다",-12.707744598388672],["赵",-12.70777702331543],["之间的",-12.707798957824709],["许",-12.70780086517334],["▁뉴스",-12.707815170288086],["věd",-12.707820892333984],["▁giornata",-12.70783233642578],["▁thử",-12.707849502563477],["чове",-12.707856178283691],["▁Spaß",-12.707860946655272],["▁Божи",-12.707868576049805],["koon",-12.707877159118652],["▁کله",-12.707877159118652],["trūk",-12.707889556884766],["זו",-12.707924842834473],["λαβ",-12.707934379577637],["▁aften",-12.707945823669434],["یار",-12.707952499389648],["ίσει",-12.707959175109863],["šno",-12.707985877990724],["▁билээ",-12.70799446105957],["Ob",-12.708025932312012],["▁elmi",-12.708043098449709],["▁Technology",-12.708051681518556],["▁Split",-12.708059310913086],["ாலும்",-12.70808219909668],["▁pož",-12.708102226257324],["▁કરીને",-12.708127975463867],["овая",-12.708157539367676],["▁داریم",-12.708174705505373],["exe",-12.70
8194732666016],["ਮੇ",-12.70823860168457],["▁ግብ",-12.708324432373049],["ပြည်",-12.708328247070312],["▁Иран",-12.708365440368652],["lete",-12.7083740234375],["▁2500",-12.70840549468994],["gano",-12.708434104919434],["āra",-12.708477020263672],["▁ଆପଣ",-12.708477020263672],["▁koste",-12.708491325378418],["▁hombre",-12.708492279052734],["URI",-12.708551406860352],["▁mõ",-12.708595275878906],["▁dram",-12.708598136901855],["imui",-12.708625793457031],["îyê",-12.708662986755373],["▁yhdessä",-12.708674430847168],["▁Жогорку",-12.708674430847168],["▁କେନ୍ଦ୍ର",-12.708678245544434],["▁земље",-12.70868682861328],["रू",-12.708747863769531],["▁اعلی",-12.708747863769531],["衣服",-12.70877742767334],["▁ගියා",-12.708795547485352],["▁occasion",-12.708834648132324],["የው",-12.70884132385254],["jeća",-12.708873748779297],["▁денеска",-12.708884239196776],["▁центар",-12.708916664123535],["▁gert",-12.708930015563965],["ουρ",-12.708978652954102],["▁colon",-12.708986282348633],["▁хабар",-12.709030151367188],["▁väike",-12.70905590057373],["以來",-12.709057807922363],["▁mandi",-12.709063529968262],["▁inspiration",-12.709065437316896],["▁conduc",-12.70913314819336],["▁이해",-12.70913791656494],["▁beach",-12.709163665771484],["▁energije",-12.709172248840332],["oiden",-12.709199905395508],["有限",-12.709226608276367],["ື",-12.709248542785645],["▁raamatu",-12.709298133850098],["ught",-12.709409713745115],["▁político",-12.709444999694824],["▁тон",-12.709447860717772],["▁మంత్రి",-12.70946216583252],["掌",-12.709470748901367],["▁milieu",-12.709471702575684],["▁hilang",-12.709482192993164],["תן",-12.709489822387695],["▁mahdollista",-12.709517478942873],["ử",-12.70952033996582],["▁რატომ",-12.709531784057615],["վան",-12.709574699401855],["ಫ",-12.70958137512207],["▁пласт",-12.709599494934082],["▁Maxim",-12.709653854370115],["▁mniej",-12.709659576416016],["dale",-12.709662437438965],["▁podatke",-12.70966625213623],["μένες",-12.709680557250977],["טיק",-12.709687232971191],["▁заң",-12.70969581604004],["▁नजर",-12.709701538085938],["▁అదే",-12.709705352783203],["▁STR",-12.709715843200684],["▁آسان",-12.709782600402832],["ించి",-12.709814071655272],["▁arc",-12.70981502532959],["▁kjøpe",-12.709835052490234],["▁imkan",-12.7098388671875],["ៅ",-12.709861755371094],["▁१७",-12.709861755371094],["▁hui",-12.709864616394045],["▁praneš",-12.709901809692385],["КТ",-12.709919929504396],["tih",-12.709920883178713],["▁иза",-12.709943771362305],["▁ေရ",-12.70995807647705],["jast",-12.709970474243164],["▁dál",-12.709976196289062],["ERI",-12.70997714996338],["styr",-12.709991455078123],["ታው",-12.71006202697754],["New",-12.710073471069336],["▁ambos",-12.710087776184082],["αμ",-12.710094451904297],["▁camino",-12.710097312927246],["ried",-12.71015453338623],["▁السابق",-12.710177421569824],["▁별",-12.710200309753418],["▁Uusi",-12.710212707519531],["▁termini",-12.710216522216797],["ေမ",-12.71024227142334],["zima",-12.71025848388672],["იერ",-12.710280418395996],["▁sám",-12.71029567718506],["▁atual",-12.710312843322754],["▁Sida",-12.710315704345703],["▁Pública",-12.71036148071289],["▁प्रशासन",-12.71036148071289],["▁Bảo",-12.710362434387209],["▁Mungkin",-12.710362434387209],["últim",-12.710384368896484],["▁സ്റ്റ",-12.710386276245115],["ndam",-12.710457801818848],["פיל",-12.710461616516112],["oldal",-12.710476875305176],["HM",-12.71052074432373],["ininkų",-12.710535049438477],["лох",-12.71059513092041],["▁açıklama",-12.710609436035156],["హ్",-12.710614204406738],["▁וכ",-12.710631370544434],["עם",-12.71067714691162],["ikko",-12.71068000793457],["▁إذ",-12.71068000793457],["▁באר
",-12.710680961608888],["lopp",-12.710716247558594],["казан",-12.710721015930176],["▁хорош",-12.710759162902832],["▁Sku",-12.71076488494873],["▁našim",-12.710787773132324],["ちゃん",-12.710845947265623],["koko",-12.710883140563965],["▁प्यार",-12.710911750793455],["LIN",-12.71092700958252],["rip",-12.71092700958252],["▁tehty",-12.710930824279783],["▁১২",-12.710938453674316],["▁Lalu",-12.710943222045898],["ینی",-12.710948944091797],["▁izatea",-12.710981369018556],["లై",-12.711030960083008],["▁вижда",-12.711031913757324],["ენი",-12.711034774780272],["▁koka",-12.711050987243652],["▁gj",-12.711060523986816],["▁clean",-12.711095809936523],["lako",-12.711101531982422],["▁caro",-12.711141586303713],["紫",-12.711159706115724],["設置",-12.711198806762695],["▁находится",-12.711210250854492],["▁계속",-12.711215019226074],["▁ସମୟରେ",-12.711263656616213],["▁أمر",-12.711285591125488],["▁ଧ",-12.711292266845703],["inf",-12.711318016052246],["▁ਆਪ",-12.711355209350586],["bka",-12.71135711669922],["▁Aba",-12.711359977722168],["reo",-12.7113676071167],["▁Rey",-12.711421966552734],["▁Haar",-12.711525917053224],["▁meðal",-12.711542129516602],["▁Unit",-12.711559295654297],["▁суду",-12.711569786071776],["nün",-12.711596488952637],["gén",-12.71162223815918],["▁Qə",-12.711623191833496],["▁Gü",-12.711750984191896],["אס",-12.711770057678224],["▁bakar",-12.71177577972412],["pani",-12.711783409118652],["不敢",-12.711792945861816],["▁jei",-12.711801528930664],["▁변화",-12.71181297302246],["▁lenge",-12.71185302734375],["கே",-12.71186637878418],["▁פל",-12.711904525756836],["▁Wah",-12.711907386779783],["tub",-12.711926460266112],["කරු",-12.711926460266112],["ళ్లు",-12.711933135986328],["105",-12.711968421936035],["فور",-12.71200180053711],["▁shtëpi",-12.712013244628906],["tallet",-12.712023735046388],["Жа",-12.7120361328125],["▁nhiệt",-12.712055206298828],["▁вне",-12.712100982666016],["▁tids",-12.712102890014648],["▁woman",-12.712102890014648],["▁ഇങ്ങനെ",-12.712114334106444],["داء",-12.712146759033203],["யூ",-12.712157249450684],["▁снимки",-12.712164878845217],["▁gerekli",-12.712172508239746],["▁Take",-12.712231636047363],["Mon",-12.712263107299805],["trat",-12.712274551391602],["▁Francia",-12.712285995483398],["גיע",-12.712286949157717],["▁praš",-12.712288856506348],["▁왜",-12.712299346923828],["สอบ",-12.712324142456056],["としても",-12.712324142456056],["hetetlen",-12.712332725524902],["▁compre",-12.712336540222168],["▁States",-12.712384223937988],["מער",-12.712425231933594],["dring",-12.712461471557615],["▁شدت",-12.712491989135742],["məlidir",-12.712586402893066],["▁almal",-12.712594032287598],["bern",-12.71262550354004],["▁znajduje",-12.712641716003418],["▁passage",-12.712651252746582],["ከተ",-12.712652206420898],["▁јавно",-12.712793350219728],["缺乏",-12.71281909942627],["触",-12.712838172912598],["படி",-12.712850570678713],["▁fou",-12.712881088256836],["tirish",-12.712892532348633],["▁orada",-12.71290397644043],["▁zemlje",-12.712930679321287],["adora",-12.712935447692873],["köp",-12.712956428527832],["▁դպրոց",-12.712973594665527],["LAS",-12.71300983428955],["近日",-12.71303939819336],["▁gotovo",-12.713136672973633],["▁heil",-12.713149070739746],["▁qeyb",-12.713149070739746],["มะ",-12.713165283203123],["▁טר",-12.71317195892334],["▁خلکو",-12.713181495666504],["そ",-12.71318531036377],["▁обу",-12.713220596313477],["個月",-12.713238716125488],["▁eddig",-12.713250160217283],["▁zake",-12.713254928588867],["▁руку",-12.713261604309082],["жай",-12.713287353515623],["MN",-12.71328830718994],["zga",-12.713354110717772],["امی",-12.713459968566896],["gė",-12
.713488578796388],["▁greit",-12.713618278503418],["▁հայկական",-12.713623046875],["▁lese",-12.713647842407228],["metra",-12.713679313659668],["▁pembe",-12.713688850402832],["▁কিন্তু",-12.713744163513184],["▁2003.",-12.713747024536133],["▁жардам",-12.71375560760498],["ชาติ",-12.713762283325195],["нци",-12.713764190673828],["▁පටන්",-12.713764190673828],["▁fidel",-12.713788032531738],["▁Lev",-12.713800430297852],["enc",-12.713807106018066],["▁hakim",-12.713842391967772],["▁ഓര്",-12.713845252990724],["ՈՒՄ",-12.713858604431152],["tail",-12.713879585266112],["änger",-12.713881492614746],["▁sní",-12.713900566101074],["bilder",-12.713913917541504],["▁Bár",-12.71391773223877],["czeń",-12.713924407958984],["▁yurt",-12.71396827697754],["▁ממ",-12.713979721069336],["حر",-12.713996887207031],["ђу",-12.714006423950195],["10%",-12.714011192321776],["▁emprego",-12.714011192321776],["ΕΣ",-12.714038848876951],["GG",-12.71409034729004],["▁odpowiedzi",-12.714120864868164],["▁பெரிய",-12.714122772216797],["▁Inf",-12.714128494262695],["報名",-12.714171409606934],["▁tion",-12.71418285369873],["▁ard",-12.714184761047363],["อ่อน",-12.714190483093262],["īgo",-12.71420192718506],["▁momentul",-12.714203834533691],["▁منبع",-12.71424961090088],["IJ",-12.714262008666992],["▁പേര",-12.714262008666992],["걸",-12.714280128479004],["ndur",-12.714312553405762],["▁نگار",-12.714351654052734],["ober",-12.714362144470217],["▁Borg",-12.714381217956545],["سلام",-12.71440601348877],["భా",-12.714412689208984],["▁ਕੋ",-12.714433670043944],["块",-12.714436531066896],["بع",-12.714442253112791],["յու",-12.71445083618164],["▁lasten",-12.714462280273438],["▁zag",-12.714470863342283],["▁التعليم",-12.7145414352417],["УС",-12.714574813842772],["რია",-12.714574813842772],["▁Beberapa",-12.714591979980469],["▁göndər",-12.714591979980469],["▁plutôt",-12.714591979980469],["▁tilgjengelig",-12.714591979980469],["▁katastrof",-12.714593887329102],["▁érdekében",-12.714598655700684],["▁عبدالله",-12.714618682861328],["▁crema",-12.71465015411377],["▁eie",-12.714676856994627],["▁Hita",-12.714696884155272],["ಯ್",-12.71471881866455],["▁მერე",-12.714743614196776],["▁sustav",-12.714765548706056],["▁ڦ",-12.714771270751951],["sore",-12.714773178100586],["▁tyle",-12.71485710144043],["year",-12.714862823486328],["ணை",-12.714900970458984],["▁소개",-12.714909553527832],["jp",-12.71491241455078],["মার",-12.714924812316896],["▁magazin",-12.714927673339844],["ניים",-12.71493148803711],["▁понять",-12.71493911743164],["▁aparte",-12.715001106262209],["റെ",-12.715008735656738],["ਵਿ",-12.715028762817385],["ებდა",-12.715031623840332],["▁shahar",-12.71503448486328],["▁klin",-12.715083122253418],["cole",-12.715086936950684],["dlu",-12.715087890625],["▁pojedin",-12.715132713317873],["صال",-12.715153694152832],["пот",-12.715205192565918],["▁malli",-12.71522045135498],["りました",-12.71522331237793],["раў",-12.715235710144045],["ಚ್",-12.715249061584473],["čeni",-12.715259552001951],["ມັນ",-12.71526336669922],["▁crime",-12.715275764465332],["gent",-12.71528434753418],["可能是",-12.715285301208496],["▁priv",-12.715291976928713],["rings",-12.71531105041504],["ога",-12.71531581878662],["▁bois",-12.715341567993164],["låt",-12.715357780456545],["▁IM",-12.715394973754885],["▁vaiku",-12.715405464172363],["buri",-12.715415954589844],["ғын",-12.715432167053224],["▁Parte",-12.715432167053224],["▁Itoobiya",-12.715439796447754],["▁vůbec",-12.715442657470703],["păr",-12.71544361114502],["▁Hul",-12.715500831604004],["▁умовах",-12.715513229370115],["ంగ్",-12.715567588806152],["▁Tentang",-12.715583801269531],["▁li
me",-12.715608596801758],["▁RU",-12.715635299682615],["▁počítač",-12.715645790100098],["itats",-12.71568202972412],["▁inizia",-12.71570110321045],["ttavat",-12.71571159362793],["▁Devlet",-12.715723037719728],["vall",-12.715725898742676],["▁halaman",-12.715741157531738],["īju",-12.715764999389648],["▁comenta",-12.715859413146973],["▁מל",-12.71587371826172],["▁увек",-12.715874671936035],["まず",-12.715883255004885],["rūp",-12.715893745422363],["άκη",-12.715893745422363],["»)",-12.715895652770996],["ုံး",-12.715965270996094],["ಕೋ",-12.715968132019045],["ရွ",-12.715975761413574],["劳动",-12.71597671508789],["路上",-12.71599006652832],["resti",-12.715996742248535],["▁davant",-12.716092109680176],["ുകള",-12.716106414794922],["▁monte",-12.716108322143556],["voer",-12.716119766235352],["▁нами",-12.716140747070312],["▁proporciona",-12.716141700744627],["cele",-12.716145515441896],["▁ხან",-12.716153144836426],["▁ਪੈ",-12.716195106506348],["▁language",-12.716211318969728],["技巧",-12.716256141662598],["▁pomp",-12.716263771057127],["狀況",-12.716265678405762],["▁ଦୁଇ",-12.71627140045166],["▁rhoncus",-12.716288566589355],["▁ازدواج",-12.716288566589355],["▁கேள்வி",-12.716288566589355],["▁කොළඹ",-12.716288566589355],["▁qaba",-12.716296195983888],["▁وأضاف",-12.716310501098633],["жиг",-12.716339111328123],["werp",-12.716343879699709],["▁mass",-12.716348648071287],["▁Seu",-12.716362953186035],["אפשר",-12.716392517089844],["ruga",-12.716400146484377],["▁stadion",-12.71641731262207],["garria",-12.716422080993652],["▁இப்படி",-12.716423034667969],["▁измени",-12.716453552246094],["ատար",-12.716471672058104],["尊重",-12.716507911682127],["centr",-12.716520309448242],["ネット",-12.716593742370604],["▁الشعب",-12.71660614013672],["▁profi",-12.716635704040527],["roman",-12.716663360595703],["สภาพ",-12.716672897338867],["▁сход",-12.716681480407717],["▁Secara",-12.716737747192385],["saya",-12.716755867004396],["▁ဖြစ်",-12.716782569885254],["वं",-12.716785430908203],["ნდ",-12.716803550720217],["joj",-12.716816902160645],["▁reale",-12.716819763183594],["▁Qar",-12.71687126159668],["▁బాగా",-12.716882705688477],["▁حديث",-12.716913223266602],["чел",-12.716915130615234],["▁samarbeid",-12.716944694519045],["▁lire",-12.716971397399902],["vizsgál",-12.7169771194458],["▁ajunge",-12.71697998046875],["▁үш",-12.7169828414917],["▁настоя",-12.71700954437256],["▁crois",-12.71702003479004],["จี",-12.717055320739746],["▁ڪراچي",-12.71710968017578],["描述",-12.717120170593262],["▁खूप",-12.717138290405272],["▁necessário",-12.71713924407959],["ത്തിയ",-12.717143058776855],["▁ড",-12.717153549194336],["▁державної",-12.717170715332031],["▁Adrian",-12.717171669006348],["дэн",-12.717175483703612],["▁ترقی",-12.71718978881836],["komp",-12.71725368499756],["▁آم",-12.717270851135254],["giu",-12.717283248901367],["áci",-12.71731662750244],["▁ท",-12.717323303222656],["dilər",-12.717391967773438],["▁kron",-12.717391967773438],["▁Jednak",-12.717405319213867],["чим",-12.71744155883789],["▁barnet",-12.7174654006958],["ስታ",-12.717486381530762],["ител",-12.71749210357666],["▁månader",-12.717503547668455],["रै",-12.717504501342772],["صور",-12.71750545501709],["▁электро",-12.717512130737305],["▁മൂ",-12.717638969421388],["▁nghi",-12.717644691467283],["ukseen",-12.717645645141602],["ttes",-12.717676162719728],["▁devlet",-12.71768569946289],["▁prve",-12.717693328857422],["้อ",-12.717729568481444],["న్ని",-12.717738151550291],["▁folosit",-12.717775344848633],["▁адказ",-12.717784881591797],["irana",-12.717855453491213],["▁egyre",-12.717856407165527],["▁1,3",-12.717899322509766],["▁Kep",
-12.717906951904297],["ORI",-12.71792221069336],["晓",-12.717927932739258],["▁хөтөлбөр",-12.71792984008789],["阻",-12.717936515808104],["▁инженер",-12.717988014221191],["▁lĩnh",-12.717988967895508],["▁рейтинг",-12.717988967895508],["▁دۇنيا",-12.717988967895508],["▁ሌሎች",-12.717988967895508],["▁өндөр",-12.717989921569824],["▁മുതല്",-12.717989921569824],["เกาะ",-12.71799087524414],["▁contato",-12.717997550964355],["▁جلسه",-12.718008041381836],["▁equip",-12.718057632446287],["▁Gene",-12.718073844909668],["▁පළ",-12.718092918395996],["▁hieno",-12.718131065368652],["▁путем",-12.718145370483398],["▁جيئن",-12.718146324157717],["קא",-12.71815299987793],["tame",-12.718174934387209],["▁මෙන්න",-12.71818733215332],["▁велико",-12.71820068359375],["地震",-12.718204498291016],["▁Tit",-12.718213081359863],["▁ಸಮ",-12.718215942382812],["▁Stro",-12.718233108520508],["dut",-12.718247413635254],["ache",-12.718287467956545],["▁შენი",-12.7183198928833],["▁ming",-12.718324661254885],["▁школы",-12.718345642089844],["▁protesta",-12.718347549438477],["კარგ",-12.71843433380127],["▁konstat",-12.718436241149902],["▁čet",-12.718457221984863],["▁Stone",-12.718469619750977],["▁rzeczy",-12.71847915649414],["▁Sup",-12.718502044677734],["▁Μά",-12.71850299835205],["▁ruoka",-12.718505859375],["▁Када",-12.71855640411377],["▁Bunge",-12.718557357788086],["▁Then",-12.718595504760742],["▁meme",-12.718608856201172],["▁степен",-12.718637466430664],["▁역",-12.718687057495115],["act",-12.718698501586914],["▁rota",-12.718705177307127],["▁lạ",-12.718729972839355],["мата",-12.718740463256836],["▁lég",-12.718741416931152],["▁Forex",-12.718743324279783],["劳",-12.718748092651367],["▁nästa",-12.718753814697266],["▁ocasión",-12.718753814697266],["sif",-12.718756675720217],["肯",-12.718793869018556],["転",-12.718807220458984],["中の",-12.71881103515625],["▁arsye",-12.718867301940918],["▁aplink",-12.718899726867676],["िता",-12.718904495239258],["▁болатын",-12.718915939331056],["ево",-12.71891975402832],["▁Aman",-12.71894359588623],["▁איי",-12.71896266937256],["ബാ",-12.718984603881836],["▁основу",-12.718988418579102],["▁रकम",-12.71900749206543],["▁Geschäfts",-12.719034194946287],["▁کرو",-12.719058990478516],["λάβ",-12.719128608703612],["▁positiva",-12.719133377075195],["▁설",-12.71919059753418],["▁likely",-12.71921443939209],["▁belang",-12.71921730041504],["వ్వ",-12.719226837158203],["رؤ",-12.719240188598633],["▁ütles",-12.719257354736328],["▁вашата",-12.719263076782228],["weit",-12.719294548034668],["▁kontor",-12.71930980682373],["충",-12.719314575195312],["біт",-12.719319343566896],["において",-12.71933937072754],["▁NH",-12.719343185424805],["▁ofert",-12.719348907470703],["▁доме",-12.71934986114502],["र्य",-12.71937656402588],["會員",-12.719436645507812],["見て",-12.719438552856444],["oloxía",-12.719547271728516],["▁வரு",-12.719549179077148],["▁ଜୀବନ",-12.719559669494627],["孙",-12.71962833404541],["▁کاربران",-12.719635009765623],["קש",-12.719645500183104],["▁County",-12.71969985961914],["▁؛",-12.719730377197266],["▁أحمد",-12.719732284545898],["▁ሙሉ",-12.719758987426758],["▁بڑا",-12.71977424621582],["▁uporabnik",-12.71977710723877],["ilise",-12.719782829284668],["▁qan",-12.719789505004885],["▁ହେବା",-12.719793319702148],["ुवा",-12.719854354858398],["▁հնարավոր",-12.719862937927246],["ην",-12.71986484527588],["▁vegada",-12.719867706298828],["▁vznik",-12.719898223876951],["办公室",-12.71992301940918],["ucu",-12.719950675964355],["▁savjet",-12.719961166381836],["▁студенти",-12.719975471496582],["LAH",-12.71998119354248],["▁ұйым",-12.71998119354248],["ുമോ",-12.72000789642334],[
"▁۹",-12.720016479492188],["ప్పు",-12.720046043395996],["daten",-12.720046997070312],["lmas",-12.720050811767578],["▁Slik",-12.720062255859377],["ilis",-12.720067024230955],["▁Kini",-12.72009563446045],["sende",-12.720102310180664],["ひ",-12.720102310180664],["▁ດ",-12.720104217529297],["pito",-12.720144271850586],["dão",-12.720145225524902],["有什么",-12.720172882080078],["lisa",-12.720190048217772],["diagnos",-12.720212936401367],["▁poeta",-12.720327377319336],["уватися",-12.720373153686523],["મો",-12.720378875732422],["▁porto",-12.720379829406738],["▁Тар",-12.7203950881958],["jää",-12.720419883728027],["ለሁ",-12.720486640930176],["pec",-12.720511436462402],["▁kəs",-12.720524787902832],["▁διάρκεια",-12.72054386138916],["▁ਸਕਦਾ",-12.72054386138916],["▁некалькі",-12.72054672241211],["▁desen",-12.720548629760742],["γων",-12.72056484222412],["▁1955",-12.720569610595703],["▁لدى",-12.720587730407717],["▁багш",-12.72061538696289],["定期",-12.72063446044922],["ผู้หญิง",-12.720718383789062],["khu",-12.72072696685791],["▁Olimp",-12.72075653076172],["每個",-12.720762252807615],["scrip",-12.720775604248049],["▁eesti",-12.720796585083008],["вни",-12.720808029174805],["volta",-12.7208251953125],["mans",-12.720897674560549],["平衡",-12.720901489257812],["▁பேர",-12.720904350280762],["ሣ",-12.720937728881836],["▁rele",-12.720939636230469],["▁Ret",-12.720977783203123],["▁zabaw",-12.720977783203123],["نهن",-12.720979690551758],["▁mbe",-12.720983505249023],["▁comh",-12.721043586730955],["ART",-12.721075057983398],["▁немного",-12.721088409423828],["LES",-12.72109031677246],["ເຖິງ",-12.721135139465332],["šky",-12.721149444580078],["anni",-12.721237182617188],["▁بيت",-12.721294403076172],["▁remain",-12.721314430236816],["regel",-12.72134494781494],["▁cz",-12.72134780883789],["▁nadal",-12.721351623535156],["》、《",-12.721363067626951],["matic",-12.721385955810549],["▁ദു",-12.721400260925291],["▁tāpēc",-12.72140407562256],["▁chiều",-12.721413612365724],["▁برداشت",-12.721427917480469],["是非",-12.721429824829102],["uwa",-12.721439361572266],["▁тешко",-12.72144889831543],["▁крем",-12.721498489379885],["▁अनि",-12.72152042388916],["▁خور",-12.72152328491211],["▁басқару",-12.721529960632324],["▁անհրաժեշտ",-12.72153091430664],["vieto",-12.721548080444336],["▁됩니다",-12.72155475616455],["मुळे",-12.721569061279297],["▁ଦେଇ",-12.721580505371094],["kent",-12.721617698669434],["▁լինել",-12.721625328063965],["▁vəzifə",-12.72162628173828],["chodzą",-12.721651077270508],["यु",-12.72165870666504],["ುವುದಿಲ್ಲ",-12.721661567687988],["▁Januar",-12.721673965454102],["▁новые",-12.721685409545898],["▁Aqua",-12.72169017791748],["▁Син",-12.72171401977539],["▁ભર",-12.721747398376465],["▁longa",-12.721756935119627],["нали",-12.721769332885742],["▁zakona",-12.72178554534912],["▁szolgáltatás",-12.721810340881348],["AMI",-12.721814155578612],["彼",-12.721857070922852],["▁pregled",-12.721900939941406],["▁jätte",-12.72191047668457],["▁Buy",-12.721915245056152],["▁යය",-12.721916198730469],["▁बहु",-12.721922874450684],["▁РА",-12.721925735473633],["▁расс",-12.721925735473633],["dió",-12.721927642822266],["▁статьи",-12.721962928771973],["▁gând",-12.721983909606934],["▁Fred",-12.722018241882324],["▁শেষ",-12.7220458984375],["тики",-12.722047805786133],["▁servizos",-12.722052574157717],["▁Mand",-12.722073554992676],["uksista",-12.72218894958496],["▁улица",-12.722190856933594],["▁ਕੰਮ",-12.722210884094238],["▁ecclesia",-12.722246170043944],["Ę",-12.722250938415527],["▁którą",-12.722250938415527],["గో",-12.722262382507324],["▁rhoi",-12.72226333618164],["ārs",-12.7222709655761
72],["▁тұ",-12.722278594970703],["VS",-12.72227954864502],["▁Lar",-12.722291946411133],["▁millió",-12.722293853759766],["▁ਕਰਦੇ",-12.722294807434082],["▁पहली",-12.722314834594728],["bete",-12.72231674194336],["條件",-12.722345352172852],["LEN",-12.7223482131958],["acqua",-12.722359657287598],["▁mutane",-12.722371101379396],["vle",-12.72238540649414],["▁Mua",-12.72239112854004],["▁death",-12.722400665283203],["സ്ഥ",-12.722467422485352],["▁dinero",-12.722505569458008],["ورة",-12.72251319885254],["▁ಓ",-12.722517013549805],["után",-12.722542762756348],["ە",-12.722548484802246],["指導",-12.722597122192385],["ረት",-12.722599983215332],["▁спросил",-12.722655296325684],["▁السيد",-12.722671508789062],["▁comhrá",-12.72267246246338],["▁Pier",-12.72267723083496],["ಷ್ಟ",-12.722681999206545],["▁Жо",-12.722700119018556],["▁баримт",-12.72270965576172],["ुर",-12.722712516784668],["▁тер",-12.722737312316896],["ÁR",-12.722776412963867],["▁sezonu",-12.722790718078612],["▁Коли",-12.722800254821776],["在中国",-12.722811698913574],["ʼ",-12.722867965698242],["▁ਦਿ",-12.722902297973633],["▁Ley",-12.72292709350586],["▁fikr",-12.722945213317873],["ића",-12.722954750061035],["▁Yap",-12.722989082336426],["शे",-12.72300624847412],["еве",-12.723012924194336],["dlá",-12.72304344177246],["кую",-12.723044395446776],["how",-12.723052024841309],["▁танц",-12.723063468933104],["дала",-12.723078727722168],["▁konsep",-12.7230863571167],["▁udara",-12.723121643066406],["▁Fernando",-12.723122596740724],["▁നിങ്ങളുടെ",-12.723122596740724],["▁història",-12.723124504089355],["▁qeyri",-12.723135948181152],["▁leti",-12.723142623901367],["သစ္",-12.723145484924316],["▁учи",-12.72314739227295],["▁शोध",-12.72317123413086],["ылган",-12.723193168640137],["кие",-12.723198890686035],["▁vies",-12.723255157470703],["▁facilitar",-12.723259925842283],["▁піс",-12.7232666015625],["▁ఇవ్వ",-12.723329544067385],["▁napi",-12.72335433959961],["▁klanten",-12.723370552062988],["▁waka",-12.723371505737305],["จากการ",-12.723383903503418],["▁Umoja",-12.723383903503418],["szó",-12.723402976989746],["▁besondere",-12.723421096801758],["قوم",-12.723474502563477],["▁खो",-12.723526000976562],["сні",-12.72353744506836],["عود",-12.723618507385254],["▁ଓଡ଼ିଶା",-12.72362995147705],["▁สาว",-12.723645210266112],["▁soru",-12.723652839660645],["tahan",-12.72365665435791],["gry",-12.723710060119627],["veli",-12.723731994628906],["็ด",-12.7237548828125],["नै",-12.723803520202637],["ajām",-12.72380828857422],["enten",-12.723820686340332],["▁шум",-12.72382354736328],["▁۱۲",-12.723841667175291],["הוד",-12.723877906799316],["яват",-12.723881721496582],["▁ხარ",-12.723881721496582],["ଣ୍ଟ",-12.72390079498291],["કર",-12.72391414642334],["puesto",-12.723918914794922],["▁برچسب",-12.723960876464844],["ieji",-12.72398567199707],["▁জানা",-12.723991394042969],["▁PHP",-12.723992347717283],["▁omen",-12.724014282226562],["▁Setiap",-12.724021911621094],["ટે",-12.72408390045166],["▁મળે",-12.724108695983888],["CB",-12.72413158416748],["▁Pháp",-12.724138259887695],["ንት",-12.72414493560791],["▁Този",-12.724184036254885],["zhi",-12.72418975830078],["lığa",-12.72419261932373],["girl",-12.724209785461426],["ified",-12.724230766296388],["▁díl",-12.72424602508545],["няя",-12.72425365447998],["▁Noord",-12.724268913269045],["▁органів",-12.72429656982422],["▁känns",-12.72431182861328],["▁122",-12.724326133728027],["얼",-12.72433853149414],["▁durumu",-12.72435474395752],["ruf",-12.724374771118164],["▁Ses",-12.724431037902832],["▁ползване",-12.724431991577148],["ینه",-12.72447395324707],["master",-12.724493980407717],["▁
femmes",-12.724496841430664],["cultura",-12.724526405334473],["▁කලින්",-12.724528312683104],["▁Metal",-12.7245512008667],["בט",-12.72457504272461],["ڇ",-12.724591255187988],["λει",-12.724594116210938],["طي",-12.724641799926758],["▁helpen",-12.724651336669922],["▁сайхан",-12.724653244018556],["mane",-12.724674224853516],["ţele",-12.72467803955078],["▁ученици",-12.724777221679688],["ಂಡ್",-12.72478485107422],["迎",-12.724796295166016],["▁aquests",-12.724806785583496],["デザイン",-12.724808692932127],["▁României",-12.72481918334961],["▁مبارک",-12.724821090698242],["៦",-12.724825859069824],["距离",-12.724845886230469],["ยาง",-12.725014686584473],["▁күч",-12.725045204162598],["אַן",-12.725046157836914],["▁utilizando",-12.72506618499756],["▁events",-12.725068092346191],["▁reguli",-12.72507095336914],["-08-",-12.725096702575684],["ынды",-12.725110054016112],["▁Anfang",-12.725115776062012],["▁оде",-12.725115776062012],["▁viso",-12.725120544433594],["汇",-12.725141525268556],["▁ficou",-12.725149154663086],["▁jõud",-12.725149154663086],["▁jane",-12.725174903869627],["▁nhi",-12.725191116333008],["ตัด",-12.725199699401855],["▁대통령",-12.725302696228027],["▁ønske",-12.7253999710083],["▁тө",-12.725419044494627],["▁vre",-12.725458145141602],["बार",-12.725464820861816],["桃",-12.725469589233398],["▁okolo",-12.72551441192627],["▁kellett",-12.725557327270508],["吴",-12.725573539733888],["▁milijuna",-12.725604057312012],["▁kommunen",-12.725632667541504],["▁morto",-12.725671768188477],["▁Anadolu",-12.725674629211426],["▁fortfarande",-12.725674629211426],["▁октомври",-12.725674629211426],["▁сколько",-12.725674629211426],["▁مرحله",-12.725674629211426],["▁이렇게",-12.725674629211426],["▁veľký",-12.725680351257324],["▁Sada",-12.725707054138184],["PK",-12.7257080078125],["▁روایت",-12.725716590881348],["лого",-12.725747108459473],["▁בתחום",-12.725749969482422],["▁Wol",-12.725751876831056],["}}",-12.725775718688965],["▁simboli",-12.725778579711914],["време",-12.72581958770752],["▁mnoho",-12.72584629058838],["加盟",-12.725868225097656],["ууда",-12.725918769836426],["▁mogoče",-12.725918769836426],["իլ",-12.726025581359863],["▁mutlu",-12.726051330566406],["▁सम्बन्ध",-12.72612762451172],["▁arteko",-12.726133346557615],["▁kriz",-12.726133346557615],["▁sürü",-12.72617530822754],["▁ғылыми",-12.726202011108398],["spiller",-12.726202964782717],["▁గో",-12.726216316223145],["lhe",-12.726222038269045],["▁klip",-12.726224899291992],["▁హా",-12.726240158081056],["▁πολιτική",-12.72624969482422],["zę",-12.726284980773926],["▁کمتر",-12.726288795471191],["▁Alp",-12.726293563842772],["δει",-12.726338386535645],["▁Millionen",-12.726346969604492],["word",-12.726350784301758],["agh",-12.7263765335083],["▁nechce",-12.726417541503906],["▁лева",-12.726430892944336],["ことができます",-12.72643756866455],["беков",-12.726439476013184],["▁daž",-12.726466178894045],["έκ",-12.726484298706056],["漏",-12.726486206054688],["梁",-12.726503372192385],["▁Zur",-12.726505279541016],["▁vrata",-12.726542472839355],["▁روحانی",-12.72654914855957],["▁fháil",-12.72655200958252],["▁колко",-12.726564407348633],["▁qaz",-12.726574897766112],["▁بلکه",-12.72657585144043],["▁heter",-12.72659683227539],["▁aporta",-12.72659969329834],["▁группы",-12.72659969329834],["xos",-12.726601600646973],["▁他",-12.726633071899414],["▁ignor",-12.72663402557373],["▁យើង",-12.72666072845459],["책",-12.726679801940918],["▁დაე",-12.726682662963867],["▁verhaal",-12.726694107055664],["▁Lord",-12.726743698120115],["хвал",-12.726762771606444],["▁spend",-12.726774215698242],["sburg",-12.72678279876709],["▁ស្រុក",-12.72681
4270019531],["▁পরি",-12.726859092712402],["▁değildir",-12.726863861083984],["τερο",-12.726871490478516],["göngu",-12.726948738098145],["让你",-12.726953506469728],["евич",-12.726964950561523],["bý",-12.726981163024902],["▁ಮಾಡುವ",-12.726996421813965],["▁dà",-12.727004051208496],["▁ABC",-12.727015495300291],["দে",-12.727048873901367],["▁turnir",-12.72705364227295],["大陸",-12.72714900970459],["▁ligj",-12.727149963378906],["▁ఎం",-12.727150917053224],["▁प्रभाव",-12.727185249328612],["▁Są",-12.727197647094728],["্ৰ",-12.7272367477417],["▁вход",-12.72726058959961],["▁ከሚ",-12.727277755737305],["รม",-12.727280616760254],["...(",-12.727291107177734],["▁важна",-12.727312088012695],["▁čeka",-12.72732639312744],["▁dokumentu",-12.727384567260742],["슈",-12.727389335632324],["也就是",-12.727395057678224],["▁ISBN",-12.727397918701172],["▁համակարգ",-12.727399826049805],["week",-12.72740364074707],["▁אולי",-12.727413177490234],["開発",-12.727431297302246],["▁നടത്ത",-12.727432250976562],["▁ऑफ",-12.72743797302246],["▁เขา",-12.727462768554688],["亚洲",-12.72746467590332],["ដៃ",-12.727482795715332],["rub",-12.727486610412598],["▁општине",-12.727514266967772],["▁passende",-12.727516174316406],["▁Futbol",-12.727535247802734],["▁วัด",-12.727547645568848],["▁organitza",-12.727551460266112],["blu",-12.727561950683594],["▁irány",-12.727582931518556],["vád",-12.72758674621582],["▁neod",-12.727591514587402],["▁настан",-12.727621078491213],["▁поз",-12.727657318115234],["있",-12.727680206298828],["▁ਮਾਰ",-12.72769832611084],["ಗಳಿಂದ",-12.72772979736328],["透露",-12.727747917175291],["50%",-12.727765083312988],["anlar",-12.727778434753418],["▁vî",-12.727787971496582],["▁最近",-12.72780990600586],["▁Februari",-12.72781753540039],["▁comfort",-12.727818489074709],["ainak",-12.72788143157959],["▁populære",-12.727890968322754],["technik",-12.727928161621094],["LOG",-12.727946281433104],["▁sista",-12.727980613708496],["MAT",-12.728034019470217],["issimo",-12.72803783416748],["▁trin",-12.72804069519043],["chel",-12.72805404663086],["▁fark",-12.728066444396973],["▁riba",-12.72808074951172],["สิ",-12.728102684020996],["▁novela",-12.728102684020996],["▁compromiso",-12.728106498718262],["ბლ",-12.728110313415527],["▁Noor",-12.72811508178711],["LARI",-12.728121757507324],["▁плане",-12.728143692016602],["▁иметь",-12.72818088531494],["נדל",-12.728188514709473],["కె",-12.728196144104004],["лөр",-12.72821807861328],["പ്പോൾ",-12.728241920471191],["▁Toe",-12.72824764251709],["▁carácter",-12.728249549865724],["▁kívül",-12.728249549865724],["▁خواهند",-12.72825050354004],["▁تعمیر",-12.728254318237305],["▁رایگان",-12.728257179260254],["比賽",-12.728289604187012],["ಂದು",-12.72829532623291],["▁ႏွစ္",-12.72835922241211],["การใช้",-12.728373527526855],["▁Moja",-12.728382110595703],["▁După",-12.7283935546875],["rinn",-12.728413581848145],["យ៉ាង",-12.728419303894045],["手術",-12.728442192077637],["▁emel",-12.728443145751951],["▁istri",-12.728452682495115],["▁varð",-12.72847843170166],["▁බවයි",-12.728507995605469],["▁wygląda",-12.72854995727539],["。。。",-12.728567123413086],["llon",-12.728583335876465],["▁שח",-12.72858715057373],["▁roedd",-12.728594779968262],["▁получен",-12.728619575500488],["ចូល",-12.728652000427246],["▁styrke",-12.728670120239258],["▁Tablet",-12.728679656982422],["клон",-12.728714942932127],["ανα",-12.72872257232666],["νου",-12.728736877441406],["ხედ",-12.728740692138672],["кос",-12.728750228881836],["نيا",-12.728815078735352],["▁Loo",-12.728837966918944],["一日",-12.728840827941896],["▁ihop",-12.728849411010742],["▁זאל",-12.72886848449707],["ruz",-12.72891
5214538574],["ੰਨ",-12.72892951965332],["▁korak",-12.72893524169922],["▁buona",-12.72900676727295],["▁Vod",-12.729015350341797],["酷",-12.729055404663086],["▁Eb",-12.729063034057615],["拼",-12.7290678024292],["▁방송",-12.729083061218262],["خو",-12.729086875915527],["▁Тоа",-12.729108810424805],["▁মন্তব্য",-12.729108810424805],["▁Məlumat",-12.72910976409912],["▁अच्छा",-12.729121208190918],["▁Haupt",-12.729155540466309],["▁gitar",-12.729201316833496],["▁tegu",-12.72923755645752],["ىز",-12.729275703430176],["并不是",-12.729321479797363],["▁donna",-12.729352951049805],["▁।’",-12.729364395141602],["▁glass",-12.729366302490234],["▁sii",-12.729373931884766],["广州",-12.729394912719728],["▁Timur",-12.729415893554688],["▁stick",-12.729416847229004],["նե",-12.72942066192627],["งค์",-12.729466438293455],["anın",-12.729467391967772],["ตรวจสอบ",-12.72947120666504],["▁потреба",-12.729472160339355],["VM",-12.729485511779783],["يط",-12.72950267791748],["▁Ваше",-12.729530334472656],["saker",-12.729544639587402],["स्ट्र",-12.729557037353516],["ដា",-12.729561805725098],["▁määra",-12.72960376739502],["spr",-12.729606628417969],["▁Energi",-12.729631423950195],["▁javno",-12.729642868041992],["▁zinazo",-12.729748725891112],["▁batteri",-12.72977352142334],["게임",-12.729805946350098],["▁llega",-12.729812622070312],["▁داشتن",-12.729815483093262],["▁क्र",-12.729856491088867],["▁Mela",-12.72986125946045],["▁maraq",-12.72988986968994],["▁drum",-12.729894638061523],["无论",-12.729905128479004],["▁आने",-12.72992706298828],["μέ",-12.729928970336914],["▁plads",-12.72993278503418],["▁schöne",-12.729934692382812],["ière",-12.729938507080078],["充滿",-12.729938507080078],["煙",-12.729951858520508],["▁scuola",-12.72996997833252],["▁прежде",-12.72996997833252],["▁ունեցել",-12.72996997833252],["▁cinayət",-12.729970932006836],["▁txiki",-12.729970932006836],["▁зөв",-12.729971885681152],["▁କହିଛନ୍ତି",-12.729973793029783],["▁வெற்றி",-12.729973793029783],["▁පොඩි",-12.729975700378418],["นะคะ",-12.729978561401367],["▁услуга",-12.72998046875],["▁načrt",-12.729985237121582],["▁порядок",-12.729985237121582],["iker",-12.730005264282228],["▁torsdag",-12.730015754699709],["िएका",-12.73001766204834],["었",-12.730025291442873],["កា",-12.730051040649414],["▁thinking",-12.730053901672363],["▁إدارة",-12.730063438415527],["βάλ",-12.73006534576416],["▁مستوى",-12.730113983154297],["ירים",-12.7301607131958],["örd",-12.730192184448242],["закон",-12.730219841003418],["▁зем",-12.730252265930176],["▁bucur",-12.730283737182615],["▁veter",-12.730365753173828],["▁SPA",-12.73040008544922],["▁traži",-12.730408668518066],["▁فرم",-12.730428695678713],["чны",-12.730436325073242],["▁הן",-12.730451583862305],["▁глав",-12.730460166931152],["нев",-12.730464935302734],["lehti",-12.730469703674316],["▁samarbejde",-12.730470657348633],["เท่า",-12.730494499206545],["cito",-12.730511665344238],["商業",-12.730581283569336],["linta",-12.730597496032717],["ԱԿ",-12.730603218078612],["▁আস",-12.730603218078612],["▁ڪر",-12.730606079101562],["бър",-12.730626106262209],["ದರ",-12.730629920959473],["ਤਰ",-12.730646133422852],["loo",-12.730687141418455],["▁ideo",-12.73071575164795],["▁aftur",-12.730720520019531],["▁vender",-12.730764389038086],["Ap",-12.730786323547363],["▁persoal",-12.73080348968506],["▁ይህንን",-12.730813026428224],["κό",-12.730823516845703],["▁ئىلگىرى",-12.730831146240234],["▁အဲဒီ",-12.730831146240234],["▁අවසන්",-12.730834007263184],["確保",-12.7308349609375],["▁đỏ",-12.73084545135498],["▁रूपमा",-12.730863571166992],["ТЕР",-12.73086643218994],["AVA",-12.730880737304688],["▁충",-12.730897903
442385],["вий",-12.730912208557127],["ization",-12.730921745300291],["▁hızlı",-12.73092555999756],["▁kiun",-12.730937957763672],["▁sosten",-12.730939865112305],["▁čase",-12.73098087310791],["▁затоа",-12.731084823608398],["▁váš",-12.731109619140623],["బా",-12.731122970581056],["▁turismo",-12.731146812438965],["вите",-12.731154441833496],["▁لنا",-12.73118782043457],["▁૨",-12.731192588806152],["▁پژوهش",-12.731201171875],["stadt",-12.731234550476074],["▁његов",-12.731234550476074],["▁skład",-12.731283187866213],["rku",-12.731306076049805],["▁siç",-12.731335639953612],["וא",-12.731348037719728],["▁hiper",-12.73135757446289],["▁Şe",-12.731406211853027],["ژن",-12.731426239013672],["▁однос",-12.731426239013672],["▁friss",-12.731431007385254],["▁noma",-12.73145580291748],["بور",-12.73147964477539],["▁ontwikkeling",-12.73147964477539],["▁فرصت",-12.731484413146973],["▁მაგ",-12.73149299621582],["▁годы",-12.73149871826172],["▁ОТ",-12.731504440307615],["▁дис",-12.731504440307615],["esten",-12.731534004211426],["راج",-12.731536865234377],["▁மி",-12.731589317321776],["τυπ",-12.731602668762209],["▁Ova",-12.731610298156738],["▁электр",-12.73162078857422],["եցին",-12.731639862060549],["▁култура",-12.731671333312988],["Der",-12.731687545776367],["經典",-12.731689453125],["ୌ",-12.731690406799316],["▁प्रयत्न",-12.731693267822266],["▁kterou",-12.731698989868164],["▁ponovno",-12.731701850891112],["▁čaj",-12.73170566558838],["дії",-12.73175811767578],["▁fixa",-12.73182487487793],["▁najmä",-12.731842041015623],["▁١",-12.731878280639648],["tävät",-12.731908798217772],["ခံရ",-12.73196029663086],["FU",-12.73203945159912],["▁службе",-12.732047080993652],["▁Oru",-12.732069969177246],["▁ihmis",-12.73208236694336],["▁sæti",-12.732139587402344],["▁commande",-12.73215103149414],["שור",-12.732175827026367],["▁действие",-12.73219394683838],["міт",-12.73220443725586],["мур",-12.73222541809082],["enses",-12.732272148132324],["gili",-12.73228359222412],["▁ຄໍາ",-12.732294082641602],["▁jonge",-12.732303619384766],["▁हेर",-12.732308387756348],["▁ഹി",-12.732316970825195],["ន្ត",-12.732349395751951],["▁Tio",-12.732370376586914],["▁თავ",-12.732372283935549],["▁score",-12.732401847839355],["▁звук",-12.732439041137695],["▁1954",-12.732462882995604],["▁hút",-12.732463836669922],["慶",-12.732491493225098],["▁Sot",-12.73250961303711],["▁وك",-12.732532501220703],["▁тэм",-12.732535362243652],["▁Ljubljana",-12.732556343078612],["▁bọn",-12.73255729675293],["▁kelkaj",-12.732585906982422],["▁afaceri",-12.732600212097168],["CIÓN",-12.73261260986328],["推广",-12.732620239257812],["▁sieci",-12.732622146606444],["ncang",-12.732623100280762],["шылық",-12.732650756835938],["eeyey",-12.732664108276367],["▁quảng",-12.732705116271973],["ಜೆ",-12.732707977294922],["▁תמיד",-12.73271656036377],["▁ومع",-12.732718467712402],["▁тең",-12.732759475708008],["ዋል፡፡",-12.73277187347412],["shtë",-12.73277759552002],["▁यथा",-12.73277759552002],["▁الأرض",-12.73280143737793],["▁mohou",-12.732808113098145],["▁व्यवहार",-12.732826232910156],["▁jestli",-12.732840538024902],["▁ieri",-12.732842445373535],["▁դաս",-12.73287868499756],["▁غا",-12.732890129089355],["▁době",-12.73289394378662],["ዙ",-12.732898712158203],["也會",-12.73289966583252],["▁түрде",-12.732922554016112],["နှစ်",-12.732955932617188],["▁Nick",-12.73296356201172],["▁കേട്ട",-12.7329683303833],["▁၅",-12.7329683303833],["idé",-12.73300075531006],["状态",-12.733037948608398],["▁Republic",-12.733080863952637],["▁Jazz",-12.73308277130127],["執",-12.733098030090332],["▁ліку",-12.73310661315918],["▁بحران",-12.733113288879396],["βο
υ",-12.733115196228027],["▁തോ",-12.733120918273926],["▁නැවත",-12.733135223388672],["▁visai",-12.733155250549316],["▁понад",-12.73317527770996],["▁مب",-12.733205795288086],["berri",-12.733220100402832],["▁üles",-12.73324203491211],["wasan",-12.733247756958008],["▁Herz",-12.73326587677002],["▁केवल",-12.733272552490234],["▁nazir",-12.73333740234375],["▁fantastic",-12.733343124389648],["▁Ond",-12.733344078063965],["ኦ",-12.733353614807127],["▁občin",-12.733386039733888],["ภูมิ",-12.733393669128418],["▁quần",-12.73341941833496],["▁колектив",-12.733423233032228],["▁esetben",-12.733427047729492],["AWA",-12.73345184326172],["▁Cross",-12.733453750610352],["▁אן",-12.733466148376465],["รายงาน",-12.733469009399414],["▁করেছেন",-12.733484268188477],["▁பார்த்த",-12.733508110046388],["▁آتش",-12.733521461486816],["▁ခု",-12.73353672027588],["▁עכשיו",-12.733538627624512],["▁nuestras",-12.733555793762209],["▁тела",-12.733558654785156],["▁먹",-12.733574867248535],["ИЯ",-12.733603477478027],["▁kultúr",-12.73362159729004],["▁krás",-12.733623504638672],["lott",-12.733634948730469],["▁рата",-12.733635902404783],["KM",-12.733636856079102],["φι",-12.733643531799316],["▁والن",-12.733670234680176],["▁vocês",-12.73367404937744],["▁लागू",-12.733692169189451],["сион",-12.733723640441896],["▁SAD",-12.733749389648438],["▁فضا",-12.733758926391602],["站在",-12.73377799987793],["▁দি",-12.733802795410156],["fyn",-12.733857154846191],["MK",-12.733871459960938],["faa",-12.7339448928833],["▁huquq",-12.733983993530272],["ທີ",-12.734004974365234],["▁сма",-12.734004974365234],["ppar",-12.734014511108398],["▁rrugë",-12.73405647277832],["、2",-12.734065055847168],["យក",-12.734100341796877],["▁एकता",-12.734113693237305],["▁설치",-12.734128952026367],["▁derecho",-12.73414421081543],["ГУ",-12.734163284301758],["▁kasino",-12.734167098999023],["▁برق",-12.734171867370604],["▁детски",-12.734196662902832],["響",-12.734227180480955],["机关",-12.734246253967283],["▁concello",-12.734272003173828],["ሟ",-12.73428440093994],["▁legalább",-12.734292984008787],["▁plane",-12.734294891357422],["▁Regula",-12.734307289123535],["▁नम्बर",-12.7343111038208],["ovalo",-12.734319686889648],["KEN",-12.734362602233888],["▁ቁ",-12.734371185302734],["▁শেখ",-12.734378814697266],["▁мр",-12.734384536743164],["tinė",-12.734417915344238],["▁कृष्ण",-12.734442710876465],["וף",-12.734458923339844],["▁Turk",-12.734466552734377],["▁자신의",-12.734467506408691],["▁tera",-12.734503746032717],["▁destru",-12.734514236450195],["ówki",-12.73454475402832],["来了",-12.734572410583496],["▁samping",-12.734580993652344],["ival",-12.734623908996582],["▁brukes",-12.73463249206543],["ٹک",-12.734642028808594],["▁piste",-12.734673500061035],["▁teşekkür",-12.734699249267578],["რებს",-12.73470401763916],["cím",-12.734722137451172],["▁avtor",-12.73473834991455],["それが",-12.73475170135498],["▁venster",-12.734753608703612],["你們",-12.734763145446776],["▁pirk",-12.734773635864258],["aíocht",-12.734787940979004],["▁desember",-12.734795570373535],["יבות",-12.734798431396484],["cru",-12.734804153442385],["▁паро",-12.7348051071167],["കാ",-12.734806060791016],["φόρ",-12.73483180999756],["gumu",-12.734841346740724],["фарм",-12.734841346740724],["▁sortir",-12.73484992980957],["ოვ",-12.734875679016112],["▁Ito",-12.734883308410645],["漫",-12.73489761352539],["deb",-12.734898567199709],["cot",-12.734907150268556],["▁Boga",-12.734911918640137],["мп",-12.73493480682373],["▁punta",-12.73495388031006],["▁koniec",-12.73496150970459],["ottam",-12.734975814819336],["είτε",-12.735010147094728],["▁rann",-12.735013008117676],["▁স্বাস্থ
্য",-12.735015869140623],["▁xil",-12.735017776489258],["▁arról",-12.735058784484863],["▁Yar",-12.735064506530762],["貌",-12.735076904296877],["邦",-12.73511028289795],["婚姻",-12.735122680664062],["মন্ত্রী",-12.735125541687012],["▁אב",-12.735127449035645],["▁erbjuder",-12.735149383544922],["▁տալիս",-12.735153198242188],["▁décembre",-12.73515510559082],["▁taha",-12.735166549682615],["▁doet",-12.73517608642578],["ادی",-12.735187530517578],["▁ٻڌ",-12.73520851135254],["▁تاہم",-12.735220909118652],["▁Просто",-12.735261917114258],["▁Ձեր",-12.735268592834473],["▁nasional",-12.735275268554688],["нение",-12.735279083251951],["▁quos",-12.7352933883667],["▁seveda",-12.73532009124756],["షి",-12.735368728637695],["▁фронт",-12.735377311706545],["▁ናቸው",-12.735377311706545],["▁Hola",-12.735380172729492],["cite",-12.735430717468262],["▁старт",-12.735445976257324],["alom",-12.73546028137207],["▁јас",-12.73547077178955],["▁lumi",-12.735501289367676],["▁145",-12.73552131652832],["개월",-12.73554801940918],["ាយ",-12.735550880432127],["▁åpne",-12.73556423187256],["▁требало",-12.735601425170898],["▁Liste",-12.735608100891112],["PRO",-12.73563289642334],["▁prvý",-12.735652923583984],["▁ελ",-12.735660552978516],["▁잡",-12.73567008972168],["ሉት",-12.735713005065918],["▁hôn",-12.735733032226562],["予約",-12.735754013061523],["▁kuba",-12.735755920410156],["▁ម្នាក់",-12.73576831817627],["▁balkon",-12.735776901245115],["▁xizmat",-12.735777854919434],["чный",-12.735787391662598],["▁técnicas",-12.735790252685549],["▁மீ",-12.735819816589355],["▁двох",-12.735834121704102],["▁luku",-12.73583984375],["▁мыйзам",-12.735854148864746],["▁තමන්",-12.735894203186035],["čev",-12.735931396484377],["▁icon",-12.735947608947754],["▁വേണ്ട",-12.735955238342283],["撞",-12.735977172851562],["▁เดือน",-12.735984802246094],["ໂ",-12.73598575592041],["▁موفق",-12.736005783081056],["▁Конечно",-12.736014366149902],["▁पत्नी",-12.73601531982422],["▁२०७४",-12.736021041870115],["▁لذلك",-12.736045837402344],["▁ennek",-12.736059188842772],["▁gillar",-12.736091613769531],["▁इससे",-12.73614501953125],["▁Тро",-12.736164093017578],["▁Số",-12.73617935180664],["▁началото",-12.736199378967283],["▁skut",-12.7362060546875],["دخل",-12.736207962036133],["▁시스템",-12.736221313476562],["▁coses",-12.73624038696289],["的产品",-12.73624324798584],["ycznych",-12.736248970031738],["▁חדר",-12.736265182495115],["▁gratuito",-12.736306190490724],["▁propos",-12.736310005187988],["ሰጥ",-12.736336708068848],["▁menulis",-12.736347198486328],["ований",-12.73634910583496],["tomi",-12.736364364624023],["▁clima",-12.736400604248049],["ткан",-12.736409187316896],["▁prst",-12.736410140991213],["ức",-12.736475944519045],["▁حالا",-12.736479759216309],["腹",-12.736485481262209],["▁søger",-12.736493110656738],["▁Price",-12.736495971679688],["AIN",-12.736515045166016],["ยอด",-12.736533164978027],["▁тры",-12.73656177520752],["▁udah",-12.736594200134276],["үнүн",-12.736634254455566],["▁security",-12.73664093017578],["Ich",-12.736654281616213],["▁Danske",-12.736665725708008],["ложи",-12.736682891845703],["盒",-12.736682891845703],["▁Jeho",-12.736696243286133],["▁tohoto",-12.736703872680664],["▁erau",-12.736722946166992],["▁Иванов",-12.736733436584473],["▁Fen",-12.736736297607422],["▁όλο",-12.7367582321167],["顯",-12.736761093139648],["yh",-12.736797332763672],["lashtirish",-12.736806869506836],["lykke",-12.736806869506836],["нього",-12.73684024810791],["常常",-12.7368745803833],["▁nærheden",-12.736894607543944],["▁zorgen",-12.736916542053224],["▁لفظ",-12.736928939819336],["▁‪",-12.736934661865234],["▁cores",-12.7369
46105957031],["▁sosiaali",-12.736974716186523],["য়ের",-12.736981391906738],["voir",-12.73698902130127],["▁przeprowadz",-12.737062454223633],["IEN",-12.737085342407228],["▁veril",-12.73711395263672],["лече",-12.737133979797363],["▁paraan",-12.737156867980955],["▁մարզ",-12.73716640472412],["▁تحقیقات",-12.737194061279297],["lul",-12.737204551696776],["ፍት",-12.7372465133667],["prost",-12.737247467041016],["ovana",-12.737249374389648],["▁तसेच",-12.737250328063965],["ுடன்",-12.73725700378418],["▁دیدار",-12.73726749420166],["▁izgleda",-12.737348556518556],["▁Sila",-12.737367630004885],["▁definitiva",-12.737380027770996],["db",-12.737382888793944],["▁kishin",-12.737414360046388],["တက္",-12.737438201904297],["▁destul",-12.737438201904297],["▁դր",-12.737444877624512],["▁Gram",-12.737457275390623],["▁кызы",-12.737467765808104],["▁پئي",-12.73747730255127],["▁робіт",-12.737493515014648],["NÍ",-12.737509727478027],["હિ",-12.737510681152344],["▁filo",-12.737537384033203],["每一",-12.737542152404783],["сып",-12.737556457519531],["▁genial",-12.737573623657228],["aikan",-12.737590789794922],["▁sjuk",-12.737598419189451],["▁Гре",-12.737600326538086],["ટ્ર",-12.737611770629885],["▁eux",-12.73763656616211],["нули",-12.737659454345703],["شه",-12.737689971923828],["▁मिला",-12.737692832946776],["▁vão",-12.73769760131836],["LB",-12.737707138061523],["▁mian",-12.73770809173584],["▁וח",-12.737728118896484],["▁Києві",-12.737749099731444],["▁початку",-12.737749099731444],["▁ديسمبر",-12.737749099731444],["▁közös",-12.737765312194824],["▁וועלט",-12.737789154052734],["11)",-12.737792015075684],["▁definir",-12.737798690795898],["▁creat",-12.737812042236328],["ιερ",-12.737870216369627],["ອອກ",-12.737871170043944],["▁لار",-12.737874984741213],["▁نار",-12.737892150878906],["▁Kraft",-12.73792839050293],["यों",-12.737942695617676],["▁MED",-12.737958908081056],["ဆက်",-12.73796272277832],["▁Пр",-12.737963676452637],["▁აღარ",-12.737997055053713],["▁olje",-12.738000869750977],["▁volk",-12.738000869750977],["▁Motiv",-12.73802661895752],["▁lento",-12.738030433654783],["▁Ген",-12.73803997039795],["र्न",-12.73805046081543],["烈",-12.73808765411377],["▁విషయం",-12.738113403320312],["▁पाठ",-12.738114356994627],["жар",-12.73813533782959],["▁bashkë",-12.738151550292969],["▁Vaš",-12.7381591796875],["▁Gl",-12.738283157348633],["也许",-12.738319396972656],["▁päälle",-12.738368034362791],["▁sheria",-12.738383293151855],["▁राजा",-12.738385200500488],["raum",-12.73838710784912],["▁voluntari",-12.73838710784912],["▁mill",-12.738444328308104],["▁Vul",-12.738462448120115],["llit",-12.738463401794434],["▁dogovor",-12.738499641418455],["ded",-12.738505363464355],["豐",-12.738564491271973],["▁Anno",-12.738603591918944],["eanna",-12.738609313964844],["ໄຊ",-12.738622665405272],["▁význam",-12.738636016845703],["▁nära",-12.738640785217283],["▁такође",-12.738641738891602],["TOS",-12.738648414611816],["spur",-12.738677978515623],["чество",-12.738680839538574],["▁absurd",-12.738683700561523],["▁GU",-12.738690376281738],["ensemble",-12.738704681396484],["▁દેશ",-12.738720893859863],["ધા",-12.738722801208496],["geführt",-12.738747596740724],["的問題",-12.738821983337402],["▁resultater",-12.73882293701172],["▁면",-12.738844871520996],["▁chegou",-12.73886013031006],["▁šeit",-12.738862991333008],["▁धा",-12.73890781402588],["▁eam",-12.738932609558104],["ہے",-12.738959312438965],["alto",-12.738962173461914],["▁vuole",-12.738966941833496],["▁환경",-12.739043235778809],["▁بق",-12.739048957824709],["schi",-12.739079475402832],["▁yadda",-12.73910427093506],["▁Alb",-12.7391366958618
16],["▁yoga",-12.739157676696776],["doras",-12.739158630371094],["▁નો",-12.739177703857422],["▁Andrea",-12.739219665527344],["ികള്",-12.739225387573242],["bene",-12.73930549621582],["▁185",-12.739328384399414],["▁Vice",-12.739351272583008],["▁Með",-12.739355087280272],["قار",-12.739365577697754],["▁tipus",-12.739371299743652],["▁పొంద",-12.73939037322998],["▁бак",-12.739422798156738],["▁phim",-12.739449501037598],["▁tiesiog",-12.739474296569824],["▁neviem",-12.73947811126709],["▁Urlaub",-12.73948574066162],["▁utolsó",-12.73948574066162],["▁rregull",-12.739490509033203],["▁мога",-12.739501953125],["大幅",-12.739507675170898],["▁uddannelse",-12.739509582519531],["▁tiesības",-12.739519119262695],["▁mesure",-12.739547729492188],["▁اخیر",-12.7395658493042],["χή",-12.739577293395996],["ໃຫຍ່",-12.739603996276855],["тердің",-12.73960781097412],["عر",-12.739632606506348],["ক্স",-12.739641189575195],["ලෝ",-12.73965549468994],["▁Mikro",-12.739665031433104],["▁Mess",-12.739678382873535],["maid",-12.739686012268066],["fyll",-12.739752769470217],["種類",-12.739761352539062],["ल्ड",-12.739794731140137],["▁selge",-12.73983097076416],["▁teil",-12.73983669281006],["ЕК",-12.739837646484377],["ျမ",-12.739860534667969],["▁قوي",-12.73987102508545],["▁sebou",-12.739885330200195],["将会",-12.739931106567385],["▁белги",-12.739961624145508],["nelle",-12.739977836608888],["ន្ទ",-12.73998737335205],["▁quin",-12.739988327026367],["膜",-12.739996910095217],["werken",-12.74000072479248],["▁качества",-12.740004539489746],["▁Rec",-12.740041732788086],["▁direction",-12.740063667297363],["dice",-12.740068435668944],["dził",-12.740110397338867],["▁Куп",-12.740131378173828],["▁ด",-12.740132331848145],["▁eksport",-12.740199089050291],["lava",-12.740230560302734],["klet",-12.74028491973877],["贴",-12.740286827087402],["▁terenie",-12.740289688110352],["ÉR",-12.740291595458984],["ائز",-12.740336418151855],["ቪ",-12.740346908569336],["▁कधी",-12.740358352661133],["▁जाएगा",-12.740363121032717],["▁saanud",-12.740367889404297],["▁رکھنے",-12.740370750427246],["istic",-12.740412712097168],["▁Υ",-12.740413665771484],["▁mencoba",-12.740423202514648],["▁өөрийн",-12.740431785583496],["▁новата",-12.740448951721191],["顾",-12.740468978881836],["▁besteht",-12.74050998687744],["本来",-12.74050998687744],["▁국제",-12.740518569946287],["▁šios",-12.740550994873049],["▁ర్",-12.740574836730955],["טש",-12.74061679840088],["dire",-12.74063205718994],["▁ඉඩ",-12.740677833557127],["▁kursi",-12.740693092346191],["▁evangeli",-12.74069595336914],["儿子",-12.740703582763672],["▁conjunto",-12.740724563598633],["мену",-12.740735054016112],["ेषु",-12.74075698852539],["ENG",-12.740757942199709],["▁қара",-12.740764617919922],["หมอ",-12.740768432617188],["▁perchè",-12.740782737731934],["ነሳ",-12.740798950195312],["▁Politi",-12.740811347961426],["▁kompakt",-12.74081325531006],["▁společnosti",-12.74082851409912],["▁предава",-12.740904808044434],["▁Stil",-12.740909576416016],["ცვა",-12.740917205810549],["ήματος",-12.740937232971191],["alım",-12.740939140319824],["カー",-12.740948677062988],["▁sinni",-12.74098014831543],["▁מעל",-12.74103832244873],["▁iqtisadi",-12.74107551574707],["▁일반",-12.741128921508787],["宿",-12.741133689880373],["genera",-12.74114990234375],["კავშირ",-12.741185188293455],["alter",-12.74120044708252],["▁활용",-12.741225242614746],["▁প্রধান",-12.741230010986328],["▁факултет",-12.741230964660645],["▁settembre",-12.741239547729492],["▁помош",-12.741249084472656],["▁voto",-12.741262435913086],["▁ήδη",-12.741262435913086],["▁mivel",-12.741270065307615],["matta",-12.741274833
6792],["alma",-12.741288185119627],["▁parle",-12.741312980651855],["▁августа",-12.74132251739502],["▁Plo",-12.74138069152832],["randa",-12.74138641357422],["▁здрав",-12.74145793914795],["▁୭",-12.741487503051758],["▁Moj",-12.74152374267578],["▁SEM",-12.741578102111816],["▁energy",-12.741620063781738],["積極",-12.741632461547852],["▁detalj",-12.741637229919434],["小编",-12.741684913635254],["▁opción",-12.74169921875],["lamak",-12.741722106933594],["nac",-12.741724014282228],["房子",-12.741729736328123],["fini",-12.741827964782717],["การแข่งขัน",-12.741829872131348],["▁секунд",-12.741854667663574],["▁ಇದ್ದ",-12.741869926452637],["дзь",-12.741886138916016],["▁geliyor",-12.741887092590332],["▁Cloud",-12.741893768310549],["jy",-12.741897583007812],["kiä",-12.741945266723633],["▁ল",-12.741983413696287],["Ze",-12.741997718811035],["RED",-12.741999626159668],["▁ways",-12.7420015335083],["liu",-12.742009162902832],["▁szinte",-12.742026329040527],["▁Element",-12.74203109741211],["ssé",-12.742032051086426],["▁hvilken",-12.742042541503906],["▁kasutada",-12.742051124572754],["▁καν",-12.74205207824707],["уучу",-12.742059707641602],["니까",-12.74206829071045],["宏",-12.742085456848145],["บริเวณ",-12.742095947265623],["▁Kemudian",-12.74209690093994],["▁menghasilkan",-12.74209690093994],["▁සොයා",-12.742106437683104],["▁uporabe",-12.742107391357422],["جاز",-12.742111206054688],["▁pretend",-12.742180824279783],["ארג",-12.742196083068848],["▁сезона",-12.742215156555176],["▁אינו",-12.742238998413086],["rila",-12.742265701293944],["▁በስ",-12.742284774780272],["note",-12.742297172546388],["ಟೆ",-12.742352485656738],["ئون",-12.742385864257812],["നീ",-12.742392539978027],["▁cabeza",-12.742504119873049],["עז",-12.742510795593262],["▁lira",-12.742528915405272],["▁Жу",-12.742551803588867],["lised",-12.742586135864258],["▁comentari",-12.74259090423584],["▁слов",-12.742637634277344],["早上",-12.742653846740724],["▁پاي",-12.742657661437988],["▁strat",-12.742668151855469],["▁Sob",-12.742687225341797],["▁longe",-12.742706298828123],["zón",-12.742728233337402],["hro",-12.742741584777832],["▁ମହିଳା",-12.742745399475098],["▁kraja",-12.74278163909912],["▁хэн",-12.742803573608398],["ково",-12.742812156677246],["аад",-12.742826461791992],["تس",-12.742836952209473],["▁liek",-12.742846488952637],["▁coloca",-12.742847442626951],["废",-12.742868423461914],["▁nazionale",-12.742871284484863],["▁nouvelles",-12.742875099182127],["örü",-12.74288558959961],["▁jeune",-12.742929458618164],["▁Magaalada",-12.742968559265137],["▁uchaguzi",-12.742968559265137],["▁vďaka",-12.742968559265137],["▁Vit",-12.742984771728516],["▁Teo",-12.742989540100098],["oly",-12.743003845214844],["гри",-12.743011474609377],["ထောင်",-12.74301815032959],["▁पुरस्कार",-12.743023872375488],["▁Consell",-12.74302864074707],["فى",-12.74303150177002],["سور",-12.743056297302246],["ڪن",-12.743069648742676],["题",-12.743091583251951],["▁գիտ",-12.743106842041016],["▁politique",-12.74312686920166],["▁ചെയ്യുക",-12.74313259124756],["ڌ",-12.74315357208252],["言葉",-12.743220329284668],["ကုန်",-12.743228912353516],["barn",-12.743242263793944],["める",-12.743345260620115],["pped",-12.743391036987305],["føring",-12.743399620056152],["1.2",-12.743408203125],["▁També",-12.743463516235352],["▁compris",-12.743490219116213],["十年",-12.743497848510742],["▁თბილისის",-12.743504524230955],["သံုး",-12.743515014648438],["▁ostatni",-12.743517875671388],["時候",-12.74353313446045],["kna",-12.743534088134766],["/04/",-12.743605613708496],["ラン",-12.743607521057127],["unud",-12.743656158447266],["用于",-12.743709564208984],["2
.5",-12.74374294281006],["sluit",-12.74375820159912],["▁sonder",-12.743768692016602],["▁exercit",-12.743791580200195],["▁Jā",-12.743825912475586],["テレビ",-12.743830680847168],["knak",-12.743841171264648],["▁različnih",-12.743841171264648],["▁շարք",-12.743841171264648],["▁espanyol",-12.743842124938965],["▁개인정보",-12.743846893310549],["▁einhver",-12.743847846984863],["▁ዘመን",-12.74388027191162],["ekli",-12.74392032623291],["▁bật",-12.743922233581545],["стью",-12.743928909301758],["forme",-12.743941307067873],["itatem",-12.743977546691896],["▁protection",-12.743983268737791],["▁tanti",-12.74399185180664],["▁strøm",-12.744014739990234],["sdóttir",-12.744025230407717],["▁ክርስቲያን",-12.744036674499512],["▁एक्स",-12.74403953552246],["acje",-12.74404525756836],["дас",-12.744059562683104],["بە",-12.744067192077637],["ልን",-12.744071006774902],["▁letos",-12.744118690490724],["▁letih",-12.744122505187988],["했",-12.74413776397705],["ترا",-12.744199752807615],["▁Ainda",-12.744244575500488],["vlja",-12.744245529174805],["▁Nii",-12.744251251220703],["pression",-12.744285583496094],["▁banc",-12.744292259216309],["▁LAN",-12.744364738464355],["在这",-12.74438190460205],["İL",-12.744400024414062],["▁элемент",-12.744418144226074],["λό",-12.744454383850098],["▁Viz",-12.744497299194336],["▁កាលពី",-12.744499206542969],["▁identifica",-12.744510650634766],["начал",-12.744528770446776],["が必要",-12.744536399841309],["さんが",-12.744542121887209],["graphic",-12.74455738067627],["▁fyll",-12.744617462158203],["▁1600",-12.744661331176758],["portal",-12.74468231201172],["сме",-12.744695663452148],["▁raf",-12.744701385498049],["enberg",-12.744702339172363],["tiku",-12.744714736938477],["▁hasonló",-12.744714736938477],["▁पर्यटन",-12.744714736938477],["tvar",-12.744722366333008],["▁국민",-12.74473762512207],["дак",-12.744741439819336],["▁تازہ",-12.744742393493652],["مپ",-12.744778633117676],["ပိုင်း",-12.744779586791992],["▁Wohnung",-12.744793891906738],["▁menjawab",-12.744796752929688],["▁زمين",-12.744816780090332],["කොට",-12.744820594787598],["ทะเล",-12.744821548461914],["▁ლარი",-12.744832992553713],["▁الأمن",-12.744850158691406],["หลวง",-12.744863510131836],["▁aposta",-12.744882583618164],["▁പാര്",-12.744888305664062],["▁dəstək",-12.744914054870604],["éria",-12.74497127532959],["▁dobrý",-12.744977951049805],["▁væ",-12.744979858398438],["▁समा",-12.744990348815918],["ሆ",-12.745018005371094],["▁مرا",-12.745081901550291],["▁רא",-12.745088577270508],["▁태",-12.74510669708252],["ուղ",-12.745111465454102],["ట్ల",-12.745112419128418],["▁prezzo",-12.74513816833496],["▁utawa",-12.74515438079834],["kých",-12.745247840881348],["▁zár",-12.745279312133787],["▁spec",-12.74530029296875],["▁styrk",-12.745330810546877],["znak",-12.745357513427734],["ayay",-12.745381355285645],["meen",-12.745383262634276],["▁batek",-12.745388984680176],["▁слаб",-12.745402336120604],["ань",-12.745406150817873],["▁snakke",-12.745424270629885],["▁Jane",-12.7454252243042],["▁QU",-12.745450019836426],["▁huone",-12.745450973510742],["▁సంస్థ",-12.745471954345703],["мас",-12.745487213134766],["▁informācija",-12.745498657226562],["▁బీ",-12.74549961090088],["▁бұ",-12.745502471923828],["陪",-12.74550724029541],["лений",-12.74552059173584],["uudesta",-12.745579719543455],["▁réponse",-12.74558925628662],["▁Vaa",-12.745597839355469],["▁UEFA",-12.745603561401367],["▁جگہ",-12.745615005493164],["▁അവരുടെ",-12.745620727539062],["▁sehemu",-12.745660781860352],["▁북한",-12.745667457580566],["▁Maro",-12.745670318603516],["▁jede",-12.745673179626465],["десят",-12.745676040649414],["▁നിങ്ങള്",-12.7
45677947998049],["坐在",-12.745687484741213],["▁Ame",-12.74571132659912],["heder",-12.745814323425291],["éré",-12.745814323425291],["ជាមួយ",-12.745819091796877],["plant",-12.745830535888672],["▁سۇ",-12.745834350585938],["▁milik",-12.745862007141112],["jul",-12.745864868164062],["▁البر",-12.745880126953123],["lb",-12.745928764343262],["жне",-12.745957374572754],["ПА",-12.745984077453612],["isho",-12.746001243591309],["▁ಜಾ",-12.74601936340332],["▁constat",-12.746031761169434],["▁hô",-12.746038436889648],["▁Улс",-12.746068954467772],["▁Сал",-12.746071815490724],["koulu",-12.746137619018556],["▁tante",-12.746138572692873],["▁тогава",-12.74619960784912],["تۈر",-12.746216773986816],["▁Quel",-12.746224403381348],["▁migliore",-12.746233940124512],["klád",-12.746235847473145],["এস",-12.74623680114746],["ാമ",-12.746251106262209],["▁նախ",-12.746267318725586],["▁yangi",-12.74628448486328],["鼻",-12.746295928955078],["▁recipe",-12.746308326721191],["▁kontroli",-12.746319770812988],["▁beh",-12.746320724487305],["stem",-12.746373176574709],["▁Fá",-12.746384620666504],["▁pół",-12.746403694152832],["▁ਵਾਰ",-12.746423721313477],["нске",-12.74642848968506],["ञ्ज",-12.746432304382324],["英雄",-12.746442794799805],["▁Kann",-12.746447563171388],["防止",-12.746450424194336],["지고",-12.746450424194336],["▁innerhalb",-12.746463775634766],["▁রয়েছে",-12.746463775634766],["▁πληροφορίες",-12.746464729309082],["▁स्पष्ट",-12.746468544006348],["▁trận",-12.746478080749512],["▁свята",-12.74649143218994],["▁ወቅት",-12.746493339538574],["▁денес",-12.74651336669922],["hei",-12.746525764465332],["▁مطرح",-12.746527671813965],["▁Hướng",-12.74653148651123],["▁beneficia",-12.746540069580078],["▁skre",-12.746543884277344],["-28",-12.746552467346191],["lmi",-12.74660301208496],["▁оператор",-12.746603965759276],["▁eben",-12.746631622314451],["йшов",-12.74663257598877],["▁степени",-12.746641159057615],["▁ביום",-12.746641159057615],["▁atividades",-12.746662139892578],["很快",-12.746670722961426],["HL",-12.746674537658691],["▁عملیات",-12.746686935424805],["nizi",-12.746689796447754],["طالب",-12.746715545654297],["▁consenti",-12.746732711791992],["▁negocio",-12.746766090393066],["Те",-12.746781349182127],["▁Bord",-12.74680233001709],["▁wanda",-12.746819496154783],["દી",-12.746853828430176],["skola",-12.746861457824709],["ຕະ",-12.746914863586426],["▁ویلي",-12.746928215026855],["cah",-12.74693202972412],["වැ",-12.746977806091309],["▁mulheres",-12.747025489807127],["▁Roll",-12.747035026550291],["宗教",-12.747040748596191],["▁ensino",-12.747041702270508],["▁случаи",-12.74704647064209],["主动",-12.747051239013672],["▁ვე",-12.747055053710938],["▁फल",-12.7470703125],["▁високи",-12.747076034545898],["altres",-12.747106552124023],["▁பய",-12.747108459472656],["thair",-12.747129440307615],["▁шоу",-12.7471342086792],["▁Reykjavík",-12.747148513793944],["挑",-12.747173309326172],["kako",-12.747174263000488],["ାଉ",-12.747199058532717],["来到",-12.747201919555664],["▁Finanz",-12.747251510620115],["حاول",-12.74725341796875],["潤",-12.747257232666016],["utin",-12.747331619262695],["123",-12.74733829498291],["쉬",-12.747339248657228],["เยอะ",-12.747340202331545],["▁μέρος",-12.747342109680176],["▁ತಾಜಾ",-12.747343063354492],["alda",-12.747360229492188],["▁رکن",-12.747363090515137],["▁mou",-12.747366905212402],["▁ποτέ",-12.747368812561035],["▁aktuelle",-12.747369766235352],["▁kijk",-12.747369766235352],["ટો",-12.74737548828125],["▁सवाल",-12.747376441955566],["▁riesgo",-12.747395515441896],["curs",-12.747418403625488],["▁свим",-12.747509002685549],["▁Hassan",-12.747518539428713],["
一大",-12.747520446777344],["үүн",-12.74753475189209],["▁yetir",-12.747567176818848],["ΡΙ",-12.74757480621338],["▁különböző",-12.747588157653809],["▁وارو",-12.74761199951172],["▁prenos",-12.747663497924805],["ລາ",-12.747732162475586],["▁troba",-12.747732162475586],["vatel",-12.747756004333496],["ireann",-12.747801780700684],["▁Ένα",-12.74780559539795],["▁sop",-12.747825622558594],["▁பெற்ற",-12.74782943725586],["▁ጠቅላይ",-12.747835159301758],["▁expres",-12.747854232788086],["▁ماہ",-12.74791145324707],["▁ipak",-12.747926712036133],["のみ",-12.747926712036133],["ipe",-12.747941970825195],["▁violent",-12.747960090637209],["CU",-12.747965812683104],["भित्र",-12.747986793518066],["▁نمود",-12.748042106628418],["జే",-12.748085975646973],["▁soort",-12.748095512390137],["lgi",-12.74811840057373],["排名",-12.74811840057373],["▁ලංකාව",-12.748125076293944],["▁médico",-12.748127937316896],["▁drug",-12.748132705688477],["▁과",-12.74815845489502],["垃圾",-12.748167037963867],["▁نب",-12.74817180633545],["▁ଅନ୍ୟ",-12.748187065124512],["▁որեւէ",-12.748215675354004],["▁έκανε",-12.74821662902832],["μένο",-12.748218536376951],["▁عوامل",-12.748242378234863],["стал",-12.74824333190918],["▁Дар",-12.748272895812988],["▁acestei",-12.74828052520752],["▁aziende",-12.74828052520752],["的那",-12.74828052520752],["ਸਤ",-12.74830722808838],["▁etme",-12.748313903808594],["شهد",-12.748321533203123],["ému",-12.74835968017578],["少女",-12.748366355895996],["▁ಆರ್",-12.74839687347412],["레이",-12.748414039611816],["解释",-12.748419761657717],["വീ",-12.74848461151123],["▁סט",-12.748523712158203],["▁الجديدة",-12.748544692993164],["stum",-12.748567581176758],["เหลือ",-12.748591423034668],["იზ",-12.748598098754885],["اسي",-12.748701095581056],["▁شکن",-12.74870491027832],["اشت",-12.748709678649902],["νω",-12.748714447021484],["▁Tudi",-12.74871826171875],["▁multiple",-12.748720169067385],["นม",-12.748823165893556],["ضا",-12.74889850616455],["middag",-12.748955726623535],["▁једном",-12.74897289276123],["▁Salut",-12.748979568481444],["▁ትምህርት",-12.748992919921877],["▁langa",-12.748998641967772],["▁ಯೋಜನೆ",-12.749003410339355],["▁talán",-12.749008178710938],["؟!",-12.749024391174316],["巧",-12.749049186706545],["మూ",-12.749062538146973],["▁Жер",-12.749068260192873],["vaka",-12.749085426330566],["▁tratar",-12.749088287353516],["ກອງປະຊຸມ",-12.74909210205078],["▁1.4",-12.74909210205078],["▁pakalpojumu",-12.74909210205078],["▁vacances",-12.74909210205078],["▁ઉપયોગ",-12.74909496307373],["▁tudja",-12.749095916748049],["school",-12.749099731445312],["▁Guerra",-12.749100685119627],["umbu",-12.749120712280272],["▁hitam",-12.749120712280272],["▁hvernig",-12.749122619628906],["Ur",-12.749127388000488],["▁механизм",-12.749134063720703],["▁onnistu",-12.74915885925293],["chos",-12.749165534973145],["▁nemusí",-12.749216079711914],["▁Security",-12.749221801757812],["▁koli",-12.749223709106444],["▁þessi",-12.749258041381836],["▁bőr",-12.749265670776367],["▁Sever",-12.749300956726074],["عرف",-12.749309539794922],["▁هنگام",-12.749310493469238],["DV",-12.749311447143556],["sehen",-12.749346733093262],["uria",-12.74937343597412],["▁Dana",-12.749409675598145],["▁semalam",-12.749423027038574],["ាំ",-12.749446868896484],["▁၇",-12.749451637268066],["▁AND",-12.749496459960938],["▁slá",-12.749537467956545],["▁व्य",-12.749537467956545],["▁अर्ब",-12.74953842163086],["рош",-12.749555587768556],["lög",-12.749560356140137],["▁−",-12.749567031860352],["▁रोक",-12.749579429626465],["บิ",-12.749592781066896],["▁మాట",-12.749598503112791],["▁Quart",-12.749613761901855],["▁hevitra",-12.7497501373291
02],["▁mňa",-12.749752044677734],["ראַ",-12.74978733062744],["▁abrir",-12.749802589416504],["▁sposob",-12.749823570251465],["▁kursus",-12.749858856201172],["qish",-12.74986743927002],["▁кла",-12.749919891357422],["▁મન",-12.74994659423828],["ķī",-12.749955177307127],["▁ගත්",-12.749961853027344],["بط",-12.749966621398926],["оос",-12.750018119812012],["▁pakaian",-12.750018119812012],["άει",-12.750029563903809],["כב",-12.750065803527832],["ANS",-12.75009536743164],["伴",-12.750123977661133],["▁பொது",-12.750160217285156],["sein",-12.750182151794434],["▁impe",-12.750185012817385],["▁שעות",-12.750210762023926],["▁그는",-12.750215530395508],["よね",-12.750218391418455],["søk",-12.75023365020752],["arbeiten",-12.750250816345217],["જન",-12.750258445739746],["▁street",-12.75026798248291],["ngkap",-12.750271797180176],["RG",-12.750283241271973],["ត្រូវ",-12.7503023147583],["▁Norte",-12.750411033630373],["讓我",-12.750415802001951],["▁articole",-12.750484466552734],["▁junior",-12.750492095947266],["▁consta",-12.75051498413086],["▁успе",-12.750515937805176],["แม",-12.75054168701172],["صدر",-12.750545501708984],["nnut",-12.750563621520996],["цый",-12.750567436218262],["▁tədbir",-12.750588417053224],["ело",-12.750633239746094],["▁bə",-12.750652313232422],["▁verksamhet",-12.750655174255373],["ေျမ",-12.75065803527832],["▁భారత",-12.75066089630127],["væ",-12.750702857971191],["上升",-12.750737190246582],["▁zeigen",-12.75074291229248],["laridan",-12.750800132751465],["muse",-12.750834465026855],["▁tomēr",-12.750835418701172],["▁kanan",-12.750839233398438],["參考",-12.750842094421388],["нү",-12.750847816467283],["▁člověk",-12.750849723815918],["▁चित्रपट",-12.75085163116455],["▁İzmir",-12.750858306884766],["▁10-15",-12.750886917114258],["▁квартал",-12.750903129577637],["▁świecie",-12.75090503692627],["▁tany",-12.7509126663208],["rão",-12.750919342041016],["ические",-12.75093936920166],["▁Sign",-12.750946998596191],["еща",-12.75095272064209],["▁Hukum",-12.750959396362305],["ёл",-12.750961303710938],["▁کرکے",-12.750964164733888],["扣",-12.75099277496338],["▁Косово",-12.750993728637695],["یڈ",-12.750998497009276],["者的",-12.751005172729492],["utilisation",-12.751007080078123],["▁თავს",-12.751014709472656],["▁മല",-12.751015663146973],["stern",-12.751049041748049],["▁fearr",-12.751058578491213],["▁वडा",-12.751059532165527],["▁agra",-12.751062393188477],["▁تی",-12.751129150390623],["dry",-12.751133918762209],["▁NRK",-12.751147270202637],["▁konst",-12.751150131225586],["CHE",-12.751219749450684],["လ်",-12.751224517822266],["dā",-12.751233100891112],["▁Roger",-12.751269340515137],["▁kora",-12.751276969909668],["▁luk",-12.751310348510742],["vorm",-12.75132656097412],["ندگان",-12.75133228302002],["▁якой",-12.75135612487793],["▁ონლაინ",-12.751365661621094],["▁أنا",-12.751391410827637],["▁dream",-12.751395225524902],["бели",-12.751399040222168],["ാണു",-12.75141429901123],["▁prost",-12.75141716003418],["koja",-12.751423835754396],["орон",-12.751440048217772],["▁vozila",-12.751457214355469],["历",-12.75148582458496],["▁31,",-12.751516342163086],["жки",-12.7515287399292],["▁kalt",-12.75155544281006],["▁elä",-12.751578330993652],["▁وبعد",-12.751606941223145],["▁Beauty",-12.751646041870115],["skóla",-12.751646995544434],["▁teva",-12.75165843963623],["醫院",-12.751717567443848],["▁राजधानी",-12.751729011535645],["▁gånger",-12.751730918884276],["ವ್",-12.751769065856934],["當地",-12.75179672241211],["தில்",-12.751798629760742],["▁Mens",-12.751856803894045],["▁Corte",-12.751874923706056],["▁നെ",-12.751898765563965],["자는",-12.751914024353027],["▁платеж"
,-12.751916885375977],["▁길",-12.751941680908203],["▁trhu",-12.751957893371582],["nagar",-12.75196647644043],["oid",-12.751985549926758],["▁flesta",-12.751985549926758],["حا",-12.751989364624023],["ιλ",-12.752002716064451],["▁seem",-12.752020835876465],["אַנ",-12.752032279968262],["▁버",-12.752055168151855],["▁حذف",-12.75206184387207],["▁viera",-12.752067565917969],["erako",-12.752070426940918],["▁math",-12.752097129821776],["▁eveniment",-12.752140045166016],["▁ბი",-12.752147674560549],["▁dowladda",-12.752174377441406],["▁сақтау",-12.752211570739746],["▁Itse",-12.752227783203123],["▁eksp",-12.752279281616213],["▁domaine",-12.752284049987791],["▁улам",-12.752334594726562],["NIK",-12.752345085144045],["▁състав",-12.752364158630373],["ΚΑ",-12.752368927001951],["energia",-12.752370834350586],["▁Iso",-12.752375602722168],["▁Questa",-12.752433776855469],["чено",-12.75244426727295],["▁neli",-12.75251293182373],["▁ستر",-12.752519607543944],["▁ಯಾವ",-12.75258731842041],["▁Fy",-12.752592086791992],["äll",-12.752603530883787],["ുമ്പോൾ",-12.752609252929688],["▁온라인",-12.752613067626951],["▁물론",-12.752625465393066],["▁قام",-12.752635955810549],["ခရီး",-12.752644538879396],["▁septembrie",-12.75266170501709],["ගා",-12.752662658691406],["▁nö",-12.752678871154783],["▁Airport",-12.75269603729248],["ΜΑ",-12.752696990966797],["▁জীবন",-12.75271224975586],["▁Beta",-12.752739906311035],["▁acesso",-12.752758979797363],["▁çıktı",-12.75277614593506],["▁แบบ",-12.752782821655272],["mızda",-12.75278377532959],["▁અમે",-12.752789497375488],["▁amo",-12.752816200256348],["▁guy",-12.752830505371094],["shiriki",-12.752840042114258],["ଲୋ",-12.752840995788574],["årig",-12.752848625183104],["ERO",-12.75286102294922],["çin",-12.752880096435549],["▁ген",-12.752970695495604],["▁zote",-12.75297737121582],["▁кезінде",-12.752985954284668],["ébe",-12.753003120422363],["urat",-12.75303840637207],["▁خلق",-12.753045082092283],["kone",-12.753067016601562],["▁эрүүл",-12.75308609008789],["▁Title",-12.753180503845217],["AST",-12.753212928771973],["▁نق",-12.75322437286377],["ദ്ര",-12.75327491760254],["▁Մեր",-12.75327968597412],["អាច",-12.75331974029541],["▁KL",-12.753329277038574],["BD",-12.753389358520508],["бач",-12.753421783447266],["ዞ",-12.753422737121582],["एल",-12.753437042236328],["▁gegn",-12.753437042236328],["רג",-12.753451347351074],["膠",-12.753474235534668],["▁Phòng",-12.753495216369627],["▁개최",-12.753495216369627],["▁кошт",-12.753510475158691],["▁проекту",-12.753510475158691],["▁എത്ര",-12.753527641296388],["верш",-12.753594398498535],["▁ਬੇ",-12.753605842590332],["▁rød",-12.75363063812256],["▁chó",-12.753671646118164],["ሄ",-12.75369358062744],["▁bó",-12.753703117370604],["▁elde",-12.753714561462402],["▁inson",-12.753719329833984],["▁बनाउने",-12.753728866577148],["ļo",-12.753753662109377],["▁Wohn",-12.753798484802246],["voll",-12.753814697265623],["▁privi",-12.753819465637209],["▁vakantie",-12.753841400146484],["▁Астана",-12.753846168518066],["терді",-12.753849029541016],["İZ",-12.753863334655762],["jár",-12.753896713256836],["▁litr",-12.75391960144043],["անա",-12.75401782989502],["telja",-12.754108428955078],["▁Russia",-12.754110336303713],["▁několik",-12.754110336303713],["geld",-12.754136085510254],["regi",-12.754185676574709],["▁стих",-12.754283905029297],["ੀਆ",-12.754301071166992],["נען",-12.754359245300291],["飲",-12.75436019897461],["Ū",-12.754371643066406],["▁području",-12.754371643066406],["▁truyện",-12.754371643066406],["▁सांसद",-12.75437355041504],["▁oshirish",-12.754374504089355],["▁목록",-12.754375457763672],["▁छोड़",-12.75438
2133483888],["▁እንደሆነ",-12.754382133483888],["لاف",-12.754387855529783],["▁eş",-12.754404067993164],["เรียก",-12.754434585571287],["ΟΣ",-12.754439353942873],["льний",-12.754474639892578],["▁Національн",-12.754515647888184],["▁lavet",-12.754539489746094],["වල්",-12.754558563232422],["▁કોઇ",-12.754570960998535],["▁ועל",-12.754572868347168],["录",-12.754656791687012],["▁każdym",-12.754697799682615],["▁personlig",-12.754706382751465],["əl",-12.754728317260742],["praw",-12.754753112792969],["ન્ડ",-12.75479507446289],["ग्रह",-12.754839897155762],["strup",-12.754857063293455],["uce",-12.75486660003662],["عل",-12.754913330078123],["▁projekte",-12.754913330078123],["▁Phi",-12.754968643188477],["ioù",-12.754980087280272],["▁interesting",-12.754984855651855],["tros",-12.754997253417969],["▁tæt",-12.754997253417969],["jący",-12.7550048828125],["陆",-12.755008697509766],["▁Izrael",-12.755083084106444],["bik",-12.755084037780762],["▁partneri",-12.75510025024414],["▁Fox",-12.755102157592772],["▁taget",-12.755114555358888],["▁Mohd",-12.755120277404783],["duh",-12.75515079498291],["部署",-12.755162239074709],["AIS",-12.755173683166504],["samt",-12.755184173583984],["▁perfil",-12.755197525024414],["▁Category",-12.755254745483398],["▁mchezo",-12.755254745483398],["▁ఇతర",-12.75526237487793],["▁positif",-12.75527572631836],["▁Пар",-12.755278587341309],["nį",-12.755279541015623],["hist",-12.755284309387209],["▁міст",-12.755290031433104],["തിന്",-12.755292892456056],["▁postoji",-12.75529670715332],["지를",-12.75532054901123],["ABA",-12.755325317382812],["▁kód",-12.755325317382812],["പ്പു",-12.755331993103027],["贷款",-12.755342483520508],["▁Þar",-12.755369186401367],["▁стро",-12.755404472351074],["人数",-12.75546169281006],["шно",-12.755465507507324],["дете",-12.75546646118164],["▁Τρ",-12.75547218322754],["▁Bond",-12.755520820617676],["терге",-12.755558967590332],["▁ادب",-12.755602836608888],["▁Deo",-12.75561809539795],["siyasi",-12.755656242370604],["▁Hund",-12.755664825439451],["▁río",-12.755684852600098],["▁Pac",-12.755724906921388],["ctic",-12.755739212036133],["▁Premio",-12.755760192871094],["ीम",-12.755762100219728],["▁повторно",-12.755773544311523],["▁вами",-12.755779266357422],["ನ್ಸ್",-12.755813598632812],["▁պատրաստ",-12.755815505981444],["▁οποίος",-12.755818367004396],["▁Αθήνα",-12.755855560302734],["ەل",-12.755866050720217],["▁gór",-12.755876541137695],["▁mne",-12.75596046447754],["λεί",-12.755962371826172],["▁ພະ",-12.756011962890623],["male",-12.756013870239258],["ድን",-12.75602912902832],["▁cunoscut",-12.756044387817385],["▁бүт",-12.75605010986328],["▁Abbas",-12.756064414978027],["▁صالح",-12.756064414978027],["凯",-12.756075859069824],["icus",-12.756077766418455],["▁Федер",-12.756080627441406],["측",-12.756095886230469],["lığını",-12.756108283996582],["ギ",-12.756121635437012],["koľvek",-12.75613498687744],["ครอบครัว",-12.756136894226074],["iscono",-12.756138801574709],["▁Saúde",-12.756138801574709],["▁គ្នា",-12.756144523620604],["▁ຫນ້າ",-12.756147384643556],["ιστική",-12.756155967712402],["▁screen",-12.756175994873049],["了很多",-12.756175994873049],["ป้องกัน",-12.756179809570312],["▁زخمی",-12.756189346313477],["필",-12.756192207336426],["იკ",-12.756196975708008],["测试",-12.756221771240234],["▁ukaz",-12.75622844696045],["られます",-12.756272315979004],["▁impression",-12.756290435791016],["▁parent",-12.75631046295166],["ನೋ",-12.756339073181152],["设",-12.756373405456545],["▁Deci",-12.75639820098877],["orient",-12.756404876708984],["▁strate",-12.756406784057615],["▁न्याय",-12.756433486938477],["돌",-12.756439208984377],["nız",-
12.756443977355955],["▁resa",-12.756473541259766],["▁ማስ",-12.756539344787598],["อุปกรณ์",-12.756542205810549],["▁idei",-12.756542205810549],["▁comarca",-12.756571769714355],["▁ସମ୍",-12.75657558441162],["▁избори",-12.756611824035645],["χει",-12.756625175476074],["▁remo",-12.756648063659668],["▁prognoz",-12.756658554077148],["▁відео",-12.756661415100098],["▁Герман",-12.756741523742676],["102",-12.75674533843994],["lumot",-12.756778717041016],["ലേ",-12.756786346435549],["▁dilihat",-12.756792068481444],["▁shpreh",-12.756806373596191],["วัย",-12.756823539733888],["ույ",-12.75685214996338],["▁jamii",-12.756860733032228],["▁trzy",-12.756878852844238],["▁난",-12.75693130493164],["▁grei",-12.756932258605955],["▁Sự",-12.75694179534912],["考慮",-12.756986618041992],["弗",-12.75698947906494],["▁vývoj",-12.756996154785156],["▁Sayt",-12.756998062133787],["ବାକୁ",-12.757004737854004],["നിയ",-12.757015228271484],["▁kūr",-12.757018089294434],["森林",-12.757019996643066],["နောက်",-12.757020950317385],["kolwiek",-12.7570219039917],["▁दोस्रो",-12.7570219039917],["▁Najeriya",-12.757022857666016],["에서도",-12.757039070129396],["▁rose",-12.757043838500977],["▁paralel",-12.757055282592772],["тардын",-12.75705909729004],["▁أكبر",-12.757064819335938],["ască",-12.75709056854248],["digt",-12.757102012634276],["▁exposición",-12.757104873657228],["▁दर्ज",-12.757110595703123],["▁mayoría",-12.757125854492188],["▁БА",-12.757125854492188],["లూ",-12.757144927978516],["▁gere",-12.757187843322754],["ደን",-12.757194519042969],["▁Spr",-12.757234573364258],["▁mendengar",-12.757242202758787],["▁Vegas",-12.757259368896484],["▁Jay",-12.7572603225708],["ించే",-12.757314682006836],["▁Fotos",-12.757328987121582],["▁Ša",-12.757329940795898],["rådet",-12.75734519958496],["▁எழுத",-12.757347106933594],["аться",-12.757369995117188],["▁hinaus",-12.757370948791504],["▁sopiva",-12.757386207580566],["▁مقدار",-12.757389068603516],["երում",-12.757390975952148],["▁valori",-12.75739574432373],["Ce",-12.757416725158691],["bulo",-12.757428169250488],["газ",-12.757441520690918],["▁নিউজ",-12.75747013092041],["āp",-12.757471084594728],["КС",-12.7575044631958],["נן",-12.7575101852417],["▁sekret",-12.757537841796877],["▁dikenal",-12.75754165649414],["▁Стар",-12.757542610168455],["kooda",-12.757545471191406],["fot",-12.757549285888672],["ຽນ",-12.75755786895752],["▁buyur",-12.757563591003418],["▁endelig",-12.757583618164062],["部屋",-12.757712364196776],["πορ",-12.757730484008787],["▁Arsenal",-12.75774383544922],["uesit",-12.757767677307127],["丰",-12.757848739624023],["មា",-12.757890701293944],["▁ท่าน",-12.757896423339844],["ۃ",-12.757906913757324],["ជើង",-12.757906913757324],["▁Madaxweyne",-12.757906913757324],["▁Daftar",-12.757908821105955],["accueil",-12.757917404174805],["▁nedeniyle",-12.757919311523438],["▁sucesso",-12.757920265197754],["▁ਕਰਕੇ",-12.757928848266602],["▁veikt",-12.757936477661133],["▁padom",-12.758020401000977],["▁자리",-12.758024215698242],["▁Nev",-12.758071899414062],["今まで",-12.758075714111328],["▁tanış",-12.75811004638672],["फ्",-12.758146286010742],["▁jobbet",-12.758193016052246],["▁Եթե",-12.758198738098145],["▁skep",-12.758218765258787],["記事",-12.758243560791016],["nīca",-12.75826930999756],["ស៊ី",-12.758281707763672],["ambo",-12.75831413269043],["새",-12.758323669433594],["ኞች",-12.758358001708984],["kowa",-12.758362770080566],["▁специально",-12.758367538452148],["ДЕ",-12.758393287658691],["▁kongres",-12.758407592773438],["သက်",-12.758414268493652],["▁Disk",-12.758414268493652],["nggu",-12.758424758911133],["▁sepanjang",-12.758456230163574],["кру"
,-12.758488655090332],["слаў",-12.758501052856444],["▁수도",-12.758563995361328],["▁ქვეყნის",-12.758566856384276],["tamise",-12.758593559265137],["▁jotta",-12.758593559265137],["▁khóa",-12.758631706237791],["zare",-12.758639335632324],["σια",-12.75864315032959],["вела",-12.75864601135254],["▁מנהל",-12.758676528930664],["סר",-12.75868320465088],["falls",-12.758708953857422],["▁bado",-12.758716583251951],["▁စိတ္",-12.758733749389648],["▁буу",-12.758744239807127],["॑",-12.758792877197266],["▁AQSh",-12.758792877197266],["▁spesielt",-12.758792877197266],["۸",-12.758795738220217],["ІН",-12.758797645568848],["▁महत्वपूर्ण",-12.758797645568848],["▁انجمن",-12.758800506591797],["▁जस",-12.758800506591797],["▁silloin",-12.758804321289062],["▁Luật",-12.758814811706545],["▁विवाद",-12.758835792541504],["оц",-12.758854866027832],["طار",-12.75889492034912],["▁avuto",-12.758933067321776],["abord",-12.75900650024414],["▁energet",-12.759014129638672],["▁기대",-12.75903034210205],["▁qualitat",-12.759033203125],["▁линк",-12.759045600891112],["ıq",-12.759048461914062],["带着",-12.759078979492188],["是一",-12.759081840515137],["▁තිබෙන",-12.759087562561035],["▁막",-12.759121894836426],["▁είτε",-12.75913429260254],["▁koszt",-12.759148597717283],["▁prva",-12.759149551391602],["▁Licht",-12.75919246673584],["▁čisto",-12.759215354919434],["ገደ",-12.759217262268066],["▁ofici",-12.759222030639648],["▁faca",-12.759230613708496],["▁уни",-12.759235382080078],["日在",-12.75925636291504],["▁Hart",-12.759264945983888],["▁serwis",-12.759276390075684],["▁Kino",-12.759278297424316],["ਕਰ",-12.75930118560791],["нам",-12.75934600830078],["▁kupa",-12.759417533874512],["îl",-12.759428977966309],["kung",-12.75947380065918],["เส้นเลือดขอด",-12.759478569030762],["stą",-12.759520530700684],["ఆర్",-12.759526252746582],["ydı",-12.759552001953123],["▁erano",-12.75955295562744],["▁Tân",-12.759561538696287],["NB",-12.759572982788086],["ԱԿԱՆ",-12.759594917297363],["▁uke",-12.759613037109377],["ሥ",-12.759614944458008],["ക്കാര",-12.75963020324707],["рина",-12.75965690612793],["τσι",-12.75967502593994],["▁некоторые",-12.759679794311523],["▁कौन",-12.759711265563965],["▁ኦ",-12.759711265563965],["安裝",-12.759724617004396],["▁disposizione",-12.75972843170166],["▁kinni",-12.759748458862305],["ህን",-12.759771347045898],["axe",-12.759772300720217],["▁календар",-12.759787559509276],["ikė",-12.75978946685791],["▁Sexy",-12.759790420532228],["▁جرم",-12.75979232788086],["籍",-12.759795188903809],["න්නේ",-12.759809494018556],["месе",-12.759814262390137],["▁välis",-12.759818077087402],["hets",-12.759851455688477],["▁semester",-12.759861946105955],["▁Sayfa",-12.759889602661133],["▁eco",-12.75992202758789],["▁Isang",-12.759960174560549],["▁qə",-12.759968757629396],["▁Santos",-12.75998306274414],["lanadi",-12.759989738464355],["▁Бір",-12.759997367858888],["▁سریال",-12.760003089904783],["▁zemlji",-12.760029792785645],["▁lyder",-12.76004123687744],["▁кру",-12.760076522827148],["▁ә",-12.760082244873049],["▁segja",-12.760088920593262],["▁లు",-12.760106086730955],["▁raad",-12.760109901428224],["нув",-12.760140419006348],["దో",-12.760181427001951],["ብረ",-12.760209083557127],["主题",-12.76022243499756],["▁meist",-12.760250091552734],["tokea",-12.760263442993164],["▁कला",-12.760272026062012],["хон",-12.76027774810791],["bac",-12.76028060913086],["▁политички",-12.760293960571287],["груз",-12.7603178024292],["▁всей",-12.760319709777832],["حم",-12.760323524475098],["▁bölüm",-12.760379791259766],["pā",-12.760445594787598],["▁özü",-12.760461807250977],["▁Mani",-12.760470390319824],["▁aflat",-12.
760483741760254],["▁побед",-12.76053524017334],["ips",-12.76054859161377],["▁ஏன்",-12.760550498962402],["십",-12.760562896728516],["▁ప్రా",-12.760573387145996],["▁Fusce",-12.760576248168944],["עלות",-12.76058292388916],["▁vorbe",-12.76058864593506],["▁kapa",-12.76059913635254],["▁قرارداد",-12.760621070861816],["▁yhtä",-12.760623931884766],["ilma",-12.76066780090332],["plik",-12.760713577270508],["øg",-12.760748863220217],["▁pukul",-12.760831832885742],["▁Максим",-12.760838508605955],["oittaa",-12.76084327697754],["▁brukt",-12.76088047027588],["тей",-12.760947227478027],["▁Ді",-12.760976791381836],["▁küsimus",-12.760979652404783],["staat",-12.760992050170898],["▁talab",-12.76100254058838],["▁menjadikan",-12.761005401611328],["▁Στα",-12.761011123657228],["unii",-12.761034965515137],["yfik",-12.76103973388672],["▁역사",-12.76103973388672],["โรงเรียน",-12.761077880859377],["▁ក្រុមហ៊ុន",-12.761094093322754],["2020",-12.761099815368652],["лүү",-12.761110305786133],["Post",-12.761128425598145],["gde",-12.761160850524902],["▁7)",-12.76116180419922],["▁pán",-12.761234283447266],["▁syy",-12.761239051818848],["▁kova",-12.761255264282228],["▁Кога",-12.761274337768556],["kää",-12.76129150390625],["▁خاک",-12.761320114135742],["ланды",-12.76132106781006],["▁trece",-12.76132869720459],["▁கடந்த",-12.76135540008545],["▁чар",-12.761377334594728],["▁ekster",-12.761380195617676],["▁XVII",-12.761401176452637],["猛",-12.761412620544434],["烤",-12.761423110961914],["บัญชี",-12.76145362854004],["▁Только",-12.761455535888672],["▁XIII",-12.76146125793457],["▁기록",-12.76147174835205],["▁contempla",-12.761482238769531],["iris",-12.76148509979248],["▁Menu",-12.761494636535645],["varu",-12.76149845123291],["▁يونيو",-12.761504173278809],["ਮਾਨ",-12.76150608062744],["▁לראות",-12.761518478393556],["▁chờ",-12.761552810668944],["▁ຫຼື",-12.761557579040527],["ବାର",-12.76158332824707],["▁Nazirliyinin",-12.761584281921388],["۶",-12.76158618927002],["ARD",-12.761636734008787],["rasi",-12.761664390563965],["陸",-12.761712074279783],["ţiile",-12.761725425720217],["▁seçki",-12.761730194091797],["いません",-12.761734008789062],["▁budaya",-12.761737823486328],["▁व्या",-12.76176643371582],["▁fordel",-12.761770248413086],["▁metsä",-12.761775016784668],["▁což",-12.7617769241333],["▁toplam",-12.76179313659668],["▁cash",-12.76182746887207],["laboració",-12.761848449707031],["vies",-12.76185131072998],["rnos",-12.761876106262209],["▁belirt",-12.761886596679688],["기에",-12.761950492858888],["яла",-12.761957168579102],["▁rör",-12.761967658996582],["чыць",-12.761975288391112],["▁bertanya",-12.7620210647583],["yib",-12.7620267868042],["анта",-12.762042999267578],["నె",-12.762091636657717],["▁creu",-12.762109756469728],["▁Vista",-12.76211643218994],["笔",-12.76215648651123],["▁Down",-12.762166976928713],["▁trailer",-12.762168884277344],["▁druhý",-12.762214660644531],["▁Modell",-12.762249946594238],["▁plaza",-12.762253761291504],["▁impacto",-12.762306213378906],["▁entro",-12.762341499328612],["▁pengetahuan",-12.762343406677246],["цією",-12.762346267700195],["▁נאך",-12.76236629486084],["▁fleiri",-12.762373924255373],["एको",-12.762380599975586],["▁simplu",-12.762381553649902],["බි",-12.762399673461914],["▁fanns",-12.76248550415039],["▁Rede",-12.762497901916504],["▁smile",-12.76253890991211],["▁postul",-12.762545585632324],["▁ನಗರ",-12.762560844421388],["gati",-12.762614250183104],["▁natt",-12.762642860412598],["▁Sə",-12.762646675109863],["▁muzej",-12.762656211853027],["社區",-12.762675285339355],["▁귀",-12.762699127197266],["▁allan",-12.762724876403809],["▁salute",-1
2.762735366821287],["λλα",-12.762739181518556],["结束",-12.762786865234377],["▁119",-12.762809753417969],["▁tenim",-12.76289176940918],["UC",-12.762897491455078],["▁incorpora",-12.762903213500977],["તુ",-12.762908935546877],["▁Flor",-12.762919425964355],["▁წელი",-12.762937545776367],["▁113",-12.762950897216797],["ଘ",-12.76298713684082],["عان",-12.763006210327148],["▁बिना",-12.763006210327148],["▁cree",-12.763014793395996],["▁Lud",-12.763015747070312],["▁hangi",-12.76304531097412],["▁Organ",-12.763047218322754],["▁Головна",-12.763050079345703],["ический",-12.763057708740234],["▁taller",-12.763070106506348],["日期",-12.763078689575195],["▁rig",-12.763091087341309],["ንስ",-12.763102531433104],["▁antiga",-12.763110160827637],["▁passado",-12.76313304901123],["▁augst",-12.763142585754396],["▁agter",-12.763177871704102],["▁lepší",-12.763197898864746],["員工",-12.763198852539062],["جيل",-12.763212203979492],["煮",-12.763213157653809],["▁berusaha",-12.763311386108398],["صدق",-12.76332664489746],["▁לפ",-12.76334285736084],["γνωσ",-12.763355255126951],["uts",-12.76335620880127],["▁Iza",-12.76336669921875],["ូរ",-12.76340675354004],["▁124",-12.763444900512695],["▁dès",-12.763452529907228],["▁mantener",-12.763495445251465],["vao",-12.763514518737791],["ələr",-12.76351547241211],["▁timmar",-12.763543128967283],["888",-12.763612747192385],["komu",-12.763617515563965],["univers",-12.763617515563965],["▁пів",-12.763626098632812],["▁belgi",-12.76364803314209],["시장",-12.763656616210938],["tojn",-12.763660430908203],["▁មុន",-12.763662338256836],["ПО",-12.76372241973877],["▁erreichen",-12.763723373413086],["▁Luk",-12.763769149780272],["ఐ",-12.763792991638184],["▁işlem",-12.76380729675293],["▁valeur",-12.76380729675293],["▁загад",-12.763846397399902],["food",-12.76388168334961],["ല്ലോ",-12.763914108276367],["ారా",-12.763916969299316],["vač",-12.763923645019531],["▁පමණක්",-12.76393222808838],["තුර",-12.763933181762695],["▁Марк",-12.763940811157228],["youtube",-12.763943672180176],["▁siswa",-12.763958930969238],["▁येत",-12.763980865478516],["▁teatr",-12.763994216918944],["▁upplýsingar",-12.76401424407959],["▁Monitor",-12.764019966125488],["говори",-12.764025688171388],["▁പറയുന്നു",-12.764032363891602],["kert",-12.764039039611816],["▁வைத்த",-12.764047622680664],["▁hlavne",-12.764049530029297],["▁ಕಾರ್ಯಕ್ರಮ",-12.764058113098145],["▁áit",-12.764060974121094],["▁처음",-12.764081954956056],["ritu",-12.764093399047852],["tiile",-12.7640962600708],["劉",-12.764097213745115],["▁aksi",-12.764105796813965],["หรือไม่",-12.764108657836914],["ျပည္",-12.764116287231444],["▁histórico",-12.764123916625977],["▁rob",-12.764124870300291],["▁భారీ",-12.764124870300291],["▁декабря",-12.76412582397461],["rda",-12.764130592346191],["μορφ",-12.764142990112305],["телите",-12.76414680480957],["▁ہوگا",-12.76416015625],["▁پوليس",-12.764163970947266],["▁કારણે",-12.764164924621582],["كتب",-12.764269828796388],["▁sporto",-12.764275550842283],["ִ",-12.76433277130127],["▁primeiros",-12.764365196228027],["▁راز",-12.764389991760254],["laik",-12.76440143585205],["▁поб",-12.76440143585205],["▁criminal",-12.764409065246582],["▁xong",-12.76442050933838],["▁உள்ளது",-12.764452934265137],["पासून",-12.764482498168944],["▁ಮೋದಿ",-12.764507293701172],["ിക്കാന്",-12.764532089233398],["▁حي",-12.764573097229004],["лна",-12.764585494995115],["ຟ",-12.764599800109863],["▁Тој",-12.764606475830078],["▁Phil",-12.764649391174316],["▁игр",-12.764650344848633],["▁живи",-12.76467227935791],["inizio",-12.764713287353516],["▁vrou",-12.764715194702148],["他说",-12.764716148376465],["▁кай",
-12.76475429534912],["▁særlig",-12.764781951904297],["▁destinat",-12.764784812927246],["▁नेकपा",-12.764842987060549],["jk",-12.764843940734863],["план",-12.764848709106444],["ulle",-12.764878273010254],["▁кеңес",-12.764880180358888],["▁uzna",-12.764886856079102],["▁ویل",-12.76492404937744],["▁Sci",-12.764939308166504],["▁Bahkan",-12.764944076538086],["赶",-12.7649564743042],["▁معاون",-12.764962196350098],["▁مسیر",-12.764982223510742],["▁ашиг",-12.764988899230955],["ર્ટ",-12.764994621276855],["два",-12.765003204345703],["▁તરીકે",-12.7650146484375],["▁вполне",-12.765015602111816],["▁кілька",-12.765016555786133],["▁Teknologi",-12.765020370483398],["রো",-12.76502513885498],["▁matu",-12.765027046203612],["▁היי",-12.76502799987793],["▁Vic",-12.765029907226562],["▁Vivamus",-12.765036582946776],["ਰਨ",-12.765047073364258],["▁xuño",-12.765047073364258],["альная",-12.765057563781738],["▁ልጅ",-12.765079498291016],["▁raw",-12.765122413635254],["▁Esp",-12.765128135681152],["ೊಂದು",-12.765142440795898],["провод",-12.765201568603516],["▁Landes",-12.765241622924805],["erto",-12.765263557434082],["paka",-12.765313148498535],["악",-12.765340805053713],["▁مقدم",-12.765356063842772],["▁luas",-12.765381813049316],["▁Jesu",-12.765403747558594],["の方が",-12.76546859741211],["▁egna",-12.765514373779297],["inys",-12.76555061340332],["▁parque",-12.765573501586914],["▁kötü",-12.765589714050291],["▁брак",-12.765609741210938],["▁idioma",-12.765636444091797],["▁našem",-12.765650749206545],["meer",-12.765689849853516],["▁ddy",-12.765718460083008],["šė",-12.76574420928955],["уулж",-12.76577091217041],["▁проводи",-12.76587200164795],["gha",-12.765876770019531],["érie",-12.76588249206543],["против",-12.765887260437012],["▁mengeluarkan",-12.765907287597656],["▁αποτέλεσμα",-12.765907287597656],["▁pertanyaan",-12.765908241271973],["機會",-12.76591682434082],["▁جسٹس",-12.765929222106934],["登場",-12.76593017578125],["▁blijft",-12.765934944152832],["▁የፖለቲካ",-12.765942573547363],["ნილი",-12.76595687866211],["▁அணி",-12.765978813171388],["▁280",-12.765985488891602],["къс",-12.766024589538574],["▁Dig",-12.766066551208496],["▁کارت",-12.766095161437988],["yf",-12.76609706878662],["īti",-12.766098022460938],["ючи",-12.766108512878418],["▁وليس",-12.76611042022705],["非常に",-12.766111373901367],["▁onları",-12.766112327575684],["桌",-12.766114234924316],["utas",-12.766120910644531],["▁Erfahrung",-12.766139030456545],["ичне",-12.766179084777832],["зне",-12.766180992126465],["▁ബന്ധ",-12.766182899475098],["ваюць",-12.766213417053224],["moto",-12.766215324401855],["▁данные",-12.766218185424805],["▁Kurdî",-12.76623249053955],["kv",-12.766261100769045],["ljena",-12.766277313232422],["▁nông",-12.766300201416016],["▁dori",-12.766307830810549],["vî",-12.766316413879396],["▁mole",-12.766364097595217],["いく",-12.766395568847656],["kele",-12.766403198242188],["িয়া",-12.766416549682615],["▁Cyn",-12.766443252563477],["▁Wilaya",-12.766449928283691],["▁Kod",-12.766457557678224],["▁디",-12.766505241394045],["tott",-12.766559600830078],["大家都",-12.76657009124756],["▁Dark",-12.766587257385254],["会有",-12.76661205291748],["▁മനസ്സ",-12.766647338867188],["kud",-12.76665210723877],["iling",-12.7666654586792],["▁picture",-12.766681671142578],["рту",-12.766687393188477],["tél",-12.766693115234377],["şk",-12.766725540161133],["敗",-12.766732215881348],["착",-12.766749382019045],["推荐",-12.766769409179688],["▁sayesinde",-12.766799926757812],["▁cải",-12.766800880432127],["▁kíván",-12.766800880432127],["▁ਕੁਝ",-12.766801834106444],["wari",-12.766839027404783],["▁ראשון",-12.76684856414795
],["▁maan",-12.76686954498291],["▁затем",-12.766886711120604],["GR",-12.766910552978516],["▁стану",-12.766911506652832],["▁အာ",-12.766913414001465],["▁관한",-12.766921043395996],["▁brig",-12.766972541809082],["▁Grazie",-12.766990661621094],["ဝါ",-12.767007827758787],["modern",-12.767008781433104],["ढ़",-12.76701545715332],["▁üld",-12.76701545715332],["ନୀ",-12.76705837249756],["ఫీ",-12.767067909240724],["يدي",-12.76710319519043],["▁ANG",-12.767108917236328],["的研究",-12.767122268676758],["▁Rest",-12.767178535461426],["▁Sebab",-12.767179489135742],["▁ລ",-12.767189979553224],["▁(22)",-12.76722240447998],["sou",-12.767227172851562],["▁gələn",-12.76722812652588],["▁geliştir",-12.767253875732422],["▁күй",-12.767255783081056],["▁vanuit",-12.767271041870115],["▁таза",-12.767285346984863],["bü",-12.767289161682127],["▁dintr",-12.767292976379396],["▁kona",-12.767305374145508],["▁안전",-12.767308235168455],["긴",-12.76731014251709],["▁설명",-12.767315864562988],["ického",-12.767339706420898],["ائع",-12.767358779907228],["ሽን",-12.767374038696287],["▁জি",-12.767375946044922],["keer",-12.767391204833984],["▁टा",-12.767398834228516],["diya",-12.767426490783691],["▁piac",-12.767440795898438],["konda",-12.767444610595703],["▁سج",-12.76745891571045],["▁heard",-12.767476081848145],["ggen",-12.767478942871094],["▁şeyler",-12.767485618591309],["▁pí",-12.767496109008787],["▁надання",-12.767500877380373],["審",-12.767577171325684],["▁деце",-12.767632484436035],["普遍",-12.767633438110352],["兄弟",-12.76764678955078],["▁hiss",-12.767667770385742],["יום",-12.767671585083008],["調査",-12.767675399780272],["▁principales",-12.76767635345459],["▁1953",-12.767680168151855],["昔",-12.76768684387207],["▁ultimi",-12.767688751220703],["ରଣ",-12.767691612243652],["▁ਪਿੰਡ",-12.767695426940918],["▁કર્યું",-12.767706871032717],["entreprise",-12.767712593078612],["▁říká",-12.767728805541992],["▁certe",-12.767746925354004],["başı",-12.767751693725586],["▁psico",-12.767759323120115],["다가",-12.767773628234863],["▁körül",-12.767783164978027],["▁apă",-12.767867088317873],["대학교",-12.767889976501465],["ළි",-12.767909049987791],["▁tiket",-12.767909049987791],["▁Ұ",-12.767932891845703],["▁аппарат",-12.767946243286133],["▁added",-12.767956733703612],["▁realidad",-12.76796817779541],["▁kože",-12.767974853515623],["раща",-12.76797866821289],["▁brenda",-12.768024444580078],["▁Shin",-12.768026351928713],["▁alikuwa",-12.768027305603027],["▁gép",-12.76803207397461],["isiä",-12.768047332763672],["លេខ",-12.768071174621582],["다는",-12.768073081970217],["▁ଜଣ",-12.768111228942873],["▁produktów",-12.768115043640137],["▁помог",-12.768144607543944],["▁naša",-12.768152236938477],["dhur",-12.768211364746094],["▁nef",-12.768221855163574],["▁yritys",-12.768230438232422],["终",-12.768237113952637],["య్",-12.768293380737305],["▁പോലും",-12.76830005645752],["▁nuevos",-12.768312454223633],["सल",-12.76837158203125],["▁bereit",-12.7683744430542],["▁Champions",-12.76841926574707],["ινή",-12.768428802490234],["▁झाल्या",-12.768463134765623],["踏",-12.768463134765623],["auti",-12.768485069274902],["bier",-12.7684965133667],["▁deschis",-12.768543243408203],["▁jeu",-12.768547058105469],["▁magic",-12.768568992614746],["▁જીવન",-12.768576622009276],["ırdı",-12.76858139038086],["▁uda",-12.768583297729492],["▁część",-12.768588066101074],["▁Teraz",-12.768589973449709],["▁tato",-12.768589973449709],["teis",-12.768593788146973],["▁බෑ",-12.76860523223877],["▁proč",-12.768609046936035],["▁منتقل",-12.768613815307615],["▁yönelik",-12.768616676330566],["▁ସହିତ",-12.7686185836792],["▁nenos",-12.7686500549
3164],["▁Monat",-12.768669128417969],["▁schlecht",-12.768675804138184],["▁saor",-12.768678665161133],["wordpress",-12.768691062927246],["▁tina",-12.768704414367676],["▁прояв",-12.768707275390623],["▁گوید",-12.76870822906494],["▁সরকার",-12.76871109008789],["▁हवा",-12.7687406539917],["▁кодекс",-12.768754959106444],["▁ekan",-12.768765449523926],["戲",-12.768884658813477],["▁bidrag",-12.76889419555664],["▁obsahuje",-12.768922805786133],["▁zákona",-12.768936157226562],["▁suchen",-12.768943786621094],["شون",-12.76895523071289],["ROS",-12.768966674804688],["▁flat",-12.768997192382812],["▁alue",-12.768998146057127],["▁чином",-12.769047737121582],["让我们",-12.769097328186035],["▁müü",-12.769122123718262],["▁figyel",-12.76916790008545],["斷",-12.769207000732422],["▁حسين",-12.769211769104004],["▁urmă",-12.769219398498535],["▁پاره",-12.769221305847168],["ayı",-12.769268989562988],["母亲",-12.76927375793457],["題",-12.769312858581545],["▁spiega",-12.769320487976074],["▁ձեւ",-12.769347190856934],["ที่คุณ",-12.769356727600098],["▁organisation",-12.769363403320312],["aki",-12.769427299499512],["▁ofrecer",-12.76943016052246],["麥",-12.769436836242676],["岩",-12.76944637298584],["ике",-12.769471168518066],["▁terletak",-12.769484519958496],["▁الیکشن",-12.769484519958496],["▁sinne",-12.769490242004396],["▁however",-12.769523620605469],["θηκαν",-12.769561767578123],["ਘ",-12.76956844329834],["ስብ",-12.769572257995604],["▁seule",-12.769579887390137],["▁fö",-12.7695894241333],["žne",-12.769591331481934],["▁тил",-12.769597053527832],["Europa",-12.769631385803224],["不但",-12.769691467285156],["▁бих",-12.76970386505127],["міс",-12.769723892211914],["▁vállal",-12.769749641418455],["▁oče",-12.769769668579102],["▁신청",-12.769781112670898],["▁oppure",-12.769824981689451],["defini",-12.769835472106934],["▁הער",-12.769837379455566],["сії",-12.769875526428224],["▁стать",-12.769878387451172],["▁אחרים",-12.769920349121094],["▁համա",-12.769947052001951],["▁isegi",-12.770027160644531],["▁זייער",-12.77004623413086],["▁глаза",-12.770049095153809],["▁betyr",-12.770062446594238],["pár",-12.770075798034668],["▁acto",-12.7700834274292],["▁sạch",-12.770155906677246],["น้ํามัน",-12.770160675048828],["▁❤",-12.770163536071776],["▁Jaya",-12.770191192626951],["▁boot",-12.770191192626951],["▁குழந்தை",-12.77019214630127],["ときに",-12.770223617553713],["▁flag",-12.770225524902344],["▁Kemal",-12.77022647857666],["▁aş",-12.770254135131836],["▁obter",-12.770256042480469],["ถนน",-12.770275115966797],["சம்",-12.770291328430176],["▁imate",-12.77035140991211],["ไฟฟ้า",-12.770357131958008],["▁ziet",-12.770359992980955],["▁tink",-12.770365715026855],["▁napríklad",-12.770379066467283],["▁naozaj",-12.770383834838867],["გებ",-12.770393371582031],["ลบ",-12.770394325256348],["▁Senior",-12.77040958404541],["▁Major",-12.770430564880373],["▁trattamento",-12.770440101623535],["ಧಿ",-12.770442008972168],["зен",-12.770451545715332],["▁سمیت",-12.770472526550291],["▁проведе",-12.77047348022461],["▁naujo",-12.770479202270508],["Bra",-12.77050495147705],["▁prêt",-12.770509719848633],["▁общества",-12.77052879333496],["евски",-12.770529747009276],["ສານ",-12.77055549621582],["yczne",-12.770569801330566],["▁apps",-12.770578384399414],["basa",-12.770585060119627],["▁දක්වා",-12.77061367034912],["上で",-12.77061367034912],["▁spital",-12.770663261413574],["рх",-12.770670890808104],["▁Εκ",-12.770684242248535],["ደረገ",-12.7706937789917],["▁структура",-12.77073574066162],["▁línea",-12.770736694335938],["iñ",-12.770759582519531],["▁Situ",-12.770763397216797],["نون",-12.77077293395996],["чэння",-
12.770776748657228],["▁activ",-12.770776748657228],["ሎች",-12.770804405212402],["તે",-12.770811080932615],["▁ferdig",-12.770819664001465],["▁versión",-12.770833969116213],["▁selamat",-12.77086067199707],["▁Бала",-12.770872116088867],["▁зона",-12.77089500427246],["▁126",-12.770919799804688],["▁прво",-12.770936965942385],["▁Address",-12.770963668823242],["▁псих",-12.770976066589355],["▁modelos",-12.771173477172852],["▁мови",-12.771181106567385],["▁národ",-12.771195411682127],["▁Kuna",-12.771212577819824],["വല",-12.77121925354004],["iile",-12.771221160888672],["▁qazan",-12.771272659301758],["股份有限公司",-12.771273612976074],["▁രാഷ്ട്രീയ",-12.771275520324709],["ထူး",-12.771294593811035],["ជូន",-12.7712984085083],["▁వద్ద",-12.771327018737791],["стры",-12.771339416503906],["▁Eur",-12.77137565612793],["▁Час",-12.771394729614258],["▁পার",-12.771407127380373],["ág",-12.771414756774902],["▁kỷ",-12.771421432495115],["ჭირ",-12.7714262008667],["▁Þú",-12.771478652954102],["▁liegen",-12.77149772644043],["masına",-12.771498680114746],["▁eksperiment",-12.771504402160645],["▁acc",-12.771506309509276],["treten",-12.77151584625244],["▁spē",-12.771529197692873],["îş",-12.771535873413086],["▁sebagaimana",-12.771538734436035],["ရပ္",-12.771541595458984],["لىكى",-12.771561622619627],["لىرىنىڭ",-12.77163314819336],["▁കണ്ടു",-12.771663665771484],["▁gue",-12.771698951721191],["▁റി",-12.77171230316162],["posição",-12.771730422973633],["upe",-12.771758079528809],["тинг",-12.771759033203123],["▁brutal",-12.771770477294922],["▁Nina",-12.771793365478516],["▁BH",-12.77186679840088],["▁nyingi",-12.771872520446776],["ຕາມ",-12.771900177001951],["▁आवड",-12.771900177001951],["▁erfahren",-12.77191925048828],["jaan",-12.771946907043455],["▁aufge",-12.771986961364746],["▁жо",-12.772000312805176],["იღ",-12.772010803222656],["▁muuten",-12.772012710571287],["▁månad",-12.772021293640137],["▁uudis",-12.772025108337402],["▁fama",-12.772049903869627],["▁fehér",-12.772056579589844],["▁რამ",-12.77207374572754],["▁muz",-12.772075653076172],["▁risco",-12.772103309631348],["lanmış",-12.772149085998535],["胃",-12.772149085998535],["ಠ",-12.772150039672852],["jir",-12.772164344787598],["▁çështje",-12.772173881530762],["▁রহমান",-12.772173881530762],["▁ਪੁਲਿਸ",-12.772173881530762],["▁பள்ளி",-12.77217960357666],["▁assunto",-12.772180557250977],["ίν",-12.772187232971191],["อะ",-12.772191047668455],["chwa",-12.772198677062988],["▁להגיע",-12.772214889526367],["▁ec",-12.772222518920898],["meyen",-12.772242546081545],["kazuje",-12.772255897521973],["▁تې",-12.772297859191896],["▁pull",-12.772299766540527],["▁הזמן",-12.772299766540527],["▁ಅದರ",-12.772314071655272],["▁روسیه",-12.772323608398438],["▁dolayı",-12.772327423095703],["▁tett",-12.772330284118652],["vrat",-12.772332191467283],["▁دیتے",-12.772343635559082],["▁празник",-12.77235221862793],["▁մտ",-12.772354125976562],["工作人员",-12.77244472503662],["väg",-12.772455215454102],["▁പര",-12.772478103637695],["ڪس",-12.772500038146973],["▁udvalg",-12.772505760192873],["▁conveni",-12.772528648376465],["▁reality",-12.772553443908691],["angal",-12.772565841674805],["▁Kristo",-12.772591590881348],["▁конкурент",-12.772597312927246],["▁Lista",-12.772610664367676],["anu",-12.77261447906494],["ੱਖ",-12.77263641357422],["▁продолжи",-12.772724151611328],["▁Oma",-12.772751808166504],["▁tiedot",-12.77276611328125],["▁ବିଜେପି",-12.77277660369873],["▁eduki",-12.772807121276855],["ுவ",-12.772809028625488],["▁episode",-12.772867202758787],["▁tokia",-12.772892951965332],["▁Cơ",-12.772894859313965],["פני",-12.772910118103027],["ವಿನ",-12
[... SentencePiece unigram tokenizer vocabulary (apparently model/sentence-transformer/unigram.json): a single very long JSON line of ["piece", log_probability] entries, e.g. ["▁Kapa",-12.772948265075684], where "▁" marks a word-initial piece; the list runs to many thousands of entries and is truncated here ...]
sah",-12.80850887298584],["▁MK",-12.80854320526123],["▁технолог",-12.808585166931152],["hv",-12.808618545532228],["ολογία",-12.8086519241333],["▁Եր",-12.80866241455078],["▁දැ",-12.80866527557373],["▁comprend",-12.808710098266602],["պի",-12.80875015258789],["೩",-12.808757781982422],["▁Dağlıq",-12.808757781982422],["สมัย",-12.808758735656738],["▁עיצוב",-12.808758735656738],["▁මන්ත්",-12.808758735656738],["▁සමාගම",-12.808759689331056],["▁پہلی",-12.808767318725586],["▁jude",-12.8087739944458],["▁نمایید",-12.808777809143066],["мб",-12.80884075164795],["щото",-12.80886173248291],["▁цялата",-12.808874130249023],["▁UT",-12.80890655517578],["Eu",-12.808917999267578],["แก้ว",-12.80893611907959],["▁மணி",-12.808948516845703],["▁пайдалану",-12.808963775634766],["▁communi",-12.808967590332031],["-05-",-12.80897045135498],["▁گوش",-12.808979988098145],["妻",-12.809036254882812],["▁дії",-12.809040069580078],["▁يوه",-12.80906105041504],["▁længere",-12.809142112731934],["াম",-12.809154510498049],["▁improve",-12.809176445007324],["▁ეროვნული",-12.809198379516602],["▁elkar",-12.809206008911133],["▁ogranicz",-12.809211730957031],["▁Tao",-12.809237480163574],["▁ruim",-12.809314727783203],["▁сваю",-12.809319496154783],["▁نیم",-12.809364318847656],["▁يول",-12.80937671661377],["▁aige",-12.809381484985352],["▁Sunday",-12.809450149536133],["▁bağ",-12.809467315673828],["वून",-12.80951690673828],["gesetzt",-12.809523582458496],["Ų",-12.809526443481444],["▁simpati",-12.80953311920166],["▁fejl",-12.809537887573242],["▁oportunidad",-12.809541702270508],["▁normativa",-12.80955696105957],["▁jual",-12.80955982208252],["ပြင်",-12.80959129333496],["▁فرانس",-12.809629440307615],["▁سوالات",-12.809633255004885],["▁kontekst",-12.809642791748049],["▁маркетинг",-12.809647560119627],["messe",-12.809684753417969],["ດັ່ງກ່າວ",-12.809688568115234],["▁conhecimento",-12.80968952178955],["▁متاثر",-12.80969524383545],["▁등이",-12.80970287322998],["禁止",-12.809718132019045],["över",-12.809738159179688],["wezi",-12.809747695922852],["နက္",-12.809764862060549],["پى",-12.809813499450684],["▁lecz",-12.809836387634276],["▁(22",-12.80985164642334],["▁қой",-12.809852600097656],["▁računa",-12.809882164001465],["▁Wit",-12.809887886047363],["▁obci",-12.809900283813477],["▁songs",-12.809944152832031],["▁prawo",-12.809958457946776],["▁학교",-12.810001373291016],["▁ਵਾਲੀ",-12.810011863708496],["▁여자",-12.810013771057127],["▁Court",-12.810035705566406],["▁بحر",-12.810073852539062],["யர்",-12.81011199951172],["automat",-12.8101167678833],["ړ",-12.81011962890625],["▁olo",-12.810136795043944],["flo",-12.81014347076416],["▁crédito",-12.81016731262207],["יפו",-12.810185432434082],["▁trade",-12.810187339782717],["цията",-12.810205459594728],["tsin",-12.810209274291992],["вое",-12.810219764709473],["1998",-12.810242652893066],["คลิป",-12.810254096984863],["▁gravid",-12.81026554107666],["▁काढ",-12.810279846191406],["▁kinder",-12.810285568237305],["шће",-12.810335159301758],["▁Kya",-12.810354232788086],["▁dda",-12.810429573059082],["自分で",-12.81043815612793],["▁મુ",-12.810455322265623],["▁usted",-12.81046199798584],["міз",-12.810466766357422],["uld",-12.810490608215332],["▁야",-12.810492515563965],["personal",-12.810494422912598],["বাদ",-12.810556411743164],["▁општина",-12.810559272766112],["▁trafi",-12.810599327087402],["▁Hair",-12.810613632202148],["ばかり",-12.81061553955078],["▁රාජපක්ෂ",-12.810623168945312],["▁μηχαν",-12.810626983642578],["添加",-12.810649871826172],["เท้า",-12.81067943572998],["▁hadda",-12.810690879821776],["▁వంటి",-12.81069278717041],["▁எதிர்",-12.8107318878
17385],["خير",-12.810749053955078],["▁pedir",-12.810750007629396],["▁сондай",-12.81077003479004],["▁filozofi",-12.81081199645996],["▁пя",-12.810860633850098],["poko",-12.810873031616213],["ię",-12.810877799987791],["質問",-12.810911178588867],["ესი",-12.810941696166992],["▁preparar",-12.810956001281738],["▁ներկայաց",-12.810961723327637],["▁കുറ്റ",-12.810979843139648],["kkaan",-12.810997009277344],["▁Tarif",-12.810998916625977],["ຕາ",-12.811006546020508],["▁pourrait",-12.811040878295898],["▁Parlamento",-12.811049461364746],["ьным",-12.811060905456545],["▁хүр",-12.81108570098877],["лев",-12.811097145080566],["▁iż",-12.811100006103516],["▁लगाए",-12.81110668182373],["ումներ",-12.811135292053224],["▁criar",-12.811163902282717],["造型",-12.811230659484863],["pón",-12.811253547668455],["▁keuze",-12.811272621154783],["დია",-12.81130027770996],["▁Teatro",-12.81131649017334],["ก็จะ",-12.811331748962402],["▁ਕਾਰਨ",-12.8113431930542],["steen",-12.811370849609377],["tilgan",-12.81137752532959],["тол",-12.81138801574707],["▁mbali",-12.811393737792969],["▁орын",-12.811408996582031],["झा",-12.811418533325195],["▁kele",-12.811440467834473],["وٽ",-12.8114595413208],["檢",-12.811477661132812],["▁ствари",-12.811483383178713],["▁რ",-12.811485290527344],["▁munca",-12.81152057647705],["吳",-12.811532020568848],["▁अफ",-12.811545372009276],["▁ଟି",-12.811551094055176],["投稿",-12.811553001403809],["กีฬา",-12.81155490875244],["▁Москве",-12.811555862426758],["▁безпеки",-12.811555862426758],["▁বিভিন্ন",-12.811555862426758],["▁завдання",-12.811561584472656],["กรณี",-12.811565399169922],["▁Temmuz",-12.811566352844238],["▁rrezik",-12.81157112121582],["▁bryster",-12.811578750610352],["▁Κι",-12.811591148376465],["tach",-12.811606407165527],["▁taona",-12.811617851257324],["จํานวน",-12.811636924743652],["▁vía",-12.811667442321776],["цин",-12.811671257019045],["tök",-12.811692237854004],["इन",-12.8117036819458],["▁Fore",-12.811726570129396],["▁ceremoni",-12.811727523803713],["▁previ",-12.811731338500977],["▁المه",-12.811731338500977],["ự",-12.811753273010254],["ifikasi",-12.811755180358888],["▁sean",-12.811777114868164],["▁moviment",-12.811826705932615],["子供",-12.811830520629885],["løse",-12.811845779418944],["▁Many",-12.811854362487791],["▁değiş",-12.811859130859377],["発表",-12.811863899230955],["▁sahabat",-12.811874389648438],["личен",-12.811880111694336],["▁jatuh",-12.811932563781738],["▁затвор",-12.811949729919434],["▁Öl",-12.811966896057127],["▁bakit",-12.81202793121338],["▁scritto",-12.812047958374023],["▁aumentar",-12.812067031860352],["▁күт",-12.812078475952148],["ages",-12.812081336975098],["vku",-12.812082290649414],["bûna",-12.812089920043944],["▁rost",-12.812098503112791],["ਵਾਰ",-12.812103271484377],["そうな",-12.812121391296388],["ntia",-12.812172889709473],["عار",-12.812190055847168],["更有",-12.812199592590332],["▁právě",-12.812207221984863],["ນໍາ",-12.812228202819824],["▁kø",-12.81228733062744],["精彩",-12.81230640411377],["ימ",-12.81231689453125],["▁Naib",-12.81232452392578],["▁വീഡിയോ",-12.812337875366213],["部門",-12.812337875366213],["รั",-12.812347412109377],["grup",-12.81238079071045],["-10-",-12.812386512756348],["▁įvair",-12.81239891052246],["▁Bari",-12.812420845031738],["▁2000.",-12.812461853027344],["▁affect",-12.812490463256836],["▁Riz",-12.81250286102295],["▁baxay",-12.81251621246338],["gid",-12.812520027160645],["▁газет",-12.812528610229492],["ନୁ",-12.812542915344238],["▁सीमा",-12.812542915344238],["ԻՆ",-12.812552452087402],["päev",-12.812557220458984],["▁Нар",-12.812569618225098],["▁различных",-12.812570571899414],["
stiti",-12.812583923339844],["ону",-12.812589645385742],["▁майже",-12.812591552734377],["▁verze",-12.81260871887207],["▁કારણ",-12.812625885009766],["▁vota",-12.812644958496094],["bawi",-12.812651634216309],["าน",-12.812674522399902],["သတ္",-12.812674522399902],["పడ",-12.812724113464355],["▁işləri",-12.812726974487305],["▁Nije",-12.812745094299316],["▁нове",-12.812751770019531],["時は",-12.81277084350586],["▁skabe",-12.812789916992188],["шей",-12.812790870666504],["▁autoridades",-12.812804222106934],["▁կողմ",-12.81281280517578],["▁משהו",-12.812838554382324],["▁úsáid",-12.812840461730955],["▁Și",-12.812865257263184],["▁Vend",-12.81287956237793],["▁เขต",-12.812881469726562],["▁náš",-12.812911033630373],["ුවේ",-12.812917709350586],["▁comanda",-12.812926292419434],["kár",-12.813000679016112],["▁mood",-12.813039779663086],["▁დედა",-12.813045501708984],["▁պարզ",-12.813048362731934],["▁العرب",-12.813098907470703],["▁طلا",-12.81312084197998],["LOR",-12.813121795654297],["ሚያ",-12.81313419342041],["HK",-12.813152313232422],["ဆု",-12.81316089630127],["rād",-12.813187599182127],["にとって",-12.813202857971191],["▁määri",-12.813214302062988],["shta",-12.813217163085938],["ירן",-12.813237190246582],["▁harta",-12.813275337219238],["▁дов",-12.813292503356934],["cci",-12.813295364379885],["ěř",-12.813319206237791],["sura",-12.813350677490234],["▁sperma",-12.813360214233398],["▁మరి",-12.813371658325195],["粒",-12.813386917114258],["▁examen",-12.81340217590332],["溪",-12.81340503692627],["▁രാജ",-12.8134126663208],["▁আটক",-12.813430786132812],["▁ልዩ",-12.813430786132812],["▁Ελλάδας",-12.813431739807127],["▁efect",-12.813446998596191],["jimo",-12.813447952270508],["▁окуу",-12.813508987426758],["%。",-12.813511848449709],["▁نئے",-12.813565254211426],["▁Justin",-12.8135986328125],["road",-12.813613891601562],["▁اٿ",-12.813620567321776],["▁ഇഷ്ട",-12.813665390014648],["▁lande",-12.813668251037598],["▁farma",-12.81368923187256],["рев",-12.813698768615724],["khi",-12.81369972229004],["▁läpi",-12.813716888427734],["▁Rahman",-12.813721656799316],["▁තුල",-12.813725471496582],["▁berasal",-12.813746452331545],["rike",-12.8137788772583],["▁болест",-12.813796043395996],["▁procede",-12.81380844116211],["ଗା",-12.81381130218506],["▁پذیر",-12.813828468322754],["žuje",-12.81383991241455],["▁bilim",-12.813862800598145],["▁ఎక్కువ",-12.81387424468994],["▁ເອ",-12.813876152038574],["▁కార్",-12.813905715942385],["官网",-12.813908576965332],["▁الولايات",-12.813919067382812],["般",-12.813926696777344],["▁zahtev",-12.81394100189209],["ياب",-12.81395149230957],["▁sise",-12.814027786254885],["เทคโนโลยี",-12.814045906066896],["gré",-12.814066886901855],["jącej",-12.814079284667969],["ზო",-12.814088821411133],["▁laske",-12.814114570617676],["νει",-12.814194679260254],["χος",-12.814212799072266],["ڪل",-12.81424331665039],["▁Genom",-12.81426239013672],["▁नेतृत्व",-12.8142671585083],["▁3%",-12.8142728805542],["биз",-12.814300537109377],["děj",-12.81430435180664],["▁சே",-12.81431007385254],["▁gou",-12.814339637756348],["脂肪",-12.81434440612793],["▁ہونا",-12.814346313476562],["▁manis",-12.81435203552246],["▁പദ്ധതി",-12.81436252593994],["▁khổ",-12.814371109008787],["▁σχετικά",-12.81437873840332],["▁नमः",-12.81440258026123],["共享",-12.814408302307127],["▁المح",-12.814430236816406],["▁rado",-12.814433097839355],["▁فراهم",-12.814446449279783],["▁кры",-12.814448356628418],["โอกาส",-12.814451217651367],["ที่ไม่",-12.814467430114746],["ući",-12.814476013183594],["ИК",-12.814541816711426],["▁קוד",-12.814555168151855],["como",-12.814556121826172],["▁asigura",-12.81455898
284912],["▁upozor",-12.81457805633545],["▁кро",-12.814582824707031],["▁Doctor",-12.814584732055664],["▁മലയാളി",-12.814620971679688],["gredi",-12.81462574005127],["▁documentos",-12.8146390914917],["studio",-12.814640045166016],["ුවා",-12.81464958190918],["▁shir",-12.814659118652344],["▁बर्",-12.814702987670898],["qda",-12.814706802368164],["antaj",-12.814732551574709],["▁dét",-12.814754486083984],["▁mulher",-12.814770698547363],["дів",-12.814799308776855],["icum",-12.81481647491455],["▁Legend",-12.81482219696045],["▁aktyw",-12.814865112304688],["▁прис",-12.814885139465332],["▁შევ",-12.814885139465332],["珍",-12.814886093139648],["îm",-12.81489086151123],["▁testo",-12.814902305603027],["▁бро",-12.81495761871338],["탈",-12.814981460571287],["▁rust",-12.814990997314451],["▁Town",-12.814992904663086],["တရား",-12.815032958984377],["лени",-12.81505298614502],["▁міськ",-12.81506061553955],["▁առաջարկ",-12.815067291259766],["▁συντ",-12.815082550048828],["ദാ",-12.81508731842041],["▁bronz",-12.815104484558104],["ICI",-12.815109252929688],["▁koll",-12.815114974975586],["されました",-12.815114974975586],["asema",-12.81513500213623],["inās",-12.81513500213623],["▁nifer",-12.815146446228027],["▁ostatní",-12.815149307250977],["τον",-12.815166473388672],["yddol",-12.815168380737305],["tanud",-12.815184593200684],["▁существ",-12.815195083618164],["▁Pare",-12.815214157104492],["▁Pena",-12.81521701812744],["▁jab",-12.815234184265137],["קרה",-12.815262794494627],["▁alls",-12.815266609191896],["ailleurs",-12.815272331237791],["ን፣",-12.815272331237791],["ლუ",-12.815274238586426],["▁prvom",-12.815288543701172],["nčių",-12.815299987792969],["▁आगामी",-12.815299987792969],["▁ಬಿಡುಗಡೆ",-12.815299987792969],["▁stejně",-12.815300941467283],["BT",-12.815309524536133],["▁តើ",-12.815309524536133],["▁velge",-12.815315246582031],["收集",-12.815346717834473],["▁ሊያ",-12.815353393554688],["▁naken",-12.81536102294922],["▁Lite",-12.8153715133667],["[6]",-12.815377235412598],["▁مواقع",-12.815381050109863],["▁الموقع",-12.81539821624756],["▁keha",-12.815400123596191],["nicima",-12.815403938293455],["വുമായി",-12.815403938293455],["▁Baha",-12.815404891967772],["▁Dich",-12.815471649169922],["▁organisasi",-12.815479278564451],["▁דעת",-12.815479278564451],["tatás",-12.81548023223877],["▁tenê",-12.815502166748049],["▁NS",-12.815519332885742],["ന്റ്",-12.815543174743652],["▁trek",-12.815553665161133],["▁Umum",-12.815580368041992],["llin",-12.815592765808104],["▁operasi",-12.81560516357422],["lange",-12.815606117248535],["対策",-12.815625190734863],["▁ئىدى",-12.81564998626709],["ன்னு",-12.815651893615724],["▁центру",-12.815699577331545],["esség",-12.815714836120604],["ຍາ",-12.815729141235352],["ંક",-12.815796852111816],["ininkai",-12.815802574157717],["तर्फ",-12.815810203552246],["عارض",-12.815825462341309],["Бат",-12.815828323364258],["▁لذا",-12.815835952758787],["ງານ",-12.815839767456056],["▁cinci",-12.815876960754396],["plica",-12.81590175628662],["▁Booking",-12.81593132019043],["▁цифр",-12.81595516204834],["kalan",-12.815963745117188],["▁vsaj",-12.815963745117188],["даць",-12.815979957580566],["▁14:00",-12.815984725952148],["צת",-12.81601619720459],["▁mégis",-12.816020011901855],["শি",-12.81605052947998],["BN",-12.81605625152588],["▁šim",-12.816068649291992],["êre",-12.816076278686523],["शील",-12.81607723236084],["atif",-12.816086769104004],["ናል",-12.81609058380127],["▁aday",-12.816128730773926],["母親",-12.816173553466797],["▁lien",-12.816180229187012],["uosi",-12.81618595123291],["其它",-12.816194534301758],["▁МО",-12.816228866577148],["▁føler",-12.81
6234588623049],["▁mjestu",-12.816238403320312],["▁Gracias",-12.816250801086426],["ымыз",-12.816251754760742],["▁ਸਾਰੇ",-12.816251754760742],["мұ",-12.816255569458008],["▁wurdt",-12.816255569458008],["▁நேர",-12.816261291503906],["▁ارشد",-12.81627082824707],["schein",-12.81627368927002],["▁бонус",-12.816276550292969],["高的",-12.81634521484375],["▁ingat",-12.816346168518066],["▁určite",-12.816351890563965],["▁bijna",-12.816353797912598],["▁reven",-12.816367149353027],["▁Medio",-12.816370010375977],["▁grupe",-12.816375732421877],["▁formazione",-12.816387176513672],["▁maður",-12.816389083862305],["stina",-12.816434860229492],["▁بچه",-12.816463470458984],["▁करणे",-12.816469192504885],["шне",-12.816473007202148],["▁SIA",-12.816483497619627],["▁temperature",-12.816487312316896],["▁accepta",-12.81651210784912],["▁fila",-12.816540718078612],["ນີ",-12.816563606262209],["ичка",-12.816604614257812],["ətə",-12.816622734069824],["▁Rich",-12.816637992858888],["▁халық",-12.816644668579102],["πολ",-12.816657066345217],["cali",-12.816690444946287],["екс",-12.816699028015137],["Sal",-12.816701889038086],["він",-12.816706657409668],["város",-12.816739082336426],["▁Num",-12.816768646240234],["▁تاریخی",-12.81676959991455],["МС",-12.816784858703612],["話題",-12.816798210144045],["LIG",-12.816808700561523],["▁marche",-12.816822052001951],["▁Kto",-12.816826820373535],["▁producent",-12.8168306350708],["▁žu",-12.816835403442385],["▁Város",-12.816839218139648],["ԱՏ",-12.81684684753418],["لاء",-12.816854476928713],["▁አንዱ",-12.816858291625977],["▁cineva",-12.816875457763672],["mental",-12.816912651062012],["িল",-12.81692600250244],["▁ಹಣ",-12.816939353942873],["▁muud",-12.81696891784668],["ปู",-12.817017555236816],["ída",-12.817062377929688],["stelu",-12.817087173461914],["əli",-12.817087173461914],["▁សំ",-12.81710720062256],["▁реализ",-12.81711196899414],["▁сигурно",-12.817123413085938],["▁trh",-12.817131042480469],["▁ዓመታት",-12.817176818847656],["▁hjerte",-12.817177772521973],["▁පළමු",-12.817184448242188],["▁septembra",-12.817188262939451],["▁غذایی",-12.81718921661377],["▁Pina",-12.817197799682615],["▁janvier",-12.817198753356934],["▁dítě",-12.81719970703125],["▁лепш",-12.81720733642578],["nefnd",-12.817253112792969],["▁Wala",-12.817253112792969],["榮",-12.8173246383667],["۳",-12.817331314086914],["andra",-12.817363739013672],["▁үе",-12.817371368408203],["csa",-12.81739902496338],["▁ሙ",-12.817415237426758],["izer",-12.8174467086792],["▁jir",-12.817500114440918],["▁გამოი",-12.81751823425293],["anche",-12.817548751831056],["явление",-12.81756591796875],["▁پانچ",-12.817575454711914],["▁revol",-12.817591667175291],["▁frukt",-12.817608833312988],["eş",-12.817609786987305],["уди",-12.817667961120604],["ledi",-12.817687034606934],["άζει",-12.817754745483398],["ഷ്ട",-12.817781448364258],["▁poema",-12.8178129196167],["▁ڌ",-12.817829132080078],["▁وكل",-12.817837715148926],["ladh",-12.817845344543455],["ړه",-12.817856788635254],["▁juliol",-12.817859649658203],["▁ప్రధాన",-12.81786823272705],["▁राजनीति",-12.817875862121582],["▁dvou",-12.817899703979492],["▁bäst",-12.81790256500244],["चो",-12.817904472351074],["▁besøg",-12.817912101745604],["чить",-12.817934036254885],["xel",-12.817943572998049],["▁ormai",-12.817944526672363],["lium",-12.81797981262207],["бү",-12.817983627319336],["▁քաղաք",-12.817986488342283],["▁khô",-12.818010330200195],["േയും",-12.818038940429688],["▁ХХ",-12.818060874938965],["KIN",-12.818098068237305],["kooli",-12.818115234375],["▁moontlik",-12.818119049072266],["▁julio",-12.818133354187012],["▁escorte",-12.81813621520
996],["denken",-12.818143844604492],["▁சிறு",-12.818150520324709],["▁çevre",-12.818199157714844],["▁historii",-12.818227767944336],["▁Fahr",-12.8182373046875],["bond",-12.818244934082031],["▁miért",-12.818244934082031],["изъм",-12.818267822265623],["ядзе",-12.81830596923828],["PG",-12.818310737609863],["երին",-12.81833553314209],["▁attack",-12.818337440490724],["▁цим",-12.818337440490724],["▁solidari",-12.81834602355957],["szczy",-12.818440437316896],["▁lac",-12.818493843078612],["▁ಒಂದ",-12.818513870239258],["சன்",-12.81851863861084],["▁Chama",-12.818523406982422],["гада",-12.81853199005127],["▁Vec",-12.81853485107422],["▁külön",-12.818557739257812],["иле",-12.818602561950684],["▁spletni",-12.818633079528809],["▁Florida",-12.818638801574709],["▁Need",-12.818719863891602],["ியது",-12.818732261657717],["lind",-12.81873893737793],["şar",-12.818756103515623],["НІ",-12.818818092346191],["▁व्यवसाय",-12.818841934204102],["▁types",-12.818848609924316],["▁लाल",-12.818876266479492],["▁dauka",-12.81888484954834],["위원회",-12.818903923034668],["▁hodně",-12.81890869140625],["▁әрі",-12.818923950195312],["ири",-12.818949699401855],["mişti",-12.819008827209473],["▁çeşitli",-12.819014549255373],["▁nomi",-12.819015502929688],["เฉพาะ",-12.819031715393066],["▁Làm",-12.819032669067385],["▁هیواد",-12.81903839111328],["œ",-12.819048881530762],["٠",-12.819052696228027],["▁menjelaskan",-12.81905746459961],["▁thấp",-12.81905746459961],["▁ගුවන්",-12.81905746459961],["▁이러한",-12.81905746459961],["▁ମୁଁ",-12.819061279296877],["▁حرفه",-12.819083213806152],["学会",-12.819103240966797],["▁Fam",-12.81911563873291],["之下",-12.819128036499023],["▁ಎಂದ",-12.819151878356934],["▁Маг",-12.819167137145996],["▁escritor",-12.819198608398438],["▁Hack",-12.819212913513184],["raí",-12.8192138671875],["อบ",-12.819217681884766],["▁Васил",-12.819226264953612],["лих",-12.81923007965088],["▁Rit",-12.819254875183104],["чний",-12.819268226623535],["消防",-12.819269180297852],["留学",-12.819270133972168],["ഹി",-12.81933879852295],["րո",-12.819350242614746],["ંત",-12.819371223449709],["▁හො",-12.819384574890137],["▁Dai",-12.81938648223877],["işti",-12.819397926330566],["▁feest",-12.819424629211426],["знание",-12.81942653656006],["vaj",-12.819428443908691],["▁Skype",-12.819450378417969],["chef",-12.819463729858398],["▁imati",-12.819499969482422],["▁ਡ",-12.819502830505373],["▁ఎన్టీఆర్",-12.81954288482666],["▁Bayern",-12.819589614868164],["▁Сі",-12.819595336914062],["衛生",-12.819622039794922],["єдн",-12.81963062286377],["▁gosta",-12.8197660446167],["▁hinta",-12.819788932800291],["օ",-12.819822311401367],["NER",-12.819829940795898],["дад",-12.819854736328123],["呈現",-12.81985569000244],["شرف",-12.819863319396973],["▁liela",-12.819866180419922],["▁domeniul",-12.819915771484377],["▁karte",-12.81994915008545],["starf",-12.819971084594728],["món",-12.819974899291992],["▁ھا",-12.819988250732422],["ເບິ່ງທາງ",-12.820000648498535],["▁ünnep",-12.820001602172852],["▁어느",-12.820002555847168],["ในช่วง",-12.820006370544434],["صيب",-12.820009231567385],["▁drejtë",-12.820012092590332],["▁সকল",-12.820012092590332],["gezet",-12.82002067565918],["▁манастир",-12.82003402709961],["▁گاز",-12.820043563842772],["▁каква",-12.820055961608888],["大阪",-12.82005786895752],["зви",-12.82009983062744],["▁langer",-12.820107460021973],["lenme",-12.820110321044922],["▁(2017)",-12.820110321044922],["rost",-12.820113182067873],["koń",-12.820122718811035],["▁Kje",-12.82013702392578],["▁второй",-12.820149421691896],["offer",-12.820199966430664],["ገኘ",-12.820213317871094],["▁inclusiv",-12.820229530334
473],["▁virker",-12.82023811340332],["ění",-12.820239067077637],["ГО",-12.820239067077637],["▁ایمان",-12.820252418518066],["லும்",-12.820261001586914],["▁билет",-12.820271492004396],["的能力",-12.820279121398926],["isé",-12.82029151916504],["uzu",-12.820313453674316],["▁దేశ",-12.820316314697266],["妻子",-12.820324897766112],["▁धु",-12.82032585144043],["▁sagði",-12.820337295532228],["▁שהם",-12.820348739624023],["▁Mile",-12.82034969329834],["usia",-12.820375442504885],["▁шах",-12.820387840270996],["▁материали",-12.82040023803711],["▁Saf",-12.82040309906006],["▁مطالعه",-12.820408821105955],["▁מדי",-12.820414543151855],["▁scena",-12.820439338684082],["▁จํานวน",-12.82044792175293],["▁Bry",-12.820449829101562],["▁kakovost",-12.820486068725586],["▁Xasan",-12.82052993774414],["▁ถึง",-12.820530891418455],["▁ဤ",-12.82058811187744],["UG",-12.820622444152832],["ಗೊಳಿಸ",-12.820636749267578],["აბ",-12.820652961730955],["ubah",-12.820653915405272],["▁cancel",-12.820677757263184],["taba",-12.820693016052246],["▁education",-12.820711135864258],["meden",-12.820771217346191],["▁sellise",-12.82077693939209],["同志",-12.820815086364746],["▁cambios",-12.820823669433594],["▁disponibili",-12.820842742919922],["ลม",-12.820849418640137],["▁Div",-12.820867538452148],["▁жаб",-12.820890426635742],["່າ",-12.820911407470703],["全世界",-12.82094669342041],["але",-12.820950508117676],["▁квалитет",-12.820956230163574],["৩",-12.821001052856444],["mistä",-12.821014404296877],["▁דקות",-12.82101821899414],["▁Ευ",-12.821022987365724],["▁tret",-12.82102870941162],["党的",-12.821059226989746],["ceri",-12.821072578430176],["▁visiem",-12.821101188659668],["▁elfogad",-12.821120262145996],["ធ",-12.821139335632324],["ിന",-12.821147918701172],["▁Acum",-12.821149826049805],["veti",-12.821165084838867],["▁Bosne",-12.821170806884766],["▁порно",-12.821185111999512],["leben",-12.821187019348145],["шар",-12.821196556091309],["Mas",-12.821209907531738],["▁배우",-12.8212308883667],["ителни",-12.821234703063965],["orde",-12.821242332458496],["▁marque",-12.82125759124756],["น่ารัก",-12.821288108825684],["adar",-12.821290969848633],["ბრი",-12.821298599243164],["▁уда",-12.821311950683594],["полит",-12.821318626403809],["ічних",-12.821331977844238],["්න",-12.82137393951416],["▁नही",-12.821377754211426],["意思",-12.821392059326172],["pię",-12.821404457092283],["▁polas",-12.821416854858398],["▁patrimonio",-12.821443557739258],["▁ზ",-12.821462631225586],["▁ชั้น",-12.821467399597168],["▁rendre",-12.821480751037598],["▁တစ်",-12.821480751037598],["тыў",-12.821497917175291],["llum",-12.821503639221191],["▁但",-12.821537017822266],["▁pho",-12.821539878845217],["尊",-12.821544647216797],["छा",-12.821563720703123],["▁cilvēki",-12.821566581726074],["▁saanut",-12.821572303771973],["▁перспектив",-12.82158088684082],["比例",-12.821582794189451],["vör",-12.82158660888672],["▁nik",-12.821598052978516],["▁לצ",-12.821626663208008],["암",-12.821645736694336],["▁viaggio",-12.82166576385498],["ходит",-12.821674346923828],["年の",-12.821735382080078],["▁polic",-12.821755409240724],["▁techniques",-12.821775436401367],["uring",-12.821794509887695],["▁ຂ",-12.821805000305176],["▁yote",-12.821813583374023],["▁Disc",-12.821816444396973],["扎",-12.821817398071287],["▁четвер",-12.821823120117188],["▁companies",-12.821839332580566],["▁Jeżeli",-12.821885108947754],["▁bazen",-12.821885108947754],["▁වැදගත්",-12.821885108947754],["▁industry",-12.82188606262207],["▁respublika",-12.82188606262207],["▁Sơn",-12.821887969970703],["▁kõrval",-12.821893692016602],["▁septembre",-12.821897506713867],["▁marki",-12.8219
04182434082],["▁жы",-12.821907997131348],[".........",-12.82192325592041],["▁ఉన్నాయి",-12.821932792663574],["یات",-12.822071075439451],["▁ช",-12.82207202911377],["▁tramite",-12.822091102600098],["▁הזו",-12.822094917297363],["လမ္း",-12.822098731994627],["▁Zaman",-12.822099685668944],["▁آسمان",-12.822124481201172],["▁МА",-12.822139739990234],["▁prou",-12.822202682495115],["▁שוב",-12.822224617004396],["სული",-12.822246551513672],["ιγ",-12.822272300720217],["▁katerih",-12.822355270385742],["▁үл",-12.822376251220703],["▁socda",-12.822388648986816],["szych",-12.822400093078612],["▁język",-12.822402000427246],["▁kaitse",-12.82241153717041],["fí",-12.822428703308104],["▁pristup",-12.822443008422852],["хаа",-12.822446823120115],["▁identitet",-12.822455406188965],["▁ئار",-12.82247829437256],["▁Waxaan",-12.82248306274414],["ємо",-12.822524070739746],["▁garage",-12.822538375854492],["ரின்",-12.82256317138672],["▁verilən",-12.822571754455566],["▁hitro",-12.822620391845703],["Ł",-12.822633743286133],["▁बघ",-12.822672843933104],["entra",-12.822694778442385],["ଜେ",-12.822713851928713],["▁lạc",-12.822728157043455],["▁burde",-12.82273006439209],["▁πληρ",-12.822731971740724],["สัน",-12.822734832763672],["不需要",-12.822744369506836],["నో",-12.82275104522705],["▁thua",-12.822759628295898],["siai",-12.822786331176758],["riet",-12.822793006896973],["ेंगे",-12.82282829284668],["▁település",-12.822829246520996],["▁συνέχεια",-12.822829246520996],["▁ድርጅት",-12.822829246520996],["▁kesalahan",-12.822830200195312],["▁বিশেষ",-12.822832107543944],["သမီး",-12.822858810424805],["pite",-12.82285976409912],["bidea",-12.822891235351562],["▁kate",-12.822891235351562],["▁Bakanlığı",-12.822901725769045],["▁montes",-12.822927474975586],["▁konsider",-12.822942733764648],["▁yoz",-12.822945594787598],["acions",-12.822959899902344],["▁chacun",-12.82296657562256],["▁адна",-12.822973251342772],["aggi",-12.822986602783203],["▁mundu",-12.823017120361328],["▁joskus",-12.823025703430176],["▁koncern",-12.823050498962402],["רכי",-12.823062896728516],["▁आएका",-12.823064804077148],["▁шаг",-12.823097229003906],["南京",-12.823100090026855],["▁Galiza",-12.823166847229004],["BRA",-12.82317352294922],["вки",-12.823190689086914],["ريد",-12.823196411132812],["▁huvud",-12.823214530944824],["ിച്ചത്",-12.823240280151367],["ovan",-12.823244094848633],["vēl",-12.823260307312012],["ėjų",-12.823287010192873],["▁uuden",-12.823320388793944],["▁Italien",-12.823354721069336],["▁Gab",-12.823391914367676],["yaya",-12.823406219482422],["▁ਕੁ",-12.823406219482422],["▁moments",-12.823436737060549],["▁Xin",-12.82347011566162],["▁чему",-12.823487281799316],["▁båt",-12.823495864868164],["▁зависит",-12.82349681854248],["▁Eo",-12.823551177978516],["බා",-12.823562622070312],["ผลงาน",-12.823562622070312],["▁Mom",-12.823575973510742],["φο",-12.823578834533691],["ipa",-12.823606491088867],["▁dicho",-12.823607444763184],["▁tree",-12.823615074157717],["qaad",-12.823631286621094],["▁꼭",-12.823637962341309],["께",-12.823671340942385],["▁gefunden",-12.82369327545166],["▁públicas",-12.823704719543455],["কারী",-12.823719024658203],["nium",-12.823770523071287],["▁aspoň",-12.823774337768556],["▁hấp",-12.823774337768556],["▁pembangunan",-12.823774337768556],["увач",-12.823787689208984],["▁agad",-12.823792457580566],["▁ಕೋಟಿ",-12.823803901672363],["▁lilla",-12.82382106781006],["▁magis",-12.823822021484377],["llahu",-12.82382583618164],["арна",-12.823827743530272],["▁करण्यासाठी",-12.82383918762207],["siyle",-12.823845863342283],["есі",-12.82385540008545],["ражение",-12.82386875152588],["▁plaas",
-12.82386875152588],["▁форме",-12.823871612548828],["▁emit",-12.82387638092041],["▁광고",-12.823892593383787],["▁днів",-12.82390594482422],["einek",-12.82392692565918],["hitaji",-12.823930740356444],["ใส",-12.82394313812256],["adress",-12.823970794677734],["▁Jordi",-12.824007987976074],["▁ಏ",-12.824031829833984],["Up",-12.824042320251465],["▁fonte",-12.82405948638916],["▁աս",-12.824076652526855],["▁Jūsu",-12.824124336242676],["роў",-12.824127197265623],["naam",-12.824137687683104],["▁jogos",-12.824163436889648],["żyć",-12.824188232421877],["ေပၚ",-12.824216842651367],["metr",-12.824226379394531],["ಾಯ",-12.824256896972656],["參",-12.824264526367188],["▁tavalla",-12.82427215576172],["▁Engels",-12.824311256408691],["▁اح",-12.824344635009766],["▁lze",-12.82435417175293],["▁motive",-12.824398040771484],["áme",-12.824432373046877],["ಗಾಗಿ",-12.82444667816162],["EY",-12.824460983276367],["atsiooni",-12.82446575164795],["▁الدم",-12.824552536010742],["ياء",-12.82457447052002],["▁Sher",-12.824575424194336],["▁valuta",-12.824576377868652],["ស្ត",-12.82458782196045],["▁Tür",-12.824588775634766],["ታት",-12.824606895446776],["mayan",-12.824609756469728],["ಗೂ",-12.824612617492676],["▁বিশ্ব",-12.824637413024902],["▁pedagog",-12.824644088745115],["▁зээл",-12.824651718139648],["БИ",-12.824660301208496],["ኳ",-12.824710845947266],["▁svjet",-12.824712753295898],["▁thabhairt",-12.82472038269043],["▁गाउँपालिका",-12.824721336364746],["▁डॉक्टर",-12.824726104736328],["▁Lek",-12.824753761291504],["▁análise",-12.824755668640137],["▁Prema",-12.824773788452148],["jumiem",-12.82477855682373],["ДП",-12.824817657470703],["▁ortak",-12.824847221374512],["▁موسم",-12.824858665466309],["押",-12.824870109558104],["μάτων",-12.824892044067385],["oci",-12.824896812438965],["▁corect",-12.824938774108888],["કલ",-12.82494068145752],["▁Cé",-12.824949264526367],["rummet",-12.824957847595217],["бран",-12.824970245361328],["▁raskt",-12.82498550415039],["▁Köp",-12.825011253356934],["▁trabajar",-12.82501220703125],["▁фон",-12.82503890991211],["▁المزيد",-12.825139045715332],["▁diante",-12.825149536132812],["▁administrator",-12.825162887573242],["▁Ref",-12.825164794921877],["▁vostro",-12.82516860961914],["▁nep",-12.82517910003662],["dita",-12.825180053710938],["▁receive",-12.825196266174316],["ائیں",-12.82519817352295],["ბრ",-12.825233459472656],["▁верх",-12.825246810913086],["erland",-12.825272560119627],["▁erit",-12.82528591156006],["▁FE",-12.825347900390623],["▁rester",-12.825368881225586],["▁kaldır",-12.825387954711914],["៍",-12.825399398803713],["▁pazar",-12.825407028198242],["▁սիր",-12.825417518615724],["ulin",-12.825485229492188],["▁қалды",-12.825491905212402],["nr",-12.825522422790527],["يري",-12.825538635253906],["LAND",-12.825560569763184],["▁갈",-12.825563430786133],["▁siguientes",-12.825570106506348],["▁ndio",-12.82559585571289],["▁savaş",-12.825597763061523],["▁komplik",-12.82560920715332],["▁morali",-12.825638771057127],["万人",-12.825657844543455],["▁форму",-12.825660705566406],["▁пријатељ",-12.82566738128662],["ilte",-12.825669288635254],["▁повинні",-12.825669288635254],["▁ئادەم",-12.825669288635254],["▁mwisho",-12.825671195983888],["▁માત્ર",-12.82568645477295],["ებოდა",-12.825691223144531],["▁потім",-12.825701713562012],["▁alltså",-12.825708389282228],["▁further",-12.825714111328123],["liklar",-12.825722694396973],["кър",-12.825743675231934],["▁காண",-12.825754165649414],["पर्",-12.825760841369627],["▁Tirk",-12.825767517089844],["equip",-12.825801849365234],["的情况下",-12.825812339782717],["▁هوندو",-12.82582664489746],["▁ჯა",-12.8258466720
58104],["demokra",-12.825854301452637],["薪",-12.82590389251709],["▁zowel",-12.82591724395752],["永远",-12.825919151306152],["ape",-12.825921058654783],["▁ilg",-12.825925827026367],["etas",-12.825944900512695],["eixen",-12.825950622558594],["▁իրական",-12.82595920562744],["ума",-12.82596206665039],["▁мір",-12.825976371765137],["میر",-12.826003074645996],["▁учнів",-12.826004981994627],["/14",-12.826026916503906],["čného",-12.826043128967283],["tused",-12.826045989990234],["▁podrá",-12.826045989990234],["ლეს",-12.82605266571045],["▁ზო",-12.82605266571045],["的高",-12.826065063476562],["▁تعرف",-12.8261079788208],["▁Paling",-12.826135635375977],["▁постоји",-12.826141357421877],["▁медицина",-12.826146125793455],["▁याद",-12.826162338256836],["▁acestora",-12.826178550720217],["▁kundi",-12.826197624206545],["spot",-12.826223373413086],["▁sharing",-12.82622528076172],["▁sayısı",-12.826260566711426],["▁117",-12.826276779174805],["▁recupera",-12.82627773284912],["ntana",-12.826315879821776],["▁hatine",-12.826326370239258],["▁ជន",-12.826343536376951],["▁listo",-12.826345443725586],["▁Episode",-12.826375961303713],["عين",-12.826391220092772],["ऱ्या",-12.82641887664795],["▁قۇ",-12.826438903808594],["▁happen",-12.826452255249023],["▁lasa",-12.82646942138672],["▁성공",-12.826492309570312],["▁బో",-12.826502799987791],["一句",-12.82650661468506],["ຫລາຍ",-12.826542854309082],["▁oui",-12.826565742492676],["俊",-12.826583862304688],["निक",-12.826602935791016],["гэ",-12.826604843139648],["▁להי",-12.826610565185549],["▁vonatkozó",-12.826615333557127],["▁хвилин",-12.826619148254396],["▁onsdag",-12.826621055603027],["▁demo",-12.826623916625977],["▁않습니다",-12.826624870300291],["积",-12.826652526855469],["▁User",-12.826665878295898],["▁Lor",-12.82667064666748],["▁लागेको",-12.826693534851074],["▁सहभागी",-12.82669734954834],["organiz",-12.826744079589844],["रं",-12.826748847961426],["რგ",-12.826765060424805],["▁desenvolver",-12.826794624328612],["▁pig",-12.826855659484863],["άνε",-12.826871871948242],["▁hanggang",-12.826873779296877],["▁عدة",-12.826952934265137],["▁השני",-12.826953887939451],["▁அடுத்த",-12.826955795288086],["ערן",-12.826963424682615],["▁Kasım",-12.826986312866213],["ეთში",-12.826991081237791],["rette",-12.826997756958008],["▁cannot",-12.827018737792969],["▁Další",-12.82709503173828],["▁kund",-12.827126502990724],["▁Takže",-12.827133178710938],["▁формира",-12.827149391174316],["▁اجتماع",-12.827188491821287],["▁първо",-12.827191352844238],["▁gül",-12.827204704284668],["▁ورته",-12.827207565307615],["▁Sing",-12.827219009399414],["öf",-12.827231407165527],["rova",-12.827250480651855],["▁fixo",-12.827261924743652],["ohjelma",-12.8272705078125],["▁Noi",-12.827288627624512],["▁गलत",-12.827305793762209],["▁Ross",-12.827362060546877],["▁پوه",-12.82736587524414],["ставку",-12.827367782592772],["igan",-12.827377319335938],["公平",-12.82737922668457],["▁отказ",-12.827393531799316],["ьно",-12.827415466308594],["▁ბავშვ",-12.827441215515137],["▁வாங்க",-12.827458381652832],["▁справа",-12.827467918395996],["ଖା",-12.82748794555664],["▁DN",-12.827491760253906],["▁техник",-12.827496528625488],["бак",-12.827503204345703],["▁Глав",-12.827522277832031],["▁dixit",-12.82752799987793],["žila",-12.827539443969728],["舒適",-12.82755184173584],["▁తరువాత",-12.827564239501951],["▁yaklaşık",-12.827566146850586],["▁держави",-12.827580451965332],["ços",-12.827624320983888],["▁ieu",-12.82763957977295],["▁вет",-12.82768440246582],["zij",-12.82769012451172],["ọc",-12.827731132507324],["ŝi",-12.827754974365234],["▁laboris",-12.827805519104004],["▁gaze",-12.
827815055847168],["銷",-12.827823638916016],["יבה",-12.827831268310549],["▁Hub",-12.827844619750977],["छन्",-12.827860832214355],["▁kính",-12.827930450439451],["уса",-12.827956199645996],["rini",-12.827957153320312],["mentar",-12.827961921691896],["▁locui",-12.827963829040527],["▁129",-12.82798194885254],["▁animali",-12.82801628112793],["yba",-12.828068733215332],["ທີ່ຈະ",-12.828068733215332],["ళ్ళు",-12.828084945678713],["▁financial",-12.828104972839355],["անալ",-12.828132629394531],["pras",-12.82813835144043],["Ang",-12.828143119812012],["▁दिली",-12.828152656555176],["拍攝",-12.8281831741333],[":-)",-12.82819938659668],["été",-12.828259468078612],["ધાર",-12.828264236450195],["▁Sebelum",-12.828267097473145],["▁zime",-12.828289031982422],["▁ලැබ",-12.828290939331056],[".7.",-12.828328132629396],["▁чувство",-12.828344345092772],["ாளர்",-12.82834815979004],["ībām",-12.82837963104248],["▁hlavní",-12.828388214111328],["といった",-12.828393936157228],["▁compta",-12.828396797180176],["▁gast",-12.828398704528809],["▁çift",-12.828420639038086],["▁infer",-12.82843780517578],["xas",-12.828444480895996],["▁උඹ",-12.828444480895996],["diel",-12.828453063964844],["▁ਬਾ",-12.828462600708008],["環保",-12.828478813171388],["ホテル",-12.828495979309082],["▁Wow",-12.828502655029297],["▁დასა",-12.828506469726562],["əti",-12.82850742340088],["padu",-12.828509330749512],["▁কর্মকর্তা",-12.828513145446776],["▁octombrie",-12.828514099121094],["▁ଅନେକ",-12.828514099121094],["▁ଅଭିଯୋଗ",-12.82851505279541],["▁Gui",-12.828516960144045],["▁perhatian",-12.828516960144045],["▁асыру",-12.828520774841309],["▁fünf",-12.82852268218994],["▁Γιατί",-12.828527450561523],["▁phổ",-12.828532218933104],["▁izmanto",-12.828547477722168],["▁agree",-12.828548431396484],["מור",-12.828583717346191],["rnar",-12.828585624694824],["തോ",-12.828598976135254],["▁ନେ",-12.828605651855469],["▁навук",-12.828611373901367],["▁вашиот",-12.82865047454834],["▁أبي",-12.828660011291504],["ημάτων",-12.82868194580078],["▁مرتبط",-12.828682899475098],["▁Авто",-12.82868766784668],["▁arata",-12.828710556030272],["▁ću",-12.82872486114502],["nivîs",-12.828734397888184],["▁پير",-12.828742027282717],["▁પહેલા",-12.828774452209473],["▁किए",-12.82882308959961],["▁jinsi",-12.828827857971191],["▁Fog",-12.828857421875],["▁membe",-12.828917503356934],["▁இரண்டு",-12.82893180847168],["ічна",-12.828935623168944],["▁באי",-12.828940391540527],["▁aktive",-12.828950881958008],["κτη",-12.828954696655272],["▁személyes",-12.82895851135254],["වාදය",-12.82898998260498],["tyczne",-12.82900619506836],["▁kaza",-12.829010009765623],["ວນ",-12.82901382446289],["▁setting",-12.829014778137209],["малко",-12.82904052734375],["vega",-12.82910442352295],["ಕ್ಕಾಗಿ",-12.82913303375244],["▁renk",-12.829136848449709],["▁nabi",-12.82919979095459],["▁пораз",-12.829236030578612],["▁meninggalkan",-12.829269409179688],["▁doi",-12.829273223876951],["▁størrelse",-12.829293251037598],["▁radical",-12.829299926757812],["▁choć",-12.829341888427734],["▁розмір",-12.829391479492188],["фра",-12.829407691955566],["郭",-12.829418182373049],["▁mec",-12.829463005065918],["▁سبتمبر",-12.82946491241455],["▁απόφαση",-12.829465866088867],["▁haghaidh",-12.8294677734375],["০০",-12.829468727111816],["ໃຫ້ອ່ານ",-12.82947063446045],["obre",-12.829474449157717],["▁tahan",-12.82950496673584],["▁podataka",-12.829507827758787],["ebe",-12.829511642456056],["වෙන",-12.82951545715332],["▁мерки",-12.829550743103027],["▁Situation",-12.829556465148926],["▁मात्रै",-12.829574584960938],["íomh",-12.82958984375],["▁yaşında",-12.82958984375],["▁formar",-12.829600
33416748],["ৰে",-12.829605102539062],["▁اختلاف",-12.829618453979492],["▁sizi",-12.829630851745604],["▁utilizza",-12.829651832580566],["tinių",-12.829659461975098],["▁سوريا",-12.829672813415527],["ulia",-12.82970142364502],["▁ఇన్",-12.829704284667969],["▁සැ",-12.829739570617676],["▁ນໍາ",-12.829739570617676],["▁Bana",-12.829791069030762],["▁seznam",-12.829794883728027],["▁slutt",-12.829822540283203],["ဖြ",-12.829832077026367],["▁njihov",-12.829839706420898],["▁fiatal",-12.82985496520996],["▁Bert",-12.829874038696287],["tyk",-12.829900741577148],["▁rakst",-12.82990550994873],["զի",-12.829913139343262],["▁اہل",-12.829916000366213],["▁technische",-12.829943656921388],["▁vėl",-12.829947471618652],["▁provo",-12.829952239990234],["ലം",-12.829961776733398],["▁azi",-12.829968452453612],["▁кыл",-12.83006191253662],["āna",-12.830096244812012],["▁skupin",-12.830143928527832],["צבע",-12.830214500427246],["▁โรคสะเก็ดเงิน",-12.83024787902832],["ainn",-12.830252647399902],["izam",-12.83027458190918],["▁ន",-12.830327033996582],["▁dali",-12.830341339111328],["ट्स",-12.83037567138672],["▁receber",-12.830387115478516],["アメリカ",-12.83039379119873],["▁masina",-12.830397605895996],["▁فوجی",-12.830398559570312],["▁długo",-12.830408096313477],["berry",-12.83041763305664],["लाइ",-12.830472946166992],["▁невозможно",-12.830491065979004],["akkan",-12.830492973327637],["ศูนย์",-12.830504417419434],["あなたの",-12.83050537109375],["▁гады",-12.830536842346191],["▁gæ",-12.830554962158203],["greb",-12.830586433410645],["▁yerli",-12.83058738708496],["▁quais",-12.830591201782228],["▁lembra",-12.83060073852539],["▁címe",-12.83062744140625],["▁הסי",-12.830668449401855],["▁Skr",-12.830738067626951],["ხის",-12.830755233764648],["▁спре",-12.830758094787598],["▁ikus",-12.83076000213623],["▁평가",-12.830769538879396],["ындагы",-12.830830574035645],["▁posjeti",-12.830832481384276],["▁startet",-12.830842971801758],["▁ง",-12.83086109161377],["▁jari",-12.8308687210083],["▁privada",-12.830912590026855],["keli",-12.830930709838867],["ಪುರ",-12.830936431884766],["▁Adobe",-12.830947875976562],["▁restaurante",-12.830958366394045],["▁있다는",-12.830965995788574],["▁grootste",-12.83098316192627],["审",-12.8309907913208],["▁XIV",-12.8309965133667],["▁təklif",-12.831010818481444],["▁imagen",-12.831021308898926],["ដឹង",-12.831027030944824],["▁tabel",-12.831029891967772],["▁Meinung",-12.83104133605957],["▁várias",-12.831052780151367],["▁ideja",-12.8310546875],["▁evidente",-12.831083297729492],["▁muta",-12.83111572265625],["ందా",-12.831121444702148],["▁Sort",-12.831121444702148],["▁bendra",-12.831131935119627],["▁Brug",-12.831161499023438],["▁Kia",-12.83116626739502],["▁pořád",-12.831180572509766],["▁mengandung",-12.831184387207031],["▁acho",-12.831209182739258],["▁sweet",-12.83121109008789],["ranga",-12.83121395111084],["▁anteriores",-12.831228256225586],["▁dashuri",-12.8312406539917],["▁ža",-12.831268310546877],["▁ves",-12.83128261566162],["▁cuidado",-12.831283569335938],["▁ugyan",-12.831297874450684],["須",-12.831303596496582],["▁పాత్ర",-12.83131980895996],["▁Stelle",-12.831344604492188],["▁հարաբերություն",-12.83136749267578],["▁покрај",-12.83137035369873],["▁Центр",-12.831371307373049],["▁Smartphone",-12.831379890441896],["▁tänään",-12.831384658813477],["▁condizioni",-12.83141803741455],["▁آید",-12.831428527832031],["▁велика",-12.831429481506348],["ELI",-12.83147430419922],["løs",-12.831480979919434],["▁زیست",-12.831487655639648],["MIS",-12.83150577545166],["▁Toki",-12.831507682800291],["▁constant",-12.831517219543455],["▁provin",-12.831521034240724],["▁inim
ese",-12.831537246704102],["OJ",-12.831549644470217],["▁kronor",-12.831572532653809],["▁organization",-12.831589698791504],["हरा",-12.83160400390625],["▁materiali",-12.83160400390625],["▁gruppe",-12.831609725952148],["éra",-12.831637382507324],["▁eljárás",-12.83164405822754],["vart",-12.831657409667969],["▁zuwa",-12.831690788269045],["esebb",-12.831692695617676],["щим",-12.83171558380127],["▁nder",-12.8317289352417],["▁ասում",-12.831729888916016],["▁ಫ",-12.831747055053713],["▁gedaan",-12.831787109375],["ზარ",-12.831793785095217],["فع",-12.83180332183838],["লাম",-12.831839561462402],["សំ",-12.8318510055542],["vut",-12.831881523132324],["訴",-12.831884384155272],["美好",-12.83188533782959],["▁Gw",-12.831891059875488],["gangen",-12.831896781921388],["▁հնարավորություն",-12.83189868927002],["dyr",-12.83192539215088],["▁átt",-12.83197021484375],["▁válasz",-12.832002639770508],["يسي",-12.832005500793455],["签",-12.832005500793455],["▁(23",-12.832030296325684],["▁territorial",-12.832077026367188],["▁предложения",-12.832085609436035],["kori",-12.83210563659668],["なのか",-12.832122802734377],["▁ikki",-12.832128524780272],["▁лиш",-12.832152366638184],["ונת",-12.832175254821776],["▁sonunda",-12.832201957702637],["▁phối",-12.83222770690918],["▁kompens",-12.832236289978027],["▁marta",-12.832242965698242],["▁buruk",-12.832249641418455],["▁dział",-12.832265853881836],["宴",-12.832280158996582],["achtaí",-12.832281112670898],["▁helpo",-12.832287788391112],["คนที่",-12.832307815551758],["ماي",-12.832310676574709],["ھن",-12.832317352294922],["qet",-12.832320213317873],["▁Tyskland",-12.832321166992188],["▁şöyle",-12.832321166992188],["▁ноября",-12.832321166992188],["ैः",-12.832342147827148],["кредит",-12.832345962524414],["▁Montag",-12.832347869873049],["▁שהיא",-12.832362174987791],["▁decidir",-12.83236598968506],["等待",-12.832385063171388],["▁भाई",-12.832406997680664],["պա",-12.832437515258787],["лип",-12.832439422607422],["▁kurio",-12.832459449768066],["▁sidoo",-12.832459449768066],["▁bieden",-12.832472801208496],["▁päästä",-12.832502365112305],["公益",-12.832531929016112],["▁Ци",-12.832536697387695],["ปัจจุบัน",-12.832538604736328],["▁بص",-12.832564353942873],["▁Cafe",-12.832573890686035],["▁něj",-12.832575798034668],["Ol",-12.832585334777832],["▁puut",-12.83259105682373],["▁aventura",-12.832615852355955],["▁کپ",-12.832633972167969],["भ्य",-12.83267879486084],["▁фе",-12.83273696899414],["序",-12.832770347595217],["候",-12.832798957824709],["نفس",-12.832873344421388],["岸",-12.83287525177002],["▁გასა",-12.832883834838867],["ເຊ",-12.832921028137209],["▁бос",-12.832941055297852],["▁చా",-12.832968711853027],["▁eigin",-12.832969665527344],["▁삼",-12.832971572875977],["▁тарап",-12.833019256591797],["▁другого",-12.83303451538086],["varer",-12.833036422729492],["ක්ෂ",-12.833038330078123],["▁solen",-12.833094596862791],["▁podstaw",-12.833114624023438],["▁dane",-12.833117485046388],["ayi",-12.83313274383545],["▁മഴ",-12.833137512207031],["મર",-12.83314609527588],["▁TS",-12.8331880569458],["▁같",-12.833189964294434],["▁ہاتھ",-12.833207130432127],["后的",-12.833215713500977],["▁40-",-12.83322048187256],["קד",-12.833223342895508],["▁arhitekt",-12.833223342895508],["▁minne",-12.833242416381836],["▁munt",-12.833244323730469],["landet",-12.833258628845217],["▁цркве",-12.83327579498291],["▁legnagyobb",-12.833277702331545],["▁पढ़",-12.83327865600586],["▁comunicat",-12.833290100097656],["▁Ĉu",-12.833303451538086],["▁informat",-12.83331298828125],["වාදී",-12.833324432373049],["▁אזוי",-12.833328247070312],["້ນ",-12.833333015441896],["▁домашни",-
12.833335876464844],["tsioon",-12.833352088928224],["▁довольно",-12.833374977111816],["▁Cream",-12.833378791809082],["roep",-12.833395957946776],["▁PIN",-12.833396911621094],["ശാ",-12.83340072631836],["▁المدينة",-12.833423614501951],["ogi",-12.833429336547852],["▁одлука",-12.83343505859375],["▁mukava",-12.833456993103027],["▁oed",-12.83347225189209],["▁Sinu",-12.833484649658203],["▁velik",-12.833502769470217],["jalla",-12.833503723144531],["▁bueno",-12.833508491516112],["ምር",-12.833513259887695],["▁týchto",-12.83354663848877],["▁नदी",-12.833562850952148],["خل",-12.833620071411133],["ург",-12.833636283874512],["▁carrera",-12.833642959594728],["▁расположен",-12.833642959594728],["▁келет",-12.83366584777832],["skrifter",-12.833694458007812],["gląd",-12.833696365356444],["İK",-12.83372402191162],["මන්",-12.833725929260254],["seniz",-12.833759307861328],["▁Европе",-12.833775520324709],["вач",-12.833792686462402],["ੜੇ",-12.833820343017578],["กาย",-12.83384895324707],["▁иако",-12.83386516571045],["符",-12.833903312683104],["maja",-12.8339204788208],["▁1.0",-12.833921432495115],["▁პირველ",-12.833953857421877],["▁upo",-12.833978652954102],["سپ",-12.833982467651367],["ုံ",-12.834031105041504],["▁гг",-12.834057807922363],[":0",-12.834099769592283],["стями",-12.834111213684082],["▁jeb",-12.834189414978027],["けれど",-12.834214210510254],["▁coach",-12.834221839904783],["ેડ",-12.834229469299316],["▁möjligt",-12.834230422973633],["▁teléfono",-12.834230422973633],["▁thưởng",-12.834239959716797],["▁magka",-12.83424186706543],["ировал",-12.834245681762695],["▁людзі",-12.834245681762695],["▁метою",-12.834245681762695],["▁kuha",-12.834246635437012],["ecta",-12.83425521850586],["▁יי",-12.83425998687744],["▁bulun",-12.834269523620604],["INO",-12.834308624267578],["▁pirma",-12.834318161010742],["▁бухгалтер",-12.834357261657717],["▁Hele",-12.834390640258787],["▁الأمريكية",-12.834405899047852],["kool",-12.834442138671877],["▁മലയാള",-12.834457397460938],["삼",-12.83447265625],["medel",-12.834495544433594],["▁ಏನ",-12.834501266479492],["▁Univers",-12.834503173828123],["▁seçim",-12.83450412750244],["▁moguće",-12.834506034851074],["▁знать",-12.834543228149414],["larınızı",-12.834624290466309],["▁folke",-12.834660530090332],["▁deep",-12.834662437438965],["のこと",-12.834683418273926],["מנו",-12.83470058441162],["päin",-12.834701538085938],["▁Torre",-12.834705352783203],["лийг",-12.83472728729248],["▁Universal",-12.834731101989746],["ստի",-12.834736824035645],["▁esam",-12.834772109985352],["▁وش",-12.834776878356934],["将在",-12.83478546142578],["нување",-12.834786415100098],["nivå",-12.834810256958008],["▁شرط",-12.834832191467283],["▁çat",-12.834833145141602],["sæ",-12.83484172821045],["▁kaiken",-12.834851264953612],["▁بیت",-12.834867477416992],["WAN",-12.834915161132812],["juši",-12.834928512573242],["koliv",-12.834936141967772],["▁ועד",-12.83494758605957],["نگی",-12.83495807647705],["▁repara",-12.834985733032228],["▁pion",-12.834991455078123],["şî",-12.834996223449709],["▁पुन",-12.835030555725098],["▁каса",-12.835041999816896],["▁grafic",-12.835054397583008],["ましたが",-12.835061073303224],["τικής",-12.835091590881348],["▁tipa",-12.83509922027588],["वाह",-12.835118293762209],["you",-12.835124015808104],["▁Alu",-12.835127830505373],["▁أح",-12.83513069152832],["▁ፓርቲ",-12.83513069152832],["玻璃",-12.835135459899902],["žna",-12.83515453338623],["▁equipos",-12.835192680358888],["▁sementara",-12.835195541381836],["▁Auswahl",-12.835198402404783],["ымды",-12.835199356079102],["▁בארץ",-12.835210800170898],["▁Evangel",-12.83525562286377],["rác"
[model/sentence-transformer/unigram.json — SentencePiece unigram vocabulary of the bundled embedding model, tracked via Git LFS (see .gitattributes). The file is a single long JSON array of [piece, log_probability] pairs such as ["▁normas", -12.835287094116213]; the raw vocabulary dump is elided here.]
],["▁vall",-12.870267868041992],["必ず",-12.870267868041992],["▁British",-12.870290756225586],["уга",-12.870295524597168],["▁jednog",-12.8702974319458],["▁usually",-12.870309829711914],["▁dolu",-12.870320320129396],["▁احترام",-12.87032985687256],["ایش",-12.870342254638672],["▁ಅಲ್ಲಿ",-12.870361328125],["▁Més",-12.870363235473633],["▁Юр",-12.870378494262695],["ografía",-12.870388984680176],["▁isn",-12.870407104492188],["▁certeza",-12.870412826538086],["ബു",-12.87044906616211],["▁гласа",-12.870474815368652],["▁играть",-12.870482444763184],["▁ఏం",-12.87053108215332],["限定",-12.870535850524902],["▁Фото",-12.8705472946167],["கர",-12.870577812194824],["▁ជាង",-12.870590209960938],["▁አካባቢ",-12.87059211730957],["▁prostora",-12.870649337768556],["▁мөнгө",-12.8706636428833],["▁egg",-12.870688438415527],["инов",-12.870707511901855],["▁melyek",-12.870712280273438],["\";",-12.870722770690918],["▁ਕਰਦਾ",-12.87073802947998],["лыш",-12.870738983154297],["giri",-12.870749473571776],["ોની",-12.870757102966309],["▁novada",-12.870770454406738],["tume",-12.870789527893066],["▁segons",-12.870792388916016],["▁naviga",-12.870798110961914],["▁пров",-12.870809555053713],["شة",-12.870817184448242],["▁рынка",-12.87084674835205],["▁سپر",-12.870878219604492],["puh",-12.870938301086426],["▁իրավունք",-12.87095069885254],["▁sayt",-12.870978355407717],["▁Dru",-12.870990753173828],["▁კომპანია",-12.871013641357422],["▁konstant",-12.871023178100586],["▁ରଖି",-12.871026992797852],["ડો",-12.871027946472168],["▁კუ",-12.871044158935549],["ரும்",-12.871051788330078],["同事",-12.871057510375977],["▁Espero",-12.871087074279783],["▁ചിന്ത",-12.87110710144043],["▁mažiau",-12.871131896972656],["▁haur",-12.871153831481934],["ท้อง",-12.871158599853516],["▁тоді",-12.871159553527832],["መስ",-12.871163368225098],["▁Bernard",-12.871174812316896],["parta",-12.871203422546388],["വെ",-12.871212005615234],["служб",-12.871217727661133],["▁adipisicing",-12.871217727661133],["▁memastikan",-12.871217727661133],["▁suất",-12.871217727661133],["▁ಸಾಮಾಜಿಕ",-12.871217727661133],["▁dürfen",-12.87121868133545],["▁akkurat",-12.871220588684082],["צט",-12.871235847473145],["▁פאַר",-12.871241569519045],["▁ఇంకా",-12.871241569519045],["▁зазнач",-12.871256828308104],["ဆင်",-12.871262550354004],["▁Stephen",-12.871265411376951],["Tìm",-12.871270179748535],["或许",-12.871310234069824],["▁löy",-12.871312141418455],["▁слика",-12.871326446533203],["毕业",-12.87134075164795],["▁خودم",-12.871347427368164],["▁лични",-12.871352195739746],["▁түрі",-12.871366500854492],["piste",-12.871376991271973],["▁رسانی",-12.871394157409668],["oną",-12.8714017868042],["ฮา",-12.871423721313477],["▁Association",-12.871423721313477],["▁이전",-12.87142562866211],["安装",-12.87144660949707],["▁oven",-12.871454238891602],["▁començar",-12.87145709991455],["▁yiri",-12.87147045135498],["점을",-12.871481895446776],["▁chlap",-12.87149429321289],["▁Prime",-12.871500015258787],["ಯ್ಯ",-12.871509552001951],["▁barra",-12.87151336669922],["ราคาถูก",-12.871585845947266],["λω",-12.871602058410645],["辦理",-12.871603965759276],["▁bj",-12.871606826782228],["▁ürünleri",-12.871622085571287],["▁oru",-12.87163257598877],["チェック",-12.871638298034668],["▁období",-12.87168788909912],["doma",-12.871691703796388],["isiert",-12.871702194213867],["gatan",-12.871728897094728],["▁відпові",-12.87173557281494],["stavljen",-12.871761322021484],["rusi",-12.871764183044434],["▁Prije",-12.871784210205078],["хгүй",-12.871814727783203],["ฐาน",-12.871837615966797],["тө",-12.871919631958008],["rry",-12.871925354003906],["foje",-12.871932983398438],["▁билим
",-12.871939659118652],["lux",-12.871957778930664],["ทร",-12.871959686279297],["ಾದರೂ",-12.87196922302246],["▁rama",-12.87197208404541],["ఈ",-12.872003555297852],["▁hamil",-12.872003555297852],["▁Азаттык",-12.8720064163208],["가는",-12.872069358825684],["▁Нам",-12.872097969055176],["ສ້າງ",-12.872098922729492],["▁ұста",-12.872137069702148],["▁Francis",-12.872150421142578],["▁configura",-12.872163772583008],["ဴ",-12.87216854095459],["▁Bygg",-12.872169494628906],["▁nieko",-12.872204780578612],["▁মৃত্যু",-12.872210502624512],["न्या",-12.872225761413574],["▁Velkommen",-12.872235298156738],["▁güven",-12.87224292755127],["▁offerte",-12.872262001037598],["kauden",-12.872271537780762],["▁міської",-12.872274398803713],["крет",-12.872278213500977],["每一个",-12.87229824066162],["маш",-12.872323036193848],["ଣୀ",-12.872336387634276],["мом",-12.87234592437744],["▁izglītības",-12.872364044189451],["▁மத்திய",-12.872393608093262],["ړو",-12.872414588928224],["▁Nepal",-12.872414588928224],["▁gyakorlat",-12.872425079345703],["ηθούν",-12.872459411621094],["্ট",-12.872493743896484],["ении",-12.87251091003418],["occasion",-12.872516632080078],["晶",-12.872525215148926],["▁보면",-12.872554779052734],["ľav",-12.872573852539062],["ંચ",-12.872615814208984],["▁организм",-12.8726167678833],["hør",-12.872620582580566],["laina",-12.872654914855955],["ERT",-12.872672080993652],["μαν",-12.87268352508545],["▁ចេញ",-12.87271785736084],["사이트",-12.872747421264648],["▁indik",-12.872794151306152],["šle",-12.872796058654783],["hulu",-12.872821807861328],["еф",-12.872838973999023],["合い",-12.872848510742188],["OI",-12.872919082641602],["▁Epi",-12.872919082641602],["ရှ",-12.872925758361816],["ალა",-12.872928619384766],["方も",-12.872949600219728],["▁വരുന്ന",-12.872961044311523],["▁značky",-12.872977256774902],["ены",-12.87299919128418],["访问",-12.873003959655762],["▁தே",-12.873023986816406],["▁prevent",-12.87303638458252],["はない",-12.87303638458252],["fice",-12.873045921325684],["▁претседател",-12.873054504394531],["tiska",-12.87310028076172],["يوم",-12.873125076293944],["▁meva",-12.873126029968262],["▁innymi",-12.87316608428955],["▁studen",-12.873170852661133],["уют",-12.873172760009766],["▁ସମସ୍ତ",-12.873188018798828],["ѳ",-12.873202323913574],["▁bermanfaat",-12.87320327758789],["▁chwarae",-12.87320327758789],["▁српске",-12.87320327758789],["▁बच्चों",-12.87320327758789],["▁діяльність",-12.873204231262209],["▁końcu",-12.873205184936523],["бель",-12.873230934143066],["▁olacağı",-12.873250007629396],["miseksi",-12.87326431274414],["▁elabora",-12.87326431274414],["▁ĉu",-12.87326431274414],["çãeste",-12.873286247253418],["▁ميل",-12.873300552368164],["▁szo",-12.873318672180176],["▁İki",-12.873351097106934],["schlag",-12.873353958129885],["αγγελ",-12.873382568359377],["világ",-12.873406410217283],["▁tune",-12.873437881469728],["tūr",-12.873449325561523],["▁Tiu",-12.873452186584473],["▁ಹು",-12.873475074768066],["परि",-12.8734769821167],["NAS",-12.873506546020508],["бл",-12.87351417541504],["љени",-12.873541831970217],["▁ydych",-12.873550415039062],["վե",-12.873557090759276],["▁материјал",-12.87356185913086],["▁తాజా",-12.873597145080566],["থা",-12.873610496520996],["▁Váš",-12.873620986938477],["مم",-12.873621940612791],["জে",-12.87363052368164],["dār",-12.873638153076172],["▁cd",-12.873647689819336],["女儿",-12.873653411865234],["כנים",-12.873675346374512],["▁Syed",-12.87367820739746],["信心",-12.873679161071776],["ാര",-12.87371063232422],["▁Нас",-12.873720169067385],["▁તેમના",-12.873743057250977],["แน่นอน",-12.87378978729248],["▁បច្ចេកវិទ្យា",-12.8738231658
93556],["▁kontakte",-12.873856544494627],["зім",-12.873859405517578],["dokument",-12.873860359191896],["▁serem",-12.873872756958008],["▁colore",-12.873907089233398],["▁орталығы",-12.873910903930664],["für",-12.873913764953612],["nemer",-12.873924255371094],["▁రే",-12.87393283843994],["emme",-12.873940467834473],["VC",-12.873946189880373],["ësht",-12.873948097229004],["▁كي",-12.873990058898926],["▁Boh",-12.874024391174316],["▁esses",-12.87409210205078],["മന",-12.874105453491213],["▁podobne",-12.874114990234377],["误",-12.87412166595459],["steuer",-12.874128341674805],["geha",-12.874160766601562],["pret",-12.874176025390623],["▁omnium",-12.874178886413574],["▁matière",-12.874192237854004],["▁Sputnik",-12.874197006225586],["▁đẩy",-12.874197006225586],["▁gyvenimo",-12.874197959899902],["zera",-12.874235153198242],["▁elan",-12.87424659729004],["CG",-12.87425136566162],["▁poze",-12.874252319335938],["▁wartości",-12.874293327331545],["Ку",-12.874317169189451],["▁žino",-12.874327659606934],["Ć",-12.874330520629885],["▁vardag",-12.874372482299805],["▁Too",-12.874381065368652],["▁godz",-12.874407768249512],["▁úplne",-12.874446868896484],["▁ದೇವ",-12.87445831298828],["▁prachtige",-12.874482154846191],["craft",-12.874484062194824],["▁obi",-12.874484062194824],["▁compensa",-12.87449550628662],["▁představ",-12.874502182006836],["▁ส่ง",-12.874503135681152],["▁sína",-12.874505996704102],["▁trà",-12.874551773071287],["▁บริการ",-12.874554634094238],["ക്കുന്നു",-12.8745698928833],["▁Gua",-12.874574661254885],["▁praat",-12.874588012695312],["enz",-12.874608039855955],["اقت",-12.874611854553224],["▁nuit",-12.874650955200195],["jahr",-12.874661445617676],["▁അട",-12.874670028686523],["▁ulici",-12.874679565429688],["ರೀ",-12.87472438812256],["▁หน้า",-12.874728202819824],["▁exemplar",-12.874780654907228],["▁ଅବ",-12.87478256225586],["क्टर",-12.874814987182615],["ודה",-12.874815940856934],["ൂർ",-12.874825477600098],["▁мрежа",-12.874878883361816],["▁uka",-12.874882698059082],["вија",-12.874887466430664],["avana",-12.874907493591309],["kən",-12.874991416931152],["یدن",-12.874995231628418],["▁wild",-12.87499713897705],["▁posizione",-12.875014305114746],["र्ज",-12.875021934509276],["▁ලබන",-12.875072479248049],["حضر",-12.87512493133545],["likud",-12.875130653381348],["বর",-12.875164031982422],["Ệ",-12.875170707702637],["הם",-12.875178337097168],["▁eienskappe",-12.875192642211914],["ാസ",-12.875194549560549],["▁powinien",-12.875194549560549],["óm",-12.875199317932127],["▁Мор",-12.87522315979004],["お金",-12.875229835510254],["ségek",-12.875231742858888],["▁energía",-12.875234603881836],["mhar",-12.875236511230469],["काल",-12.87524127960205],["▁روزنامه",-12.875243186950684],["▁Universität",-12.875248908996582],["▁۱۵",-12.87525463104248],["表明",-12.87527847290039],["کاری",-12.875286102294922],["ឹង",-12.875286102294922],["▁2007,",-12.87531852722168],["▁marang",-12.875322341918944],["▁लें",-12.875338554382324],["valda",-12.875358581542969],["▁slučaju",-12.87540340423584],["▁kutil",-12.875423431396484],["▁شناخت",-12.875448226928713],["▁tolle",-12.87545108795166],["对此",-12.87548542022705],["jącego",-12.87550926208496],["日は",-12.87551498413086],["▁Основ",-12.875515937805176],["lök",-12.875580787658691],["āni",-12.875584602355955],["▁විදියට",-12.875604629516602],["იძე",-12.875609397888184],["穩定",-12.875614166259766],["ેલ",-12.875627517700195],["していく",-12.875683784484863],["▁બોલ",-12.87572956085205],["ထိုး",-12.875738143920898],["▁നേര",-12.8757905960083],["變化",-12.875799179077148],["ино",-12.875825881958008],["▁core",-12.875840187072754],
["▁механ",-12.87586784362793],["ilay",-12.875869750976562],["اما",-12.875869750976562],["pier",-12.875871658325195],["▁биће",-12.875895500183104],["dating",-12.875914573669434],["▁clinic",-12.875944137573242],["ფერი",-12.87594509124756],["▁1,8",-12.87595272064209],["لز",-12.875967979431152],["tohet",-12.876011848449709],["라인",-12.876032829284668],["▁яр",-12.8760347366333],["▁κορ",-12.87604522705078],["▁<<",-12.87607192993164],["▁əməliyyat",-12.87607192993164],["Alba",-12.876072883605955],["▁хий",-12.876072883605955],["▁பாட",-12.876073837280272],["ěk",-12.876144409179688],["ወስ",-12.87614917755127],["▁participants",-12.876150131225586],["逛",-12.876164436340332],["▁منهم",-12.87616729736328],["▁врата",-12.876176834106444],["vt",-12.87618637084961],["▁Clinton",-12.876188278198242],["▁znaleźć",-12.876188278198242],["▁дослідження",-12.876190185546877],["▁Όταν",-12.87619400024414],["▁Explorer",-12.876201629638672],["▁yoğun",-12.87621021270752],["▁ಹಾಕ",-12.876215934753418],["▁direttamente",-12.876218795776367],["sisi",-12.876227378845217],["▁később",-12.876277923583984],["▁Dok",-12.876340866088867],["▁români",-12.876341819763184],["▁tók",-12.8763427734375],["▁Hö",-12.876346588134766],["▁webu",-12.876384735107422],["ত্র",-12.876386642456056],["යකට",-12.876421928405762],["▁ര",-12.876429557800291],["▁maddə",-12.876436233520508],["▁Status",-12.87648105621338],["больш",-12.876510620117188],["▁huono",-12.876510620117188],["لائ",-12.876554489135742],["ின",-12.876591682434082],["bost",-12.876605033874512],["▁хе",-12.876617431640623],["ฟ้า",-12.87661838531494],["▁näy",-12.87661838531494],["Ė",-12.87664794921875],["jami",-12.876703262329102],["unuz",-12.876726150512695],["ከሰ",-12.87676239013672],["▁eil",-12.876774787902832],["ونة",-12.876789093017578],["▁alus",-12.876790046691896],["▁beth",-12.876809120178224],["ਪਾਲ",-12.876837730407717],["▁tapet",-12.87685775756836],["▁Độ",-12.87686538696289],["рган",-12.876874923706056],["▁მოუ",-12.876877784729004],["▁కేంద్ర",-12.876908302307127],["▁ніяк",-12.876917839050291],["▁गएको",-12.876919746398926],["ntys",-12.876937866210938],["▁WE",-12.876982688903809],["kū",-12.87698745727539],["▁Sco",-12.876996040344238],["гро",-12.876998901367188],["▁svojho",-12.877002716064451],["щі",-12.877007484436035],["▁اٹھا",-12.87702178955078],["▁קשה",-12.877050399780272],["czone",-12.877080917358398],["美味",-12.877110481262209],["iêu",-12.877119064331056],["▁continuación",-12.877120971679688],["▁сүр",-12.877127647399902],["ក្រុម",-12.877138137817385],["错误",-12.877148628234863],["▁závod",-12.87717628479004],["тат",-12.877178192138672],["▁аркылуу",-12.877184867858888],["▁گزشتہ",-12.877184867858888],["▁විශාල",-12.877185821533203],["▁Dobrý",-12.877191543579102],["▁puțin",-12.877203941345217],["ట్స్",-12.877222061157228],["pār",-12.87723159790039],["近年来",-12.87723445892334],["▁storitev",-12.877263069152832],["▁استقلال",-12.877264022827148],["ገን",-12.87727165222168],["▁AA",-12.877281188964844],["بىر",-12.877285957336426],["▁пече",-12.877288818359377],["▁Özel",-12.877321243286133],["ଦୁ",-12.877326011657717],["▁доказ",-12.877333641052246],["TRE",-12.87734317779541],["▁ತು",-12.877349853515623],["▁depend",-12.877367973327637],["рит",-12.877392768859863],["▁entreprises",-12.877416610717772],["汽",-12.877416610717772],["ידו",-12.87747287750244],["快乐",-12.877503395080566],["▁toinen",-12.877524375915527],["▁3-5",-12.877528190612791],["▁null",-12.877533912658691],["▁Fashion",-12.877545356750488],["이었다",-12.877562522888184],["▁podpis",-12.877573013305664],["▁gagn",-12.877595901489258],["▁خانم",-12.8776130
67626951],["della",-12.877618789672852],["▁सरकारको",-12.877641677856444],["nému",-12.877659797668455],["▁дети",-12.877723693847656],["▁而",-12.87775993347168],["OH",-12.877771377563477],["▁المرأة",-12.877778053283691],["▁بیرون",-12.877816200256348],["▁Delta",-12.87782096862793],["過程",-12.877821922302246],["рий",-12.877838134765623],["нє",-12.877852439880373],["話を",-12.877880096435549],["▁megoldás",-12.877887725830078],["ბს",-12.87791633605957],["▁niets",-12.877922058105469],["zony",-12.87794017791748],["▁οδηγ",-12.877971649169922],["▁wody",-12.87797737121582],["GER",-12.877978324890137],["▁товаров",-12.877985000610352],["▁නෙ",-12.878007888793944],["▁дуб",-12.87801742553711],["taget",-12.878060340881348],["有效的",-12.878061294555664],["torilor",-12.878077507019045],["bred",-12.878082275390623],["▁Pack",-12.87808609008789],["涼",-12.878143310546877],["ozott",-12.87816333770752],["▁riferimento",-12.878183364868164],["▁alguien",-12.878185272216797],["▁demasiado",-12.87818717956543],["過ぎ",-12.878198623657228],["▁штраф",-12.878204345703123],["▁नौ",-12.878204345703123],["▁ხდება",-12.878206253051758],["მზად",-12.878213882446287],["▁eiusmod",-12.878260612487791],["▁kt",-12.87826156616211],["ცე",-12.878311157226562],["საგან",-12.878334045410156],["සර",-12.878377914428713],["rici",-12.878382682800291],["Dİ",-12.87839412689209],["▁தலைவர்",-12.878399848937988],["féle",-12.878402709960938],["▁liña",-12.878460884094238],["үүлж",-12.878466606140137],["▁cử",-12.878475189208984],["▁kön",-12.878539085388184],["चन",-12.878547668457031],["▁sarili",-12.87855625152588],["ezko",-12.87861442565918],["ilə",-12.87861442565918],["ліп",-12.878625869750977],["▁مبارزه",-12.87862777709961],["තිය",-12.878628730773926],["కులు",-12.87864589691162],["▁Един",-12.878647804260254],["主義",-12.878681182861328],["18)",-12.87873363494873],["izmus",-12.878735542297363],["верх",-12.878747940063477],["rest",-12.878761291503906],["▁Chal",-12.878771781921388],["▁imper",-12.878803253173828],["かどうか",-12.878816604614258],["պես",-12.878828048706056],["▁invalid",-12.87883472442627],["harra",-12.87883758544922],["▁ಹೊ",-12.878846168518066],["ထားတဲ့",-12.878851890563965],["إعادة",-12.878865242004396],["▁samling",-12.87888526916504],["eño",-12.878890991210938],["▁Desi",-12.878961563110352],["▁Husk",-12.878961563110352],["▁ül",-12.878963470458984],["▁postaci",-12.87901782989502],["▁aho",-12.879057884216309],["▁talking",-12.879058837890623],["▁kuriem",-12.879063606262209],["▁grupi",-12.879072189331056],["▁garai",-12.879074096679688],["شور",-12.879087448120115],["▁bulunduğu",-12.879094123840332],["▁cũ",-12.879158973693848],["▁ratione",-12.879168510437012],["ոռ",-12.879173278808594],["▁പക്ഷെ",-12.87917709350586],["เลี้ยง",-12.879178047180176],["▁sejumlah",-12.87918186187744],["▁Þegar",-12.87918186187744],["▁جشنواره",-12.87918186187744],["▁ವ್ಯಕ್ತಿ",-12.87918186187744],["▁ਸਕੂਲ",-12.879182815551758],["▁жыццё",-12.879183769226074],["▁думку",-12.87918758392334],["▁صاف",-12.879205703735352],["thal",-12.879229545593262],["osas",-12.87924098968506],["▁bersih",-12.879271507263184],["ामा",-12.879281997680664],["▁کاملا",-12.879315376281738],["ásának",-12.879323959350586],["▁konuştu",-12.879323959350586],["하거나",-12.879323959350586],["סים",-12.879392623901367],["fari",-12.879404067993164],["▁میشه",-12.87940502166748],["▁použit",-12.879460334777832],["▁sabah",-12.879461288452148],["redo",-12.87948989868164],["▁1921",-12.879490852355955],["силен",-12.87950038909912],["▁хай",-12.879535675048828],["▁қо",-12.879584312438965],["ылған",-12.879619598388672],["阵",-12.879635810
85205],["lüğü",-12.87964916229248],["▁осве",-12.879681587219238],["▁КР",-12.87974452972412],["בש",-12.879756927490234],["限り",-12.879759788513184],["▁módon",-12.87977695465088],["▁získa",-12.879789352416992],["▁خبره",-12.879796981811523],["VIN",-12.879806518554688],["▁Rou",-12.879806518554688],["▁तेल",-12.879895210266112],["▁biztos",-12.879935264587402],["▁juures",-12.879942893981934],["▁Mine",-12.8799467086792],["”(",-12.879969596862791],["कारी",-12.879972457885742],["czego",-12.880030632019045],["▁aktív",-12.880043029785156],["▁Pool",-12.880061149597168],["▁رغم",-12.880106925964355],["▁alkalmazás",-12.880141258239746],["设置",-12.880141258239746],["隱",-12.880148887634276],["viin",-12.880156517028809],["ਅਰ",-12.880163192749023],["สนุก",-12.880181312561035],["mæssig",-12.880182266235352],["▁ڊاڪٽر",-12.880182266235352],["▁Ysgol",-12.880184173583984],["▁ሰዓት",-12.8801851272583],["밀",-12.8801851272583],["▁આવ્યો",-12.88018798828125],["▁боломжтой",-12.880192756652832],["なぜ",-12.880196571350098],["▁പൊതു",-12.880204200744627],["▁அல்லது",-12.880206108093262],["▁చేయండి",-12.880215644836426],["▁jakiś",-12.88022804260254],["ndaki",-12.880236625671388],["maž",-12.880239486694336],["▁Nin",-12.880242347717283],["これから",-12.8802490234375],["AJI",-12.880268096923828],["শন",-12.880278587341309],["ιζ",-12.880292892456056],["▁خيال",-12.880303382873535],["цов",-12.880321502685549],["ុក",-12.880348205566406],["мс",-12.88035488128662],["▁листопада",-12.88035774230957],["▁웹",-12.88035774230957],["▁domini",-12.880366325378418],["▁quienes",-12.880367279052734],["జర్",-12.880373001098633],["▁näyttää",-12.880404472351074],["ที่ผ่านมา",-12.880410194396973],["▁плюс",-12.88041877746582],["ว่าจะ",-12.880425453186035],["vání",-12.880433082580566],["▁tuote",-12.880435943603516],["АТА",-12.880437850952148],["ที่เรา",-12.880446434020996],["šao",-12.88046169281006],["▁разам",-12.88047218322754],["ٽر",-12.88047695159912],["ाह",-12.880500793457031],["TZ",-12.88050365447998],["▁Quảng",-12.88050365447998],["uutta",-12.880512237548828],["▁dormir",-12.880548477172852],["▁pomocy",-12.880555152893066],["▁njima",-12.880558013916016],["وپ",-12.880562782287598],["ပင်",-12.880574226379396],["urdu",-12.880579948425291],["vard",-12.880624771118164],["▁lock",-12.88064193725586],["ḥ",-12.880650520324709],["ρκ",-12.880694389343262],["bę",-12.880778312683104],["ინა",-12.8808012008667],["▁Crea",-12.88084316253662],["▁krom",-12.880846977233888],["▁caur",-12.8809232711792],["▁Educa",-12.880976676940918],["▁exerci",-12.880986213684082],["lier",-12.881043434143066],["Ah",-12.881073951721191],["ないように",-12.881108283996582],["▁βι",-12.881109237670898],["erő",-12.881162643432615],["▁ପ୍ରଥମ",-12.881166458129885],["инен",-12.881168365478516],["histoire",-12.881182670593262],["ૅ",-12.881183624267578],["▁хөгжлийн",-12.881183624267578],["▁ស្តាប់",-12.881183624267578],["بدأ",-12.881184577941896],["▁iegūt",-12.881185531616213],["▁सुरक्षित",-12.881185531616213],["▁родители",-12.881187438964844],["▁થયો",-12.881187438964844],["▁ಹಿಂದೆ",-12.88118839263916],["▁laman",-12.881193161010742],["▁движение",-12.881195068359377],["▁карактер",-12.881196022033691],["ติดต่อ",-12.88119888305664],["▁जानिए",-12.881199836730955],["▁Viac",-12.88120937347412],["▁ظلم",-12.88120937347412],["▁განაცხადა",-12.881217956542969],["▁Touch",-12.8812255859375],["▁പോലീസ്",-12.881231307983398],["▁kompetenc",-12.881251335144045],["elas",-12.88125228881836],["энэ",-12.881265640258787],["▁hour",-12.881270408630373],["▁veći",-12.88129711151123],["biga",-12.881321907043455],["▁soud",-12.881341934204102],[
"นาง",-12.881351470947266],["▁konser",-12.881366729736328],["▁Lag",-12.881378173828123],["ിലൂടെ",-12.881413459777832],["diu",-12.88141632080078],["ებლად",-12.881422996520996],["▁dė",-12.881470680236816],["▁نر",-12.881479263305664],["▁ಪದ",-12.881502151489258],["Report",-12.881518363952637],["ваецца",-12.881526947021484],["▁Tí",-12.881583213806152],["▁şəhəri",-12.881586074829102],["▁płyt",-12.881601333618164],["▁ugyanis",-12.881610870361328],["bolag",-12.881644248962402],["munt",-12.881709098815918],["分別",-12.88173007965088],["▁gigi",-12.881763458251951],["изација",-12.88177490234375],["ข้อความ",-12.881790161132812],["▁superar",-12.881810188293455],["...[",-12.881823539733888],["生存",-12.88185691833496],["艺",-12.881871223449709],["▁رای",-12.8818998336792],["▁атқа",-12.881913185119627],["▁toivo",-12.88192367553711],["▁pääse",-12.881957054138184],["▁превоз",-12.881976127624512],["▁joog",-12.881980895996094],["sure",-12.881999015808104],["▁hård",-12.88201141357422],["▁ΤΟ",-12.882017135620115],["nami",-12.882033348083496],["való",-12.882055282592772],["øn",-12.882091522216797],["▁stari",-12.882119178771973],["▁Tera",-12.882137298583984],["arm",-12.882155418395996],["ģ",-12.882162094116213],["BP",-12.882184982299805],["▁Kesehatan",-12.882184982299805],["▁Çünki",-12.882184982299805],["▁ಮಾಧ್ಯಮ",-12.882184982299805],["▁համագործակց",-12.88218593597412],["▁صحبت",-12.88218593597412],["▁ያል",-12.882187843322754],["KR",-12.882190704345703],["▁службы",-12.882190704345703],["▁korang",-12.88219165802002],["▁դեռ",-12.882192611694336],["▁کیږي",-12.882192611694336],["běh",-12.882204055786133],["▁stalo",-12.88220500946045],["▁Jón",-12.882216453552246],["▁ezelőtt",-12.882220268249512],["▁episcop",-12.88222312927246],["▁жатыр",-12.882231712341309],["▁jetën",-12.88227367401123],["זן",-12.882285118103027],["▁ਜ਼",-12.88230037689209],["▁elast",-12.882332801818848],["distan",-12.882333755493164],["vog",-12.882384300231934],["▁ikiwa",-12.88241481781006],["Oh",-12.882415771484377],["шая",-12.882417678833008],["▁About",-12.88246250152588],["▁Hero",-12.882466316223145],["▁Levi",-12.882469177246094],["Ни",-12.88247299194336],["▁PVC",-12.88247299194336],["▁අස",-12.882492065429688],["меча",-12.8825101852417],["▁azon",-12.882521629333496],["透明",-12.882532119750977],["казва",-12.882542610168455],["مام",-12.882545471191406],["▁жүргіз",-12.88255214691162],["▁esim",-12.882560729980469],["▁Ske",-12.88259220123291],["qal",-12.88261604309082],["▁winkel",-12.882622718811035],["sier",-12.882655143737791],["節目",-12.882671356201172],["ừ",-12.88267421722412],["๊ก",-12.882682800292969],["▁yarış",-12.882686614990234],["会員",-12.882704734802246],["єв",-12.882731437683104],["▁встреча",-12.882736206054688],["▁иргэд",-12.8827543258667],["▁nesse",-12.88278579711914],["שאר",-12.882905960083008],["▁ေနာက္",-12.882906913757324],["nosi",-12.882933616638184],["пир",-12.882964134216309],["riez",-12.8829927444458],["▁تاج",-12.882993698120115],["кола",-12.883000373840332],["သုံး",-12.883024215698242],["▁kauni",-12.88302993774414],["弟",-12.883031845092772],["▁پڑ",-12.88304615020752],["▁neft",-12.88306713104248],["传播",-12.883140563964844],["stjórn",-12.883158683776855],["기관",-12.883166313171388],["▁хүүхэд",-12.883188247680664],["▁Gemeinde",-12.883193969726562],["ลีก",-12.883200645446776],["▁гаруй",-12.883200645446776],["▁սովոր",-12.88321018218994],["ИД",-12.883225440979004],["▁bağla",-12.883225440979004],["▁vhodné",-12.883228302001951],["▁Bw",-12.883231163024902],["▁Antalya",-12.883233070373535],["▁पुनः",-12.883248329162598],["▁decreto",-12.883251190185549],[
"▁Çi",-12.883260726928713],["robe",-12.88326358795166],["TUR",-12.883265495300291],["▁Solar",-12.883272171020508],["▁কী",-12.883288383483888],["▁کئے",-12.883295059204102],["▁алдын",-12.883298873901367],["GM",-12.883305549621582],["▁poderia",-12.883318901062012],["▁mogen",-12.883333206176758],["▁profesionales",-12.883360862731934],["టింగ్",-12.883377075195312],["▁бізнес",-12.883382797241213],["▁ekipa",-12.88339900970459],["職業",-12.88340663909912],["დებოდა",-12.883415222167969],["စြာ",-12.883417129516602],["▁xullo",-12.883417129516602],["JEN",-12.883442878723145],["のでしょうか",-12.883463859558104],["實際",-12.883492469787598],["licht",-12.883541107177734],["教室",-12.883553504943848],["▁Mw",-12.88358211517334],["შვილ",-12.883593559265137],["ютер",-12.88361644744873],["zli",-12.88361930847168],["▁Текст",-12.883647918701172],["▁hotelli",-12.883675575256348],["▁njim",-12.883712768554688],["一套",-12.883719444274902],["▁المج",-12.883721351623535],["▁cocina",-12.8837251663208],["世紀",-12.88372802734375],["fran",-12.88373565673828],["▁שלום",-12.883800506591797],["กล",-12.883827209472656],["▁Cao",-12.88383960723877],["▁Unu",-12.8838529586792],["▁मां",-12.88387966156006],["зра",-12.883899688720703],["▁ਲਿਆ",-12.883907318115234],["▁banca",-12.883913040161133],["चल",-12.883916854858398],["▁проведен",-12.883962631225586],["▁දැක",-12.883966445922852],["må",-12.8839750289917],["▁ឱ្យ",-12.884039878845217],["-29",-12.88404655456543],["şîn",-12.884078025817873],["▁문의",-12.884108543395996],["▁tama",-12.884119987487791],["▁شور",-12.88412380218506],["zorg",-12.88413143157959],["▁вибор",-12.88414192199707],["帕",-12.88415241241455],["fäl",-12.884159088134766],["▁parroquia",-12.88419246673584],["▁včetně",-12.88419246673584],["▁ಸಮಸ್ಯೆ",-12.88419246673584],["▁පස්සේ",-12.884193420410156],["▁kandungan",-12.884195327758787],["▁стоимость",-12.884196281433104],["▁තීරණය",-12.884203910827637],["▁침",-12.884215354919434],["▁municipio",-12.88422679901123],["▁yöntem",-12.884238243103027],["luse",-12.88424587249756],["jina",-12.884289741516112],["ထက္",-12.88429832458496],["ئە",-12.884300231933594],["▁veilig",-12.884329795837402],["▁zrobi",-12.884334564208984],["▁elemente",-12.884350776672363],["մար",-12.884353637695312],["▁tark",-12.884359359741213],["▁nuôi",-12.884395599365234],["طب",-12.884407043457031],["ত্ব",-12.884429931640623],["izzato",-12.884435653686523],["▁krop",-12.884441375732422],["▁kaupunki",-12.884442329406738],["मृ",-12.884456634521484],["အတွင်း",-12.884458541870115],["بعد",-12.884503364562988],["▁doce",-12.88454532623291],["opi",-12.884554862976074],["▁løsning",-12.884559631347656],["رل",-12.884575843811035],["不得不",-12.884587287902832],["▁আই",-12.884613990783691],["્ટ",-12.88461971282959],["準",-12.884623527526855],["ាក់",-12.884642601013184],["▁Silver",-12.884651184082031],["国外",-12.884671211242676],["▁Andri",-12.884716033935549],["sætning",-12.884730339050291],["يڪ",-12.884753227233888],["終於",-12.884756088256836],["друже",-12.88478946685791],["▁bestaat",-12.884793281555176],["տու",-12.88481616973877],["▁Tube",-12.8848237991333],["ဂ်",-12.884836196899414],["▁повтор",-12.884867668151855],["▁ствар",-12.88488483428955],["avanje",-12.884902000427246],["▁자기",-12.884932518005373],["лити",-12.884960174560549],["▁saira",-12.884976387023926],["▁Muu",-12.88497829437256],["▁rozvoj",-12.884990692138672],["tuin",-12.88501262664795],["▁taifa",-12.885026931762695],["ატ",-12.88504123687744],["จ่าย",-12.885053634643556],["节目",-12.885101318359377],["εύει",-12.885114669799805],["!).",-12.885122299194336],["▁Sản",-12.885133743286133],["쳐",-12
.88518238067627],["▁بناء",-12.885183334350586],["ユーザー",-12.885184288024902],["يمة",-12.885186195373535],["撮影",-12.885191917419434],["วิทยา",-12.8851957321167],["▁приоритет",-12.885197639465332],["▁Православ",-12.885202407836914],["▁земљи",-12.885204315185549],["▁gerekiyor",-12.885210990905762],["▁tilbyr",-12.88523006439209],["▁ontwerp",-12.885233879089355],["▁ΚΑΙ",-12.885241508483888],["තල",-12.885266304016112],["왕",-12.885271072387695],["▁مسلسل",-12.885275840759276],["ường",-12.885281562805176],["▁naprej",-12.885283470153809],["▁Бос",-12.88528823852539],["▁absolv",-12.885299682617188],["▁siker",-12.88536548614502],["▁اهمیت",-12.885395050048828],["Porn",-12.88539981842041],["▁көтер",-12.88540744781494],["▁చిత్ర",-12.88540744781494],["帰",-12.885424613952637],["tieto",-12.885440826416016],["読み",-12.885457038879396],["▁koru",-12.88548183441162],["imine",-12.88549518585205],["かけ",-12.885537147521973],["/2008",-12.885543823242188],["ським",-12.88558864593506],["pc",-12.885623931884766],["▁kuto",-12.88563060760498],["హా",-12.885632514953612],["▁ВАС",-12.885647773742676],["▁romana",-12.885649681091309],["ព្រ",-12.885653495788574],["▁umur",-12.8856840133667],["▁kontu",-12.885699272155762],["▁فو",-12.885699272155762],["▁유지",-12.88570499420166],["我觉得",-12.885772705078123],["zava",-12.885777473449709],["特定",-12.88579273223877],["сс",-12.885823249816896],["▁Lug",-12.885835647583008],["▁acredita",-12.885854721069336],["▁Hyvä",-12.885860443115234],["▁ministre",-12.885860443115234],["sən",-12.885872840881348],["oan",-12.885909080505373],["▁slap",-12.88591194152832],["▁Balkan",-12.88592529296875],["/17",-12.885933876037598],["▁risque",-12.885948181152344],["▁collabora",-12.885951042175291],["▁Zara",-12.885960578918455],["inimo",-12.885974884033203],["▁జీ",-12.88606071472168],["▁तस्य",-12.886072158813477],["Wir",-12.88607406616211],["▁obisk",-12.886133193969728],["▁liom",-12.886171340942385],["蔡",-12.886178016662598],["▁Vaata",-12.886194229125977],["▁الاقوامی",-12.88620376586914],["▁Laman",-12.886207580566406],["▁આપણે",-12.886223793029783],["▁सूची",-12.88624382019043],["▁ahayd",-12.886250495910645],["▁bund",-12.886259078979492],["▁tartott",-12.886259078979492],["▁עליו",-12.88627815246582],["▁privacy",-12.886280059814451],["▁гэрээ",-12.886320114135742],["▁Sozial",-12.88632583618164],["▁svým",-12.886345863342283],["▁آگ",-12.886357307434082],["▁કરોડ",-12.886366844177246],["▁tutorial",-12.886369705200195],["▁რუსეთის",-12.886380195617676],["kārto",-12.88642120361328],["突出",-12.886425971984863],["▁svom",-12.886451721191406],["▁maso",-12.886465072631836],["رمان",-12.886480331420898],["ъя",-12.886486053466797],["▁مك",-12.886487007141112],["▁nájdete",-12.886497497558594],["▁израз",-12.88651180267334],["skelb",-12.886521339416504],["ዘመ",-12.886524200439451],["ຶ",-12.886533737182615],["▁+1",-12.886542320251465],["তু",-12.886547088623049],["▁בט",-12.88656234741211],["▁sprzeda",-12.886563301086426],["▁ART",-12.88662052154541],["▁सक",-12.88663101196289],["▁Yahoo",-12.886652946472168],["▁vardı",-12.886695861816406],["▁kobiet",-12.88669776916504],["▁ferma",-12.886743545532228],["ම්බ",-12.886751174926758],["▁dashur",-12.886802673339844],["▁tegn",-12.886821746826172],["▁mitme",-12.886833190917969],["▁گم",-12.886881828308104],["▁Британ",-12.88689136505127],["στι",-12.886897087097168],["▁julkis",-12.886911392211914],["难以",-12.886940956115724],["▁eiga",-12.886957168579102],["▁разд",-12.88696002960205],["觉",-12.886978149414062],["lopen",-12.88701629638672],["▁defnyddio",-12.88702392578125],["▁Saar",-12.88703441619873],["▁سیمه",-
12.887042999267578],["óż",-12.887043952941896],["▁päivän",-12.887065887451172],["▁متحده",-12.887084007263184],["變成",-12.887103080749512],["jedi",-12.887127876281738],["▁yuk",-12.887162208557127],["昌",-12.887166023254396],["▁මීට",-12.88717269897461],["nés",-12.887182235717772],["▁свом",-12.887191772460938],["▁ekonom",-12.887192726135254],["▁période",-12.887211799621582],["EVA",-12.887212753295898],["▁знает",-12.88722038269043],["▁koku",-12.887229919433594],["▁변경",-12.887256622314451],["سلط",-12.887264251708984],["▁Jezus",-12.887289047241213],["▁сваёй",-12.887320518493652],["▁עצמו",-12.8873291015625],["▁sava",-12.88735294342041],["▁Aller",-12.887354850769045],["araka",-12.88735580444336],["▁pec",-12.887369155883787],["▁Сил",-12.887380599975586],["▁движения",-12.887421607971191],["▁Osiyo",-12.88743019104004],["வரும்",-12.88743782043457],["▁KK",-12.88743782043457],["▁Passa",-12.887534141540527],["йка",-12.88754653930664],["▁ବହୁ",-12.887550354003906],["▁आउने",-12.887579917907717],["屬於",-12.887587547302246],["▁fren",-12.887598991394045],["▁skirt",-12.887617111206056],["▁hek",-12.887639045715332],["▁critica",-12.887653350830078],["തീ",-12.88767147064209],["aniu",-12.887691497802734],["ड़े",-12.887722969055176],["OVA",-12.887738227844238],["ტერი",-12.887738227844238],["နိ",-12.887762069702148],["DAR",-12.887849807739258],["▁merr",-12.88785171508789],["▁ច្រើន",-12.887863159179688],["▁rib",-12.887866020202637],["▁противо",-12.88788604736328],["ченко",-12.887904167175291],["なり",-12.887931823730469],["▁îm",-12.887937545776367],["▁rechts",-12.887962341308594],["▁ദിവസ",-12.887993812561035],["lezi",-12.88800048828125],["▁оне",-12.888004302978516],["灰",-12.88801097869873],["▁ljubav",-12.888041496276855],["vention",-12.88806438446045],["▁Verein",-12.888081550598145],["късно",-12.888094902038574],["حدث",-12.888111114501951],["▁нерсе",-12.888124465942385],["中共",-12.888137817382812],["-2014",-12.888172149658203],["▁Psi",-12.88817310333252],["拆",-12.888189315795898],["▁göstərir",-12.888193130493164],["挑戰",-12.88819980621338],["▁pintura",-12.888205528259276],["▁Việc",-12.888218879699709],["▁berlangsung",-12.888218879699709],["▁profissionais",-12.888218879699709],["szel",-12.888228416442873],["▁აგ",-12.88824462890625],["フェ",-12.8882474899292],["njena",-12.888279914855955],["घा",-12.88828945159912],["▁Tagged",-12.888307571411133],["▁breast",-12.888307571411133],["▁Andreas",-12.88831901550293],["▁razy",-12.88835620880127],["▁کمپنی",-12.8883695602417],["▁property",-12.88837432861328],["គ្រ",-12.888388633728027],["navyo",-12.88840389251709],["▁pályázat",-12.88841152191162],["▁cevap",-12.88846206665039],["▁অনু",-12.888471603393556],["▁Буга",-12.888473510742188],["ật",-12.888479232788086],["▁beharko",-12.888507843017578],["oitus",-12.88852310180664],["▁eten",-12.888551712036133],["טאַ",-12.88856315612793],["tiny",-12.888577461242676],["▁Bug",-12.88858699798584],["ører",-12.888592720031738],["увања",-12.888619422912598],["▁Bj",-12.88862419128418],["▁обрати",-12.88863468170166],["▁Materi",-12.888668060302734],["有多",-12.888684272766112],["ären",-12.88868808746338],["▁ക്കാര്",-12.888689041137695],["▁bate",-12.888696670532228],["▁soul",-12.888700485229492],["▁pilsētas",-12.88874053955078],["▁обид",-12.888805389404297],["zahl",-12.888845443725586],["▁effetti",-12.88886547088623],["▁1/4",-12.888869285583496],["င့်",-12.88888168334961],["रज",-12.888900756835938],["▁වෙන්නේ",-12.888904571533203],["يٽ",-12.888931274414062],["▁ζητ",-12.888933181762695],["akin",-12.888945579528809],["▁tarix",-12.888948440551758],["tävä",-12.8889951
70593262],["▁pomoći",-12.889012336730955],["čar",-12.88902759552002],["▁haja",-12.889031410217283],["bhe",-12.889092445373535],["▁проб",-12.889095306396484],["തം",-12.889111518859863],["▁Persi",-12.889115333557127],["đen",-12.88913345336914],["pira",-12.88916301727295],["nejši",-12.889168739318848],["▁paese",-12.889177322387695],["בסיס",-12.889179229736328],["桑",-12.889184951782228],["/2007",-12.889192581176758],["▁ключ",-12.88919448852539],["▁كېيىن",-12.889216423034668],["▁artykuł",-12.889227867126465],["▁छलफल",-12.889227867126465],["▁гривень",-12.889229774475098],["▁የታ",-12.889230728149414],["▁ହେଉଛି",-12.88923168182373],["▁작품",-12.88923454284668],["മായിരുന്നു",-12.889239311218262],["▁오후",-12.889265060424805],["wijs",-12.889273643493652],["▁налог",-12.889273643493652],["▁metropol",-12.889280319213867],["иска",-12.88929843902588],["▁membros",-12.88930606842041],["born",-12.889328956604004],["ົ",-12.8893461227417],["ڏا",-12.889348030090332],["▁manque",-12.889364242553713],["▁Коментари",-12.889375686645508],["명의",-12.889381408691406],["▁hələ",-12.889391899108888],["LINE",-12.889398574829102],["нуць",-12.889404296875],["▁جالب",-12.88941478729248],["”?",-12.889418601989746],["krij",-12.889470100402832],["▁ഇവ",-12.889503479003906],["的第一",-12.889506340026855],["▁repeti",-12.889511108398438],["▁adatok",-12.88954734802246],["gst",-12.889548301696776],["▁ווא",-12.88957977294922],["sager",-12.8895845413208],["▁Afg",-12.889642715454102],["▁modeller",-12.88966178894043],["ສໍາລັບ",-12.889681816101074],["ingas",-12.889684677124023],["ጠር",-12.88969612121582],["▁konsum",-12.88970947265625],["▁frustra",-12.889721870422363],["fell",-12.889753341674805],["▁tenaga",-12.88976764678955],["▁nachádza",-12.889806747436523],["contra",-12.889809608459473],["▁שבע",-12.889809608459473],["▁աշխատ",-12.889873504638672],["▁dawî",-12.88988971710205],["▁(23)",-12.889896392822266],["▁vijesti",-12.889901161193848],["▁හොද",-12.889944076538086],["ღი",-12.889963150024414],["ilən",-12.88996696472168],["സു",-12.889972686767578],["▁이유",-12.889972686767578],["组成",-12.889994621276855],["▁gost",-12.890006065368652],["▁որոշ",-12.890059471130373],["ıdır",-12.89007568359375],["▁bello",-12.890156745910645],["▁Haz",-12.890172958374023],["▁calle",-12.890181541442873],["තුරු",-12.890183448791504],["▁saling",-12.890189170837402],["îst",-12.89022445678711],["▁необ",-12.89023780822754],["▁означава",-12.89023780822754],["▁कानून",-12.890239715576172],["▁દરેક",-12.890239715576172],["▁ಪ್ರಕರಣ",-12.890241622924805],["เวอร์",-12.89024829864502],["▁Mundo",-12.890254020690918],["▁हुँदा",-12.890260696411133],["▁עדיין",-12.890268325805664],["▁дитя",-12.89028263092041],["▁Isus",-12.890283584594728],["▁ریزی",-12.890283584594728],["ზის",-12.890299797058104],["▁Μετά",-12.890303611755373],["▁هستیم",-12.890304565429688],["▁chiqarish",-12.890310287475586],["pugna",-12.890315055847168],["▁hawl",-12.890344619750977],["▁үйлдвэр",-12.89034652709961],["▁بايد",-12.890355110168455],["▁semangat",-12.89038372039795],["▁əlaqə",-12.890400886535645],["Institut",-12.89040756225586],["▁Søk",-12.890421867370604],["▁fotbal",-12.8904447555542],["▁Pata",-12.89045524597168],["deild",-12.890463829040527],["▁cuisine",-12.89049243927002],["כול",-12.890504837036133],["▁سون",-12.890508651733398],["▁tumor",-12.890558242797852],["árias",-12.890596389770508],["▁дене",-12.890603065490724],["dden",-12.890604972839355],["▁mondiale",-12.890607833862305],["▁bach",-12.890657424926758],["▁Quin",-12.890671730041504],["▁آی",-12.890698432922363],["šení",-12.890701293945312],["▁pura",-12.890711784362
791],["▁نمبر",-12.890717506408691],["▁uki",-12.890745162963867],["恋",-12.890756607055664],["ตรวจ",-12.890810012817385],["▁ырчы",-12.89081573486328],["▁շարունակ",-12.890822410583496],["▁Свят",-12.89085292816162],["ាង",-12.890864372253418],["▁výbor",-12.890872955322266],["szín",-12.890877723693848],["klasse",-12.89088249206543],["▁/>",-12.890887260437012],["▁händer",-12.890888214111328],["▁കൊണ്ട",-12.89089012145996],["cure",-12.89090347290039],["▁беле",-12.890909194946287],["เอก",-12.890934944152832],["▁kaha",-12.89097785949707],["▁kvartal",-12.890992164611816],["свід",-12.890995979309082],["加大",-12.891018867492676],["▁месяца",-12.891020774841309],["▁остров",-12.891021728515623],["▁작성",-12.891036033630373],["▁θέλει",-12.89104175567627],["دع",-12.891042709350586],["▁tuj",-12.89105224609375],["▁prokuror",-12.891070365905762],["itik",-12.891119956970217],["▁Almanya",-12.891206741333008],["▁ਫ਼",-12.891209602355955],["▁तत्",-12.891227722167969],["▁خیابان",-12.89124870300293],["▁ಕೃಷಿ",-12.89124870300293],["▁scientific",-12.891249656677246],["▁минимум",-12.891249656677246],["▁genç",-12.89126682281494],["▁عربستان",-12.891276359558104],["▁dibin",-12.89133071899414],["▁buz",-12.891337394714355],["qay",-12.891353607177734],["fant",-12.891369819641112],["▁расте",-12.891379356384276],["fie",-12.891398429870604],["വില്",-12.891429901123049],["แพ",-12.891475677490234],["▁guia",-12.891496658325195],["▁yeux",-12.891511917114258],["▁Noen",-12.891566276550291],["はありません",-12.891633987426758],["කරුවන්",-12.891634941101074],["▁equipa",-12.89163589477539],["▁eragin",-12.891637802124023],["ფორმ",-12.891690254211426],["▁coopera",-12.891701698303224],["▁ಪ್ರಾ",-12.89171314239502],["▁kans",-12.891735076904297],["cenia",-12.891767501831056],["▁Alfa",-12.891779899597168],["隻",-12.891830444335938],["ด์",-12.89184284210205],["хийн",-12.891846656799316],["▁echte",-12.891853332519531],["▁открива",-12.891860008239746],["的位置",-12.891942024230955],["▁зур",-12.891942977905272],["▁AU",-12.891947746276855],["▁vyz",-12.891980171203612],["▁земји",-12.891984939575195],["▁spy",-12.892008781433104],["▁sabihin",-12.89203929901123],["▁visų",-12.892080307006836],["▁கண",-12.892088890075684],["▁bureau",-12.892091751098633],["ိုင်",-12.892127990722656],["那样",-12.892130851745604],["Ё",-12.892176628112791],["今回の",-12.89219570159912],["თვ",-12.892206192016602],["▁Kevin",-12.892210006713867],["日本人",-12.892219543457031],["▁betaal",-12.892229080200195],["▁관계",-12.89223289489746],["िज",-12.89223575592041],["****",-12.89224338531494],["▁ಚಾ",-12.892245292663574],["ຝ",-12.892252922058104],["▁ବିଭିନ୍ନ",-12.892260551452637],["▁дальше",-12.892261505126951],["▁හරහා",-12.89226245880127],["បំផុត",-12.892263412475586],["▁lørdag",-12.892263412475586],["▁siūlo",-12.89227294921875],["ວາງ",-12.89228343963623],["▁అంటూ",-12.892285346984863],["▁przyczyn",-12.892288208007812],["เจ้าหน้าที่",-12.892289161682127],["▁aghaidh",-12.892292976379396],["▁verba",-12.892304420471191],["▁sploh",-12.892308235168455],["ອງ",-12.892309188842772],["▁ব্যা",-12.892332077026367],["▁Rä",-12.892352104187012],["顧客",-12.892354011535645],["▁Игра",-12.89236068725586],["▁subsidi",-12.892366409301758],["▁świata",-12.892378807067873],["▁gastos",-12.89238452911377],["▁gazete",-12.892414093017578],["▁હાથ",-12.892452239990234],["▁દા",-12.892457008361816],["कं",-12.892462730407717],["▁پلان",-12.892508506774902],["▁demokratik",-12.892533302307127],["▁hamar",-12.892578125],["කුත්",-12.89258098602295],["▁lön",-12.892600059509276],["laisen",-12.89263153076172],["▁ٻه",-12.892645835876465],["▁Ava",-12.892
648696899414],["లపై",-12.892663955688477],["▁Beg",-12.892664909362791],["▁LIVE",-12.892678260803224],["▁virksomheder",-12.892690658569336],["▁vahe",-12.892702102661133],["▁doute",-12.892717361450195],["▁undang",-12.892746925354004],["▁Smo",-12.892763137817385],["▁министър",-12.892794609069824],["▁honen",-12.892861366271973],["不良",-12.892870903015137],["▁response",-12.89287567138672],["▁meydana",-12.8928804397583],["▁БОЛ",-12.892902374267578],["ല്ലേ",-12.89291286468506],["শে",-12.892925262451172],["▁nedan",-12.892926216125488],["▁secteur",-12.892934799194336],["ПС",-12.892993927001951],["ráz",-12.89303207397461],["ुल",-12.893057823181152],["▁гас",-12.893061637878418],["bini",-12.893067359924316],["▁வைத்து",-12.893096923828123],["jalo",-12.893109321594238],["ందుకు",-12.893111228942873],["▁chroni",-12.89311981201172],["ውያን",-12.89312744140625],["тивно",-12.893128395080566],["▁blue",-12.89315414428711],["reak",-12.893158912658691],["绿",-12.893160820007324],["▁fus",-12.893162727355955],["▁pedra",-12.893195152282717],["這一",-12.89321994781494],["▁vola",-12.893221855163574],["sarja",-12.893234252929688],["▁ترس",-12.893238067626951],["▁အစိုးရ",-12.893271446228027],["▁гісторыі",-12.893274307250977],["▁جيڪڏهن",-12.893274307250977],["▁दर्ता",-12.893278121948242],["▁habla",-12.893298149108888],["▁монгол",-12.893298149108888],["▁грудня",-12.893304824829102],["▁Všechn",-12.89331340789795],["▁בכלל",-12.89332103729248],["ੋਰ",-12.893324851989746],["cú",-12.89333438873291],["▁ibland",-12.893335342407228],["▁Китай",-12.893349647521973],["▁Tut",-12.893399238586426],["▁اسٹ",-12.893400192260742],["photo",-12.893421173095703],["▁publika",-12.893442153930664],["ałem",-12.893489837646484],["▁ငါ",-12.893505096435549],["ເອົາ",-12.893506050109863],["▁Jas",-12.893548011779783],["▁الشعر",-12.893580436706545],["▁naredi",-12.89358901977539],["polo",-12.89359188079834],["मुख",-12.893597602844238],["让他",-12.893598556518556],["▁بيا",-12.893630981445312],["▁kryesore",-12.893648147583008],["रस",-12.893651008605955],["стала",-12.893665313720703],["ļiem",-12.893678665161133],["▁djece",-12.893694877624512],["已有",-12.89370346069336],["▁خبرې",-12.893750190734863],["▁vajalik",-12.893758773803713],["▁политики",-12.89384651184082],["▁foot",-12.893860816955566],["enburg",-12.893919944763184],["فاء",-12.893924713134766],["נצ",-12.893963813781738],["మెంట్",-12.89398956298828],["daw",-12.894013404846191],["▁absorb",-12.894031524658203],["▁жените",-12.894058227539062],["କାରୀ",-12.894086837768556],["▁бит",-12.894115447998049],["1,5",-12.894137382507324],["▁ಪಕ್ಷ",-12.894168853759766],["▁ខាង",-12.894173622131348],["▁acele",-12.894203186035156],["▁தமிழக",-12.894210815429688],["▁alkoholi",-12.894217491149902],["віль",-12.894231796264648],["飽",-12.894253730773926],["させる",-12.894272804260254],["▁ගිහින්",-12.894288063049316],["▁Жалпы",-12.89428997039795],["▁төсөл",-12.89428997039795],["▁бизнис",-12.894296646118164],["▁አላ",-12.894331932067873],["食べ",-12.894333839416504],["▁bukti",-12.8943452835083],["០០",-12.89435577392578],["▁temi",-12.89436149597168],["રમ",-12.894375801086426],["▁gözəl",-12.894412994384766],["▁medium",-12.894424438476562],["уудын",-12.89445686340332],["▁Κο",-12.894458770751951],["▁1-0",-12.894474029541016],["пя",-12.89447784423828],["▁سپس",-12.89447784423828],["▁согласно",-12.894497871398926],["▁මුල්",-12.894511222839355],["▁volledig",-12.89451503753662],["यू",-12.894518852233888],["▁sapere",-12.894522666931152],["їв",-12.894532203674316],["▁ngôi",-12.894543647766112],["▁terkenal",-12.894574165344238],["▁بولسا",-12.894619941711426
],["▁поток",-12.894639015197754],["evole",-12.894648551940918],["來到",-12.894661903381348],["▁uşaq",-12.894675254821776],["स्प",-12.894695281982422],["▁potrebe",-12.894767761230469],["leven",-12.894819259643556],["lysning",-12.894848823547363],["高い",-12.894848823547363],["ås",-12.894855499267578],["▁rapporto",-12.89486312866211],["▁ичинде",-12.894866943359377],["jän",-12.894886016845703],["▁Tallinn",-12.894927978515623],["ဘီ",-12.894967079162598],["▁యా",-12.89496898651123],["▁મહા",-12.894973754882812],["központ",-12.894978523254396],["▁phê",-12.894979476928713],["▁зва",-12.894983291625977],["čast",-12.89498805999756],["▁საჭირო",-12.895008087158203],["▁megér",-12.89507293701172],["▁місця",-12.895073890686035],["ላችሁ",-12.895078659057615],["▁gerçekleş",-12.895095825195312],["▁Jakob",-12.895115852355955],["▁ildən",-12.895135879516602],["▁משה",-12.895155906677246],["nina",-12.895171165466309],["▁inkişafı",-12.895174026489258],["IKE",-12.89517593383789],["▁eventual",-12.895180702209473],["hita",-12.895181655883787],["▁mâ",-12.895216941833496],["渠道",-12.89525032043457],["毕竟",-12.895251274108888],["▁päris",-12.895262718200684],["▁Reporter",-12.895268440246582],["ंच्या",-12.89527416229248],["▁ਗੱਲ",-12.895276069641112],["sist",-12.89527702331543],["▁турнир",-12.895281791687012],["▁iletişim",-12.895303726196287],["▁mapupuksa",-12.895303726196287],["▁කෙසේ",-12.895304679870604],["▁существует",-12.895305633544922],["▁बाजार",-12.895308494567873],["▁людям",-12.89531135559082],["kawa",-12.895315170288086],["▁ирсэн",-12.895319938659668],["▁Maya",-12.895320892333984],["▁ਜਾਂਦਾ",-12.8953218460083],["▁turėtų",-12.895341873168944],["ическая",-12.895378112792969],["▁erabaki",-12.8953857421875],["rwy",-12.89541244506836],["年龄",-12.895419120788574],["▁ഒര",-12.895427703857422],["▁ຍ",-12.895427703857422],["▁fuerte",-12.895462989807127],["▁søge",-12.895509719848633],["▁училище",-12.895520210266112],["▁leader",-12.895522117614746],["රණය",-12.895540237426758],["▁ரூ",-12.895547866821287],["▁රහ",-12.895554542541504],["▁Пад",-12.895572662353516],["ματος",-12.895600318908691],["▁karna",-12.89560317993164],["رسي",-12.895606994628906],["▁সেই",-12.89561653137207],["დენ",-12.895621299743652],["▁Ето",-12.89565372467041],["▁า",-12.895655632019045],["▁burger",-12.895694732666016],["▁dhënë",-12.895709991455078],["▁fórum",-12.895723342895508],["īvu",-12.895730018615724],["ጥር",-12.895776748657228],["▁экономика",-12.895796775817873],["▁îmi",-12.89581298828125],["र्द",-12.895814895629885],["最多",-12.895848274230955],["gebiet",-12.89585018157959],["ār",-12.89589023590088],["▁Scho",-12.89589023590088],["▁kurdan",-12.89590072631836],["legging",-12.895951271057127],["▁ниже",-12.895956039428713],["rime",-12.895966529846191],["▁रोज",-12.895974159240724],["chè",-12.89599895477295],["▁функция",-12.896008491516112],["gelegt",-12.896013259887695],["▁pys",-12.8960599899292],["mora",-12.89608097076416],["ивши",-12.89608669281006],["▁Lun",-12.896088600158691],["tionibus",-12.896090507507324],["▁үеийн",-12.896100997924805],["կին",-12.896111488342283],["▁питање",-12.896127700805664],["ünde",-12.896140098571776],["TUS",-12.896142959594728],["▁BY",-12.896148681640623],["▁guess",-12.896153450012209],["▁רכב",-12.896153450012209],["▁घड",-12.896167755126951],["▁objetivos",-12.896188735961914],["iraju",-12.896193504333496],["entzat",-12.896197319030762],["▁මොකද",-12.896230697631836],["▁zostały",-12.89624309539795],["▁siyaset",-12.896255493164062],["逐渐",-12.89625644683838],["ຄື",-12.896273612976074],["▁كامل",-12.896288871765137],["▁शुरु",-12.896300315856934],["
▁szeptember",-12.896319389343262],["▁декември",-12.896319389343262],["▁ottenere",-12.896320343017578],["▁chéile",-12.896323204040527],["▁النبي",-12.896324157714844],["▁màn",-12.896331787109377],["risk",-12.896336555480955],["▁opinión",-12.89634609222412],["▁dayan",-12.896353721618652],["γει",-12.896374702453612],["уха",-12.89637565612793],["hodno",-12.896376609802246],["bilité",-12.896387100219728],["▁mezzo",-12.896388053894045],["▁EV",-12.89638900756836],["▁जम",-12.896404266357422],["▁kapcsolatos",-12.896414756774902],["▁pergunta",-12.896431922912598],["몰",-12.89646053314209],["▁Kho",-12.896571159362791],["▁лог",-12.896583557128906],["social",-12.896604537963867],["negara",-12.896620750427246],["его",-12.896626472473145],["▁टाक",-12.896631240844728],["ங்களுக்கு",-12.89663791656494],["▁ironi",-12.89665985107422],["▁гост",-12.896661758422852],["нів",-12.896672248840332],["▁Bayan",-12.896678924560549],["ZU",-12.896717071533203],["▁Ethiopia",-12.89671802520752],["mbur",-12.896727561950684],["azh",-12.896739959716797],["仲",-12.89674472808838],["ιακό",-12.896753311157228],["itás",-12.896768569946287],["▁عليك",-12.89677619934082],["▁我们",-12.896780014038086],["▁آرام",-12.896796226501465],["ությունից",-12.89680004119873],["ndosi",-12.896804809570312],["▁viņi",-12.89686393737793],["▁verbo",-12.896867752075195],["▁veut",-12.896868705749512],["▁включва",-12.896869659423828],["▁escala",-12.89687728881836],["పర్",-12.896892547607422],["ლოს",-12.896921157836914],["واد",-12.896950721740724],["лац",-12.89695167541504],["ьні",-12.896966934204102],["▁Magazine",-12.896991729736328],["▁nese",-12.897002220153809],["▁தோ",-12.897048950195312],["▁Др",-12.897064208984377],["ளா",-12.89707374572754],["ივ",-12.897074699401855],["▁формат",-12.897077560424805],["▁නොහැකි",-12.897088050842283],["idio",-12.897109985351562],["προ",-12.897117614746094],["PB",-12.897159576416016],["▁Mijn",-12.89721393585205],["付出",-12.897233963012695],["▁Style",-12.897258758544922],["▁PAN",-12.897271156311035],["连续",-12.897274017333984],["หาย",-12.897316932678224],["▁eleva",-12.897319793701172],["▁latviešu",-12.897336959838867],["▁maendeleo",-12.897336959838867],["▁порушення",-12.897337913513184],["▁наистина",-12.8973388671875],["▁समाधान",-12.8973388671875],["▁xogo",-12.897339820861816],["▁ପାଣି",-12.897347450256348],["健身",-12.897348403930664],["қатынас",-12.89735507965088],["ívne",-12.89735984802246],["lation",-12.897404670715332],["▁Hiện",-12.897422790527344],["▁शासन",-12.897430419921877],["▁terlebih",-12.897436141967772],["▁مقدس",-12.897436141967772],["▁dec",-12.897499084472656],["▁skoraj",-12.89752197265625],["▁krém",-12.897555351257324],["kst",-12.89755630493164],["旅客",-12.897576332092283],["поч",-12.897629737854004],["▁নেতা",-12.897634506225586],["▁långt",-12.897649765014648],["▁mãe",-12.897650718688965],["▁ਭਰ",-12.897679328918455],["▁kW",-12.897687911987305],["▁ры",-12.89769458770752],["product",-12.897713661193848],["ствия",-12.89773941040039],["▁elimu",-12.89779567718506],["▁raš",-12.89781093597412],["vaiht",-12.89782428741455],["▁irudi",-12.89784336090088],["▁წინააღმდეგ",-12.897862434387209],["ovski",-12.897863388061523],["ಕ್ಸ್",-12.897899627685549],["▁Bhi",-12.897930145263672],["lani",-12.897941589355469],["▁weight",-12.898000717163086],["irala",-12.898004531860352],["▁Links",-12.898005485534668],["▁Bü",-12.8980131149292],["ควย",-12.898016929626465],["blon",-12.898029327392578],["▁ចិន",-12.898037910461426],["▁ойын",-12.898038864135742],["▁Basa",-12.898051261901855],["相手",-12.898054122924805],["▁ESP",-12.898056030273438],["ائف",-12.8
98099899291992],["▁przekaz",-12.898111343383787],["▁256",-12.898118019104004],["▁dorp",-12.898127555847168],["伝え",-12.898128509521484],["▁laps",-12.898158073425291],["რული",-12.898213386535645],["▁גיל",-12.898233413696287],["▁неиз",-12.898235321044922],["▁ځل",-12.898307800292969],["▁skóry",-12.898321151733398],["丰富",-12.898329734802246],["džių",-12.898343086242676],["ILL",-12.898347854614258],["▁alcohol",-12.898355484008787],["▁मुद्दा",-12.898355484008787],["公民",-12.898414611816406],["▁proyectos",-12.898422241210938],["今回",-12.89843463897705],["▁уул",-12.898452758789062],["▁njezin",-12.89845848083496],["▁практично",-12.898473739624023],["▁prístup",-12.898478507995604],["▁funda",-12.898515701293944],["▁Kampung",-12.898521423339844],["▁nisam",-12.898527145385742],["നായി",-12.898547172546388],["▁ovoj",-12.898547172546388],["▁لله",-12.898554801940918],["▁šia",-12.898573875427246],["▁seotud",-12.898625373840332],["▁laisv",-12.898627281188965],["人都",-12.898636817932127],["vų",-12.898642539978027],["▁kval",-12.898648262023926],["Android",-12.898662567138672],["▁korban",-12.898681640625],["ತ್ವ",-12.898687362670898],["▁midt",-12.898689270019531],["▁דברים",-12.898736953735352],["▁профес",-12.89876937866211],["▁cul",-12.898777961730955],["ครบ",-12.89878273010254],["linda",-12.898838996887209],["▁Reis",-12.898838996887209],["▁construir",-12.898845672607422],["▁Action",-12.898862838745115],["もない",-12.898863792419434],["▁225",-12.898897171020508],["▁රති",-12.89890480041504],["jua",-12.89891529083252],["▁pate",-12.89891529083252],["ДЫ",-12.898975372314451],["▁personlige",-12.89899730682373],["지도",-12.899018287658691],["țiile",-12.899051666259766],["▁írás",-12.899077415466309],["fti",-12.899087905883787],["▁ავ",-12.899185180664062],["▁இந்தியா",-12.899205207824709],["▁baya",-12.899216651916504],["▁小",-12.89923858642578],["τικού",-12.899261474609377],["droš",-12.89928150177002],["▁でも",-12.899296760559082],["public",-12.899301528930664],["cale",-12.899304389953612],["▁එසේ",-12.899312019348145],["▁vivere",-12.899320602416992],["▁Львів",-12.899346351623535],["▁මාස",-12.899370193481444],["施設",-12.899370193481444],["▁охорони",-12.899374961853027],["▁съществува",-12.899374961853027],["▁האָבן",-12.899377822875977],["▁muziek",-12.899394035339355],["▁apríl",-12.899405479431152],["▁DB",-12.89942455291748],["▁لاکھ",-12.899455070495604],["Pod",-12.899470329284668],["▁Porta",-12.899473190307615],["ывает",-12.899490356445312],["幻",-12.899490356445312],["wych",-12.899510383605955],["▁ఇంత",-12.89951515197754],["▁الواقع",-12.899523735046388],["lauf",-12.899563789367676],["sjóð",-12.899587631225586],["播放",-12.899596214294434],["▁matern",-12.89962673187256],["▁Tú",-12.899651527404783],["▁habita",-12.89965534210205],["데이",-12.89966106414795],["▁kuvat",-12.899664878845217],["▁mazāk",-12.899686813354492],["▁თე",-12.899699211120604],["▁Josef",-12.899709701538086],["▁diru",-12.899721145629885],["語言",-12.899738311767578],["行程",-12.899749755859377],["▁필",-12.89976978302002],["рада",-12.899770736694336],["ಟು",-12.899776458740234],["研究所",-12.899776458740234],["ใช้งาน",-12.89979076385498],["स्था",-12.899797439575195],["▁голову",-12.89980697631836],["▁miejsc",-12.899813652038574],["кви",-12.89981460571289],["▁propone",-12.899825096130373],["▁bedrijven",-12.899839401245115],["▁koncu",-12.89984130859375],["▁ठीक",-12.899860382080078],["coj",-12.899861335754396],["还在",-12.89989185333252],["▁formål",-12.89990520477295],["▁erabili",-12.899907112121582],["授",-12.899910926818848],["yayê",-12.899919509887695],["▁വന്നു",-12.899984359741213],["AZI"
,-12.900004386901855],["▁1934",-12.900009155273438],["▁منځ",-12.900029182434082],["အေၾကာင္း",-12.900110244750977],["ANTA",-12.900127410888672],["Cómo",-12.900132179260254],["実は",-12.900157928466797],["▁valgt",-12.900176048278809],["ΩΝ",-12.900177001953123],["▁личност",-12.900206565856934],["žili",-12.900217056274414],["しながら",-12.900227546691896],["επ",-12.900257110595703],["עו",-12.900257110595703],["▁127",-12.9002685546875],["▁Kopf",-12.9002685546875],["▁omni",-12.900272369384766],["шем",-12.900273323059082],["▁Фи",-12.900276184082031],["▁pequeno",-12.900285720825195],["▁Uy",-12.90030288696289],["▁sigurno",-12.900306701660156],["▁মানুষ",-12.900306701660156],["じゃない",-12.900310516357422],["stáva",-12.900341033935549],["▁cán",-12.900373458862305],["庄",-12.900375366210938],["櫃",-12.900375366210938],["क़",-12.900379180908203],["▁kështu",-12.900395393371582],["▁напълно",-12.900395393371582],["▁જોઈએ",-12.900404930114746],["▁বিএনপি",-12.90042781829834],["hop",-12.900443077087402],["banyak",-12.900452613830566],["▁ಅವರಿಗೆ",-12.900474548339844],["▁consist",-12.900479316711426],["▁קצת",-12.90049171447754],["cılar",-12.900521278381348],["▁השנה",-12.900537490844728],["▁Adres",-12.90053939819336],["▁hahaha",-12.90056324005127],["▁resolver",-12.900565147399902],["▁Integr",-12.900616645812988],["lacions",-12.900636672973633],["stve",-12.900640487670898],["টার",-12.900641441345217],["▁Vand",-12.900649070739746],["▁الإسلامية",-12.900681495666504],["▁nối",-12.9006986618042],["▁Варна",-12.90070915222168],["трима",-12.900716781616213],["koma",-12.900727272033691],["▁(24)",-12.900752067565918],["▁tău",-12.900778770446776],["usega",-12.900792121887209],["IKI",-12.900836944580078],["ஹ",-12.900861740112305],["▁•",-12.900861740112305],["进口",-12.90086841583252],["сну",-12.900873184204102],["▁Buenos",-12.900898933410645],["▁swój",-12.900904655456545],["מנה",-12.900920867919922],["▁Поли",-12.900938987731934],["▁ჯ",-12.900972366333008],["▁uveden",-12.900973320007324],["▁рай",-12.900975227355955],["750",-12.900978088378906],["▁Tudo",-12.900982856750488],["▁дні",-12.900992393493652],["▁информира",-12.90100383758545],["▁сайын",-12.901006698608398],["▁Mēs",-12.901018142700195],["▁generation",-12.90102481842041],["▁айтты",-12.901031494140623],["▁đen",-12.901040077209473],["▁nyaman",-12.90104866027832],["▁정책",-12.901053428649902],["mpang",-12.90107536315918],["ඒ",-12.901076316833496],["zaro",-12.901097297668455],["▁المال",-12.901097297668455],["pens",-12.901103973388672],["▁نیک",-12.901103973388672],["kého",-12.901113510131836],["ગર",-12.901119232177734],["mbuh",-12.901155471801758],["sätt",-12.901193618774414],["gona",-12.901208877563477],["▁البلد",-12.901239395141602],["льных",-12.901269912719728],["mee",-12.901270866394045],["▁خالد",-12.901272773742676],["handa",-12.901284217834473],["سكن",-12.90129280090332],["ljivi",-12.901310920715332],["▁nielen",-12.901315689086914],["▁CHP",-12.901342391967772],["กลาก",-12.901350021362305],["▁ចុះ",-12.90135383605957],["軟",-12.901381492614746],["▁romani",-12.901411056518556],["▁رژیم",-12.901416778564451],["▁বিরুদ্ধে",-12.901416778564451],["압",-12.901416778564451],["▁Yönetim",-12.90141773223877],["▁István",-12.901424407958984],["▁Хотя",-12.901450157165527],["臭",-12.901453971862791],["▁unele",-12.901458740234377],["▁värde",-12.901464462280272],["▁बजेट",-12.901466369628906],["эгч",-12.901469230651855],["▁조금",-12.901493072509766],["ppet",-12.901507377624512],["▁Срба",-12.901515007019045],["▁planning",-12.901530265808104],["rakan",-12.901534080505373],["долу",-12.9015531539917],["็ก",-12.901
571273803713],["▁faqe",-12.90157413482666],["▁forteller",-12.901575088500977],["▁>>>",-12.901606559753418],["ړي",-12.901618957519531],["▁จน",-12.901637077331545],["女士",-12.90163803100586],["hanga",-12.901638984680176],["ۈم",-12.901644706726074],["▁Curso",-12.901671409606934],["年間",-12.901680946350098],["▁Slovensko",-12.90170192718506],["рив",-12.901723861694336],["ద్ధ",-12.901723861694336],["▁octubre",-12.901724815368652],["▁parfum",-12.901778221130373],["უდ",-12.901784896850586],["▁እንዳይ",-12.90178680419922],["loq",-12.901813507080078],["▁eszköz",-12.901813507080078],["▁pendapat",-12.901819229125977],["▁ସ୍ୱ",-12.901830673217772],["▁өрт",-12.90183162689209],["▁1:1",-12.901864051818848],["▁efektiv",-12.901884078979492],["айым",-12.901897430419922],["▁hacen",-12.901939392089844],["▁139",-12.901965141296388],["თე",-12.902006149291992],["дзел",-12.902023315429688],["▁tipp",-12.90206241607666],["värd",-12.902064323425291],["▁கல்வி",-12.90206527709961],["▁prepar",-12.902082443237305],["▁klassi",-12.902118682861328],["ometri",-12.90213108062744],["menin",-12.902132987976074],["▁stampa",-12.902137756347656],["ává",-12.902155876159668],["▁parang",-12.902179718017578],["▁դրա",-12.902182579040527],["ುತ್ತಿದ್ದಾರೆ",-12.902236938476562],["cê",-12.90223789215088],["▁luka",-12.90225315093994],["▁стая",-12.90225315093994],["jeće",-12.902254104614258],["▁الام",-12.902280807495115],["▁Patri",-12.902325630187988],["pung",-12.902350425720217],["素质",-12.902362823486328],["▁folosi",-12.9024019241333],["敵",-12.902403831481934],["▁стрел",-12.902426719665527],["階段",-12.902426719665527],["▁రివ్యూ",-12.90242862701416],["▁σύστημα",-12.90243911743164],["▁ਕਾਂਗਰਸ",-12.90243911743164],["▁മികച്ച",-12.90243911743164],["▁තොරතුරු",-12.902440071105955],["▁контекст",-12.902445793151855],["▁πράγματα",-12.902453422546388],["▁Общи",-12.902454376220703],["▁piccolo",-12.9024658203125],["▁ragazzi",-12.902474403381348],["ชนะ",-12.902481079101562],["▁ජීවිතය",-12.90248966217041],["ହୁ",-12.902512550354004],["▁antolatu",-12.9025297164917],["▁veckor",-12.902530670166016],["▁խմբ",-12.902530670166016],["▁ధర",-12.90255355834961],["▁شیخ",-12.902565956115724],["fiz",-12.90256690979004],["itatis",-12.902572631835938],["▁ништа",-12.902618408203123],["▁ട്ട്",-12.90265655517578],["▁налогов",-12.902664184570312],["▁tänka",-12.902674674987791],["▁барып",-12.902682304382324],["annu",-12.902719497680664],["▁експерт",-12.90272045135498],["▁ಮತ್ತೆ",-12.902739524841309],["▁SAL",-12.902742385864258],["▁rahva",-12.902742385864258],["▁قص",-12.902751922607422],["▁fisi",-12.902753829956056],["ős",-12.902755737304688],["rawat",-12.90275764465332],["ประกาศ",-12.902758598327637],["ၾကာ",-12.90276050567627],["Part",-12.902776718139648],["ISO",-12.902777671813965],["юн",-12.902788162231444],["▁సమ",-12.90279769897461],["司法",-12.902810096740724],["▁měli",-12.90284824371338],["dach",-12.902856826782228],["peš",-12.902873039245604],["클",-12.902880668640137],["▁prepozna",-12.902898788452148],["шкан",-12.902914047241213],["▁माल",-12.90292739868164],["uwen",-12.902952194213867],["▁Ade",-12.902963638305664],["為主",-12.90297794342041],["▁бренд",-12.903034210205078],["Wo",-12.903036117553713],["emploi",-12.90311336517334],["▁tehát",-12.903115272521973],["rell",-12.903121948242188],["하실",-12.903156280517578],["▁уақыт",-12.903173446655272],["▁Μέ",-12.903192520141602],["एक",-12.903243064880373],["▁Ér",-12.903268814086914],["▁programe",-12.903291702270508],["▁uztur",-12.903318405151367],["書き",-12.903363227844238],["▁مات",-12.903419494628906],["ienne",-12.903425216674805],["ഹാ",-12.903
428077697754],["dona",-12.90345573425293],["ዳር",-12.90345573425293],["▁Nutzung",-12.903462409973145],["▁Республики",-12.903462409973145],["▁එක්සත්",-12.903462409973145],["▁Məmmədov",-12.90346336364746],["▁pup",-12.90346336364746],["▁quattro",-12.90346336364746],["▁kipindi",-12.90346908569336],["▁feedback",-12.903470993041992],["▁ринку",-12.903471946716309],["為什麼",-12.903471946716309],["顏色",-12.903482437133787],["ultimo",-12.903491020202637],["▁жағдай",-12.903512954711914],["▁비교",-12.903512954711914],["▁jurnalist",-12.903523445129396],["▁smrt",-12.903559684753418],["ంతో",-12.903564453125],["▁त्याच्या",-12.90357780456543],["▁соработка",-12.903578758239746],["▁апарат",-12.90359592437744],["▁indah",-12.903599739074709],["ანა",-12.90362548828125],["▁질문",-12.903661727905272],["生日",-12.903672218322754],["▁തീ",-12.903722763061523],["шты",-12.903743743896484],["könyv",-12.903761863708496],["лектр",-12.903778076171877],["▁Бид",-12.903801918029783],["стин",-12.903892517089844],["▁bapa",-12.903904914855955],["ួន",-12.903918266296388],["▁entreprise",-12.903934478759766],["▁даних",-12.903985023498535],["▁(2018)",-12.903986930847168],["AŞ",-12.903989791870115],["▁Nisan",-12.904017448425291],["ৰা",-12.90403652191162],["এম",-12.904041290283203],["ມີຄວາມ",-12.904054641723633],["▁Wid",-12.90405559539795],["పూర్",-12.904099464416504],["цијата",-12.904101371765137],["▁duwan",-12.904118537902832],["▁Ina",-12.90412712097168],["dīt",-12.904133796691896],["▁כפי",-12.904133796691896],["▁expo",-12.904147148132324],["広",-12.904160499572754],["中に",-12.904184341430664],["خلاق",-12.904199600219728],["▁Δε",-12.90420150756836],["endő",-12.904220581054688],["感受到",-12.90422248840332],["▁bagay",-12.904231071472168],["គ្រោះ",-12.904253959655762],["▁menghadapi",-12.904260635375977],["▁hamba",-12.90427589416504],["▁Zwei",-12.904298782348633],["▁нама",-12.904354095458984],["risida",-12.904373168945312],["▁jövő",-12.904399871826172],["baig",-12.904400825500488],["歷",-12.904410362243652],["▁josta",-12.90443229675293],["寬",-12.90445041656494],["▁Przed",-12.904470443725586],["取消",-12.904484748840332],["▁आन्दोलन",-12.904486656188965],["决",-12.904486656188965],["▁Jerusalem",-12.90448760986328],["▁entonces",-12.90448760986328],["▁kekuatan",-12.90448760986328],["▁někdo",-12.90448760986328],["▁дизајн",-12.90448760986328],["▁इस्तेमाल",-12.90448760986328],["ebuah",-12.904488563537598],["▁գումար",-12.904492378234863],["▁študij",-12.904494285583496],["難しい",-12.904498100280762],["▁película",-12.904501914978027],["▁शिक्षण",-12.90450382232666],["▁vyš",-12.904507637023926],["analyse",-12.904515266418455],["šće",-12.904518127441406],["ėjimas",-12.904539108276367],["▁Jenter",-12.904544830322266],["dant",-12.904566764831545],["споко",-12.904603958129885],["▁gaudir",-12.904621124267578],["▁млад",-12.904638290405272],["▁ඇයි",-12.90466022491455],["▁uważa",-12.904672622680664],["တိုင္း",-12.904681205749512],["એસ",-12.90473175048828],["▁ترکیه",-12.904749870300291],["nota",-12.90477180480957],["▁reach",-12.904780387878418],["▁saba",-12.90478801727295],["好き",-12.904791831970217],["▁кратко",-12.904794692993164],["▁नए",-12.90479850769043],["ბელი",-12.904804229736328],["▁évek",-12.90488052368164],["גישה",-12.904882431030272],["ējā",-12.904908180236816],["sali",-12.90492057800293],["▁sendi",-12.90496826171875],["▁मिळ",-12.90496826171875],["таж",-12.904999732971191],["▁Funk",-12.90503215789795],["▁Women",-12.905035972595217],["ப்பை",-12.905036926269531],["▁жир",-12.905058860778809],["př",-12.90509796142578],["▁դի",-12.905107498168944],["tyka",-12.90511226654052
7],["▁среда",-12.905142784118652],["▁माँ",-12.905144691467283],["▁यसको",-12.905184745788574],["БР",-12.90518856048584],["▁Spin",-12.905216217041016],["skipti",-12.905223846435549],["obo",-12.905226707458496],["▁wife",-12.905301094055176],["▁possi",-12.905322074890137],["▁kakor",-12.90532684326172],["▁ජනතාව",-12.905348777770996],["როს",-12.905381202697754],["orem",-12.905413627624512],["penzi",-12.905444145202637],["აკ",-12.90545654296875],["▁Име",-12.905463218688965],["profil",-12.90546989440918],["▁မှ",-12.905487060546877],["适应",-12.905488014221191],["สน",-12.905491828918455],["▁въпроси",-12.905494689941406],["▁голова",-12.905500411987305],["▁lezen",-12.90550136566162],["小朋友",-12.905502319335938],["▁khởi",-12.905512809753418],["▁нутгийн",-12.905512809753418],["▁अनुसन्धान",-12.905512809753418],["▁پارتىيە",-12.90551471710205],["▁konfirm",-12.905519485473633],["▁panorama",-12.905525207519531],["▁März",-12.905531883239746],["▁1924",-12.905558586120604],["▁četiri",-12.905558586120604],["▁مستقبل",-12.905577659606934],["ണമെന്ന്",-12.905614852905272],["▁aloit",-12.90564250946045],["▁مرض",-12.90564250946045],["▁кабар",-12.905646324157717],["▁Week",-12.905652046203612],["▁ნო",-12.905670166015623],["▁temiz",-12.905683517456056],["ファン",-12.905683517456056],["ჩა",-12.905720710754396],["▁názor",-12.90573787689209],["▁sikap",-12.905742645263672],["izzare",-12.905756950378418],["क्या",-12.905763626098633],["▁swojego",-12.905845642089844],["▁Baile",-12.90591526031494],["tuig",-12.905924797058104],["▁विज्ञान",-12.90595245361328],["▁dient",-12.905963897705078],["▁чест",-12.905964851379396],["▁ishlar",-12.90597438812256],["ლური",-12.905998229980469],["▁pav",-12.90601921081543],["asje",-12.90607738494873],["▁palet",-12.906095504760742],["case",-12.906109809875488],["▁إسرائيل",-12.906112670898438],["hamba",-12.906116485595703],["チャ",-12.906116485595703],["▁cifra",-12.906122207641602],["kerfi",-12.90612506866455],["▁хлоп",-12.906133651733398],["صة",-12.90614128112793],["▁kenal",-12.90614891052246],["موا",-12.906173706054688],["▁अभ्यास",-12.906210899353027],["▁moze",-12.906240463256836],["▁természet",-12.90626335144043],["ქს",-12.906268119812012],["อด",-12.9063081741333],["▁profund",-12.906310081481934],["▁sigue",-12.9063138961792],["▁loft",-12.90632152557373],["ڙا",-12.906331062316896],["▁ରୁ",-12.90634536743164],["bhu",-12.906397819519045],["▁sū",-12.906397819519045],["戒",-12.906400680541992],["primi",-12.906412124633787],["▁cousas",-12.906418800354004],["▁привлек",-12.906423568725586],["▁neob",-12.906472206115724],["ටු",-12.906484603881836],["乙",-12.90651512145996],["大多数",-12.906522750854492],["▁duygu",-12.906533241271973],["▁kapsamında",-12.906539916992188],["▁રહ્યો",-12.906539916992188],["콘",-12.906539916992188],["▁създаде",-12.906540870666504],["▁parfois",-12.90654182434082],["▁Vikimedia",-12.90654468536377],["▁атындағы",-12.906549453735352],["▁अन्तिम",-12.906550407409668],["ငွေ",-12.906560897827148],["产品的",-12.906561851501465],["ndada",-12.906572341918944],["▁Homo",-12.906575202941896],["▁Abraham",-12.906577110290527],["▁Русије",-12.90658950805664],["▁задачи",-12.906628608703612],["▁guvern",-12.90663242340088],["▁हुँदै",-12.90671157836914],["ጠራ",-12.906780242919922],["升级",-12.906785011291504],["▁prosím",-12.90678882598877],["ségi",-12.906789779663086],["▁ਗੁ",-12.90679168701172],["жал",-12.90684700012207],["дл",-12.906867980957031],["ဆက္",-12.906869888305664],["▁čita",-12.906904220581056],["▁Sicherheit",-12.90694522857666],["▁inga",-12.906962394714355],["▁ഭാഗ",-12.906978607177734],["をしている",-12.90701389312744
],["▁बनाए",-12.90703582763672],["▁teki",-12.9070463180542],["askan",-12.907048225402832],["▁Eigen",-12.90707015991211],["rû",-12.907095909118652],["cré",-12.907115936279297],["融",-12.907121658325195],["wód",-12.907136917114258],["▁použití",-12.907139778137209],["mét",-12.9071683883667],["▁ورد",-12.90717601776123],["▁nenu",-12.907200813293455],["ມະ",-12.907219886779783],["คม",-12.907227516174316],["Ха",-12.90724277496338],["ভি",-12.907249450683594],["▁контракт",-12.907268524169922],["▁ኤ",-12.90729808807373],["فتح",-12.90730094909668],["▁bajarê",-12.907318115234377],["▁ungdoms",-12.907318115234377],["▁organizm",-12.907356262207031],["▁reca",-12.907360076904297],["▁ຮ",-12.907370567321776],["ುತ್ತಾ",-12.90737533569336],["▁பர",-12.907378196716309],["ஷா",-12.907394409179688],["टि",-12.907408714294434],["strand",-12.907425880432127],["presa",-12.90743923187256],["▁gelecek",-12.907442092895508],["▁талант",-12.907456398010254],["▁gości",-12.907465934753418],["促進",-12.907520294189451],["cris",-12.90753936767578],["률",-12.907563209533691],["▁entscheiden",-12.907567024230955],["▁διαδικασία",-12.907567024230955],["▁сна",-12.907567024230955],["▁높은",-12.907567024230955],["▁pequeña",-12.90756893157959],["▁مضمون",-12.90757179260254],["▁மூலம்",-12.90757179260254],["战争",-12.90757656097412],["▁ابزار",-12.907598495483398],["▁piškotke",-12.907600402832031],["▁ከሆነ",-12.907605171203612],["LF",-12.907611846923828],["▁Մար",-12.90761661529541],["▁Устав",-12.907638549804688],["▁каким",-12.907638549804688],["▁ສ້າງ",-12.907649040222168],["▁inglés",-12.9076509475708],["▁muestra",-12.907658576965332],["▁сака",-12.90768051147461],["ангел",-12.907713890075684],["▁مير",-12.907713890075684],["▁areas",-12.90771770477295],["▁երկրի",-12.90774154663086],["viden",-12.907774925231934],["▁hid",-12.907791137695312],["Ai",-12.907806396484377],["म्भ",-12.907827377319336],["▁टि",-12.90785789489746],["▁Zor",-12.907859802246094],["▁controllo",-12.90786075592041],["เลข",-12.907880783081056],["jale",-12.907888412475586],["alit",-12.90790557861328],["▁प्रहरीले",-12.907928466796877],["ettiin",-12.907944679260254],["▁justifica",-12.907958030700684],["ilija",-12.907971382141112],["▁קור",-12.907976150512695],["▁mikor",-12.908052444458008],["▁מעט",-12.908068656921388],["▁รีวิว",-12.908074378967283],["▁կառավարության",-12.90809726715088],["▁valmi",-12.908113479614258],["ുകയായിരുന്നു",-12.908124923706056],["▁энерг",-12.908133506774902],["rodo",-12.90814971923828],["▁disp",-12.90814971923828],["▁աղ",-12.90815258026123],["သု",-12.908164024353027],["▁конкретно",-12.908195495605469],["▁знаком",-12.908204078674316],["▁player",-12.90831184387207],["▁Kuidas",-12.908340454101562],["▁ஈ",-12.908345222473145],["skov",-12.908360481262209],["十八",-12.908374786376951],["vą",-12.908391952514648],["arium",-12.908404350280762],["▁Luka",-12.908409118652344],["▁Pig",-12.908411979675291],["ுகிறது",-12.90841579437256],["▁Brad",-12.90841579437256],["atzea",-12.908416748046877],["▁ซื้อ",-12.908416748046877],["Del",-12.908424377441406],["▁állam",-12.908469200134276],["ந்திர",-12.908479690551758],["▁одному",-12.90849494934082],["jär",-12.908499717712402],["▁Siste",-12.908509254455566],["貸",-12.908514976501465],["မြ",-12.90853214263916],["▁maza",-12.908534049987791],["▁konta",-12.908552169799805],["▁değer",-12.908555030822754],["▁limba",-12.908571243286133],["鹿",-12.90857219696045],["ण्याचा",-12.908575057983398],["▁zawiera",-12.908577919006348],["▁melaksanakan",-12.90859603881836],["▁اضافہ",-12.90859603881836],["▁डाउनलोड",-12.90859603881836],["▁धन्यवाद",-12.90859603881836],["▁
ਮੈਨੂੰ",-12.908596992492676],["▁ისტორია",-12.908596992492676],["ກໍາ",-12.90860366821289],["▁дам",-12.908605575561523],["▁definitely",-12.908614158630373],["▁નહિ",-12.90861701965332],["▁García",-12.908617973327637],["▁gjendje",-12.9086275100708],["▁waardoor",-12.908656120300291],["▁شخصی",-12.90866756439209],["SON",-12.908674240112305],["▁ڏنو",-12.908676147460938],["▁simu",-12.908690452575684],["▁Müdürlüğü",-12.90869140625],["▁maaari",-12.908702850341797],["skyld",-12.908706665039062],["▁bü",-12.908706665039062],["▁მსოფლიო",-12.90871810913086],["tista",-12.908719062805176],["တိုက်",-12.90872287750244],["▁asja",-12.90873908996582],["▁غر",-12.90874481201172],["gés",-12.908770561218262],["YN",-12.908778190612791],["śnie",-12.908782958984377],["jies",-12.908785820007324],["今の",-12.908790588378906],["▁վերա",-12.908796310424805],["▁neues",-12.908803939819336],["지원",-12.908806800842283],["▁fillo",-12.908839225769045],["niecības",-12.908843994140623],["▁zehar",-12.908863067626951],["▁plag",-12.908865928649902],["▁manus",-12.908921241760254],["ložil",-12.908947944641112],["satte",-12.90895938873291],["aíochta",-12.908966064453123],["үгү",-12.908998489379885],["▁senón",-12.909013748168944],["ರಾಜ್",-12.90902328491211],["揭",-12.909058570861816],["▁hru",-12.909071922302246],["▁exige",-12.909092903137209],["୍ୱ",-12.909095764160156],["လက်",-12.9091157913208],["▁temporal",-12.909164428710938],["▁riski",-12.909199714660645],["лүк",-12.90929889678955],["▁մեկը",-12.909317016601562],["▁pohľad",-12.909324645996094],["▁składa",-12.909330368041992],["rbe",-12.90933609008789],["ស្រ",-12.909357070922852],["▁2-1",-12.909433364868164],["▁Morgen",-12.909452438354492],["φέρει",-12.909457206726074],["▁bûye",-12.909469604492188],["skil",-12.909507751464844],["▁говор",-12.90951442718506],["ờ",-12.909531593322754],["tř",-12.909570693969728],["קרא",-12.90957260131836],["▁hablar",-12.909584045410156],["очка",-12.909586906433104],["▁zdroj",-12.909588813781738],["▁habet",-12.909615516662598],["٣",-12.909623146057127],["▁Sebastian",-12.909626007080078],["▁menawarkan",-12.909626007080078],["▁yleensä",-12.909626007080078],["▁історії",-12.909626007080078],["かもしれない",-12.909632682800291],["▁Aceasta",-12.909642219543455],["▁jön",-12.90964698791504],["▁způsob",-12.909655570983888],["▁fuerza",-12.909661293029783],["культур",-12.909663200378418],["ілді",-12.909667015075684],["▁huge",-12.909668922424316],["▁העבודה",-12.909673690795898],["▁Vac",-12.909740447998049],["▁Slovenska",-12.909757614135742],["еце",-12.909759521484377],["▁രക്ഷ",-12.909762382507324],["▁čovjek",-12.90976905822754],["бру",-12.909770011901855],["▁cald",-12.909825325012209],["▁strá",-12.909828186035156],["ностью",-12.909844398498535],["關於",-12.90985107421875],["iui",-12.90989112854004],["▁dug",-12.909930229187012],["▁mély",-12.90996265411377],["ющей",-12.909975051879885],["▁loma",-12.910036087036133],["▁مرگ",-12.91005516052246],["ítani",-12.910073280334473],["▁larawan",-12.910093307495115],["▁Ira",-12.910116195678713],["ಸ್ಥ",-12.910134315490724],["▁Sach",-12.91014003753662],["▁Кер",-12.910146713256836],["▁أل",-12.91017723083496],["▁brengen",-12.910201072692873],["▁Täna",-12.910205841064451],["讓您",-12.9102144241333],["▁២០",-12.910232543945312],["ամբ",-12.910269737243652],["rifi",-12.910282135009766],["право",-12.91034698486328],["▁මිල",-12.910356521606444],["▁ידע",-12.910359382629396],["راك",-12.910367965698242],["BOR",-12.910370826721191],["cies",-12.910375595092772],["गाव",-12.910390853881836],["▁נמצא",-12.910408020019531],["▁specifi",-12.91042137145996],["▁Gear",-12.910
42709350586],["▁Như",-12.9104642868042],["kį",-12.910477638244627],["▁legitim",-12.910542488098145],["▁thủy",-12.91054344177246],["пли",-12.910581588745115],["▁kuda",-12.910600662231444],["唔",-12.910633087158203],["▁Honor",-12.91064167022705],["|",-12.910651206970217],["▁ўсіх",-12.910656929016112],["▁سربراہ",-12.910656929016112],["▁চট্টগ্রাম",-12.910656929016112],["▁아래",-12.910656929016112],["▁Képviselő",-12.91065788269043],["ումների",-12.91066551208496],["▁Điện",-12.910670280456545],["▁በማለት",-12.910674095153809],["▁أثناء",-12.91069221496582],["▁illustra",-12.910710334777832],["▁ڪندو",-12.910714149475098],["▁державного",-12.910768508911133],["קות",-12.910774230957031],["▁поза",-12.910774230957031],["▁profunda",-12.910786628723145],["▁Garten",-12.910828590393066],["▁dreptul",-12.910836219787598],["▁zločin",-12.91089153289795],["▁öğrenciler",-12.910894393920898],["otni",-12.910922050476074],["▁시설",-12.910935401916504],["▁eriti",-12.91094207763672],["▁спец",-12.910959243774414],["▁ಮೂಲ",-12.91096305847168],["▁teine",-12.910964965820312],["կար",-12.910977363586426],["ম্প",-12.910983085632324],["▁vietu",-12.910983085632324],["▁istəyir",-12.91099452972412],["گرد",-12.911016464233398],["▁финанси",-12.911017417907717],["▁комплект",-12.911022186279297],["pomen",-12.91102409362793],["▁eventi",-12.911025047302246],["▁идва",-12.911038398742676],["▁יכולים",-12.911049842834473],["भार",-12.911077499389648],["cılık",-12.91109848022461],["tuen",-12.911124229431152],["▁إنه",-12.911128044128418],["▁dž",-12.91113567352295],["فس",-12.911136627197266],["cī",-12.911139488220217],["வாய்",-12.911145210266112],["▁św",-12.91115665435791],["▁يس",-12.911195755004885],["▁svou",-12.9111967086792],["▁αξι",-12.9111967086792],["izza",-12.91123390197754],["лайн",-12.911252975463867],["OTO",-12.911290168762209],["șe",-12.911346435546877],["চা",-12.911355018615724],["▁भवन",-12.911365509033203],["▁Boot",-12.911368370056152],["kasi",-12.911376953125],["தால்",-12.911409378051758],["▁plazo",-12.911412239074709],["▁म्हण",-12.911425590515137],["ingo",-12.911447525024414],["▁Nature",-12.911466598510742],["▁Pierre",-12.911548614501951],["▁dalis",-12.911561012268066],["▁zis",-12.911602020263672],["▁halt",-12.911627769470217],["ုိး",-12.911649703979492],["оград",-12.91165828704834],["▁ສີ",-12.911665916442873],["▁Γι",-12.911672592163086],["▁Entertainment",-12.911688804626465],["▁ઇન્",-12.911691665649414],["ေထာင္",-12.911693572998049],["▁وسله",-12.911713600158691],["▁Muha",-12.91171646118164],["▁nevi",-12.91171646118164],["▁Kerk",-12.911717414855955],["▁εγώ",-12.911717414855955],["▁Chicago",-12.911723136901855],["▁15:00",-12.911724090576172],["▁вялікі",-12.91172981262207],["▁יא",-12.911738395690918],["▁prag",-12.911741256713867],["▁modele",-12.91177463531494],["алық",-12.91179084777832],["目の",-12.911794662475586],["▁susah",-12.91180419921875],["▁Support",-12.911820411682127],["дре",-12.91185474395752],["▁आत्म",-12.911857604980469],["▁تنه",-12.911870002746582],["▁Quer",-12.911890029907228],["▁ALL",-12.911895751953123],["mming",-12.91191291809082],["今回は",-12.91191864013672],["▁giy",-12.911921501159668],["▁kāds",-12.911970138549805],["ÖR",-12.911983489990234],["sidir",-12.912015914916992],["▁Klinik",-12.91202163696289],["१९",-12.912038803100586],["džiai",-12.912071228027344],["▁hatás",-12.912092208862305],["▁Profil",-12.912109375],["mig",-12.912110328674316],["▁skriv",-12.912148475646973],["▁এমন",-12.912155151367188],["▁compromet",-12.91217803955078],["ӨР",-12.912205696105955],["▁huy",-12.912220001220703],["▁Парт",-12.91222095489502],["別人",-1
2.912222862243652],["▁అక్కడ",-12.912236213684082],["▁данном",-12.912263870239258],["▁Thiru",-12.912307739257812],["▁Beratung",-12.912327766418455],["Sen",-12.912359237670898],["Ծ",-12.912364959716797],["▁chut",-12.912381172180176],["▁johta",-12.91241455078125],["▁121",-12.912435531616213],["stiprinā",-12.912439346313477],["▁wonderful",-12.912446975708008],["▁intui",-12.91246509552002],["▁dây",-12.9124755859375],["byr",-12.912487030029297],["ffin",-12.91249179840088],["▁famoso",-12.9125394821167],["▁sesión",-12.912558555603027],["▁bulu",-12.91256332397461],["comp",-12.912569046020508],["ѓа",-12.912628173828123],["фил",-12.912649154663086],["▁pessoal",-12.912656784057615],["▁Medien",-12.912680625915527],["吐",-12.912680625915527],["時間が",-12.91269302368164],["頓",-12.912701606750488],["нча",-12.912707328796388],["වෙනි",-12.912713050842283],["▁Davlat",-12.912721633911133],["▁luyện",-12.912721633911133],["▁ඇමති",-12.912721633911133],["▁Singapore",-12.91272258758545],["▁Sosyal",-12.912726402282717],["▁Museu",-12.91273021697998],["武器",-12.912731170654297],["求め",-12.91274070739746],["▁влияние",-12.91277027130127],["▁حفاظت",-12.912774085998535],["ຍັງ",-12.912786483764648],["就不",-12.912797927856444],["▁mier",-12.912802696228027],["▁करेंगे",-12.912814140319824],["▁situazione",-12.912826538085938],["▁դուք",-12.912837028503418],["tsiya",-12.91286849975586],["▁yekê",-12.912897109985352],["▁strādā",-12.912898063659668],["▁stanje",-12.91290283203125],["central",-12.912904739379885],["પણ",-12.912904739379885],["▁банков",-12.912930488586426],["▁stojí",-12.912961959838867],["▁മത",-12.91297721862793],["စုံ",-12.913002014160156],["意義",-12.913016319274902],["ដី",-12.913019180297852],["पणे",-12.913031578063965],["▁Още",-12.91307258605957],["qib",-12.913080215454102],["▁Manu",-12.913091659545898],["▁plata",-12.913110733032228],["▁услов",-12.913147926330566],["▁صنعتی",-12.9131498336792],["偷",-12.913168907165527],["ტენ",-12.913169860839844],["▁تعاون",-12.913193702697754],["▁Linda",-12.913213729858398],["▁bansa",-12.913287162780762],["▁verta",-12.91329574584961],["▁kommuner",-12.913330078125],["tesi",-12.91335391998291],["▁රීඩා",-12.913390159606934],["ಾಟ",-12.913393020629885],["baru",-12.913395881652832],["干部",-12.91342830657959],["▁결정",-12.913448333740234],["▁lielā",-12.913459777832031],["商店",-12.913459777832031],["gleich",-12.913469314575195],["▁rúm",-12.913470268249512],["xhi",-12.913477897644045],["க்கை",-12.91349983215332],["तम",-12.913524627685549],["tolo",-12.91354274749756],["▁Kongres",-12.913565635681152],["лях",-12.913576126098633],["ዘጋ",-12.913578987121582],["rise",-12.913579940795898],["डर",-12.91359519958496],["pozn",-12.913606643676758],["ንድ",-12.91362476348877],["bæ",-12.913629531860352],["õi",-12.913644790649414],["▁غور",-12.913671493530272],["▁toidu",-12.913703918457031],["▁tko",-12.91370677947998],["рой",-12.913728713989258],["▁wasn",-12.913737297058104],["สัตว์",-12.913752555847168],["0€",-12.9137544631958],["▁həmçinin",-12.913756370544434],["▁nabízí",-12.913756370544434],["▁ongeveer",-12.913756370544434],["▁විවිධ",-12.913756370544434],["▁dokaz",-12.9137601852417],["fyl",-12.913789749145508],["▁autonom",-12.91379165649414],["ဖူး",-12.913803100585938],["▁Закону",-12.91380786895752],["▁шарт",-12.913837432861328],["acte",-12.913872718811035],["ugh",-12.913898468017578],["▁naturlig",-12.913917541503906],["▁2000-",-12.913958549499512],["▁알고",-12.913975715637209],["▁zemi",-12.913978576660156],["▁ইন",-12.913979530334473],["чките",-12.913981437683104],["▁នា",-12.913981437683104],["වරුන්",-12.91398811340332],
["▁الأم",-12.91398811340332],["קייט",-12.913991928100586],["▁Vũ",-12.91400909423828],["▁εμπ",-12.914019584655762],["▁இருப்ப",-12.914047241210938],["▁wall",-12.914056777954102],["▁mengenal",-12.914064407348633],["▁cult",-12.91407871246338],["skott",-12.914084434509276],["▁miền",-12.91409683227539],["▁Spill",-12.914112091064451],["év",-12.9141206741333],["▁belangrik",-12.91412353515625],["▁ඩ",-12.9141845703125],["▁રિ",-12.914222717285156],["▁يع",-12.914229393005373],["▁Katalog",-12.91423797607422],["ection",-12.914240837097168],["攝影",-12.914243698120115],["▁skozi",-12.914257049560549],["koch",-12.914261817932127],["▁ആണ",-12.91427230834961],["▁disku",-12.91427516937256],["APA",-12.914298057556152],["ខ្ញុំ",-12.914299964904783],["▁groz",-12.914318084716797],["▁viloyati",-12.91433811187744],["▁зарим",-12.914392471313477],["ሰር",-12.914398193359377],["▁అను",-12.914401054382324],["шкі",-12.914427757263184],["责",-12.914440155029297],["gida",-12.914464950561523],["άκης",-12.914474487304688],["▁කිරීමේ",-12.914483070373535],["gebracht",-12.914484024047852],["runa",-12.914485931396484],["▁problemet",-12.914511680603027],["▁சோ",-12.914517402648926],["żu",-12.91455364227295],["bido",-12.914555549621582],["▁устав",-12.914575576782228],["▁মনে",-12.914596557617188],["STRA",-12.914628028869627],["sler",-12.914648056030272],["而已",-12.91465187072754],["▁Rent",-12.914678573608398],["▁ψυχ",-12.91469669342041],["▁пошук",-12.914700508117676],["іє",-12.914701461791992],["sė",-12.914703369140623],["hey",-12.914730072021484],["பே",-12.91476058959961],["机场",-12.914765357971191],["ලො",-12.914780616760254],["لىش",-12.91478443145752],["▁svůj",-12.914791107177734],["▁раньше",-12.914791107177734],["▁તમામ",-12.914791107177734],["▁Netflix",-12.914795875549316],["▁jawatan",-12.914796829223633],["▁purchase",-12.914796829223633],["૪",-12.914833068847656],["dels",-12.914841651916504],["▁mangsa",-12.9148588180542],["▁lähes",-12.914872169494627],["▁رابطہ",-12.914884567260742],["▁ನನಗೆ",-12.914884567260742],["▁suda",-12.91489028930664],["groep",-12.91489601135254],["▁zara",-12.91492748260498],["▁गर्छ",-12.914931297302246],["▁Hugo",-12.914946556091309],["ែង",-12.914958000183104],["▁Þór",-12.91498851776123],["▁museum",-12.91501235961914],["▁መግለጫ",-12.915033340454102],["기가",-12.9150390625],["▁teoria",-12.915078163146973],["نشر",-12.915082931518556],["▁အား",-12.91510772705078],["▁hyper",-12.915127754211426],["▁رفتن",-12.91513729095459],["是不",-12.91514015197754],["▁Aff",-12.915148735046388],["▁Kenn",-12.915162086486816],["▁авар",-12.915166854858398],["▁dağ",-12.915213584899902],["▁pinaka",-12.915218353271484],["▁대학",-12.915239334106444],["▁asegura",-12.915255546569824],["ष्ठ",-12.915266036987305],["موس",-12.915270805358888],["▁이를",-12.91529369354248],["ВР",-12.915302276611328],["▁agat",-12.915308952331545],["czeniu",-12.915325164794922],["eadh",-12.915329933166504],["▁lära",-12.9153413772583],["Min",-12.915375709533691],["▁કિ",-12.915404319763184],["但在",-12.915406227111816],["▁egyszerű",-12.915431022644045],["▁reisi",-12.915432929992676],["막",-12.91543674468994],["भिडियो",-12.915440559387209],["▁utenti",-12.915445327758787],["▁1,4",-12.915448188781738],["bawa",-12.915468215942385],["▁сай",-12.915481567382812],["▁valstī",-12.915491104125977],["ලෙ",-12.915494918823242],["▁usado",-12.915497779846191],["▁sorry",-12.915508270263672],["เหตุ",-12.915552139282228],["▁ເຮັດ",-12.91555881500244],["▁Lah",-12.915570259094238],["זל",-12.915620803833008],["▁собак",-12.915626525878906],["▁លើក",-12.915626525878906],["lím",-12.915641784667969],["ském",-12.
915654182434082],["▁ўдзел",-12.915657043457031],["▁међународн",-12.91565990447998],["▁ग्र",-12.91566562652588],["▁grab",-12.915668487548828],["▁subiect",-12.915677070617676],["super",-12.915681838989258],["গে",-12.915701866149902],["▁titta",-12.915722846984863],["▁שבו",-12.915738105773926],["ιακή",-12.915748596191406],["▁ଛ",-12.91576099395752],["逐步",-12.915769577026367],["posito",-12.915788650512695],["筋",-12.91579818725586],["▁créer",-12.915800094604492],["▁işlə",-12.915802001953123],["そういう",-12.915823936462402],["▁Është",-12.915827751159668],["▁σχόλια",-12.915827751159668],["▁запазени",-12.915827751159668],["▁некоторых",-12.915827751159668],["▁რაღაც",-12.915827751159668],["▁written",-12.9158296585083],["▁لحاظ",-12.9158296585083],["▁департамент",-12.915842056274414],["▁хамтран",-12.915857315063477],["дні",-12.915860176086426],["▁mã",-12.91586971282959],["▁증가",-12.9158935546875],["ಕ್ಕಿ",-12.915935516357422],["νουν",-12.915975570678713],["tatis",-12.915976524353027],["зал",-12.915980339050291],["▁Bridge",-12.915985107421877],["▁कोरिया",-12.915990829467772],["licher",-12.91599178314209],["კენ",-12.91600513458252],["▁Presidenti",-12.916030883789062],["改造",-12.916048049926758],["▁billet",-12.91604995727539],["▁possibilité",-12.916058540344238],["▁baro",-12.91608428955078],["▁ھەق",-12.91609001159668],["тични",-12.916099548339844],["▁วันที่",-12.916129112243652],["▁Tale",-12.916189193725586],["인지",-12.9161958694458],["▁sender",-12.916213035583496],["jącą",-12.916218757629396],["▁Hoc",-12.91624641418457],["▁досега",-12.916257858276367],["▁(26",-12.916264533996582],["düğü",-12.916279792785645],["авання",-12.91628646850586],["▁пола",-12.916301727294922],["▁fiyat",-12.916316032409668],["sete",-12.916324615478516],["▁обще",-12.916325569152832],["வரை",-12.916352272033691],["▁procedura",-12.916373252868652],["ვდა",-12.916393280029297],["ğin",-12.916425704956056],["▁tədbirlər",-12.91644287109375],["ბერ",-12.916447639465332],["вољ",-12.916467666625977],["▁sju",-12.916468620300291],["▁bây",-12.91646957397461],["चि",-12.916495323181152],["shinda",-12.916510581970217],["нц",-12.91655158996582],["ώνουν",-12.916570663452148],["▁capacidad",-12.916574478149414],["▁uživa",-12.916581153869627],["eltä",-12.916597366333008],["▁נש",-12.91660213470459],["▁interés",-12.916631698608398],["▁Þessi",-12.916669845581056],["▁Samuel",-12.916701316833496],["▁olmadığı",-12.916748046875],["▁გზა",-12.916753768920898],["ीले",-12.916773796081545],["응",-12.91681957244873],["▁கேட்ட",-12.916825294494627],["芬",-12.916831970214844],["▁વિચાર",-12.916833877563477],["▁reali",-12.916839599609377],["արի",-12.91684341430664],["▁asociación",-12.91684627532959],["▁sezoni",-12.91685390472412],["▁Pemerintah",-12.916865348815918],["ျခား",-12.916866302490234],["▁ಆಟ",-12.916877746582031],["▁feita",-12.916884422302246],["▁hemsida",-12.91690444946289],["เป็นการ",-12.916913986206056],["▁déli",-12.91691780090332],["ייד",-12.9169282913208],["▁kall",-12.916939735412598],["▁privado",-12.91697120666504],["▁قار",-12.916987419128418],["▁Kedua",-12.91699504852295],["hoito",-12.917000770568848],["ਣੇ",-12.917022705078123],["日至",-12.917028427124023],["სენ",-12.917074203491213],["▁pokok",-12.917089462280272],["▁kaste",-12.917091369628906],["athu",-12.917102813720703],["ching",-12.91711711883545],["pano",-12.917119026184082],["nima",-12.917134284973145],["▁अन्तर",-12.917147636413574],["▁مقالات",-12.91715145111084],["sít",-12.91716194152832],["ένα",-12.91717529296875],["ндай",-12.91718864440918],["▁SF",-12.917195320129396],["ದ್ಯ",-12.917198181152344],["śmie",-12.917
19913482666],["▁corte",-12.917214393615724],["▁zuzen",-12.917269706726074],["信任",-12.917293548583984],["इट",-12.9172945022583],["▁comporta",-12.917296409606934],["▁Gore",-12.917301177978516],["▁fuqi",-12.917315483093262],["台北市",-12.917338371276855],["▁cut",-12.917348861694336],["ถาม",-12.917356491088867],["▁ਆਈ",-12.91737937927246],["▁koske",-12.917412757873535],["▁รวม",-12.917412757873535],["nula",-12.917414665222168],["▁agam",-12.917417526245115],["▁превод",-12.917420387268066],["▁разу",-12.917430877685549],["▁Oui",-12.917435646057127],["私も",-12.917452812194824],["▁govor",-12.917550086975098],["▁Daugiau",-12.91755485534668],["▁lograr",-12.917567253112791],["uðu",-12.91761302947998],["▁Demir",-12.917628288269045],["▁ffordd",-12.917632102966309],["▁gond",-12.917704582214355],["▁kelia",-12.917712211608888],["▁aztán",-12.91773796081543],["▁Wang",-12.917741775512695],["בטח",-12.917743682861328],["monia",-12.917753219604492],["▁вуч",-12.917769432067873],["لح",-12.917826652526855],["▁wasu",-12.91783332824707],["▁Ilma",-12.917853355407717],["¶",-12.917902946472168],["▁псориазис",-12.917903900146484],["▁Ինչպես",-12.9179048538208],["▁мрежи",-12.917905807495115],["▁اکنون",-12.917909622192385],["upi",-12.917911529541016],["▁kardeş",-12.917919158935549],["▁داره",-12.917924880981444],["▁maten",-12.917954444885254],["▁szakmai",-12.917959213256836],["▁ыр",-12.917967796325684],["minti",-12.917969703674316],["▁рівня",-12.917991638183594],["▁plotë",-12.918021202087402],["▁musika",-12.91802215576172],["▁ഗോ",-12.918025970458984],["區域",-12.918068885803224],["مش",-12.918098449707031],["lép",-12.918126106262209],["▁dolog",-12.918133735656738],["мери",-12.918153762817385],["▁laiks",-12.9181547164917],["▁dirige",-12.91815948486328],["▁gagawin",-12.91819953918457],["▁levere",-12.918210983276367],["▁увеличи",-12.918217658996582],["▁CSS",-12.918238639831545],["tram",-12.918242454528809],["▁början",-12.91824436187744],["▁Dieses",-12.918251037597656],["▁Пара",-12.918254852294922],["▁│",-12.918379783630373],["▁Nim",-12.918394088745115],["▁pasaulio",-12.918401718139648],["診",-12.918405532836914],["▁прям",-12.91840934753418],["▁Inga",-12.918412208557127],["自行",-12.918426513671877],["▁مسئول",-12.918469429016112],["▁ജെ",-12.918476104736328],["тична",-12.91848850250244],["態",-12.918497085571287],["چار",-12.918505668640137],["▁λα",-12.918527603149414],["总结",-12.918527603149414],["▁tari",-12.918538093566896],["▁ceza",-12.918540954589844],["піл",-12.91855525970459],["▁ավարտ",-12.918563842773438],["▁partid",-12.918574333190918],["▁கல",-12.91858959197998],["ಕಾರಿ",-12.91863250732422],["▁okolic",-12.918639183044434],["gou",-12.918643951416016],["ثار",-12.918645858764648],["▁कॉ",-12.91866970062256],["əcəyi",-12.91868019104004],["▁kujt",-12.918681144714355],["পে",-12.918683052062988],["▁kuştin",-12.918696403503418],["ßen",-12.918700218200684],["▁képes",-12.918704986572266],["▁Laut",-12.918705940246582],["▁Isso",-12.918709754943848],["ësisë",-12.918715476989746],["▁نم",-12.918793678283691],["пев",-12.918810844421388],["▁دان",-12.918831825256348],["▁Mí",-12.918864250183104],["▁Tule",-12.918892860412598],["ကုန္",-12.91891098022461],["丸",-12.918930053710938],["өм",-12.91894245147705],["▁erfarenhet",-12.91894245147705],["▁српског",-12.91894245147705],["▁أغسطس",-12.91894245147705],["▁Hercegovine",-12.918944358825684],["▁ਪਾਰਟੀ",-12.9189453125],["▁Membuat",-12.91894817352295],["▁আৰু",-12.918949127197266],["▁Tiranë",-12.918960571289062],["▁ฉัน",-12.918967247009276],["▁brief",-12.91897201538086],["rats",-12.918974876403809],["▁Jud",-12.91898059
8449709],["▁District",-12.91898250579834],["acord",-12.919014930725098],["▁قومي",-12.919017791748049],["▁موقف",-12.919053077697754],["сом",-12.919082641601562],["▁психо",-12.919084548950195],["▁вось",-12.919111251831056],["而在",-12.919129371643066],["▁ෆ",-12.919150352478027],["cký",-12.919151306152344],["dern",-12.919153213500977],["pov",-12.919157028198242],["▁Porque",-12.91915798187256],["▁भगवान",-12.91922664642334],["▁küsi",-12.91923713684082],["▁myslím",-12.919264793395996],["▁ফোন",-12.919268608093262],["yse",-12.919272422790527],["staða",-12.91929531097412],["▁deporte",-12.919317245483398],["ုတ်",-12.919320106506348],["▁кеп",-12.919341087341309],["ล่า",-12.919352531433104],["MAR",-12.919377326965332],["▁રાજ",-12.919393539428713],["ుడి",-12.919411659240724],["▁입니다",-12.919415473937988],["▁Posta",-12.91944408416748],["лк",-12.91946029663086],["▁مردان",-12.919468879699709],["んでいる",-12.919480323791504],["▁วิ",-12.919486045837402],["FM",-12.91948699951172],["▁今年",-12.919493675231934],["▁회",-12.919525146484377],["▁direla",-12.919535636901855],["tova",-12.919543266296388],["NAN",-12.919546127319336],["▁skills",-12.919549942016602],["ЧИ",-12.91958236694336],["თუ",-12.919593811035156],["မရ",-12.919595718383787],["బీ",-12.919604301452637],["ELA",-12.91963005065918],["▁തുടങ്ങി",-12.919632911682127],["ишь",-12.919662475585938],["特性",-12.919673919677734],["صو",-12.919684410095217],["▁தனி",-12.91969871520996],["▁Lam",-12.919715881347656],["мит",-12.91973114013672],["одо",-12.919748306274414],["▁음",-12.919750213623049],["ေတြ႔",-12.9197998046875],["истов",-12.91985034942627],["▁roligt",-12.919855117797852],["oksi",-12.919864654541016],["子的",-12.919864654541016],["▁غم",-12.919879913330078],["▁య",-12.919881820678713],["тану",-12.91989517211914],["장은",-12.91993522644043],["▁פרטי",-12.91994571685791],["ดังกล่าว",-12.919958114624023],["▁கட்சி",-12.9199800491333],["▁município",-12.919983863830566],["▁1400",-12.9199857711792],["ေတာ႔",-12.920023918151855],["plá",-12.920028686523438],["▁inclus",-12.920039176940918],["▁kisebb",-12.920045852661133],["▁สี",-12.92005729675293],["▁православ",-12.920086860656738],["▁cher",-12.920100212097168],["▁qildi",-12.920117378234863],["▁Energia",-12.920125961303713],["▁tzv",-12.920126914978027],["▁flest",-12.920127868652344],["▁mez",-12.920135498046877],["▁zostanie",-12.920145988464355],["▁सिद्ध",-12.920146942138672],["▁Ponte",-12.92014980316162],["▁chip",-12.92014980316162],["▁ఉప",-12.920175552368164],["▁Čas",-12.920181274414062],["▁digo",-12.920195579528809],["ητές",-12.920196533203123],["▁traje",-12.920206069946287],["▁oog",-12.9202299118042],["ujący",-12.920235633850098],["▁yaliyo",-12.920287132263184],["▁බස්",-12.9202880859375],["▁Pembe",-12.92029857635498],["▁ліс",-12.920320510864258],["▁hauek",-12.920377731323242],["▁คลิป",-12.92039966583252],["▁pens",-12.92041015625],["masta",-12.920422554016112],["shir",-12.920438766479492],["unde",-12.920486450195312],["arch",-12.920487403869627],["▁potrebuje",-12.92049789428711],["▁ქუ",-12.920523643493652],["Centr",-12.920530319213867],["▁mulut",-12.92053508758545],["នាទី",-12.920564651489258],["▁town",-12.920567512512209],["ickým",-12.920568466186523],["▁яму",-12.920605659484863],["ବୁ",-12.920609474182127],["▁Möglichkeiten",-12.92061710357666],["▁दिले",-12.920639991760254],["▁בכ",-12.920642852783203],["▁کب",-12.92068576812744],["േറ്റ",-12.920719146728516],["lose",-12.92077350616455],["ിക്കുക",-12.920774459838867],["▁దు",-12.920788764953612],["重點",-12.92081069946289],["brać",-12.920817375183104],["ザ",-12.920862197875977],["oran",-12.
92086696624756],["以上の",-12.92087745666504],["נער",-12.920886993408203],["ანტი",-12.920900344848633],["▁customers",-12.920905113220217],["herr",-12.920943260192873],["дим",-12.920966148376465],["▁betala",-12.92097282409668],["▁도시",-12.920979499816896],["▁esperienza",-12.92099952697754],["rska",-12.921000480651855],["串",-12.921000480651855],["лови",-12.921014785766602],["▁δρόμο",-12.921014785766602],["▁দিবস",-12.921025276184082],["▁সিলেট",-12.921027183532717],["▁yliopisto",-12.92103099822998],["▁قىلدى",-12.921046257019045],["▁заман",-12.92108917236328],["scription",-12.921090126037598],["극",-12.92110824584961],["▁آنچه",-12.92113971710205],["▁Gazte",-12.921142578125],["▁aine",-12.921162605285645],["▁कायम",-12.92117977142334],["▁кылып",-12.921187400817873],["▁Ig",-12.921202659606934],["▁vrouw",-12.921205520629885],["▁යයි",-12.921215057373049],["▁Tell",-12.921223640441896],["▁textos",-12.921223640441896],["ケース",-12.921236991882324],["▁Tüm",-12.921265602111816],["▁takk",-12.921278953552246],["創業",-12.921284675598145],["isille",-12.92128562927246],["▁problemen",-12.92128562927246],["▁لک",-12.92129611968994],["▁идеал",-12.921310424804688],["▁szükség",-12.921319007873535],["▁according",-12.921335220336914],["kuri",-12.921344757080078],["▁송",-12.921344757080078],["▁краіны",-12.921359062194824],["▁jolla",-12.921379089355469],["▁قوت",-12.921388626098633],["karta",-12.921393394470217],["▁základní",-12.921451568603516],["▁کہتے",-12.92147731781006],["rið",-12.921483993530272],["▁kém",-12.921485900878906],["▁Kana",-12.921496391296388],["▁kurį",-12.9215087890625],["▁sangue",-12.921533584594728],["▁hoogte",-12.921545028686523],["ెం",-12.92155647277832],["▁新",-12.921566009521484],["ήστε",-12.9215669631958],["▁castell",-12.92157745361328],["▁გვერდი",-12.921591758728027],["baik",-12.921600341796877],["▁Johnson",-12.921629905700684],["нняў",-12.921661376953123],["▁depresi",-12.921701431274414],["▁полага",-12.92174243927002],["▁kraji",-12.921751022338867],["▁अल",-12.921786308288574],["▁Damer",-12.921798706054688],["▁Tata",-12.921814918518066],["▁Quelle",-12.921849250793455],["档",-12.921850204467772],["▁hoge",-12.92185878753662],["▁132",-12.921863555908203],["▁1990-",-12.921863555908203],["moon",-12.921869277954102],["မဲ",-12.921882629394531],["▁[4]",-12.92197608947754],["مكان",-12.922009468078612],["INT",-12.922017097473145],["▁spielen",-12.922035217285156],["▁gouvernement",-12.922067642211914],["▁rhwng",-12.922067642211914],["pír",-12.922076225280762],["▁મુખ્ય",-12.922080993652344],["▁благодаря",-12.922101020812988],["▁nosi",-12.922136306762695],["▁يكن",-12.92215061187744],["მოქმედებ",-12.922157287597656],["пло",-12.9221773147583],["ordre",-12.922178268432615],["▁informasiya",-12.922201156616213],["wrdd",-12.922202110290527],["ån",-12.92220973968506],["▁pengu",-12.922222137451172],["嚴重",-12.922233581542969],["▁tago",-12.922255516052246],["chov",-12.922282218933104],["ផ្",-12.922348976135254],["druck",-12.922364234924316],["បទ",-12.922364234924316],["သင့်",-12.922369003295898],["▁नोट",-12.922430038452148],["ფიქრ",-12.922443389892578],["▁dolgo",-12.922468185424805],["▁Koi",-12.922499656677246],["身份",-12.922531127929688],["cè",-12.922562599182127],["▁Potter",-12.922576904296877],["lanish",-12.922593116760254],["▁inscrit",-12.922605514526367],["很少",-12.922611236572266],["kui",-12.922616004943848],["▁tuk",-12.922616958618164],["▁ხომ",-12.922646522521973],["▁starte",-12.922648429870604],["▁efectos",-12.922701835632324],["▁Вони",-12.922731399536133],["пущен",-12.922761917114258],["▁naging",-12.922762870788574],["▁próbá
l",-12.922772407531738],["▁ගත්ත",-12.922779083251951],["丝",-12.922779083251951],["▁የምት",-12.92282772064209],["▁utca",-12.92283058166504],["ื่อ",-12.922833442687988],["▁BAB",-12.9229097366333],["щение",-12.922938346862791],["інь",-12.922943115234377],["▁kabisa",-12.922947883605955],["▁Sedan",-12.922969818115234],["▁tulad",-12.922975540161133],["чине",-12.923030853271484],["▁Force",-12.92303466796875],["▁зала",-12.923036575317385],["ړې",-12.923104286193848],["လုပ်ငန်း",-12.923110961914062],["▁اصفهان",-12.92311191558838],["▁березня",-12.923114776611328],["▁Mayıs",-12.92312240600586],["шење",-12.923128128051758],["▁свега",-12.923128128051758],["五年",-12.923133850097656],["▁Kurdan",-12.923147201538086],["出来る",-12.92319107055664],["גוד",-12.923195838928224],["▁nhờ",-12.923195838928224],["▁kuriuos",-12.923206329345703],["▁सूर्य",-12.923206329345703],["▁Mond",-12.92321491241455],["στρ",-12.923221588134766],["ფარ",-12.92323398590088],["ខែ",-12.923234939575195],["▁КА",-12.923251152038574],["▁рев",-12.923291206359863],["▁dinamik",-12.92330265045166],["▁notiek",-12.923303604125977],["▁rup",-12.923317909240724],["▁רש",-12.923322677612305],["▁سفید",-12.92332649230957],["▁ministar",-12.923345565795898],["▁lecture",-12.923352241516112],["▁gesto",-12.92336368560791],["▁betaling",-12.923379898071287],["▁ആയി",-12.92339324951172],["trip",-12.923420906066896],["▁endre",-12.923423767089844],["ედ",-12.923449516296388],["orul",-12.923452377319336],["γον",-12.923481941223145],["ώνεται",-12.923504829406738],["▁Roberto",-12.923521995544434],["ける",-12.923529624938965],["læk",-12.923538208007812],["гат",-12.923568725585938],["গুলি",-12.923571586608888],["ەپ",-12.923584938049316],["▁mucha",-12.923639297485352],["open",-12.923649787902832],["енных",-12.923665046691896],["латын",-12.923666954040527],["ల్స్",-12.923690795898438],["▁ಜಿಲ್ಲಾ",-12.923698425292969],["ন্ন",-12.923713684082031],["▁вашите",-12.923735618591309],["valta",-12.923742294311523],["▁мной",-12.923772811889648],["поште",-12.923784255981444],["▁Tier",-12.923805236816406],["▁razie",-12.923873901367188],["▁Беларус",-12.923877716064451],["GAN",-12.923924446105955],["жение",-12.923934936523438],["ক্",-12.92395305633545],["žič",-12.923962593078612],["▁95%",-12.92400360107422],["▁оору",-12.924015045166016],["▁모바일",-12.924015998840332],["飞机",-12.92401885986328],["▁kampen",-12.924023628234863],["лети",-12.924039840698242],["▁ál",-12.924042701721191],["监",-12.924043655395508],["ומות",-12.924050331115724],["vreme",-12.92405891418457],["▁നിര",-12.924072265625],["lona",-12.92407512664795],["▁origin",-12.924089431762695],["koha",-12.924095153808594],["вра",-12.92410659790039],["ھے",-12.92410945892334],["мог",-12.924112319946287],["枪",-12.924113273620604],["▁seven",-12.92412281036377],["两人",-12.9241361618042],["▁पृष्ठ",-12.924139022827148],["▁dava",-12.92414665222168],["กฎหมาย",-12.924155235290527],["▁випадку",-12.924156188964844],["▁PROGRAM",-12.92415714263916],["▁կարելի",-12.92416286468506],["▁campagne",-12.924165725708008],["▁gehien",-12.924174308776855],["сол",-12.924181938171388],["▁చేసింది",-12.924185752868652],["buh",-12.924196243286133],["ጠቅ",-12.924198150634766],["RÁ",-12.924213409423828],["▁امداد",-12.924214363098145],["▁estrategia",-12.924254417419434],["▁waarbij",-12.92429542541504],["▁Tony",-12.924299240112305],["6)",-12.92431354522705],["ഴു",-12.92431926727295],["▁INTER",-12.924365043640137],["дба",-12.924384117126465],["▁arabera",-12.924399375915527],["estudi",-12.924421310424805],["▁waarom",-12.924434661865234],["▁Υπ",-12.924478530883787],["ിക്കും",-12.924
53384399414],["terapeut",-12.924556732177734],["▁svojej",-12.924562454223633],["mayın",-12.924564361572266],["▁změn",-12.924566268920898],["生意",-12.924579620361328],["▁اگه",-12.924606323242188],["нти",-12.924619674682615],["▁ડે",-12.924628257751465],["ناه",-12.924642562866213],["▁कति",-12.924653053283691],["▁ଏହାର",-12.92466926574707],["hắc",-12.92470932006836],["jere",-12.9247407913208],["▁sota",-12.92477321624756],["ENE",-12.92478370666504],["▁blood",-12.924787521362305],["вч",-12.924805641174316],["ገኝ",-12.924806594848633],["ෘ",-12.924822807312012],["▁vyk",-12.924822807312012],["ована",-12.924838066101074],["▁installation",-12.924847602844238],["▁النا",-12.92487907409668],["合法",-12.924888610839844],["▁једног",-12.924908638000488],["tinu",-12.92492961883545],["dding",-12.924964904785156],["▁lägga",-12.924994468688965],["ДС",-12.924997329711914],["▁몰",-12.92505168914795],["▁duur",-12.925077438354492],["▁їм",-12.925078392028809],["עבד",-12.925095558166504],["▁kaupungin",-12.925097465515137],["▁лак",-12.92510986328125],["xistin",-12.925151824951172],["Ռ",-12.925151824951172],["ပေါ်",-12.92516803741455],["▁לבית",-12.925175666809082],["▁bruges",-12.92518138885498],["损失",-12.925182342529297],["▁Կար",-12.925195693969728],["17)",-12.925196647644045],["ጄ",-12.92520236968994],["▁заштита",-12.925207138061523],["▁πίσω",-12.92520809173584],["▁Family",-12.925211906433104],["щині",-12.925213813781738],["▁Thực",-12.925215721130373],["▁პატარა",-12.92521858215332],["żni",-12.925251007080078],["▁البلاد",-12.925251007080078],["▁የምን",-12.925259590148926],["▁Hvorfor",-12.925271034240724],["▁संबंध",-12.925278663635254],["▁diverses",-12.92527961730957],["▁eigenlijk",-12.925307273864746],["unjuk",-12.925314903259276],["▁Wald",-12.925323486328123],["▁شكل",-12.925339698791504],["▁ለሚ",-12.92535400390625],["eline",-12.92536449432373],["▁Ντ",-12.925366401672363],["энне",-12.925374031066896],["▁priva",-12.925381660461426],["▁nimeni",-12.925387382507324],["ვიდა",-12.925408363342283],["ลอ",-12.9254150390625],["ಆ",-12.925422668457031],["liek",-12.925455093383787],["▁paha",-12.925497055053713],["▁земель",-12.925536155700684],["▁Baj",-12.925555229187012],["保留",-12.92556381225586],["IMO",-12.925565719604492],["▁rəhbəri",-12.925591468811035],["▁Viet",-12.925613403320312],["leş",-12.92563819885254],["▁kedai",-12.925640106201172],["▁εφ",-12.925649642944336],["trykk",-12.925652503967283],["tām",-12.925676345825195],["▁strom",-12.925698280334473],["▁1951",-12.925708770751951],["ञ्च",-12.9257230758667],["▁Little",-12.925758361816406],["▁geweest",-12.925763130187988],["▁ständig",-12.925769805908203],["▁смрт",-12.925779342651367],["dop",-12.925780296325684],["држи",-12.925789833068848],["වීමට",-12.925804138183594],["▁(25)",-12.92580795288086],["үш",-12.925811767578123],["ebat",-12.92583465576172],["లె",-12.925858497619627],["▁esker",-12.925881385803224],["▁priamo",-12.925897598266602],["▁Cooper",-12.925901412963867],["▁sprawdz",-12.925917625427246],["▁mõju",-12.92593002319336],["▁Kati",-12.925932884216309],["lines",-12.925935745239258],["▁tabu",-12.9259672164917],["izmu",-12.925983428955078],["▁Karin",-12.925985336303713],["vd",-12.92600154876709],["▁Quem",-12.926003456115724],["▁şəxs",-12.926007270812988],["▁Usta",-12.926051139831545],["BAR",-12.92607879638672],["ുത",-12.926104545593262],["▁2.3",-12.92612075805664],["чид",-12.926155090332031],["盖",-12.92617130279541],["实际上",-12.926175117492676],["کسی",-12.926177978515623],["മെ",-12.926201820373535],["▁haku",-12.92621612548828],["▁martie",-12.926239967346191],["▁କରାଯାଇଛି",-12.9262542
72460938],["▁інтернет",-12.926255226135254],["สื่อ",-12.926263809204102],["▁жовтня",-12.926268577575684],["▁біздің",-12.926276206970217],["▁tragedi",-12.926299095153809],["▁авсан",-12.926305770874023],["▁Diego",-12.926350593566896],["kaart",-12.926356315612791],["лиз",-12.92637062072754],["▁सक्ने",-12.926386833190918],["▁yuz",-12.926427841186523],["▁gunakan",-12.926432609558104],["▁terbuka",-12.926433563232422],["чэ",-12.926440238952637],["▁buruz",-12.92644214630127],["具备",-12.926453590393066],["▁максимально",-12.926481246948242],["қын",-12.926506996154783],["▁perfecta",-12.926507949829102],["ซ์",-12.926549911499023],["▁القص",-12.926579475402832],["▁pasien",-12.926584243774414],["▁nju",-12.926595687866213],["toire",-12.92660427093506],["कोट",-12.926606178283691],["▁ову",-12.926645278930664],["▁ڏيڻ",-12.926647186279297],["φε",-12.926648139953612],["▁voll",-12.926661491394045],["▁Pink",-12.926669120788574],["▁жоспар",-12.926669120788574],["▁répond",-12.926680564880373],["hvi",-12.926724433898926],["▁карата",-12.92672634124756],["▁prevista",-12.926774978637695],["ჯე",-12.926816940307615],["فار",-12.926820755004885],["اعم",-12.92683219909668],["▁дуж",-12.926847457885742],["▁jobban",-12.92690372467041],["kalu",-12.926918983459473],["kban",-12.926959991455078],["▁ludzie",-12.92697525024414],["cach",-12.926976203918455],["STI",-12.92699146270752],["beta",-12.926996231079102],["uhkan",-12.927021980285645],["▁Rib",-12.927053451538086],["dho",-12.92707633972168],["crit",-12.92710304260254],["▁administración",-12.927103996276855],["▁мамлекет",-12.927105903625488],["▁mengha",-12.927123069763184],["▁prostoru",-12.927148818969728],["▁ਕਲ",-12.927163124084473],["google",-12.927189826965332],["ፈው",-12.927202224731444],["▁principali",-12.927213668823242],["遍",-12.927234649658203],["濃",-12.927257537841797],["羽",-12.927258491516112],["Non",-12.927286148071287],["▁Büyük",-12.92729663848877],["▁odgovara",-12.927297592163086],["▁кіраўнік",-12.927297592163086],["▁انرژی",-12.927297592163086],["▁येथील",-12.927297592163086],["▁dijadikan",-12.927298545837402],["▁жыцця",-12.927300453186035],["批准",-12.927301406860352],["▁क्षमता",-12.927303314208984],["▁ըստ",-12.92730712890625],["єднання",-12.927316665649414],["▁Đối",-12.92733669281006],["まして",-12.927345275878906],["▁جزء",-12.927349090576172],["▁Радио",-12.927355766296388],["▁trem",-12.927361488342283],["କେ",-12.927366256713867],["ഫി",-12.927379608154297],["▁заврши",-12.927380561828612],["lugu",-12.927417755126951],["ಎಲ್",-12.92741870880127],["uyu",-12.927434921264648],["راح",-12.927434921264648],["adon",-12.927465438842772],["язок",-12.927496910095217],["stock",-12.927507400512695],["冒",-12.927555084228516],["▁cidades",-12.92757511138916],["▁braucht",-12.92758083343506],["▁apply",-12.92762565612793],["▁خوف",-12.927656173706056],["ügy",-12.927667617797852],["сайт",-12.9276704788208],["▁рост",-12.927672386169434],["▁nouveaux",-12.927719116210938],["引き",-12.927726745605469],["inom",-12.927730560302734],["פער",-12.927730560302734],["ப்பட்டது",-12.927730560302734],["▁شکار",-12.927773475646973],["alni",-12.927779197692873],["dorf",-12.927783966064451],["▁షా",-12.92780590057373],["rur",-12.927810668945312],["▁contenu",-12.927821159362791],["▁pierwsze",-12.927830696105955],["▁тул",-12.92785358428955],["אַס",-12.927863121032717],["▁aquestes",-12.927871704101562],["ეტ",-12.92789363861084],["▁culturale",-12.927908897399902],["паль",-12.927913665771484],["▁robo",-12.927916526794434],["copi",-12.92792797088623],["ຫລ",-12.92792797088623],["国民",-12.927938461303713],["јом",-12.92795372
0092772],["▁NOR",-12.927959442138672],["▁propi",-12.927979469299316],["іні",-12.927984237670898],["tlu",-12.927997589111328],["손",-12.92800998687744],["▁рецепт",-12.928034782409668],["▁обратно",-12.92803955078125],["▁citiem",-12.928048133850098],["▁EC",-12.928071022033691],["▁පුරා",-12.928081512451172],["▁учени",-12.928085327148438],["mærke",-12.928088188171388],["▁目前",-12.928092002868652],["▁vostre",-12.928092956542969],["träg",-12.928129196166992],["▁gadis",-12.92815399169922],["ภา",-12.92819881439209],["▁компонент",-12.928223609924316],["еп",-12.928230285644531],["▁καλό",-12.928242683410645],["▁protect",-12.92824935913086],["siin",-12.928266525268556],["більш",-12.92827320098877],["起こ",-12.928279876708984],["▁ukr",-12.928296089172363],["ējs",-12.928316116333008],["▁Jugend",-12.928317070007324],["▁सहमति",-12.928338050842283],["▁Mohammad",-12.92834758758545],["▁تعیین",-12.92834758758545],["▁पुन्हा",-12.92834758758545],["▁diperlukan",-12.928352355957031],["▁سپریم",-12.92837619781494],["▁بروز",-12.928382873535156],["▁esport",-12.928417205810549],["▁conferma",-12.928438186645508],["▁gastronomi",-12.92844581604004],["สนใจ",-12.92845058441162],["▁Sanat",-12.928458213806152],["▁conseil",-12.928484916687012],["رور",-12.928485870361328],["▁Lille",-12.928488731384276],["▁Jabatan",-12.928515434265137],["▁outfit",-12.928518295288086],["▁lõpu",-12.92852783203125],["▁граѓаните",-12.928546905517578],["▁Kurz",-12.928550720214844],["▁şirket",-12.928586959838867],["▁ஆக",-12.92862319946289],["▁حادث",-12.92863941192627],["mman",-12.928650856018066],["тах",-12.928674697875977],["տեր",-12.928675651550291],["▁shugaban",-12.92867946624756],["hya",-12.928680419921877],["figur",-12.928690910339355],["▁xəbərlər",-12.92869472503662],["▁keskus",-12.928696632385254],["▁větší",-12.92873191833496],["▁វិ",-12.92873191833496],["▁quiere",-12.92875862121582],["jaus",-12.928760528564451],["▁नाहीत",-12.928787231445312],["▁շրջան",-12.928791999816896],["▁መን",-12.928866386413574],["▁શા",-12.92887020111084],["giver",-12.928889274597168],["Port",-12.928903579711914],["▁semuanya",-12.92891788482666],["ську",-12.92892074584961],["ხს",-12.928922653198242],["norm",-12.928947448730469],["▁생활",-12.928984642028809],["▁شم",-12.92905330657959],["▁commun",-12.929069519042969],["ገና",-12.92907428741455],["ກ່ຽວກັບ",-12.929079055786133],["▁muligheder",-12.929081916809082],["▁přímo",-12.929105758666992],["▁rastlin",-12.929121971130373],["▁projekto",-12.929155349731444],["▁tute",-12.92917537689209],["▁Пла",-12.929179191589355],["▁ਮੰਗ",-12.929179191589355],["ေရာ",-12.929211616516112],["▁qilingan",-12.92923069000244],["▁بعدی",-12.929243087768556],["ئل",-12.929279327392578],["▁relatie",-12.92930030822754],["牢",-12.929327964782717],["yay",-12.929332733154297],["输",-12.929333686828612],["hee",-12.929359436035156],["結束",-12.929367065429688],["تز",-12.92937183380127],["▁potřeb",-12.929375648498535],["誘",-12.929377555847168],["헌",-12.929391860961914],["▁чыгып",-12.929397583007812],["▁Սերժ",-12.929397583007812],["▁அனைத்து",-12.929397583007812],["▁Όπως",-12.929399490356444],["▁одном",-12.92944049835205],["▁purpose",-12.929441452026367],["▁ilmais",-12.929468154907228],["Air",-12.92947006225586],["ΕΚ",-12.929498672485352],["▁acceder",-12.92950439453125],["▁riječi",-12.929520606994627],["▁spro",-12.929530143737791],["telt",-12.929534912109377],["▁сцен",-12.929548263549805],["▁kuuluu",-12.92956256866455],["学术",-12.929580688476562],["▁choisi",-12.929636001586914],["▁lab",-12.92966651916504],["▁nee",-12.92967128753662],["工资",-12.929677963256836],["▁ಅವ",-12.92968
1777954102],["ством",-12.929696083068848],["लेले",-12.92969799041748],["bby",-12.929706573486328],["▁4.5",-12.92971134185791],["▁Einzel",-12.929713249206545],["學院",-12.92976188659668],["▁departamento",-12.929802894592283],["ЛЕ",-12.929816246032717],["นคร",-12.929818153381348],["▁dolazi",-12.929827690124512],["扶",-12.929847717285156],["▁предизвика",-12.929850578308104],["▁நெ",-12.92986011505127],["רוס",-12.929876327514648],["1996",-12.929903030395508],["вад",-12.929938316345217],["▁мл",-12.92994213104248],["▁Exp",-12.929950714111328],["▁punës",-12.929974555969238],["llisuus",-12.929998397827148],["▁వాటి",-12.930018424987791],["▁TAK",-12.930026054382324],["▁mercat",-12.93002986907959],["▁الجن",-12.930047035217283],["ongeza",-12.930065155029297],["▁ընդուն",-12.93007755279541],["▁obres",-12.930091857910156],["ικο",-12.930094718933104],["▁tribu",-12.930098533630373],["entzia",-12.930113792419434],["▁kínál",-12.930119514465332],["city",-12.93012237548828],["▁spēlē",-12.93012523651123],["▁commercial",-12.930158615112305],["▁Nada",-12.930168151855469],["රත්න",-12.930169105529783],["DEM",-12.930185317993164],["▁veidi",-12.93020725250244],["eling",-12.930209159851074],["▁king",-12.930218696594238],["▁часова",-12.930224418640137],["▁Tämän",-12.93023681640625],["▁moyen",-12.930252075195312],["▁Ota",-12.930264472961426],["▁போட்டி",-12.930286407470703],["▁Ym",-12.930306434631348],["▁Ат",-12.930317878723145],["▁лікар",-12.93032169342041],["▁פני",-12.930336952209473],["▁napravi",-12.930400848388672],["▁kalk",-12.930407524108888],["妈",-12.930413246154783],["困難",-12.930418968200684],["緣",-12.930437088012695],["▁ന്ന്",-12.930438995361328],["eck",-12.930439949035645],["市场的",-12.93044376373291],["▁октября",-12.930452346801758],["▁រូប",-12.93045425415039],["▁Крім",-12.93045711517334],["▁אומר",-12.930462837219238],["▁હશે",-12.930471420288086],["krę",-12.930474281311035],["▁չեմ",-12.930480003356934],["kvalitet",-12.930498123168944],["▁quận",-12.930512428283691],["ុន",-12.930553436279297],["რით",-12.930566787719728],["▁କୋ",-12.930566787719728],["පැ",-12.930567741394045],["▁зрения",-12.930571556091309],["ienė",-12.930574417114258],["വന്",-12.930603981018066],["▁nivell",-12.930627822875977],["آم",-12.930645942687988],["хот",-12.930652618408203],["▁stranica",-12.930652618408203],["szcze",-12.930668830871582],["ETE",-12.930686950683594],["▁التح",-12.930693626403809],["ених",-12.930703163146973],["▁qaaday",-12.930715560913086],["▁dossier",-12.93071746826172],["▁نرخ",-12.930719375610352],["bike",-12.930734634399414],["▁Σκ",-12.930744171142578],["▁taht",-12.930752754211426],["▁ramme",-12.930756568908691],["коли",-12.93077564239502],["курс",-12.930793762207031],["UH",-12.930805206298828],["▁алма",-12.930806159973145],["▁Schutz",-12.93080711364746],["آخر",-12.930810928344728],["▁සැක",-12.930834770202637],["tsy",-12.9308443069458],["▁Mach",-12.930877685546877],["jarige",-12.93088150024414],["▁locatie",-12.93088150024414],["▁လက်",-12.930901527404783],["▁Jaan",-12.93093967437744],["tva",-12.930941581726074],["казаць",-12.930974960327148],["▁ഫ്ര",-12.93099308013916],["▁värvi",-12.930994033813477],["▁Ця",-12.931018829345703],["▁കല",-12.931026458740234],["▁Ofisi",-12.93102741241455],["▁മണി",-12.931035041809082],["▁פחות",-12.931059837341309],["ිලා",-12.931075096130373],["▁einn",-12.93107795715332],["▁UR",-12.9310941696167],["чым",-12.931133270263672],["лана",-12.93113899230957],["書いて",-12.93116283416748],["ことです",-12.931177139282228],["▁otak",-12.93118667602539],["րոն",-12.931201934814451],["▁експерти",-12.931214332580566],["дають"
,-12.931228637695312],["መረ",-12.931229591369627],["▁príprav",-12.931235313415527],["ปิ",-12.931275367736816],["▁Шо",-12.931289672851562],["▁Alap",-12.931300163269045],["▁circo",-12.931303977966309],["▁السوري",-12.93130588531494],["сиз",-12.931317329406738],["▁Punkt",-12.931334495544434],["▁chuir",-12.931353569030762],["▁täysin",-12.931360244750977],["▁vídeos",-12.931377410888672],["▁(28",-12.93138313293457],["▁prů",-12.93138313293457],["икалық",-12.931386947631836],["▁yaka",-12.931395530700684],["早餐",-12.93141269683838],["専門",-12.931414604187012],["▁पूजा",-12.93144416809082],["hyr",-12.931445121765137],["▁cata",-12.931462287902832],["▁Jab",-12.931478500366213],["والي",-12.931488990783691],["▁Elena",-12.931489944458008],["▁Gean",-12.931496620178224],["ፒ",-12.931501388549805],["▁misschien",-12.931501388549805],["▁தொடர்",-12.931504249572754],["▁komisi",-12.93150520324707],["nggung",-12.931511878967283],["gud",-12.931512832641602],["▁Sağlık",-12.93152141571045],["cionado",-12.931527137756348],["▁difference",-12.93154525756836],["▁feature",-12.931550979614258],["VV",-12.931553840637209],["石油",-12.931565284729004],["▁darbi",-12.931591987609863],["mentes",-12.931629180908203],["▁lepšie",-12.93164348602295],["sade",-12.931652069091797],["▁Veel",-12.931652069091797],["▁rue",-12.931679725646973],["▁Svensk",-12.931702613830566],["▁analog",-12.9317045211792],["▁läs",-12.93173599243164],["愛情",-12.93174934387207],["njang",-12.931758880615234],["▁slova",-12.931764602661133],["▁sofa",-12.931769371032717],["▁составляет",-12.931771278381348],["γου",-12.931797981262209],["▁بحق",-12.931825637817385],["▁ଧରି",-12.93183135986328],["ğın",-12.931832313537598],["▁yenye",-12.931832313537598],["▁dik",-12.931836128234863],["▁ystod",-12.93183708190918],["велик",-12.931855201721191],["麦",-12.931872367858888],["ymą",-12.931873321533203],["крити",-12.931878089904783],["▁mojej",-12.93188190460205],["▁ऊ",-12.93191146850586],["sula",-12.931923866271973],["▁първи",-12.931941986083984],["▁виступ",-12.931944847106934],["▁होला",-12.93195629119873],["▁देखें",-12.93197250366211],["▁Girona",-12.931984901428224],["▁савет",-12.931997299194336],["▁ગણ",-12.932047843933104],["samling",-12.932058334350586],["▁allora",-12.932066917419434],["Ан",-12.932069778442385],["РК",-12.932069778442385],["izira",-12.93207836151123],["žina",-12.932082176208496],["пати",-12.93212890625],["▁створен",-12.932168006896973],["cato",-12.932188034057615],["▁hodiny",-12.932198524475098],["цер",-12.932207107543944],["▁qat",-12.932247161865234],["isc",-12.932271003723145],["▁patients",-12.932313919067385],["▁keskustelu",-12.932337760925291],["▁effective",-12.932348251342772],["munk",-12.93238639831543],["▁sleep",-12.932391166687012],["▁អំពី",-12.932393074035645],["import",-12.932409286499023],["▁Esti",-12.932462692260742],["järje",-12.932475090026855],["▁توی",-12.932486534118652],["▁mbili",-12.932506561279297],["▁dicit",-12.932517051696776],["ẽ",-12.932519912719728],["都很",-12.932520866394045],["岡",-12.93253231048584],["છ",-12.932538032531738],["▁REG",-12.932552337646484],["냐",-12.9325532913208],["▁memerlukan",-12.932555198669434],["▁Əgər",-12.932555198669434],["▁հեղինակ",-12.932555198669434],["▁ਤੁਹਾਨੂੰ",-12.932555198669434],["▁යොමු",-12.932555198669434],["▁življenja",-12.9325590133667],["▁ඇතුළු",-12.9325590133667],["▁anonim",-12.932572364807127],["ົກ",-12.93258285522461],["ules",-12.93258571624756],["▁адапт",-12.932591438293455],["▁పట్ట",-12.932615280151367],["يض",-12.932626724243164],["▁lorsque",-12.932632446289062],["▁nomo",-12.932644844055176],["برد",-12.93268
6805725098],["▁Unser",-12.932696342468262],["▁visą",-12.932740211486816],["▁локал",-12.932818412780762],["▁Akt",-12.932821273803713],["ੁੱ",-12.93284511566162],["ование",-12.932869911193848],["бом",-12.932897567749023],["تجرب",-12.932902336120604],["stift",-12.93292999267578],["▁dibawah",-12.932930946350098],["▁сов",-12.932931900024414],["▁okolí",-12.932934761047363],["RJ",-12.932945251464844],["▁خبرو",-12.93294620513916],["色彩",-12.932957649230955],["▁Bible",-12.932977676391602],["біль",-12.932989120483398],["▁โรงเรียน",-12.93299388885498],["ிடம்",-12.93300724029541],["ضيف",-12.933021545410156],["▁ଲକ୍ଷ",-12.933037757873535],["▁publike",-12.933046340942385],["▁nede",-12.933060646057127],["യർ",-12.933075904846191],["▁notice",-12.933082580566406],["▁grader",-12.933088302612305],["▁بیشتری",-12.933104515075684],["▁tygodni",-12.933116912841797],["خوان",-12.933152198791504],["στερ",-12.933172225952148],["▁možda",-12.933183670043944],["▁കള",-12.93320083618164],["imenti",-12.933204650878906],["▁Mona",-12.933210372924805],["▁zir",-12.93322467803955],["▁gagal",-12.933250427246094],["ෙහි",-12.933290481567385],["runā",-12.93329620361328],["▁יחד",-12.933314323425291],["mart",-12.933330535888672],["▁Angela",-12.93333625793457],["▁недели",-12.933350563049316],["▁टु",-12.933395385742188],["daha",-12.933409690856934],["▁كۈچ",-12.93341827392578],["▁ინფორმაცია",-12.93342113494873],["▁Ukraina",-12.933473587036133],["▁रचना",-12.933486938476562],["кој",-12.93349266052246],["▁unico",-12.93349266052246],["Zi",-12.933500289916992],["ಿಂಗ್",-12.933504104614258],["TET",-12.933523178100586],["▁ਦੇਣ",-12.933523178100586],["▁ohja",-12.933530807495115],["协调",-12.933551788330078],["줄",-12.93356227874756],["冊",-12.933578491210938],["檢查",-12.933582305908203],["既然",-12.933585166931152],["▁බ්",-12.933587074279783],["یانو",-12.933595657348633],["▁σειρά",-12.933610916137695],["▁Центар",-12.933610916137695],["▁ڪڏهن",-12.933610916137695],["ほ",-12.933610916137695],["▁εκτός",-12.933621406555176],["▁trải",-12.933635711669922],["▁설정",-12.933643341064451],["▁անվան",-12.93365478515625],["▁तुमच्या",-12.933658599853516],["ですか",-12.933687210083008],["▁patro",-12.93369197845459],["sinn",-12.933734893798828],["▁attraktiv",-12.93374729156494],["യിലും",-12.933753967285156],["ບົດ",-12.933755874633787],["▁Även",-12.933772087097168],["tikan",-12.93377685546875],["ବନ",-12.93379020690918],["▁ovan",-12.93386459350586],["▁Τζ",-12.93391227722168],["tals",-12.933914184570312],["▁Dupa",-12.93397331237793],["▁sætte",-12.933977127075195],["▁conhecer",-12.933993339538574],["▁nini",-12.934020042419434],["ură",-12.934033393859863],["uum",-12.934042930603027],["▁રા",-12.93405055999756],["▁поче",-12.934057235717772],["▁лета",-12.934074401855469],["yung",-12.93407917022705],["▁خپر",-12.934080123901367],["atos",-12.934083938598633],["▁donat",-12.934104919433594],["▁ბრ",-12.934146881103516],["ျပီ",-12.93416976928711],["▁occhi",-12.934174537658691],["clam",-12.93418025970459],["دها",-12.934206008911133],["терин",-12.934213638305664],["▁ederek",-12.93423080444336],["▁اتر",-12.934263229370115],["ystä",-12.934271812438965],["חלק",-12.934298515319824],["lager",-12.934306144714355],["خوا",-12.934351921081545],["оти",-12.934354782104492],["ируется",-12.934375762939451],["▁տարած",-12.934418678283691],["ఎం",-12.93442440032959],["▁DM",-12.934428215026855],["jącym",-12.934432983398438],["дання",-12.934443473815918],["くて",-12.934470176696776],["二人",-12.934476852416992],["▁izb",-12.93451690673828],["breyt",-12.934525489807127],["sere",-12.934526443481444],["▁impro",-12.934553146
362305],["▁pokaz",-12.934553146362305],["▁গা",-12.934568405151367],["яться",-12.934581756591797],["▁Khmer",-12.934603691101074],["▁Paa",-12.934618949890137],["京都",-12.934633255004885],["mát",-12.934661865234377],["▁земја",-12.934661865234377],["source",-12.934669494628906],["▁රිකට්",-12.934669494628906],["▁woh",-12.934670448303224],["▁Именно",-12.934674263000488],["▁sikerült",-12.934675216674805],["▁ऐसी",-12.934683799743652],["▁bilərsiniz",-12.934685707092283],["▁гі",-12.934700965881348],["▁kuti",-12.934709548950195],["PAR",-12.934724807739258],["▁Európai",-12.934751510620115],["▁viv",-12.934772491455078],["▁komoly",-12.93477725982666],["likku",-12.934783935546877],["ften",-12.93480396270752],["▁kaufen",-12.934806823730469],["ără",-12.934814453125],["ႏုိင္",-12.934820175170898],["დინ",-12.934839248657228],["▁ایشان",-12.934842109680176],["▁menunggu",-12.934844017028809],["▁Informasi",-12.93484592437744],["▁proven",-12.93484878540039],["450",-12.93489933013916],["estro",-12.934913635253906],["лены",-12.934918403625488],["ywać",-12.934934616088867],["▁Gujarati",-12.9349365234375],["两年",-12.934976577758787],["▁sedem",-12.934991836547852],["▁Type",-12.934992790222168],["▁dispose",-12.935006141662598],["▁puoi",-12.935017585754396],["▁ident",-12.935030937194824],["▁etsi",-12.93504524230957],["▁교수",-12.935080528259276],["ვერ",-12.93508529663086],["▁mëdha",-12.935125350952148],["纪",-12.935135841369627],["▁сургалт",-12.93516445159912],["▁տրամադր",-12.935172080993652],["కల్",-12.935181617736816],["ушы",-12.935200691223145],["cyjny",-12.935214042663574],["▁komedi",-12.93521785736084],["▁medicinsk",-12.935219764709473],["สถานที่",-12.935227394104004],["cę",-12.935264587402344],["સર",-12.935270309448242],["दो",-12.935273170471191],["ാണ",-12.935330390930176],["▁პი",-12.935336112976074],["ỷ",-12.935338973999023],["▁сторону",-12.935351371765137],["ZER",-12.935352325439451],["asto",-12.935380935668944],["▁выступ",-12.935380935668944],["sivu",-12.935417175292969],["▁Haa",-12.935420989990234],["たくさん",-12.935423851013184],["▁1925",-12.93544101715088],["-60",-12.935463905334473],["▁kriv",-12.93547821044922],["▁पेज",-12.935480117797852],["▁исправ",-12.9354887008667],["▁мов",-12.935491561889648],["ینو",-12.93554401397705],["▁((",-12.935554504394531],["ანს",-12.935558319091797],["▁Kost",-12.9356107711792],["▁экенин",-12.93562126159668],["▁reklama",-12.93564796447754],["dž",-12.935651779174805],["▁találkoz",-12.93565845489502],["腐",-12.93565845489502],["ances",-12.935670852661133],["дэр",-12.93570613861084],["វិ",-12.93570613861084],["▁coordina",-12.935720443725586],["แหล่ง",-12.93572235107422],["ᆞ",-12.935723304748535],["▁Tunnisteet",-12.935723304748535],["▁يەنە",-12.935723304748535],["▁विवरण",-12.935723304748535],["홀",-12.935723304748535],["ໄພ",-12.935728073120115],["▁decembra",-12.935728073120115],["▁gotin",-12.93574047088623],["▁kontrast",-12.935752868652344],["▁상태",-12.935755729675291],["▁неговите",-12.935760498046877],["▁zak",-12.935762405395508],["▁сіл",-12.935768127441406],["▁lift",-12.93577766418457],["izēt",-12.935806274414062],["愿意",-12.93582534790039],["彼此",-12.935829162597656],["▁Valentin",-12.935855865478516],["▁documents",-12.935856819152832],["yö",-12.935885429382324],["▁Ы",-12.935888290405272],["▁өзі",-12.935890197753906],["▁положи",-12.935894012451172],["ധ്യ",-12.935901641845703],["فته",-12.93592643737793],["▁Tran",-12.935949325561523],["▁lány",-12.935965538024902],["▁cilin",-12.935972213745115],["▁medici",-12.935972213745115],["▁пути",-12.935975074768066],["▁chemin",-12.93600368499756],["▁황",-12.9
36031341552734],["жыл",-12.936037063598633],["auteur",-12.936089515686035],["järg",-12.9360933303833],["▁зураг",-12.936116218566896],["ிட",-12.936141967773438],["vč",-12.936142921447754],["ණ්ඩ",-12.93617343902588],["▁কো",-12.936175346374512],["▁Pá",-12.936198234558104],["▁жить",-12.93620491027832],["▁dolg",-12.936211585998535],["▁naudoti",-12.936213493347168],["Zo",-12.936251640319824],["▁Simple",-12.936256408691406],["▁benzer",-12.936275482177734],["▁кайра",-12.936324119567873],["一款",-12.936362266540527],["▁acquis",-12.93640422821045],["▁candidato",-12.936407089233398],["▁znanja",-12.93645191192627],["▁ارتفاع",-12.936466217041016],["▁Player",-12.936494827270508],["ryhmä",-12.936498641967772],["▁hverdag",-12.936501502990724],["▁DIY",-12.936509132385254],["אחד",-12.93651008605957],["étude",-12.936525344848633],["▁börja",-12.936534881591797],["▁fazem",-12.93655014038086],["берг",-12.936586380004885],["▁merkezi",-12.936616897583008],["▁Cau",-12.936627388000488],["▁möte",-12.936633110046388],["dzić",-12.936707496643066],["伟",-12.936713218688965],["▁המי",-12.93673610687256],["සිංහ",-12.93674373626709],["imba",-12.936761856079102],["เกาหลี",-12.936779975891112],["▁अपडेट",-12.936781883239746],["▁තත්ත්වය",-12.936781883239746],["álně",-12.93678379058838],["न्छ",-12.936785697937012],["▁nigdy",-12.936786651611328],["▁වෙබ්",-12.936786651611328],["▁intelektual",-12.936789512634276],["▁бірінші",-12.93679428100586],["▁فترة",-12.936795234680176],["▁каражат",-12.93679904937744],["▁मुल",-12.936806678771973],["өд",-12.936808586120604],["▁Volgens",-12.936823844909668],["вате",-12.936856269836426],["▁Julia",-12.93688678741455],["バス",-12.93688678741455],["▁xal",-12.936901092529297],["คอน",-12.936902046203612],["รู้สึก",-12.936906814575195],["▁prata",-12.936988830566406],["лиги",-12.937015533447266],["▁yapıyor",-12.93703842163086],["뉴",-12.937051773071287],["▁expérience",-12.93708038330078],["▁వాడ",-12.937113761901855],["ላል",-12.93711757659912],["▁ब्र",-12.93711757659912],["fej",-12.93712043762207],["धे",-12.937138557434082],["▁döntés",-12.937170028686523],["▁funksion",-12.937170028686523],["чала",-12.937211990356444],["▁boka",-12.93721866607666],["sako",-12.937243461608888],["▁älskar",-12.93724536895752],["▁cuius",-12.937267303466797],["DIN",-12.93728256225586],["识",-12.937294006347656],["ológico",-12.937349319458008],["σετε",-12.937372207641602],["▁голем",-12.93741512298584],["天津",-12.937427520751951],["杭州",-12.937445640563965],["零售",-12.937470436096191],["етті",-12.937475204467772],["大事",-12.937478065490724],["ब्ल",-12.937488555908203],["stabil",-12.937506675720217],["tane",-12.937507629394531],["▁robust",-12.93751049041748],["▁történt",-12.937541007995604],["▁madax",-12.937546730041504],["などで",-12.937588691711426],["▁חש",-12.93761157989502],["▁Buku",-12.937644004821776],["▁ismét",-12.937665939331056],["porn",-12.937702178955078],["cko",-12.937713623046877],["ncing",-12.937766075134276],["▁эп",-12.93777561187744],["вшись",-12.937788963317873],["גות",-12.93779754638672],["房地产",-12.9378023147583],["▁profile",-12.93781280517578],["ኣ",-12.937835693359377],["תק",-12.93783950805664],["▁ಕಡಿಮೆ",-12.937840461730955],["혈",-12.937840461730955],["Ě",-12.937841415405272],["▁نژاد",-12.937841415405272],["▁홈페이지",-12.937846183776855],["▁nhiêu",-12.93784999847412],["▁થયા",-12.93784999847412],["సె",-12.93785572052002],["▁doivent",-12.937859535217283],["▁Semoga",-12.937875747680664],["हरूले",-12.937895774841309],["▁lietuvių",-12.937899589538574],["▁Academia",-12.93793773651123],["ziya",-12.937946319580078],["نل",-12.9379539489746
1],["▁arkiv",-12.938011169433594],["laz",-12.938024520874023],["▁barát",-12.9380521774292],["Yo",-12.938058853149414],["emel",-12.938101768493652],["▁svojom",-12.938102722167969],["▁జరిగిన",-12.938116073608398],["odob",-12.938121795654297],["▁zela",-12.938125610351562],["反应",-12.938132286071776],["itātes",-12.938148498535156],["ώθηκε",-12.938150405883787],["▁posu",-12.938187599182127],["▁pomocą",-12.938190460205078],["▁Жел",-12.938220977783203],["nava",-12.938227653503418],["▁kalah",-12.938230514526367],["▁wystaw",-12.938244819641112],["▁مهال",-12.938282012939451],["▁هی",-12.938292503356934],["▁vriend",-12.938324928283691],["▁american",-12.938326835632324],["ρας",-12.938337326049805],["РА",-12.93834114074707],["▁spelen",-12.938343048095703],["如果您",-12.938403129577637],["ineen",-12.93841552734375],["ыла",-12.938425064086914],["▁Oil",-12.93842887878418],["ዶች",-12.938451766967772],["▁reprezent",-12.938477516174316],["τρό",-12.938505172729492],["ရင္း",-12.938525199890137],["▁Muuse",-12.938543319702148],["▁عید",-12.938549041748049],["എഫ്",-12.938693046569824],["산업",-12.938711166381836],["amb",-12.938722610473633],["لغان",-12.938726425170898],["vande",-12.938728332519531],["▁Lid",-12.938730239868164],["вис",-12.938745498657228],["▁Andrew",-12.93874740600586],["▁Πά",-12.938773155212402],["művész",-12.93885326385498],["▁šamp",-12.938867568969728],["讚",-12.93887424468994],["เสมอ",-12.9389009475708],["▁നടപടി",-12.938901901245115],["욕",-12.938901901245115],["▁вересня",-12.938902854919434],["▁войны",-12.938907623291016],["▁더욱",-12.938907623291016],["▁କଳା",-12.938925743103027],["▁آذر",-12.938936233520508],["бој",-12.938940048217772],["▁equi",-12.938952445983888],["▁Studie",-12.938960075378418],["▁particip",-12.938966751098633],["▁તેમને",-12.93896770477295],["տես",-12.938983917236328],["ieni",-12.939009666442873],["▁أيام",-12.93906021118164],["rren",-12.93910312652588],["▁център",-12.93910789489746],["▁weinig",-12.93911361694336],["▁හැර",-12.939144134521484],["piga",-12.93917751312256],["強調",-12.939189910888672],["Don",-12.939230918884276],["▁Lelaki",-12.939239501953123],["▁симптоми",-12.939257621765137],["ຊື່",-12.939258575439451],["שום",-12.93926239013672],["уване",-12.939302444458008],["യോടെ",-12.939327239990234],["▁membro",-12.93934726715088],["ຸກ",-12.93936538696289],["▁Qe",-12.939401626586914],["ಂಟ್",-12.939438819885254],["▁allra",-12.939444541931152],["deks",-12.93949031829834],["▁múlt",-12.939519882202148],["▁lives",-12.93955135345459],["▁çar",-12.939567565917969],["cious",-12.939570426940918],["▁zegt",-12.939574241638184],["▁olevat",-12.939579963684082],["betaling",-12.939604759216309],["面临",-12.939616203308104],["▁Azken",-12.939640998840332],["မဟုတ္",-12.939654350280762],["ecto",-12.939656257629396],["værk",-12.939659118652344],["र्व",-12.939669609069824],["▁coincid",-12.93967342376709],["▁світі",-12.939682006835938],["rimo",-12.939688682556152],["▁ድ",-12.939704895019531],["▁Bale",-12.939717292785645],["▁متعدد",-12.939727783203123],["▁Dhi",-12.939730644226074],["▁قل",-12.939732551574709],["ιάς",-12.939741134643556],["▁slit",-12.939743995666504],["対",-12.939748764038086],["nette",-12.939778327941896],["პორტ",-12.939812660217283],["کری",-12.939834594726562],["olin",-12.939844131469728],["тельной",-12.939859390258787],["▁myö",-12.939878463745115],["挂",-12.939884185791016],["软",-12.93989086151123],["rəm",-12.939894676208496],["ੱਧ",-12.939908027648926],["▁1932",-12.939935684204102],["▁começar",-12.939947128295898],["▁ылайык",-12.939963340759276],["ანის",-12.939987182617188],["▁shfaq",-12.93999195098
877],["▁ghe",-12.940001487731934],["▁амьдрал",-12.940008163452148],["ЭН",-12.940017700195312],["▁piştî",-12.940018653869627],["дугаар",-12.94003677368164],["Hel",-12.940037727355955],["▁Jetzt",-12.940048217773438],["▁получения",-12.940052032470703],["▁idén",-12.94005298614502],["上網",-12.940065383911133],["▁է՝",-12.940071105957031],["rane",-12.940085411071776],["网友",-12.940128326416016],["鸡",-12.940140724182127],["naa",-12.940145492553713],["▁dituzten",-12.940149307250977],["รายละเอียด",-12.940152168273926],["туп",-12.940184593200684],["▁رییس",-12.94019889831543],["ndas",-12.940239906311035],["વન",-12.94025707244873],["▁zetten",-12.940293312072754],["▁આવ્યા",-12.940320014953612],["fű",-12.940322875976562],["▁skifte",-12.94032859802246],["▁жат",-12.940337181091309],["▁Weise",-12.940351486206056],["нику",-12.94036865234375],["▁Спо",-12.94037628173828],["▁решава",-12.940386772155762],["▁Md",-12.94039821624756],["pė",-12.940417289733888],["▁Tü",-12.940476417541504],["▁suit",-12.940478324890137],["тін",-12.94048309326172],["▁konfigur",-12.940500259399414],["▁Enter",-12.940516471862791],["▁shaq",-12.940518379211426],["▁Look",-12.940524101257324],["▁нові",-12.940526962280272],["ΙΣ",-12.94057273864746],["▁simptom",-12.940587997436523],["▁analys",-12.940597534179688],["▁Мај",-12.940604209899902],["teki",-12.940622329711914],["▁KAN",-12.940634727478027],["▁חוק",-12.94063663482666],["整體",-12.94063949584961],["▁гроб",-12.940645217895508],["စိ",-12.94064998626709],["▁мэд",-12.940668106079102],["taş",-12.9407320022583],["▁teise",-12.94074249267578],["▁recenzi",-12.940766334533691],["нное",-12.940775871276855],["向上",-12.940786361694336],["▁PK",-12.94082260131836],["▁סל",-12.94082260131836],["年底",-12.94083309173584],["倫",-12.940842628479004],["▁فهم",-12.940853118896484],["ခန္း",-12.940881729125977],["▁Orta",-12.94089698791504],["çilər",-12.94091510772705],["▁atingi",-12.940925598144531],["▁seura",-12.94095230102539],["▁verdiği",-12.940957069396973],["▁tratament",-12.940980911254885],["▁dunyo",-12.940993309020996],["▁طاقت",-12.940998077392578],["付き",-12.940999031066896],["тоо",-12.941006660461426],["ત્ત",-12.941006660461426],["▁cuestión",-12.941025733947754],["▁otázky",-12.941025733947754],["▁Renault",-12.94102668762207],["▁saugomos",-12.94102668762207],["▁તેમણે",-12.941027641296388],["▁დაიწყო",-12.941027641296388],["▁sạn",-12.941031455993652],["これまで",-12.941038131713867],["▁Heart",-12.9410400390625],["▁viktigt",-12.941059112548828],["kuha",-12.941088676452637],["ggan",-12.941091537475586],["вок",-12.94109344482422],["hhh",-12.941109657287598],["pedi",-12.941116333007812],["ESTE",-12.94112777709961],["hja",-12.941134452819824],["▁odbył",-12.94113540649414],["时候",-12.941153526306152],["ताना",-12.941157341003418],["WAR",-12.941204071044922],["لىقىنى",-12.941217422485352],["▁path",-12.941228866577148],["▁məlumatına",-12.94126033782959],["ives",-12.9412841796875],["▁indeks",-12.941302299499512],["endelea",-12.94134521484375],["ouvre",-12.94134521484375],["▁perut",-12.941356658935549],["dám",-12.941359519958496],["▁aprecia",-12.941391944885254],["▁Dragon",-12.941400527954102],["geh",-12.94140338897705],["▁РС",-12.941441535949709],["▁descend",-12.941457748413086],["▁ayrıca",-12.941465377807615],["▁znamená",-12.941475868225098],["▁దేవ",-12.941479682922363],["▁карти",-12.941490173339844],["▁interested",-12.941551208496094],["▁кес",-12.941553115844728],["▁nedostat",-12.941567420959473],["pap",-12.941577911376951],["▁እንዲያ",-12.94158172607422],["▁Dic",-12.941658973693848],["▁segle",-12.941670417785645],["▁goes",-12.941
696166992188],["▁дос",-12.941705703735352],["سخ",-12.94171905517578],["▁তারা",-12.941725730895996],["▁naby",-12.941739082336426],["kelijk",-12.941747665405272],["вик",-12.941754341125488],["бок",-12.941770553588867],["▁والب",-12.941774368286133],["▁श्र",-12.941802978515623],["▁മതി",-12.941827774047852],["▁দা",-12.941828727722168],["▁донесе",-12.941853523254396],["▁المش",-12.941856384277344],["▁katere",-12.941876411437988],["▁হলে",-12.94188404083252],["▁länger",-12.941906929016112],["▁Sudah",-12.941954612731934],["▁робота",-12.941977500915527],["澤",-12.941999435424805],["▁aider",-12.942026138305664],["ologic",-12.942046165466309],["▁документы",-12.94205093383789],["▁уран",-12.942052841186523],["licit",-12.942076683044434],["▁régi",-12.942078590393066],["優質",-12.942081451416016],["▁dessutom",-12.942090034484863],["влия",-12.942093849182127],["▁ஸ்ரீ",-12.942094802856444],["▁terras",-12.942095756530762],["▁لندن",-12.94211196899414],["वेद",-12.942116737365724],["▁normalt",-12.94212532043457],["логи",-12.94212818145752],["▁أهم",-12.942137718200684],["▁diterima",-12.942164421081545],["▁స్ట",-12.942185401916504],["џи",-12.942200660705566],["Қазақстан",-12.942218780517578],["ETI",-12.942220687866213],["▁आव",-12.942231178283691],["▁gyvena",-12.942242622375488],["▁telur",-12.942242622375488],["டே",-12.94225788116455],["ONI",-12.942259788513184],["عو",-12.942325592041016],["gép",-12.942362785339355],["ваю",-12.942377090454102],["▁سگ",-12.94239616394043],["ाएर",-12.942402839660645],["ლს",-12.94243049621582],["ಿವೆ",-12.942458152770996],["koti",-12.942471504211426],["▁аба",-12.94247341156006],["тиме",-12.942499160766602],["ṇ",-12.942501068115234],["ცხა",-12.942513465881348],["або",-12.942530632019045],["проб",-12.942530632019045],["▁Ghi",-12.942547798156738],["యే",-12.942549705505373],["ไร",-12.942584991455078],["▁जास्त",-12.94262409210205],["▁الصين",-12.942631721496582],["▁dienā",-12.942641258239746],["▁ود",-12.942652702331545],["पेक्षा",-12.942659378051758],["tanya",-12.942693710327148],["ុះ",-12.942703247070312],["▁पढ",-12.942703247070312],["вани",-12.942705154418944],["▁rhe",-12.942727088928224],["▁Format",-12.942745208740234],["ჯა",-12.942749977111816],["สะ",-12.94275951385498],["▁Especial",-12.942766189575195],["▁ദൈവ",-12.942781448364258],["Hy",-12.942792892456056],["16)",-12.942890167236328],["visión",-12.942935943603516],["▁קא",-12.942953109741213],["ادات",-12.942974090576172],["шић",-12.942977905273438],["YK",-12.943008422851562],["▁lava",-12.943011283874512],["▁субъект",-12.943015098571776],["▁weiteren",-12.943022727966309],["▁mehrere",-12.943028450012209],["爽",-12.943058013916016],["矛盾",-12.94306755065918],["▁парт",-12.943092346191406],["清潔",-12.943110466003418],["▁Award",-12.943111419677734],["▁miš",-12.94312572479248],["мис",-12.943151473999023],["▁opak",-12.943151473999023],["▁cầm",-12.943154335021973],["▁terpaksa",-12.943154335021973],["▁άλλες",-12.943154335021973],["▁প্রশ্ন",-12.943154335021973],["▁trenutno",-12.943157196044922],["▁diciembre",-12.943158149719238],["▁İsrail",-12.943158149719238],["▁дитини",-12.943158149719238],["▁Technologie",-12.943160057067873],["ვლე",-12.943161964416504],["▁آسیب",-12.943163871765137],["dge",-12.943164825439451],["▁časopis",-12.943170547485352],["▁विकेट",-12.943170547485352],["▁чоловік",-12.943212509155272],["▁देता",-12.943220138549805],["სხ",-12.943241119384766],["▁sikit",-12.943249702453612],["▁pantalla",-12.943263053894045],["▁tukaj",-12.943269729614258],["▁vänner",-12.94327449798584],["▁suosi",-12.943285942077637],["ічного",-12.94332504272461],["ACE",
-12.943408012390137],["rande",-12.943428993225098],["▁Suur",-12.943466186523438],["Das",-12.9434814453125],["▁dienu",-12.943490982055664],["▁Lü",-12.943499565124512],["▁모델",-12.943501472473145],["检",-12.94350528717041],["ئان",-12.943514823913574],["byte",-12.943520545959473],["үк",-12.943532943725586],["šni",-12.94355297088623],["ΠΑ",-12.94355583190918],["▁matapos",-12.943573951721191],["ดวง",-12.94357967376709],["ರಾದ",-12.943591117858888],["šuje",-12.943599700927734],["▁öğrenci",-12.943642616271973],["міністра",-12.9436674118042],["▁INFORMA",-12.94367218017578],["▁подо",-12.943700790405272],["▁издание",-12.943743705749512],["▁будем",-12.943744659423828],["▁varnost",-12.943748474121094],["▁기사",-12.943770408630373],["수가",-12.943774223327637],["▁Кас",-12.943777084350586],["總是",-12.9437894821167],["▁accord",-12.943815231323242],["्ज",-12.943840980529783],["▁Rasa",-12.943893432617188],["▁المصري",-12.943896293640137],["▁kuvia",-12.94390106201172],["▁երգ",-12.943913459777832],["عی",-12.943921089172363],["siti",-12.943927764892578],["ambia",-12.943954467773438],["стори",-12.94395923614502],["▁Gast",-12.94395923614502],["ADE",-12.944035530090332],["▁cabelo",-12.944059371948242],["▁sahib",-12.944072723388672],["Ein",-12.944108963012695],["kiwa",-12.944120407104492],["يرا",-12.944133758544922],["▁гурт",-12.944133758544922],["geno",-12.94414234161377],["▁сбор",-12.944157600402832],["systemet",-12.94416618347168],["oaren",-12.944178581237791],["▁Dubai",-12.94419002532959],["▁помощь",-12.94419765472412],["▁аты",-12.944207191467283],["▁Komunejo",-12.944220542907717],["▁վճար",-12.944222450256348],["▁પટેલ",-12.944223403930664],["▁ምስ",-12.944233894348145],["▁luki",-12.944244384765623],["▁ümber",-12.94424533843994],["ឱ្យ",-12.94424819946289],["▁sürekli",-12.944276809692385],["▁temelj",-12.94428253173828],["▁가입",-12.944286346435549],["▁ياكى",-12.944293975830078],["ОВА",-12.94429874420166],["ことができる",-12.944307327270508],["居然",-12.944314002990724],["▁ڪندا",-12.944321632385254],["▁Жаңа",-12.94432258605957],["▁chlad",-12.94433879852295],["ുകളില്",-12.944365501403809],["▁मर",-12.94438362121582],["▁Haj",-12.944397926330566],["▁lavori",-12.944409370422363],["tifica",-12.944470405578612],["▁kävi",-12.944485664367676],["ڃ",-12.944502830505373],["▁ಸ್ಥಾನ",-12.944513320922852],["မဲ့",-12.944514274597168],["samma",-12.944567680358888],["โทร",-12.944578170776367],["▁netop",-12.944586753845217],["▁дълго",-12.944597244262695],["▁прызна",-12.944604873657228],["gə",-12.944635391235352],["洲",-12.944663047790527],["ຫາ",-12.944679260253906],["rante",-12.94472312927246],["220",-12.944738388061523],["▁lekarz",-12.944742202758787],["追加",-12.944743156433104],["▁жағдайда",-12.944746971130373],["次の",-12.944756507873535],["▁classes",-12.944807052612305],["▁admira",-12.944809913635254],["▁ζωής",-12.944811820983888],["지역",-12.94483757019043],["shon",-12.944865226745604],["▁аспект",-12.944866180419922],["pah",-12.94489288330078],["實現",-12.944896697998049],["ιές",-12.944906234741213],["▁serio",-12.944979667663574],["ope",-12.945001602172852],["▁содержа",-12.945009231567385],["▁мага",-12.945016860961914],["▁നടക്കുന്ന",-12.945033073425291],["▁ແລ້ວ",-12.94504165649414],["▁şans",-12.945042610168455],["тны",-12.945073127746582],["ďte",-12.94511890411377],["дардың",-12.94512176513672],["▁mash",-12.945128440856934],["МЕН",-12.945148468017578],["划",-12.945182800292969],["▁sprememb",-12.945216178894045],["危险",-12.945230484008787],["練",-12.945240020751951],["▁müavini",-12.945287704467772],["▁வாழ்க்கை",-12.945289611816406],["ເຮັດໃຫ້",-12.9452972412109
38],["▁холбоотой",-12.945297241210938],["ayey",-12.945313453674316],["▁olacaktır",-12.945335388183594],["ιες",-12.94534397125244],["▁horretan",-12.945356369018556],["▁dita",-12.945359230041504],["▁مفید",-12.945371627807615],["▁tvrd",-12.945378303527832],["▁Moh",-12.945380210876465],["далі",-12.94540023803711],["▁écrit",-12.945423126220703],["▁قابلیت",-12.945472717285156],["VAL",-12.9454984664917],["ۈر",-12.945500373840332],["▁pacientes",-12.94550609588623],["רשות",-12.945511817932127],["▁ажиллагааны",-12.945537567138672],["▁Жив",-12.945564270019531],["990",-12.945576667785645],["▁Работ",-12.945603370666504],["▁blij",-12.945616722106934],["▁դու",-12.94569492340088],["▁habis",-12.945712089538574],["зва",-12.945727348327637],["▁контролю",-12.945727348327637],["▁vento",-12.945738792419434],["mekuwa",-12.945773124694824],["▁ဂ",-12.945815086364746],["▁今天",-12.945817947387695],["▁թվականին",-12.945820808410645],["mode",-12.94583797454834],["▁Carol",-12.945850372314451],["▁sancti",-12.945868492126465],["ไม่สามารถ",-12.945895195007324],["bam",-12.94591999053955],["▁แ",-12.94597053527832],["ിക്കുന്നത്",-12.94599437713623],["▁fisik",-12.946005821228027],["puluh",-12.946022987365724],["▁AJ",-12.946029663085938],["▁nastavi",-12.946097373962402],["▁foydalanish",-12.946102142333984],["▁прв",-12.9461030960083],["▁անել",-12.946105003356934],["daba",-12.94617748260498],["▁опас",-12.946185111999512],["▁ჩემს",-12.946240425109863],["▁Low",-12.946259498596191],["▁మహా",-12.946293830871582],["歸",-12.946304321289062],["เลือด",-12.946351051330566],["ṭ",-12.9463529586792],["▁biashara",-12.946355819702148],["낸",-12.946355819702148],["мага",-12.946359634399414],["▁перевір",-12.946362495422363],["▁معصوم",-12.946386337280272],["▁atenção",-12.946398735046388],["iline",-12.94643497467041],["▁дури",-12.946457862854004],["▁känna",-12.946487426757812],["▁Patrick",-12.946500778198242],["▁heißt",-12.946538925170898],["produkter",-12.946557998657228],["သူမ်ား",-12.94656467437744],["▁इसका",-12.946579933166504],["▁yaqin",-12.94658088684082],["▁realist",-12.946611404418944],["▁द्या",-12.946612358093262],["▁скла",-12.94661808013916],["浴",-12.946624755859377],["stände",-12.946626663208008],["べ",-12.946660041809082],["と同じ",-12.946666717529297],["▁yenidən",-12.946669578552246],["łuż",-12.94668674468994],["▁Тази",-12.946706771850586],["bole",-12.946707725524902],["▁ไม่มี",-12.946709632873535],["ज्ञान",-12.946730613708496],["ພັດທະນາ",-12.946747779846191],["▁komplekt",-12.946762084960938],["▁ਕਰਨਾ",-12.946789741516112],["▁folytat",-12.946799278259276],["ბურ",-12.946802139282228],["▁drets",-12.946806907653809],["じゃ",-12.946839332580566],["▁ရက္",-12.946846961975098],["▁ув",-12.946850776672363],["▁bíl",-12.94686794281006],["ბულ",-12.94688892364502],["▁운",-12.94689655303955],["বী",-12.94696044921875],["hü",-12.946974754333496],["vér",-12.94698429107666],["სტ",-12.94700050354004],["▁देते",-12.947007179260254],["浪",-12.947020530700684],["▁Mensch",-12.947021484375],["ವರಿಗೆ",-12.947038650512695],["▁Girls",-12.947066307067873],["▁деди",-12.947072982788086],["▁1923",-12.947161674499512],["fut",-12.94716453552246],["▁contacte",-12.94716739654541],["▁کھ",-12.94717788696289],["▁DIE",-12.947185516357422],["▁განა",-12.947185516357422],["▁reer",-12.947202682495115],["સ્ક",-12.947236061096191],["▁Seo",-12.947264671325684],["ማን",-12.947296142578123],["▁responsabilidad",-12.947340965270996],["▁மொழி",-12.947352409362791],["▁አይደለም",-12.947365760803224],["孤",-12.94737148284912],["▁реально",-12.947381973266602],["meny",-12.947395324707031],["網頁",-12.9474220275878
9],["▁menjalankan",-12.947425842285156],["▁sắp",-12.947425842285156],["▁themselves",-12.947425842285156],["▁ନିର୍ବାଚନ",-12.947425842285156],["▁января",-12.947428703308104],["▁ቁጥር",-12.947428703308104],["▁arbejder",-12.947431564331056],["▁белгілі",-12.947437286376951],["zab",-12.947440147399902],["▁Deputación",-12.947443962097168],["▁ofisi",-12.94745635986328],["▁κλ",-12.947480201721191],["ብን",-12.947481155395508],["ထုတ်",-12.947486877441406],["ჩუ",-12.94749927520752],["غو",-12.947507858276367],["▁exercitation",-12.947516441345217],["聊天",-12.94752311706543],["▁Jah",-12.947548866271973],["Sc",-12.947551727294922],["▁Savi",-12.94756031036377],["বল",-12.947566032409668],["▁தன்",-12.947576522827148],["▁Евро",-12.947580337524414],["▁Selatan",-12.947613716125488],["dome",-12.947614669799805],["revolu",-12.947629928588867],["▁쓰",-12.947632789611816],["fiq",-12.947635650634766],["ніс",-12.947644233703612],["▁román",-12.947647094726562],["τρέ",-12.94767951965332],["▁Παρα",-12.947697639465332],["▁နဲ႔",-12.94773292541504],["RV",-12.947734832763672],["▁draai",-12.947738647460938],["▁ракет",-12.947739601135254],["Το",-12.947782516479492],["▁сир",-12.94779109954834],["▁alb",-12.947793960571287],["iška",-12.947794914245604],["▁வேண்டிய",-12.947796821594238],["▁اینترنتی",-12.947818756103516],["▁Құ",-12.947823524475098],["▁kontaktannonser",-12.947839736938477],["άζ",-12.947853088378906],["ramai",-12.947870254516602],["soft",-12.947884559631348],["▁repro",-12.94797706604004],["ಲೀ",-12.947983741760254],["äder",-12.948009490966797],["៣០",-12.948009490966797],["▁guma",-12.948016166687012],["採",-12.94803524017334],["▁мени",-12.948060035705566],["▁bibli",-12.948079109191896],["▁televisi",-12.94808292388916],["▁Inilah",-12.94810390472412],["▁我們",-12.948118209838867],["▁зали",-12.948142051696776],["▁указ",-12.948145866394045],["bn",-12.94817066192627],["▁порт",-12.948193550109863],["▁pier",-12.948198318481444],["وہ",-12.948206901550291],["▁Արմեն",-12.94822597503662],["▁አገልግሎት",-12.948248863220217],["▁хэсэг",-12.948253631591797],["reb",-12.948270797729492],["พัน",-12.94827651977539],["▁hember",-12.948284149169922],["neri",-12.948293685913086],["ွန်",-12.948305130004885],["▁sebelah",-12.94833755493164],["draug",-12.948345184326172],["▁slík",-12.948373794555664],["▁бях",-12.948378562927246],["პატი",-12.94839096069336],["સો",-12.948427200317385],["▁nevez",-12.948433876037598],["簽",-12.948439598083496],["state",-12.948443412780762],["പ്പെടുന്ന",-12.948482513427734],["Comparteix",-12.94849681854248],["▁viesnīcu",-12.94849681854248],["▁صحیح",-12.94849681854248],["▁மீண்டும்",-12.94849681854248],["▁मदद",-12.948503494262695],["ອມ",-12.948515892028809],["rē",-12.948516845703123],["▁њега",-12.948555946350098],["▁Bə",-12.948559761047363],["▁Kategorie",-12.948577880859377],["▁kuning",-12.948623657226562],["▁technique",-12.94862461090088],["各自",-12.948638916015623],["妙",-12.948644638061523],["▁Salvador",-12.94864559173584],["心配",-12.948673248291016],["▁вигляді",-12.948675155639648],["▁illi",-12.948684692382812],["▁haciendo",-12.948697090148926],["ෂා",-12.94871425628662],["τής",-12.948721885681152],["bani",-12.948728561401367],["▁سير",-12.948755264282228],["уро",-12.94879913330078],["рку",-12.948811531066896],["▁aí",-12.948833465576172],["போது",-12.948837280273438],["isierung",-12.948878288269045],["▁Mø",-12.948880195617676],["▁insanın",-12.94888687133789],["▁уред",-12.94892406463623],["▁этап",-12.948945999145508],["вић",-12.948972702026367],["▁rasp",-12.948973655700684],["mede",-12.949020385742188],["▁दिला",-12.949026107788086],["▁၁
၀",-12.949042320251465],["▁insu",-12.949055671691896],["▁tehnika",-12.94906234741211],["esz",-12.94907569885254],["جلس",-12.949078559875488],["▁ipso",-12.94908046722412],["yaan",-12.949089050292969],["▁Besuch",-12.949111938476562],["原來",-12.94911766052246],["▁général",-12.949122428894045],["▁daļa",-12.949155807495115],["変化",-12.949155807495115],["▁कम्पनी",-12.949226379394531],["▁4.1",-12.94925308227539],["▁самой",-12.949263572692873],["▁군",-12.94930648803711],["▁نە",-12.949325561523438],["ثير",-12.949349403381348],["▁атты",-12.949429512023926],["גער",-12.949459075927734],["▁Мис",-12.94946575164795],["vro",-12.949480056762695],["ėtų",-12.949480056762695],["▁سائ",-12.949496269226074],["▁kere",-12.949507713317873],["бук",-12.949508666992188],["▁circuit",-12.949517250061035],["жени",-12.949522972106934],["ალის",-12.949532508850098],["▁ବୋ",-12.949548721313477],["▁Afghanistan",-12.949567794799805],["▁маз",-12.949567794799805],["▁بنابراین",-12.949567794799805],["▁entwickelt",-12.94956874847412],["▁működés",-12.94956874847412],["▁државни",-12.94957160949707],["▁শেয়ার",-12.949572563171388],["▁എങ്ങനെ",-12.949578285217283],["▁Webwerf",-12.949579238891602],["▁Account",-12.949580192565918],["פיתוח",-12.949589729309082],["एफ",-12.949596405029297],["නායක",-12.949603080749512],["▁Крас",-12.949603080749512],["▁Preto",-12.949609756469728],["вају",-12.949612617492676],["采购",-12.949617385864258],["▁ಅನು",-12.949634552001951],["▁ДЕ",-12.94963550567627],["▁Café",-12.949644088745115],["▁Họ",-12.94965648651123],["▁ಸುದ್ದಿಗಳು",-12.949674606323242],["؟؟",-12.949692726135254],["▁климат",-12.94972038269043],["ନେ",-12.949743270874023],["▁sept",-12.949743270874023],["ropa",-12.949782371520996],["▁мақсаты",-12.949787139892578],["МК",-12.949789047241213],["δες",-12.949800491333008],["Tİ",-12.94985294342041],["▁операция",-12.949858665466309],["▁ઘર",-12.949877738952637],["म्ब",-12.949905395507812],["過程中",-12.949908256530762],["▁Admin",-12.949931144714355],["福利",-12.949960708618164],["Ik",-12.94997501373291],["▁likvid",-12.949976921081545],["▁eurot",-12.949986457824709],["▁ກະ",-12.950013160705566],["▁Koment",-12.950032234191896],["▁બંધ",-12.950061798095703],["reik",-12.950105667114258],["▁Fyw",-12.950106620788574],["ьную",-12.950112342834473],["தன்",-12.950128555297852],["atko",-12.950130462646484],["cykl",-12.950150489807127],["▁Sya",-12.950161933898926],["NIA",-12.950164794921877],["▁Στ",-12.950190544128418],["▁restitu",-12.950199127197266],["▁escu",-12.950201034545898],["▁स्थिति",-12.950203895568848],["▁გამოც",-12.950207710266112],["▁Dobro",-12.950218200683594],["▁જી",-12.950222969055176],["▁nadie",-12.950258255004885],["СТА",-12.950294494628906],["اهي",-12.950313568115234],["ส่วนลด",-12.950361251831056],["cionar",-12.950374603271484],["zw",-12.950374603271484],["ຕັ້ງ",-12.950376510620115],["▁कब",-12.950387954711914],["▁팀",-12.950387954711914],["▁initial",-12.950390815734863],["▁låt",-12.95040798187256],["arg",-12.950474739074709],["үүчү",-12.950490951538086],["添",-12.950509071350098],["赴",-12.950509071350098],["إن",-12.95053768157959],["执",-12.95054054260254],["▁फार",-12.95055866241455],["▁számít",-12.950573921203612],["їзд",-12.950575828552246],["默",-12.950578689575195],["▁תא",-12.950579643249512],["lod",-12.950594902038574],["iky",-12.950611114501951],["جال",-12.950621604919434],["քս",-12.950624465942385],["▁продължава",-12.950634956359863],["ปล่อย",-12.950639724731444],["▁diketahui",-12.950641632080078],["▁маңызды",-12.950641632080078],["▁فبراير",-12.950641632080078],["▁toimub",-12.950643539428713],["▁fàcil",-12.950644
493103027],["▁европски",-12.95064640045166],["▁όλων",-12.950647354125977],["الات",-12.950648307800291],["▁telefón",-12.95066261291504],["▁досить",-12.950666427612305],["▁xalqaro",-12.95068645477295],["prave",-12.950687408447266],["▁минимал",-12.950690269470217],["▁znan",-12.95069980621338],["▁المنتدى",-12.95069980621338],["แพ้",-12.950739860534668],["നില്",-12.950798034667969],["分子",-12.950798034667969],["oasa",-12.950801849365234],["▁Volvo",-12.950803756713867],["▁uniform",-12.95082187652588],["▁ползва",-12.95082187652588],["▁araç",-12.950835227966309],["▁норма",-12.950873374938965],["spite",-12.950882911682127],["йс",-12.95089340209961],["▁solicitar",-12.95089340209961],["▁şah",-12.95093822479248],["▁murid",-12.950984001159668],["▁sacra",-12.950986862182615],["ибиз",-12.950993537902832],["▁szeretné",-12.95099639892578],["▁બદલ",-12.950997352600098],["▁Egypt",-12.951008796691896],["▁atlet",-12.95103645324707],["▁ڄاڻ",-12.95103645324707],["▁pö",-12.951048851013184],["genommen",-12.9510498046875],["ထဲက",-12.95107364654541],["SN",-12.951080322265623],["▁hente",-12.95111083984375],["さんは",-12.951112747192385],["▁καθαρ",-12.951114654541016],["ക്കുക",-12.95111846923828],["▁gus",-12.951132774353027],["بلغ",-12.951168060302734],["▁1935",-12.951177597045898],["פעל",-12.951189041137695],["fö",-12.951211929321287],["କୋ",-12.95121955871582],["▁ئۆل",-12.95124053955078],["▁тулд",-12.951263427734377],["▁gitt",-12.951284408569336],["يلا",-12.951292991638184],["▁groß",-12.95129680633545],["හෙ",-12.951302528381348],["▁जाए",-12.95130443572998],["бүлө",-12.951332092285156],["▁Стара",-12.951354026794434],["чага",-12.951363563537598],["▁хэр",-12.951376914978027],["▁រក",-12.951390266418455],["ნები",-12.951470375061035],["▁ਤਾ",-12.951472282409668],["▁ಕೊಟ್ಟ",-12.951529502868652],["▁вред",-12.95154857635498],["▁tane",-12.951562881469728],["▁সম্",-12.951581954956056],["▁ឈ្មោះ",-12.951584815979004],["▁Hali",-12.951611518859863],["▁չկա",-12.95161247253418],["ลึก",-12.951618194580078],["▁եւս",-12.951621055603027],["▁smil",-12.95168113708496],["คอมพิวเตอร์",-12.951714515686035],["ฉบับ",-12.951714515686035],["▁étaient",-12.951717376708984],["▁ترامپ",-12.9517183303833],["सक",-12.951722145080566],["ざ",-12.951727867126465],["▁egindako",-12.951729774475098],["▁נוסף",-12.951729774475098],["つい",-12.951732635498049],["▁ಇಂದು",-12.95175552368164],["▁údajů",-12.951770782470703],["▁Türkçe",-12.951774597167969],["▁dư",-12.951774597167969],["▁собою",-12.951788902282717],["már",-12.951799392700195],["▁баа",-12.95180606842041],["ندی",-12.951807975769045],["▁ground",-12.95181655883789],["▁134",-12.95183277130127],["▁İm",-12.95185089111328],["▁itulah",-12.951878547668455],["▁შეა",-12.951909065246582],["▁feed",-12.951921463012695],["نش",-12.951977729797363],["АРА",-12.951980590820312],["▁Pertama",-12.951982498168944],["र्ग",-12.952019691467283],["▁SW",-12.952032089233398],["ኤል",-12.952045440673828],["▁automatisch",-12.952070236206056],["ელის",-12.952101707458496],["irao",-12.952125549316406],["рел",-12.952133178710938],["▁qualifica",-12.952134132385254],["очен",-12.952139854431152],["▁lup",-12.952141761779783],["▁ملکی",-12.952269554138184],["γνώ",-12.952275276184082],["جعل",-12.952277183532717],["ROM",-12.952353477478027],["▁stvara",-12.952354431152344],["способ",-12.95236110687256],["кас",-12.952362060546877],["MOS",-12.952375411987305],["nty",-12.952386856079102],["પર",-12.952407836914062],["았",-12.952422142028809],["చా",-12.952423095703123],["▁hohe",-12.95242404937744],["▁socijal",-12.952442169189451],["▁voivat",-12.952446937561035],["▁т
огаш",-12.952507972717283],["▁rapor",-12.952516555786133],["רן",-12.95252799987793],["анне",-12.952532768249512],["дыгын",-12.9525728225708],["▁възможно",-12.952582359313965],["▁ورزشی",-12.952588081359863],["كات",-12.952651977539062],["▁Spel",-12.952661514282228],["▁Wein",-12.952672004699709],["vata",-12.952672958374023],["avit",-12.952676773071287],["стрі",-12.952676773071287],["▁ਰੋ",-12.95268440246582],["লু",-12.952689170837402],["▁diplomat",-12.95269775390625],["▁लिन",-12.952703475952148],["▁ts",-12.952723503112791],["Im",-12.952733993530272],["辣",-12.95275592803955],["▁reactie",-12.952778816223145],["intérêt",-12.95279026031494],["▁Kementerian",-12.95279026031494],["▁növbəti",-12.95279026031494],["▁podmínky",-12.95279026031494],["▁pridržane",-12.95279026031494],["▁względu",-12.95279026031494],["▁nəzarət",-12.952791213989258],["▁تقديم",-12.952791213989258],["欲しい",-12.95279312133789],["▁Astfel",-12.952794075012209],["▁જાણો",-12.952816009521484],["▁주변",-12.95281982421875],["▁विवाह",-12.952821731567385],["▁amik",-12.952832221984863],["▁भो",-12.95285701751709],["▁pah",-12.952875137329102],["dido",-12.952889442443848],["ضغط",-12.952920913696287],["▁CN",-12.952922821044922],["▁алдында",-12.952924728393556],["▁Gren",-12.95292854309082],["▁RES",-12.9529390335083],["▁өгөх",-12.952975273132324],["▁сақта",-12.952997207641602],["▁ବିଶ୍ୱ",-12.95300579071045],["▁moji",-12.953007698059082],["保證",-12.953007698059082],["▁frais",-12.95301342010498],["centrum",-12.953031539916992],["dict",-12.95303726196289],["▁ფუ",-12.953051567077637],["МИ",-12.953058242797852],["▁ndodh",-12.953083038330078],["▁세상",-12.95309829711914],["ମାନଙ୍କୁ",-12.953103065490724],["▁dispone",-12.953112602233888],["▁கவிதை",-12.953120231628418],["▁બો",-12.953134536743164],["ologiya",-12.95313549041748],["の場合",-12.95313835144043],["▁szemben",-12.953154563903809],["▁recuperar",-12.953163146972656],["▁байгуулах",-12.953169822692873],["▁европейски",-12.95319652557373],["Українськ",-12.953205108642578],["▁रुप",-12.953241348266602],["▁dingen",-12.953310012817385],["▁API",-12.953330993652344],["보기",-12.953361511230469],["▁accu",-12.953371047973633],["сың",-12.953389167785645],["▁जनाको",-12.953414916992188],["FOR",-12.95341682434082],["▁книж",-12.95341682434082],["▁بابا",-12.953425407409668],["hela",-12.95343780517578],["แบบนี้",-12.953474044799805],["▁вік",-12.953506469726562],["हू",-12.953585624694824],["▁uus",-12.953630447387695],["▁tuto",-12.95363426208496],["lucht",-12.95363712310791],["▁başvuru",-12.95364761352539],["不在",-12.95366096496582],["民宿",-12.95366096496582],["▁ঘ",-12.953701972961426],["▁kommende",-12.95371150970459],["▁Magic",-12.95372200012207],["▁партии",-12.953722953796388],["έπ",-12.953741073608398],["▁bisher",-12.953741073608398],["følge",-12.953750610351562],["▁Chr",-12.953780174255373],["▁folgende",-12.95378589630127],["▁விட்டு",-12.953810691833496],["lmaydi",-12.953811645507812],["milla",-12.953817367553713],["јем",-12.953824043273926],["ೃ",-12.953824043273926],["▁trist",-12.953829765319824],["対象",-12.953837394714355],["嘛",-12.953839302062988],["בוד",-12.953858375549316],["สัปดาห์",-12.953866004943848],["ٔ",-12.953866958618164],["▁mühüm",-12.953866958618164],["▁പക്ഷേ",-12.953866958618164],["▁Temperatur",-12.95386791229248],["▁κυρίως",-12.95386791229248],["▁Сейчас",-12.953869819641112],["▁संगठन",-12.953869819641112],["▁أع",-12.953880310058594],["▁dziecka",-12.953885078430176],["▁केन्द्रीय",-12.953892707824709],["▁ΗΠΑ",-12.953914642333984],["▁pł",-12.953943252563477],["▁турат",-12.953947067260742],["▁तेव्हा",-12.9539680480957
03],["▁Brexit",-12.9539794921875],["▁aula",-12.953980445861816],["avgift",-12.953990936279297],["▁culoare",-12.954004287719728],["▁സംഭവ",-12.954007148742676],["▁Deg",-12.954022407531738],["▁roimh",-12.954028129577637],["▁VAR",-12.9540433883667],["ТР",-12.954046249389648],["▁1.1.",-12.954054832458496],["▁ٹر",-12.95407009124756],["MUS",-12.954108238220217],["eseen",-12.954132080078123],["dessous",-12.954148292541504],["▁لطفا",-12.95416259765625],["▁našej",-12.954164505004885],["download",-12.954188346862791],["▁osobi",-12.95419692993164],["▁contenido",-12.954217910766602],["šč",-12.95423698425293],["वड",-12.954242706298828],["▁ambil",-12.95427417755127],["▁луѓето",-12.954300880432127],["kjen",-12.954302787780762],["▁اخت",-12.954338073730469],["▁Bilim",-12.954379081726074],["▁Ukrain",-12.954380989074709],["▁Tang",-12.954392433166504],["▁doğal",-12.9544095993042],["▁යාව",-12.954413414001465],["▁tekee",-12.954429626464844],["▁tia",-12.954438209533691],["▁colli",-12.95444679260254],["▁partiya",-12.95447826385498],["▁yaşı",-12.954512596130373],["vég",-12.95452880859375],["үүлсэн",-12.954538345336914],["사를",-12.954544067382812],["rese",-12.954549789428713],["▁realizada",-12.954553604125977],["▁girtin",-12.954557418823242],["zeniu",-12.954565048217772],["▁Bland",-12.954585075378418],["ประสบการณ์",-12.954586029052734],["▁ચા",-12.954590797424316],["▁Februar",-12.954591751098633],["VAR",-12.954631805419922],["▁estructura",-12.954641342163086],["▁1928",-12.954657554626465],["ạc",-12.95468807220459],["▁bagno",-12.954692840576172],["▁прекра",-12.954712867736816],["juht",-12.954750061035156],["risë",-12.954769134521484],["▁الحديث",-12.95479679107666],["▁වහන්සේ",-12.95481777191162],["▁افتتاح",-12.954835891723633],["▁गाव",-12.954842567443848],["在我",-12.954845428466797],["▁Кру",-12.954849243164062],["▁babi",-12.954862594604492],["▁arbitra",-12.954867362976074],["▁kulttuuri",-12.95487117767334],["▁alunos",-12.954874038696287],["奔",-12.954899787902832],["フィ",-12.954909324645996],["▁Author",-12.95494270324707],["▁넘",-12.95494270324707],["▁Cumhurbaşkanı",-12.954944610595703],["▁vështirë",-12.954944610595703],["▁نتایج",-12.954944610595703],["ordin",-12.954947471618652],["పె",-12.954947471618652],["▁progetti",-12.954955101013184],["▁ادارے",-12.954955101013184],["▁Cyngor",-12.954962730407717],["▁தகவல்",-12.954963684082031],["▁dēļ",-12.954983711242676],["▁сума",-12.9550142288208],["장을",-12.955033302307127],["▁نظم",-12.955034255981444],["▁министар",-12.955038070678713],["▁қарай",-12.95504665374756],["▁2013-",-12.95506477355957],["▁حملہ",-12.955107688903809],["scribi",-12.955111503601074],["സർ",-12.955140113830566],["حافظ",-12.95517349243164],["संग",-12.95518970489502],["krov",-12.955209732055664],["inhos",-12.955228805541992],["मत",-12.955228805541992],["duga",-12.955235481262209],["▁çək",-12.955235481262209],["mira",-12.95525360107422],["▁atrodas",-12.955276489257812],["▁aktuellen",-12.955277442932127],["▁profe",-12.955277442932127],["ює",-12.955286979675291],["▁maraton",-12.955293655395508],["െങ്കില്",-12.955304145812988],["▁ஆர்",-12.955309867858888],["metri",-12.955314636230469],["bréf",-12.955323219299316],["▁טובה",-12.955361366271973],["رع",-12.955371856689451],["sabi",-12.955397605895996],["▁Taka",-12.955402374267578],["▁tirar",-12.955403327941896],["▁користе",-12.95541286468506],["▁aqueles",-12.955413818359377],["▁jednou",-12.955430030822754],["▁Zagreba",-12.9554443359375],["▁อา",-12.955451011657717],["fio",-12.95547580718994],["▁şeyi",-12.95548152923584],["ສູງ",-12.955487251281738],["▁сел",-12.955498695373535]
,["▁राय",-12.955509185791016],["▁tytuł",-12.955512046813965],["▁центральн",-12.95551872253418],["▁pełni",-12.955546379089355],["ደርግ",-12.955554962158203],["▁djelo",-12.955554962158203],["▁постави",-12.95557975769043],["زة",-12.955585479736328],["▁കര്",-12.955596923828123],["することができます",-12.955596923828123],["▁vidas",-12.95561408996582],["▁toma",-12.95561695098877],["▁խաղ",-12.955621719360352],["HAR",-12.955629348754885],["лиза",-12.955666542053224],["ύχ",-12.955673217773438],["▁Stato",-12.95570468902588],["ご利用",-12.95572566986084],["▁helgen",-12.955747604370115],["▁sfi",-12.955754280090332],["▁ثلاث",-12.955755233764648],["ุน",-12.955771446228027],["▁beru",-12.955785751342772],["▁السعودي",-12.955815315246582],["▁කෑ",-12.955833435058594],["▁idade",-12.955842971801758],["그래",-12.955849647521973],["نال",-12.955875396728516],["▁higi",-12.955877304077148],["▁তিন",-12.95588207244873],["▁파일",-12.955904960632324],["XO",-12.955924034118652],["▁мастер",-12.955936431884766],["▁appartement",-12.95595932006836],["污",-12.955975532531738],["▁virke",-12.955988883972168],["پن",-12.956010818481444],["♡",-12.95602321624756],["▁tablo",-12.956050872802734],["▁травня",-12.956074714660645],["▁सकिन्छ",-12.956090927124023],["ิด",-12.956099510192873],["▁COMP",-12.9561185836792],["▁Bhu",-12.956136703491213],["▁komentář",-12.956168174743652],["▁এটা",-12.956175804138184],["▁Despre",-12.956186294555664],["▁schema",-12.956189155578612],["▁thẳng",-12.95619010925293],["▁haz",-12.956225395202637],["ισχύ",-12.956235885620115],["ИР",-12.956255912780762],["▁بهمن",-12.956274032592772],["▁angel",-12.956291198730469],["▁özünü",-12.956308364868164],["▁екран",-12.956338882446287],["▁якої",-12.95634651184082],["▁maggior",-12.95635223388672],["ကြာ",-12.956379890441896],["▁nedenle",-12.956403732299805],["jia",-12.95641040802002],["▁ndeshje",-12.95641040802002],["▁Voit",-12.956413269042969],["▁nível",-12.956413269042969],["védelmi",-12.956415176391602],["▁lõ",-12.956424713134766],["MV",-12.956430435180664],["bju",-12.956439971923828],["▁define",-12.956498146057127],["▁এস",-12.95651912689209],["▁saņemt",-12.95652198791504],["átu",-12.956523895263672],["▁Professional",-12.956571578979492],["දෙන",-12.95661449432373],["▁lõpeta",-12.956639289855955],["▁لمن",-12.956674575805664],["都没有",-12.956685066223145],["zod",-12.956700325012209],["▁Mund",-12.956700325012209],["آن",-12.956704139709473],["的不",-12.956719398498535],["ฐ",-12.956725120544434],["ଜନ",-12.956727981567385],["▁Praw",-12.956727981567385],["menys",-12.956775665283203],["tiq",-12.956785202026367],["oxid",-12.956823348999023],["масы",-12.95683765411377],["вищ",-12.9568452835083],["▁starost",-12.956856727600098],["ėjau",-12.95686149597168],["ရည္",-12.956876754760742],["ásával",-12.956933975219728],["нять",-12.956950187683104],["▁برداری",-12.956954956054688],["儀",-12.956992149353027],["▁anbefale",-12.95701503753662],["涉",-12.95702075958252],["新北市",-12.957030296325684],["▁kredīt",-12.95706272125244],["▁palm",-12.957080841064451],["oooo",-12.957087516784668],["▁perniagaan",-12.957103729248049],["▁Σύμφωνα",-12.957103729248049],["▁изключително",-12.957104682922363],["▁ପରବର୍ତ୍ତୀ",-12.95710563659668],["▁सबैभन्दा",-12.957119941711426],["▁își",-12.957123756408691],["iç",-12.957127571105955],["▁Axmed",-12.95712947845459],["▁порядку",-12.957148551940918],["ставен",-12.957192420959473],["拿到",-12.957204818725586],["▁situação",-12.957256317138672],["▁ինքն",-12.957268714904783],["적이",-12.957274436950684],["▁actions",-12.957298278808594],["lida",-12.95731258392334],["▁usługi",-12.957332611083984],["▁mor
gon",-12.957347869873049],["最重要的",-12.957357406616213],["▁dispositivo",-12.957368850708008],["▁Iru",-12.957374572753906],["▁ხე",-12.957403182983398],["▁генер",-12.957409858703612],["▁особа",-12.95741367340088],["दीप",-12.957433700561523],["▁MER",-12.957452774047852],["tură",-12.957463264465332],["▁yön",-12.957531929016112],["▁Кат",-12.957541465759276],["▁вклад",-12.95754623413086],["▁اهداف",-12.957552909851074],["۵۰",-12.957559585571287],["▁şehir",-12.957573890686035],["▁Nesta",-12.9575834274292],["小姐",-12.9576416015625],["▁وکړه",-12.957651138305664],["▁سالم",-12.95765495300293],["▁என்ப",-12.957676887512209],["แท้",-12.957683563232422],["▁čim",-12.957704544067385],["▁dividi",-12.957731246948242],["zale",-12.95777988433838],["▁drejtat",-12.9578275680542],["பட",-12.957842826843262],["▁తీసుకు",-12.957842826843262],["▁الأس",-12.95784854888916],["cká",-12.957870483398438],["▁continuo",-12.957881927490234],["▁فضل",-12.95788288116455],["▁trăi",-12.957890510559082],["▁штат",-12.957893371582031],["▁홈",-12.957904815673828],["考えて",-12.957915306091309],["▁oplevelse",-12.957927703857422],["ành",-12.9579439163208],["▁საზოგადოება",-12.957948684692385],["▁Alg",-12.957972526550291],["mix",-12.957979202270508],["serv",-12.958002090454102],["▁Дол",-12.958003044128418],["▁kwenda",-12.958012580871582],["▁جے",-12.958016395568848],["▁vendég",-12.958023071289062],["▁играта",-12.958024978637695],["▁získal",-12.958033561706545],["▁जर",-12.958036422729492],["▁доба",-12.958044052124023],["▁egyszer",-12.958048820495604],["▁јавни",-12.958090782165527],["legu",-12.958099365234377],["laf",-12.95810890197754],["磁",-12.958147048950195],["знав",-12.95816135406494],["داع",-12.958168029785156],["▁تعریف",-12.958184242248535],["▁მიხედვით",-12.958184242248535],["▁халықаралық",-12.958185195922852],["▁POD",-12.958191871643066],["▁bëjnë",-12.958206176757812],["▁golongan",-12.958206176757812],["▁origina",-12.958209037780762],["৯",-12.958244323730469],["▁puedo",-12.958251953125],["▁يوجد",-12.958255767822266],["▁pruža",-12.958257675170898],["▁ცხოვრება",-12.958264350891112],["▁өткөн",-12.958300590515137],["▁trasforma",-12.95830535888672],["▁sagde",-12.958307266235352],["▁Phú",-12.9583158493042],["اجر",-12.958321571350098],["လုိက္",-12.958321571350098],["▁brev",-12.958348274230955],["Dar",-12.95836067199707],["יוו",-12.95840835571289],["▁معنا",-12.958413124084473],["▁Lik",-12.958416938781738],["▁ترك",-12.958434104919434],["IGA",-12.95844841003418],["▁gelijk",-12.958456039428713],["▁հարցեր",-12.958456039428713],["▁аракет",-12.958460807800291],["▁europeo",-12.958466529846191],["lase",-12.958513259887695],["▁principe",-12.958514213562012],["▁gyors",-12.95853328704834],["մբ",-12.958592414855955],["സ്ഥാന",-12.958612442016602],["ຢາ",-12.95862865447998],["אנט",-12.95863914489746],["One",-12.95864200592041],["ైనా",-12.958656311035156],["▁osztály",-12.958680152893066],["▁përmes",-12.958683013916016],["的世界",-12.958693504333496],["גיש",-12.958700180053713],["ლობა",-12.958712577819824],["▁նշել",-12.958727836608888],["ាប់",-12.95872974395752],["▁lol",-12.95876121520996],["ודי",-12.958791732788086],["ນະ",-12.958803176879885],["កាត់",-12.95883846282959],["▁kristen",-12.958850860595703],["▁sociedad",-12.958890914916992],["▁juhtu",-12.958895683288574],["▁Kaza",-12.95890998840332],["▁irá",-12.95893096923828],["▁остане",-12.958938598632812],["▁tambah",-12.958983421325684],["▁گردی",-12.959022521972656],["оод",-12.95904541015625],["▁נכון",-12.95904541015625],["▁तैयार",-12.959070205688477],["lová",-12.959089279174805],["ises",-12.95909023284912],["▁lindo",-12
.95910930633545],["▁референдум",-12.959117889404297],["▁reflekt",-12.959129333496094],["▁Moo",-12.959139823913574],["бија",-12.959157943725586],["issent",-12.959158897399902],["นั่น",-12.959189414978027],["īšana",-12.959199905395508],["იის",-12.959223747253418],["▁sözü",-12.959239959716797],["▁Außerdem",-12.959266662597656],["▁Châu",-12.959266662597656],["▁bầu",-12.959266662597656],["▁indrindra",-12.959266662597656],["▁ерөнхий",-12.959266662597656],["▁ټاکنو",-12.959266662597656],["▁ಭೇಟಿ",-12.959266662597656],["▁გარეშე",-12.959268569946287],["▁szerződés",-12.959271430969238],["▁תי",-12.959274291992188],["▁stockholm",-12.959275245666504],["▁forbedre",-12.959283828735352],["сця",-12.95928955078125],["▁ayant",-12.959304809570312],["▁لمدة",-12.959341049194336],["▁abba",-12.959346771240234],["ುವಂತೆ",-12.959378242492676],["▁жооп",-12.959380149841309],["▁habit",-12.95938491821289],["följ",-12.959385871887209],["සේ",-12.959397315979004],["▁રો",-12.959479331970217],["▁тиіс",-12.959489822387695],["▁sfer",-12.959492683410645],["▁නිර්මාණය",-12.95949649810791],["▁jus",-12.95949935913086],["▁Rh",-12.959511756896973],["▁tempos",-12.959555625915527],["laman",-12.95958137512207],["▁momba",-12.959602355957031],["▁میچ",-12.959602355957031],["ಂಚ",-12.959640502929688],["ngiz",-12.959641456604004],["ější",-12.959650993347168],["白色",-12.959651947021484],["▁خروج",-12.9596586227417],["▁Qor",-12.959689140319824],["ήσεων",-12.95970058441162],["▁vazut",-12.959711074829102],["번호",-12.95973777770996],["▁ქვე",-12.95974349975586],["▁институции",-12.959756851196287],["љен",-12.9597749710083],["▁igitur",-12.959783554077148],["▁настав",-12.959800720214844],["▁predpis",-12.959818840026855],["▁leith",-12.959869384765623],["▁Frans",-12.959882736206056],["▁bota",-12.95990753173828],["▁गणेश",-12.959943771362305],["▁mislim",-12.959956169128418],["15)",-12.959980964660645],["РГ",-12.960031509399414],["▁రామ",-12.960033416748049],["▁előre",-12.96003532409668],["▁adrese",-12.960043907165527],["ider",-12.960044860839844],["▁લાખ",-12.960090637207031],["▁Aires",-12.960105895996094],["▁gets",-12.960129737854004],["▁Сви",-12.9601469039917],["ático",-12.96015167236328],["мира",-12.960200309753418],["▁fshat",-12.96022891998291],["хат",-12.96023178100586],["▁maalin",-12.960245132446287],["▁ដាក់",-12.960262298583984],["▁Rab",-12.960272789001465],["▁Danmarks",-12.960285186767578],["宇宙",-12.960309982299805],["▁layihə",-12.960326194763184],["ebla",-12.960328102111816],["▁محور",-12.960342407226562],["Շ",-12.960350036621094],["▁beschikbaar",-12.960350036621094],["▁поддршка",-12.960350036621094],["▁ร้าน",-12.960360527038574],["▁ಪಡೆಯ",-12.960372924804688],["▁væk",-12.960376739501951],["▁Maroela",-12.960382461547852],["▁gallu",-12.960413932800291],["ούσαν",-12.96042251586914],["▁рослин",-12.960439682006836],["▁ຂຶ້ນ",-12.960444450378418],["▁▪",-12.960456848144531],["pravo",-12.960458755493164],["▁situasi",-12.960479736328123],["ქონ",-12.960514068603516],["▁گه",-12.960518836975098],["评估",-12.96053981781006],["▁холбоо",-12.96058750152588],["▁publique",-12.960589408874512],["▁galegos",-12.960591316223145],["tusi",-12.96059226989746],["porno",-12.96059799194336],["▁भएर",-12.96060562133789],["რკ",-12.960634231567385],["ાસ",-12.96065616607666],["ပန္း",-12.960660934448242],["▁ближ",-12.960662841796877],["▁сайты",-12.96069049835205],["出会い",-12.960705757141112],["ీయ",-12.960713386535645],["ціон",-12.960719108581545],["וויי",-12.960747718811035],["▁kostnad",-12.96075439453125],["uris",-12.9607572555542],["ັ",-12.96076202392578],["道德",-12.960765838623049],["ಐ",-12
.960772514343262],["▁Trá",-12.96079444885254],["▁ስር",-12.96081256866455],["▁képviselő",-12.960817337036133],["▁spēles",-12.96082592010498],["▁gü",-12.960833549499512],["▁Feder",-12.960994720458984],["▁výhod",-12.96102523803711],["غي",-12.961033821105955],["lóg",-12.96104335784912],["▁répondre",-12.96104335784912],["IKO",-12.961065292358398],["alen",-12.961066246032717],["deren",-12.961069107055664],["▁сл",-12.961071968078612],["▁Logga",-12.961077690124512],["huhu",-12.961085319519045],["▁ձ",-12.961089134216309],["fos",-12.961095809936523],["▁امکانات",-12.961106300354004],["tric",-12.96110725402832],["▁చేస",-12.961124420166016],["ησαν",-12.96113109588623],["▁Schule",-12.961139678955078],["▁zmiany",-12.96116542816162],["▁työn",-12.961182594299316],["гледа",-12.961190223693848],["▁leku",-12.961200714111328],["▁گرفتن",-12.961210250854492],["▁pocz",-12.961236953735352],["▁presencia",-12.9612398147583],["▁abad",-12.961248397827148],["▁खुल",-12.961260795593262],["▁dank",-12.961297988891602],["skrib",-12.961318969726562],["kowi",-12.961353302001951],["剂",-12.961377143859863],["▁നയ",-12.961380958557127],["▁dones",-12.961387634277344],["▁nových",-12.961431503295898],["▁keuntungan",-12.961434364318848],["▁січня",-12.961434364318848],["▁områder",-12.961441040039062],["▁සේවය",-12.961442947387695],["▁Конкурс",-12.961444854736328],["▁facili",-12.961453437805176],["▁orð",-12.961459159851074],["▁vienmēr",-12.96146011352539],["▁Kültür",-12.961465835571287],["▁Töö",-12.961496353149414],["▁послова",-12.961504936218262],["▁وقف",-12.96151065826416],["▁áhrif",-12.96152400970459],["▁fazendo",-12.96152687072754],["▁kjæresten",-12.961541175842283],["pac",-12.961557388305664],["▁szerv",-12.961570739746094],["▁git",-12.961578369140623],["cijo",-12.961584091186523],["▁क्ल",-12.96159553527832],["▁subir",-12.961600303649902],["▁pamet",-12.9616117477417],["kerül",-12.961626052856444],["▁neutral",-12.961626052856444],["▁ສໍາ",-12.961630821228027],["лична",-12.961647987365724],["ату",-12.961658477783203],["▁жөн",-12.961662292480469],["▁బ్ర",-12.961666107177734],["▁przedstawi",-12.961670875549316],["▁ændre",-12.961670875549316],["▁ተከ",-12.961682319641112],["▁गाउँ",-12.961687088012695],["▁उठा",-12.961700439453123],["▁ചെറു",-12.961706161499023],["▁reaksi",-12.961712837219238],["▁legislativ",-12.961713790893556],["ковий",-12.961715698242188],["▁otkri",-12.961727142333984],["▁شرق",-12.961746215820312],["▁meilleur",-12.961790084838867],["▁léir",-12.9617919921875],["▁koostöö",-12.961793899536133],["್ಮ",-12.961825370788574],["▁pesca",-12.96183967590332],["anos",-12.961853981018066],["मन्त्री",-12.96187686920166],["▁çatdır",-12.961886405944824],["▁лиценз",-12.961886405944824],["人在",-12.96189022064209],["▁merak",-12.961892127990724],["▁minuto",-12.961898803710938],["dib",-12.961901664733888],["▁Önkormányzat",-12.961901664733888],["vanie",-12.961902618408203],["▁Dú",-12.96190357208252],["cala",-12.961909294128418],["▁Prak",-12.961942672729492],["řit",-12.961943626403809],["vę",-12.961981773376465],["▁ມັນ",-12.961987495422363],["失去",-12.962003707885742],["▁հիմնական",-12.962007522583008],["mıyor",-12.96201992034912],["▁Bruk",-12.962031364440918],["simi",-12.9620361328125],["いか",-12.962041854858398],["▁Takk",-12.962042808532717],["▁skru",-12.96206283569336],["▁quán",-12.962098121643066],["▁Paper",-12.962130546569824],["▁труб",-12.962135314941406],["imid",-12.962150573730469],["▁saņem",-12.96215534210205],["izki",-12.96216869354248],["▁მეტ",-12.962181091308594],["Nov",-12.962190628051758],["▁процедура",-12.962198257446287],["احت",-12.96220
6840515137],["▁maku",-12.962210655212402],["ደም",-12.962236404418944],["▁regis",-12.962263107299805],["各类",-12.962278366088867],["ēties",-12.962308883666992],["▁coñecer",-12.962308883666992],["▁ամենա",-12.962319374084473],["illon",-12.962332725524902],["▁Bizim",-12.962339401245115],["tler",-12.962346076965332],["▁zaten",-12.962357521057127],["▁euskara",-12.96237564086914],["▁подоб",-12.962395668029783],["dzīvo",-12.962431907653809],["map",-12.962437629699709],["▁haram",-12.962440490722656],["▁Û",-12.962446212768556],["▁ganzen",-12.962447166442873],["IMI",-12.962448120117188],["sort",-12.962462425231934],["牧",-12.962470054626465],["TRI",-12.96251392364502],["▁അമേരിക്ക",-12.962520599365234],["▁블로그",-12.962520599365234],["▁նախկին",-12.962523460388184],["▁Wien",-12.962528228759766],["▁இன்னும்",-12.962529182434082],["▁कार्ड",-12.962530136108398],["▁телевиз",-12.962532997131348],["▁identificar",-12.962535858154297],["▁باشگاه",-12.962549209594728],["▁ว่า",-12.96255588531494],["▁tider",-12.962567329406738],["▁الشخص",-12.962573051452637],["▁1/3",-12.962586402893066],["▁zid",-12.962590217590332],["eman",-12.96259307861328],["▁kuk",-12.96261215209961],["موظف",-12.96261501312256],["▁ඔන්න",-12.962651252746582],["▁daardie",-12.962658882141112],["▁பிற",-12.962669372558594],["kauppa",-12.96268081665039],["เข้าใจ",-12.96269989013672],["▁golpe",-12.962726593017578],["▁робити",-12.962727546691896],["colli",-12.962732315063477],["▁etməyə",-12.96274185180664],["▁elektronisk",-12.962779998779297],["bje",-12.96278953552246],["▁groei",-12.962797164916992],["▁karty",-12.9628267288208],["ότερο",-12.962854385375977],["ゲ",-12.96285915374756],["ခ်စ္",-12.962862968444824],["▁എല്ലാം",-12.962894439697266],["▁פרק",-12.962930679321287],["▁HDP",-12.96294116973877],["IOS",-12.962953567504885],["▁Pré",-12.962957382202148],["▁beteg",-12.962969779968262],["▁ಗಳು",-12.962985038757324],["▁Æ",-12.96300983428955],["תנו",-12.963018417358398],["ਝ",-12.963045120239258],["huma",-12.963046073913574],["szter",-12.963101387023926],["▁своем",-12.963125228881836],["▁מחשב",-12.96313190460205],["dei",-12.963133811950684],["對方",-12.963150024414062],["▁necessita",-12.963196754455566],["三十",-12.963208198547363],["▁kio",-12.963211059570312],["활",-12.963228225708008],["▁ఖ",-12.96324348449707],["▁Rusya",-12.963245391845703],["സില്",-12.96325397491455],["hnu",-12.963254928588867],["ლობ",-12.963278770446776],["ոմ",-12.963295936584473],["おり",-12.963301658630373],["▁anului",-12.963315963745115],["▁заявление",-12.963321685791016],["seda",-12.963330268859863],["▁practica",-12.963367462158203],["ಂಕ",-12.963372230529783],["▁Ong",-12.963375091552734],["bum",-12.96337604522705],["ကေတာ့",-12.963379859924316],["▁офис",-12.963397026062012],["▁poziom",-12.963431358337402],["зин",-12.963467597961426],["ಗೇ",-12.963467597961426],["▁كون",-12.963489532470703],["нског",-12.96352195739746],["SY",-12.96353244781494],["▁বেশি",-12.963533401489258],["▁Ес",-12.963544845581056],["ታቸውን",-12.963561058044434],["sain",-12.963584899902344],["▁bidra",-12.963586807250977],["巴黎",-12.963590621948242],["提供的",-12.963590621948242],["ด่วน",-12.963605880737305],["▁gjithashtu",-12.963607788085938],["▁menimbulkan",-12.963607788085938],["▁pertandingan",-12.963607788085938],["▁буџет",-12.963607788085938],["▁తెలిపారు",-12.963607788085938],["▁በኩል",-12.963607788085938],["cán",-12.96360969543457],["▁যদি",-12.963613510131836],["▁भर्ती",-12.963621139526367],["▁झा",-12.963628768920898],["▁भएपछि",-12.96363353729248],["▁Connect",-12.963637351989746],["▁intel",-12.963645935058594],["▁කට",-12.9636554718017
58],["▁vəsait",-12.963659286499023],["▁بغیر",-12.963672637939451],["▁Mó",-12.963708877563477],["▁तह",-12.963744163513184],["▁CHI",-12.963751792907717],["▁utilitza",-12.963780403137209],["▁잠",-12.963780403137209],["▁commission",-12.963804244995115],["▁ઓફ",-12.963823318481444],["▁Odisha",-12.963831901550291],["▁കാര്യം",-12.963836669921877],["▁Marta",-12.963837623596191],["lığında",-12.963858604431152],["vanjem",-12.963859558105469],["▁листа",-12.96389102935791],["▁Ур",-12.96389389038086],["Cha",-12.963924407958984],["▁Oz",-12.963949203491213],["▁ชื่อ",-12.963951110839844],["▁لوړ",-12.963966369628906],["▁kişinin",-12.963971138000488],["bêj",-12.96397304534912],["▁voiture",-12.96398639678955],["▁jobba",-12.963988304138184],["▁шал",-12.963995933532717],["装修",-12.96401596069336],["▁gebracht",-12.964029312133787],["▁toată",-12.964034080505373],["ply",-12.964038848876951],["waith",-12.96406078338623],["ன்ன",-12.964106559753418],["izu",-12.964139938354492],["▁stam",-12.964143753051758],["▁הבי",-12.964170455932615],["▁აკ",-12.964184761047363],["▁२२",-12.964218139648438],["αφ",-12.96423625946045],["▁distance",-12.964272499084473],["vente",-12.964282035827637],["näkö",-12.964298248291016],["作家",-12.964314460754396],["unti",-12.964317321777344],["▁Governo",-12.964354515075684],["▁escr",-12.964367866516112],["▁Dina",-12.964385986328123],["ായിരിക്കും",-12.964422225952148],["▁TT",-12.964435577392578],["▁черв",-12.964466094970703],["▁модель",-12.964488983154297],["tention",-12.964494705200195],["▁моли",-12.96450424194336],["▁vigor",-12.964505195617676],["程式",-12.96456527709961],["▁રોજ",-12.964570999145508],["թեր",-12.964571952819824],["атты",-12.96457290649414],["DEL",-12.96457576751709],["хова",-12.964604377746582],["ਧਾਰ",-12.964614868164062],["స్తుంది",-12.964634895324709],["▁አም",-12.964641571044922],["孕",-12.964651107788086],["▁unidades",-12.964677810668944],["ย้าย",-12.964692115783691],["▁वातावरण",-12.96469497680664],["ေပါင္း",-12.964698791503906],["▁süreci",-12.96470069885254],["▁phận",-12.964703559875488],["▁सुंदर",-12.964703559875488],["ဆေး",-12.964716911315918],["▁πώς",-12.964726448059082],["▁ପୂର୍ବରୁ",-12.964728355407717],["▁handicap",-12.964733123779297],["▁akce",-12.96474552154541],["▁కదా",-12.964756965637209],["াহ",-12.964771270751951],["▁távol",-12.964777946472168],["▁Fyrir",-12.964778900146484],["▁mogę",-12.964780807495115],["▁આપે",-12.96479034423828],["ałam",-12.964835166931152],["促",-12.964837074279783],["▁தேவை",-12.964844703674316],["深刻",-12.964845657348633],["▁disposición",-12.964848518371582],["▁cien",-12.964874267578123],["▁tiam",-12.9649019241333],["aars",-12.964920043945312],["size",-12.964950561523438],["▁pussy",-12.964957237243652],["▁BNG",-12.964973449707031],["▁herunder",-12.96498203277588],["”、“",-12.964985847473145],["▁Usu",-12.964998245239258],["добрите",-12.965009689331056],["ಾರ",-12.965036392211914],["ацията",-12.965065002441406],["▁related",-12.965078353881836],["▁namenjen",-12.965126991271973],["▁ઉપ",-12.96513557434082],["▁organizacija",-12.965137481689451],["▁demás",-12.965139389038086],["▁က်",-12.965164184570312],["▁Board",-12.965171813964844],["ОБ",-12.965197563171388],["▁MAG",-12.96523094177246],["ေနရာ",-12.96523380279541],["▁Recep",-12.965251922607422],["ราชการ",-12.96526050567627],["בח",-12.96526336669922],["чать",-12.965267181396484],["▁DIS",-12.965277671813965],["kulia",-12.965279579162598],["ĵ",-12.965296745300291],["▁medan",-12.96530818939209],["aff",-12.96533203125],["▁Industrie",-12.965337753295898],["▁szöveg",-12.965343475341797],["▁ľud",-12.965411186218262],["▁μ
ικρό",-12.965418815612791],["▁verifica",-12.965429306030272],["lmagan",-12.965431213378906],["▁पिता",-12.965449333190918],["lende",-12.965478897094728],["▁tək",-12.965509414672852],["▁عین",-12.965518951416016],["امي",-12.965519905090332],["▁Đó",-12.965567588806152],["víz",-12.965585708618164],["▁studier",-12.96559238433838],["▁Prvi",-12.965598106384276],["▁75%",-12.965646743774414],["peng",-12.965649604797363],["▁piemērot",-12.965653419494627],["▁Invest",-12.965655326843262],["▁спира",-12.965668678283691],["▁rápida",-12.965741157531738],["ហ្វ",-12.965746879577637],["ช่าง",-12.965784072875977],["▁дзяцей",-12.965785026550291],["▁विधानसभा",-12.96578598022461],["▁joko",-12.965786933898926],["etter",-12.965803146362305],["入力",-12.965805053710938],["▁прекрасно",-12.965807914733888],["▁چۈش",-12.96581745147705],["的作品",-12.965826988220217],["▁eba",-12.965827941894531],["季度",-12.965831756591797],["tree",-12.965840339660645],["▁terceiro",-12.96584129333496],["▁qarshi",-12.965843200683594],["▁yapılacak",-12.96584415435791],["▁takaisin",-12.965846061706545],["▁سکتی",-12.965865135192873],["▁АД",-12.965883255004885],["▁beauty",-12.96591567993164],["▁requer",-12.96592140197754],["āri",-12.9659423828125],["yir",-12.965944290161133],["▁Holm",-12.965956687927246],["▁Lem",-12.965968132019045],["▁ľudia",-12.96597671508789],["▁geno",-12.96601104736328],["▁valitse",-12.966015815734863],["ਵਾਲ",-12.96603012084961],["▁Gum",-12.966054916381836],["▁järjest",-12.96606731414795],["▁प्रस्तुत",-12.96607494354248],["▁kolem",-12.966094017028809],["▁transit",-12.966109275817873],["▁ເຖິງ",-12.966116905212402],["แล",-12.966129302978516],["Trans",-12.96614933013916],["▁പോസ്റ്റ",-12.96615505218506],["▁೧",-12.966156959533691],["▁responsables",-12.966163635253906],["iyorum",-12.966181755065918],["▁svojich",-12.96619987487793],["solu",-12.96621799468994],["strán",-12.966229438781738],["▁servizo",-12.966252326965332],["▁huyện",-12.966254234313965],["51)",-12.96627426147461],["240",-12.966286659240724],["fritt",-12.966288566589355],["▁iunie",-12.966288566589355],["▁مکان",-12.966351509094238],["▁imali",-12.966378211975098],["▁Борис",-12.96638011932373],["▁Naše",-12.96639347076416],["▁merke",-12.966398239135742],["meniz",-12.96639919281006],["ayaasha",-12.966403007507324],["गल",-12.966429710388184],["▁العالمية",-12.966447830200195],["▁fursa",-12.966465950012209],["▁durata",-12.966486930847168],["▁quartier",-12.966498374938965],["ärt",-12.966506958007812],["▁aprilie",-12.966530799865724],["ୀର",-12.966533660888672],["гова",-12.966558456420898],["પ્",-12.966558456420898],["PV",-12.966585159301758],["▁ray",-12.966594696044922],["▁사이",-12.966599464416504],["▁ryb",-12.966609001159668],["▁поп",-12.96661376953125],["लै",-12.96662712097168],["▁डाल",-12.966644287109377],["▁बर्ष",-12.96666145324707],["▁promover",-12.966692924499512],["▁спомен",-12.96669578552246],["없이",-12.966702461242676],["stø",-12.96672534942627],["▁சீ",-12.966734886169434],["生活的",-12.96673583984375],["▁видно",-12.966741561889648],["鼓",-12.966778755187988],["▁каких",-12.966792106628418],["芳",-12.966794967651367],["▁legenda",-12.966808319091797],["碳",-12.96682357788086],["▁รถ",-12.966845512390137],["สะดวก",-12.96687126159668],["▁wśród",-12.966875076293944],["▁leeyahay",-12.966876029968262],["▁претставува",-12.966876983642578],["▁således",-12.966879844665527],["▁गयी",-12.966888427734377],["คิดว่า",-12.966948509216309],["▁tutkimus",-12.966948509216309],["▁وچ",-12.96695327758789],["▁اعت",-12.966996192932127],["рик",-12.967008590698242],["▁Domino",-12.96702003479004],["тэр",-12.967
050552368164],["▁אלי",-12.96706771850586],["▁tape",-12.967093467712402],["▁Tant",-12.967101097106934],["▁helposti",-12.96711540222168],["▁Turismo",-12.967132568359377],["▁έν",-12.967153549194336],["▁साथै",-12.967159271240234],["▁በም",-12.967182159423828],["▁이미",-12.967182159423828],["▁vég",-12.96718406677246],["▁Nazar",-12.96719455718994],["▁Vores",-12.967329978942873],["▁общем",-12.967340469360352],["чына",-12.967367172241213],["▁Illa",-12.96737003326416],["▁Boston",-12.967391967773438],["▁самолет",-12.967411994934082],["▁információk",-12.967424392700195],["▁střed",-12.967432975769045],["▁Cik",-12.967440605163574],["និ",-12.967461585998535],["▁deutsche",-12.967474937438965],["▁vård",-12.96750545501709],["▁cento",-12.96752643585205],["▁Reserved",-12.967569351196287],["このような",-12.96758270263672],["чок",-12.967592239379885],["▁imel",-12.967608451843262],["명이",-12.967612266540527],["ibig",-12.967622756958008],["▁Ori",-12.967632293701172],["▁mogelijkheden",-12.967658996582031],["лади",-12.967681884765623],["ணம்",-12.96768856048584],["▁Deep",-12.967702865600586],["နံ",-12.967707633972168],["▁ನೇ",-12.96774673461914],["ਾਨ",-12.967754364013672],["▁opsies",-12.967787742614746],["punkter",-12.96779441833496],["▁želje",-12.967803001403809],["sektor",-12.96782398223877],["Tre",-12.967836380004885],["ಟ್ಟಿ",-12.967845916748049],["yó",-12.967872619628906],["gando",-12.96787452697754],["▁ಹೀಗೆ",-12.967878341674805],["的同时",-12.967886924743652],["ेक",-12.967888832092283],["▁bygger",-12.967896461486816],["▁предусмотрен",-12.967901229858398],["ဖြင့်",-12.967914581298828],["舒",-12.967918395996094],["▁Казахстан",-12.96796703338623],["▁февруари",-12.96796703338623],["▁دوباره",-12.96796703338623],["▁აშშ",-12.96796703338623],["컨",-12.96796703338623],["▁élmény",-12.968006134033203],["legri",-12.96800708770752],["▁hizmeti",-12.96800708770752],["▁એવું",-12.968031883239746],["▁எல்லாம்",-12.968048095703123],["▁apri",-12.968053817749023],["▁μέ",-12.96808910369873],["grin",-12.96810245513916],["▁បន្ត",-12.968111038208008],["▁atera",-12.968127250671388],["▁contest",-12.968128204345703],["▁Vasta",-12.968141555786133],["চার",-12.968147277832031],["атори",-12.968167304992676],["▁bērniem",-12.968167304992676],["▁Ngo",-12.96817398071289],["רוב",-12.968180656433104],["velli",-12.968183517456056],["jke",-12.968186378479004],["element",-12.968188285827637],["etlen",-12.96819019317627],["▁നോക്കി",-12.96821117401123],["▁Ferrari",-12.968214988708496],["▁Juri",-12.968232154846191],["▁Such",-12.968255996704102],["▁картин",-12.968256950378418],["▁shpejt",-12.968263626098633],["▁Chin",-12.96827507019043],["▁logic",-12.96827793121338],["iyah",-12.96828556060791],["кач",-12.968338966369627],["▁፣",-12.968340873718262],["▁цикл",-12.96837043762207],["週間",-12.968433380126951],["▁орто",-12.968456268310549],["办理",-12.968499183654783],["power",-12.968502044677734],["▁עמוד",-12.96854019165039],["തോടെ",-12.968545913696287],["▁ടെ",-12.968552589416504],["▁Úr",-12.968555450439451],["▁suflet",-12.968558311462402],["▁capire",-12.968576431274414],["▁साम",-12.968609809875488],["راز",-12.968655586242676],["▁voet",-12.968657493591309],["вялі",-12.9686918258667],["тке",-12.968704223632812],["ۋە",-12.968705177307127],["▁vef",-12.968720436096191],["്യാ",-12.968728065490724],["كب",-12.968732833862305],["oilla",-12.968737602233888],["വു",-12.968738555908203],["▁fyrr",-12.968750953674316],["▁Mint",-12.96876621246338],["घाट",-12.968772888183594],["inama",-12.9688138961792],["▁Latest",-12.968838691711426],["bian",-12.968841552734377],["▁vlasy",-12.968852996826172],["
מיר",-12.968864440917969],["sette",-12.96886920928955],["▁छान",-12.968897819519045],["eacht",-12.968902587890623],["דבר",-12.968920707702637],["čilo",-12.968945503234863],["▁Pale",-12.968950271606444],["מייל",-12.968966484069824],["▁character",-12.968971252441406],["НИЙ",-12.96897315979004],["هات",-12.969005584716797],["▁ರಾಜ",-12.96900749206543],["েশন",-12.969008445739746],["plu",-12.969009399414062],["▁basic",-12.969017028808594],["重庆",-12.969019889831545],["mizin",-12.96905517578125],["▁bombe",-12.969056129455566],["sæt",-12.969058990478516],["▁Holiday",-12.969059944152832],["▁Vilniuje",-12.969059944152832],["▁말씀",-12.969059944152832],["▁نکته",-12.969060897827148],["▁допомоги",-12.969061851501465],["▁жүргізу",-12.96906280517578],["▁አባላት",-12.969064712524414],["▁Federaalka",-12.969069480895996],["▁क्योंकि",-12.969069480895996],["▁wako",-12.969070434570312],["▁položaj",-12.969073295593262],["▁hadal",-12.969083786010742],["ผู้ชาย",-12.969093322753906],["▁محبوب",-12.969096183776855],["▁ràng",-12.969097137451172],["▁Amir",-12.969101905822754],["ëm",-12.96910285949707],["▁koe",-12.969127655029297],["▁таке",-12.969130516052246],["▁radost",-12.969133377075195],["▁pappa",-12.969181060791016],["දින",-12.969219207763672],["ерт",-12.969274520874023],["▁Hof",-12.969298362731934],["amar",-12.969308853149414],["овали",-12.969334602355955],["▁экономикалық",-12.969335556030272],["▁វិញ",-12.96934413909912],["▁pasaules",-12.969362258911133],["▁қал",-12.969367980957031],["▁erfolgt",-12.969386100769045],["virta",-12.969502449035645],["▁fæ",-12.969518661499023],["løsning",-12.969523429870604],["meid",-12.969538688659668],["▁условий",-12.969544410705566],["▁Ferenc",-12.9695463180542],["গত",-12.969568252563477],["▁음악",-12.969568252563477],["しっかり",-12.96958351135254],["xona",-12.969649314880373],["▁pasauli",-12.969655990600586],["تابع",-12.969682693481444],["▁صفحات",-12.969715118408203],["▁слав",-12.969752311706545],["▁والج",-12.969779014587402],["সার",-12.969796180725098],["▁Сад",-12.969796180725098],["ністю",-12.969803810119627],["媒",-12.969822883605955],["▁noteikumi",-12.969828605651855],["ರಣ",-12.969895362854004],["▁ihtiyaç",-12.969908714294434],["▁afstand",-12.9699125289917],["regul",-12.969934463500977],["物理",-12.96995449066162],["စက္",-12.969964981079102],["▁Joo",-12.969982147216797],["▁baar",-12.969985961914062],["▁sebesar",-12.969985961914062],["īties",-12.970022201538086],["▁jemi",-12.97003173828125],["▁suç",-12.970036506652832],["បា",-12.97005558013916],["▁Ξ",-12.97006130218506],["▁الخط",-12.970094680786133],["丈夫",-12.970108032226562],["眠",-12.970114707946776],["越南",-12.97012996673584],["▁fuld",-12.970135688781738],["醫師",-12.970145225524902],["▁berê",-12.97014617919922],["ົາ",-12.970151901245115],["▁sytuacji",-12.97015380859375],["▁регламент",-12.97015380859375],["▁սեփական",-12.97015380859375],["▁Academy",-12.970155715942385],["▁בצורה",-12.970157623291016],["полни",-12.970159530639648],["vých",-12.970160484313965],["▁ilişkin",-12.970160484313965],["▁ပို",-12.970163345336914],["▁كافة",-12.970171928405762],["▁egitea",-12.970181465148926],["▁заступник",-12.970190048217772],["▁Flori",-12.970200538635254],["СУ",-12.970205307006836],["midi",-12.970213890075684],["▁Onko",-12.97021770477295],["csak",-12.97023868560791],["▁avto",-12.970247268676758],["▁nina",-12.97024917602539],["▁sowohl",-12.970256805419922],["нике",-12.970269203186035],["liitto",-12.970279693603516],["▁ሆ",-12.970282554626465],["図",-12.97028636932373],["▁velkommen",-12.970295906066896],["▁לד",-12.970296859741213],["▁Gdy",-12.97030258178711],["
ណាស់",-12.97032356262207],["▁connais",-12.970335960388184],["▁שהו",-12.970345497131348],["▁ies",-12.970351219177246],["▁Володимир",-12.970383644104004],["stuk",-12.970386505126951],["▁Elektron",-12.970389366149902],["▁klientów",-12.970401763916016],["▁დავით",-12.970406532287598],["▁самый",-12.970420837402344],["hni",-12.970449447631836],["景點",-12.970454216003418],["▁flytta",-12.970458984375],["ಲೋ",-12.970474243164062],["▁maioria",-12.970484733581545],["▁fogad",-12.970486640930176],["▁normativ",-12.97050952911377],["МЕ",-12.970513343811035],["▁teken",-12.970525741577148],["30)",-12.970551490783691],["RAL",-12.970587730407717],["▁Интер",-12.970603942871094],["कर्ता",-12.970629692077637],["▁ڇ",-12.970643043518066],["iski",-12.970650672912598],["▁provided",-12.970670700073242],["slaget",-12.970698356628418],["luft",-12.970714569091797],["လျ",-12.970714569091797],["chant",-12.970796585083008],["▁говорил",-12.970842361450195],["Ô",-12.970843315124512],["yap",-12.970849990844728],["▁Ваша",-12.970865249633787],["▁05.",-12.970877647399902],["कृ",-12.970885276794434],["пија",-12.970901489257812],["▁sri",-12.970937728881836],["▁tyr",-12.97095012664795],["あげ",-12.970951080322266],["▁Слу",-12.97095775604248],["▁skat",-12.97096061706543],["యన్",-12.97097396850586],["§",-12.970999717712402],["▁պատճառ",-12.971009254455566],["▁Ред",-12.97102165222168],["▁الاح",-12.971034049987791],["ziehen",-12.971080780029297],["▁colle",-12.971083641052246],["ogen",-12.971084594726562],["▁1912",-12.971097946166992],["此前",-12.971110343933104],["▁කරනු",-12.971120834350586],["▁Tank",-12.971123695373535],["нити",-12.971144676208496],["ความคิดเห็น",-12.971163749694824],["र्ष",-12.97116470336914],["▁Mitte",-12.971168518066406],["兴趣",-12.971183776855469],["賓",-12.971185684204102],["쟁",-12.971210479736328],["淨",-12.97121810913086],["یوې",-12.971235275268556],["▁Ljubljani",-12.9712495803833],["▁economía",-12.9712495803833],["▁Goberno",-12.971250534057615],["▁dagdagan",-12.971256256103516],["▁первую",-12.971256256103516],["▁ځکه",-12.971259117126465],["▁پخش",-12.97126007080078],["iisa",-12.971268653869627],["▁ετών",-12.971274375915527],["▁அதன்",-12.971281051635742],["ništvo",-12.97129726409912],["▁ત્રણ",-12.971322059631348],["ಬೇ",-12.971328735351562],["ізації",-12.971335411071776],["▁Բա",-12.971353530883787],["▁હા",-12.971353530883787],["कड",-12.971354484558104],["▁ilm",-12.971391677856444],["dacht",-12.971410751342772],["3,000",-12.971426963806152],["▁Jepang",-12.97144889831543],["يخ",-12.971450805664062],["▁ሃ",-12.971454620361328],["▁згад",-12.97146224975586],["▁Modul",-12.971508026123049],["▁марк",-12.97152328491211],["▁политически",-12.971532821655272],["▁puso",-12.971534729003906],["▁pikeun",-12.971550941467283],["▁Hii",-12.971567153930664],["רף",-12.971576690673828],["▁ikasle",-12.97158432006836],["인데",-12.971590042114258],["gine",-12.97160816192627],["▁хугацаа",-12.971644401550291],["▁Франция",-12.97165298461914],["▁Andra",-12.971665382385254],["овата",-12.971681594848633],["▁sniedz",-12.97169303894043],["ျမင္",-12.971698760986328],["iniu",-12.971699714660645],["Tar",-12.97173309326172],["▁သင်",-12.971738815307615],["load",-12.9717435836792],["▁Sử",-12.971749305725098],["行銷",-12.971752166748049],["њима",-12.971763610839844],["ytu",-12.971768379211426],["▁emberi",-12.971781730651855],["▁vild",-12.971784591674805],["太陽",-12.97182559967041],["▁ГО",-12.971847534179688],["▁కోర",-12.971850395202637],["▁claim",-12.971858978271484],["▁casual",-12.971875190734863],["▁آخری",-12.971880912780762],["알",-12.971914291381836],["È",-12.9719257
35473633],["▁beti",-12.971929550170898],["letter",-12.971949577331545],["▁محا",-12.971951484680176],["▁гл",-12.97198486328125],["dier",-12.971994400024414],["▁Pavel",-12.97200870513916],["▁해결",-12.972033500671388],["▁konten",-12.972040176391602],["▁stavb",-12.97204875946045],["▁πόλη",-12.972063064575195],["▁негатив",-12.972084999084473],["пет",-12.972085952758787],["▁Etxe",-12.972107887268066],["▁коре",-12.972108840942385],["▁спро",-12.972153663635254],["▁опозиц",-12.972186088562012],["แตก",-12.972251892089844],["peti",-12.972256660461426],["▁በየ",-12.972256660461426],["▁fura",-12.972257614135742],["▁Bö",-12.97225856781006],["▁варто",-12.972275733947754],["幾乎",-12.972285270690918],["青少年",-12.972289085388184],["摇",-12.972290992736816],["▁oczywiście",-12.972345352172852],["▁tavsiye",-12.972345352172852],["▁інформацію",-12.972345352172852],["▁علاقه",-12.972345352172852],["▁ხშირად",-12.972345352172852],["▁ጀምሮ",-12.972345352172852],["벨",-12.972345352172852],["▁иногда",-12.972347259521484],["発売",-12.972347259521484],["▁الأخرى",-12.972355842590332],["ውም",-12.972357749938965],["▁започне",-12.972362518310549],["▁enhver",-12.972363471984863],["▁bergerak",-12.972370147705078],["▁فروشگاه",-12.972373962402344],["▁поруч",-12.972376823425291],["恒",-12.972382545471191],["▁Nyheter",-12.972390174865724],["▁мили",-12.972404479980469],["進一步",-12.972407341003418],["▁hlavně",-12.972411155700684],["▁দেওয়া",-12.97241497039795],["သေ",-12.972423553466797],["▁дві",-12.97243881225586],["▁chiqish",-12.972439765930176],["▁hazai",-12.972453117370604],["▁Cost",-12.972455978393556],["▁ប្រើ",-12.97246265411377],["▁2018)",-12.972487449645996],["▁svetlo",-12.972487449645996],["▁числа",-12.972487449645996],["夫妻",-12.972522735595703],["▁Universiteti",-12.972529411315918],["▁लिये",-12.972530364990234],["vių",-12.972545623779297],["▁eitt",-12.97258758544922],["ywa",-12.972616195678713],["▁ආකාරය",-12.97261905670166],["▁обеща",-12.972620010375977],["青春",-12.972641944885254],["▁талаптар",-12.97265625],["▁közben",-12.972675323486328],["በው",-12.97269344329834],["▁treff",-12.972713470458984],["ावे",-12.972752571105955],["اغ",-12.97280216217041],["▁brauc",-12.972804069519045],["▁camiño",-12.972829818725586],["▁semoga",-12.97283172607422],["▁آمار",-12.972867012023926],["▁Kindern",-12.972883224487305],["▁podatki",-12.97293186187744],["stis",-12.972936630249023],["gið",-12.972949981689451],["cover",-12.972957611083984],["▁향",-12.972959518432615],["Mad",-12.97296905517578],["▁سبک",-12.972980499267578],["سف",-12.972992897033691],["▁menambah",-12.97300148010254],["▁дороги",-12.973016738891602],["▁trouble",-12.973031044006348],["老板",-12.97303295135498],["tanto",-12.973055839538574],["▁Rend",-12.973078727722168],["υχ",-12.97312831878662],["čenie",-12.973154067993164],["▁කඩ",-12.973183631896973],["▁qol",-12.97320556640625],["/09",-12.973217010498049],["funda",-12.973220825195312],["ίδες",-12.973238945007324],["кажа",-12.97324275970459],["ائون",-12.973286628723145],["▁tətbiq",-12.973316192626951],["treb",-12.973321914672852],["дия",-12.973340034484863],["▁situaci",-12.97336769104004],["▁පර",-12.973380088806152],["昨日",-12.973388671875],["ळ्या",-12.97340202331543],["▁Cea",-12.973408699035645],["▁vaste",-12.973411560058594],["ењето",-12.973416328430176],["▁například",-12.973443031311035],["▁мэдээллийн",-12.973443031311035],["▁ਸਕਦੇ",-12.973443031311035],["▁ଯେଉଁ",-12.973443031311035],["▁কলেজ",-12.973443984985352],["рег",-12.973448753356934],["▁masjid",-12.973455429077148],["နဲ",-12.973459243774414],["▁Δείτε",-12.973461151123049],["▁Efrînê",-12.9734640
12145996],["前の",-12.97347354888916],["▁Că",-12.973509788513184],["▁كند",-12.97353744506836],["真實",-12.973538398742676],["▁huko",-12.97354221343994],["多い",-12.97354507446289],["▁berhenti",-12.973546028137209],["▁ያለውን",-12.973546028137209],["ERS",-12.973577499389648],["▁처리",-12.973594665527344],["rore",-12.97362995147705],["▁áfram",-12.973661422729492],["▁vượt",-12.97367000579834],["ženi",-12.973681449890137],["▁amis",-12.973718643188477],["bā",-12.973737716674805],["્મ",-12.973780632019045],["▁Ақ",-12.973814010620115],["ないと",-12.973888397216797],["▁lugu",-12.973898887634276],["▁ktorým",-12.973919868469238],["יער",-12.973956108093262],["▁মেয়ে",-12.973974227905272],["дз",-12.973995208740234],["▁ຮ່ວມ",-12.974005699157717],["▁Oder",-12.974026679992676],["▁Λε",-12.974031448364258],["រូប",-12.974045753479004],["▁prijav",-12.974071502685549],["▁Jej",-12.974084854125977],["▁economi",-12.974087715148926],["闻",-12.974101066589355],["▁klu",-12.974108695983888],["▁duela",-12.974151611328123],["▁Marea",-12.974164009094238],["ząc",-12.974199295043944],["▁лов",-12.97420597076416],["krist",-12.974206924438477],["wnie",-12.974223136901855],["▁зуб",-12.974225997924805],["▁trust",-12.974252700805664],["▁मार्च",-12.974252700805664],["frem",-12.97425651550293],["▁خشک",-12.974279403686523],["实在",-12.97428035736084],["▁anta",-12.974298477172852],["▁площ",-12.974321365356444],["▁представители",-12.974322319030762],["巨大的",-12.974335670471191],["▁сложи",-12.974369049072266],["vež",-12.974370002746582],["aniya",-12.974393844604492],["ваш",-12.974400520324709],["fuz",-12.974440574645996],["▁jelentős",-12.97446346282959],["तां",-12.974470138549805],["机械",-12.974491119384766],["zado",-12.97450828552246],["娛樂",-12.974515914916992],["莊",-12.974538803100586],["▁entfernt",-12.974542617797852],["▁nemzetközi",-12.974542617797852],["▁związku",-12.974542617797852],["▁Ազգային",-12.974542617797852],["▁رقابت",-12.974544525146484],["▁მიიღო",-12.974544525146484],["▁브랜드",-12.9745454788208],["▁سفارت",-12.974546432495115],["ေၾကာင့္",-12.974549293518066],["▁українських",-12.974549293518066],["▁procesos",-12.97456169128418],["لىشى",-12.974575996398926],["sions",-12.97459888458252],["▁ក្រុង",-12.974610328674316],["ýt",-12.974615097045898],["▁tīk",-12.974637985229492],["▁Kiss",-12.974649429321287],["δά",-12.974651336669922],["▁misalnya",-12.974653244018556],["▁Εξ",-12.974661827087402],["▁인기",-12.974675178527832],["▁Ark",-12.97469711303711],["▁týka",-12.974720001220703],["henti",-12.974743843078612],["▁ಬ್ಯಾ",-12.974748611450195],["ikos",-12.9747896194458],["▁legi",-12.974799156188965],["▁алат",-12.974801063537598],["▁далі",-12.974802017211914],["szek",-12.974817276000977],["▁správy",-12.974827766418455],["13)",-12.97482967376709],["▁שמו",-12.974846839904783],["5.000",-12.97485637664795],["iskt",-12.97487449645996],["▁index",-12.97489070892334],["▁müdafiə",-12.974892616271973],["▁nécessaire",-12.974902153015137],["كو",-12.974909782409668],["▁turism",-12.97494125366211],["▁그러",-12.974960327148438],["▁ただ",-12.974968910217283],["▁плохо",-12.97500705718994],["yeke",-12.975011825561523],["▁ír",-12.975020408630373],["▁reputa",-12.975040435791016],["yri",-12.975056648254396],["▁optimist",-12.97507095336914],["▁борбору",-12.97509765625],["▁účet",-12.97510814666748],["ோடு",-12.975112915039062],["▁বাংলাদেশের",-12.975117683410645],["▁berapa",-12.975123405456545],["пушта",-12.975131034851074],["стрел",-12.975194931030272],["тъп",-12.97519874572754],["▁специалист",-12.975215911865234],["▁ensure",-12.975232124328612],["mişdir",-12.97524642944336],["▁Sev",
-12.97525119781494],["▁angol",-12.975258827209473],["▁gadus",-12.975276947021484],["ອບ",-12.975278854370115],["oten",-12.975297927856444],["▁میرا",-12.975297927856444],["octubre",-12.97531032562256],["▁ndal",-12.975313186645508],["ماس",-12.975339889526367],["▁مقاومت",-12.97535228729248],["▁інш",-12.975396156311035],["▁പ്പ",-12.975434303283691],["▁اقتصادي",-12.97544765472412],["▁holdt",-12.97547435760498],["ssin",-12.97548007965088],["▁lieto",-12.975481033325195],["▁kirjoit",-12.975516319274902],["▁nạn",-12.97553825378418],["▁ensemble",-12.975544929504396],["▁iyon",-12.97562313079834],["▁Poly",-12.975634574890137],["ବର୍ତ୍ତୀ",-12.975640296936035],["▁häufig",-12.975642204284668],["▁ஆசிரியர்",-12.9756441116333],["▁ಶಾಸಕ",-12.9756441116333],["▁işğal",-12.975645065307615],["▁паміж",-12.975648880004885],["হে",-12.9756498336792],["▁скорее",-12.975651741027832],["▁alábbi",-12.975653648376465],["чого",-12.97565460205078],["énergie",-12.975656509399414],["▁travaux",-12.97565746307373],["▁bah",-12.975658416748049],["▁tré",-12.975671768188477],["▁ຫາ",-12.97567367553711],["▁mañá",-12.975679397583008],["▁prohibi",-12.97568702697754],["▁төрөл",-12.97568702697754],["▁Kron",-12.975709915161133],["▁Нью",-12.975712776184082],["muut",-12.975717544555664],["ຢູ່ໃນ",-12.975749015808104],["ဆီး",-12.975749015808104],["▁Azt",-12.975749015808104],["▁familiares",-12.975749969482422],["▁uygulan",-12.975768089294434],["▁תחת",-12.975769996643066],["▁!!!!",-12.975772857666016],["▁fresh",-12.975838661193848],["ايت",-12.975850105285645],["▁aborda",-12.975913047790527],["▁tamo",-12.975930213928224],["▁zbi",-12.975969314575195],["▁२८",-12.975979804992676],["fag",-12.97598361968994],["▁գյուղ",-12.975995063781738],["▁Johor",-12.976018905639648],["Mari",-12.97602081298828],["▁hond",-12.976034164428713],["応",-12.976055145263672],["▁tampak",-12.976117134094238],["toma",-12.976119041442873],["▁Speed",-12.976125717163086],["▁selles",-12.9761381149292],["មែន",-12.976154327392578],["▁Buhari",-12.97619342803955],["▁ወር",-12.97620964050293],["итель",-12.976214408874512],["haru",-12.976287841796877],["▁praia",-12.97633171081543],["▁essas",-12.97636890411377],["ാസ്",-12.976390838623049],["▁―",-12.976396560668944],["▁ලැබෙන",-12.97643756866455],["mışdır",-12.97644329071045],["ნას",-12.97644329071045],["▁영상",-12.976463317871094],["▁açı",-12.97649383544922],["▁zdravotn",-12.976517677307127],["▁egymás",-12.976534843444824],["uvi",-12.976539611816406],["trek",-12.976564407348633],["дка",-12.976566314697266],["▁utvalg",-12.976592063903809],["υπουργ",-12.976613998413086],["召",-12.976618766784668],["zali",-12.976629257202148],["▁tegi",-12.976651191711426],["一同",-12.976707458496094],["▁زندگي",-12.976719856262209],["案例",-12.976728439331056],["ыт",-12.976729393005373],["▁socials",-12.9767427444458],["▁alternatief",-12.976749420166016],["▁vry",-12.976750373840332],["▁директно",-12.976751327514648],["ובי",-12.976754188537598],["音楽",-12.976754188537598],["▁hüquq",-12.976761817932127],["事が",-12.976768493652344],["▁salir",-12.976771354675291],["▁рез",-12.976774215698242],["angl",-12.976778030395508],["▁türlü",-12.976778030395508],["▁ਕੀਤੇ",-12.976784706115724],["ڑی",-12.976818084716797],["ылады",-12.976821899414062],["▁wewe",-12.976835250854492],["▁хүү",-12.976884841918944],["▁regalo",-12.976892471313477],["▁تاسو",-12.976908683776855],["dente",-12.976930618286133],["▁database",-12.976935386657717],["▁Дэ",-12.97693920135498],["ndlich",-12.976951599121094],["▁virs",-12.97695541381836],["чком",-12.976964950561523],["чнай",-12.976967811584473],["▁என்னை",-12.97700
1190185549],["angkat",-12.977002143859863],["ИЗ",-12.977018356323242],["රය",-12.977018356323242],["ạt",-12.977030754089355],["▁maidir",-12.977032661437988],["▁deneyim",-12.977035522460938],["のだ",-12.97703742980957],["ولی",-12.977042198181152],["▁muusika",-12.97704792022705],["▁precej",-12.977051734924316],["▁အခု",-12.977078437805176],["díj",-12.977097511291504],["於是",-12.977115631103516],["▁19.00",-12.97712516784668],["▁További",-12.977127075195312],["▁ularning",-12.977127075195312],["чам",-12.977128982543944],["▁modificar",-12.977150917053224],["▁संसद",-12.977203369140623],["真的是",-12.977240562438965],["вао",-12.977252006530762],["▁своята",-12.977276802062988],["ということで",-12.977288246154783],["παθ",-12.977316856384276],["مہ",-12.977367401123049],["skolan",-12.977399826049805],["มากๆ",-12.977418899536133],["▁කරගන්න",-12.977452278137209],["etur",-12.977502822875977],["▁šau",-12.97753620147705],["քնն",-12.977540016174316],["▁дейност",-12.977556228637695],["▁Span",-12.977559089660645],["ságot",-12.977561950683594],["هود",-12.977632522583008],["▁Revi",-12.97763442993164],["▁намага",-12.977696418762209],["цыйна",-12.977723121643066],["墙",-12.977740287780762],["▁1922",-12.977747917175291],["▁estrada",-12.977757453918455],["澳",-12.977766036987305],["lee",-12.977774620056152],["搜索",-12.97780990600586],["ВС",-12.977812767028809],["▁dyre",-12.977832794189451],["▁kokonais",-12.977837562561035],["dö",-12.97784423828125],["▁आवाज",-12.977845191955566],["बरोबर",-12.977846145629885],["সমূহ",-12.977846145629885],["▁Sic",-12.977846145629885],["▁consectetuer",-12.977846145629885],["▁gennaio",-12.977846145629885],["บัตร",-12.9778470993042],["▁وكذلك",-12.9778470993042],["▁Đảng",-12.977859497070312],["▁séjour",-12.97787094116211],["▁fysisk",-12.977872848510742],["▁مند",-12.977890014648438],["bånd",-12.977896690368652],["▁sırasında",-12.977904319763184],["▁គណបក្ស",-12.977937698364258],["enes",-12.977953910827637],["phu",-12.978014945983888],["වලින්",-12.97801685333252],["准确",-12.97805404663086],["Bas",-12.978059768676758],["▁eind",-12.978068351745604],["dwa",-12.978073120117188],["▁Herbal",-12.97808837890625],["แยก",-12.978089332580566],["▁markazi",-12.978102684020996],["▁wynika",-12.978111267089844],["▁പറ",-12.97811794281006],["ssent",-12.978120803833008],["▁കിയ",-12.978131294250488],["▁хал",-12.978137016296388],["▁करके",-12.978143692016602],["▁znajdują",-12.978156089782717],["മാന",-12.97815990447998],["▁കാര്",-12.978192329406738],["ደረግ",-12.978193283081056],["▁سریع",-12.978203773498535],["reto",-12.97821044921875],["▁ສຸດ",-12.978226661682127],["▁લા",-12.97824478149414],["▁seama",-12.978248596191406],["верс",-12.97826099395752],["меж",-12.978269577026367],["ที่ได้",-12.978282928466797],["тично",-12.978294372558594],["▁drži",-12.978300094604492],["clo",-12.978302001953123],["▁Bandung",-12.978312492370604],["病人",-12.97831916809082],["▁класи",-12.978344917297363],["▁activités",-12.978347778320312],["▁míg",-12.978349685668944],["▁الكر",-12.978349685668944],["izia",-12.978371620178224],["体现",-12.978385925292969],["▁kær",-12.97839641571045],["▁اعضای",-12.978412628173828],["▁ٿيل",-12.9784517288208],["ിയത്",-12.978453636169434],["थो",-12.978466033935549],["ếu",-12.97846794128418],["▁eis",-12.97851276397705],["▁svoji",-12.978519439697266],["IER",-12.97854995727539],["▁Abd",-12.97854995727539],["▁religio",-12.978550910949709],["▁Yet",-12.978578567504885],["▁কু",-12.978594779968262],["zdrav",-12.978609085083008],["сън",-12.978618621826172],["▁ordin",-12.978650093078612],["▁darrer",-12.978652954101562],["▁предлог",-12.9786653
51867676],["▁performans",-12.978681564331056],["0,0",-12.978690147399902],["ył",-12.978692054748535],["▁מטר",-12.978692054748535],["▁Antara",-12.97869873046875],["UDI",-12.978729248046877],["מט",-12.978739738464355],["▁زنده",-12.978748321533203],["ลํา",-12.978792190551758],["льо",-12.978796005249023],["ભા",-12.978816986083984],["ट्ट",-12.978824615478516],["▁അടി",-12.978829383850098],["які",-12.97883129119873],["▁במיוחד",-12.978833198547363],["خانه",-12.978845596313477],["чити",-12.978846549987791],["Ny",-12.97884750366211],["زال",-12.978854179382324],["пле",-12.97885513305664],["▁conte",-12.97885799407959],["ψηφ",-12.978891372680664],["شعر",-12.978906631469728],["gele",-12.978907585144045],["▁Cool",-12.97891330718994],["▁dormi",-12.978918075561523],["▁akun",-12.978919982910156],["▁başında",-12.97893524169922],["敏感",-12.97894287109375],["ေခၚ",-12.978949546813965],["▁ویژگی",-12.978949546813965],["▁Department",-12.97895050048828],["▁Marko",-12.97895050048828],["▁necessary",-12.97895050048828],["▁článku",-12.97895050048828],["▁επειδή",-12.97895050048828],["▁സ്വന്തം",-12.97895050048828],["▁ਉੱਤੇ",-12.978957176208496],["▁сваіх",-12.978958129882812],["▁июля",-12.978960037231444],["▁հայտնի",-12.978960990905762],["дсан",-12.979004859924316],["▁هيٺ",-12.97901725769043],["▁Taylor",-12.979019165039062],["ของการ",-12.97903060913086],["▁producción",-12.979043960571287],["हत",-12.979084014892578],["▁مشهد",-12.979118347167969],["တၱ",-12.9791259765625],["▁албай",-12.979144096374512],["▁fale",-12.979154586791992],["▁istor",-12.97918701171875],["▁գն",-12.97919464111328],["▁kultuuri",-12.979206085205078],["▁ensin",-12.979323387145996],["▁penelitian",-12.979328155517578],["▁बताएका",-12.979331016540527],["▁hørt",-12.97934913635254],["ırlar",-12.979354858398438],["«,",-12.979355812072754],["▁abo",-12.97935676574707],["KON",-12.979362487792969],["OX",-12.979365348815918],["DAL",-12.979377746582031],["▁Arch",-12.97938060760498],["▁Turizm",-12.97938060760498],["adigan",-12.979382514953612],["עמ",-12.979462623596191],["▁සේ",-12.979475021362305],["▁പേ",-12.97950839996338],["mızı",-12.97952651977539],["ıyoruz",-12.979546546936035],["▁patří",-12.979547500610352],["強い",-12.979555130004885],["asu",-12.979575157165527],["▁Сара",-12.979580879211426],["▁tesis",-12.979619979858398],["yos",-12.979642868041992],["▁dicas",-12.979657173156738],["▁anmeldelser",-12.979659080505373],["▁fransk",-12.979693412780762],["čili",-12.979713439941406],["푸",-12.979743957519531],["▁शु",-12.97975254058838],["heure",-12.979762077331545],["lamp",-12.9797945022583],["ອນ",-12.979804039001465],["▁aktiviti",-12.979811668395996],["ates",-12.979820251464844],["▁ilmiy",-12.979820251464844],["спек",-12.979850769042969],["▁نقد",-12.979899406433104],["ሩት",-12.979912757873535],["▁Zij",-12.979925155639648],["103",-12.97993278503418],["▁města",-12.97995376586914],["ປະຊາຊົນ",-12.97996711730957],["▁bout",-12.979982376098633],["homme",-12.97998332977295],["▁Oso",-12.979987144470217],["▁nächste",-12.979987144470217],["▁چوڭ",-12.98003387451172],["▁σημαντικό",-12.98004150390625],["総",-12.980045318603516],["▁rýchlo",-12.98005199432373],["▁Ynglŷn",-12.98005485534668],["▁économique",-12.98005485534668],["▁مذاکرات",-12.98005485534668],["▁सोशल",-12.98005485534668],["зел",-12.980069160461426],["▁Emer",-12.98007869720459],["▁sist",-12.98008155822754],["▁velký",-12.980084419250488],["истите",-12.98009967803955],["ющая",-12.98009967803955],["Bal",-12.980132102966309],["▁غو",-12.980156898498535],["നാണ്",-12.980159759521484],["ないので",-12.98017120361328],["▁hobby",-12.9801778793
33496],["▁бағдарламасы",-12.980185508728027],["▁regime",-12.980192184448242],["▁تامین",-12.980194091796877],["▁başlayan",-12.980204582214355],["teknik",-12.98020839691162],["▁වෙනත්",-12.98020839691162],["125",-12.980213165283203],["▁తగ్గ",-12.980228424072266],["▁Naz",-12.980230331420898],["▁kväll",-12.980237007141112],["▁төлө",-12.980273246765137],["车辆",-12.980276107788086],["mī",-12.98029899597168],["▁seuraa",-12.98032569885254],["▁աշխարհի",-12.980341911315918],["▁हामीले",-12.980350494384766],["▁Yaş",-12.98035717010498],["төн",-12.98037338256836],["kling",-12.9804048538208],["▁mabilis",-12.980411529541016],["▁archi",-12.980428695678713],["▁Stru",-12.98043155670166],["▁joissa",-12.980443954467772],["φυλ",-12.980483055114746],["▁bait",-12.98048496246338],["вших",-12.98049259185791],["▁tension",-12.980509757995604],["mér",-12.980514526367188],["▁FL",-12.980524063110352],["onge",-12.980530738830566],["▁takto",-12.980536460876465],["▁Pli",-12.98053741455078],["▁રાજ્ય",-12.980586051940918],["ობდა",-12.980609893798828],["他們的",-12.980672836303713],["ANJE",-12.980680465698242],["երն",-12.980731964111328],["▁పూ",-12.980748176574709],["▁ਦਲ",-12.9807710647583],["apparat",-12.980852127075195],["▁Игр",-12.980859756469728],["魯",-12.980881690979004],["▁forvente",-12.980911254882812],["вана",-12.980912208557127],["▁illum",-12.980929374694824],["▁Gustav",-12.980948448181152],["▁ски",-12.980957984924316],["▁papír",-12.98097038269043],["Amerika",-12.980990409851074],["▁fuga",-12.980992317199709],["▁korja",-12.981003761291504],["▁izv",-12.981019020080566],["ಬ್ಬ",-12.98104476928711],["围",-12.981053352355955],["isset",-12.981067657470703],["▁अवस्थामा",-12.98110580444336],["▁ამიტომ",-12.981148719787598],["မ်ဳိး",-12.981159210205078],["▁अन्तर्राष्ट्रिय",-12.981161117553713],["▁පවතින",-12.981161117553713],["▁միջոցով",-12.981167793273926],["▁αφορά",-12.981170654296877],["▁4.2",-12.981194496154783],["[10]",-12.981197357177734],["▁desenvolvemento",-12.981199264526367],["▁باي",-12.981216430664062],["▁trouvé",-12.981219291687012],["▁دیگه",-12.981221199035645],["▁निकै",-12.981233596801758],["▁operat",-12.98123836517334],["まずは",-12.981245994567873],["▁tamaño",-12.981250762939451],["تطور",-12.9812593460083],["יצר",-12.981271743774414],["▁Dự",-12.981274604797363],["भेद",-12.98128890991211],["▁entorno",-12.981304168701172],["Esp",-12.98131275177002],["цкай",-12.981313705444336],["▁ქალი",-12.981315612792969],["dhan",-12.98138427734375],["▁తెలియ",-12.981389999389648],["いただく",-12.98139762878418],["▁الإسلام",-12.98140811920166],["ອຍ",-12.981410026550291],["▁molta",-12.981419563293455],["▁Fitness",-12.981427192687988],["ლინ",-12.981465339660645],["ექ",-12.981468200683594],["arda",-12.981500625610352],["yes",-12.981512069702148],["റും",-12.981514930725098],["▁ఉండే",-12.981515884399414],["દ્",-12.98151683807373],["onyesha",-12.981527328491213],["dula",-12.98153591156006],["▁João",-12.98155689239502],["roon",-12.981558799743652],["▁tantum",-12.981572151184082],["▁Mă",-12.981575965881348],["▁башта",-12.981587409973145],["▁ഷം",-12.981595039367676],["▁Sao",-12.98163604736328],["usto",-12.981654167175291],["zeti",-12.981683731079102],["חזר",-12.981711387634276],["▁пис",-12.981719017028809],["годишњ",-12.981722831726074],["▁suivant",-12.98173999786377],["月底",-12.98175811767578],["もし",-12.981775283813477],["מבצע",-12.98179531097412],["hull",-12.981809616088867],["▁చెయ్య",-12.98183536529541],["▁obitelji",-12.981836318969728],["မို",-12.981839179992676],["ன்று",-12.981854438781738],["歩",-12.98188018798828],["▁abban",-12.981892585754396],["▁
мөр",-12.981914520263672],["weka",-12.98192024230957],["▁mondial",-12.981938362121582],["시는",-12.981959342956545],["▁allow",-12.98198413848877],["▁İs",-12.982003211975098],["Ser",-12.982010841369627],["▁quarto",-12.982012748718262],["▁quick",-12.982012748718262],["ισμένο",-12.982023239135742],["чили",-12.982072830200195],["▁السياسي",-12.982080459594728],["▁kalli",-12.98208713531494],["သက္",-12.982100486755373],["olah",-12.982107162475586],["सकाळ",-12.982118606567385],["вым",-12.982138633728027],["pian",-12.98214340209961],["යන්ට",-12.982171058654783],["Cre",-12.982197761535645],["啟",-12.982197761535645],["▁vozi",-12.98219871520996],["KET",-12.982207298278809],["奉",-12.98220920562744],["▁Дом",-12.982234001159668],["▁alguém",-12.98226833343506],["▁üzərində",-12.98226833343506],["▁гэдгийг",-12.98226833343506],["▁Música",-12.982269287109377],["▁Dlatego",-12.982271194458008],["▁domanda",-12.982274055480955],["▁ಶಾಲೆ",-12.982281684875488],["▁gegenüber",-12.982282638549805],["▁Webseite",-12.98228931427002],["▁රියා",-12.98228931427002],["▁berkembang",-12.982295036315918],["▁elekt",-12.982298851013184],["▁Haziran",-12.982317924499512],["ያው",-12.982319831848145],["▁Männer",-12.982336044311523],["▁statement",-12.982340812683104],["▁نحن",-12.98235321044922],["▁عبور",-12.98236846923828],["ສົມ",-12.982373237609863],["ેશન",-12.982375144958496],["нээс",-12.982391357421877],["▁usor",-12.982414245605469],["▁Situs",-12.982439994812012],["▁NB",-12.982450485229492],["ējās",-12.98245620727539],["▁مؤ",-12.982544898986816],["▁mwezi",-12.98256015777588],["▁ayah",-12.982561111450195],["ردو",-12.982562065124512],["▁trap",-12.982571601867676],["▁думи",-12.982589721679688],["▁واقعی",-12.982603073120115],["▁Kristin",-12.982616424560549],["ITY",-12.982622146606444],["▁आवश्यकता",-12.982630729675291],["▁וכן",-12.98264503479004],["edad",-12.982666015625],["သလို",-12.982693672180176],["ورا",-12.98271369934082],["สมัคร",-12.982728004455566],["▁bitki",-12.982746124267578],["בלה",-12.98276710510254],["▁sezón",-12.982768058776855],["▁ماس",-12.982820510864258],["▁dwie",-12.982824325561523],["▁darab",-12.982828140258787],["६",-12.982831001281738],["▁citas",-12.982834815979004],["தும்",-12.9828519821167],["几年",-12.982854843139648],["laisten",-12.982870101928713],["▁stats",-12.98288917541504],["▁Даже",-12.982894897460938],["▁خپله",-12.982914924621582],["listen",-12.98292350769043],["ナー",-12.982939720153809],["▁hátt",-12.982955932617188],["▁piece",-12.983010292053224],["▁Мұ",-12.983013153076172],["לק",-12.983015060424805],["▁навч",-12.98306369781494],["军事",-12.983078002929688],["▁ceļo",-12.983107566833496],["▁ቆ",-12.983114242553713],["▁гад",-12.983131408691406],["gäng",-12.983137130737305],["▁عقب",-12.983150482177734],["▁ანუ",-12.983160018920898],["âng",-12.983180046081545],["JK",-12.983199119567873],["▁Vra",-12.983200073242188],["arul",-12.98321533203125],["▁joven",-12.983241081237791],["▁ρ",-12.983248710632324],["אור",-12.983262062072754],["▁advokat",-12.983274459838867],["▁állapot",-12.983274459838867],["狀",-12.98329734802246],["屏",-12.983315467834473],["▁dak",-12.983384132385254],["▁समयमा",-12.983388900756836],["▁Commission",-12.983403205871582],["▁خرداد",-12.983404159545898],["了一下",-12.98340892791748],["擔心",-12.983424186706545],["▁kojim",-12.98342514038086],["▁súhlas",-12.98343276977539],["▁کمپ",-12.98344898223877],["▁dicir",-12.983471870422363],["iczne",-12.983497619628906],["▁ажы",-12.983501434326172],["▁hírek",-12.983511924743652],["▁लेखक",-12.98351764678955],["▁haddii",-12.983532905578612],["▁બી",-12.983538627624512],["bino",
-12.983548164367676],["▁realit",-12.983551025390623],["▁Sten",-12.983552932739258],["▁tehtud",-12.98357391357422],["▁новый",-12.983576774597168],["▁informācijas",-12.983583450317385],["▁տարվա",-12.983586311340332],["еры",-12.983602523803713],["▁анализа",-12.983606338500977],["accés",-12.983617782592772],["ទា",-12.983617782592772],["▁herbal",-12.983630180358888],["dida",-12.983638763427734],["ácií",-12.983644485473633],["▁ಮಕ್ಕಳ",-12.983660697937012],["▁გულ",-12.983661651611328],["grani",-12.98367404937744],["▁149",-12.98370361328125],["▁колку",-12.983712196350098],["цыйны",-12.98373794555664],["nede",-12.983766555786133],["▁sann",-12.983768463134766],["▁480",-12.98381805419922],["观众",-12.983834266662598],["▁trimestre",-12.983848571777344],["▁Pé",-12.98386001586914],["kab",-12.983864784240724],["▁budak",-12.983867645263672],["▁sokkal",-12.983875274658203],["▁Исто",-12.983877182006836],["▁obsahu",-12.983880043029783],["्ये",-12.983901023864746],["▁Organis",-12.983905792236328],["ضاء",-12.983952522277832],["ଦେ",-12.983960151672363],["▁parts",-12.983967781066896],["▁מאת",-12.983983039855955],["▁требования",-12.98398494720459],["process",-12.98398780822754],["ელს",-12.983990669250488],["最初",-12.984006881713867],["▁sket",-12.984018325805664],["▁asso",-12.984055519104004],["ception",-12.98405933380127],["ווער",-12.984101295471191],["өк",-12.984105110168455],["▁Rafael",-12.984171867370604],["წყობ",-12.98417854309082],["नर",-12.984193801879885],["▁meteor",-12.984200477600098],["▁ಸಂಬಂಧ",-12.984212875366213],["▁икон",-12.984217643737791],["श्र",-12.984244346618652],["严",-12.984270095825195],["lekt",-12.984280586242676],["▁atender",-12.984281539916992],["品质",-12.984281539916992],["▁segi",-12.984292030334473],["قاد",-12.984326362609863],["▁tööta",-12.984326362609863],["投票",-12.984359741210938],["▁источник",-12.984365463256836],["િટ",-12.984368324279783],["▁우리가",-12.984375953674316],["▁عليهم",-12.984414100646973],["主人",-12.984418869018556],["▁करणार",-12.984435081481934],["放弃",-12.98443603515625],["瘦",-12.984457969665527],["貿易",-12.984463691711426],["痛苦",-12.984468460083008],["ოლი",-12.984478950500488],["▁बीजेपी",-12.984487533569336],["▁વધારે",-12.984487533569336],["▁неки",-12.984495162963867],["▁июня",-12.984498023986816],["▁nombreuses",-12.98450756072998],["下了",-12.98450756072998],["▁Төрийн",-12.98453140258789],["roid",-12.984545707702637],["完了",-12.984566688537598],["SKE",-12.98459529876709],["▁продължи",-12.984597206115724],["在这个",-12.98461627960205],["▁pomáha",-12.984620094299316],["κού",-12.98463535308838],["▁avril",-12.984657287597656],["▁регистр",-12.984657287597656],["▁büyü",-12.984659194946287],["ნით",-12.984673500061035],["نګ",-12.984681129455566],["חלה",-12.984686851501465],["▁өткөр",-12.984708786010742],["ējiem",-12.984728813171388],["▁gutxi",-12.984734535217283],["▁মাধ্যমে",-12.984736442565918],["▁ασφαλ",-12.98474407196045],["▁这",-12.984753608703612],["▁asing",-12.98477268218994],["ですので",-12.984774589538574],["▁체",-12.984795570373535],["规则",-12.984807014465332],["▁chirurg",-12.984810829162598],["▁ຕາມ",-12.984813690185549],["რომ",-12.984816551208496],["izacji",-12.98486042022705],["wara",-12.984862327575684],["▁պաշտպան",-12.98486328125],["▁골",-12.984870910644531],["▁gale",-12.984878540039062],["ložit",-12.984891891479492],["▁Podle",-12.984914779663086],["▁ഭാര്യ",-12.984943389892578],["ivät",-12.98496913909912],["▁Sent",-12.98497486114502],["epe",-12.984983444213867],["▁Itt",-12.984986305236816],["▁gravi",-12.984992980957031],["níka",-12.98501205444336],["▁promis",-12.98501205444336],["▁كثير",-
12.985031127929688],["kron",-12.985039710998535],["done",-12.985058784484863],["▁yılı",-12.985062599182127],["▁Donc",-12.98508071899414],["sual",-12.98509407043457],["ເຄ",-12.985111236572266],["тегі",-12.985118865966797],["▁的相簿",-12.985146522521973],["▁सत्ता",-12.985162734985352],["▁ନାମ",-12.985172271728516],["▁vyše",-12.98517894744873],["▁Մենք",-12.98520565032959],["ENTE",-12.985224723815918],["ተና",-12.985239028930664],["raq",-12.98523998260498],["▁इसी",-12.985276222229004],["UAN",-12.985295295715332],["وء",-12.985307693481444],["/03/",-12.985316276550291],["ssant",-12.985321998596191],["foreningen",-12.985346794128418],["గం",-12.985376358032228],["ético",-12.985382080078123],["▁vjen",-12.985383987426758],["あと",-12.98542308807373],["សម្រាប់",-12.985445022583008],["▁razmer",-12.985445976257324],["▁beradi",-12.985447883605955],["▁кори",-12.985450744628906],["ہا",-12.985454559326172],["kasa",-12.985491752624512],["▁Euch",-12.985517501831056],["启动",-12.985557556152344],["眉",-12.985562324523926],["▁loca",-12.98557186126709],["▁ಸಾಲ",-12.985572814941406],["▁čím",-12.985573768615724],["ပြီ",-12.98557472229004],["פאר",-12.985575675964355],["ونې",-12.985578536987305],["苹果",-12.985597610473633],["▁notwendig",-12.98559856414795],["▁περισσότερα",-12.98559856414795],["▁삭제",-12.985601425170898],["▁Masjid",-12.985607147216797],["▁ԱԺ",-12.985607147216797],["ຂໍ້ມູນ",-12.985608100891112],["▁मतदान",-12.985608100891112],["▁ተቀ",-12.985613822937012],["▁теме",-12.98562240600586],["▁március",-12.985623359680176],["▁محترم",-12.985630989074709],["▁повідомляє",-12.985633850097656],["▁Hótel",-12.985634803771973],["▁šodien",-12.985634803771973],["▁alumnes",-12.985639572143556],["▁آصف",-12.985668182373049],["▁پاڼه",-12.985679626464844],["▁toata",-12.985689163208008],["▁ərazisində",-12.985733032226562],["kerja",-12.985737800598145],["गड",-12.985755920410156],["lazak",-12.985783576965332],["▁ນັກ",-12.98581314086914],["▁mayo",-12.985814094543455],["ובע",-12.98584270477295],["▁ليست",-12.985848426818848],["▁Erotisk",-12.98586082458496],["▁befindet",-12.98587417602539],["şin",-12.985875129699709],["ΠΕ",-12.985876083374023],["STER",-12.985895156860352],["ξει",-12.985897064208984],["▁Geen",-12.985929489135742],["▁menys",-12.985939025878906],["▁δικαι",-12.985967636108398],["▁Иако",-12.98597240447998],["▁INS",-12.98597526550293],["vezet",-12.9860258102417],["▁mė",-12.986032485961914],["▁Gesetz",-12.986048698425291],["fara",-12.986075401306152],["▁скуп",-12.986093521118164],["сей",-12.986106872558594],["▁قو",-12.986108779907228],["▁helpu",-12.986136436462402],["ग्र",-12.98614501953125],["lof",-12.986166000366213],["▁فنی",-12.986183166503906],["ább",-12.98622989654541],["▁missä",-12.986233711242676],["▁алба",-12.986247062683104],["ֹ",-12.98627471923828],["▁Sint",-12.986278533935549],["▁odol",-12.986310005187988],["prox",-12.986348152160645],["▁Ł",-12.986400604248049],["ფლ",-12.986404418945312],["▁1916",-12.986404418945312],["▁ಸೋ",-12.986411094665527],["tear",-12.98641872406006],["▁значительно",-12.986459732055664],["▁Baie",-12.98649787902832],["rakstīt",-12.986499786376951],["▁трех",-12.98650074005127],["▁CAN",-12.986509323120115],["▁vrei",-12.986509323120115],["thing",-12.986542701721191],["▁ඕ",-12.98654842376709],["▁viêm",-12.98655128479004],["▁পাওয়া",-12.98655128479004],["▁ауданы",-12.986559867858888],["▁corpora",-12.98657512664795],["▁лок",-12.986580848693848],["▁mwenye",-12.986584663391112],["інше",-12.986601829528809],["jdzie",-12.986618041992188],["টির",-12.98662281036377],["▁weke",-12.986635208129885],["▁žl",-12.9866609573
36426],["▁osim",-12.986679077148438],["karte",-12.986686706542969],["▁začal",-12.986692428588867],["依頼",-12.986701011657717],["▁Giấy",-12.98671054840088],["▁πρόβλημα",-12.98671054840088],["▁засобів",-12.98671054840088],["▁इसमें",-12.98671054840088],["obiettivo",-12.986711502075195],["▁رہنما",-12.986711502075195],["▁سى",-12.986712455749512],["pë",-12.98671531677246],["▁არსებობს",-12.98671531677246],["kozó",-12.986736297607422],["▁empresarial",-12.986737251281738],["▁käyttöön",-12.986738204956056],["ΤΙ",-12.986764907836914],["▁visā",-12.986777305603027],["οφ",-12.98678493499756],["ţilor",-12.986787796020508],["ੂਰ",-12.986798286437988],["▁погляд",-12.986804008483888],["ကျော်",-12.986809730529783],["▁bức",-12.98682975769043],["億元",-12.986858367919922],["▁หลัง",-12.986861228942873],["▁excel",-12.986862182617188],["alus",-12.986897468566896],["دود",-12.986915588378906],["▁Udal",-12.986926078796388],["▁väi",-12.9869384765625],["▁دریا",-12.986984252929688],["▁информација",-12.987003326416016],["▁іске",-12.987013816833496],["้ม",-12.98704433441162],["▁filosofi",-12.987058639526367],["chana",-12.987074851989746],["UNT",-12.987128257751465],["▁спорту",-12.987133026123049],["під",-12.987140655517578],["▁Trade",-12.987171173095703],["▁directe",-12.987175941467283],["▁чува",-12.987194061279297],["▁abantu",-12.987196922302246],["▁حاج",-12.987211227416992],["صبح",-12.987231254577637],["各个",-12.987235069274902],["▁Анын",-12.987236976623535],["▁STA",-12.987257957458496],["യുമായി",-12.98727035522461],["mono",-12.987278938293455],["▁Hati",-12.98728370666504],["▁cánh",-12.987302780151367],["▁comunidad",-12.98732089996338],["钟",-12.98732566833496],["▁Wei",-12.987348556518556],["▁وسائل",-12.987354278564451],["▁torn",-12.987370491027832],["ніз",-12.987383842468262],["下來",-12.987398147583008],["ركز",-12.987411499023438],["žal",-12.9874267578125],["哪些",-12.987431526184082],["▁عمليات",-12.987438201904297],["יטה",-12.987465858459473],["▁Sza",-12.987465858459473],["▁Ən",-12.987483978271484],["រដ្ឋ",-12.987497329711914],["▁entire",-12.987499237060549],["▁орчин",-12.98751735687256],["ноў",-12.987523078918455],["以為",-12.987528800964355],["marknad",-12.987533569335938],["贝",-12.987541198730469],["गम",-12.987546920776367],["と思った",-12.987550735473633],["tiones",-12.987555503845217],["▁елементи",-12.987587928771973],["▁চার",-12.987608909606934],["▁Haji",-12.98761749267578],["ଦ୍ଧ",-12.987618446350098],["ωμένο",-12.987622261047363],["തെന്ന്",-12.98762321472168],["▁require",-12.987627029418944],["כתוב",-12.987641334533691],["▁millóns",-12.98764419555664],["ایت",-12.987667083740234],["צן",-12.98768138885498],["▁hoo",-12.987689971923828],["のではないでしょうか",-12.98770236968994],["关心",-12.987709999084473],["▁organizado",-12.9877290725708],["谋",-12.987759590148926],["采访",-12.987767219543455],["লেন",-12.987780570983888],["▁adik",-12.987805366516112],["▁explora",-12.987810134887695],["rè",-12.987818717956545],["▁ಮಾರುಕಟ್ಟೆ",-12.987823486328123],["▁Gespräch",-12.98782444000244],["▁Inoltre",-12.98782444000244],["▁Wednesday",-12.98782444000244],["▁зокрема",-12.98782444000244],["▁membangun",-12.987825393676758],["▁Вашингтон",-12.987825393676758],["տիկ",-12.987857818603516],["hok",-12.987870216369627],["▁kén",-12.987903594970703],["▁pitanja",-12.987905502319336],["ÖN",-12.987910270690918],["SIA",-12.987919807434082],["▁прости",-12.987920761108398],["▁رأس",-12.987922668457031],["घात",-12.987932205200195],["curr",-12.987992286682127],["െന്നു",-12.988001823425291],["▁በሰ",-12.988025665283203],["▁naiz",-12.988085746765137],["▁marina",-12.98809146881103
5],["▁Danke",-12.988109588623049],["актив",-12.988123893737791],["▁traballa",-12.98812770843506],["ান্ত",-12.98814296722412],["▁Pois",-12.988166809082031],["arca",-12.988168716430664],["სო",-12.98818016052246],["▁கோடி",-12.98819637298584],["ОД",-12.988203048706056],["ខ",-12.988272666931152],["▁investicij",-12.98828411102295],["řil",-12.98831558227539],["щей",-12.98832893371582],["▁auszu",-12.98834228515625],["вог",-12.988343238830566],["אָט",-12.988344192504885],["▁عرف",-12.98835563659668],["▁linje",-12.988397598266602],["▁Vân",-12.988410949707031],["▁Ča",-12.98842716217041],["▁najdete",-12.988430976867676],["кім",-12.988455772399902],["ární",-12.988460540771484],["▁keel",-12.988468170166016],["hì",-12.988480567932127],["▁warum",-12.988482475280762],["▁trest",-12.988508224487305],["▁gyerekek",-12.988526344299316],["▁нужны",-12.988532066345217],["良く",-12.988532066345217],["▁DK",-12.98853588104248],["ვო",-12.988593101501465],["▁Mula",-12.988605499267578],["▁torej",-12.988624572753906],["uis",-12.988638877868652],["irali",-12.988651275634766],["ሞት",-12.988658905029297],["▁നില്",-12.988661766052246],["▁Сум",-12.98866367340088],["мана",-12.98868465423584],["▁pohja",-12.988690376281738],["▁utzi",-12.988725662231444],["Link",-12.988757133483888],["яб",-12.98878288269043],["anger",-12.988802909851074],["▁निर्देशन",-12.988804817199709],["ոթ",-12.988816261291504],["ғыз",-12.988826751708984],["▁organizator",-12.98883056640625],["▁åpen",-12.98884105682373],["荣",-12.98887062072754],["▁కె",-12.988873481750488],["爬",-12.988890647888184],["▁жаза",-12.988898277282717],["▁লে",-12.98890495300293],["वले",-12.988909721374512],["onekana",-12.988926887512209],["▁Контакт",-12.988935470581056],["▁shërbim",-12.988938331604004],["▁Terengganu",-12.98893928527832],["▁ဘီဘီစီ",-12.98893928527832],["▁octobre",-12.988948822021484],["してしまう",-12.98895263671875],["арх",-12.98899269104004],["ທໍາ",-12.98899269104004],["סקי",-12.988993644714355],["રિ",-12.988999366760254],["▁организаций",-12.989023208618164],["▁választ",-12.98903751373291],["▁ਬੁ",-12.989046096801758],["എന്",-12.989058494567873],["▁snieg",-12.9890775680542],["▁resol",-12.989089965820312],["ਲਾਂ",-12.989096641540527],["▁индивидуал",-12.989115715026855],["負責",-12.989124298095703],["lait",-12.989130973815918],["රණ",-12.989143371582031],["faktor",-12.989147186279297],["▁websites",-12.989173889160156],["▁ilay",-12.989177703857422],["chtige",-12.989203453063965],["▁ciento",-12.989312171936035],["tuksia",-12.989313125610352],["ساعد",-12.989319801330566],["ground",-12.989325523376465],["▁vagina",-12.989355087280272],["▁смес",-12.989362716674805],["▁ones",-12.989377975463867],["▁verilib",-12.98940086364746],["▁Јо",-12.989421844482422],["минал",-12.989459037780762],["▁Made",-12.989459037780762],["▁Lad",-12.989469528198242],["ησ",-12.989489555358888],["ધી",-12.989495277404783],["րե",-12.989500045776367],["кса",-12.989558219909668],["oribus",-12.989561080932615],["▁alu",-12.98958683013916],["▁miq",-12.989599227905272],["čnost",-12.989609718322754],["▁Ռուսաստանի",-12.989612579345703],["beni",-12.989625930786133],["▁nisem",-12.989644050598145],["زون",-12.989678382873535],["▁ਬੰਦ",-12.989691734313965],["▁EURO",-12.989713668823242],["▁POL",-12.989713668823242],["▁olay",-12.98971939086914],["ლებ",-12.989754676818848],["uttaa",-12.989768981933594],["imp",-12.989787101745604],["▁133",-12.989792823791504],["▁sloven",-12.989831924438477],["नाम",-12.989840507507324],["ਟੇ",-12.98984432220459],["▁Пос",-12.989848136901855],["▁poznat",-12.98985481262207],["abend",-12.989869117736816],["▁tun
tu",-12.98987865447998],["▁værdi",-12.989911079406738],["▁avance",-12.989913940429688],["િન",-12.98994255065918],["▁dalších",-12.989943504333496],["ists",-12.989956855773926],["▁2021",-12.990001678466797],["▁Diana",-12.990002632141112],["ጤ",-12.99002170562744],["▁esigenze",-12.990055084228516],["▁sẵn",-12.990055084228516],["▁ವಿಚಾರ",-12.990055084228516],["▁peringkat",-12.990057945251465],["▁රියාත්මක",-12.990059852600098],["▁Køb",-12.99006462097168],["▁დღის",-12.990068435668944],["▁UD",-12.990099906921388],["िं",-12.99010181427002],["▁cinn",-12.990117073059082],["▁mkono",-12.990124702453612],["▁아무",-12.990144729614258],["เป็นที่",-12.990152359008787],["னால்",-12.9901762008667],["▁полно",-12.990211486816406],["ũ",-12.990222930908203],["▁ĉef",-12.990230560302734],["▁desenvolve",-12.990239143371582],["▁določen",-12.990246772766112],["вший",-12.990249633789062],["▁vrije",-12.990269660949709],["տոր",-12.990286827087402],["▁лагер",-12.99032497406006],["ESS",-12.99038028717041],["▁dantza",-12.990389823913574],["ificación",-12.990418434143066],["▁ਬਿ",-12.99044418334961],["▁јасно",-12.99045467376709],["▁sorozat",-12.990466117858888],["льного",-12.99046802520752],["▁Lâm",-12.990496635437012],["isant",-12.990498542785645],["▁advers",-12.990500450134276],["मात्र",-12.990504264831545],["▁ስለዚህ",-12.990537643432615],["УН",-12.990554809570312],["žnosti",-12.990555763244627],["លេង",-12.990582466125488],["оне",-12.990586280822754],["▁Івано",-12.990592956542969],["▁İyi",-12.990594863891602],["ෆ්",-12.990598678588867],["▁cirka",-12.990602493286133],["増",-12.990604400634766],["▁مسا",-12.990606307983398],["പ്പെടുത്ത",-12.990622520446776],["myn",-12.990638732910156],["▁подходящ",-12.990677833557127],["једи",-12.990710258483888],["rası",-12.99071216583252],["▁markt",-12.990714073181152],["▁ევრო",-12.990717887878418],["▁сына",-12.990765571594238],["接触",-12.99079418182373],["▁pest",-12.990804672241213],["гаж",-12.990853309631348],["gestel",-12.990856170654297],["みました",-12.990874290466309],["னுக்கு",-12.990880012512209],["▁Hasil",-12.990901947021484],["適用",-12.990920066833496],["▁teanga",-12.99093246459961],["ທິ",-12.99094009399414],["ξαν",-12.990942001342772],["рука",-12.99095630645752],["ταλ",-12.990961074829102],["▁ગા",-12.991002082824709],["▁стави",-12.991010665893556],["▁Уж",-12.991021156311035],["▁श्रेष्ठ",-12.991031646728516],["बंध",-12.991037368774414],["▁жұмыстар",-12.991056442260742],["▁crec",-12.991110801696776],["पक",-12.991113662719728],["诗",-12.991116523742676],["ვიდ",-12.99114227294922],["▁ysgol",-12.991146087646484],["▁incididunt",-12.991172790527344],["▁ordre",-12.991172790527344],["▁yuqori",-12.991172790527344],["▁хэрхэн",-12.991175651550291],["▁cilën",-12.99117660522461],["▁экспорт",-12.99117946624756],["▁instead",-12.991181373596191],["▁ავტორი",-12.991181373596191],["▁rozwoju",-12.991182327270508],["▁wazi",-12.99118423461914],["▁gizarte",-12.991185188293455],["▁хийсэн",-12.991186141967772],["▁ڪندڙ",-12.991195678710938],["▁ناکام",-12.991235733032228],["▁pogoji",-12.991238594055176],["▁krea",-12.991272926330566],["▁provi",-12.991276741027832],["▁utanför",-12.991291046142578],["tkaz",-12.991310119628906],["▁stb",-12.99131202697754],["▁Cava",-12.99131965637207],["angat",-12.99132251739502],["zlar",-12.991332054138184],["▁ಕಳೆದ",-12.991351127624512],["▁ifølge",-12.991379737854004],["▁ongelma",-12.991388320922852],["▁kalma",-12.991394996643066],["ਬਰ",-12.991397857666016],["ტომ",-12.991398811340332],["▁reži",-12.991398811340332],["naf",-12.991415023803713],["جھ",-12.991422653198242],["neiden",-12.9914350509
64355],["▁Ganz",-12.991461753845217],["▁rektor",-12.991466522216797],["ೇಶ್",-12.991470336914062],["બર",-12.99148941040039],["sága",-12.991507530212402],["工商",-12.991518020629885],["नल",-12.991548538208008],["▁Тэ",-12.991567611694336],["▁स्तन",-12.991569519042969],["រំ",-12.991576194763184],["▁обязан",-12.991584777832031],["āku",-12.99159336090088],["▁trom",-12.991596221923828],["anii",-12.991622924804688],["▁económico",-12.991643905639648],["▁vivre",-12.991646766662598],["▁sabemos",-12.991647720336914],["▁uzak",-12.991676330566406],["▁lleol",-12.991686820983888],["▁prep",-12.99168872833252],["▁وص",-12.991700172424316],["▁šol",-12.991701126098633],["団",-12.991738319396973],["▁chocolate",-12.99174976348877],["▁муу",-12.991806030273438],["hà",-12.991847038269045],["ulur",-12.991849899291992],["▁Other",-12.991861343383787],["▁lekár",-12.991881370544434],["▁마음",-12.991888999938965],["ặc",-12.991898536682127],["brali",-12.99190616607666],["ышты",-12.991942405700684],["▁цю",-12.991943359375],["▁düzenlenen",-12.991944313049316],["▁өткіз",-12.991969108581545],["▁próximos",-12.991982460021973],["▁комфорт",-12.991994857788086],["čkou",-12.992003440856934],["带来的",-12.992012977600098],["ಗಣ",-12.992039680480955],["▁بخشی",-12.992047309875488],["ഴി",-12.992063522338867],["guide",-12.992069244384766],["▁Αρχ",-12.992104530334473],["▁paku",-12.992106437683104],["之間",-12.992130279541016],["▁Ալ",-12.992188453674316],["▁maanden",-12.992218017578123],["▁Արցախի",-12.992236137390137],["▁hverandre",-12.99229907989502],["لىپ",-12.992316246032717],["▁corazón",-12.992323875427246],["▁теория",-12.992326736450195],["цам",-12.992338180541992],["▁eredeti",-12.992339134216309],["▁personel",-12.992349624633787],["▁разме",-12.992364883422852],["angi",-12.992405891418455],["▁לכך",-12.992413520812988],["▁notícia",-12.99242877960205],["reta",-12.992436408996582],["▁Suriye",-12.99244785308838],["▁bulunma",-12.99246597290039],["ണമെന്ന",-12.992476463317873],["▁construct",-12.992477416992188],["▁فرزند",-12.9924898147583],["▁ಪರಿ",-12.992501258850098],["fine",-12.992506980895996],["රී",-12.992513656616213],["УБ",-12.992530822753906],["▁Wetter",-12.992545127868652],["▁procés",-12.992571830749512],["▁completely",-12.992586135864258],["funktion",-12.992588996887209],["▁sensi",-12.992609024047852],["філ",-12.992613792419434],["▁satisfac",-12.992623329162598],["ΚΟ",-12.99262523651123],["ພະ",-12.992632865905762],["Си",-12.992692947387695],["sugu",-12.992713928222656],["心情",-12.992733001708984],["אג",-12.992745399475098],["liggende",-12.99274730682373],["givelse",-12.992753028869627],["tjänst",-12.992754936218262],["▁Osta",-12.992773056030272],["▁وبال",-12.992775917053224],["房間",-12.992779731750488],["▁kontribu",-12.992786407470703],["▁Бі",-12.992798805236816],["▁Titel",-12.992813110351562],["趋势",-12.992879867553713],["ńskiego",-12.992884635925291],["▁misura",-12.992905616760254],["ችው",-12.992911338806152],["▁சட்ட",-12.992913246154783],["jauh",-12.992914199829102],["deset",-12.992920875549316],["▁136",-12.992938041687012],["▁خام",-12.992995262145996],["からは",-12.993054389953612],["來的",-12.99307346343994],["後に",-12.993083953857422],["▁çap",-12.993096351623535],["▁የስ",-12.993104934692385],["वै",-12.993181228637695],["▁мој",-12.993186950683594],["იქ",-12.993199348449709],["▁zapewnia",-12.993226051330566],["豐富",-12.993288040161133],["▁considerat",-12.993309020996094],["▁هوندي",-12.993341445922852],["ೂರ",-12.993351936340332],["▁кожны",-12.993375778198242],["▁offers",-12.993382453918455],["▁حدیث",-12.99341106414795],["▁আওয়ামী",-12.99341106414795],
["▁تقسیم",-12.993413925170898],["▁ପୁଣି",-12.993413925170898],["ไฟล์",-12.993414878845217],["▁inligting",-12.99341869354248],["▁stavanger",-12.99342441558838],["▁convocatoria",-12.993425369262695],["തിക",-12.993446350097656],["▁Umno",-12.993467330932615],["▁чухал",-12.993490219116213],["▁독",-12.993494987487791],["isista",-12.993500709533691],["▁Dre",-12.99350929260254],["နယ္",-12.993523597717283],["шек",-12.993529319763184],["slim",-12.993534088134766],["▁køb",-12.993542671203612],["በትን",-12.993568420410156],["インターネット",-12.993576049804688],["wili",-12.993584632873535],["īvi",-12.993600845336914],["▁hadden",-12.993610382080078],["▁Arts",-12.99363613128662],["เค้า",-12.993639945983888],["▁alkoi",-12.993646621704102],["▁intro",-12.993656158447266],["nızda",-12.993657112121582],["▁zařízení",-12.993667602539062],["▁созда",-12.993694305419922],["ateur",-12.99370574951172],["每日",-12.993729591369627],["▁ND",-12.993734359741213],["▁ഇരു",-12.993734359741213],["까요",-12.993738174438477],["▁apapun",-12.993776321411133],["▁परेको",-12.99379062652588],["▁løpet",-12.993828773498535],["▁αποκ",-12.993831634521484],["кове",-12.993844032287598],["hatja",-12.993856430053713],["шкільн",-12.993871688842772],["ttamaan",-12.993886947631836],["ට්ට",-12.99391269683838],["▁abang",-12.993914604187012],["ିକା",-12.993915557861328],["для",-12.99392032623291],["▁תמ",-12.99392795562744],["▁நாடு",-12.993951797485352],["人家",-12.993974685668944],["గర్",-12.993979454040527],["▁május",-12.993985176086426],["ితే",-12.994014739990234],["▁רבות",-12.994014739990234],["▁avui",-12.994026184082031],["▁výraz",-12.994030952453612],["▁medlems",-12.994041442871094],["bahn",-12.994067192077637],["ִי",-12.994072914123535],["ाणि",-12.99409294128418],["ryd",-12.99414348602295],["kojen",-12.994146347045898],["બુ",-12.994147300720217],["パン",-12.994181632995604],["אמר",-12.994199752807615],["lnej",-12.994233131408691],["ĉi",-12.994236946105955],["akti",-12.994240760803224],["▁дву",-12.994243621826172],["▁află",-12.994263648986816],["▁ölkədə",-12.994263648986816],["▁Julie",-12.994264602661133],["▁vidéo",-12.99428939819336],["▁სახე",-12.994292259216309],["▁množstvo",-12.994293212890623],["▁Wen",-12.994300842285156],["▁devono",-12.994309425354004],["▁plaatsen",-12.994318962097168],["▁በሚል",-12.9943265914917],["صلى",-12.994340896606444],["▁Ahoj",-12.994345664978027],["▁occasione",-12.99436378479004],["▁doono",-12.994366645812988],["▁szuka",-12.994380950927734],["▁wid",-12.994394302368164],["▁tenido",-12.994404792785645],["▁laisser",-12.99440860748291],["allo",-12.994422912597656],["▁เม",-12.994440078735352],["sani",-12.994446754455566],["▁શકાય",-12.994478225708008],["鋼",-12.994479179382324],["▁jobber",-12.994488716125488],["تەر",-12.994494438171388],["ฏ",-12.994531631469728],["▁φαίνεται",-12.994531631469728],["▁vandaag",-12.994536399841309],["တုိ႔",-12.994539260864258],["▁כמעט",-12.994550704956056],["▁rua",-12.994573593139648],["▁પોતાની",-12.994573593139648],["▁Egipt",-12.99457550048828],["▁ಕ್ಯಾ",-12.99458122253418],["▁Isla",-12.994588851928713],["▁мақсат",-12.994616508483888],["▁organizza",-12.99463939666748],["▁hydro",-12.994668960571287],["▁hake",-12.994688034057615],["▁yerinə",-12.994704246520996],["▁piso",-12.994710922241213],["▁okazji",-12.994747161865234],["oner",-12.994760513305664],["▁necesar",-12.994762420654297],["▁konden",-12.994772911071776],["▁апа",-12.99477481842041],["▁воздух",-12.994794845581056],["▁ય",-12.99479866027832],["▁conquista",-12.994805335998535],["sori",-12.994817733764648],["▁cheap",-12.99488639831543],["▁కలిసి",-12.99489498
1384276],["▁repos",-12.994909286499023],["ปฏิบัติ",-12.994925498962402],["▁ಮುಂದೆ",-12.994935035705566],["一路",-12.994939804077148],["▁179",-12.99494171142578],["▁Στη",-12.994943618774414],["▁ქართველი",-12.994950294494627],["▁រួម",-12.994991302490234],["▁슬",-12.995003700256348],["PAN",-12.995047569274902],["рано",-12.995052337646484],["დებით",-12.995054244995115],["▁농",-12.995055198669434],["szár",-12.995062828063965],["▁spillere",-12.995079040527344],["▁15-20",-12.995083808898926],["▁Slov",-12.995083808898926],["▁informacijos",-12.995086669921877],["▁Topp",-12.995119094848633],["終了",-12.995162963867188],["▁Palma",-12.995182037353516],["пуск",-12.995187759399414],["ëri",-12.995244979858398],["▁avond",-12.995282173156738],["leisti",-12.995298385620115],["look",-12.995309829711914],["stæð",-12.995311737060549],["ίζονται",-12.995318412780762],["▁süt",-12.99534034729004],["▁פרא",-12.995352745056152],["อัล",-12.995356559753418],["▁2017-2018",-12.995404243469238],["▁djela",-12.99542236328125],["▁ડિ",-12.995437622070312],["▁ሁሉም",-12.995450973510742],["▁남자",-12.99551773071289],["рых",-12.995523452758787],["▁peruste",-12.995528221130373],["▁комиссия",-12.99555492401123],["▁kullanıcı",-12.995556831359863],["▁studij",-12.995556831359863],["▁Оның",-12.99555778503418],["▁проте",-12.995564460754396],["▁LT",-12.995575904846191],["招聘",-12.995604515075684],["▁దాని",-12.995613098144531],["स्थान",-12.995633125305176],["▁ענ",-12.995643615722656],["測試",-12.995645523071287],["汁",-12.995646476745604],["▁vlas",-12.995648384094238],["ေအာက္",-12.995654106140137],["▁Sonntag",-12.995655059814451],["▁नरेंद्र",-12.99565601348877],["eña",-12.99567413330078],["▁instituti",-12.99567985534668],["▁брзо",-12.995688438415527],["▁License",-12.99569034576416],["▁önem",-12.99569320678711],["▁प्रो",-12.99571132659912],["▁അതിന്റെ",-12.99573040008545],["лета",-12.99574851989746],["▁хугацаанд",-12.99575424194336],["ফা",-12.995772361755373],["izacja",-12.99577808380127],["▁предложение",-12.995779037475586],["▁कलाकार",-12.99579906463623],["▁পু",-12.995804786682127],["一件",-12.995804786682127],["▁deshalb",-12.995858192443848],["▁pub",-12.995859146118164],["зг",-12.995912551879885],["pete",-12.995915412902832],["▁құр",-12.99591827392578],["▁پاران",-12.995959281921388],["▁Gazeta",-12.995991706848145],["▁cilvēkiem",-12.99604320526123],["▁وڏو",-12.99605655670166],["▁الخبر",-12.996103286743164],["▁Њ",-12.996115684509276],["▁spelar",-12.996150016784668],["機会",-12.996150970458984],["க்கான",-12.996167182922363],["▁appena",-12.996235847473145],["▁finn",-12.996256828308104],["zed",-12.996261596679688],["ाऊ",-12.99629020690918],["▁îro",-12.996293067932127],["▁Seksi",-12.996335983276367],["▁cadeau",-12.996373176574709],["▁શેર",-12.996387481689451],["▁мемлекет",-12.99640655517578],["ដាក់",-12.996432304382324],["▁programmet",-12.996468544006348],["▁мои",-12.99647045135498],["▁nid",-12.996478080749512],["▁වර",-12.99649715423584],["▁Leave",-12.996530532836914],["icznych",-12.996542930603027],["Ư",-12.996560096740724],["▁състояние",-12.996577262878418],["ေ၀",-12.9966459274292],["▁جاي",-12.996649742126465],["▁abbia",-12.99666976928711],["網絡",-12.996712684631348],["▁rice",-12.99671459197998],["kowy",-12.996719360351562],["orat",-12.996726989746094],["éssel",-12.996743202209473],["KV",-12.996747016906738],["▁dedicat",-12.996767044067385],["▁đột",-12.996774673461914],["วิจัย",-12.996776580810549],["▁ئاللاھ",-12.996777534484863],["▁बीबीसी",-12.996777534484863],["▁ডিসেম্বর",-12.996777534484863],["▁מרכז",-12.996780395507812],["สิทธิ์",-12.996784210205078],["▁toli
au",-12.996787071228027],["▁بعضی",-12.99678897857666],["Չ",-12.99679183959961],["▁Филип",-12.996804237365724],["เริ่มต้น",-12.996816635131836],["Є",-12.996820449829102],["▁ngăn",-12.996826171875],["▁Empresa",-12.996834754943848],["▁toilet",-12.996840476989746],["цата",-12.996845245361328],["▁prostě",-12.996846199035645],["▁ВО",-12.996846199035645],["▁jantung",-12.99685001373291],["▁abort",-12.996858596801758],["▁برقرار",-12.996864318847656],["િય",-12.996871948242188],["▁ambazo",-12.996923446655272],["▁nation",-12.996923446655272],["綜合",-12.99693202972412],["angel",-12.996933937072754],["▁ואני",-12.996947288513184],["Mae",-12.99695873260498],["▁mereu",-12.996978759765623],["ताल",-12.996992111206056],["▁tényleg",-12.997000694274902],["mpan",-12.997008323669434],["చీ",-12.997025489807127],["▁Geb",-12.997041702270508],["财政",-12.997051239013672],["▁Cuma",-12.997065544128418],["っと",-12.99708080291748],["▁tausta",-12.99708652496338],["▁Этот",-12.997117042541504],["تصر",-12.997122764587402],["▁Светог",-12.997137069702148],["கிறது",-12.997153282165527],["▁Series",-12.997162818908691],["事務所",-12.997163772583008],["▁공간",-12.997182846069336],["▁desert",-12.997187614440918],["▁писмо",-12.99720573425293],["▁premis",-12.99721336364746],["▁كۈن",-12.99721908569336],["▁Earth",-12.997220039367676],["▁miei",-12.997220039367676],["▁Bh",-12.997262954711914],["▁prensa",-12.997265815734863],["地下",-12.997294425964355],["事を",-12.99733066558838],["იშვილი",-12.99736499786377],["▁גא",-12.997370719909668],["▁leanas",-12.997385025024414],["▁oude",-12.99739933013916],["▁Tages",-12.997482299804688],["▁százalék",-12.99750804901123],["ياس",-12.99754238128662],["gjë",-12.99756145477295],["▁İşte",-12.99756908416748],["▁merg",-12.99757480621338],["▁chuig",-12.99759292602539],["▁Вал",-12.9976167678833],["夕",-12.997629165649414],["▁꿈",-12.997634887695312],["კულ",-12.99765682220459],["တိုး",-12.997689247131348],["▁dlho",-12.99769115447998],["▁Logg",-12.997699737548828],["▁hoş",-12.997702598571776],["在這",-12.997722625732422],["စိတ္",-12.99775218963623],["▁automatisk",-12.997780799865724],["▁1915",-12.997801780700684],["tudomány",-12.997810363769531],["鼓励",-12.997817039489746],["▁Caesar",-12.99782657623291],["▁חל",-12.997846603393556],["湿",-12.997849464416504],["堅持",-12.997867584228516],["嘅",-12.997869491577148],["도록",-12.997869491577148],["尿",-12.997875213623049],["▁እንኳን",-12.99789333343506],["▁dần",-12.997902870178224],["▁Возможно",-12.997902870178224],["▁హీరోయిన్",-12.997902870178224],["▁Istanbul",-12.99790382385254],["▁mũi",-12.99790382385254],["▁پرورش",-12.997913360595703],["▁capacità",-12.99791431427002],["ينه",-12.997931480407717],["▁CIA",-12.997932434082031],["ბელ",-12.99793815612793],["чните",-12.997960090637209],["▁Según",-12.998004913330078],["ល្អ",-12.998008728027344],["▁tisti",-12.998008728027344],["▁Viele",-12.998016357421877],["▁ôf",-12.998037338256836],["▁يعمل",-12.99804973602295],["လင္း",-12.998065948486328],["大陆",-12.998083114624023],["▁cestu",-12.998086929321287],["▁صلاح",-12.998089790344238],["▁sensu",-12.998090744018556],["▁Save",-12.998104095458984],["われ",-12.998140335083008],["▁вирус",-12.99814224243164],["▁#2",-12.99814796447754],["▁pracę",-12.998150825500488],["▁tajā",-12.99816608428955],["▁reprezentant",-12.998187065124512],["▁cami",-12.998190879821776],["▁നട",-12.998204231262209],["関",-12.998212814331056],["▁krimi",-12.998235702514648],["пек",-12.998246192932127],["▁පොත",-12.998263359069824],["▁primar",-12.998273849487305],["▁Moz",-12.998295783996582],["ctis",-12.998340606689451],["ులకు",-12.9983549118042]
,["ではありません",-12.998421669006348],["▁muerte",-12.998432159423828],["▁ভি",-12.998555183410645],["▁všechno",-12.998577117919922],["เดียวกัน",-12.998584747314451],["▁truy",-12.998610496520996],["jik",-12.998619079589844],["▁výber",-12.998627662658691],["cuti",-12.998652458190918],["nome",-12.998658180236816],["▁✔",-12.998703002929688],["▁1910",-12.99870777130127],["当中",-12.9987154006958],["leden",-12.998740196228027],["boru",-12.998741149902344],["▁૨૦",-12.99874210357666],["JS",-12.998775482177734],["剩",-12.998786926269531],["slapp",-12.998790740966797],["▁halk",-12.998796463012695],["иялык",-12.998806953430176],["▁লি",-12.998808860778809],["▁وارن",-12.998833656311035],["▁teacht",-12.998846054077148],["▁مفت",-12.998849868774414],["▁Arri",-12.99885368347168],["▁siraj",-12.99885368347168],["betrieb",-12.998854637145996],["ംബ",-12.998902320861816],["▁MAI",-12.998903274536133],["פוס",-12.99892807006836],["ضر",-12.998942375183104],["adis",-12.998956680297852],["▁සියලු",-12.998968124389648],["恢复",-12.998978614807127],["证券",-12.998979568481444],["▁Dela",-12.998983383178713],["肥",-12.99898624420166],["វត្ត",-12.998994827270508],["ഥ",-12.999004364013672],["▁réalisé",-12.999029159545898],["▁unterstützen",-12.999029159545898],["▁Բայց",-12.999029159545898],["▁ಆರೋಗ್ಯ",-12.999029159545898],["▁betydning",-12.999030113220217],["▁holiday",-12.999031066894531],["▁Nemzeti",-12.99904727935791],["▁teada",-12.999063491821287],["▁prekr",-12.99907398223877],["луп",-12.999077796936035],["ရေ",-12.999077796936035],["▁halen",-12.999079704284668],["წავ",-12.999086380004885],["▁ổn",-12.999089241027832],["minis",-12.999105453491213],["▁نظامی",-12.99910831451416],["▁gesagt",-12.999114990234377],["▁Sop",-12.999130249023438],["▁ରାଜ",-12.999131202697754],["hede",-12.999136924743652],["▁hạnh",-12.99914264678955],["快樂",-12.999143600463867],["▁यांना",-12.99915885925293],["office",-12.999168395996094],["dron",-12.99916934967041],["ब्य",-12.999173164367676],["▁raczej",-12.999180793762209],["tinka",-12.999189376831056],["▁sonucu",-12.999215126037598],["pino",-12.999235153198242],["▁precedente",-12.999238967895508],["▁хотын",-12.999248504638672],["▁barv",-12.999256134033203],["▁8)",-12.999258995056152],["預訂",-12.999265670776367],["ກໍ່",-12.999284744262695],["bc",-12.999320030212402],["▁мәдениет",-12.9993257522583],["▁rapporter",-12.999327659606934],["999",-12.999338150024414],["დერ",-12.99933910369873],["▁extend",-12.999350547790527],["▁ruku",-12.999354362487791],["వాడు",-12.999356269836426],["▁مط",-12.999359130859377],["▁dytë",-12.999364852905272],["বেন",-12.999369621276855],["▁योग्य",-12.999435424804688],["▁137",-12.99945831298828],["▁материалдар",-12.999468803405762],["▁alimenta",-12.999471664428713],["▁ներ",-12.999481201171877],["▁вина",-12.999492645263672],["▁душа",-12.99950885772705],["льник",-12.999512672424316],["▁Four",-12.999581336975098],["чуж",-12.99958610534668],["▁Ice",-12.999591827392578],["াস",-12.999600410461426],["veden",-12.999604225158691],["▁kilpailu",-12.999604225158691],["▁zahteva",-12.999634742736816],["նական",-12.999639511108398],["事实",-12.999693870544434],["小的",-12.999707221984863],["▁capa",-12.999716758728027],["▁Vera",-12.99972152709961],["▁Assim",-12.999743461608888],["ۇم",-12.999744415283203],["끼",-12.999750137329102],["▁morate",-12.99977970123291],["deva",-12.99979305267334],["രും",-12.999794006347656],["▁زوج",-12.999808311462402],["จร",-12.999813079833984],["▁illud",-12.999814987182615],["▁kyse",-12.999825477600098],["▁zabal",-12.99983024597168],["▁ieder",-12.999862670898438],["▁hasa",-12.999914169311523
],["▁অন্য",-12.999977111816406],["арт",-12.99998664855957],["▁üht",-13.000022888183594],["の場合は",-13.000041961669922],["启",-13.000080108642578],["▁DIA",-13.000082969665527],["gelyk",-13.000088691711426],["ованих",-13.000091552734377],["▁Προσ",-13.000115394592283],["▁رکي",-13.00011920928955],["▁خىل",-13.00013256072998],["サポート",-13.000144004821776],["サイズ",-13.00015354156494],["▁spoločnosť",-13.00015640258789],["▁Почему",-13.00015640258789],["▁صنایع",-13.00015640258789],["▁організацій",-13.000164031982422],["▁राजस्थान",-13.000165939331056],["▁қоғамдық",-13.000170707702637],["hain",-13.000176429748535],["▁Auk",-13.000176429748535],["hindi",-13.000181198120115],["▁Кыргызстандын",-13.000187873840332],["▁эффект",-13.00020694732666],["▁kõ",-13.000208854675291],["▁फक्त",-13.00021266937256],["urut",-13.000221252441406],["▁होंगे",-13.00022315979004],["vát",-13.00024700164795],["▁ше",-13.00024700164795],["блю",-13.000262260437012],["▁ದೇಶ",-13.000279426574709],["ခင်",-13.000286102294922],["▁เนื่องจาก",-13.000293731689451],["teknologi",-13.000311851501465],["▁ראשי",-13.000329971313477],["▁manhã",-13.000330924987791],["▁filho",-13.00034523010254],["▁chamado",-13.000350952148438],["chilar",-13.00040054321289],["▁бақылау",-13.000421524047852],["▁القدم",-13.0004243850708],["▁දිය",-13.000431060791016],["▁Beni",-13.000436782836914],["▁půl",-13.000450134277344],["upo",-13.00045394897461],["▁ისეთი",-13.000479698181152],["▁اخلاق",-13.000483512878418],["▁المالية",-13.000484466552734],["ottelu",-13.000490188598633],["ਰੂ",-13.000493049621582],["▁новин",-13.00050163269043],["▁electronic",-13.000515937805176],["▁klubben",-13.000597953796388],["PAT",-13.000604629516602],["anske",-13.000617027282717],["ంటూ",-13.000625610351562],["▁sterke",-13.000653266906738],["▁prieten",-13.000654220581056],["▁ለምን",-13.0006685256958],["▁이동",-13.000709533691406],["▁меша",-13.000741004943848],["是要",-13.000764846801758],["niya",-13.000782012939451],["raren",-13.000800132751465],["▁מוז",-13.000808715820312],["▁ismert",-13.000819206237791],["▁مستقل",-13.000847816467283],["ότερα",-13.000864028930664],["▁Garanti",-13.000869750976562],["▁коментара",-13.000885009765623],["തന്നെ",-13.000894546508787],["▁включен",-13.000910758972168],["▁chứa",-13.00093936920166],["▁දේශ",-13.000950813293455],["▁kommit",-13.000969886779783],["▁LES",-13.000975608825684],["rth",-13.00100326538086],["▁reduk",-13.00101375579834],["lenmesi",-13.001021385192873],["▁മറ്റു",-13.00102996826172],["是有",-13.001049995422363],["▁სო",-13.001072883605955],["ऊ",-13.001073837280272],["دگی",-13.001075744628906],["ội",-13.001080513000488],["畫面",-13.001096725463867],["▁مرغ",-13.001099586486816],["▁ስን",-13.001111030578612],["▁фотографии",-13.001124382019045],["fano",-13.001126289367676],["▁jung",-13.001134872436523],["▁යනු",-13.001155853271484],["▁تحمل",-13.001173973083496],["வோ",-13.00118350982666],["▁panga",-13.00118923187256],["▁decora",-13.001200675964355],["γι",-13.001213073730469],["沃",-13.001221656799316],["▁ಕುಮಾರ್",-13.001222610473633],["лич",-13.001237869262695],["▁використовувати",-13.001248359680176],["Sy",-13.001255989074709],["дово",-13.001270294189451],["দার",-13.001272201538086],["▁moderni",-13.00127410888672],["▁উন্নয়ন",-13.0012845993042],["▁ఫోటో",-13.0012845993042],["깨",-13.001285552978516],["▁rask",-13.001290321350098],["▁అలాగే",-13.001296043395996],["情绪",-13.001299858093262],["ίνα",-13.001306533813477],["เด็ด",-13.001312255859377],["dij",-13.001336097717283],["▁ابھی",-13.001336097717283],["ماه",-13.001341819763184],["ėms",-13.001350402832031],["▁minőség",-13.001365661
621094],["▁Muka",-13.00139331817627],["▁پورې",-13.001399040222168],["latih",-13.001412391662598],["এন",-13.001413345336914],["▁Počet",-13.00141429901123],["▁Cai",-13.001416206359863],["▁నె",-13.001420974731444],["рун",-13.001421928405762],["▁inti",-13.00143337249756],["▁%)",-13.00144386291504],["くなる",-13.001453399658203],["▁meilleure",-13.00145435333252],["никот",-13.001470565795898],["২০",-13.001471519470217],["▁čez",-13.001494407653809],["ဂျ",-13.001551628112791],["▁polici",-13.00161838531494],["ାନ",-13.001641273498535],["upan",-13.001642227172852],["ของเขา",-13.001659393310549],["▁posao",-13.001669883728027],["ajam",-13.001689910888672],["▁ajánl",-13.00172233581543],["mässä",-13.00173282623291],["▁місті",-13.001752853393556],["▁Skol",-13.001809120178224],["▁capit",-13.001832008361816],["ജെ",-13.001832962036133],["那种",-13.001846313476562],["▁Conven",-13.001883506774902],["▁Петр",-13.001885414123535],["▁sozial",-13.0018892288208],["▁napon",-13.00189971923828],["▁šių",-13.001952171325684],["认",-13.001956939697266],["madi",-13.001972198486328],["జై",-13.001980781555176],["▁talento",-13.001999855041504],["чиња",-13.002009391784668],["бота",-13.002017974853516],["▁ക്",-13.002049446105955],["▁دهنده",-13.002079010009766],["▁larga",-13.002081871032717],["ിങ്",-13.002094268798828],["ატი",-13.002095222473145],["▁යාපනය",-13.002095222473145],["季節",-13.002137184143066],["ıldığı",-13.002187728881836],["▁Pov",-13.00220012664795],["шиг",-13.002211570739746],["早く",-13.00223159790039],["▁138",-13.002296447753906],["▁voluptate",-13.002314567565918],["▁있는데",-13.002314567565918],["▁meillä",-13.002324104309082],["▁Соф",-13.00235080718994],["ificat",-13.002381324768066],["▁tregon",-13.002387046813965],["ກໍ",-13.002389907836914],["vagy",-13.002395629882812],["▁Cysylltwch",-13.00241470336914],["▁Mohamed",-13.00241470336914],["▁membutuhkan",-13.00241470336914],["▁vyhradené",-13.00241470336914],["▁мастацтва",-13.00241470336914],["ກົດຫມາຍ",-13.002415657043455],["▁mədəniyyət",-13.002415657043455],["一张",-13.00241756439209],["▁çevril",-13.00242042541504],["▁vandens",-13.002421379089355],["▁בשביל",-13.002423286437988],["▁మూడు",-13.002423286437988],["▁west",-13.002429008483888],["▁گاڑی",-13.002431869506836],["▁전국",-13.00245761871338],["▁мындай",-13.002459526062012],["▁historien",-13.002470970153809],["▁piccoli",-13.002480506896973],["▁ተጠ",-13.002486228942873],["lingu",-13.0024995803833],["▁gweithio",-13.002516746520996],["▁Weitere",-13.002522468566896],["▁작은",-13.00252628326416],["14)",-13.002533912658691],["АХ",-13.002568244934082],["ארבע",-13.002575874328612],["▁amerikai",-13.002585411071776],["▁המדינה",-13.002595901489258],["▁alates",-13.002613067626951],["▁facem",-13.002623558044434],["▁Navig",-13.00266933441162],["▁Pablo",-13.002677917480469],["▁Владата",-13.002723693847656],["دید",-13.00273323059082],["▁Kommun",-13.002750396728516],["▁Testament",-13.002750396728516],["▁streaming",-13.002764701843262],["shadi",-13.002771377563477],["AVE",-13.002777099609377],["diq",-13.00279426574707],["▁ကိုယ္",-13.002820014953612],["stia",-13.002840995788574],["▁grupper",-13.002845764160156],["ಕೇ",-13.002870559692385],["▁başarılı",-13.002887725830078],["Univers",-13.002894401550291],["علن",-13.002900123596191],["▁שבת",-13.002909660339355],["▁jött",-13.00294589996338],["保健",-13.002959251403809],["бург",-13.002962112426758],["YM",-13.00296688079834],["▁pequena",-13.002970695495604],["▁kampani",-13.002991676330566],["ическое",-13.003044128417969],["▁molte",-13.003066062927246],["▁سرطان",-13.003077507019045],["سىنى",-13.00307846069336]
,["▁bede",-13.00308322906494],["▁Кад",-13.003087043762209],["پرس",-13.00310230255127],["0%",-13.003114700317385],["一方",-13.00312328338623],["▁(27",-13.003138542175291],["misele",-13.003144264221191],["fh",-13.00315284729004],["ونکو",-13.003170013427734],["ացնում",-13.003207206726074],["yyət",-13.003220558166504],["▁APA",-13.003238677978516],["పం",-13.003252029418944],["Vis",-13.00326156616211],["▁taksi",-13.003277778625488],["чылык",-13.003366470336914],["▁verslo",-13.003381729125977],["▁faça",-13.003392219543455],["arbeid",-13.003402709960938],["прес",-13.003410339355469],["чэнне",-13.003439903259276],["▁анықта",-13.003451347351074],["▁Rossiya",-13.003461837768556],["監",-13.003467559814451],["▁lean",-13.003473281860352],["联盟",-13.00347900390625],["财务",-13.003499984741213],["▁Мінск",-13.003504753112791],["▁Верховн",-13.003509521484377],["тне",-13.003514289855955],["爲",-13.00352668762207],["폰",-13.003528594970703],["▁signo",-13.003541946411133],["ДПМНЕ",-13.003545761108398],["▁అవకాశం",-13.003545761108398],["▁варіант",-13.003546714782717],["▁dopiero",-13.00355052947998],["▁ਦੌਰਾਨ",-13.00355052947998],["▁horren",-13.003554344177246],["▁روم",-13.003592491149902],["▁sebarang",-13.003602981567385],["raksts",-13.003610610961914],["einander",-13.003631591796877],["▁soluzione",-13.003632545471191],["▁gehoor",-13.00364875793457],["υκ",-13.00365924835205],["▁радост",-13.003686904907228],["後來",-13.003687858581545],["مود",-13.003691673278809],["vissa",-13.00369358062744],["▁Prat",-13.003697395324709],["ਹੀ",-13.003703117370604],["vost",-13.003704071044922],["yog",-13.00371265411377],["దర",-13.003719329833984],["▁అయిన",-13.003748893737791],["ບ້ານ",-13.00376033782959],["▁paj",-13.003787994384766],["▁подобно",-13.00381851196289],["▁↳",-13.003820419311523],["▁koira",-13.003829956054688],["わからない",-13.003877639770508],["ਾਰ",-13.003880500793455],["rev",-13.003893852233888],["institut",-13.003928184509276],["▁గ్రామ",-13.003949165344238],["سٹر",-13.003951072692873],["енд",-13.00397491455078],["▁Eye",-13.00398063659668],["▁брой",-13.00398063659668],["plň",-13.003982543945312],["▁средство",-13.004003524780272],["vojen",-13.00403881072998],["▁zīm",-13.004045486450195],["▁tydelig",-13.00404930114746],["уулсан",-13.004074096679688],["▁സംഘ",-13.00409698486328],["retur",-13.004110336303713],["▁жет",-13.004110336303713],["▁โอ",-13.00411319732666],["▁española",-13.00416660308838],["▁лицето",-13.00418758392334],["▁сосед",-13.004193305969238],["elné",-13.004204750061035],["diğimiz",-13.004232406616213],["แก้",-13.004274368286133],["ከር",-13.004281997680664],["יח",-13.004292488098145],["روب",-13.004308700561523],["にお",-13.004314422607422],["▁മീ",-13.004317283630373],["▁nyata",-13.004374504089355],["▁kult",-13.004376411437988],["▁рода",-13.00441074371338],["▁בפ",-13.00441551208496],["▁награда",-13.00442123413086],["gués",-13.004425048828123],["XA",-13.004434585571287],["állás",-13.004448890686035],["▁katerem",-13.004473686218262],["uoju",-13.004480361938477],["ਕਾਂ",-13.004507064819336],["ພາບ",-13.004544258117676],["space",-13.004563331604004],["misesta",-13.004579544067385],["anum",-13.004594802856444],["▁maq",-13.004621505737305],["懷",-13.004621505737305],["▁વે",-13.00462818145752],["िद",-13.004631042480469],["▁guud",-13.004632949829102],["ቦች",-13.004634857177734],["▁autant",-13.004636764526367],["การใช้งาน",-13.004640579223633],["▁Dessa",-13.004642486572266],["flug",-13.004644393920898],["événement",-13.004678726196287],["▁menentukan",-13.004678726196287],["▁इसलिए",-13.004678726196287],["▁دانشجویان",-13.004685401916504],["▁ст
ановится",-13.004687309265137],["▁виробництва",-13.004694938659668],["▁తె",-13.00469970703125],["PEN",-13.004709243774414],["גז",-13.004719734191896],["RIN",-13.004724502563477],["▁Adresa",-13.004752159118652],["▁küla",-13.004754066467283],["oló",-13.004758834838867],["σιο",-13.0047607421875],["▁პრე",-13.004775047302246],["niam",-13.004794120788574],["▁split",-13.004802703857422],["▁131",-13.004837036132812],["▁Министерството",-13.004837036132812],["▁Duo",-13.004839897155762],["رض",-13.004840850830078],["▁Napoli",-13.004847526550291],["▁দেশের",-13.00486183166504],["нной",-13.004897117614746],["ottak",-13.004908561706545],["풍",-13.004913330078123],["▁ukuze",-13.004918098449709],["ിട്ടു",-13.004941940307615],["▁netto",-13.004955291748049],["是很",-13.00497055053711],["▁घेत",-13.004971504211426],["▁rúa",-13.00497817993164],["fatta",-13.004989624023438],["সর",-13.004990577697754],["▁udal",-13.004997253417969],["ילות",-13.005019187927246],["ೃತ",-13.005033493041992],["μένος",-13.005035400390623],["▁تنهن",-13.005067825317385],["▁Aste",-13.005093574523926],["▁hoàng",-13.005094528198242],["▁kategorii",-13.005097389221191],["▁پایه",-13.005098342895508],["▁perdre",-13.005120277404783],["한다는",-13.005135536193848],["▁கூற",-13.00514316558838],["idin",-13.005162239074709],["职工",-13.005163192749023],["ענע",-13.005171775817873],["άρα",-13.005173683166504],["▁devenit",-13.005182266235352],["▁responder",-13.005182266235352],["▁mell",-13.005189895629885],["eert",-13.005206108093262],["▁moes",-13.005244255065918],["ljan",-13.005254745483398],["▁veikia",-13.00526237487793],["▁jocuri",-13.00530242919922],["skolen",-13.005305290222168],["▁noqon",-13.005326271057127],["空气",-13.005329132080078],["▁nəticə",-13.005364418029783],["дис",-13.005365371704102],["երից",-13.0054292678833],["▁Борисов",-13.005452156066896],["▁februarie",-13.005453109741213],["▁ครั้ง",-13.005459785461426],["▁habitu",-13.005467414855955],["▁havi",-13.005478858947754],["梦想",-13.005484580993652],["נהג",-13.005501747131348],["▁Və",-13.005516052246094],["итет",-13.00551700592041],["KÖ",-13.005520820617676],["▁introduce",-13.00554084777832],["▁Rik",-13.00556755065918],["以上的",-13.005589485168455],["imiento",-13.005597114562988],["rät",-13.00561237335205],["▁seara",-13.00567626953125],["▁resources",-13.00568389892578],["▁иштер",-13.00568675994873],["jui",-13.005745887756348],["▁Hrvatski",-13.005762100219728],["legum",-13.005770683288574],["▁жүргөн",-13.00577449798584],["▁felles",-13.005785942077637],["▁مدار",-13.005807876586914],["ฝัน",-13.00581169128418],["▁dissabte",-13.005812644958496],["▁encourage",-13.005812644958496],["▁أعمال",-13.005820274353027],["▁Juventus",-13.005821228027344],["ininkas",-13.005830764770508],["▁Hein",-13.005831718444824],["▁hinanden",-13.005833625793455],["stofu",-13.005855560302734],["▁ក្រសួង",-13.005861282348633],["▁իրեն",-13.005871772766112],["▁Scan",-13.005878448486328],["▁Sedangkan",-13.005879402160645],["kār",-13.005908012390137],["▁fame",-13.005914688110352],["▁(2015)",-13.005928993225098],["▁стати",-13.005934715270996],["▁monter",-13.00594425201416],["▁அதிக",-13.005986213684082],["ђен",-13.006063461303713],["▁زیبایی",-13.006095886230469],["▁መጽሐፍ",-13.006101608276367],["▁아주",-13.00614070892334],["開心",-13.00614070892334],["▁түүний",-13.006146430969238],["▁testu",-13.006174087524414],["▁स्त्री",-13.006181716918944],["▁دائما",-13.00618839263916],["▁香港",-13.00618839263916],["เทพ",-13.00621223449707],["ҮҮ",-13.006237030029297],["▁BAN",-13.006261825561523],["▁tjeneste",-13.006265640258787],["▁Hún",-13.006270408630373],["▁annab
",-13.00627326965332],["▁formule",-13.0062894821167],["▁ന്നു",-13.006303787231444],["▁Здрав",-13.006312370300291],["▁преглед",-13.006317138671877],["żą",-13.006319999694824],["▁нему",-13.006325721740724],["▁முன்ன",-13.00633144378662],["▁पढ़ें",-13.006341934204102],["nacht",-13.006349563598633],["▁mæ",-13.00635814666748],["▁рано",-13.006376266479492],["▁Comune",-13.00639533996582],["untu",-13.006402015686035],["▁cyd",-13.006424903869627],["anlegg",-13.006430625915527],["▁changes",-13.006431579589844],["ministra",-13.006433486938477],["▁मूल",-13.006441116333008],["랜드",-13.006450653076172],["▁savez",-13.006467819213867],["שלום",-13.00648021697998],["▁døde",-13.006482124328612],["▁私は",-13.00650119781494],["▁පොලිස්",-13.006505966186523],["▁៦",-13.006507873535156],["▁Suk",-13.006514549255373],["dienį",-13.006518363952637],["raken",-13.006521224975586],["эль",-13.0065279006958],["▁lasta",-13.006540298461914],["▁покаже",-13.006552696228027],["▁egentligen",-13.006574630737305],["▁当",-13.006574630737305],["▁부산",-13.006598472595217],["▁పీ",-13.006627082824709],["ทําการ",-13.006631851196287],["▁پاتې",-13.006633758544922],["▁මිනිස්සු",-13.006636619567873],["▁Tanah",-13.006640434265137],["SES",-13.00664234161377],["▁швидко",-13.006647109985352],["▁କୋଟି",-13.006656646728516],["ໄດ້ຮັບ",-13.006669044494627],["打算",-13.006669044494627],["вара",-13.006670951843262],["▁национални",-13.006678581237791],["▁gira",-13.00668716430664],["▁pastor",-13.00668716430664],["▁komik",-13.006710052490234],["ריה",-13.00673007965088],["▁പുറ",-13.00673770904541],["xun",-13.00674533843994],["▁styre",-13.006760597229004],["名前",-13.006791114807127],["рена",-13.006806373596191],["たち",-13.006811141967772],["▁rí",-13.006817817687988],["କର",-13.006829261779783],["▁ஜெய",-13.006854057312012],["▁எல்லா",-13.00687026977539],["收益",-13.006875038146973],["▁최고",-13.006916999816896],["Pas",-13.006919860839844],["穴",-13.006930351257324],["รู้จัก",-13.006943702697754],["▁kemungkinan",-13.00694751739502],["▁sredstva",-13.00694751739502],["▁tatsächlich",-13.00694751739502],["▁Гэхдээ",-13.00694751739502],["▁খালেদা",-13.00694751739502],["жина",-13.006950378417969],["▁ሀገር",-13.00695514678955],["▁Бат",-13.00696849822998],["▁قصد",-13.006983757019045],["▁نهاية",-13.007004737854004],["▁سرحد",-13.007013320922852],["▁населен",-13.007022857666016],["▁байлаа",-13.007025718688965],["▁питања",-13.00702667236328],["▁Steuer",-13.007064819335938],["기는",-13.007068634033203],["▁dewasa",-13.007071495056152],["шив",-13.007078170776367],["▁czasem",-13.00709342956543],["▁HAR",-13.007112503051758],["▁sait",-13.007126808166504],["▁Objekt",-13.00715160369873],["မင်း",-13.007184028625488],["▁కాని",-13.007195472717283],["anten",-13.007205963134766],["▁приклад",-13.007211685180664],["นก",-13.007214546203612],["पथ",-13.00721836090088],["ρίου",-13.007226943969728],["▁крајот",-13.007235527038574],["▁buwan",-13.007244110107422],["▁produkto",-13.007269859313965],["▁angla",-13.00729274749756],["▁apsi",-13.007311820983888],["chiq",-13.007378578186035],["▁silahkan",-13.007410049438477],["turas",-13.007427215576172],["inhas",-13.007440567016602],["▁webb",-13.007448196411133],["▁hacerlo",-13.007450103759766],["▁Ян",-13.007452011108398],["▁enlace",-13.007457733154297],["▁возраст",-13.007465362548828],["bold",-13.00746726989746],["▁gecə",-13.007468223571776],["▁Κατά",-13.007491111755373],["▁mrt",-13.007498741149902],["▁reme",-13.007533073425291],["▁κοντά",-13.007540702819824],["▁dienos",-13.007577896118164],["▁scorso",-13.007585525512695],["▁וויל",-13.00759983062744],["▁comentaris",-13.00
7622718811035],["▁evenimente",-13.007633209228516],["▁diesel",-13.007680892944336],["▁változat",-13.007682800292969],["▁sanc",-13.007683753967283],["▁Küche",-13.007696151733398],["▁ეგ",-13.007698059082031],["ৰি",-13.007705688476562],["▁น้ํา",-13.007713317871094],["ፓ",-13.007718086242676],["▁korist",-13.007720947265623],["▁havis",-13.007729530334473],["▁činnosti",-13.007733345031738],["ឡើង",-13.007747650146484],["finn",-13.00775909423828],["ישן",-13.007771492004396],["▁೨",-13.00778579711914],["の中",-13.00778865814209],["MF",-13.007821083068848],["▁Alat",-13.007843017578123],["kunst",-13.007858276367188],["▁fizik",-13.007868766784668],["gő",-13.007875442504885],["▁considerar",-13.007875442504885],["各大",-13.007894515991213],["gier",-13.007933616638184],["▁ਓ",-13.007989883422852],["郎",-13.008015632629396],["▁Atau",-13.00803565979004],["▁місто",-13.008049011230469],["ουσι",-13.008062362670898],["ိုင်း",-13.00806713104248],["▁ಮೊ",-13.008068084716797],["▁сорт",-13.008074760437012],["▁rajta",-13.008078575134276],["ເສດຖະກິດ",-13.00808334350586],["▁заходів",-13.008084297180176],["▁प्रविधि",-13.008084297180176],["▁ក្រ",-13.008095741271973],["▁زائد",-13.008101463317873],["▁ብለው",-13.008115768432615],["ньої",-13.008139610290527],["▁stipri",-13.008139610290527],["▁ಶಿವ",-13.008142471313477],["▁выбрать",-13.008169174194336],["▁συνα",-13.0082426071167],["▁فقال",-13.00825309753418],["▁antar",-13.008268356323242],["απ",-13.008293151855469],["▁берет",-13.00830078125],["ময়",-13.00832462310791],["▁doby",-13.008363723754885],["shen",-13.0083646774292],["SAT",-13.008370399475098],["▁risultato",-13.008378982543944],["▁alltaf",-13.008392333984377],["▁jurul",-13.008407592773438],["셨",-13.008411407470703],["▁ласка",-13.00841236114502],["▁weken",-13.008413314819336],["شكر",-13.008420944213867],["chet",-13.008426666259766],["ھە",-13.008450508117676],["▁Hayat",-13.008466720581056],["vať",-13.008488655090332],["▁zai",-13.008508682250977],["▁სამუშაო",-13.008548736572266],["▁hennar",-13.008564949035645],["ירת",-13.00857639312744],["▁Эд",-13.00858211517334],["▁suoraan",-13.008589744567873],["१०",-13.00868320465088],["ഞ",-13.008722305297852],["▁ຊຶ່ງ",-13.008748054504396],["yhtiö",-13.008761405944824],["名字",-13.008764266967772],["▁totaal",-13.008771896362305],["裂",-13.00877285003662],["著名",-13.008829116821287],["▁власник",-13.008853912353516],["ციის",-13.008880615234377],["כנה",-13.00892162322998],["serva",-13.008925437927246],["▁ظل",-13.008931159973145],["भे",-13.008949279785156],["▁установи",-13.008965492248535],["▁Слав",-13.0089693069458],["UNI",-13.009042739868164],["▁beda",-13.00904369354248],["▁ګو",-13.009055137634276],["เที่ยว",-13.00906467437744],["▁ያሉ",-13.00912094116211],["▁lidé",-13.009125709533691],["一项",-13.009159088134766],["▁samuti",-13.009164810180664],["tempo",-13.00918674468994],["eerida",-13.009197235107422],["▁successful",-13.009202003479004],["▁պատասխան",-13.0092134475708],["▁հազար",-13.009224891662598],["▁حتما",-13.009224891662598],["▁Политика",-13.009230613708496],["▁пікір",-13.009230613708496],["▁mengatasi",-13.009239196777344],["▁রি",-13.009244918823242],["▁NT",-13.009257316589355],["▁colla",-13.00926685333252],["▁역시",-13.00927448272705],["▁Norway",-13.009279251098633],["▁desea",-13.009285926818848],["▁Nec",-13.009289741516112],["satu",-13.009296417236328],["▁History",-13.009306907653809],["▁Rub",-13.009309768676758],["тул",-13.009315490722656],["▁trabaja",-13.009322166442873],["▁בזמן",-13.009325981140137],["▁HERE",-13.009331703186035],["▁первой",-13.009332656860352],["▁ሰላም",-13.009339332580566],["▁окви
ру",-13.00935173034668],["▁svega",-13.009360313415527],["ывается",-13.009363174438477],["▁Kamar",-13.009377479553224],["▁Deniz",-13.009390830993652],["▁بیمه",-13.009390830993652],["▁लगी",-13.009392738342283],["ėsi",-13.009394645690918],["▁Evo",-13.00940227508545],["mén",-13.009410858154297],["▁انگلیسی",-13.009418487548828],["▁gadiem",-13.009435653686523],["▁Dienst",-13.00944709777832],["▁понял",-13.009477615356444],["▁முக்கிய",-13.009483337402344],["▁ქალაქ",-13.009489059448242],["▁Robin",-13.009509086608888],["▁bernama",-13.009525299072266],["▁העסק",-13.009527206420898],["することが",-13.009557723999023],["▁شوه",-13.009562492370604],["▁postoje",-13.009589195251465],["လ္",-13.009602546691896],["▁bade",-13.00961971282959],["ರಂಗ",-13.00963020324707],["▁kuwi",-13.00963306427002],["▁toplum",-13.009661674499512],["▁Zar",-13.009663581848145],["ήθηκαν",-13.00966453552246],["▁Industri",-13.009711265563965],["▁yanaşı",-13.009718894958496],["外国",-13.009736061096191],["džiu",-13.009754180908203],["▁ಟಿ",-13.009756088256836],["▁কেন",-13.009770393371582],["▁zabieg",-13.009772300720217],["▁apoyo",-13.009794235229492],["كشف",-13.009809494018556],["▁Năm",-13.009830474853516],["▁spu",-13.009842872619627],["ຮັບ",-13.00985050201416],["▁lennu",-13.00992202758789],["ेगा",-13.009927749633787],["ИЙ",-13.009931564331056],["▁13:00",-13.00995635986328],["▁mbele",-13.009960174560549],["▁прва",-13.009966850280762],["κες",-13.009987831115724],["▁patriot",-13.009989738464355],["ម្នាក់",-13.01001262664795],["alli",-13.010017395019531],["őség",-13.010021209716797],["▁Asocia",-13.010025024414062],["ithre",-13.010027885437012],["טרי",-13.010027885437012],["▁pourra",-13.010052680969238],["▁Kolej",-13.010054588317873],["▁ਖਾ",-13.010066986083984],["▁wekî",-13.010136604309082],["hyl",-13.010180473327637],["▁lako",-13.01020622253418],["▁Krea",-13.010229110717772],["hæ",-13.010231971740724],["▁šie",-13.010236740112305],["arde",-13.010257720947266],["gyel",-13.010271072387695],["gaben",-13.010279655456545],["▁hài",-13.010286331176758],["▁danış",-13.010306358337402],["▁świet",-13.0103120803833],["泊",-13.010320663452148],["澳大利亚",-13.010329246520996],["コメント",-13.010350227355955],["ေနာ္",-13.01035213470459],["▁თამაშები",-13.010357856750488],["▁ictimai",-13.010358810424805],["▁cəlb",-13.010360717773438],["▁cấu",-13.010360717773438],["▁sicrhau",-13.010360717773438],["▁మొత్తం",-13.010360717773438],["▁ലഭിച്ച",-13.010361671447754],["fär",-13.01036262512207],["▁ಪಡೆಯಿರಿ",-13.01036262512207],["▁Кодекс",-13.010363578796388],["▁kosa",-13.010383605957031],["▁proizvoda",-13.010391235351562],["อีกครั้ง",-13.010395050048828],["▁lækker",-13.010398864746094],["blok",-13.01040744781494],["的支持",-13.010428428649902],["▁rám",-13.010432243347168],["ларының",-13.010468482971191],["▁тез",-13.010482788085938],["gū",-13.010490417480469],["▁ലോ",-13.010515213012695],["▁hurtig",-13.01052188873291],["ňou",-13.010530471801758],["▁edecek",-13.010546684265137],["▁deseja",-13.010555267333984],["ESTI",-13.01056957244873],["尋",-13.01060962677002],["မရွိ",-13.010622024536133],["індегі",-13.010628700256348],["nām",-13.010644912719728],["▁dyn",-13.010669708251951],["اژ",-13.010683059692385],["ได้เลย",-13.010708808898926],["▁Rīgā",-13.010726928710938],["▁קלי",-13.010762214660645],["▁presi",-13.010775566101074],["▁ھەم",-13.010799407958984],["byggnad",-13.01081085205078],["▁produksi",-13.010839462280272],["▁herkes",-13.010851860046388],["▁למי",-13.010890007019045],["▁کی۔",-13.010903358459473],["▁comprendre",-13.010909080505373],["wachsen",-13.010937690734863],["عز",-13.010939598083
496],["ಂಡಿ",-13.010957717895508],["цоў",-13.010972023010254],["чая",-13.010979652404783],["ร้อง",-13.010984420776367],["εδ",-13.010986328125],["user",-13.011029243469238],["▁оригинал",-13.011048316955566],["▁строк",-13.011062622070312],["بری",-13.011069297790527],["▁defend",-13.011073112487791],["lerdir",-13.01107692718506],["▁yarı",-13.011090278625488],["▁Western",-13.011098861694336],["vård",-13.011109352111816],["▁Lys",-13.011138916015623],["▁lucha",-13.0111665725708],["▁cykel",-13.011183738708496],["▁razlog",-13.011184692382812],["enean",-13.011208534240724],["ขึ้นมา",-13.011211395263672],["▁plau",-13.011238098144531],["ùi",-13.011269569396973],["টু",-13.011285781860352],["▁svého",-13.011287689208984],["▁کالا",-13.011290550231934],["▁haqda",-13.011297225952148],["שפע",-13.01129913330078],["мысл",-13.011303901672363],["аци",-13.011317253112791],["▁2006,",-13.011334419250488],["▁vett",-13.011340141296388],["▁Suka",-13.011357307434082],["lebih",-13.01136302947998],["▁antigo",-13.011384963989258],["▁praegu",-13.011385917663574],["njuje",-13.01138687133789],["▁dome",-13.011435508728027],["▁общо",-13.011448860168455],["wissel",-13.011449813842772],["IJE",-13.01145839691162],["宝宝",-13.01148509979248],["NV",-13.011487007141112],["໌",-13.011499404907228],["▁lắng",-13.011500358581545],["▁மாநில",-13.011500358581545],["▁ವಿಮರ್ಶೆ",-13.011500358581545],["▁pasaulē",-13.01150131225586],["▁Михаил",-13.011503219604492],["▁Fahrzeug",-13.011504173278809],["▁ავტო",-13.011507987976074],["▁történő",-13.011519432067873],["▁1300",-13.01152229309082],["它是",-13.011531829833984],["teeseen",-13.011551856994627],["▁Barang",-13.011557579040527],["▁свято",-13.01155948638916],["▁atelier",-13.011564254760742],["mput",-13.011574745178224],["▁سند",-13.01159381866455],["▁alda",-13.0115966796875],["▁موفقیت",-13.011600494384766],["▁ಮಾಡಲು",-13.01161003112793],["ହେ",-13.011615753173828],["nipun",-13.011619567871094],["▁посилання",-13.01162338256836],["zb",-13.011625289916992],["Nie",-13.01163387298584],["riye",-13.011635780334473],["žka",-13.011652946472168],["▁نیروهای",-13.011652946472168],["▁parku",-13.011672973632812],["▁շահ",-13.01168155670166],["bres",-13.011691093444824],["оба",-13.011720657348633],["وج",-13.01172161102295],["ANGAN",-13.01172924041748],["▁pushtet",-13.011730194091797],["▁katkı",-13.011741638183594],["IYA",-13.01174545288086],["本地",-13.011748313903809],["▁liiku",-13.011780738830566],["iems",-13.0117826461792],["gica",-13.011788368225098],["усь",-13.011789321899414],["ସେ",-13.011831283569336],["▁environment",-13.011838912963867],["▁دیا۔",-13.01184368133545],["333",-13.011870384216309],["лач",-13.011890411376951],["關注",-13.011893272399902],["▁partidos",-13.01189422607422],["▁quieres",-13.011895179748535],["▁uprave",-13.011899948120115],["▁contexto",-13.011917114257812],["▁1905",-13.01193618774414],["▁одлучи",-13.011964797973633],["kies",-13.011969566345217],["▁geral",-13.011975288391112],["▁capi",-13.01198387145996],["득",-13.011991500854492],["▁ဟ",-13.012073516845703],["لارغا",-13.0120849609375],["ैव",-13.012086868286133],["▁Since",-13.012096405029297],["▁բն",-13.012106895446776],["ရွာ",-13.012110710144045],["▁Herriko",-13.012133598327637],["อยู่ที่",-13.012149810791016],["▁وصول",-13.012155532836914],["▁försök",-13.012163162231444],["手术",-13.012178421020508],["▁산업",-13.012184143066406],["▁harap",-13.012206077575684],["▁fug",-13.012212753295898],["డో",-13.012216567993164],["cumque",-13.012227058410645],["histori",-13.012232780456545],["ବ୍",-13.012256622314451],["acağını",-13.01225757598877],["▁примерно",-13.0
1229763031006],["perä",-13.012316703796388],["▁ويب",-13.01234531402588],["ПП",-13.012380599975586],["▁gaf",-13.012398719787598],["▁२३",-13.012410163879396],["▁rating",-13.012451171875],["▁səhifə",-13.012476921081545],["▁อาหาร",-13.01253604888916],["drar",-13.012542724609377],["ulmuş",-13.012553215026855],["▁발전",-13.012553215026855],["▁addition",-13.01255702972412],["ettek",-13.012557983398438],["▁дозволи",-13.012572288513184],["▁ആര",-13.0125732421875],["撤",-13.012575149536133],["▁ожи",-13.01258373260498],["ЛЫ",-13.012598991394045],["▁pretek",-13.012606620788574],["ራቸው",-13.01262092590332],["▁Gian",-13.012621879577637],["μετρ",-13.012627601623535],["▁osaa",-13.012627601623535],["▁spændende",-13.01264190673828],["▁ਕਮੇਟੀ",-13.01264190673828],["▁ჰქონდა",-13.01264190673828],["▁ವೇಳೆ",-13.01264762878418],["▁ಹೆಚ್ಚಿನ",-13.01264762878418],["▁veu",-13.012649536132812],["▁хүрээнд",-13.012652397155762],["κυ",-13.012664794921877],["▁ਭਾਈ",-13.012667655944824],["▁isaga",-13.012682914733888],["▁общество",-13.012683868408203],["▁Malmö",-13.012689590454102],["दर्श",-13.012699127197266],["▁Community",-13.012712478637695],["ЛУ",-13.012728691101074],["▁ഞാന",-13.01273250579834],["▁целью",-13.012738227844238],["▁možná",-13.012741088867188],["▁حملے",-13.012746810913086],["▁Iraqê",-13.012747764587402],["మార్చు",-13.012754440307615],["μών",-13.012757301330566],["ضح",-13.012765884399414],["seen",-13.012770652770996],["▁opět",-13.012788772583008],["▁siiski",-13.012831687927246],["▁음식",-13.012834548950195],["▁Alice",-13.01284408569336],["▁Pho",-13.012850761413574],["▁alcun",-13.012852668762209],["▁concepto",-13.01288890838623],["يران",-13.012944221496582],["融资",-13.0129976272583],["ಗಿನ",-13.013011932373049],["楽しみ",-13.013026237487791],["ເຫັນ",-13.01305103302002],["▁chora",-13.013076782226562],["▁상황",-13.013090133666992],["▁darb",-13.013118743896484],["åk",-13.013145446777344],["的信息",-13.013148307800291],["عى",-13.013164520263672],["ætt",-13.013169288635254],["сцю",-13.013169288635254],["ОП",-13.013188362121582],["▁հետեւ",-13.013193130493164],["▁корпус",-13.013208389282228],["weer",-13.01321792602539],["▁наслед",-13.01322078704834],["▁Nang",-13.013248443603516],["▁চি",-13.013253211975098],["▁plati",-13.013266563415527],["▁ග්",-13.01327419281006],["رخ",-13.013315200805664],["▁ecce",-13.013333320617676],["▁гарантира",-13.013348579406738],["▁תע",-13.013349533081056],["▁страв",-13.013371467590332],["განი",-13.013386726379396],["とする",-13.013391494750977],["▁ਸੋ",-13.013395309448242],["▁Crist",-13.013401985168455],["EGO",-13.013440132141112],["▁darbības",-13.013458251953123],["▁ийм",-13.013484954833984],["化学",-13.013487815856934],["ostí",-13.01348876953125],["טייל",-13.013495445251465],["▁knjiž",-13.013497352600098],["ацији",-13.01353931427002],["▁സോ",-13.013553619384766],["▁힘",-13.013583183288574],["▁Marx",-13.0136079788208],["▁специал",-13.0136137008667],["▁нэм",-13.013614654541016],["usuari",-13.013643264770508],["▁Labai",-13.01364803314209],["іну",-13.013664245605469],["darbi",-13.013677597045898],["▁영어",-13.013689041137695],["▁можемо",-13.013711929321287],["šos",-13.013713836669922],["బో",-13.01371955871582],["▁features",-13.013741493225098],["โฆษณา",-13.013784408569336],["▁contrôle",-13.013785362243652],["▁ಪ್ರೊಫೈಲ್",-13.013785362243652],["▁Сонымен",-13.013789176940918],["▁сфері",-13.013789176940918],["▁BT",-13.013792991638184],["իզ",-13.013798713684082],["▁akizungumza",-13.013799667358398],["▁مرسته",-13.013805389404297],["▁αγων",-13.013845443725586],["hanan",-13.013866424560549],["▁diabet",-13.013866424560549],["กล่อง",-1
3.013876914978027],["အတူ",-13.013880729675291],["▁యూ",-13.013886451721191],["▁зобов",-13.013907432556152],["แสน",-13.013964653015137],["corrupt",-13.0139799118042],["დრო",-13.014008522033691],["joh",-13.01405429840088],["▁MV",-13.01406192779541],["▁exactly",-13.014105796813965],["▁악",-13.014114379882812],["Han",-13.014159202575684],["▁Otro",-13.014162063598633],["▁belo",-13.01420783996582],["▁dic",-13.01420783996582],["labas",-13.01421070098877],["מני",-13.014241218566896],["կու",-13.01425552368164],["kész",-13.014259338378906],["▁seaduse",-13.014259338378906],["▁održava",-13.01426124572754],["▁СУ",-13.014273643493652],["▁річ",-13.014278411865234],["ାର",-13.01430320739746],["сво",-13.01433277130127],["▁آهيان",-13.01436996459961],["▁cukr",-13.014386177062988],["νης",-13.01438808441162],["成績",-13.014405250549316],["halte",-13.014447212219238],["▁dû",-13.01445484161377],["▁لهذا",-13.01445484161377],["▁свободно",-13.014476776123049],["▁lettere",-13.014477729797363],["▁вол",-13.014483451843262],["prove",-13.014493942260742],["▁ужо",-13.014519691467283],["▁urbe",-13.014521598815918],["cząc",-13.014535903930664],["▁Restauranter",-13.014544486999512],["ettel",-13.01454734802246],["▁ประกาศ",-13.014570236206056],["▁schade",-13.014575958251951],["▁batal",-13.014581680297852],["▁Tok",-13.014607429504396],["▁iman",-13.014639854431152],["▁projets",-13.014652252197266],["esprit",-13.01466178894043],["▁కోట్ల",-13.014681816101074],["▁bawa",-13.014699935913086],["առն",-13.01472282409668],["▁строи",-13.014732360839844],["уре",-13.014742851257324],["▁vriende",-13.014748573303224],["▁restri",-13.014759063720703],["▁2.4",-13.014765739440918],["▁vodu",-13.014775276184082],["▁חול",-13.014793395996094],["ΑΡ",-13.014805793762209],["▁task",-13.01486873626709],["擺",-13.014869689941406],["сток",-13.014877319335938],["оби",-13.014888763427734],["ራል",-13.014893531799316],["missa",-13.014897346496582],["▁velké",-13.014897346496582],["lca",-13.014899253845217],["登记",-13.014899253845217],["▁riu",-13.014900207519531],["‧",-13.014922142028809],["ानां",-13.01492404937744],["ục",-13.014963150024414],["▁المسلمين",-13.014981269836426],["ร้าย",-13.015013694763184],["▁ਦੁ",-13.015019416809082],["вец",-13.015066146850586],["ຕົ້ນ",-13.0150728225708],["▁Zakona",-13.015090942382812],["haja",-13.015119552612305],["dapat",-13.01512336730957],["▁arzu",-13.015145301818848],["รูปภาพ",-13.015148162841797],["▁przew",-13.015149116516112],["▁Boss",-13.015153884887695],["▁acces",-13.0152006149292],["▁АНУ",-13.015243530273438],["▁manda",-13.015250205993652],["▁ჰო",-13.015292167663574],["▁направите",-13.01529312133789],["ंगे",-13.015308380126951],["නිය",-13.015310287475586],["▁animo",-13.015315055847168],["▁저는",-13.015318870544434],["вил",-13.015334129333496],["ĝis",-13.015365600585938],["찬",-13.01537799835205],["olio",-13.01546859741211],["▁endale",-13.015469551086426],["ସନ",-13.015483856201172],["▁besoins",-13.015510559082031],["స్తే",-13.015542984008787],["担当",-13.015580177307127],["വിന്",-13.015588760375977],["ņas",-13.015605926513672],["这位",-13.015634536743164],["ayê",-13.015647888183594],["тний",-13.01565647125244],["▁Viime",-13.01565647125244],["▁października",-13.01565647125244],["ению",-13.015665054321287],["vem",-13.01567554473877],["▁екип",-13.01569366455078],["▁kosti",-13.01569652557373],["▁lips",-13.015702247619627],["▁نائب",-13.015717506408691],["▁представя",-13.01572322845459],["ሉ።",-13.01572608947754],["▁hilja",-13.01578426361084],["έα",-13.015799522399902],["▁1-1",-13.015801429748535],["ফে",-13.015812873840332],["▁prv",-13.01582622
5280762],["instal",-13.015830993652344],["▁vile",-13.01584243774414],["▁کڻي",-13.015851020812988],["计算",-13.01590633392334],["gesi",-13.015911102294922],["Sun",-13.015913963317873],["▁іде",-13.01591968536377],["アル",-13.0159273147583],["طلاق",-13.01593780517578],["▁használat",-13.015995025634766],["▁έργο",-13.016047477722168],["▁주소",-13.0160551071167],["QQ",-13.016057968139648],["▁аргумент",-13.016075134277344],["▁fiets",-13.01607608795166],["▁ජනපති",-13.016077041625977],["▁heidän",-13.016080856323242],["▁Kuwa",-13.016098022460938],["▁začína",-13.01609992980957],["נם",-13.016100883483888],["病毒",-13.016108512878418],["▁strah",-13.01611042022705],["▁потоа",-13.016114234924316],["▁storitve",-13.01614475250244],["▁краще",-13.01614761352539],["▁scéal",-13.016159057617188],["研發",-13.016191482543944],["▁navber",-13.01619815826416],["▁كى",-13.016201972961426],["▁buộc",-13.016233444213867],["▁HAPA",-13.016267776489258],["▁demokrasi",-13.01626968383789],["▁Kaz",-13.016292572021484],["▁Кап",-13.016298294067385],["▁ແຫ່ງ",-13.016300201416016],["▁گول",-13.016307830810549],["▁Jonas",-13.016319274902344],["▁Brian",-13.016329765319824],["ಪಡಿಸ",-13.016334533691406],["גיה",-13.01633644104004],["mud",-13.016357421875],["▁natureza",-13.016358375549316],["ห้องพัก",-13.016362190246582],["לך",-13.016364097595217],["▁السنة",-13.01636791229248],["▁հրապարակ",-13.016389846801758],["▁minori",-13.016390800476074],["▁प्या",-13.016392707824709],["▁jääb",-13.0164213180542],["上午",-13.016430854797363],["actie",-13.016441345214844],["ത്തിലും",-13.016448974609377],["▁nejen",-13.016459465026855],["jusi",-13.016464233398438],["▁Chia",-13.016477584838867],["使命",-13.0164794921875],["▁destacar",-13.016489028930664],["zā",-13.016494750976562],["enco",-13.01650619506836],["▁kapott",-13.016508102416992],["▁נס",-13.016546249389648],["▁हुनु",-13.016557693481444],["dní",-13.016571998596191],["δή",-13.016571998596191],["▁construi",-13.016621589660645],["▁observ",-13.016640663146973],["▁اهم",-13.016645431518556],["miä",-13.016647338867188],["rdim",-13.016648292541504],["гър",-13.01665496826172],["نقل",-13.01665496826172],["▁registre",-13.01665496826172],["▁ગામ",-13.016674041748049],["tatt",-13.016680717468262],["▁Interes",-13.016718864440918],["▁Robot",-13.01673412322998],["▁Rating",-13.016743659973145],["に行く",-13.016743659973145],["ნდა",-13.016769409179688],["▁Xeral",-13.016776084899902],["▁beidh",-13.016776084899902],["▁feste",-13.016783714294434],["матов",-13.016789436340332],["vc",-13.016790390014648],["tâ",-13.016805648803713],["▁mõne",-13.016815185546877],["▁rif",-13.01684856414795],["その後",-13.016864776611328],["כמה",-13.01686668395996],["▁ለአ",-13.016901016235352],["▁મેં",-13.016934394836426],["▁죽",-13.01699447631836],["▁gəl",-13.017023086547852],["رها",-13.0170316696167],["▁farve",-13.017034530639648],["上下",-13.017101287841797],["เล่า",-13.017112731933594],["▁animais",-13.017118453979492],["▁Ен",-13.017130851745604],["▁fart",-13.017152786254885],["舰",-13.017155647277832],["ittää",-13.017167091369627],["蜜",-13.017184257507324],["ღო",-13.017207145690918],["นิยม",-13.017215728759766],["▁целия",-13.017216682434082],["척",-13.017218589782717],["▁nadat",-13.017219543457031],["‟",-13.017221450805664],["▁məşğul",-13.017221450805664],["▁μάλιστα",-13.017221450805664],["▁Здесь",-13.017221450805664],["▁людьми",-13.017221450805664],["▁hãng",-13.017224311828612],["▁શરૂ",-13.01722526550293],["▁yksityis",-13.017245292663574],["▁дата",-13.01724624633789],["▁Több",-13.017258644104004],["▁врска",-13.017287254333496],["▁weboldal",-13.017297744750977],["े
ंट",-13.01729965209961],["▁добива",-13.017314910888672],["▁τελε",-13.017319679260254],["▁rode",-13.01732349395752],["aging",-13.017346382141112],["▁emigr",-13.017411231994627],["▁sē",-13.017419815063477],["여행",-13.017420768737791],["קדם",-13.017425537109377],["几天",-13.017426490783691],["نچ",-13.017433166503906],["▁பார்த்து",-13.01744556427002],["двиг",-13.017473220825195],["सम",-13.017486572265623],["▁vermiş",-13.01749324798584],["30%",-13.017496109008787],["▁Bör",-13.017505645751951],["▁එකම",-13.017531394958496],["ଶୁ",-13.017539024353027],["▁situatie",-13.017547607421877],["ىك",-13.01755428314209],["ošanas",-13.017560005187988],["▁vegan",-13.01756191253662],["▁repar",-13.017587661743164],["▁Somalia",-13.017598152160645],["▁mami",-13.017605781555176],["ραφ",-13.017613410949709],["ỗ",-13.017621040344238],["▁حكم",-13.017654418945312],["▁тощо",-13.017663955688477],["▁naudoja",-13.017669677734377],["▁απαιτ",-13.017669677734377],["▁1931",-13.017699241638184],["▁طريقة",-13.01773452758789],["ОС",-13.0177583694458],["▁Bibli",-13.017815589904783],["▁पुरा",-13.017820358276367],["▁උඩ",-13.017827033996582],["ходять",-13.017829895019531],["ppy",-13.017855644226074],["▁Eusko",-13.017866134643556],["▁filial",-13.01792335510254],["▁формул",-13.017925262451172],["ALL",-13.01799774169922],["▁vaikea",-13.018011093139648],["ناس",-13.018024444580078],["▁APP",-13.01804542541504],["eista",-13.018049240112305],["siyon",-13.01805019378662],["▁Suriya",-13.01806354522705],["ገኛ",-13.018064498901367],["▁поступ",-13.01807689666748],["рэн",-13.018105506896973],["▁tegevus",-13.018115997314451],["Tag",-13.018128395080566],["▁jier",-13.01813793182373],["▁bryd",-13.01818561553955],["▁kompa",-13.018208503723145],["월드",-13.01822566986084],["▁ምርጫ",-13.018232345581056],["▁toca",-13.01823616027832],["美女",-13.018270492553713],["hund",-13.018310546875],["gunt",-13.018315315246582],["TED",-13.018327713012695],["▁syarat",-13.018364906311035],["▁ଦେଖି",-13.018366813659668],["▁Größe",-13.018369674682615],["▁Unterstützung",-13.018369674682615],["▁шаруашылығы",-13.018369674682615],["▁सलमान",-13.018369674682615],["▁Parlimen",-13.018370628356934],["▁sữa",-13.018373489379885],["▁memahami",-13.0183744430542],["▁অর্থ",-13.018377304077148],["▁બહાર",-13.018378257751465],["▁myy",-13.018380165100098],["▁saben",-13.018392562866213],["ไข่",-13.018403053283691],["▁SHBA",-13.018407821655272],["▁Кр",-13.018424034118652],["▁ciągu",-13.018427848815918],["▁regio",-13.018431663513184],["▁Então",-13.018450736999512],["ační",-13.018454551696776],["▁urmare",-13.01848602294922],["κε",-13.018495559692385],["▁gobolka",-13.018512725830078],["▁הכר",-13.01852798461914],["merkki",-13.018548965454102],["១៦",-13.018548965454102],["▁catro",-13.018549919128418],["▁coût",-13.018550872802734],["▁Lisaks",-13.018583297729492],["ixen",-13.018596649169922],["tsch",-13.018598556518556],["▁delu",-13.018599510192873],["affaire",-13.01860809326172],["자를",-13.018611907958984],["▁үйлчилгээ",-13.018643379211426],["offre",-13.01870059967041],["▁וזה",-13.01870059967041],["ksy",-13.018710136413574],["▁писал",-13.01871109008789],["▁eto",-13.018712997436523],["▁پھ",-13.01876449584961],["▁Lago",-13.018776893615724],["▁Bunga",-13.018787384033203],["teilung",-13.018800735473633],["▁Сар",-13.018810272216797],["ريف",-13.018835067749023],["деле",-13.018840789794922],["しましょう",-13.018842697143556],["▁Segui",-13.018845558166504],["ируем",-13.01884937286377],["▁Verde",-13.018866539001465],["▁категории",-13.01889705657959],["▁DAP",-13.018901824951172],["▁पुर",-13.018911361694336],["رسی",-13.0189247
13134766],["互相",-13.018939018249512],["▁flokk",-13.018949508666992],["oasă",-13.018959999084473],["▁Κά",-13.018980026245115],["berto",-13.018987655639648],["ঞ্জ",-13.018997192382812],["ombre",-13.019001007080078],["▁rebus",-13.019022941589355],["થા",-13.019027709960938],["▁koo",-13.019049644470217],["մանը",-13.019050598144531],["▁prihod",-13.019054412841797],["▁ümumi",-13.019068717956545],["▁vijf",-13.019073486328123],["▁байхгүй",-13.019079208374023],["▁dicitur",-13.019099235534668],["▁årene",-13.019159317016602],["▁કર્યા",-13.019163131713867],["дача",-13.01919937133789],["տեղ",-13.019207000732422],["▁Jai",-13.019208908081056],["pix",-13.019259452819824],["▁hluti",-13.019281387329102],["人工",-13.01928424835205],["▁нормально",-13.01932144165039],["▁environ",-13.01932430267334],["/02/",-13.0193452835083],["čeno",-13.01934814453125],["▁послуги",-13.019360542297363],["pisać",-13.019372940063477],["guin",-13.01939582824707],["▁tó",-13.01939582824707],["介",-13.019399642944336],["ātā",-13.019424438476562],["ገቡ",-13.019431114196776],["▁Таму",-13.019438743591309],["ไปยัง",-13.019450187683104],["▁начало",-13.019460678100586],["脈",-13.019460678100586],["znam",-13.019469261169434],["▁başçısı",-13.019476890563965],["▁kapel",-13.01949691772461],["▁uutta",-13.019498825073242],["▁близки",-13.01951026916504],["▁bedeutet",-13.019518852233888],["▁ಟಾಪ್",-13.019518852233888],["▁pejabat",-13.019519805908203],["▁ਜਦੋਂ",-13.01952075958252],["▁täglich",-13.019521713256836],["▁juillet",-13.019522666931152],["▁свързани",-13.019522666931152],["▁редактор",-13.019523620605469],["▁143",-13.019529342651367],["▁lévő",-13.019529342651367],["▁nghiêm",-13.019530296325684],["▁पति",-13.019548416137695],["▁himself",-13.019556045532228],["しており",-13.019561767578123],["voje",-13.01957893371582],["▁ljude",-13.019586563110352],["▁realizacji",-13.01962947845459],["▁կար",-13.019638061523438],["▁દર",-13.01963996887207],["▁každého",-13.019670486450195],["▁qız",-13.019672393798828],["▁Paulus",-13.01968002319336],["TIK",-13.01968765258789],["ираат",-13.019699096679688],["െല്ലാം",-13.01972770690918],["提案",-13.019742012023926],["▁Haha",-13.019745826721191],["▁rapidement",-13.019753456115724],["您可以",-13.01975440979004],["▁həll",-13.019784927368164],["нди",-13.019789695739746],["standard",-13.01979923248291],["▁могло",-13.019807815551758],["▁Lond",-13.019816398620604],["▁ಕಾಣ",-13.019850730895996],["▁напрям",-13.019875526428224],["▁জানান",-13.019891738891602],["▁اختر",-13.019923210144045],["ssum",-13.019943237304688],["တပ္",-13.019963264465332],["地點",-13.019964218139648],["▁карма",-13.019980430603027],["sining",-13.01999568939209],["특",-13.020004272460938],["▁punkti",-13.020017623901367],["▁daging",-13.020048141479492],["▁stab",-13.02005672454834],["▁יאר",-13.020061492919922],["▁Nha",-13.020068168640137],["lieka",-13.020106315612791],["▁bendr",-13.02011489868164],["favor",-13.020130157470703],["dod",-13.020133972167969],["▁ਦਰ",-13.020133972167969],["dika",-13.02015495300293],["ಾಯಿ",-13.020159721374512],["វី",-13.020174026489258],["▁తల",-13.020174026489258],["▁אס",-13.020188331604004],["▁irre",-13.0201997756958],["யன்",-13.020227432250977],["ባሉ",-13.02024269104004],["▁mota",-13.020265579223633],["▁kayu",-13.02027416229248],["▁Бизнес",-13.020307540893556],["▁reparti",-13.020316123962402],["chin",-13.020341873168944],["ยิง",-13.020345687866213],["ടെ",-13.020389556884766],["▁varmaan",-13.020390510559082],["▁tekint",-13.02040958404541],["▁utilizado",-13.020418167114258],["▁shtet",-13.020419120788574],["▁spie",-13.020419120788574],["▁Gaur",-13.02042579
6508787],["▁152",-13.020442962646484],["規模",-13.020477294921877],["คร",-13.020480155944824],["tarea",-13.020502090454102],["άφ",-13.020524978637695],["▁menyusu",-13.020532608032228],["чение",-13.020575523376465],["зме",-13.020600318908691],["ចំនួន",-13.02060890197754],["▁Ambiente",-13.02060890197754],["laq",-13.020648956298828],["taí",-13.020657539367676],["▁Cinta",-13.020658493041992],["फल",-13.020662307739258],["โชว์",-13.020668029785156],["▁výsledky",-13.020669937133787],["▁προβλήματα",-13.020669937133787],["▁hluta",-13.020670890808104],["業界",-13.020684242248535],["▁hins",-13.02069854736328],["▁изменений",-13.020700454711914],["▁SEK",-13.020703315734863],["▁असतात",-13.020709991455078],["▁kapacit",-13.02073097229004],["▁aliquid",-13.020736694335938],["lwyd",-13.02073860168457],["▁naturen",-13.02076244354248],["▁esque",-13.020763397216797],["▁تاکہ",-13.020770072937012],["達成",-13.02077293395996],["▁Steven",-13.020782470703123],["▁kiti",-13.020806312561035],["▁lloj",-13.020828247070312],["känsla",-13.020831108093262],["▁ఇదే",-13.020834922790527],["udah",-13.020844459533691],["ონის",-13.020846366882324],["քան",-13.02084732055664],["drin",-13.020907402038574],["食材",-13.020920753479004],["ндер",-13.020954132080078],["▁sellele",-13.02096176147461],["▁сказаў",-13.021001815795898],["物件",-13.021014213562012],["esia",-13.021037101745604],["▁capo",-13.021051406860352],["▁સહ",-13.021084785461426],["տակ",-13.021090507507324],["ھار",-13.02109432220459],["▁güçlü",-13.021095275878906],["▁lideri",-13.02110195159912],["लिंग",-13.02114200592041],["fizetés",-13.021167755126951],["▁rép",-13.021172523498535],["▁Rua",-13.021177291870115],["2018)",-13.021190643310549],["▁словам",-13.021203994750977],["ગ્",-13.021215438842772],["ેલા",-13.021217346191406],["▁Billig",-13.021230697631836],["ાવી",-13.021235466003418],["ძლებ",-13.021241188049316],["cles",-13.021286964416504],["jegy",-13.021289825439451],["▁ръка",-13.02129364013672],["dags",-13.0212984085083],["▁финансов",-13.021321296691896],["▁vreau",-13.021333694458008],["▁학생",-13.021357536315918],["▁Musi",-13.021382331848145],["ленні",-13.021391868591309],["URE",-13.02140998840332],["லோ",-13.02141571044922],["▁Elektronik",-13.021445274353027],["▁Szak",-13.021472930908203],["▁gestión",-13.021480560302734],["▁frase",-13.021485328674316],["包裝",-13.021488189697266],["pošte",-13.021492004394531],["▁shtr",-13.021492958068848],["▁계획",-13.021506309509276],["ینگ",-13.021547317504885],["▁sinun",-13.021551132202148],["▁основном",-13.021567344665527],["孩子们",-13.02157211303711],["baş",-13.021583557128906],["第五",-13.021623611450195],["kļu",-13.02165699005127],["▁clasa",-13.021658897399902],["ଦର",-13.021663665771484],["▁akses",-13.021676063537598],["▁Austria",-13.021682739257812],["▁三",-13.021685600280762],["иња",-13.02169704437256],["成熟",-13.021724700927734],["వాద",-13.02173137664795],["▁Rīga",-13.021736145019531],["tangan",-13.02174949645996],["▁merece",-13.021750450134276],["aaral",-13.021751403808594],["▁artificial",-13.02175235748291],["▁توافق",-13.021758079528809],["迈",-13.021768569946287],["▁aplikacij",-13.021777153015137],["징",-13.021805763244627],["▁gadījumā",-13.021821975708008],["社交",-13.021822929382324],["▁esté",-13.021828651428224],["▁febreiro",-13.021828651428224],["▁савез",-13.021831512451172],["νού",-13.02183437347412],["▁traitement",-13.021835327148438],["▁rozmiar",-13.021842956542969],["▁porém",-13.021858215332031],["▁quote",-13.021872520446776],["▁제주",-13.021904945373535],["▁mestre",-13.021906852722168],["▁межах",-13.0219144821167],["▁Союз",-13.021925926208496],
["▁hoof",-13.021961212158203],["▁muncă",-13.021965026855469],["对象",-13.021966934204102],["▁मिस",-13.02196979522705],["▁تعمل",-13.021984100341797],["ायचे",-13.021986961364746],["මක්",-13.021991729736328],["ані",-13.022002220153809],["▁arası",-13.022007942199709],["▁алда",-13.022013664245604],["▁ಸರಿ",-13.022018432617188],["▁dă",-13.02203369140625],["▁pneu",-13.022056579589844],["పే",-13.022079467773438],["▁estatu",-13.022095680236816],["యోగ",-13.022106170654297],["▁sağlam",-13.022123336791992],["wall",-13.022125244140623],["▁тада",-13.0221529006958],["ēto",-13.0221586227417],["中華",-13.022162437438965],["▁német",-13.022164344787598],["въ",-13.02216911315918],["انة",-13.02216911315918],["محمد",-13.02216911315918],["ड्",-13.022170066833496],["▁моей",-13.022171020507812],["▁informação",-13.02218532562256],["▁하지",-13.02220344543457],["Бож",-13.02220630645752],["▁passato",-13.022207260131836],["тты",-13.02223300933838],["▁saz",-13.022242546081545],["▁milionë",-13.02225399017334],["▁torno",-13.022276878356934],["▁pidi",-13.02227783203125],["ິນ",-13.022305488586426],["jedna",-13.02231788635254],["vred",-13.02232551574707],["▁događaj",-13.022336959838867],["▁certi",-13.022361755371094],["▁Živ",-13.022364616394045],["▁Вол",-13.022372245788574],["[7]",-13.022405624389648],["▁varmt",-13.022470474243164],["▁சென்ற",-13.02247714996338],["larınız",-13.022493362426758],["▁banco",-13.022497177124023],["▁norme",-13.022501945495604],["▁challenge",-13.022504806518556],["ហ្",-13.022520065307615],["ਢ",-13.022616386413574],["FER",-13.02261734008789],["produkt",-13.02263355255127],["▁בשל",-13.0226411819458],["ndër",-13.022642135620115],["trā",-13.022644996643066],["ደግ",-13.022676467895508],["ķe",-13.02268886566162],["σταση",-13.022699356079102],["▁Castel",-13.022727012634276],["-00",-13.022727966308594],["голов",-13.02273178100586],["▁отправи",-13.022775650024414],["▁从",-13.022785186767578],["▁ලිපිය",-13.02280044555664],["чати",-13.022809982299805],["▁ниту",-13.02285099029541],["IME",-13.02285385131836],["▁Universiti",-13.02285861968994],["▁možemo",-13.022878646850586],["스크",-13.0228910446167],["▁pohon",-13.022927284240724],["ásra",-13.022944450378418],["柱",-13.022951126098633],["പീ",-13.022974967956545],["▁γεγονός",-13.022974967956545],["▁شناسی",-13.022974967956545],["▁अमेरिकी",-13.022974967956545],["▁সেপ্টেম্বর",-13.022974967956545],["▁විස්තර",-13.022974967956545],["▁ቡድን",-13.022974967956545],["▁сертификат",-13.02297592163086],["greiðslu",-13.02298641204834],["▁intressant",-13.022989273071287],["▁đốc",-13.022992134094238],["ซู",-13.023011207580566],["▁வருட",-13.023012161254885],[".09.2018",-13.023014068603516],["▁생산",-13.023029327392578],["▁huzur",-13.023031234741213],["วด",-13.02311897277832],["▁bæði",-13.02313232421875],["▁היתה",-13.023151397705078],["▁מחו",-13.02315902709961],["phil",-13.02316951751709],["舍",-13.023183822631836],["▁zapa",-13.023207664489746],["▁lunga",-13.023212432861328],["▁ajutorul",-13.023218154907228],["туру",-13.02322483062744],["▁кожа",-13.023242950439451],["▁Özellikle",-13.023249626159668],["▁силы",-13.023260116577148],["▁giai",-13.023306846618652],["Fr",-13.02333164215088],["▁päivää",-13.023334503173828],["▁comportamento",-13.023345947265623],["▁الجيش",-13.023356437683104],["▁ხა",-13.023357391357422],["tadi",-13.023365020751951],["▁Về",-13.023397445678713],["тељ",-13.023418426513672],["▁presto",-13.023419380187988],["▁sovra",-13.023444175720217],["▁prøver",-13.02344799041748],["цкага",-13.023468017578123],["▁μιλ",-13.02347469329834],["tesse",-13.023512840270996],["▁Marin",-13.02354717254
6388],["▁groter",-13.023557662963867],["▁climat",-13.023563385009766],["ark",-13.023590087890623],["vné",-13.02359104156494],["▁Banco",-13.023592948913574],["▁സഹ",-13.023612022399902],["▁दिनों",-13.023618698120115],["ваше",-13.0236234664917],["れない",-13.023625373840332],["▁прилага",-13.023626327514648],["HF",-13.023634910583496],["▁lépés",-13.023653030395508],["▁categoría",-13.023664474487305],["▁determinat",-13.023667335510254],["▁dejtingsajt",-13.023685455322266],["čenja",-13.023716926574709],["▁رمز",-13.023751258850098],["▁чакыр",-13.02379035949707],["ገድ",-13.023795127868652],["检测",-13.02380084991455],["خی",-13.02383041381836],["▁Sami",-13.023836135864258],["toiminta",-13.02385711669922],["▁ամբողջ",-13.024067878723145],["聘",-13.024078369140623],["呆",-13.02408504486084],["ଡ୍",-13.024086952209473],["織",-13.024088859558104],["センター",-13.024113655090332],["紀錄",-13.024115562438965],["▁Grie",-13.024118423461914],["▁Kommunikation",-13.024129867553713],["▁Prishtinë",-13.024129867553713],["▁kepentingan",-13.024129867553713],["▁ବ୍ୟବହାର",-13.024129867553713],["▁മറുപടി",-13.024129867553713],["▁разговара",-13.024130821228027],["카지노",-13.024130821228027],["▁tillgänglig",-13.02413272857666],["ຊີວິດ",-13.024140357971191],["▁праект",-13.024147033691406],["ଦି",-13.02414894104004],["▁Leit",-13.024149894714355],["▁þann",-13.024150848388672],["▁Народ",-13.02415657043457],["▁обеспечения",-13.024157524108888],["▁ತರ",-13.024157524108888],["▁nume",-13.024158477783203],["▁ಬರಹ",-13.024160385131836],["virkning",-13.024168968200684],["▁Îranê",-13.024168968200684],["▁යැයි",-13.024170875549316],["▁उपस्थित",-13.02418327331543],["աբան",-13.02419090270996],["▁раёна",-13.024192810058594],["▁చేస్తా",-13.024226188659668],["▁parkering",-13.02423095703125],["▁Kazi",-13.024240493774414],["优化",-13.024273872375488],["▁dica",-13.024282455444336],["▁그런데",-13.024328231811523],["▁spent",-13.024335861206056],["▁ಯಾರ",-13.02434539794922],["▁تأ",-13.024359703063965],["teilt",-13.02436065673828],["▁Pik",-13.024380683898926],["▁сағат",-13.02438259124756],["▁dobry",-13.024384498596191],["خن",-13.02438735961914],["▁Vision",-13.024406433105469],["iziran",-13.02443027496338],["原则",-13.024456024169922],["arme",-13.024479866027832],["ദര്",-13.024479866027832],["фект",-13.02450180053711],["104",-13.024508476257324],["▁หนัง",-13.024523735046388],["▁ເຊິ່ງ",-13.02452564239502],["rettet",-13.024534225463867],["190",-13.02454948425293],["▁ಎಚ್",-13.024561882019045],["▁wykona",-13.024580001831056],["ပညာ",-13.024621963500977],["▁හැටි",-13.024624824523926],["maskiner",-13.024639129638672],["უქ",-13.024669647216797],["▁естествен",-13.024673461914062],["ISTA",-13.024677276611328],["▁esmu",-13.02468967437744],["▁alien",-13.02469253540039],["vosti",-13.024700164794922],["kaitse",-13.024752616882324],["▁분야",-13.024755477905272],["lick",-13.024768829345703],["uß",-13.024785041809082],["▁pripom",-13.024789810180664],["tante",-13.02481746673584],["щая",-13.024820327758787],["▁ימים",-13.02489185333252],["គូ",-13.024914741516112],["dī",-13.024953842163086],["EDI",-13.024954795837402],["വിധ",-13.0249662399292],["idő",-13.024982452392578],["▁związane",-13.025026321411133],["▁sinabi",-13.02503776550293],["▁flori",-13.025053977966309],["päeval",-13.025080680847168],["▁תגובות",-13.0250825881958],["期限",-13.025102615356444],["▁भ्रमण",-13.025110244750977],["lop",-13.025117874145508],["讯",-13.025128364562988],["sör",-13.025137901306152],["▁زیرا",-13.025177001953123],["▁Doda",-13.025187492370604],["▁පැමිණි",-13.02519989013672],["entrée",-13.025226593017578],["骗",-13.025237083
43506],["енні",-13.025248527526855],["▁sobe",-13.025259017944336],["▁imeli",-13.025270462036133],["▁پنهنجو",-13.025283813476562],["▁Fernández",-13.025285720825195],["▁thắng",-13.025285720825195],["▁हिन्दी",-13.025285720825195],["▁ಆಯ್ಕೆ",-13.025285720825195],["▁أبريل",-13.025287628173828],["▁اسپتال",-13.025288581848145],["▁تحميل",-13.025293350219728],["▁kayıt",-13.025300979614258],["的就是",-13.025303840637209],["落ち",-13.025304794311523],["▁Energy",-13.025306701660156],["üket",-13.025331497192385],["sinden",-13.0253324508667],["▁знал",-13.0253324508667],["▁Asian",-13.025333404541016],["▁चाहते",-13.02534294128418],["бъл",-13.025352478027344],["▁घेऊन",-13.02535343170166],["▁læge",-13.025361061096191],["dä",-13.025375366210938],["▁оценка",-13.025403022766112],["ğını",-13.025431632995604],["вим",-13.025464057922363],["▁indo",-13.02546501159668],["▁grön",-13.025476455688477],["ينة",-13.02549934387207],["รูปแบบ",-13.025524139404297],["▁Dance",-13.025537490844728],["տան",-13.02553939819336],["▁المق",-13.025550842285156],["▁différentes",-13.025556564331056],["▁দেখ",-13.02556037902832],["▁érdemes",-13.025574684143066],["도로",-13.025575637817385],["人も",-13.025578498840332],["他是",-13.025579452514648],["▁comunitat",-13.02561855316162],["사진",-13.025620460510254],["нта",-13.025636672973633],["ىرى",-13.025636672973633],["活用",-13.02563762664795],["lmente",-13.025654792785645],["▁tagit",-13.025671005249023],["▁تاپ",-13.025694847106934],["▁itong",-13.025721549987791],["▁חיפוש",-13.02572536468506],["ۈن",-13.025741577148438],["▁konf",-13.025747299194336],["▁Lägg",-13.025778770446776],["▁Mér",-13.02581024169922],["▁Mål",-13.025814056396484],["▁aisce",-13.025846481323242],["நாய",-13.02585506439209],["नन्",-13.025887489318848],["▁planer",-13.025936126708984],["▁bánh",-13.025941848754885],["чена",-13.02602481842041],["▁алардын",-13.026049613952637],["▁Beginn",-13.02605438232422],["נח",-13.02611255645752],["▁sobą",-13.02611255645752],["▁ödeme",-13.02612018585205],["▁캐",-13.026142120361328],["tíð",-13.02615451812744],["styrelse",-13.026159286499023],["ხორცი",-13.026162147521973],["УУЛ",-13.0261869430542],["▁miljon",-13.0261869430542],["很容易",-13.02621364593506],["▁quero",-13.026275634765623],["▁Nella",-13.026286125183104],["▁правят",-13.026290893554688],["▁existi",-13.02631378173828],["िप",-13.02631664276123],["▁აუ",-13.026330947875977],["corr",-13.026361465454102],["▁aya",-13.026392936706545],["võrd",-13.026413917541504],["اگ",-13.026429176330566],["iyaan",-13.026440620422363],["▁сёння",-13.02644157409668],["▁سفارش",-13.026443481445312],["▁ବ୍ୟକ୍ତି",-13.026443481445312],["▁connaissance",-13.026444435119627],["▁всього",-13.026444435119627],["▁Ұлттық",-13.026445388793944],["▁Society",-13.026451110839844],["▁хэдэн",-13.026451110839844],["▁Έτσι",-13.02645206451416],["▁gedoen",-13.026473999023438],["▁በአንድ",-13.026497840881348],["▁меню",-13.02649974822998],["▁comunicazione",-13.026524543762209],["▁කරපු",-13.026530265808104],["▁favore",-13.02653694152832],["лс",-13.02654266357422],["▁आजको",-13.026544570922852],["ජ්",-13.026554107666016],["▁மாத",-13.026558876037598],["▁webbplats",-13.02656364440918],["▁PENG",-13.02657413482666],["▁satın",-13.026577949523926],["gelijk",-13.02657985687256],["▁خلف",-13.026588439941406],["NIC",-13.02659511566162],["▁ሚ",-13.02659511566162],["ഖ്",-13.026597023010254],["équipe",-13.026605606079102],["▁nieuws",-13.026616096496582],["▁hesap",-13.026684761047363],["iiv",-13.02673053741455],["ekonomi",-13.026735305786133],["▁Қазақстанның",-13.026739120483398],["ۈل",-13.026759147644045],["ciens",-13.02676773
071289],["ที่ดีที่สุด",-13.026780128479004],["тот",-13.026796340942385],["大量的",-13.02683162689209],["新鮮",-13.026869773864746],["▁युद्ध",-13.026874542236328],["eggia",-13.026918411254885],["hwe",-13.026921272277832],["▁institución",-13.026932716369627],["▁ಮೂ",-13.026935577392578],["ЭР",-13.026947975158691],["алася",-13.026952743530272],["▁ಶಿ",-13.026957511901855],["कुमार",-13.026960372924805],["▁хэв",-13.026965141296388],["▁karin",-13.027021408081056],["▁fundament",-13.02703857421875],["▁နာရီ",-13.027044296264648],["nick",-13.027045249938965],["▁Naxçıvan",-13.02707576751709],["keppni",-13.02707862854004],["cse",-13.02708339691162],["▁қолдан",-13.027099609375],["kategori",-13.027113914489746],["▁Hvala",-13.027162551879885],["▁승",-13.0271635055542],["規",-13.027193069458008],["▁وصل",-13.027195930480955],["өң",-13.027201652526855],["્વ",-13.0272216796875],["haven",-13.02725315093994],["▁llama",-13.027277946472168],["DAY",-13.027289390563965],["▁меди",-13.027301788330078],["AKAN",-13.027361869812012],["immat",-13.02737045288086],["ējam",-13.027432441711426],["物質",-13.027451515197754],["要素",-13.027453422546388],["င့္",-13.027472496032717],["▁अन्",-13.027472496032717],["▁Ocak",-13.027473449707031],["▁Boris",-13.027528762817385],["▁Deutschen",-13.027541160583496],["▁kill",-13.027544021606444],["▁bestille",-13.02755069732666],["▁fish",-13.027551651000977],["兄",-13.02755641937256],["kres",-13.027560234069824],["clusi",-13.027578353881836],["ঞ্চ",-13.027578353881836],["быр",-13.027588844299316],["төр",-13.02759838104248],["▁kvůli",-13.027602195739746],["▁trenutku",-13.027602195739746],["▁Κυριακή",-13.027602195739746],["▁сентября",-13.027602195739746],["▁terlibat",-13.027603149414062],["▁açısından",-13.027605056762695],["▁պատկեր",-13.027606010437012],["▁iespēju",-13.027606964111328],["▁Aunque",-13.027610778808594],["▁nivo",-13.027620315551758],["▁בנ",-13.027634620666504],["▁vuoksi",-13.027639389038086],["ເຂົ້າຮ່ວມ",-13.027642250061035],["shed",-13.027650833129885],["▁обяви",-13.027663230895996],["OMA",-13.027674674987791],["▁przyzna",-13.027685165405272],["rx",-13.02769660949707],["ženje",-13.02773380279541],["kasse",-13.027735710144045],["▁bertemu",-13.027745246887209],["▁Bugün",-13.027789115905762],["▁folgenden",-13.02780055999756],["▁krvi",-13.02782917022705],["хам",-13.027836799621582],["▁reunión",-13.02784252166748],["▁medya",-13.027854919433594],["を行う",-13.027859687805176],["ovia",-13.027861595153809],["ാവുന്ന",-13.027877807617188],["ggy",-13.027889251708984],["▁certaines",-13.0278902053833],["▁Lui",-13.027896881103516],["▁obtenir",-13.02790355682373],["▁штаб",-13.027909278869627],["лази",-13.027949333190918],["grunn",-13.027952194213867],["▁mister",-13.027959823608398],["▁ağ",-13.02797794342041],["▁квартир",-13.027981758117676],["▁alege",-13.027999877929688],["▁Heute",-13.028002738952637],["ități",-13.028011322021484],["▁دیوار",-13.028037071228027],["▁জয়",-13.028040885925291],["言う",-13.028040885925291],["▁slep",-13.028051376342772],["άντ",-13.028070449829102],["▁грамадзян",-13.028093338012695],["▁receita",-13.02809715270996],["▁төрт",-13.02810764312744],["▁Ecclesia",-13.028119087219238],["ээрээ",-13.028120994567873],["xí",-13.028148651123049],["▁mkoani",-13.028162002563477],["▁publicada",-13.028164863586426],["▁Kou",-13.028169631958008],["Ζ",-13.028175354003906],["▁oportunidades",-13.028175354003906],["▁populi",-13.028194427490234],["▁ਝ",-13.028196334838867],["גב",-13.02820110321045],["▁റോ",-13.02822208404541],["▁financiar",-13.028238296508787],["bø",-13.0282564163208],["▁FIN",-13.02826595306396
5],["▁készül",-13.028284072875977],["▁finish",-13.028290748596191],["▁ဆရာ",-13.0283203125],["dığınız",-13.028355598449709],["▁پٽ",-13.028356552124023],["▁glan",-13.02836799621582],["▁17.00",-13.02842903137207],["▁acestor",-13.028433799743652],["▁režim",-13.028446197509766],["▁гарч",-13.028447151184082],["viel",-13.028519630432127],["▁ಇರುವ",-13.028523445129396],["ЕЛ",-13.028536796569824],["▁Proto",-13.028536796569824],["觸",-13.028552055358888],["▁Susan",-13.028581619262695],["န့်",-13.028584480285645],["▁leden",-13.028640747070312],["▁Toda",-13.028643608093262],["▁ನೀ",-13.028643608093262],["▁sayılı",-13.028687477111816],["ועד",-13.028692245483398],["▁vikt",-13.028705596923828],["滋",-13.028711318969728],["▁хотела",-13.028714179992676],["məli",-13.028724670410156],["失敗",-13.0287504196167],["やはり",-13.02875518798828],["▁சமூக",-13.028761863708496],["▁ಉತ್ತಮ",-13.028761863708496],["▁ಕುರಿತು",-13.028778076171877],["kær",-13.028783798217772],["числе",-13.028783798217772],["ุด",-13.028797149658203],["▁імені",-13.028797149658203],["▁öyle",-13.028822898864746],["▁aquell",-13.02883529663086],["▁утре",-13.028838157653809],["▁ลด",-13.028860092163086],["raman",-13.028864860534668],["hésitez",-13.028888702392578],["чић",-13.028894424438477],["▁Услови",-13.02891445159912],["▁gesehen",-13.02892017364502],["▁seng",-13.028968811035156],["▁gais",-13.029014587402344],["▁institucij",-13.02901840209961],["▁pildi",-13.02902889251709],["اده",-13.02906894683838],["▁rischio",-13.029081344604492],["СКА",-13.029090881347656],["▁دارو",-13.029104232788086],["▁buồn",-13.029108047485352],["▁nýja",-13.0291166305542],["คณะ",-13.029118537902832],["מך",-13.02912712097168],["▁пове",-13.029142379760742],["▁ਦੇਸ਼",-13.029149055480955],["▁ingenting",-13.029157638549805],["▁රේ",-13.02915859222412],["สุข",-13.029170989990234],["▁tarda",-13.029170989990234],["▁ចាប់",-13.0291748046875],["jona",-13.029190063476562],["▁Ofer",-13.029194831848145],["ಲ್ಲೂ",-13.029199600219728],["▁(26)",-13.0292387008667],["ZN",-13.029239654541016],["自动",-13.029268264770508],["▁mjeseci",-13.029294967651367],["▁главное",-13.02931022644043],["باني",-13.029325485229492],["ručuje",-13.02934741973877],["blas",-13.029348373413086],["ଛା",-13.029356002807615],["ांमध्ये",-13.02935791015625],["مەت",-13.029373168945312],["วั",-13.029388427734377],["▁üye",-13.029393196105955],["向け",-13.02940559387207],["최",-13.029413223266602],["▁पं",-13.029431343078612],["ўшы",-13.029437065124512],["▁résultats",-13.029438018798828],["▁châu",-13.029438972473145],["பதி",-13.029441833496094],["urkje",-13.029481887817385],["ику",-13.02948760986328],["▁merito",-13.029488563537598],["▁પોસ્ટ",-13.02949333190918],["▁pravil",-13.029513359069824],["Но",-13.02954387664795],["▁razume",-13.02955436706543],["▁Pav",-13.02956199645996],["▁Geh",-13.029562950134276],["haka",-13.029568672180176],["Ата",-13.029584884643556],["▁Publi",-13.02960968017578],["▁menores",-13.029621124267578],["▁necha",-13.029622077941896],["eyê",-13.029657363891602],["黑色",-13.029672622680664],["▁recommend",-13.029683113098145],["▁katolik",-13.02968978881836],["RAK",-13.029706954956056],["ведом",-13.029707908630373],["vesi",-13.029722213745115],["是最",-13.029725074768066],["னும்",-13.029751777648926],["▁loodus",-13.029767990112305],["▁daarna",-13.029799461364746],["ньні",-13.029820442199709],["jumā",-13.0298433303833],["кали",-13.02985954284668],["▁turp",-13.029870986938477],["ιος",-13.029879570007324],["▁відомо",-13.029885292053224],["貝",-13.029888153076172],["ビジネス",-13.029909133911133],["٥",-13.02992343902588],["▁jolloin",-13.02992
343902588],["▁ጉዳዮች",-13.02992343902588],["▁عجیب",-13.029924392700195],["▁किशोर",-13.029926300048828],["ägare",-13.029949188232422],["▁Через",-13.029950141906738],["▁پرداز",-13.029951095581056],["▁įvertin",-13.029953956604004],["▁касается",-13.029953956604004],["▁чудес",-13.02995491027832],["▁láthair",-13.029955863952637],["▁goda",-13.02996063232422],["asă",-13.029980659484863],["▁kuchni",-13.029988288879396],["Au",-13.02999782562256],["▁rower",-13.030000686645508],["ግብ",-13.030001640319824],["HET",-13.030038833618164],["Франківськ",-13.03004264831543],["lique",-13.030044555664062],["▁കോണ്",-13.030044555664062],["Jeg",-13.030061721801758],["▁tulu",-13.030071258544922],["▁Isi",-13.030074119567873],["વાનું",-13.030083656311035],["▁father",-13.030102729797363],["بث",-13.03010368347168],["kommer",-13.030111312866213],["owni",-13.03011703491211],["पात",-13.030147552490234],["стър",-13.0301513671875],["ბრძ",-13.030193328857422],["መደ",-13.030207633972168],["▁mā",-13.030226707458496],["ικούς",-13.030247688293455],["▁prat",-13.030269622802734],["▁rozum",-13.030285835266112],["▁людина",-13.030316352844238],["▁Mitglied",-13.030317306518556],["▁любом",-13.030333518981934],["▁Dame",-13.030335426330566],["込み",-13.03034496307373],["▁labiau",-13.030375480651855],["▁mwana",-13.030389785766602],["áng",-13.03039836883545],["▁Valg",-13.030420303344728],["▁fede",-13.030421257019045],["▁тамо",-13.030431747436523],["▁사람들이",-13.030440330505373],["▁μαγ",-13.030482292175291],["▁♦",-13.03050136566162],["աժ",-13.030556678771973],["▁ѝ",-13.030625343322754],["▁kleiner",-13.030634880065918],["▁päev",-13.030648231506348],["minut",-13.030659675598145],["▁تحلیل",-13.030661582946776],["觀光",-13.030668258666992],["▁sprzedaży",-13.03067398071289],["▁Match",-13.030674934387209],["vane",-13.030684471130373],["лете",-13.030698776245115],["▁kusht",-13.0307035446167],["▁rolle",-13.030719757080078],["рген",-13.030738830566406],["class",-13.030741691589355],["▁хв",-13.030746459960938],["บอกว่า",-13.030766487121582],["▁moest",-13.030767440795898],["▁Herri",-13.030776023864746],["близ",-13.030791282653809],["▁ದಿನಾಂಕ",-13.030791282653809],["▁siêu",-13.030795097351074],["▁Больш",-13.030818939208984],["▁работни",-13.03084659576416],["▁dostal",-13.030853271484377],["/24",-13.030938148498535],["▁морал",-13.030967712402344],["▁upande",-13.030993461608888],["▁मैच",-13.031005859375],["▁produktion",-13.03100872039795],["▁сұра",-13.031011581420898],["▁Вр",-13.03101921081543],["虚",-13.031020164489746],["諸",-13.03105354309082],["棉",-13.031062126159668],["ӊ",-13.031085968017578],["ប្រើប្រាស់",-13.031085968017578],["▁üniversite",-13.031085968017578],["▁Dictionaries",-13.031086921691896],["▁ejercicio",-13.031086921691896],["▁mavjud",-13.031086921691896],["▁xaneiro",-13.031086921691896],["▁συνεργασία",-13.031086921691896],["▁என்பதை",-13.031095504760742],["▁повідомлення",-13.031098365783691],["▁snaží",-13.031120300292969],["房间",-13.031121253967283],["▁Ego",-13.031129837036133],["▁02.",-13.031139373779297],["▁급",-13.031146049499512],["kuar",-13.031147956848145],["epi",-13.031152725219728],["съд",-13.031179428100586],["▁صل",-13.0311861038208],["льным",-13.031192779541016],["▁ruko",-13.031206130981444],["▁Fine",-13.031216621398926],["▁smut",-13.03121852874756],["ોર",-13.031237602233888],["▁krim",-13.03123950958252],["रत",-13.031248092651367],["спеш",-13.031257629394531],["▁öka",-13.031270027160645],["▁බලය",-13.031278610229492],["జన",-13.031289100646973],["▁основно",-13.031290054321287],["▁milliy",-13.031329154968262],["cf",-13.031332015991213],["▁кръг",-13.
031362533569336],["▁Dick",-13.031373023986816],["▁kolejne",-13.031383514404297],["▁боро",-13.031390190124512],["శి",-13.031391143798828],["▁düzen",-13.031396865844728],["ವರೆಗೆ",-13.031416893005373],["γή",-13.031437873840332],["爭",-13.031438827514648],["▁Sari",-13.031461715698242],["уть",-13.031465530395508],["ulus",-13.031466484069824],["▁HTC",-13.031469345092772],["▁গেছে",-13.031472206115724],["▁دیکھا",-13.031490325927734],["μένου",-13.03151798248291],["▁estudos",-13.031521797180176],["▁permane",-13.03153133392334],["▁Militar",-13.031539916992188],["▁fabrik",-13.031540870666504],["izmi",-13.031543731689451],["องค์",-13.031550407409668],["▁synd",-13.031567573547363],["▁reh",-13.031574249267578],["▁ölüm",-13.031580924987791],["голос",-13.031591415405272],["▁ब्रा",-13.03159236907959],["ฝาก",-13.031598091125488],["ेंद्र",-13.031608581542969],["▁neturi",-13.031609535217283],["tzak",-13.031622886657717],["▁ລົງ",-13.031631469726562],["▁bouw",-13.031660079956056],["▁tööd",-13.031670570373535],["qam",-13.03172492980957],["тви",-13.03173828125],["微博",-13.031755447387695],["νί",-13.031766891479492],["▁garantie",-13.031767845153809],["▁зависи",-13.031780242919922],["▁sinulle",-13.03178596496582],["▁Fira",-13.031797409057615],["▁ცი",-13.031843185424805],["प्रसाद",-13.031869888305664],["jamo",-13.031874656677246],["▁דוד",-13.031885147094728],["나무",-13.03188705444336],["▁யார்",-13.03191375732422],["asin",-13.031916618347168],["▁álló",-13.031916618347168],["▁kain",-13.031932830810549],["mällä",-13.031939506530762],["mekte",-13.031949043273926],["로서",-13.031952857971191],["▁ඉන්නවා",-13.031976699829102],["▁kvin",-13.032005310058594],["▁ellas",-13.03200912475586],["できません",-13.032014846801758],["▁kë",-13.03201675415039],["ೆಯ",-13.03203296661377],["▁близ",-13.032045364379885],["tempe",-13.032049179077148],["▁αυτού",-13.032049179077148],["aty",-13.032050132751465],["ができる",-13.032052993774414],["▁дүр",-13.03208065032959],["rétt",-13.032082557678224],["▁ਅਮਰੀਕਾ",-13.032084465026855],["と言って",-13.032094955444336],["ឃ",-13.03210163116455],["▁כר",-13.032108306884766],["▁Johann",-13.03213596343994],["რს",-13.032137870788574],["リン",-13.032154083251951],["▁mãi",-13.032188415527344],["عكس",-13.03219223022461],["école",-13.03222370147705],["▁eraiki",-13.032236099243164],["▁söö",-13.03224277496338],["させていただきます",-13.032243728637695],["預計",-13.032246589660645],["▁Público",-13.03225040435791],["▁empfehlen",-13.03225040435791],["▁ունեցող",-13.03225040435791],["▁පළාත්",-13.03225803375244],["▁Outlook",-13.032259941101074],["mals",-13.032270431518556],["itoare",-13.032271385192873],["▁elérhető",-13.032271385192873],["▁இருக்கிறது",-13.032271385192873],["▁Londres",-13.032296180725098],["▁данный",-13.032299995422363],["▁Dagens",-13.03230094909668],["ிருக்க",-13.032319068908691],["早期",-13.032357215881348],["resultat",-13.032370567321776],["leż",-13.03239917755127],["▁nuus",-13.03239917755127],["supp",-13.03249454498291],["fisk",-13.032512664794922],["▁kõrge",-13.032516479492188],["ပါသည္။",-13.03252410888672],["▁UNI",-13.032525062561035],["▁бичиг",-13.032551765441896],["▁გაზ",-13.03256893157959],["▁Бур",-13.032585144042969],["▁mortal",-13.032612800598145],["▁facă",-13.032625198364258],["▁өзге",-13.03262996673584],["olja",-13.032642364501951],["▁Kitab",-13.032651901245115],["當中",-13.032658576965332],["▁hâ",-13.032668113708496],["▁framför",-13.032670974731444],["uvat",-13.032727241516112],["▁ਲੱਗ",-13.032730102539062],["▁ေန",-13.032737731933594],["▁tjänster",-13.032759666442873],["気になる",-13.032771110534668],["▁ដ៏",-13.032772064208984],["▁
δί",-13.0327787399292],["тима",-13.032780647277832],["▁отказа",-13.032790184020996],["▁portál",-13.032793045043944],["▁خىتاي",-13.032814979553224],["▁tlač",-13.032824516296388],["ريب",-13.032854080200195],["▁Tala",-13.032855033874512],["agne",-13.032861709594728],["▁Esi",-13.032865524291992],["▁vsebin",-13.032866477966309],["યુ",-13.032872200012209],["ਜਨ",-13.032883644104004],["ത്തിന",-13.032894134521484],["▁возник",-13.032917022705078],["▁보내",-13.032919883728027],["αίο",-13.032930374145508],["گری",-13.032938957214355],["ေဆး",-13.03298568725586],["organisa",-13.032992362976074],["چي",-13.032992362976074],["inya",-13.032994270324709],["ENI",-13.033002853393556],["▁ຕັ້ງ",-13.033013343811035],["견",-13.033015251159668],["서비스",-13.0330171585083],["dzą",-13.033069610595703],["ին՝",-13.033077239990234],["▁Bosch",-13.0330810546875],["LIS",-13.033088684082031],["рят",-13.033108711242676],["ოფ",-13.033111572265623],["SHA",-13.033116340637209],["żenie",-13.033143043518066],["ლავ",-13.033146858215332],["ίωση",-13.033156394958496],["▁død",-13.033167839050291],["▁docente",-13.033199310302734],["यि",-13.03321647644043],["▁განვითარების",-13.033233642578123],["ийски",-13.033249855041504],["▁considerado",-13.033249855041504],["ÂN",-13.033252716064451],["АГ",-13.033252716064451],["▁лев",-13.033324241638184],["▁garante",-13.03335189819336],["▁Державн",-13.033357620239258],["▁nyo",-13.03339385986328],["专门",-13.033398628234863],["▁xấu",-13.03341579437256],["▁الأعمال",-13.033419609069824],["▁Гі",-13.03342342376709],["▁Tirkiyê",-13.033427238464355],["zou",-13.03343391418457],["▁vähemalt",-13.03343677520752],["▁مۇنداق",-13.03346061706543],["▁மறு",-13.033464431762695],["პან",-13.033465385437012],["▁Очень",-13.03348445892334],["paket",-13.033495903015137],["▁заинтересован",-13.033501625061035],["ቻቸው",-13.0335054397583],["▁Косова",-13.033536911010742],["fydd",-13.033544540405272],["الى",-13.033578872680664],["なくても",-13.033596992492676],["▁berbagi",-13.03362274169922],["ಪಟ್ಟ",-13.033663749694824],["šom",-13.033666610717772],["▁Hàng",-13.033681869506836],["leis",-13.03368854522705],["njeni",-13.03368854522705],["θι",-13.033703804016112],["▁advice",-13.03377914428711],["alaman",-13.033793449401855],["platí",-13.033796310424805],["pasa",-13.033809661865234],["真的很",-13.033819198608398],["aller",-13.033841133117676],["fili",-13.033858299255373],["мисли",-13.033895492553713],["ьте",-13.033899307250977],["wyl",-13.033907890319824],["疲",-13.03392219543457],["▁Jog",-13.033934593200684],["▁dosa",-13.033952713012695],["ท่องเที่ยว",-13.033967971801758],["▁suum",-13.033967971801758],["▁arena",-13.033989906311035],["υθ",-13.033998489379885],["せる",-13.033998489379885],["АНА",-13.034002304077148],["▁webbplatsen",-13.03400707244873],["▁Lagi",-13.034031867980955],["▁amerikan",-13.03407382965088],["เข",-13.034077644348145],["రావు",-13.03408145904541],["▁ಜೀವನ",-13.034090042114258],["▁Ск",-13.034092903137209],["▁07.",-13.034097671508787],["▁gha",-13.034101486206056],["round",-13.034116744995115],["▁поверх",-13.0341215133667],["▁jiwa",-13.034156799316406],["▁जाना",-13.034164428710938],["tifik",-13.034173011779783],["telia",-13.03420352935791],["▁Право",-13.034228324890137],["▁менше",-13.034249305725098],["原則",-13.034276962280272],["щик",-13.034282684326172],["▁победа",-13.034324645996094],["UST",-13.034348487854004],["īšanas",-13.034369468688965],["իշ",-13.03437614440918],["▁natten",-13.03437614440918],["chol",-13.034390449523926],["▁୧୯",-13.034420013427734],["▁промена",-13.034432411193848],["▁proda",-13.034436225891112],["▁iepriekš",-13.03
443717956543],["ciel",-13.034441947937012],["vey",-13.034442901611328],["▁5-10",-13.034454345703123],["▁статье",-13.034457206726074],["▁kişiler",-13.0344877243042],["肌膚",-13.03454303741455],["▁spillet",-13.034570693969728],["▁کھل",-13.034574508666992],["▁Syarikat",-13.03458309173584],["▁घाइते",-13.03458309173584],["▁લગ્ન",-13.034584999084473],["▁internazionale",-13.034588813781738],["ফি",-13.034590721130373],["▁स्वयं",-13.034592628479004],["▁gigant",-13.034600257873535],["eerimise",-13.034602165222168],["ഷ്യ",-13.034626007080078],["▁پە",-13.03463363647461],["▁collaborazione",-13.03463649749756],["▁ХХК",-13.034650802612305],["▁marah",-13.034662246704102],["▁Jobb",-13.034700393676758],["▁товари",-13.034700393676758],["▁имали",-13.034707069396973],["▁rupa",-13.034722328186035],["▁ئۆ",-13.034733772277832],["স্ক",-13.034740447998049],["▁беларускіх",-13.034750938415527],["的历史",-13.034783363342283],["ባለ",-13.034796714782717],["▁좋아",-13.03481101989746],["▁ток",-13.034838676452637],["▁రో",-13.034839630126951],["ებები",-13.034842491149902],["▁nämä",-13.0348482131958],["inkin",-13.034870147705078],["рия",-13.034884452819824],["▁certifica",-13.034890174865724],["▁demek",-13.034917831420898],["ابی",-13.034934997558594],["▁వచ్చి",-13.034945487976074],["étais",-13.034948348999023],["πλα",-13.03494930267334],["rány",-13.034953117370604],["▁безбедност",-13.034987449645996],["▁korkea",-13.03500747680664],["ует",-13.03501033782959],["短期",-13.035021781921388],["sv",-13.035033226013184],["▁soda",-13.035048484802246],["▁යුතුය",-13.035111427307127],["▁готови",-13.035126686096191],["▁Vaikka",-13.035130500793455],["▁sluit",-13.035149574279783],["IRE",-13.035191535949709],["suunnitelma",-13.035202026367188],["gegeben",-13.0352144241333],["▁ጌታ",-13.03521728515625],["▁Khoa",-13.035239219665527],["דם",-13.035244941711426],["స్థ",-13.035297393798828],["ப்பட்டுள்ளது",-13.035310745239258],["▁rajono",-13.035310745239258],["▁empezar",-13.035323143005373],["tons",-13.035333633422852],["▁textil",-13.035333633422852],["▁objekti",-13.035360336303713],["▁годин",-13.035367012023926],["trekken",-13.035380363464355],["vini",-13.035393714904783],["▁Jär",-13.03539752960205],["commerce",-13.035420417785645],["▁paci",-13.035423278808594],["▁همسر",-13.035426139831545],["▁každé",-13.035460472106934],["▁godzin",-13.0354642868042],["▁poleg",-13.0354642868042],["ιχ",-13.035470962524414],["▁risa",-13.035486221313477],["уру",-13.035487174987791],["▁stea",-13.035521507263184],["shni",-13.035528182983398],["८",-13.035544395446776],["▁Ý",-13.03554630279541],["▁sonst",-13.035552978515623],["rež",-13.035565376281738],["▁gjy",-13.035566329956056],["▁sklo",-13.035581588745115],["▁Bent",-13.03559684753418],["▁láb",-13.03559684753418],["સુ",-13.03562068939209],["▁ადგილი",-13.035635948181152],["▁និយាយ",-13.035640716552734],["УУ",-13.035658836364746],["▁rør",-13.035675048828123],["▁موتور",-13.035690307617188],["危害",-13.0357027053833],["▁αλ",-13.03571605682373],["ດ້ານ",-13.035736083984377],["營養",-13.035737037658691],["▁postal",-13.035737991333008],["▁erklärt",-13.035751342773438],["▁जवाब",-13.035751342773438],["▁geeignet",-13.035752296447754],["▁հենց",-13.035755157470703],["▁ცოტა",-13.035757064819336],["▁בדיוק",-13.035760879516602],["▁چاہتے",-13.035761833190918],["▁लड़की",-13.035767555236816],["▁არაფერი",-13.035770416259766],["троп",-13.035778999328612],["▁Näin",-13.035794258117676],["▁xếp",-13.03581714630127],["▁해야",-13.035829544067385],["▁medita",-13.035836219787598],["▁компаний",-13.035837173461914],["▁singular",-13.035846710205078],["▁każdej",-13.035
88581085205],["▁שטי",-13.035888671875],["▁պահպան",-13.03589916229248],["态",-13.035914421081545],["提交",-13.03592300415039],["▁gle",-13.035933494567873],["▁борба",-13.035958290100098],["γρα",-13.035971641540527],["▁zama",-13.03597354888916],["▁Айт",-13.035980224609377],["▁zlep",-13.035987854003906],["▁зарплат",-13.03600788116455],["全身",-13.036009788513184],["▁Flug",-13.036014556884766],["länder",-13.03602409362793],["kwazi",-13.03604221343994],["▁tuki",-13.036055564880373],["▁various",-13.036063194274902],["數位",-13.036078453063965],["▁குறித்து",-13.036083221435549],["utada",-13.03608512878418],["▁վարչապետ",-13.036087989807127],["kep",-13.036128044128418],["▁олар",-13.036128997802734],["▁جنس",-13.036139488220217],["fører",-13.036157608032228],["ೂರು",-13.036158561706545],["ланып",-13.036161422729492],["riam",-13.036169052124023],["פג",-13.036176681518556],["ницата",-13.036179542541504],["▁Օր",-13.036179542541504],["କ୍ତ",-13.036188125610352],["▁Mert",-13.036189079284668],["▁bilgileri",-13.036192893981934],["▁sögu",-13.03620147705078],["▁ode",-13.036215782165527],["▁konkurenci",-13.03624439239502],["▁taşı",-13.036260604858398],["▁lão",-13.036274909973145],["▁yig",-13.036295890808104],["▁merita",-13.036299705505373],["▁Hoje",-13.036311149597168],["തില്",-13.036346435546877],["gav",-13.036361694335938],["▁Molt",-13.036412239074709],["▁تابع",-13.036419868469238],["▁konnten",-13.03643798828125],["לן",-13.036458969116213],["ացած",-13.036462783813477],["▁izvaja",-13.036473274230955],["иди",-13.036481857299805],["үүдө",-13.036495208740234],["▁menyi",-13.036505699157717],["▁wahr",-13.036511421203612],["▁დავა",-13.036521911621094],["▁ունեն",-13.036524772644045],["▁מאַ",-13.03653049468994],["▁ფერ",-13.03653335571289],["Jan",-13.036535263061523],["▁dữ",-13.036571502685549],["share",-13.036641120910645],["▁Ahora",-13.03664207458496],["▁metodo",-13.03665542602539],["्ल",-13.036673545837402],["posi",-13.036698341369627],["dək",-13.03671169281006],["▁teplot",-13.03671169281006],["▁heit",-13.03672218322754],["▁geldiği",-13.036724090576172],["ाए",-13.036738395690918],["▁ear",-13.036752700805664],["▁institucional",-13.036752700805664],["▁brukar",-13.036755561828612],["▁rivojlantirish",-13.03676700592041],["jeri",-13.036786079406738],["tjies",-13.036800384521484],["流量",-13.036839485168455],["擦",-13.036855697631836],["жува",-13.036866188049316],["▁Εί",-13.036894798278809],["▁divendres",-13.036920547485352],["▁gràcies",-13.036920547485352],["▁сэтгэгдэл",-13.036920547485352],["▁AH",-13.036921501159668],["▁330",-13.036930084228516],["Markaziy",-13.036932945251465],["▁catalana",-13.036934852600098],["▁sorpresa",-13.036938667297363],["ໄວ້",-13.036941528320312],["▁humb",-13.036946296691896],["いき",-13.03695011138916],["▁појави",-13.036970138549805],["▁modalità",-13.03697109222412],["level",-13.036972045898438],["gyű",-13.036974906921388],["▁Brother",-13.036985397338867],["▁luglio",-13.03700351715088],["▁dhaw",-13.037005424499512],["ឆ្",-13.037007331848145],["▁болушу",-13.037009239196776],["ອກ",-13.037017822265623],["▁مول",-13.037022590637209],["▁vuo",-13.037028312683104],["评论",-13.037028312683104],["▁өзінің",-13.037044525146484],["▁Ugu",-13.037055015563965],["ગે",-13.037075996398926],["ნოს",-13.037078857421877],["▁sodat",-13.037096977233888],["▁Mô",-13.037131309509276],["▁uppgifter",-13.037139892578123],["▁мач",-13.037147521972656],["▁होतो",-13.037174224853516],["மன்",-13.03721046447754],["▁vobis",-13.037216186523438],["▁المصرية",-13.03721809387207],["برا",-13.03723430633545],["աչ",-13.037235260009766],["▁Hind",-13.0372524
2614746],["▁ath",-13.03729248046875],["▁இருக்கு",-13.037310600280762],["▁antik",-13.037323951721191],["▁Odd",-13.037345886230469],["▁Voice",-13.037354469299316],["▁ważne",-13.037378311157228],["עזר",-13.037396430969238],["لوب",-13.037396430969238],["▁forstå",-13.037423133850098],["▁Giải",-13.037429809570312],["まる",-13.03744125366211],["ገኙ",-13.037442207336426],["▁metr",-13.037450790405272],["▁песни",-13.03745460510254],["▁Siden",-13.037484169006348],["▁treffen",-13.03750228881836],["▁kecuali",-13.037504196166992],["▁ព្រ",-13.03750705718994],["▁सफलता",-13.037538528442385],["▁geçmiş",-13.037549018859863],["ឡា",-13.037553787231444],["▁എന്നും",-13.03758430480957],["▁label",-13.037589073181152],["ιρ",-13.037605285644531],["ბენ",-13.037607192993164],["▁બિ",-13.037607192993164],["▁мысли",-13.037617683410645],["প্",-13.037620544433594],["▁prog",-13.037630081176758],["▁ומה",-13.037630081176758],["graaf",-13.037663459777832],["▁أنت",-13.03766918182373],["zny",-13.037679672241213],["▁arva",-13.037692070007324],["▁ooit",-13.03770637512207],["ikon",-13.037708282470703],["чај",-13.037717819213867],["γκα",-13.037718772888184],["ovy",-13.037739753723145],["▁មុខ",-13.03774356842041],["▁ทาง",-13.037749290466309],["▁ಬಾರಿ",-13.037753105163574],["arts",-13.037775039672852],["▁Пот",-13.037790298461914],["▁merasakan",-13.037795066833496],["lawan",-13.037797927856444],["LIA",-13.037842750549316],["▁розных",-13.03785800933838],["otan",-13.037872314453123],["▁Pán",-13.037879943847656],["เฉ",-13.03789520263672],["स्ती",-13.03791618347168],["ndr",-13.037973403930664],["▁воду",-13.037981033325195],["▁perda",-13.038010597229004],["壞",-13.038020133972168],["Est",-13.03803539276123],["谢",-13.038045883178713],["清晰",-13.03805446624756],["寧",-13.038066864013672],["appel",-13.038073539733888],["▁Behandlung",-13.038091659545898],["▁ակտիվ",-13.038091659545898],["▁տնօրեն",-13.038091659545898],["▁موبایل",-13.038091659545898],["▁ರಸ್ತೆ",-13.038092613220217],["▁Österreich",-13.038093566894531],["▁الحصول",-13.038105010986328],["တို႕",-13.03810691833496],["▁noviembre",-13.03810691833496],["▁yanlış",-13.038108825683594],["Les",-13.038114547729492],["▁cabeça",-13.038118362426758],["▁sometimes",-13.038121223449709],["QUI",-13.038126945495604],["казать",-13.038132667541504],["▁Credit",-13.038143157958984],["▁költség",-13.0381498336792],["▁криминал",-13.03816032409668],["▁.......",-13.038171768188477],["▁nesi",-13.038174629211426],["▁nikoli",-13.038188934326172],["▁pieno",-13.038227081298828],["داخل",-13.038265228271484],["되었습니다",-13.038278579711914],["ترجم",-13.03834629058838],["▁розгляд",-13.038386344909668],["သမား",-13.038408279418944],["▁negoci",-13.038410186767578],["▁itself",-13.038414001464844],["▁Serbi",-13.03842067718506],["▁parties",-13.038437843322754],["4000",-13.038450241088867],["▁ары",-13.038455963134766],["gyár",-13.038471221923828],["▁punktu",-13.03847599029541],["▁стаў",-13.03848361968994],["月に",-13.038488388061523],["▁economia",-13.03848934173584],["▁edição",-13.038503646850586],["▁केसी",-13.038516998291016],["liš",-13.038530349731444],["▁கார்",-13.038535118103027],["▁varandra",-13.038540840148926],["▁slogan",-13.038541793823242],["▁राहत",-13.03855323791504],["UKA",-13.038555145263672],["▁tuoi",-13.038591384887695],["eff",-13.038609504699709],["▁ਹਾ",-13.038614273071287],["త్వ",-13.038619995117188],["ставить",-13.038651466369627],["െടുത്ത",-13.038654327392578],["ваг",-13.03865909576416],["▁jeigu",-13.038662910461426],["ledig",-13.038674354553224],["▁Зоран",-13.038687705993652],["▁swa",-13.038695335388184],["▁આવ",-13.03871
1547851562],["реч",-13.038735389709473],["lami",-13.038792610168455],["▁aberto",-13.038814544677734],["▁register",-13.038891792297363],["ब्र",-13.038900375366213],["fotograf",-13.038910865783691],["manı",-13.038911819458008],["ലീ",-13.038918495178224],["грађ",-13.0389404296875],["▁benefit",-13.03894329071045],["▁ostale",-13.038949966430664],["äär",-13.038969993591309],["weyn",-13.038979530334473],["ធំ",-13.038999557495115],["▁المركز",-13.038999557495115],["ਪੂ",-13.03901195526123],["aldus",-13.039013862609863],["▁колу",-13.039026260375977],["▁ويندي",-13.039030075073242],["▁niistä",-13.039109230041504],["▁Unió",-13.039114952087402],["abilecek",-13.039144515991213],["▁सै",-13.03916072845459],["▁کریں۔",-13.039190292358398],["▁গণ",-13.039254188537598],["-2020",-13.03925609588623],["▁biên",-13.03925895690918],["mót",-13.039259910583496],["Ť",-13.039263725280762],["▁především",-13.039263725280762],["▁Уладзімір",-13.039263725280762],["▁будинку",-13.039265632629396],["▁рынке",-13.039267539978027],["▁उपक्रम",-13.039280891418455],["▁vsebuje",-13.03928565979004],["เพศ",-13.039288520812988],["иски",-13.039331436157228],["staje",-13.039344787597656],["▁168",-13.039353370666504],["ቀት",-13.039355278015137],["ಭಾ",-13.039361000061035],["▁obert",-13.039363861083984],["▁nguvu",-13.039365768432615],["2.1",-13.039373397827148],["基于",-13.039380073547363],["ιστικό",-13.039385795593262],["▁სამი",-13.039386749267578],["▁نکال",-13.039395332336426],["κράτη",-13.039399147033691],["▁147",-13.03940200805664],["▁Diyos",-13.039406776428224],["haram",-13.039410591125488],["▁бардык",-13.03941535949707],["▁тялото",-13.03941822052002],["▁çê",-13.039426803588867],["▁verkoop",-13.039462089538574],["▁fête",-13.039485931396484],["▁ยา",-13.03948974609375],["▁tutela",-13.0394926071167],["iešu",-13.039493560791016],["▁বিষয়ে",-13.039496421813965],["gestellt",-13.039499282836914],["▁светски",-13.039501190185549],["▁Tổ",-13.039538383483888],["▁skryf",-13.039551734924316],["yuk",-13.039556503295898],["rů",-13.039557456970217],["domin",-13.039595603942873],["夏天",-13.03960132598877],["лені",-13.039608001708984],["▁rychle",-13.03964138031006],["ົງ",-13.039653778076172],["დებ",-13.03965950012207],["dnik",-13.039666175842283],["▁fotó",-13.039669036865234],["▁koht",-13.039676666259766],["ρρ",-13.039692878723145],["ພິ",-13.03969955444336],["▁жашоо",-13.039701461791992],["auskas",-13.039714813232422],["hlás",-13.039722442626951],["වර්",-13.039738655090332],["ования",-13.039785385131836],["▁aniq",-13.039794921875],["▁عاما",-13.039794921875],["▁Ordin",-13.03980541229248],["▁kır",-13.039828300476074],["TING",-13.03983211517334],["▁ქართულად",-13.039840698242188],["voy",-13.039875984191896],["irse",-13.03988265991211],["▁فان",-13.039888381958008],["▁ribu",-13.03989028930664],["▁سعی",-13.039896965026855],["▁ബ്ലോഗ",-13.039901733398438],["▁smuk",-13.03990364074707],["сним",-13.039904594421388],["▁Shak",-13.039912223815918],["ीमा",-13.039931297302246],["▁ms",-13.039941787719728],["rzą",-13.039952278137209],["râ",-13.03996467590332],["▁နဲ့",-13.039966583251951],["▁сөйле",-13.040006637573242],["▁maniera",-13.040008544921877],["പു",-13.040010452270508],["▁BL",-13.040010452270508],["▁Yen",-13.040014266967772],["▁əmək",-13.040027618408203],["▁יודע",-13.040054321289062],["▁খান",-13.040057182312012],["▁genuen",-13.040082931518556],["تىش",-13.040122985839844],["tiivis",-13.040125846862791],["▁ਮੋ",-13.040132522583008],["ბით",-13.040133476257324],["▁thug",-13.040143013000488],["▁договору",-13.04018497467041],["▁sprog",-13.04019260406494],["hitung",-13.04021072387
6951],["љење",-13.040227890014648],["▁Question",-13.04024887084961],["▁accepte",-13.040254592895508],["▁판",-13.04025650024414],["▁Yaz",-13.040264129638672],["▁მოგ",-13.040266036987305],["▁aşağı",-13.040267944335938],["▁대상",-13.040279388427734],["▁Witam",-13.040297508239746],["▁බී",-13.040300369262695],["grupp",-13.040318489074709],["援",-13.04033374786377],["ински",-13.040353775024414],["вица",-13.04038429260254],["楊",-13.04042148590088],["ເລືອກ",-13.04043674468994],["▁szczególnie",-13.040437698364258],["▁хуудас",-13.040437698364258],["▁ತಂತ್ರಜ್ಞಾನ",-13.040437698364258],["▁عناصر",-13.040438652038574],["▁존재",-13.040438652038574],["▁ontmoet",-13.040440559387209],["▁تلگرام",-13.04044246673584],["わけ",-13.04045581817627],["▁slimming",-13.04045867919922],["fällig",-13.040459632873535],["▁بھائی",-13.040462493896484],["arbejde",-13.0404691696167],["elni",-13.040472984313965],["▁makkelijk",-13.040477752685549],["▁पाणी",-13.040485382080078],["▁اتاق",-13.040487289428713],["▁лад",-13.040489196777344],["▁ಅವರನ್ನು",-13.040504455566406],["▁egyetlen",-13.040521621704102],["apu",-13.040534973144531],["▁Collection",-13.040534973144531],["▁alternatif",-13.040549278259276],["▁फि",-13.040582656860352],["▁Кей",-13.040586471557615],["享",-13.040589332580566],["▁الأطفال",-13.04059886932373],["▁fach",-13.04060173034668],["laşması",-13.040624618530272],["▁рівні",-13.040627479553224],["schä",-13.040632247924805],["ಿರಿ",-13.04063606262207],["▁degan",-13.040647506713867],["▁custo",-13.040650367736816],["▁hótel",-13.040650367736816],["がありました",-13.040658950805664],["▁२०१८",-13.040663719177246],["▁capita",-13.040668487548828],["▁Olymp",-13.040677070617676],["barkan",-13.040685653686523],["ічний",-13.040688514709473],["▁อย่าง",-13.040712356567385],["▁soek",-13.040820121765137],["ादि",-13.040863990783691],["▁അല്ല",-13.040868759155272],["Эрдэнэ",-13.040876388549805],["teita",-13.04088020324707],["▁instagram",-13.040908813476562],["▁Aiz",-13.040922164916992],["▁סדר",-13.04092788696289],["ላይ",-13.04093074798584],["▁பிறந்த",-13.040958404541016],["▁بنایا",-13.040963172912598],["▁[1",-13.040975570678713],["▁ment",-13.041007995605469],["ていく",-13.041017532348633],["▁ruolo",-13.041024208068848],["▁stolt",-13.041024208068848],["▁trabalha",-13.041035652160645],["્રો",-13.041056632995604],["▁metall",-13.041058540344238],["▁targ",-13.041088104248049],["ஏ",-13.041120529174805],["▁ploch",-13.041145324707031],["σκο",-13.041155815124512],["▁Konkurs",-13.04115867614746],["▁പട",-13.04116153717041],["▁만들",-13.041194915771484],["attività",-13.041196823120115],["▁فراہم",-13.041196823120115],["熟悉",-13.041208267211914],["ənin",-13.041239738464355],["▁певн",-13.041276931762695],["不错",-13.041295051574709],["▁מאד",-13.041296005249023],["▁ações",-13.041330337524414],["ພາສາ",-13.041348457336426],["ыбыз",-13.041380882263184],["ሰበ",-13.041401863098145],["▁সম্পর্কে",-13.041412353515623],["жать",-13.041423797607422],["▁bevor",-13.041441917419434],["våg",-13.04147243499756],["անձ",-13.04147243499756],["▁작",-13.041476249694824],["shër",-13.04148006439209],["▁čist",-13.041481971740724],["játék",-13.041489601135254],["חוק",-13.041498184204102],["शू",-13.041534423828123],["▁նախագահի",-13.041536331176758],["队伍",-13.041548728942873],["ares",-13.04155445098877],["DAG",-13.041563034057615],["▁sırada",-13.04157257080078],["ljene",-13.04157543182373],["седа",-13.041580200195312],["▁dép",-13.041605949401855],["แฟชั่น",-13.04161262512207],["ထွက်",-13.04161262512207],["▁László",-13.04161262512207],["▁cittadini",-13.04161262512207],["▁পর্যন্ত",-13.04161262512207],["▁රනිල්",-
13.04161262512207],["▁ભાજપ",-13.041613578796388],["▁verre",-13.04161548614502],["▁Square",-13.041617393493652],["▁ආහාර",-13.041617393493652],["同樣",-13.041618347167969],["▁etap",-13.041622161865234],["▁Workshop",-13.041634559631348],["льны",-13.04163932800293],["▁Machine",-13.041643142700195],["▁Xuân",-13.04165267944336],["איז",-13.04173183441162],["kû",-13.041738510131836],["▁satte",-13.041741371154783],["▁příjem",-13.041749954223633],["▁ներկայացուցիչ",-13.041753768920898],["▁Monday",-13.04176425933838],["पट",-13.041770935058594],["▁Desc",-13.04177951812744],["neo",-13.041790962219238],["gę",-13.041808128356934],["▁التواصل",-13.041810989379885],["▁עבר",-13.04185962677002],["ማይ",-13.041860580444336],["▁Bett",-13.041878700256348],["▁Gunnar",-13.041889190673828],["▁ട്ടി",-13.041897773742676],["▁mees",-13.041899681091309],["▁zip",-13.041913986206056],["▁ube",-13.041916847229004],["thri",-13.041924476623535],["وڙ",-13.041925430297852],["venue",-13.041949272155762],["▁Liidu",-13.041953086853027],["hoo",-13.04196071624756],["▁estable",-13.041961669921877],["▁jiray",-13.041970252990724],["▁prečo",-13.041976928710938],["फे",-13.041984558105469],["SEL",-13.042037010192873],["ымі",-13.042059898376465],["nadh",-13.042064666748049],["▁yüzden",-13.042091369628906],["▁digi",-13.04210376739502],["▁Sham",-13.042107582092283],["kind",-13.042170524597168],["▁Cei",-13.04218864440918],["shem",-13.042192459106444],["▁játsz",-13.042192459106444],["ዝም",-13.042206764221191],["▁nail",-13.042207717895508],["▁וכל",-13.042230606079102],["wki",-13.042250633239746],["▁бок",-13.042251586914062],["Jaa",-13.042253494262695],["يتها",-13.042264938354492],["ადი",-13.0423002243042],["▁zmienia",-13.042343139648438],["▁india",-13.042350769042969],["pū",-13.042353630065918],["▁efecte",-13.042359352111816],["▁prit",-13.042384147644045],["▁vyko",-13.04240894317627],["hail",-13.042413711547852],["nią",-13.0424165725708],["▁obtener",-13.042418479919434],["잡",-13.042426109313965],["▁gənc",-13.042447090148926],["▁سینما",-13.042450904846191],["▁suivi",-13.042468070983888],["▁ժամը",-13.042475700378418],["▁TË",-13.042481422424316],["písal",-13.042500495910645],["▁joiden",-13.042506217956545],["რმა",-13.042525291442873],["hele",-13.042527198791504],["▁sair",-13.042557716369627],["▁letech",-13.042637825012209],["נוס",-13.042652130126951],["Hotel",-13.042654991149902],["▁Quand",-13.04267120361328],["vole",-13.04267692565918],["khon",-13.042678833007812],["▁ଦେଶ",-13.042691230773926],["庆",-13.042710304260254],["活力",-13.04271411895752],["▁178",-13.04274559020996],["▁गठन",-13.042752265930176],["多元",-13.042757987976074],["店舗",-13.042762756347656],["泽",-13.042764663696287],["▁යන්තර",-13.042789459228516],["▁සාමාන්",-13.042789459228516],["▁nemám",-13.042805671691896],["▁čemu",-13.04281234741211],["kumu",-13.042835235595703],["شاه",-13.042840003967283],["▁êrîş",-13.042901992797852],["▁оправда",-13.0429048538208],["IRI",-13.042914390563965],["▁станови",-13.042963027954102],["▁okay",-13.042969703674316],["▁mõtle",-13.042973518371582],["пери",-13.04299259185791],["▁Эти",-13.043010711669922],["manız",-13.043021202087402],["医療",-13.043028831481934],["zono",-13.043036460876465],["ули",-13.043047904968262],["範圍",-13.043048858642578],["▁legate",-13.04307460784912],["skud",-13.04312801361084],["深度",-13.043155670166016],["▁поведение",-13.043207168579102],["▁RH",-13.043211936950684],["▁نالو",-13.04322338104248],["▁عظیم",-13.043237686157228],["▁нормал",-13.043248176574709],["▁2010-",-13.043258666992188],["ೊಂದಿಗೆ",-13.043272972106934],["िव",-13.043283462524414],["
▁જવાબ",-13.043289184570312],["▁रू",-13.043301582336426],["awang",-13.043317794799805],["явления",-13.043320655822754],["▁myśl",-13.043329238891602],["shauri",-13.043349266052246],["▁aucune",-13.04337215423584],["ttanut",-13.043392181396484],["zioak",-13.04339599609375],["▁tentunya",-13.043404579162598],["▁5.1",-13.043410301208496],["редно",-13.043461799621582],["▁ხმა",-13.043462753295898],["წუ",-13.043465614318848],["▁ám",-13.04346752166748],["עין",-13.043506622314451],["▁склон",-13.043511390686035],["▁keegi",-13.043524742126465],["NIS",-13.043529510498049],["desse",-13.043534278869627],["▁NC",-13.04354190826416],["▁Као",-13.043571472167969],["▁ດ້ວຍ",-13.043577194213867],["▁espazo",-13.043601036071776],["ôi",-13.043672561645508],["▁tedavi",-13.043681144714355],["lauk",-13.04368782043457],["▁termékek",-13.043691635131836],["▁advent",-13.043693542480469],["١",-13.043706893920898],["▁обществен",-13.043710708618164],["éz",-13.043720245361328],["ګو",-13.043729782104492],["▁viza",-13.04373836517334],["▁ಹೇಳಿ",-13.043740272521973],["harc",-13.043742179870604],["▁obchodní",-13.04375457763672],["▁vrati",-13.043758392333984],["▁Ца",-13.043785095214844],["ئیں",-13.043790817260742],["▁попу",-13.043797492980955],["▁bayram",-13.043882369995115],["laska",-13.04388427734375],["tzean",-13.0438871383667],["▁Jaz",-13.0438871383667],["өгө",-13.043898582458496],["単",-13.043916702270508],["▁sürdür",-13.043917655944824],["ตํารวจ",-13.04396629333496],["▁tầm",-13.043967247009276],["▁పెళ్లి",-13.043967247009276],["▁μέτρα",-13.043971061706545],["▁Kelantan",-13.043973922729492],["▁asosiy",-13.043978691101074],["▁მინდა",-13.043988227844238],["▁ମାତ୍ର",-13.043989181518556],["▁موثر",-13.04399871826172],["iúla",-13.044022560119627],["▁condições",-13.044022560119627],["留言",-13.044024467468262],["чения",-13.044026374816896],["▁آشنا",-13.04405117034912],["lösa",-13.044055938720703],["▁перший",-13.044055938720703],["▁назначен",-13.044069290161133],["▁чечим",-13.044086456298828],["ையும்",-13.044108390808104],["マイ",-13.0441255569458],["▁இயக்க",-13.044137954711914],["▁उसने",-13.04414176940918],["▁κάποιο",-13.044150352478027],["▁چوڻ",-13.044150352478027],["رغم",-13.044157028198242],["▁орында",-13.044158935546877],["ථ",-13.04416275024414],["▁διε",-13.044169425964355],["cilik",-13.04421329498291],["▁grua",-13.04421329498291],["▁značaj",-13.044236183166504],["уудыг",-13.044245719909668],["▁remaja",-13.044254302978516],["▁Metod",-13.044273376464844],["भय",-13.04429531097412],["ங்கா",-13.04429817199707],["ước",-13.044317245483398],["▁فوری",-13.044333457946776],["▁சூ",-13.04433536529541],["culum",-13.044346809387209],["рски",-13.044353485107422],["ที่อยู่",-13.044353485107422],["▁مغ",-13.044354438781738],["▁défini",-13.044357299804688],["rzeć",-13.044361114501951],["▁Инди",-13.044398307800291],["டல்",-13.044443130493164],["57)",-13.044485092163086],["▁recordar",-13.04448699951172],["▁поправ",-13.044515609741213],["▁quas",-13.04455280303955],["▁boga",-13.044562339782717],["▁Cred",-13.044564247131348],["▁lind",-13.044577598571776],["▁សា",-13.04458236694336],["▁imagini",-13.044604301452637],["▁susret",-13.044612884521484],["▁добры",-13.0446195602417],["▁çocuklar",-13.04462718963623],["наў",-13.04465389251709],["▁pasirink",-13.044663429260254],["▁Miten",-13.044672966003418],["▁þjónustu",-13.04467487335205],["▁bahs",-13.044715881347656],["Ż",-13.044721603393556],["的手",-13.044730186462402],["mino",-13.0447416305542],["JR",-13.044745445251465],["руд",-13.044760704040527],["▁mindent",-13.044817924499512],["▁efecto",-13.04482078552246],["▁אד",-13
.04483699798584],["хий",-13.04484748840332],["▁rive",-13.044866561889648],["bitur",-13.044878005981444],["▁Servi",-13.044878959655762],["でき",-13.044888496398926],["▁құқық",-13.04489040374756],["▁volas",-13.044893264770508],["רום",-13.044898986816406],["Ste",-13.044902801513672],["ස්ට",-13.044940948486328],["ਭਾ",-13.044961929321287],["अनुसार",-13.044984817504885],["▁જરૂર",-13.044998168945312],["▁Уз",-13.045000076293944],["▁bind",-13.04501247406006],["אָן",-13.045018196105955],["ností",-13.045022010803224],["▁ਛ",-13.045042991638184],["אפ",-13.045053482055664],["▁Utama",-13.045071601867676],["▁imperi",-13.045085906982422],["▁గారు",-13.045106887817385],["莎",-13.045106887817385],["גרי",-13.04511833190918],["lustra",-13.045125007629396],["▁കമ്പനി",-13.045145988464355],["▁Llywodraeth",-13.045146942138672],["▁Venezuela",-13.045146942138672],["▁தேசிய",-13.045151710510254],["▁потер",-13.045154571533203],["成都",-13.045170783996582],["▁शकता",-13.045177459716797],["hilfe",-13.045194625854492],["▁собственности",-13.045198440551758],["ຖານ",-13.045201301574709],["घर",-13.04520320892334],["▁Reh",-13.045204162597656],["กู",-13.045225143432615],["رفع",-13.04522705078125],["▁Георги",-13.045228004455566],["▁Mondial",-13.045230865478516],["▁ช่วย",-13.045247077941896],["團體",-13.04525089263916],["вежда",-13.045262336730955],["▁quadra",-13.045268058776855],["▁વાર",-13.045286178588867],["▁Út",-13.045305252075195],["▁wah",-13.04531478881836],["▁Välj",-13.045317649841309],["▁lietot",-13.045370101928713],["▁لارې",-13.045371055603027],["▁žene",-13.045372009277344],["▁esita",-13.04537296295166],["ûd",-13.045376777648926],["▁osobních",-13.045382499694824],["lóð",-13.04541301727295],["▁мис",-13.045413970947266],["▁gasit",-13.04543113708496],["▁jenë",-13.045438766479492],["čnosti",-13.04544162750244],["тврд",-13.045467376708984],["▁актов",-13.045475959777832],["▁земля",-13.04551601409912],["▁відвід",-13.045546531677246],["mika",-13.045578002929688],["erei",-13.045583724975586],["▁Вели",-13.04559326171875],["уємо",-13.045597076416016],["▁охра",-13.045631408691406],["▁ederim",-13.045646667480469],["▁Beri",-13.04565715789795],["▁واقعا",-13.045702934265137],["▁predlog",-13.045705795288086],["losti",-13.045730590820312],["▁cür",-13.045742988586426],["▁විදිහට",-13.045758247375488],["▁değiştir",-13.04577922821045],["ΤΟ",-13.04579257965088],["eerimis",-13.045815467834473],["▁شهری",-13.045817375183104],["▁ዳ",-13.045817375183104],["्यते",-13.04582405090332],["▁često",-13.04582405090332],["▁tapo",-13.04583740234375],["▁viimase",-13.045854568481444],["плате",-13.045881271362305],["zum",-13.045904159545898],["▁resum",-13.04590892791748],["▁thao",-13.045941352844238],["▁רוצים",-13.04594898223877],["ወቅ",-13.045985221862791],["▁ape",-13.045989990234377],["▁silma",-13.046003341674805],["▁rehabilita",-13.046051025390623],["أخذ",-13.046053886413574],["cim",-13.046114921569824],["▁berriz",-13.04611873626709],["▁célja",-13.04611873626709],["ବାଦ",-13.046130180358888],["▁ходе",-13.046192169189451],["punta",-13.046201705932615],["หอม",-13.046213150024414],["▁ਵਾ",-13.046218872070312],["stede",-13.046232223510742],["▁letra",-13.046246528625488],["▁ఎన్నికల",-13.046289443969728],["▁idiot",-13.046290397644045],["ୈ",-13.046307563781738],["▁oldalon",-13.04631233215332],["▁conferi",-13.046316146850586],["อาจารย์",-13.04632568359375],["▁Państwo",-13.046327590942385],["▁aurkeztu",-13.046327590942385],["▁zboží",-13.046327590942385],["▁गिरफ्तार",-13.046327590942385],["▁ଯୋଜନା",-13.046327590942385],["▁జాతీయ",-13.046327590942385],["▁ඉහළ",-13.046327590942385],["▁m
áxima",-13.0463285446167],["▁Còn",-13.046335220336914],["▁ambient",-13.046343803405762],["▁prawie",-13.046343803405762],["TIM",-13.04635238647461],["▁Gaza",-13.046354293823242],["性和",-13.046354293823242],["скр",-13.046360969543455],["▁editorial",-13.046392440795898],["▁etilgan",-13.046393394470217],["▁possibilidade",-13.04642105102539],["▁тварин",-13.046422958374023],["▁wifi",-13.046426773071287],["▁Stories",-13.046439170837402],["▁தொ",-13.046445846557615],["▁ഫോ",-13.046469688415527],["▁toisen",-13.046483039855955],["เอส",-13.046494483947754],["▁Dali",-13.04649829864502],["▁täit",-13.046513557434082],["ಕೊಂಡು",-13.046539306640623],["▁중요한",-13.046555519104004],["▁Ghaeilge",-13.046570777893066],["கன்",-13.0465726852417],["ทราบ",-13.04658031463623],["ไม่เคย",-13.046588897705078],["▁Zapraszamy",-13.046598434448242],["свя",-13.046612739562988],["тк",-13.046615600585938],["▁vitt",-13.046616554260254],["මාන",-13.046625137329102],["▁ומת",-13.046642303466797],["▁Átha",-13.046652793884276],["πον",-13.04666233062744],["▁Kepada",-13.046666145324709],["cidas",-13.046669006347656],["רב",-13.046670913696287],["▁مالک",-13.046674728393556],["جاج",-13.046688079833984],["▁नम",-13.04670524597168],["▁baahan",-13.046733856201172],["▁jedes",-13.04677391052246],["▁Haberleri",-13.046775817871094],["...).",-13.046780586242676],["nita",-13.046783447265623],["ද්දි",-13.0468111038208],["▁αποδ",-13.0468168258667],["Ạ",-13.046820640563965],["vö",-13.04682159423828],["ைய",-13.046850204467772],["ізм",-13.046855926513672],["िरहेको",-13.046903610229492],["селен",-13.046904563903809],["[4]",-13.046923637390137],["ដែលមាន",-13.046953201293944],["▁prendere",-13.047009468078612],["▁lõpp",-13.047021865844728],["▁Tower",-13.047032356262209],["▁Яна",-13.047089576721191],["▁κιν",-13.04710292816162],["άσει",-13.047148704528809],["greg",-13.047157287597656],["ኢ",-13.047176361083984],["រ៉ា",-13.04718017578125],["ර්ශ",-13.047240257263184],["quote",-13.047242164611816],["कडून",-13.047243118286133],["▁dyna",-13.047268867492676],["meklē",-13.047271728515623],["▁फो",-13.047282218933104],["▁lente",-13.047306060791016],["От",-13.047346115112305],["мести",-13.047355651855469],["▁banke",-13.047386169433594],["käs",-13.047399520874023],["tikai",-13.047417640686035],["коль",-13.047422409057615],["不出",-13.0474271774292],["ezett",-13.047428131103516],["裡面",-13.047428131103516],["jaran",-13.047459602355955],["▁ефекти",-13.047475814819336],["phan",-13.04748249053955],["▁Regen",-13.0474853515625],["▁홍",-13.047487258911133],["▁wouldn",-13.047497749328612],["▁所以",-13.047497749328612],["▁दूसरे",-13.04750919342041],["▁predsednik",-13.047511100769045],["▁Atatürk",-13.04751205444336],["▁ਕਈ",-13.047528266906738],["нием",-13.047557830810549],["▁өдрийн",-13.047560691833496],["mutat",-13.047562599182127],["तिर",-13.04758071899414],["▁представляет",-13.04759120941162],["▁ରୋଗ",-13.04759693145752],["▁ప్రస్తుతం",-13.047606468200684],["មី",-13.047607421875],["PUR",-13.047616004943848],["▁Skin",-13.047621726989746],["▁tribut",-13.04763126373291],["▁prins",-13.047645568847656],["adat",-13.047650337219238],["АШ",-13.047650337219238],["▁(2016)",-13.047650337219238],["▁گری",-13.047652244567873],["走向",-13.047682762145996],["▁adevărat",-13.047687530517578],["サイ",-13.047712326049805],["уй",-13.047714233398438],["▁giving",-13.047725677490234],["▁suurim",-13.047739028930664],["▁вещи",-13.047751426696776],["▁ڪيا",-13.04775333404541],["tarët",-13.047755241394045],["kedés",-13.047757148742676],["▁ਬਲ",-13.047781944274902],["tiri",-13.047809600830078],["рсан",-13.047825813293455],["
▁Yksi",-13.047837257385254],["▁certainly",-13.047846794128418],["▁۱۱",-13.047852516174316],["▁Kedah",-13.047857284545898],["SIL",-13.047865867614746],["▁aspetta",-13.047868728637695],["tilan",-13.04787826538086],["لون",-13.047886848449709],["▁поет",-13.047898292541504],["▁terecht",-13.047929763793944],["▁Sre",-13.047944068908691],["▁Ангел",-13.047993659973145],["▁opor",-13.048007011413574],["ieties",-13.048011779785156],["ออกไป",-13.048041343688965],["▁koriste",-13.048076629638672],["дығы",-13.048101425170898],["μία",-13.048105239868164],["▁دیگران",-13.048120498657228],["▁जेल",-13.048121452331545],["▁конкурсу",-13.048124313354492],["dva",-13.04813003540039],["ーション",-13.048136711120604],["▁Stock",-13.048147201538086],["avimą",-13.0481595993042],["▁motivos",-13.04819679260254],["▁round",-13.04820728302002],["tojai",-13.04821491241455],["ryt",-13.048222541809082],["得到了",-13.048243522644045],["sahan",-13.048301696777344],["▁novom",-13.048307418823242],["ыць",-13.048310279846191],["▁Biblia",-13.04832363128662],["▁Правила",-13.048338890075684],["人群",-13.04833984375],["კური",-13.048355102539062],["ops",-13.048367500305176],["uille",-13.048380851745604],["blan",-13.048384666442873],["pij",-13.048388481140137],["▁ICT",-13.048388481140137],["rail",-13.048402786254885],["ЖИ",-13.048468589782717],["▁Campo",-13.048473358154297],["ਹਰ",-13.048479080200195],["▁СП",-13.048481941223145],["▁journal",-13.048494338989258],["імді",-13.04853343963623],["ഷണ",-13.048542976379396],["▁concret",-13.048562049865724],["▁الاجتماعي",-13.048572540283203],["▁vijana",-13.048577308654783],["▁tryck",-13.048578262329102],["▁pokus",-13.048619270324709],["▁trwy",-13.048624038696287],["puram",-13.048625946044922],["逢",-13.04865837097168],["สนับสนุน",-13.048663139343262],["▁kuju",-13.048667907714844],["条例",-13.048687934875488],["▁immagini",-13.04869270324707],["▁września",-13.04869270324707],["▁तुम्हाला",-13.04869270324707],["▁ਸ਼ੁਰੂ",-13.04869270324707],["ႏုိင္ငံ",-13.048693656921388],["▁Ctrl",-13.048694610595703],["▁సీఎం",-13.048697471618652],["▁состоянии",-13.04870319366455],["前的",-13.048707008361816],["▁detalles",-13.048709869384766],["▁Desember",-13.048733711242676],["▁کدام",-13.048746109008787],["多数",-13.048765182495115],["▁Educação",-13.048772811889648],["ரன்",-13.048773765563965],["▁maca",-13.048778533935549],["▁Ukrayna",-13.048784255981444],["▁xunto",-13.04880714416504],["іледі",-13.048826217651367],["λόγ",-13.048829078674316],["▁돌아",-13.04884910583496],["▁ചി",-13.04886531829834],["ቷ",-13.048869132995604],["नको",-13.048871994018556],["▁Warto",-13.048892974853516],["בני",-13.04893684387207],["▁lernen",-13.048948287963867],["▁ritual",-13.048954963684082],["▁دولتی",-13.048961639404297],["ιμο",-13.048964500427246],["▁decided",-13.04896640777588],["▁Фо",-13.048994064331056],["▁معنی",-13.049002647399902],["▁Nat",-13.04900360107422],["друк",-13.049005508422852],["ännö",-13.049015045166016],["▁Sempre",-13.049046516418455],["suit",-13.049063682556152],["ovými",-13.049066543579102],["▁resim",-13.049071311950684],["▁страници",-13.04909896850586],["▁marts",-13.049121856689451],["▁khususnya",-13.049127578735352],["▁Memang",-13.049128532409668],["▁Pati",-13.049131393432615],["▁Хотел",-13.049155235290527],["ушка",-13.049156188964844],["▁האר",-13.049180030822754],["จะไม่",-13.049193382263184],["▁הור",-13.049201011657717],["▁berte",-13.04920768737793],["دھر",-13.049219131469728],["▁political",-13.04922866821289],["▁ดี",-13.049234390258787],["চ্ছে",-13.04924774169922],["▁potensi",-13.049253463745115],["zame",-13.049272537231444],["lijn",-13.
049290657043455],["▁projektet",-13.04929542541504],["▁Mutter",-13.04930305480957],["▁перевод",-13.049322128295898],["ЕД",-13.049338340759276],["lix",-13.049345016479492],["יחה",-13.049399375915527],["▁maksā",-13.049432754516602],["行く",-13.049433708190918],["harap",-13.049447059631348],["Ї",-13.049466133117676],["Apa",-13.049484252929688],["▁skatt",-13.049503326416016],["▁Fotografi",-13.04953384399414],["чем",-13.049545288085938],["▁دیده",-13.04957389831543],["▁Adresse",-13.049578666687012],["▁வீடியோ",-13.049604415893556],["ਾਇਆ",-13.04963207244873],["attend",-13.049638748168944],["нно",-13.049650192260742],["සිය",-13.049667358398438],["▁видов",-13.049673080444336],["▁Semalt",-13.049680709838867],["badi",-13.049687385559082],["▁grob",-13.049697875976562],["▁rhaid",-13.04971408843994],["▁qil",-13.04971694946289],["ণ্ড",-13.049738883972168],["▁sezone",-13.049749374389648],["▁хрон",-13.04977035522461],["▁neap",-13.04979133605957],["▁باشه",-13.049802780151367],["কর",-13.04982566833496],["▁Hansen",-13.049826622009276],["浩",-13.049846649169922],["▁vlo",-13.049848556518556],["半年",-13.04985809326172],["旺",-13.049859046936035],["会上",-13.0498628616333],["訊息",-13.049863815307615],["▁-1",-13.049869537353516],["ριο",-13.04987621307373],["▁kehilangan",-13.049877166748049],["▁longtemps",-13.049877166748049],["▁lyrics",-13.049877166748049],["▁බොහොම",-13.049877166748049],["▁២០១៨",-13.04987907409668],["▁մյուս",-13.049883842468262],["▁مراحل",-13.049885749816896],["▁hêzên",-13.04991054534912],["▁қаржы",-13.04994010925293],["▁መሆን",-13.049941062927246],["▁자연",-13.049957275390623],["▁явно",-13.049965858459473],["▁पैसे",-13.049966812133787],["▁дэлхийн",-13.049981117248535],["▁나라",-13.049982070922852],["▁കളി",-13.04998779296875],["▁egészség",-13.050004005432127],["רז",-13.05002784729004],["ப்படுத்த",-13.050043106079102],["▁سراسر",-13.050068855285645],["▁הדין",-13.05006980895996],["NGO",-13.050089836120604],["▁токто",-13.050097465515137],["ေဒ",-13.050104141235352],["▁позиция",-13.050107955932615],["▁pytanie",-13.05010986328125],["歌曲",-13.050116539001465],["ときは",-13.050131797790527],["▁combate",-13.050148963928224],["ເວລາ",-13.05018138885498],["▁АН",-13.050192832946776],["▁Omni",-13.050212860107422],["straat",-13.050215721130373],["ўныя",-13.0502347946167],["маар",-13.050246238708496],["▁nejak",-13.050270080566406],["▁skri",-13.050281524658203],["▁susitik",-13.050288200378418],["ርስ",-13.05029296875],["▁Пам",-13.050322532653809],["centi",-13.05032730102539],["▁kalb",-13.050341606140137],["▁kutatás",-13.050368309020996],["▁Spur",-13.050390243530272],["▁Prem",-13.05039882659912],["▁caixa",-13.0504150390625],["▁păr",-13.050458908081056],["tukseen",-13.050480842590332],["▁dereitos",-13.05048370361328],["have",-13.050596237182615],["▁coco",-13.050630569458008],["▁කාලෙ",-13.050633430480955],["盟",-13.050670623779297],["▁Fakult",-13.050686836242676],["Real",-13.05069065093994],["ogaeth",-13.05069351196289],["beda",-13.05069637298584],["前面",-13.0507230758667],["▁ვუ",-13.050766944885254],["റ്",-13.05078411102295],["▁пациент",-13.05078411102295],["veda",-13.050796508789062],["▁behandel",-13.05081272125244],["समेत",-13.050814628601074],["इस",-13.05082893371582],["▁досвід",-13.05085277557373],["enky",-13.050902366638184],["▁אישי",-13.050921440124512],["▁IC",-13.050936698913574],["ුණා",-13.050982475280762],["▁uusia",-13.05098819732666],["▁liiga",-13.050997734069824],["ඉ",-13.05100154876709],["剪",-13.05102252960205],["▁шек",-13.051030158996582],["▁неї",-13.051037788391112],["暨",-13.051037788391112],["▁সেক্স",-13.05104637145996],["▁пре
доставя",-13.051064491271973],["鹽",-13.051064491271973],["셀",-13.051065444946287],["▁Hòa",-13.051076889038086],["▁մարդիկ",-13.05107879638672],["▁Đường",-13.051088333129885],["žā",-13.051090240478516],["▁mörk",-13.051106452941896],["ZEN",-13.051116943359377],["▁۳۰",-13.051165580749512],["▁Edge",-13.051192283630373],["ידי",-13.051194190979004],["▁متخصص",-13.0512113571167],["نات",-13.051229476928713],["▁iulie",-13.051230430603027],["puls",-13.051231384277344],["▁sistemul",-13.05123233795166],["▁Azərbaycana",-13.051236152648926],["▁palaut",-13.051236152648926],["ಹಿ",-13.051239967346191],["ยังไง",-13.051244735717772],["▁köy",-13.051265716552734],["Share",-13.051279067993164],["roso",-13.05128002166748],["▁Jum",-13.051281929016112],["的基本",-13.05128288269043],["▁respecte",-13.051292419433594],["▁fynd",-13.051305770874023],["▁MIN",-13.05130672454834],["▁membre",-13.051321029663086],["ائه",-13.051342964172363],["▁näha",-13.051355361938477],["新型",-13.051386833190918],["▁ភូមិ",-13.051395416259766],["дыгы",-13.051397323608398],["▁behin",-13.0514554977417],["ிருப்ப",-13.051459312438965],["▁বন্ধ",-13.051461219787598],["▁Lamp",-13.051471710205078],["▁коментира",-13.05147933959961],["▁ostat",-13.051483154296877],["▁کاربری",-13.051507949829102],["▁lles",-13.051508903503418],["ေး",-13.05151653289795],["▁offentlige",-13.051517486572266],["▁gluten",-13.051535606384276],["▁gumawa",-13.051544189453123],["▁mui",-13.05156707763672],["▁Wali",-13.051570892333984],["▁имена",-13.05158233642578],["mall",-13.051587104797363],["▁trump",-13.05160427093506],["21)",-13.051636695861816],["▁calidade",-13.051654815673828],["▁ljudje",-13.05167007446289],["▁مطلوب",-13.051677703857422],["rion",-13.0516996383667],["▁kiasi",-13.051709175109863],["▁њима",-13.051728248596191],["žit",-13.051733016967772],["psa",-13.051779747009276],["үз",-13.051801681518556],["▁VAN",-13.051816940307615],["▁singel",-13.051831245422363],["▁मिली",-13.051850318908691],["Ye",-13.051897048950195],["Ле",-13.05190086364746],["地上",-13.051924705505373],["овый",-13.05193042755127],["▁الأو",-13.051936149597168],["▁lortu",-13.051950454711914],["സിന്റെ",-13.052002906799316],["▁mlad",-13.052069664001465],["kaas",-13.052083969116213],["▁Jeden",-13.052087783813477],["▁vigtigt",-13.052101135253906],["פיק",-13.052143096923828],["天下",-13.052186965942385],["ences",-13.052189826965332],["▁sufri",-13.052190780639648],["område",-13.052233695983888],["▁compare",-13.052239418029783],["▁активност",-13.052244186401367],["▁συμμετοχή",-13.05224895477295],["๓",-13.052250862121582],["▁elmúlt",-13.052251815795898],["נץ",-13.052252769470217],["▁ดังนั้น",-13.052252769470217],["▁sıcak",-13.052253723144531],["▁fillim",-13.052255630493164],["▁որպեսզի",-13.05225658416748],["▁powstał",-13.052264213562012],["==",-13.05228328704834],["▁бийлик",-13.052292823791504],["▁kanker",-13.052295684814451],["▁тушаал",-13.052303314208984],["ห์",-13.052305221557615],["风格",-13.052305221557615],["▁azərbaycanlı",-13.052312850952148],["▁चू",-13.052313804626465],["aatio",-13.052319526672363],["▁Tradi",-13.052327156066896],["▁primavera",-13.052327156066896],["YAN",-13.05233383178711],["▁Diamond",-13.052338600158691],["▁Slav",-13.05235767364502],["▁końca",-13.052358627319336],["ולד",-13.05236530303955],["familie",-13.052386283874512],["▁nero",-13.052398681640623],["▁%.",-13.052414894104004],["nema",-13.05246925354004],["▁Zap",-13.05247688293457],["▁cuales",-13.052480697631836],["▁ďalej",-13.052481651306152],["▁məhsul",-13.052499771118164],["jj",-13.052509307861328],["▁quên",-13.052525520324709],["గ్ర",-13.052541
732788086],["108",-13.052542686462402],["guri",-13.052549362182615],["▁tags",-13.052553176879885],["posa",-13.052557945251465],["▁گروپ",-13.052573204040527],["чную",-13.05257511138916],["NOS",-13.052578926086426],["цтв",-13.052581787109377],["แก้ไข",-13.052597045898438],["▁layihəsi",-13.052605628967283],["zák",-13.052671432495115],["لەپ",-13.052701950073242],["▁খুব",-13.052712440490724],["▁küld",-13.052728652954102],["ជន",-13.052749633789062],["▁mót",-13.052778244018556],["▁solat",-13.052800178527832],["▁foreldre",-13.05280303955078],["▁málo",-13.05280876159668],["▁ছাড়া",-13.052846908569336],["ได้ที่",-13.052857398986816],["▁سام",-13.052875518798828],["▁maart",-13.052886962890623],["▁megsz",-13.05288791656494],["▁priti",-13.05289077758789],["ităţii",-13.052891731262209],["ачка",-13.052921295166016],["▁ரா",-13.052949905395508],["▁فرمان",-13.05298137664795],["लर",-13.05299949645996],["▁spara",-13.053027153015137],["▁وصف",-13.053033828735352],["дова",-13.053068161010742],["아이",-13.053072929382324],["falt",-13.0531005859375],["▁논",-13.053107261657717],["碎",-13.053128242492676],["▁Yesu",-13.05313491821289],["yka",-13.053146362304688],["ttyä",-13.0531587600708],["பர்",-13.05317211151123],["▁Перед",-13.053189277648926],["ליט",-13.05320167541504],["▁Track",-13.05320644378662],["先进",-13.05328369140625],["▁对于",-13.053287506103516],["▁üzü",-13.053290367126465],["▁كۆر",-13.05329132080078],["성과",-13.053330421447754],["つく",-13.05333423614502],["▁prijatelj",-13.053335189819336],["▁grands",-13.053342819213867],["リスト",-13.05337905883789],["▁emocional",-13.053410530090332],["DES",-13.053436279296877],["สไตล์",-13.053439140319824],["Ố",-13.05344009399414],["▁alumnado",-13.05344009399414],["▁sacerdot",-13.05344009399414],["▁نباید",-13.05344009399414],["▁অক্টোবর",-13.05344009399414],["พิธี",-13.053441047668455],["▁impossible",-13.053441047668455],["▁sederhana",-13.053441047668455],["เชียงใหม่",-13.05344295501709],["▁заштите",-13.053455352783203],["▁розвиток",-13.05345630645752],["▁منع",-13.053484916687012],["šnje",-13.05349349975586],["▁kindlasti",-13.05349826812744],["▁αυτός",-13.053502082824709],["▁Lege",-13.053504943847656],["milia",-13.053523063659668],["јед",-13.053526878356934],["▁ගුණ",-13.053529739379885],["ฝึก",-13.053531646728516],["▁dubbel",-13.053532600402832],["委員會",-13.05354118347168],["▁tepër",-13.053549766540527],["όπουλος",-13.05355167388916],["▁sumu",-13.053564071655272],["▁മുഖ",-13.053579330444336],["▁pantai",-13.05360984802246],["▁ضلع",-13.05360984802246],["映",-13.053613662719728],["▁ചര്",-13.05361557006836],["晴",-13.05361557006836],["▁namelijk",-13.053627014160156],["▁gelten",-13.053633689880373],["seng",-13.053638458251951],["▁گهٽ",-13.053674697875977],["zten",-13.053715705871582],["ovne",-13.05372428894043],["▁ramen",-13.053726196289062],["কল",-13.053733825683594],["使い",-13.05374526977539],["ڑا",-13.053772926330566],["▁goal",-13.053821563720703],["▁पल",-13.053845405578612],["▁vicino",-13.053855895996094],["områder",-13.053857803344728],["▁Аляксандр",-13.053857803344728],["ัน",-13.053866386413574],["▁Бы",-13.053866386413574],["▁utal",-13.053913116455078],["▁demostra",-13.053914070129396],["▁Гро",-13.053922653198242],["content",-13.053955078125],["CES",-13.05398178100586],["▁tangga",-13.053990364074709],["感動",-13.053996086120604],["是非常",-13.054000854492188],["▁ediyorum",-13.05402374267578],["▁žemės",-13.054024696350098],["▁Arra",-13.054031372070312],["▁poni",-13.054034233093262],["▁vibra",-13.054079055786133],["▁ऐ",-13.05408000946045],["▁видел",-13.05409049987793],["▁ikaw",-13.0540981292724
6],["nilai",-13.054129600524902],["▁Kopi",-13.054136276245115],["በቃ",-13.054162979125977],["gebouw",-13.054168701171877],["жих",-13.054171562194824],["▁alak",-13.05417251586914],["▁últimas",-13.054190635681152],["lank",-13.05422306060791],["▁altă",-13.054250717163086],["kund",-13.054261207580566],["जू",-13.054262161254885],["▁onderhoud",-13.054277420043944],["▁poslov",-13.054278373718262],["▁تحصیل",-13.05430793762207],["మాన",-13.0543212890625],["ნენ",-13.054332733154297],["▁posibilidad",-13.054353713989258],["▁1,1",-13.054397583007812],["▁balla",-13.054428100585938],["▁kërkon",-13.054438591003418],["ွေ",-13.054452896118164],["▁Lue",-13.05446720123291],["econ",-13.054495811462402],["გვ",-13.054530143737791],["IND",-13.05453872680664],["▁afgelopen",-13.054546356201172],["▁DOM",-13.05454921722412],["计算机",-13.054551124572754],["▁comentarios",-13.05457878112793],["▁சங்க",-13.05458164215088],["崎",-13.054593086242676],["련",-13.05459690093994],["▁Şə",-13.05460262298584],["▁Medical",-13.054606437683104],["▁comença",-13.05461883544922],["قطع",-13.054619789123535],["칠",-13.0546293258667],["χη",-13.054631233215332],["▁ældre",-13.054631233215332],["▁سؤال",-13.054631233215332],["▁хэмээн",-13.054633140563965],["▁متوسط",-13.054638862609863],["▁xəstə",-13.054651260375977],["zaka",-13.054652214050291],["▁tekið",-13.054656982421877],["▁الاسلام",-13.054665565490724],["▁kontrak",-13.054667472839355],["▁уговор",-13.054679870605469],["▁achieve",-13.054694175720217],["▁цената",-13.054695129394531],["▁berwarna",-13.054698944091797],["ගු",-13.054707527160645],["▁потреби",-13.054750442504885],["▁skry",-13.054758071899414],["ឈ្មោះ",-13.05477523803711],["bite",-13.054789543151855],["cipta",-13.05482006072998],["▁wakiwa",-13.054854393005373],["▁popolo",-13.05485725402832],["▁مراجعه",-13.054893493652344],["ിട്ടും",-13.054896354675291],["▁Klas",-13.054906845092772],["▁ցանկ",-13.054957389831545],["▁katerega",-13.05498218536377],["▁tub",-13.054996490478516],["▁висока",-13.055009841918944],["▁berdiri",-13.055014610290527],["立刻",-13.05501937866211],["stoja",-13.055028915405272],["kking",-13.055030822753906],["ელია",-13.055055618286133],["▁convida",-13.05505657196045],["ītā",-13.0551118850708],["▁petites",-13.055129051208496],["▁dogodi",-13.055163383483888],["ამი",-13.055164337158203],["റേ",-13.055191040039062],["ילי",-13.055206298828123],["વાદ",-13.055242538452148],["ଯ",-13.055289268493652],["秘",-13.055307388305664],["▁Editor",-13.055328369140623],["▁عزت",-13.055343627929688],["▁suam",-13.055347442626951],["▁реальн",-13.055390357971191],["▁माघ",-13.05539321899414],["itzat",-13.055394172668455],["бес",-13.055401802062988],["▁turned",-13.055431365966797],["համ",-13.055448532104492],["အခါ",-13.055459022521973],["▁шампион",-13.055482864379885],["▁sente",-13.055512428283691],["▁سوي",-13.055527687072754],["▁кул",-13.055535316467283],["▁მიუ",-13.055538177490234],["פון",-13.055550575256348],["▁videá",-13.055583953857422],["▁Хор",-13.055630683898926],["musik",-13.05563735961914],["▁спроб",-13.055642127990724],["▁πέρα",-13.05565357208252],["▁профессионал",-13.055665016174316],["▁nějak",-13.055668830871582],["espai",-13.055682182312012],["DH",-13.055683135986328],["▁anois",-13.05569839477539],["лека",-13.055706977844238],["▁Lige",-13.05571460723877],["ນ້ອຍ",-13.055731773376465],["▁крок",-13.055758476257324],["▁vandag",-13.05576992034912],["▁そんな",-13.055798530578612],["jol",-13.055801391601562],["▁llei",-13.055809020996094],["hik",-13.05581760406494],["▁زي",-13.055818557739258],["ō",-13.055822372436523],["▁ισχυρ",-13.055822372436523],["▁
самостоятельно",-13.055822372436523],["▁առավել",-13.055822372436523],["▁संरक्षण",-13.055822372436523],["▁കൂടുതൽ",-13.055822372436523],["▁astăzi",-13.05582332611084],["▁särskilt",-13.05582332611084],["▁április",-13.05582332611084],["▁aanwezig",-13.055824279785156],["▁stránke",-13.055837631225586],["▁faris",-13.05583953857422],["▁Trước",-13.055840492248535],["▁Gefühl",-13.055846214294434],["ULU",-13.055850982666016],["dência",-13.055853843688965],["▁गोष्टी",-13.055870056152344],["▁PÅ",-13.055877685546877],["▁nopeasti",-13.055886268615724],["usaha",-13.055890083312988],["авана",-13.055896759033203],["Life",-13.055899620056152],["വോ",-13.055899620056152],["ແຕ່",-13.055899620056152],["▁otse",-13.05590534210205],["▁سعد",-13.055937767028809],["նչ",-13.055953979492188],["▁tipe",-13.055964469909668],["▁коштів",-13.055965423583984],["▁quickly",-13.0559720993042],["კურ",-13.055977821350098],["▁terms",-13.056002616882324],["▁Ադրբեջանի",-13.056031227111816],["▁Umar",-13.05604076385498],["▁робоч",-13.056041717529297],["čina",-13.056071281433104],["▁ପଡ଼ି",-13.056077003479004],["سير",-13.05607795715332],["တောင်",-13.056087493896484],["▁шат",-13.0560941696167],["▁ընդունել",-13.056126594543455],["▁માન",-13.056169509887695],["rück",-13.056185722351074],["▁надвор",-13.056222915649414],["döm",-13.056239128112791],["ận",-13.056251525878906],["▁чуть",-13.056252479553224],["TÉ",-13.056262969970703],["वत्",-13.056275367736816],["至于",-13.05630588531494],["koop",-13.0563325881958],["▁couldn",-13.056356430053713],["▁dvoch",-13.056356430053713],["maydi",-13.05635929107666],["▁kultūras",-13.056405067443848],["teilen",-13.056408882141112],["はず",-13.056410789489746],["▁mkoa",-13.05641746520996],["പക്ഷ",-13.056429862976074],["▁vete",-13.056451797485352],["▁MR",-13.0564603805542],["▁ଉତ୍",-13.05646800994873],["cian",-13.056471824645996],["տալ",-13.05650234222412],["▁ρόλο",-13.056533813476562],["▁түн",-13.056546211242676],["indica",-13.056571006774902],["▁elegan",-13.056580543518066],["▁тәрбие",-13.056591987609863],["すぐ",-13.056628227233888],["▁Ismail",-13.056645393371582],["ाब",-13.056653022766112],["▁günstig",-13.056659698486328],["tartó",-13.056668281555176],["時の",-13.056696891784668],["▁vanligt",-13.056699752807615],["רם",-13.05671215057373],["▁мүмкіндік",-13.05677604675293],[",7%",-13.056777954101562],["ਸਰ",-13.056781768798828],["▁Paro",-13.05678939819336],["寻找",-13.056800842285156],["يبة",-13.056801795959473],["▁meleg",-13.056827545166016],["▁խոր",-13.05683708190918],["▁วันนี้",-13.056852340698242],["▁ula",-13.056867599487305],["▁spare",-13.056883811950684],["▁vitu",-13.056899070739746],["▁popo",-13.05690097808838],["FL",-13.056904792785645],["gatik",-13.056930541992188],["tegia",-13.056936264038086],["钢",-13.056958198547363],["▁tých",-13.05696964263916],["關鍵",-13.05698585510254],["▁будете",-13.057002067565918],["囉",-13.057004928588867],["▁байланыш",-13.05700969696045],["▁tấn",-13.057016372680664],["▁వచ్చింది",-13.057024002075195],["▁ბარ",-13.057043075561523],["nove",-13.057050704956056],["▁එල්ල",-13.05706024169922],["gørelse",-13.05707836151123],["▁tặng",-13.05707836151123],["▁Засаг",-13.05707836151123],["▁ବନ୍ଦ",-13.057080268859863],["▁للبيع",-13.057101249694824],["▁בתוך",-13.057104110717772],["▁hektar",-13.057108879089355],["▁laisse",-13.057126998901367],["▁શોધ",-13.057129859924316],["Ed",-13.05713176727295],["▁нивните",-13.057134628295898],["czo",-13.057143211364746],["квіт",-13.057157516479492],["▁formació",-13.057159423828123],["ктив",-13.05716609954834],["▁repe",-13.05717658996582],["▁Iraq",-13.05721378326416],["▁
palestin",-13.057217597961426],["алиев",-13.057223320007324],["▁અમદાવાદ",-13.05722713470459],["▁profiter",-13.057239532470703],["ศาล",-13.05728244781494],["▁ప్రజల",-13.057286262512209],["चक",-13.05731201171875],["▁оқы",-13.057327270507812],["▁ស្រី",-13.05733871459961],["組合",-13.05734157562256],["ilka",-13.057354927062988],["มาร์",-13.057377815246582],["▁акция",-13.057392120361328],["▁нико",-13.057394981384276],["▁اللي",-13.057394981384276],["▁חברה",-13.057395935058594],["isuuden",-13.057403564453123],["▁centri",-13.057416915893556],["γμα",-13.057417869567873],["ଢ଼",-13.057422637939451],["▁yazar",-13.057443618774414],["▁flores",-13.05746364593506],["ирано",-13.057472229003906],["ढा",-13.057473182678224],["ంక",-13.057476043701172],["SIS",-13.05748462677002],["▁menej",-13.057491302490234],["▁figli",-13.057503700256348],["▁aprile",-13.057504653930664],["ančių",-13.05752182006836],["nell",-13.057527542114258],["▁täis",-13.057549476623535],["▁שום",-13.057564735412598],["▁бюджету",-13.057579040527344],["▁ઠ",-13.057635307312012],["你就",-13.057652473449709],["moguć",-13.057660102844238],["fið",-13.057661056518556],["▁மனித",-13.057666778564451],["▁inteligent",-13.057676315307615],["▁preço",-13.057676315307615],["▁ellers",-13.05768585205078],["▁mão",-13.057703971862791],["▁kait",-13.057741165161133],["▁օրը",-13.057745933532717],["▁artes",-13.057779312133787],["EKO",-13.057785034179688],["灣",-13.057785987854004],["치는",-13.05780029296875],["▁Madi",-13.057843208312988],["▁gêm",-13.057854652404783],["olni",-13.05785846710205],["说的",-13.05788803100586],["▁priče",-13.057918548583984],["▁würden",-13.057927131652832],["▁Kunde",-13.057945251464844],["▁maqsad",-13.057947158813477],["▁acht",-13.05795669555664],["▁farmaci",-13.05797004699707],["▁Veja",-13.057979583740234],["naweza",-13.058003425598145],["▁zerbitzu",-13.058003425598145],["ബോധ",-13.058012962341309],["ात्म",-13.058043479919434],["ਨਾਂ",-13.058045387268066],["▁lena",-13.058056831359863],["▁Miami",-13.058059692382812],["▁Rø",-13.05810546875],["▁Lösung",-13.058110237121582],["▁укуктар",-13.058125495910645],["חזור",-13.058130264282228],["可愛",-13.058147430419922],["▁millorar",-13.058155059814451],["帳",-13.058171272277832],["▁staro",-13.05817413330078],["▁centr",-13.058191299438477],["▁Nici",-13.058199882507324],["erê",-13.058209419250488],["٤",-13.058210372924805],["▁Penyakit",-13.05821132659912],["▁posebej",-13.05821132659912],["▁الوزراء",-13.05821132659912],["▁كذلك",-13.058220863342283],["▁بالإضافة",-13.058226585388184],["вода",-13.0582275390625],["▁kaikille",-13.0582275390625],["ทหาร",-13.058238983154297],["▁маань",-13.05824089050293],["tétel",-13.058262825012209],["▁مراد",-13.058267593383787],["記載",-13.058267593383787],["қты",-13.058290481567385],["▁økonomi",-13.05829906463623],["▁forta",-13.058306694030762],["пуб",-13.058311462402344],["▁gauza",-13.05831527709961],["delig",-13.058326721191406],["▁амь",-13.058326721191406],["▁дорож",-13.058330535888672],["ארי",-13.058358192443848],["▁એવી",-13.05836296081543],["▁ရှိ",-13.058364868164062],["▁បន្ទាប់",-13.058368682861328],["▁खेळ",-13.058404922485352],["pirk",-13.058414459228516],["raži",-13.05841827392578],["였",-13.05841827392578],["ኘ",-13.058425903320312],["▁konstitu",-13.058432579040527],["1995",-13.05843734741211],["ржав",-13.05844020843506],["▁نماینده",-13.058459281921388],["基本上",-13.058491706848145],["▁nastop",-13.058513641357422],["Son",-13.058526039123535],["随后",-13.058536529541016],["▁កាន់",-13.05854606628418],["प्रमाणे",-13.058549880981444],["▁осу",-13.058555603027344],["▁joške",-13.0585823059082
03],["ทัน",-13.058637619018556],["▁pozdrav",-13.05864715576172],["kúp",-13.058658599853516],["waarde",-13.058682441711426],["▁pozornost",-13.058710098266602],["52)",-13.05871868133545],["▁Slot",-13.058775901794434],["ňu",-13.058782577514648],["▁Hạ",-13.05880355834961],["▁anuncia",-13.058804512023926],["abiliyor",-13.058808326721191],["ටර්",-13.058815002441406],["▁фонду",-13.05881690979004],["▁معمولا",-13.058838844299316],["▁tillë",-13.058856010437012],["▁معه",-13.05886459350586],["қу",-13.058884620666504],["িকা",-13.058884620666504],["▁ഗാ",-13.058884620666504],["▁szok",-13.058889389038086],["▁tyttö",-13.058935165405272],["дата",-13.058955192565918],["▁autores",-13.05896282196045],["ತ್ತಿ",-13.05897045135498],["কু",-13.058972358703612],["▁codi",-13.058981895446776],["▁такий",-13.059026718139648],["sizlik",-13.059039115905762],["▁поводу",-13.059073448181152],["▁Erfahrungen",-13.059096336364746],["bbel",-13.059154510498049],["retan",-13.059164047241213],["▁Време",-13.059174537658691],["पाद",-13.059212684631348],["廳",-13.05922794342041],["hatnak",-13.059228897094728],["▁kolon",-13.059231758117676],["рв",-13.059232711791992],["▁bitir",-13.059256553649902],["Ng",-13.059268951416016],["GC",-13.05927276611328],["▁मदत",-13.059286117553713],["ŝa",-13.059287071228027],["▁turistic",-13.059289932250977],["▁çend",-13.059289932250977],["▁رجل",-13.059314727783203],["kopi",-13.059329986572266],["責",-13.059332847595217],["▁баяр",-13.059351921081545],["▁തോന്ന",-13.059372901916504],["▁άνθρωποι",-13.059408187866213],["▁држави",-13.059408187866213],["▁квадрат",-13.059408187866213],["▁ویڈیو",-13.059408187866213],["▁कस",-13.059412002563477],["▁helyszín",-13.059412956237791],["▁жібер",-13.059415817260742],["charge",-13.059431076049805],["▁미래",-13.059447288513184],["ikut",-13.059457778930664],["dosta",-13.05950164794922],["ાડ",-13.0595064163208],["及び",-13.059523582458496],["indeki",-13.05955982208252],["▁venstre",-13.059560775756836],["▁числі",-13.059568405151367],["否",-13.059575080871582],["▁şeklinde",-13.059612274169922],["ပြား",-13.059639930725098],["▁tennis",-13.059656143188477],["പാ",-13.05970859527588],["▁phr",-13.059715270996094],["ינות",-13.05974006652832],["▁Cath",-13.059741973876951],["▁ainm",-13.059747695922852],["moj",-13.059755325317385],["▁Zim",-13.05977725982666],["摆",-13.05978298187256],["▁станет",-13.059795379638672],["wich",-13.059805870056152],["▁razvoja",-13.059825897216797],["▁लागत",-13.05982780456543],["▁pač",-13.059846878051758],["▁Herman",-13.05986785888672],["▁sifatida",-13.05986785888672],["નગર",-13.059884071350098],["▁označen",-13.059885025024414],["▁Gael",-13.059887886047363],["飲食",-13.059890747070312],["▁камп",-13.05990505218506],["▁ನಿಮಗೆ",-13.059919357299805],["מצ",-13.059924125671388],["▁vů",-13.059947967529297],["▁verkefni",-13.05996322631836],["büro",-13.059968948364258],["profe",-13.060014724731444],["▁kodwa",-13.06003189086914],["▁بچے",-13.060044288635254],["▁יד",-13.060099601745604],["いただいた",-13.060100555419922],["వో",-13.06012725830078],["ด้วยการ",-13.060128211975098],["▁látható",-13.06014347076416],["泥",-13.060144424438477],["▁guld",-13.060155868530272],["▁tidspunkt",-13.06019401550293],["know",-13.06020164489746],["miesz",-13.060202598571776],["liyo",-13.060216903686523],["समा",-13.060219764709473],["▁Lama",-13.060227394104004],["▁Atom",-13.060256958007812],["▁Sinh",-13.06026840209961],["▁liburu",-13.060272216796877],["GDP",-13.06035327911377],["weke",-13.060359001159668],["▁trafic",-13.06038761138916],["明年",-13.060394287109377],["▁János",-13.06044101715088],["▁း",-13.0604486465
4541],["ტკ",-13.060460090637209],["▁권",-13.060489654541016],["妮",-13.06057071685791],["▁दुर्",-13.060577392578123],["▁חר",-13.06058406829834],["▁mindestens",-13.0606050491333],["▁respeito",-13.0606050491333],["▁اللغة",-13.0606050491333],["▁অনুষ্ঠিত",-13.0606050491333],["▁ਅੰਦਰ",-13.060606002807615],["▁Depuis",-13.060606956481934],["▁આવશે",-13.060609817504885],["▁Այսօր",-13.0606107711792],["▁crecimiento",-13.060624122619627],["▁ይሁን",-13.060625076293944],["เมนู",-13.060626029968262],["▁تحریم",-13.060626029968262],["▁Sarawak",-13.060633659362791],["▁당신",-13.060641288757324],["መነ",-13.060650825500488],["MDB",-13.060704231262209],["▁riporta",-13.060711860656738],["▁ਭਾਰਤੀ",-13.060721397399902],["▁pequenos",-13.06073760986328],["▁educativa",-13.060772895812988],["▁redaktor",-13.060798645019531],["ващи",-13.06080722808838],["▁komin",-13.060818672180176],["▁запрос",-13.060830116271973],["loog",-13.060863494873049],["рэд",-13.060877799987791],["ລົດ",-13.060880661010742],["तृ",-13.06088638305664],["▁محروم",-13.060894966125488],["ຜົນ",-13.060914039611816],["▁Schweiz",-13.060927391052246],["▁مرز",-13.060932159423828],["tnik",-13.060935974121094],["rzenia",-13.060991287231444],["гэл",-13.061012268066406],["▁аса",-13.061013221740724],["▁Malik",-13.061053276062012],["▁운동",-13.061058044433594],["▁copiii",-13.06107234954834],["вит",-13.061118125915527],["tuv",-13.061155319213867],["▁голям",-13.061155319213867],["forhold",-13.061165809631348],["▁мисля",-13.061203956604004],["▁Visas",-13.0612154006958],["▁නැතුව",-13.061227798461914],["καταστ",-13.061259269714355],["▁koloni",-13.061287879943848],["human",-13.061298370361328],["▁jelentkez",-13.061302185058594],["▁партија",-13.061302185058594],["jelen",-13.061349868774414],["▁фар",-13.061351776123049],["▁pluri",-13.061360359191896],["▁ทุก",-13.06136417388916],["姓",-13.06137752532959],["▁ιδ",-13.06138515472412],["erint",-13.06138801574707],["ණු",-13.061392784118652],["בעל",-13.061403274536133],["▁శి",-13.061412811279297],["אלי",-13.061439514160156],["gget",-13.061461448669434],["fani",-13.061481475830078],["▁sivil",-13.06149196624756],["ставлять",-13.06150722503662],["▁esos",-13.061528205871582],["▁necesidad",-13.061552047729492],["holi",-13.061569213867188],["กลายเป็น",-13.061573028564451],["odh",-13.061574935913086],["▁Nog",-13.061605453491213],["طاق",-13.06162929534912],["▁bildet",-13.061635971069336],["▁suoja",-13.061644554138184],["▁duże",-13.061656951904297],["▁podat",-13.061675071716309],["dded",-13.061687469482422],["ද්දී",-13.061716079711914],["▁Mez",-13.061716079711914],["▁contrast",-13.061724662780762],["▁verran",-13.061724662780762],["Et",-13.06173610687256],["▁Ерөнхийлөгч",-13.061745643615724],["▁mateixa",-13.061763763427734],["▁ielā",-13.061771392822266],["▁спі",-13.061782836914062],["(3)",-13.061803817749023],["▁kemenangan",-13.061803817749023],["▁बलात्कार",-13.061803817749023],["▁ਮਨੁੱਖ",-13.061803817749023],["▁Sarajevo",-13.061809539794922],["▁Форум",-13.061812400817873],["▁რადიო",-13.06181812286377],["▁තත්වය",-13.061820030212402],["▁kvalitní",-13.061823844909668],["▁znovu",-13.061833381652832],["▁երկար",-13.061840057373049],["▁Rezultat",-13.061847686767578],["▁naru",-13.061859130859377],["▁auprès",-13.06186294555664],["▁අප්",-13.061877250671388],["zoek",-13.061898231506348],["lidir",-13.061903953552246],["えた",-13.061912536621094],["▁zomer",-13.061917304992676],["▁Maribor",-13.061927795410156],["▁руках",-13.061928749084473],["▁ongi",-13.061930656433104],["▁podporu",-13.061957359313965],["▁पर्",-13.061973571777344],["гиз",-13.062006950378418],["▁Λι",
-13.06201171875],["זר",-13.06201457977295],["▁concep",-13.06202793121338],["▁ജീവ",-13.062047004699709],["ปลูก",-13.062067031860352],["אד",-13.062079429626465],["७",-13.062082290649414],["▁společnost",-13.062088012695312],["ashada",-13.062104225158691],["පාල",-13.06210708618164],["ವೀ",-13.062114715576172],["сце",-13.062125205993652],["േഷന്",-13.062141418457031],["▁berriak",-13.062146186828612],["▁consegui",-13.062155723571776],["▁équipe",-13.06218719482422],["ችል",-13.062188148498535],["▁forbi",-13.062201499938965],["...“",-13.062225341796877],["▁neměl",-13.062249183654783],["▁artikler",-13.062294960021973],["心的",-13.062373161315918],["▁إلي",-13.062376022338867],["ደት",-13.06238079071045],["▁ardhur",-13.062390327453612],["한다고",-13.062420845031738],["stick",-13.06242847442627],["▁snack",-13.062433242797852],["▁MTV",-13.062440872192385],["▁slad",-13.0624418258667],["▁رفته",-13.062467575073242],["край",-13.062470436096191],["▁ಜನರ",-13.062475204467772],["luv",-13.062482833862305],["▁записи",-13.06251335144043],["▁βασ",-13.062525749206545],["▁وې",-13.062528610229492],["▁Artist",-13.06253719329834],["▁olmadığını",-13.062552452087402],["▁зат",-13.062569618225098],["▁Puh",-13.062613487243652],["▁leren",-13.062625885009766],["ueshëm",-13.062629699707031],["▁quals",-13.062636375427246],["мета",-13.062642097473145],["цтва",-13.062670707702637],["▁ඇර",-13.062688827514648],["▁entanto",-13.062700271606444],["▁maggiore",-13.062705993652344],["▁دائر",-13.062705993652344],["▁quê",-13.062786102294922],["յուս",-13.06279468536377],["▁२७",-13.062803268432615],["▁bezoek",-13.062859535217283],["竟",-13.062871932983398],["Af",-13.062891006469728],["ግድ",-13.062914848327637],["νας",-13.062932968139648],["ИВ",-13.062938690185549],["壮",-13.062938690185549],["▁potpuno",-13.062943458557127],["▁unii",-13.062973976135254],["בנים",-13.062975883483888],["▁दिस",-13.062981605529783],["▁hoja",-13.062994956970217],["▁Georgia",-13.062996864318848],["កន្លែង",-13.063002586364746],["▁nepieciešams",-13.06300449371338],["▁wybodaeth",-13.06300449371338],["▁извештај",-13.06300449371338],["▁activities",-13.06300926208496],["ขยาย",-13.063010215759276],["▁Şimdi",-13.06301498413086],["相同",-13.063018798828123],["▁ఏపీ",-13.063023567199709],["▁ఎందుకు",-13.06302547454834],["tuks",-13.063034057617188],["▁ಕಡೆ",-13.063054084777832],["▁sverige",-13.06306266784668],["▁අම්ම",-13.06306266784668],["eeyo",-13.063075065612791],["నని",-13.06309413909912],["īta",-13.063112258911133],["ญา",-13.063118934631348],["▁hương",-13.06312370300293],["SHI",-13.06313133239746],["▁skupiny",-13.063135147094728],["▁union",-13.063157081604004],["▁explique",-13.06316089630127],["▁బె",-13.063164710998535],["▁Falls",-13.063165664672852],["▁utenfor",-13.0631742477417],["▁Srbije",-13.06320571899414],["எல்",-13.063228607177734],["▁ужас",-13.063231468200684],["ленне",-13.063241004943848],["▁поро",-13.06324577331543],["känd",-13.06324863433838],["▁آسیا",-13.06325626373291],["▁odlično",-13.063272476196287],["▁fordul",-13.063274383544922],["▁Вид",-13.06328582763672],["Kita",-13.063288688659668],["▁içi",-13.063302040100098],["▁ಕಲೆ",-13.063312530517578],["▁republika",-13.063323974609377],["▁Norma",-13.063334465026855],["▁другое",-13.063336372375488],["õõ",-13.063346862792969],["टू",-13.063346862792969],["▁vagyunk",-13.063365936279297],["▁Poi",-13.06339168548584],["▁базе",-13.063396453857422],["فريق",-13.063400268554688],["▁waarvan",-13.063443183898926],["▁պետ",-13.06345272064209],["ても",-13.063464164733888],["的内容",-13.063465118408203],["ም፣",-13.063478469848633],["▁naast",-13.063478469848
633],["▁العلم",-13.063501358032228],["▁жаль",-13.063504219055176],["anj",-13.06354522705078],["പതി",-13.063546180725098],["▁гарга",-13.063553810119627],["▁ador",-13.063554763793944],["တန္",-13.063558578491213],["იას",-13.063576698303224],["▁psycho",-13.063580513000488],["rodz",-13.0635986328125],["فك",-13.063607215881348],["lago",-13.063612937927246],["winning",-13.063621520996094],["▁Trop",-13.063673973083496],["ferenc",-13.063702583312988],["Ban",-13.063705444335938],["pola",-13.063718795776367],["▁araka",-13.063729286193848],["▁gC",-13.063764572143556],["▁plugin",-13.063777923583984],["ológica",-13.063786506652832],["ستی",-13.06379222869873],["▁Fad",-13.06382656097412],["▁речь",-13.063830375671388],["vori",-13.063838958740234],["▁ох",-13.063841819763184],["▁انگیز",-13.06385898590088],["▁ekzist",-13.06386661529541],["▁Laser",-13.063908576965332],["▁valde",-13.06391429901123],["▁Policia",-13.063925743103027],["निया",-13.063936233520508],["แพทย์",-13.06394863128662],["▁concedi",-13.063949584960938],["▁عطا",-13.063958168029783],["▁nens",-13.063974380493164],["▁rendelet",-13.063997268676758],["▁nool",-13.064017295837402],["好吃",-13.064043998718262],["▁fëmijë",-13.064064979553224],["▁gesprek",-13.064087867736816],["నర్",-13.064088821411133],["áját",-13.064106941223145],["űr",-13.064106941223145],["▁ibi",-13.064116477966309],["▁glaz",-13.064129829406738],["▁ახ",-13.064129829406738],["廉",-13.064143180847168],["違い",-13.064169883728027],["灭",-13.064181327819824],["cij",-13.064193725585938],["▁οποίες",-13.06420612335205],["▁پیغام",-13.06420612335205],["▁අදාළ",-13.06420612335205],["▁සදහා",-13.06420612335205],["▁mengadakan",-13.064207077026367],["▁μέρες",-13.064210891723633],["▁pinakamahusay",-13.064215660095217],["շա",-13.064216613769531],["▁pesawat",-13.06422233581543],["▁نخستین",-13.064233779907228],["writer",-13.06424617767334],["naan",-13.064248085021973],["▁suala",-13.064266204833984],["šin",-13.06427001953125],["▁તેમની",-13.064302444458008],["▁අයට",-13.064305305480955],["▁oferty",-13.064311981201172],["▁हरि",-13.064318656921388],["▁സമയം",-13.064326286315918],["▁Teri",-13.064358711242676],["vít",-13.06436824798584],["▁faţă",-13.064385414123535],["▁സ്നേഹ",-13.064420700073242],["▁лицо",-13.064431190490724],["▁១០",-13.064438819885254],["▁Манай",-13.064453125],["遗",-13.064461708068848],["origine",-13.064470291137695],["ಸರ",-13.06447982788086],["ацца",-13.064518928527832],["▁plă",-13.06452178955078],["▁datingsider",-13.064531326293944],["րիկ",-13.064532279968262],["▁halu",-13.064537048339844],["Time",-13.064549446105955],["▁HOME",-13.064574241638184],["ąc",-13.064593315124512],["ЈА",-13.064608573913574],["▁bolsa",-13.064611434936523],["▁연결",-13.064615249633787],["▁teritorij",-13.064627647399902],["маз",-13.064629554748535],["▁мах",-13.064629554748535],["▁puudu",-13.06464958190918],["▁tanker",-13.064656257629396],["▁ALLAH",-13.064659118652344],["▁bericht",-13.064659118652344],["▁Dapat",-13.064702987670898],["▁ponte",-13.064716339111328],["▁ମୁଖ୍ୟମନ୍ତ୍ରୀ",-13.064762115478516],["▁함",-13.064769744873049],["යෙකු",-13.064805030822754],["▁Eve",-13.06480598449707],["ਪਰ",-13.06480884552002],["あなた",-13.06480884552002],["RAH",-13.064851760864258],["▁gesti",-13.064852714538574],["▁اقت",-13.064874649047852],["▁kerge",-13.064898490905762],["шава",-13.064910888671877],["▁دیر",-13.064937591552734],["俺",-13.064943313598633],["▁SEN",-13.064973831176758],["的机会",-13.064980506896973],["▁direk",-13.064986228942873],["▁przej",-13.065023422241213],["▁آوری",-13.065028190612791],["ደው",-13.065043449401855],["ಕೃ",-13.06508159637
4512],["carre",-13.06514835357666],["▁czyta",-13.065155029296877],["യായിരുന്നു",-13.065266609191896],["tamist",-13.06527042388916],["▁Яны",-13.065277099609377],["▁ರಾಮ",-13.065295219421388],["齊",-13.065326690673828],["▁2.000",-13.0653657913208],["歐洲",-13.0653657913208],["▁Chro",-13.065382957458496],["▁ឃុំ",-13.065388679504396],["सार",-13.065400123596191],["เสร็จ",-13.065407752990724],["갖",-13.065407752990724],["▁Έλληνες",-13.065409660339355],["▁Сегодня",-13.065409660339355],["▁оскільки",-13.065409660339355],["▁भइरहेको",-13.065409660339355],["▁никога",-13.065410614013672],["▁ଭାରତୀୟ",-13.06541633605957],["▁dziecko",-13.06541919708252],["▁uomini",-13.065423011779783],["▁marti",-13.065435409545898],["મેન્ટ",-13.065470695495604],["▁חושב",-13.065491676330566],["tør",-13.065497398376465],["лав",-13.06552505493164],["▁ерекше",-13.065532684326172],["ësisht",-13.065542221069336],["▁തി",-13.065550804138184],["▁doble",-13.065557479858398],["vamo",-13.065558433532717],["▁08:00",-13.065576553344728],["▁pohled",-13.065585136413574],["▁gestellt",-13.065590858459473],["רופא",-13.065591812133787],["▁vaz",-13.065610885620115],["▁reducere",-13.0656156539917],["ویں",-13.065616607666016],["នៅក្នុង",-13.065631866455078],["▁Klass",-13.065632820129396],["cës",-13.065653800964355],["▁Jamii",-13.06570053100586],["▁changement",-13.065726280212402],["▁травм",-13.065749168395996],["▁ਕਾਰ",-13.065750122070312],["曾經",-13.065778732299805],["times",-13.065790176391602],["nies",-13.065804481506348],["▁документів",-13.065807342529297],["▁khan",-13.06582736968994],["রণ",-13.06583309173584],["▁persze",-13.06583309173584],["2.2",-13.065900802612305],["łeś",-13.065902709960938],["▁povrat",-13.065924644470217],["▁Sakit",-13.065937042236328],["▁것도",-13.06593894958496],["ಮು",-13.065962791442873],["▁للن",-13.065972328186035],["passen",-13.0659818649292],["▁lygi",-13.065983772277832],["▁సమయంలో",-13.06600570678711],["ybę",-13.066017150878906],["gita",-13.06602668762207],["▁जून",-13.066032409667969],["меш",-13.066040992736816],["▁dobiti",-13.066091537475586],["გენ",-13.066104888916016],["▁combine",-13.066125869750977],["мус",-13.06613540649414],["tibor",-13.066139221191406],["▁Usa",-13.06614112854004],["▁aamu",-13.066167831420898],["▁દે",-13.066182136535645],["▁podstawie",-13.06618309020996],["▁поиска",-13.06618595123291],["AME",-13.066187858581545],["▁нашего",-13.06618881225586],["▁swydd",-13.066189765930176],["പുര",-13.066190719604492],["▁merkez",-13.066203117370604],["domi",-13.066213607788086],["▁Богу",-13.0662260055542],["▁Pál",-13.066235542297363],["▁اینترنت",-13.066256523132324],["堆",-13.066258430480955],["▁mohla",-13.06626033782959],["ත්ව",-13.066274642944336],["listi",-13.066307067871094],["െടുക്ക",-13.06630802154541],["บุญ",-13.066362380981444],["▁prepo",-13.066376686096191],["▁doresc",-13.066387176513672],["▁وڌ",-13.06640338897705],["▁Oliver",-13.066405296325684],["▁shafi",-13.066408157348633],["یری",-13.066411972045898],["▁הסו",-13.066415786743164],["▁Lub",-13.06643772125244],["▁знаем",-13.066455841064451],["▁ұйымдастыру",-13.066463470458984],["▁Halk",-13.0664701461792],["สก",-13.066484451293944],["Ả",-13.06649112701416],["чыў",-13.066521644592283],["▁belge",-13.0665283203125],["UTA",-13.066533088684082],["acquisto",-13.06654167175293],["▁Skup",-13.066542625427246],["▁너",-13.066546440124512],["▁dokładnie",-13.066568374633787],["顺利",-13.066582679748535],["▁येणार",-13.066588401794434],["琳",-13.066601753234863],["тельным",-13.066607475280762],["▁Halloween",-13.066614151000977],["▁článek",-13.066614151000977],["▁ظاہر",-13.06661415
1000977],["ėjas",-13.066615104675291],["▁Uvjeti",-13.06661891937256],["▁patvirtin",-13.06661891937256],["עריכת",-13.066621780395508],["▁раніше",-13.06662368774414],["▁úplně",-13.06666660308838],["სტრ",-13.06667423248291],["တည္",-13.06668472290039],["еру",-13.066699028015137],["▁решил",-13.066713333129885],["▁tersedia",-13.066715240478516],["▁elkarte",-13.066722869873049],["зад",-13.066730499267578],["▁јуна",-13.066734313964844],["▁צוויי",-13.06673526763916],["▁ადამიანის",-13.06673526763916],["▁कीमत",-13.06673812866211],["فكر",-13.066752433776855],["▁francez",-13.066795349121094],["▁feina",-13.066802978515623],["tomat",-13.066811561584473],["▁дати",-13.066815376281738],["▁diario",-13.06682014465332],["företag",-13.066832542419434],["▁Valley",-13.066837310791016],["▁ansvarlig",-13.066839218139648],["świę",-13.066845893859863],["ीच्या",-13.066851615905762],["وسی",-13.06689739227295],["▁индустри",-13.066923141479492],["▁Ekki",-13.06693172454834],["▁बाट",-13.066971778869627],["▁sno",-13.066983222961426],["र्जन",-13.066988945007324],["▁1,7",-13.067002296447754],["▁ees",-13.067002296447754],["▁Влада",-13.067015647888184],["主管",-13.06704807281494],["▁halál",-13.067078590393066],["▁ದೂರ",-13.067084312438965],["ifs",-13.06710720062256],["▁mối",-13.067115783691406],["ập",-13.067131996154783],["dista",-13.067188262939451],["메이",-13.06718921661377],["▁ດີ",-13.0671968460083],["▁Зу",-13.067209243774414],["很大的",-13.06728458404541],["▁korzysta",-13.067294120788574],["ത്തിനും",-13.067316055297852],["▁вдруг",-13.067317962646484],["▁(30)",-13.067340850830078],["55)",-13.06735134124756],["kaus",-13.067371368408203],["▁transparent",-13.067401885986328],["റാ",-13.067408561706545],["▁ترا",-13.067408561706545],["▁odd",-13.067415237426758],["vnost",-13.067416191101074],["▁tros",-13.067427635192873],["▁կլինի",-13.06747817993164],["peta",-13.067480087280272],["taran",-13.06748867034912],["taq",-13.067505836486816],["▁කණ්ඩායම",-13.06755256652832],["гоо",-13.067564964294434],["▁прича",-13.067566871643066],["▁Ace",-13.06757354736328],["سهل",-13.067584037780762],["▁tecnología",-13.067614555358888],["▁امنیتی",-13.067630767822266],["▁тепер",-13.06772804260254],["▁פור",-13.067730903625488],["▁مشترک",-13.067737579345703],["悟",-13.067754745483398],["messen",-13.067773818969728],["ంట్",-13.067777633666992],["▁tulis",-13.067785263061523],["檔案",-13.0678129196167],["▁придется",-13.06782054901123],["อัตรา",-13.067821502685549],["▁узнать",-13.067821502685549],["▁כדאי",-13.067822456359863],["▁הראשונה",-13.067824363708496],["▁lawan",-13.067826271057127],["▁તથા",-13.067827224731444],["▁június",-13.067828178405762],["▁ONLINE",-13.067830085754396],["▁ජීවත්",-13.067834854125977],["▁Между",-13.067846298217772],["ดอก",-13.067848205566406],["▁kärlek",-13.06786060333252],["▁İlçe",-13.067863464355469],["تاج",-13.067864418029783],["агу",-13.067867279052734],["▁आग्रह",-13.06787109375],["▁بزن",-13.067876815795898],["▁шүүх",-13.06789207458496],["▁সর্বশেষ",-13.067909240722656],["▁기간",-13.0679292678833],["বন",-13.067947387695312],["▁نص",-13.067947387695312],["▁assicura",-13.06796646118164],["▁gezien",-13.06797218322754],["ግራ",-13.067977905273438],["▁award",-13.06802463531494],["moja",-13.06802749633789],["獨立",-13.068034172058104],["▁stoji",-13.06808376312256],["ունակ",-13.068097114562988],["Bank",-13.068107604980469],["▁Trä",-13.06811237335205],["க்ஸ்",-13.068119049072266],["▁کارروائی",-13.068142890930176],["▁стойност",-13.068153381347656],["يكي",-13.068154335021973],["▁որն",-13.06821060180664],["▁aholi",-13.06821632385254],["▁Krista",-13.06823825836
1816],["▁वर्षा",-13.068239212036133],["那是",-13.06824779510498],["▁tone",-13.068257331848145],["ตัวอย่าง",-13.06826114654541],["▁referi",-13.068266868591309],["▁አት",-13.06827449798584],["slå",-13.068275451660156],["▁ваши",-13.068285942077637],["طعم",-13.068303108215332],["▁ри",-13.068306922912598],["aalis",-13.06832790374756],["೫",-13.06833839416504],["▁Kno",-13.068346977233888],["ලූ",-13.068371772766112],["▁فور",-13.068381309509276],["買取",-13.0684175491333],["▁afara",-13.068419456481934],["▁võivad",-13.068438529968262],["あの",-13.068479537963867],["▁1913",-13.068511962890623],["▁Orient",-13.068537712097168],["activité",-13.068544387817385],["▁کہیں",-13.068546295166016],["▁logi",-13.06855010986328],["▁воды",-13.06857681274414],["▁מקצועי",-13.068618774414062],["▁camí",-13.068620681762695],["ถือ",-13.06863784790039],["▁फै",-13.068666458129885],["▁(?)",-13.068668365478516],["kram",-13.068682670593262],["▁बाहिर",-13.068685531616213],["▁خر",-13.068705558776855],["บั",-13.06870937347412],["▁toplantı",-13.068747520446776],["rades",-13.06875228881836],["diğini",-13.068753242492676],["авіч",-13.068753242492676],["rske",-13.068770408630373],["▁dotyczące",-13.068779945373535],["▁stopni",-13.068792343139648],["فعال",-13.06881618499756],["锋",-13.06882381439209],["שיח",-13.06884479522705],["幹",-13.068860054016112],["▁yasa",-13.068878173828123],["▁ostali",-13.068906784057615],["šiu",-13.068922996520996],["רור",-13.06894874572754],["ପାରି",-13.06894874572754],["▁Παν",-13.06894874572754],["雜",-13.068964004516602],["戏",-13.068967819213867],["ंचे",-13.0689697265625],["zano",-13.068991661071776],["▁dostane",-13.068995475769045],["蒸",-13.069003105163574],["适",-13.06900691986084],["สัญญา",-13.069019317626951],["ありがとうございます",-13.069025039672852],["▁հաղթ",-13.069026947021484],["▁가운데",-13.0690279006958],["▁ترتیب",-13.069028854370115],["▁სამხედრო",-13.069028854370115],["▁joylashgan",-13.069029808044434],["▁ალბათ",-13.069029808044434],["▁כיצד",-13.069040298461914],["▁बयान",-13.06904125213623],["▁पदार्थ",-13.069043159484863],["စွာ",-13.069045066833496],["▁Бірақ",-13.069046974182127],["▁italiani",-13.069047927856444],["▁mogao",-13.069049835205078],["▁ලස්සන",-13.069049835205078],["▁lector",-13.069067001342772],["▁хэм",-13.069068908691406],["▁Ademais",-13.069069862365724],["▁الإمارات",-13.069073677062988],["▁Գր",-13.069074630737305],["▁király",-13.069077491760254],["bër",-13.06908130645752],["▁करीब",-13.069086074829102],["▁සහිත",-13.069086074829102],["▁صوت",-13.069133758544922],["▁नेपालले",-13.069134712219238],["▁ٿيندو",-13.069135665893556],["IES",-13.069144248962402],["▁attenzione",-13.069147109985352],["▁ನೆ",-13.069151878356934],["▁bestellen",-13.06916618347168],["▁своими",-13.069171905517578],["▁lyssna",-13.069173812866213],["▁speak",-13.069177627563477],["▁poor",-13.069186210632324],["▁erro",-13.069199562072754],["លី",-13.069205284118652],["▁рекорд",-13.069208145141602],["znám",-13.06921100616455],["limo",-13.069222450256348],["▁poveča",-13.069231033325195],["▁stranke",-13.069231986999512],["▁zar",-13.069241523742676],["▁ولو",-13.069243431091309],["долж",-13.06924533843994],["▁TUR",-13.069250106811523],["▁व्यक्ती",-13.069260597229004],["▁намер",-13.06926727294922],["▁치료",-13.069272994995115],["▁שש",-13.0692777633667],["ଆର",-13.069278717041016],["▁감사",-13.069284439086914],["reach",-13.069297790527344],["baj",-13.069300651550291],["▁başla",-13.069311141967772],["ugod",-13.069315910339355],["▁ثلاثة",-13.069332122802734],["сси",-13.06933879852295],["▁mənə",-13.069375038146973],["spreken",-13.069382667541504],["ព្រះ",-13.0693
90296936035],["ાઉ",-13.069392204284668],["▁niko",-13.069409370422363],["дзіў",-13.069416046142578],["خى",-13.069418907165527],["ଠି",-13.069425582885742],["дэл",-13.069429397583008],["▁sociala",-13.069480895996094],["▁পারেন",-13.069496154785156],["▁ratio",-13.06951141357422],["▁بولۇش",-13.069525718688965],["▁қабылда",-13.069533348083496],["rozen",-13.069540977478027],["▁Precis",-13.06955337524414],["▁narko",-13.06955337524414],["▁født",-13.06955909729004],["▁ผ",-13.069564819335938],["▁Ислам",-13.069575309753418],["דמי",-13.069581031799316],["tsje",-13.069588661193848],["▁pillu",-13.069592475891112],["▁ІІ",-13.069594383239746],["▁والك",-13.06960391998291],["▁Први",-13.069610595703123],["▁міндетті",-13.06962776184082],["ېر",-13.0696382522583],["▁narkotik",-13.069645881652832],["▁angka",-13.069649696350098],["тір",-13.069652557373049],["NM",-13.069668769836426],["▁összeg",-13.069746971130373],["nggi",-13.06975269317627],["▁evita",-13.06975555419922],["cioj",-13.069756507873535],["tube",-13.069782257080078],["▁termín",-13.069795608520508],["▁Phụ",-13.069806098937988],["▁reserved",-13.06982707977295],["ก็คือ",-13.069828033447266],["ларға",-13.069842338562012],["▁доказа",-13.069860458374023],["suka",-13.06987190246582],["▁метро",-13.06987762451172],["▁కష్ట",-13.069887161254885],["▁બહુ",-13.06991958618164],["▁hosting",-13.069920539855955],["▁gloria",-13.069931983947754],["äyt",-13.069947242736816],["▁qeveri",-13.069974899291992],["▁Katha",-13.069975852966309],["▁хит",-13.069981575012209],["ுள்ளார்",-13.06998348236084],["▁חב",-13.069993019104004],["▁Jens",-13.069995880126951],["▁selección",-13.070000648498535],["ۇرۇ",-13.070018768310549],["▁сіль",-13.070040702819824],["▁rangka",-13.070042610168455],["▁لح",-13.070060729980469],["▁विदेशी",-13.070061683654783],["sætte",-13.070096015930176],["▁چر",-13.070115089416504],["▁ingresso",-13.070122718811035],["▁Kira",-13.070155143737791],["一带一路",-13.070162773132324],["ພວກເຮົາ",-13.070171356201172],["ığı",-13.070196151733398],["ദേശ",-13.07020378112793],["▁άλλων",-13.070237159729004],["▁कार्यान्वयन",-13.070237159729004],["▁wanafunzi",-13.070241928100586],["▁କହିଲେ",-13.070245742797852],["▁Expert",-13.070246696472168],["互動",-13.070250511169434],["▁జో",-13.070258140563965],["นักศึกษา",-13.070263862609863],["ernas",-13.07026481628418],["▁chàng",-13.070294380187988],["輸入",-13.070303916931152],["▁सध्या",-13.070304870605469],["▁สิ่งที่",-13.070322036743164],["фин",-13.070352554321287],["ละคร",-13.070393562316896],["fors",-13.070404052734377],["▁Cher",-13.07041358947754],["▁udrž",-13.070415496826172],["ENDA",-13.07041835784912],["▁arada",-13.070436477661133],["▁스포츠",-13.070439338684082],["▁сказав",-13.070446968078612],["▁contain",-13.07045555114746],["▁කෙ",-13.070456504821776],["▁systeem",-13.07046890258789],["▁ölkənin",-13.0704927444458],["▁अपराध",-13.070500373840332],["σες",-13.070527076721191],["conf",-13.070541381835938],["ประเภท",-13.07056713104248],["▁ფილმები",-13.070592880249023],["▁برادر",-13.07059383392334],["顧",-13.070601463317873],["▁אחרת",-13.07060718536377],["▁साइट",-13.070608139038086],["खर",-13.07064151763916],["▁COR",-13.070649147033691],["istische",-13.070669174194336],["فات",-13.07069206237793],["اها",-13.070723533630373],["立法",-13.070723533630373],["▁persist",-13.07073211669922],["▁connu",-13.07075309753418],["പ്പെട്ടു",-13.070754051208496],["▁ervoor",-13.070762634277344],["▁तेज",-13.07079029083252],["▁kolejny",-13.070802688598633],["уст",-13.07080364227295],["Live",-13.070839881896973],["IKU",-13.070849418640137],["ົບ",-13.070856094360352],["▁Украина
",-13.070881843566896],["▁ເຮົາ",-13.070900917053224],["▁სახელი",-13.070923805236816],["▁čí",-13.07093906402588],["snitt",-13.070945739746094],["▁извършва",-13.070945739746094],["▁обез",-13.070999145507812],["▁analisi",-13.071003913879396],["▁hijos",-13.07101821899414],["▁رکھا",-13.071025848388672],["віз",-13.071044921875],["▁аг",-13.071063041687012],["значи",-13.071069717407228],["▁монтаж",-13.071094512939451],["ණය",-13.071101188659668],["lær",-13.071109771728516],["lepas",-13.071123123168944],["▁katılım",-13.071124076843262],["▁ಇಷ್ಟ",-13.071124076843262],["miem",-13.071176528930664],["▁tume",-13.071179389953612],["▁թեկնածու",-13.071194648742676],["IČ",-13.07121467590332],["▁förut",-13.07121753692627],["ложени",-13.071229934692385],["ؤں",-13.07124137878418],["▁Bude",-13.071242332458496],["பெ",-13.071259498596191],["רמ",-13.071310997009276],["▁Iron",-13.07131290435791],["▁കിട്ട",-13.071328163146973],["▁душ",-13.07133960723877],["kuba",-13.071352005004885],["SUS",-13.071370124816896],["eamh",-13.071371078491213],["▁Kirk",-13.071378707885742],["ෙකු",-13.071380615234377],["が多く",-13.071393013000488],["▁helyen",-13.07139492034912],["▁جلال",-13.071398735046388],["▁කැමති",-13.071407318115234],["桶",-13.0714111328125],["澳門",-13.071431159973145],["▁acer",-13.071441650390623],["▁tietoa",-13.071441650390623],["▁Council",-13.07144832611084],["▁Nintendo",-13.07144832611084],["▁sicuramente",-13.07144832611084],["▁είμαστε",-13.07144832611084],["▁прокуратура",-13.07144832611084],["▁ദേശീയ",-13.071451187133787],["▁težav",-13.07145881652832],["▁upload",-13.071471214294434],["saat",-13.071500778198242],["õigus",-13.071505546569824],["▁jakieś",-13.07150936126709],["▁notte",-13.07150936126709],["rwa",-13.071510314941406],["prov",-13.071533203125],["▁brz",-13.071552276611328],["兩人",-13.071571350097656],["▁terör",-13.071575164794922],["▁dalk",-13.071577072143556],["лерге",-13.071619987487791],["▁resort",-13.071624755859377],["лици",-13.071649551391602],["▁balio",-13.071660041809082],["sep",-13.07167148590088],["▁الحكم",-13.071687698364258],["▁zahtjev",-13.071721076965332],["штет",-13.071722984313965],["▁өөрчлөлт",-13.071731567382812],["▁184",-13.07174015045166],["▁Description",-13.071751594543455],["▁ચે",-13.07176399230957],["▁ammatti",-13.071770668029783],["World",-13.071776390075684],["ମେ",-13.071779251098633],["▁korva",-13.071805953979492],["▁Pok",-13.071806907653809],["显",-13.071806907653809],["nčius",-13.07181453704834],["▁hefði",-13.071821212768556],["үшү",-13.07183074951172],["▁inşa",-13.07184600830078],["bov",-13.071855545043944],["▁jon",-13.07187271118164],["empat",-13.07187557220459],["▁Tesla",-13.071908950805664],["Бе",-13.07193374633789],["იცი",-13.071935653686523],["▁MIL",-13.071956634521484],["▁yor",-13.071969032287598],["ovina",-13.07198715209961],["▁текот",-13.071990966796877],["▁terrain",-13.071998596191406],["▁већи",-13.072011947631836],["▁લાગે",-13.07203483581543],["adhi",-13.072064399719238],["▁snaž",-13.072094917297363],["▁negro",-13.072099685668944],["世界上",-13.07212734222412],["▁lina",-13.072132110595703],["▁உங்களுக்கு",-13.072149276733398],["▁artistas",-13.072157859802246],["TOP",-13.07217025756836],["▁Пас",-13.072192192077637],["▁сы",-13.072205543518066],["zari",-13.072221755981444],["ڑے",-13.072247505187988],["▁sexo",-13.07225513458252],["ഭാ",-13.072263717651367],["▁takmer",-13.072272300720217],["謝",-13.072273254394531],["คล",-13.07228660583496],["你是",-13.072290420532228],["▁HEL",-13.07229709625244],["istri",-13.07230281829834],["हरी",-13.072306632995604],["▁toti",-13.07236671447754],["tzt",
-13.072400093078612],["▁ayır",-13.072406768798828],["estar",-13.072412490844728],["▁सुर",-13.072426795959473],["ນັບ",-13.072437286376951],["▁קרא",-13.072449684143066],["Gal",-13.07246208190918],["ріп",-13.072464942932127],["高級",-13.072469711303713],["sano",-13.07247257232666],["▁registrere",-13.072477340698242],["▁läks",-13.072481155395508],["럼",-13.072481155395508],["عات",-13.07253360748291],["hake",-13.07254695892334],["シャ",-13.072579383850098],["▁ruč",-13.07260513305664],["▁مقامی",-13.07260513305664],["虫",-13.072606086730955],["▁երիտասարդ",-13.07260799407959],["▁утвержден",-13.07261848449707],["蓄",-13.072630882263184],["๐",-13.072656631469728],["ቄ",-13.072660446166992],["▁İbrahim",-13.072660446166992],["▁ئارقىلىق",-13.072660446166992],["▁κανένα",-13.072664260864258],["▁الشركات",-13.072664260864258],["▁ପ୍ରକାଶ",-13.072664260864258],["▁protek",-13.072667121887209],["▁نوموړ",-13.072667121887209],["▁прад",-13.072675704956056],["▁ಬೇ",-13.072677612304688],["▁powiedział",-13.072693824768066],["▁syr",-13.072699546813965],["EO",-13.07271671295166],["▁позиции",-13.072717666625977],["▁ಮನ",-13.072755813598633],["▁vojni",-13.072760581970217],["▁uva",-13.072768211364746],["▁කරගෙන",-13.072772979736328],["▁claus",-13.072789192199709],["▁tekanan",-13.072818756103516],["jada",-13.072820663452148],["avond",-13.07285213470459],["ინი",-13.072863578796388],["▁pasal",-13.072863578796388],["सभा",-13.07286548614502],["▁relativa",-13.072888374328612],["যুক্ত",-13.072900772094728],["öp",-13.07292366027832],["▁प्रसिद्ध",-13.072940826416016],["▁Cole",-13.072941780090332],["▁birgə",-13.07294750213623],["дир",-13.07296371459961],["▁oznacza",-13.072967529296877],["дня",-13.072973251342772],["▁ئەر",-13.07298469543457],["▁කාලයක්",-13.072985649108888],["šej",-13.073004722595217],["▁sandal",-13.07304859161377],["break",-13.073058128356934],["ບັດ",-13.073065757751465],["▁ogleda",-13.073077201843262],["▁Hoved",-13.073097229003906],["▁Zoo",-13.073131561279297],["▁மக",-13.07313346862793],["ເຫດ",-13.073145866394045],["▁مليار",-13.07317352294922],["▁Πολ",-13.073187828063965],["tical",-13.073190689086914],["律",-13.073213577270508],["▁gücü",-13.073229789733888],["žio",-13.073244094848633],["dder",-13.073247909545898],["▁paino",-13.073250770568848],["▁əks",-13.073265075683594],["▁arrangement",-13.07326602935791],["▁tayari",-13.073290824890137],["దేశం",-13.073311805725098],["▁utilisé",-13.073347091674805],["စစ္",-13.073356628417969],["firði",-13.073392868041992],["rilla",-13.073426246643066],["▁nainen",-13.073443412780762],["icis",-13.07345199584961],["▁오는",-13.07345199584961],["▁1926",-13.073456764221191],["தர்",-13.073460578918455],["1990",-13.073464393615724],["wé",-13.07352352142334],["ಯೋಗ",-13.07352352142334],["▁dạ",-13.073553085327148],["▁Amer",-13.073568344116213],["▁avança",-13.073620796203612],["kên",-13.07362937927246],["▁участник",-13.073637008666992],["一边",-13.073655128479004],["▁салон",-13.073690414428713],["▁Tava",-13.073692321777344],["▁ବର୍ଷା",-13.073700904846191],["还会",-13.07370948791504],["mella",-13.073776245117188],["ດ້ວຍ",-13.073780059814451],["▁opplevelse",-13.07378387451172],["ázat",-13.07379150390625],["▁internal",-13.073793411254885],["вары",-13.073797225952148],["▁çalışır",-13.073807716369627],["そのため",-13.073809623718262],["▁кле",-13.073823928833008],["▁quart",-13.07382583618164],["▁Ms",-13.073851585388184],["▁защиты",-13.07387351989746],["▁सुझाव",-13.07387351989746],["ाउनु",-13.07387638092041],["▁toepassing",-13.073882102966309],["ទ្រ",-13.073894500732422],["▁Онлайн",-13.073899269104004],["▁mbeadh",-13.07390
2130126951],["good",-13.073997497558594],["mize",-13.073999404907228],["▁brasileira",-13.074016571044922],["▁deler",-13.074018478393556],["▁요구",-13.074027061462402],["▁fyrst",-13.074113845825195],["▁вс",-13.074126243591309],["意味着",-13.074138641357422],["▁salve",-13.074141502380373],["▁Lut",-13.074161529541016],["scope",-13.074182510375977],["▁séu",-13.074267387390137],["ტრო",-13.07430648803711],["▁vyšší",-13.07431411743164],["▁сіз",-13.074336051940918],["цін",-13.074338912963867],["的力量",-13.074350357055664],["▁ampli",-13.074352264404297],["▁Студ",-13.074357986450195],["▁Know",-13.074359893798828],["▁Dowladda",-13.074360847473145],["रहे",-13.074406623840332],["▁organiser",-13.074408531188965],["▁namaz",-13.074417114257812],["▁издава",-13.074417114257812],["▁οτι",-13.074418067932127],["▁gate",-13.074419975280762],["▁ఊ",-13.074424743652344],["cules",-13.074429512023926],["▁मना",-13.07444953918457],["პე",-13.074451446533203],["ందో",-13.074466705322266],["▁nosos",-13.074466705322266],["▁sebahagian",-13.074469566345217],["прем",-13.074470520019531],["▁אים",-13.074477195739746],["ેજ",-13.074478149414062],["нюю",-13.074488639831545],["న్నీ",-13.074514389038086],["▁líne",-13.074514389038086],["▁خاموش",-13.074529647827148],["käyttö",-13.07456398010254],["▁പൂര്",-13.074593544006348],["▁Cerita",-13.07459545135498],["etarako",-13.074612617492676],["ไม",-13.07461929321289],["▁француз",-13.074627876281738],["▁پرواز",-13.074646949768066],["▁Blogg",-13.074676513671877],["ปลาย",-13.074678421020508],["ksiyon",-13.074679374694824],["▁beneficios",-13.074684143066406],["▁öö",-13.074691772460938],["nent",-13.074715614318848],["▁учество",-13.074750900268556],["▁вияв",-13.07475757598877],["▁ক্যা",-13.074792861938477],["▁Rico",-13.074799537658691],["zuar",-13.074819564819336],["isere",-13.074823379516602],["▁Content",-13.074830055236816],["天气",-13.07483196258545],["▁İç",-13.074846267700195],["▁Дра",-13.07485008239746],["なぁ",-13.074854850769045],["▁инс",-13.074858665466309],["▁өрөө",-13.074877738952637],["ASS",-13.074878692626951],["ομαι",-13.074885368347168],["▁thụ",-13.07489013671875],["▁centres",-13.074912071228027],["radd",-13.074914932250977],["სწორ",-13.07493019104004],["▁شيخ",-13.074944496154783],["洁",-13.074950218200684],["▁8.1",-13.074954986572266],["리고",-13.074958801269531],["▁najviše",-13.074960708618164],["plati",-13.074970245361328],["maran",-13.074975967407228],["▁eva",-13.075033187866213],["▁2-0",-13.075039863586426],["▁labu",-13.075079917907717],["▁masser",-13.075085639953612],["ប្រុស",-13.075087547302246],["▁Cymraeg",-13.075088500976562],["▁tươi",-13.075088500976562],["▁тұлға",-13.075088500976562],["씩",-13.07508945465088],["▁Roulette",-13.075093269348145],["▁ಒಬ್ಬ",-13.075096130371094],["▁خوشحال",-13.075105667114258],["\">",-13.075106620788574],["绿色",-13.075109481811523],["রের",-13.075111389160156],["ицу",-13.075118064880373],["หวาน",-13.075121879577637],["▁viskas",-13.075130462646484],["▁beso",-13.075138092041016],["▁sekaligus",-13.075140953063965],["▁bras",-13.075155258178713],["алтын",-13.075165748596191],["▁እጅግ",-13.07517147064209],["▁víkend",-13.075181007385254],["ınca",-13.075197219848633],["▁लगता",-13.075202941894531],["▁Där",-13.07523250579834],["▁извън",-13.075240135192873],["ஸ்ட்",-13.075252532958984],["hole",-13.0752534866333],["▁ഒന്നും",-13.075264930725098],["▁metra",-13.07528591156006],["▁jeans",-13.075356483459473],["ريا",-13.075362205505373],["poro",-13.07536792755127],["▁pagkain",-13.07537078857422],["bell",-13.075374603271484],["▁lur",-13.075384140014648],["çılar",-13.075385093688965]
,["▁annyira",-13.075389862060549],["ਕਸ",-13.075419425964355],["▁భాష",-13.075430870056152],["▁zdravotní",-13.075434684753418],["▁Mimo",-13.075471878051758],["▁ከማ",-13.075481414794922],["▁لأنه",-13.075511932373049],["▁бірге",-13.075528144836426],["▁راہ",-13.07554054260254],["რობ",-13.075604438781738],["▁ફર",-13.075611114501951],["УК",-13.07561492919922],["▁חיי",-13.07563591003418],["ສີ",-13.075678825378418],["▁certificat",-13.075681686401367],["inat",-13.075690269470217],["ιό",-13.075691223144531],["▁Vent",-13.07569694519043],["有一",-13.075711250305176],["▁Unternehmens",-13.07572078704834],["▁იმის",-13.075739860534668],["цця",-13.07575798034668],["wegen",-13.075786590576172],["lyse",-13.075817108154297],["▁turli",-13.07582950592041],["▁Jorge",-13.075838088989258],["لغ",-13.075844764709473],["spin",-13.075847625732422],["物质",-13.07587432861328],["▁Dispo",-13.07589340209961],["▁pysy",-13.07590103149414],["▁Bashk",-13.075925827026367],["▁Bek",-13.075963973999023],["ülő",-13.07596492767334],["▁traduc",-13.075968742370604],["▁බු",-13.075968742370604],["▁نداشت",-13.075979232788086],["arias",-13.075989723205566],["condi",-13.07599639892578],["▁Platform",-13.076003074645996],["▁македонски",-13.076003074645996],["▁1927",-13.076026916503906],["овим",-13.07603359222412],["ควบคุม",-13.076071739196776],["▁៨",-13.076072692871094],["▁भित्र",-13.076112747192385],["lasan",-13.07613468170166],["▁расп",-13.076197624206545],["බු",-13.076202392578123],["▁osigura",-13.076202392578123],["zbekistonda",-13.07622241973877],["▁Erst",-13.076233863830566],["XT",-13.07624340057373],["思维",-13.076265335083008],["ējot",-13.07629108428955],["▁αρχή",-13.076313972473145],["▁гульні",-13.076318740844728],["▁صباح",-13.076324462890623],["▁ಬೇರೆ",-13.076330184936523],["▁πάλι",-13.07633113861084],["▁kelamin",-13.076332092285156],["১২",-13.076346397399902],["▁சிறப்பு",-13.076350212097168],["▁билдирди",-13.076354026794434],["▁Север",-13.07636260986328],["▁ലക്ഷം",-13.076382637023926],["又是",-13.076383590698242],["▁individuelle",-13.07638454437256],["▁disease",-13.076386451721191],["ambu",-13.076388359069824],["ulang",-13.07638931274414],["▁ಸ್ಥಳ",-13.076419830322266],["▁Após",-13.076422691345217],["▁roinnt",-13.076423645019531],["ករ",-13.076427459716797],["тени",-13.07642936706543],["▁ostvari",-13.07643985748291],["▁Sweden",-13.076443672180176],["▁частности",-13.076465606689451],["носить",-13.076470375061035],["▁biro",-13.076473236083984],["▁търси",-13.07648754119873],["▁hissəsi",-13.076488494873049],["▁Handel",-13.076493263244627],["count",-13.076509475708008],["▁играч",-13.076515197753906],["uló",-13.07654857635498],["▁følelse",-13.076562881469728],["hinna",-13.076566696166992],["▁menstrua",-13.07656955718994],["ફો",-13.07657241821289],["ређен",-13.076574325561523],["▁ಇದೇ",-13.07657527923584],["▁Ży",-13.076581954956056],["▁ପୁ",-13.0765962600708],["▁കൂടെ",-13.076608657836914],["370",-13.07662868499756],["▁páginas",-13.076642036437988],["▁informar",-13.07666015625],["wach",-13.076687812805176],["▁Haag",-13.076687812805176],["▁sistemin",-13.076705932617188],["اسی",-13.076765060424805],["▁Sök",-13.076776504516602],["ício",-13.07679271697998],["▁Fon",-13.076804161071776],["dılar",-13.07683563232422],["pust",-13.076868057250977],["▁tarkoit",-13.076873779296877],["τοι",-13.07688331604004],["rind",-13.076933860778809],["รอง",-13.076945304870604],["264",-13.07695770263672],["▁රො",-13.076983451843262],["iĝi",-13.076996803283691],["▁ülés",-13.076996803283691],["▁прео",-13.076998710632324],["▁maaf",-13.077012062072754],["▁Otto",-13.077033996582031],[
"▁įmonės",-13.077062606811523],["ജന",-13.077073097229004],["▁trừ",-13.077086448669434],["▁části",-13.077093124389648],["▁bond",-13.07709789276123],["ayeen",-13.077128410339355],["效益",-13.077136039733888],["鲜",-13.077136039733888],["▁einigen",-13.07715129852295],["▁कळ",-13.077160835266112],["▁tuntuu",-13.077166557312012],["▁Aš",-13.077173233032228],["~~~",-13.07717514038086],["▁relacionados",-13.077207565307615],["▁expand",-13.077216148376465],["ставити",-13.077274322509766],["▁commence",-13.077289581298828],["ndlela",-13.077308654785156],["ставлен",-13.077325820922852],["ούρ",-13.07733917236328],["ಡೇ",-13.077366828918455],["▁бокс",-13.077386856079102],["▁חמ",-13.077402114868164],["▁Jenis",-13.077412605285645],["▁படிக்க",-13.077436447143556],["稀",-13.077438354492188],["bem",-13.077449798583984],["▁kome",-13.0774507522583],["▁teknisk",-13.07747745513916],["盗",-13.077485084533691],["▁හැබැයි",-13.077489852905272],["tenkin",-13.077506065368652],["▁ankoraŭ",-13.077523231506348],["▁niciodată",-13.077523231506348],["▁thầy",-13.077523231506348],["▁માહિતી",-13.077523231506348],["▁නෙවෙයි",-13.07752513885498],["▁الفترة",-13.07752799987793],["▁Kiedy",-13.07753562927246],["▁неговото",-13.077540397644045],["ୁଆ",-13.077550888061523],["ရွား",-13.077570915222168],["▁Informazio",-13.077577590942385],["重視",-13.077579498291016],["▁aqoon",-13.077592849731444],["▁šalies",-13.077598571777344],["ବଳ",-13.077603340148926],["لمان",-13.07760524749756],["▁දෙකක්",-13.077644348144531],["ეტი",-13.07766342163086],["▁compren",-13.077674865722656],["▁paste",-13.077675819396973],["▁annyi",-13.07770824432373],["داران",-13.077709197998049],["▁maxime",-13.077718734741213],["▁мъ",-13.077746391296388],["▁सम्म",-13.077763557434082],["총",-13.077770233154297],["위에",-13.077771186828612],["▁עולם",-13.07777500152588],["тердин",-13.077776908874512],["urta",-13.07780647277832],["▁delicat",-13.07782745361328],["▁хран",-13.077838897705078],["▁hest",-13.077851295471191],["▁hehehe",-13.07786750793457],["▁Spieler",-13.07787036895752],["Od",-13.077914237976074],["▁pelajaran",-13.077946662902832],["▁eventuell",-13.077951431274414],["ပတ္",-13.077963829040527],["▁burg",-13.07796859741211],["písať",-13.077969551086426],["思う",-13.077969551086426],["▁quiero",-13.077977180480955],["చ్చి",-13.07798671722412],["众多",-13.0780029296875],["▁جتي",-13.078012466430664],["નાર",-13.078091621398926],["▁ชั่วโมง",-13.078094482421877],["ตอนนี้",-13.078103065490724],["▁गेले",-13.078113555908203],["ιστές",-13.078124046325684],["▁Paar",-13.078203201293944],["▁arvioi",-13.078213691711426],["▁Добро",-13.07821559906006],["▁ખો",-13.07823085784912],["ВІ",-13.078256607055664],["▁такси",-13.078256607055664],["ですよね",-13.078266143798828],["▁cron",-13.07826805114746],["iyang",-13.07828426361084],["▁Alo",-13.078288078308104],["тельных",-13.078335762023926],["▁éis",-13.078405380249023],["▁ხელი",-13.078408241271973],["▁nata",-13.07843017578125],["▁طا",-13.078483581542969],["▁mab",-13.078484535217283],["▁юни",-13.078495025634766],["่อ",-13.078496932983398],["▁lampa",-13.078514099121094],["▁Սարգսյանի",-13.07854175567627],["erà",-13.078564643859863],["نزل",-13.078571319580078],["▁Drog",-13.078574180603027],["▁Jú",-13.078575134277344],["nić",-13.078583717346191],["▁accompagna",-13.078587532043455],["eći",-13.078617095947266],["ňov",-13.078636169433594],["πάρ",-13.078651428222656],["▁сүйлө",-13.078655242919922],["▁nomor",-13.07868480682373],["耀",-13.078694343566896],["ovanju",-13.078704833984377],["廢",-13.078723907470703],["查詢",-13.07872486114502],["서울",-13.078727722167969],["水果",-13.078
734397888184],["พยายาม",-13.078740119934082],["▁obě",-13.078744888305664],["▁obicei",-13.078746795654297],["▁ਸਾਡੇ",-13.078747749328612],["▁Դուք",-13.078750610351562],["▁լինելու",-13.07875919342041],["▁پارٽي",-13.078763961791992],["▁Mountain",-13.078765869140623],["▁முழு",-13.078775405883787],["▁लामो",-13.078777313232422],["▁stipendi",-13.078783988952637],["▁həsr",-13.078784942626951],["▁שרי",-13.078790664672852],["▁reflex",-13.0787992477417],["▁कहते",-13.078804969787598],["▁මෙන්ම",-13.078807830810549],["एँ",-13.078808784484863],["Energ",-13.07880973815918],["▁практики",-13.078811645507812],["▁సర్",-13.07882595062256],["▁جدول",-13.078827857971191],["▁(2010)",-13.078840255737305],["jong",-13.078856468200684],["▁breath",-13.078858375549316],["▁інструмент",-13.078869819641112],["ಿಸಿದರು",-13.078886985778809],["▁медал",-13.078889846801758],["τήρια",-13.078911781311035],["らしい",-13.078916549682615],["▁berusia",-13.0789213180542],["áron",-13.078925132751465],["▁семьи",-13.078927040100098],["▁copie",-13.078933715820312],["▁edu",-13.078940391540527],["▁këtu",-13.07894229888916],["▁১৫",-13.0789794921875],["53)",-13.07898235321045],["▁puedan",-13.078983306884766],["的效果",-13.078988075256348],["▁eficiente",-13.07899284362793],["mija",-13.079002380371094],["▁konuş",-13.079042434692385],["алната",-13.079045295715332],["karna",-13.07905101776123],["ičku",-13.079052925109863],["▁birinchi",-13.07905387878418],["▁ఫ",-13.079057693481444],["▁Према",-13.079063415527344],["gant",-13.07906723022461],["ទូ",-13.079068183898926],["صرف",-13.079073905944824],["ົວ",-13.079086303710938],["▁měsíc",-13.079092025756836],["kont",-13.079095840454102],["▁hayvan",-13.07909870147705],["▁Tõ",-13.079103469848633],["▁beheer",-13.079111099243164],["ानु",-13.07912254333496],["▁главно",-13.079197883605955],["▁процесі",-13.079205513000488],["▁funnet",-13.079206466674805],["▁ប៉ុន្តែ",-13.079231262207031],["geen",-13.079241752624512],["စကား",-13.079261779785156],["active",-13.0792875289917],["ଦିନ",-13.079305648803713],["activ",-13.079310417175291],["▁පාලනය",-13.079313278198242],["▁സീ",-13.07931900024414],["▁šla",-13.079319953918455],["▁ਘ",-13.07933235168457],["참",-13.079333305358888],["▁كور",-13.079344749450684],["беларус",-13.079346656799316],["▁850",-13.079392433166504],["dėjo",-13.07939624786377],["Йорк",-13.07939624786377],["▁fira",-13.079412460327148],["▁رائے",-13.079416275024414],["ຄ້າ",-13.079426765441896],["▁კე",-13.07943630218506],["њој",-13.079532623291016],["mú",-13.079561233520508],["▁dương",-13.079574584960938],["vedi",-13.079575538635254],["tājs",-13.079584121704102],["▁zobacz",-13.079585075378418],["▁сумма",-13.079602241516112],["kowski",-13.079607009887695],["▁Ident",-13.07961654663086],["ofa",-13.07969856262207],["▁వ్యక్తి",-13.07973861694336],["▁kaps",-13.079742431640623],["kryt",-13.079751968383787],["軽",-13.079761505126951],["▁Guy",-13.079771995544434],["▁Perak",-13.079771995544434],["polski",-13.079829216003418],["nách",-13.079833984375],["▁দেয়া",-13.079833984375],["▁සො",-13.079838752746582],["▁tiste",-13.079848289489746],["▁Või",-13.079852104187012],["čov",-13.079901695251465],["▁παρουσία",-13.07991886138916],["澳洲",-13.079920768737791],["▁alvorlig",-13.079927444458008],["jesh",-13.079931259155272],["▁Amal",-13.079934120178224],["模型",-13.079936027526855],["▁누",-13.07993984222412],["▁odkaz",-13.079949378967283],["άζουν",-13.079955101013184],["雰囲気",-13.079955101013184],["▁Caerdydd",-13.079963684082031],["▁aliquip",-13.079963684082031],["▁məsuliyyət",-13.079963684082031],["▁ĉirkaŭ",-13.079963684082031],["▁ವರದಿ",-13.07996
3684082031],["▁квітня",-13.079964637756348],["▁युवक",-13.07996654510498],["▁բռն",-13.079970359802246],["▁grama",-13.079973220825195],["▁गर्नुपर्ने",-13.079974174499512],["▁dizi",-13.079992294311523],["▁pagu",-13.079998970031738],["▁Vah",-13.08000373840332],["▁occidental",-13.08000373840332],["▁ภาพ",-13.080008506774902],["ښې",-13.080016136169434],["▁števil",-13.08004093170166],["▁Ή",-13.08004093170166],["▁nähdä",-13.080049514770508],["єктів",-13.08005142211914],["▁действий",-13.080060958862305],["▁yıllık",-13.080076217651367],["▁хойш",-13.080076217651367],["▁Charlie",-13.080080032348633],["ေဝ",-13.080097198486328],["▁Мед",-13.080102920532228],["tības",-13.080106735229492],["استمرار",-13.080108642578123],["arbeta",-13.08010959625244],["ত্",-13.080140113830566],["今後",-13.080150604248049],["ກາງ",-13.08016586303711],["▁бюджета",-13.080174446105955],["ikoa",-13.080184936523438],["▁살아",-13.080184936523438],["▁09.",-13.080206871032717],["ଏସ",-13.08021354675293],["的企业",-13.080217361450195],["▁istediği",-13.080220222473145],["▁zoo",-13.080230712890623],["▁kultūros",-13.080286026000977],["▁marker",-13.080292701721191],["▁kopen",-13.080299377441406],["▁доведе",-13.080302238464355],["ραβ",-13.08031940460205],["▁čaka",-13.080337524414062],["▁oran",-13.080430030822754],["rajz",-13.0804443359375],["▁പോകുന്ന",-13.08044719696045],["▁escribir",-13.080449104309082],["خفي",-13.08049201965332],["icul",-13.080496788024902],["رسان",-13.08049774169922],["▁dono",-13.0805025100708],["gura",-13.080514907836914],["▁diff",-13.080523490905762],["охо",-13.080530166625977],["▁потр",-13.08055591583252],["▁skolan",-13.080574989318848],["▁۱۸",-13.080578804016112],["▁Bern",-13.080586433410645],["ladığı",-13.080689430236816],["▁ദിന",-13.080705642700195],["γέ",-13.080713272094728],["Max",-13.08072280883789],["تقد",-13.080728530883787],["▁romantik",-13.080730438232422],["▁död",-13.08073616027832],["▁vrouwen",-13.080745697021484],["jî",-13.0807466506958],["▁Anima",-13.080775260925291],["layın",-13.080785751342772],["ບາງ",-13.080815315246582],["两国",-13.08084774017334],["▁lương",-13.080860137939451],["▁proposé",-13.080865859985352],["▁lots",-13.080887794494627],["_--",-13.080891609191896],["dys",-13.08091163635254],["rky",-13.080924034118652],["現象",-13.080942153930664],["▁ład",-13.080950736999512],["▁Yazı",-13.080958366394045],["νδ",-13.080975532531738],["▁സമ",-13.08099365234375],["▁kehittämis",-13.08101749420166],["▁رهبری",-13.081064224243164],["▁tebe",-13.081073760986328],["هور",-13.081083297729492],["▁afër",-13.081125259399414],["▁ଭଲ",-13.081133842468262],["▁скор",-13.081135749816896],["ارد",-13.081158638000488],["▁사람들",-13.081164360046388],["εστ",-13.081188201904297],["▁Büyükşehir",-13.081188201904297],["▁muhiim",-13.081191062927246],["цького",-13.081194877624512],["▁නොවන",-13.08120059967041],["的服务",-13.081207275390623],["▁Söz",-13.081210136413574],["▁पिछले",-13.081210136413574],["▁पांच",-13.081217765808104],["dəki",-13.08122444152832],["▁Սարգսյանը",-13.08122730255127],["wahi",-13.081249237060549],["▁Podľa",-13.081259727478027],["▁ग्राम",-13.08126163482666],["▁husband",-13.081263542175291],["▁construcción",-13.08128261566162],["йно",-13.081294059753418],["학생",-13.08132266998291],["▁សង្គម",-13.081342697143556],["▁redde",-13.08135986328125],["ออกจาก",-13.081385612487791],["▁Svo",-13.081413269042969],["ходи",-13.081419944763184],["▁λι",-13.081430435180664],["▁anket",-13.081461906433104],["幾個",-13.0814790725708],["▁Ner",-13.081487655639648],["▁fixe",-13.08151149749756],["▁astu",-13.08151626586914],["▁системе",-13.081535339355469],["
▁увид",-13.081578254699709],["ically",-13.081609725952148],["ريح",-13.081642150878906],["▁నో",-13.081659317016602],["▁sacar",-13.08168888092041],["▁dicht",-13.08169174194336],["istica",-13.081695556640623],["qiy",-13.08171844482422],["센",-13.081817626953123],["▁DEN",-13.081819534301758],["▁Wasiir",-13.081822395324709],["▁03.",-13.0818452835083],["▁ngang",-13.081859588623049],["▁zavr",-13.081866264343262],["▁izdela",-13.081875801086426],["wiesen",-13.081883430480955],["▁skon",-13.081894874572754],["kundige",-13.0819091796875],["▁kupon",-13.08205509185791],["▁הקו",-13.08206272125244],["▁catalog",-13.0820894241333],["holen",-13.082091331481934],["положен",-13.082098960876465],["መት",-13.08210277557373],["שרה",-13.082112312316896],["▁budem",-13.082112312316896],["टक",-13.082117080688477],["▁pareja",-13.082120895385742],["ічної",-13.082144737243652],["ીય",-13.082164764404297],["тись",-13.082167625427246],["saí",-13.082171440124512],["لىن",-13.082246780395508],["▁مواجه",-13.082253456115724],["▁tere",-13.08226490020752],["▁ברור",-13.08226490020752],["သင့္",-13.082289695739746],["ғай",-13.082290649414062],["▁տոն",-13.082338333129885],["狼",-13.082343101501465],["▁formular",-13.082366943359377],["ໃຕ້",-13.082386016845703],["▁roli",-13.082405090332031],["ေျမာက္",-13.082409858703612],["▁legtöbb",-13.082409858703612],["▁dziewczyn",-13.08241081237793],["▁продукції",-13.08241081237793],["▁አቀፍ",-13.08241081237793],["▁ਜਾਣਕਾਰੀ",-13.082412719726562],["resa",-13.08241367340088],["▁zaterdag",-13.08241367340088],["▁şarkı",-13.08241367340088],["▁văzut",-13.082414627075195],["▁масштаб",-13.082414627075195],["▁працівників",-13.082438468933104],["हित",-13.082443237304688],["▁noastra",-13.082447052001951],["▁ehita",-13.08245086669922],["▁helm",-13.08245086669922],["▁Stati",-13.082457542419434],["▁siyasî",-13.082460403442385],["▁Maal",-13.082468032836914],["▁θέματα",-13.082469940185549],["ඩු",-13.082479476928713],["▁Fag",-13.082483291625977],["مور",-13.082484245300291],["▁सर्वाधिक",-13.082494735717772],["यल",-13.08251667022705],["ضى",-13.08252239227295],["lhas",-13.082524299621582],["▁لکھ",-13.082544326782228],["▁voorbeeld",-13.082566261291504],["stige",-13.082571029663086],["ács",-13.08260726928711],["▁intenta",-13.082629203796388],["▁såsom",-13.082636833190918],["▁pertinent",-13.082642555236816],["ordnung",-13.08264446258545],["▁švies",-13.08267879486084],["ומה",-13.082685470581056],["▁nā",-13.082723617553713],["▁вопросам",-13.08277416229248],["▁ktorého",-13.08277702331543],["▁partener",-13.082781791687012],["▁Strona",-13.08278751373291],["▁Geni",-13.08280086517334],["▁ڈر",-13.082803726196287],["ministeri",-13.082805633544922],["kümne",-13.08281421661377],["▁Pravi",-13.082830429077148],["▁definit",-13.082849502563477],["▁všem",-13.082878112792969],["▁کره",-13.082886695861816],["▁sakin",-13.082891464233398],["▁marr",-13.082894325256348],["▁odnos",-13.082914352416992],["ődés",-13.082918167114258],["ลักษณะ",-13.082928657531738],["avait",-13.082948684692385],["▁თანა",-13.083025932312012],["▁runā",-13.083036422729492],["大きく",-13.083051681518556],["tāju",-13.08306884765625],["▁ਫਿਰ",-13.083096504211426],["είων",-13.08310604095459],["enko",-13.083137512207031],["ינע",-13.083138465881348],["▁zdr",-13.08314037322998],["hahaha",-13.083162307739258],["schließ",-13.08318328857422],["lied",-13.0831937789917],["▁edirlər",-13.083253860473633],["天的",-13.083264350891112],["▁પડી",-13.08327293395996],["▁sjekke",-13.083284378051758],["▁संग",-13.083287239074709],["▁дай",-13.08332061767578],["▁зашто",-13.08333969116211],["▁nire",-13.083370
208740234],["▁destes",-13.083391189575195],["cijas",-13.083407402038574],["ących",-13.083410263061523],["vnih",-13.083422660827637],["ანდ",-13.08342742919922],["elių",-13.08344554901123],["▁پارک",-13.083487510681152],["ایر",-13.083515167236328],["ქა",-13.083521842956545],["▁svona",-13.083563804626465],["аагүй",-13.083576202392578],["vaju",-13.083579063415527],["諾",-13.083582878112791],["ნარ",-13.083585739135742],["ຊາ",-13.08360481262207],[".[3]",-13.083606719970703],["kumar",-13.083613395690918],["経済",-13.083614349365234],["▁illik",-13.08362102508545],["▁pyy",-13.083624839782717],["▁följa",-13.083630561828612],["▁Ciebie",-13.083636283874512],["▁Kumpulan",-13.083636283874512],["▁Trondheim",-13.083636283874512],["▁nắm",-13.083636283874512],["▁propuesta",-13.083636283874512],["▁rôznych",-13.083636283874512],["▁нөлөө",-13.083636283874512],["▁маданият",-13.083641052246094],["▁telefoon",-13.083642959594728],["▁ప్రముఖ",-13.083645820617676],["▁표현",-13.083647727966309],["▁erantzun",-13.083654403686523],["▁efficitur",-13.083661079406738],["▁maskin",-13.083677291870115],["uojant",-13.0836820602417],["ਿਲ",-13.0836820602417],["»։",-13.083684921264648],["▁председника",-13.083691596984863],["شنبې",-13.083693504333496],["ຊາຍ",-13.083744049072266],["года",-13.083745002746582],["▁நேரம்",-13.083745956420898],["▁rīko",-13.083765029907228],["▁شاء",-13.083765029907228],["▁heu",-13.083770751953123],["ร้านค้า",-13.083780288696287],["▁sofre",-13.08379077911377],["ಂಪ",-13.08380126953125],["cd",-13.083803176879885],["▁карыста",-13.083810806274414],["бул",-13.083813667297363],["אַפּ",-13.083826065063477],["▁යනවා",-13.08384895324707],["▁surface",-13.083869934082031],["եդ",-13.083879470825195],["rua",-13.083888053894045],["rale",-13.083897590637209],["▁Resep",-13.083911895751951],["▁förslag",-13.08392333984375],["があって",-13.083928108215332],["ედი",-13.083962440490724],["adore",-13.083970069885254],["lösning",-13.083990097045898],["▁지방",-13.083992004394531],["▁spra",-13.084014892578123],["▁δικ",-13.08402156829834],["▁земле",-13.084041595458984],["▁결제",-13.084064483642578],["რებული",-13.084070205688477],["▁Sæ",-13.084074020385742],["▁kondi",-13.084089279174805],["timin",-13.084099769592283],["▁جناب",-13.084101676940918],["▁štet",-13.084117889404297],["▁основі",-13.084117889404297],["kuun",-13.084128379821776],["čani",-13.08413314819336],["▁веза",-13.084141731262209],["▁паэт",-13.084145545959473],["▁çalak",-13.084151268005373],["poja",-13.0841646194458],["▁cocok",-13.084178924560549],["▁किती",-13.084184646606444],["UX",-13.084192276000977],["告訴",-13.084229469299316],["brik",-13.084253311157228],["▁סמ",-13.084257125854492],["▁tjänst",-13.084258079528809],["▁рук",-13.084274291992188],["▁Natura",-13.084285736083984],["וסט",-13.084306716918944],["ሉ፡፡",-13.08432674407959],["귀",-13.084373474121094],["▁красив",-13.084381103515623],["әр",-13.084390640258787],["OLE",-13.08441925048828],["ckej",-13.084427833557127],["학교",-13.084444999694824],["griff",-13.08447551727295],["▁يوق",-13.084476470947266],["稍",-13.084517478942873],["為您",-13.08452033996582],["▁મોટા",-13.084567070007324],["веч",-13.084580421447754],["tíma",-13.084602355957031],["▁دیتا",-13.08460521697998],["▁πυρ",-13.084623336791992],["▁وكانت",-13.08464813232422],["баева",-13.084671020507812],["▁национално",-13.084698677062988],["ümü",-13.08470630645752],["▁Asha",-13.084720611572266],["მენ",-13.084768295288086],["恨",-13.08478546142578],["Nkulunkulu",-13.08479118347168],["▁NR",-13.084798812866213],["ატა",-13.084800720214844],["乌",-13.084818840026855],["▁moderat",-13.0848302841
18652],["醬",-13.084836959838867],["riaren",-13.084843635559082],["▁tasuta",-13.084850311279297],["▁paslaugų",-13.08486270904541],["▁Általános",-13.08486270904541],["▁батьків",-13.08486270904541],["▁सम्झौता",-13.08486270904541],["▁ਹੁੰਦਾ",-13.08486270904541],["▁περίοδο",-13.084871292114258],["แบ่ง",-13.084872245788574],["▁پرسش",-13.084878921508787],["▁иштеп",-13.084880828857422],["引导",-13.084880828857422],["▁tří",-13.084884643554688],["hiver",-13.084905624389648],["дают",-13.084908485412598],["▁indhold",-13.084919929504396],["▁правы",-13.084921836853027],["سکو",-13.084922790527344],["▁ڏني",-13.084925651550291],["▁ולכן",-13.084930419921877],["▁SUV",-13.084941864013672],["▁Tokyo",-13.084948539733888],["▁ئىن",-13.084948539733888],["点击",-13.084951400756836],["tô",-13.084955215454102],["ādi",-13.084978103637695],["▁mało",-13.08500862121582],["רגיל",-13.085016250610352],["▁주문",-13.085023880004885],["▁ໃຊ້",-13.085037231445312],["▁Jal",-13.085058212280272],["▁ଗ୍ରାମ",-13.085061073303224],["▁placut",-13.08509635925293],["▁specifica",-13.085101127624512],["▁градус",-13.085124015808104],["▁Marti",-13.08516788482666],["▁mesures",-13.085172653198242],["टिक",-13.085211753845217],["德國",-13.085213661193848],["▁istiyorum",-13.08522129058838],["▁ruch",-13.08522129058838],["▁Sue",-13.085230827331545],["▁हल",-13.085245132446287],["gambar",-13.085281372070312],["▁12.00",-13.085282325744627],["უნა",-13.085284233093262],["▁kontrole",-13.085328102111816],["білі",-13.085329055786133],["危机",-13.08534049987793],["▁syns",-13.085344314575195],["▁இனி",-13.085344314575195],["▁تناول",-13.085349082946776],["加速",-13.08536434173584],["მარ",-13.085450172424316],["▁Vieš",-13.085450172424316],["lıyor",-13.085457801818848],["▁trading",-13.08546543121338],["ējie",-13.085488319396973],["▁мощ",-13.085500717163086],["▁identific",-13.085514068603516],["▁terapia",-13.085519790649414],["▁हार",-13.085532188415527],["▁różne",-13.08558177947998],["▁skýr",-13.085594177246094],["មុន",-13.085614204406738],["▁కాలం",-13.085673332214355],["▁forfatter",-13.08568286895752],["Str",-13.08570384979248],["▁частини",-13.085713386535645],["όλου",-13.08572483062744],["58)",-13.085739135742188],["▁Ở",-13.085771560668944],["▁Marian",-13.085816383361816],["▁құра",-13.085816383361816],["ေတာ",-13.085824966430664],["Med",-13.08584690093994],["malli",-13.085855484008787],["qual",-13.085869789123535],["ற்ப",-13.085907936096191],["▁142",-13.085980415344238],["▁jedne",-13.085989952087402],["▁ისევ",-13.086004257202148],["▁idman",-13.08602523803711],["賀",-13.086047172546388],["註",-13.086065292358398],["▁Voici",-13.086091995239258],["▁समर्थन",-13.086091995239258],["动物",-13.08609390258789],["iaid",-13.086094856262209],["▁memperoleh",-13.086095809936523],["▁televizor",-13.086099624633787],["▁endlich",-13.086101531982422],["▁ipina",-13.086103439331056],["▁hitel",-13.08610725402832],["grįž",-13.08611297607422],["▁največ",-13.08611297607422],["▁szkoły",-13.0861177444458],["തിന്റെ",-13.086126327514648],["utuu",-13.086140632629396],["senter",-13.086149215698242],["▁décision",-13.08615493774414],["tuma",-13.086155891418455],["▁الصور",-13.086164474487305],["▁penulis",-13.08616828918457],["SÉG",-13.086176872253418],["త్య",-13.086177825927734],["▁поддържа",-13.086237907409668],["▁മാധ്യമ",-13.0862398147583],["▁Gwel",-13.086257934570312],["▁djecu",-13.086273193359377],["alak",-13.086285591125488],["▁bír",-13.086294174194336],["▁Cit",-13.086308479309082],["▁juhul",-13.08632755279541],["▁दिखा",-13.086329460144045],["▁независимо",-13.086344718933104],["▁olim",-13.086371421813965],["▁g
erçekten",-13.086382865905762],["▁ണ",-13.086387634277344],["潜",-13.08641529083252],["▁Zon",-13.086431503295898],["нчев",-13.086432456970217],["chádzajú",-13.086434364318848],["▁agor",-13.086512565612791],["行业的",-13.08651638031006],["าว",-13.086520195007324],["▁njerëzit",-13.08652114868164],["bygging",-13.086528778076172],["▁Gé",-13.086555480957031],["▁њихов",-13.08656120300293],["▁mpaka",-13.08657169342041],["▁szko",-13.086581230163574],["▁Hj",-13.086587905883787],["▁கண்",-13.086610794067385],["rski",-13.086620330810549],["纯",-13.086621284484863],["▁матеріали",-13.086629867553713],["▁esiin",-13.086634635925291],["tego",-13.08663558959961],["▁ಕಾರ್",-13.08663845062256],["usha",-13.086639404296877],["▁štiri",-13.086639404296877],["ddar",-13.086649894714355],["dikleri",-13.086718559265137],["▁újabb",-13.086724281311035],["▁Slu",-13.086725234985352],["▁1700",-13.0867280960083],["llisia",-13.086747169494627],["▁тражи",-13.086762428283691],["ચાર",-13.086763381958008],["्छन्",-13.086791038513184],["▁лига",-13.086812019348145],["▁iphone",-13.086841583251951],["▁saldırı",-13.08686351776123],["তম",-13.086905479431152],["szkol",-13.086910247802734],["を取り",-13.08691692352295],["▁ගො",-13.086923599243164],["לאַ",-13.086987495422363],["ūros",-13.087000846862791],["telor",-13.08701515197754],["▁inclu",-13.087020874023438],["త్వం",-13.087027549743652],["▁ከእ",-13.087051391601562],["antha",-13.087085723876951],["מקומות",-13.087085723876951],["▁mengapa",-13.087103843688965],["▁کہانی",-13.087105751037598],["全力",-13.087109565734863],["ancia",-13.087112426757812],["غز",-13.087120056152344],["▁marcas",-13.08712100982666],["كثر",-13.087122917175291],["▁nano",-13.087125778198242],["場合には",-13.08713150024414],["▁segui",-13.087141036987305],["ക്കെതിരെ",-13.087151527404783],["ovih",-13.087154388427734],["▁vastaa",-13.087162017822266],["ที่ใช้",-13.087173461914062],["โปร",-13.087206840515137],["ARAN",-13.087207794189451],["xay",-13.08720874786377],["▁анхаар",-13.087212562561035],["око",-13.0872220993042],["险",-13.087223052978516],["sby",-13.087237358093262],["ভো",-13.087251663208008],["▁effort",-13.087279319763184],["侧",-13.08729076385498],["sump",-13.087298393249512],["्ला",-13.087303161621094],["▁سعودي",-13.087307929992676],["殿",-13.087308883666992],["bilidad",-13.08731460571289],["싱",-13.087316513061523],["消失",-13.087319374084473],["ចំណ",-13.087321281433104],["▁Bệnh",-13.087321281433104],["▁ચૂંટણી",-13.087321281433104],["▁ấn",-13.087321281433104],["▁થયું",-13.087323188781738],["▁Polizei",-13.087327003479004],["▁صورتحال",-13.087357521057127],["വന",-13.087361335754396],["เกิน",-13.087369918823242],["šiel",-13.087417602539062],["▁Вона",-13.087419509887695],["ኖሩ",-13.087435722351074],["▁थाहा",-13.087435722351074],["▁tunnista",-13.0874662399292],["▁serviciu",-13.087472915649414],["▁Doma",-13.087477684020996],["▁röst",-13.087479591369627],["▁teri",-13.087479591369627],["▁debes",-13.08751106262207],["леген",-13.087522506713867],["▁materiais",-13.08752727508545],["▁avoid",-13.087530136108398],["luci",-13.087535858154297],["▁зустріч",-13.087538719177246],["grat",-13.087579727172852],["▁पाउने",-13.087580680847168],["▁стимул",-13.087624549865724],["▁процессе",-13.087631225585938],["▁Hori",-13.087638854980469],["▁transmit",-13.087638854980469],["tike",-13.087663650512695],["ชุมชน",-13.087676048278809],["hty",-13.087682723999023],["▁Barne",-13.087682723999023],["▁hư",-13.087689399719238],["▁ضرب",-13.087691307067873],["ก็มี",-13.0877046585083],["▁кач",-13.087711334228516],["▁zmian",-13.08773422241211],["▁تخصصی",-13.087736129760742],
["ებიდან",-13.087750434875488],["ഗി",-13.08777904510498],["▁CT",-13.08778953552246],["▁površin",-13.08780288696289],["straße",-13.087818145751951],["пара",-13.087821006774902],["▁редакц",-13.0878324508667],["▁احت",-13.087835311889648],["▁números",-13.087838172912598],["▁poca",-13.087882041931152],["▁בפני",-13.087894439697266],["ീകരണ",-13.08792781829834],["တပ်",-13.087946891784668],["▁একটা",-13.087953567504885],["▁করি",-13.0879545211792],["trække",-13.087974548339844],["kovo",-13.08798885345459],["▁casas",-13.088042259216309],["▁знания",-13.088043212890623],["▁поново",-13.088061332702637],["▁Valle",-13.088092803955078],["▁現在",-13.088109016418455],["ເຂດ",-13.088117599487305],["▁destê",-13.088129043579102],["▁Vie",-13.08815860748291],["lew",-13.088160514831545],["▁alfa",-13.08816146850586],["vora",-13.088180541992188],["▁yanayo",-13.088238716125488],["АА",-13.088244438171388],["र्त",-13.088254928588867],["▁mettere",-13.088254928588867],["▁સ્ટ",-13.088275909423828],["▁Rabbi",-13.08827781677246],["▁tốc",-13.088292121887209],["▁ataca",-13.088299751281738],["▁sporo",-13.088300704956056],["ssaan",-13.0883207321167],["ане",-13.088333129882812],["▁mots",-13.08834171295166],["ในปี",-13.088350296020508],["▁execut",-13.088427543640137],["ecektir",-13.088431358337402],["efna",-13.088448524475098],["กับการ",-13.088467597961426],["2,5",-13.088471412658691],["▁puasa",-13.088471412658691],["▁Kids",-13.08848476409912],["另一方面",-13.08849048614502],["驾驶",-13.088497161865234],["ల్లా",-13.08851146697998],["▁klant",-13.088516235351562],["็ค",-13.088520050048828],["тарына",-13.088531494140623],["▁بنده",-13.088537216186523],["▁səhv",-13.088539123535156],["엘",-13.088552474975586],["▁dažādu",-13.088553428649902],["▁mężczyzn",-13.088553428649902],["▁życiu",-13.088553428649902],["▁секогаш",-13.088553428649902],["▁podjetje",-13.088556289672852],["רץ",-13.0885591506958],["▁количества",-13.088561058044434],["War",-13.088566780090332],["▁zvýšen",-13.088566780090332],["▁ព័ត៌មានជាតិ",-13.088566780090332],["వారు",-13.088587760925291],["▁үндэсний",-13.088590621948242],["▁navíc",-13.088593482971191],["▁fondi",-13.08860969543457],["▁qayta",-13.08860969543457],["onga",-13.088616371154783],["XL",-13.08862018585205],["▁кожен",-13.088621139526367],["▁dragoste",-13.088628768920898],["ព្រៃ",-13.088647842407228],["нства",-13.088665008544922],["spie",-13.088667869567873],["्यात",-13.088674545288086],["íveis",-13.08872127532959],["ήγηση",-13.088733673095703],["▁купи",-13.08873462677002],["διά",-13.08874797821045],["▁Selepas",-13.088757514953612],["▁вероятно",-13.088780403137209],["▁वृद्धि",-13.088784217834473],["▁Ruf",-13.088788032531738],["▁mangler",-13.08879280090332],["ехать",-13.088802337646484],["OLI",-13.08881664276123],["▁podoba",-13.088827133178713],["▁Easy",-13.08883571624756],["▁těchto",-13.088862419128418],["gjord",-13.08887004852295],["▁katero",-13.088873863220217],["爱情",-13.088876724243164],["ज़ा",-13.088895797729492],["ьо",-13.08890438079834],["▁freisin",-13.088908195495604],["party",-13.088918685913086],["▁Ры",-13.088918685913086],["▁flexibil",-13.088951110839844],["▁kiezen",-13.088966369628906],["▁முக",-13.088985443115234],["dahan",-13.089014053344728],["▁Gewinn",-13.08902072906494],["விட்ட",-13.089025497436523],["liyə",-13.089035987854004],["▁kaunis",-13.0890531539917],["▁преподава",-13.089082717895508],["বাস",-13.089088439941406],["ทุน",-13.089115142822266],["▁صورة",-13.089125633239746],["▁fyrsta",-13.08914279937744],["▁άγ",-13.089159965515137],["▁МАН",-13.089163780212402],["mast",-13.089184761047363],["ahin",-13.08919143
6767578],["▁ambas",-13.089204788208008],["▁উপর",-13.089218139648438],["ians",-13.089229583740234],["▁étant",-13.08923053741455],["▁mesos",-13.089231491088867],["▁фирми",-13.089232444763184],["ரியா",-13.089262008666992],["▁ഷാ",-13.089269638061523],["▁kesên",-13.089271545410156],["èr",-13.089295387268066],["ତେ",-13.089295387268066],["idée",-13.089324951171877],["▁dáng",-13.089364051818848],["小型",-13.08938694000244],["ტარებ",-13.089404106140137],["بول",-13.089428901672363],["▁sake",-13.089430809020996],["很高",-13.08946418762207],["argent",-13.089473724365234],["φά",-13.089481353759766],["▁Instru",-13.089486122131348],["▁이미지",-13.089497566223145],["वय",-13.089502334594728],["렸다",-13.089539527893066],["тяж",-13.089570999145508],["▁fungerar",-13.089580535888672],["▁درجه",-13.089617729187012],["▁magnis",-13.089625358581545],["▁распа",-13.089629173278809],["將於",-13.089644432067873],["▁жүйе",-13.089651107788086],["kkum",-13.089693069458008],["本文",-13.089702606201172],["岗位",-13.08970546722412],["▁моз",-13.089713096618652],["ຍົກ",-13.089725494384766],["奪",-13.089749336242676],["▁عين",-13.089770317077637],["スマホ",-13.089771270751951],["▁సై",-13.08977222442627],["ጮ",-13.0897855758667],["▁xestión",-13.0897855758667],["▁βάρος",-13.0897855758667],["▁விளையாட்டு",-13.0897855758667],["接下来",-13.0897855758667],["칭",-13.0897855758667],["닝",-13.089786529541016],["▁08.",-13.089797019958496],["▁shqiptarë",-13.089797019958496],["▁بداية",-13.089797019958496],["▁ଚାଲି",-13.089798927307127],["kő",-13.089799880981444],["▁professionnels",-13.089861869812012],["▁vidjeti",-13.089862823486328],["▁brán",-13.08987045288086],["oty",-13.089871406555176],["してくれる",-13.089872360229492],["ભી",-13.08988094329834],["▁افضل",-13.089936256408691],["▁tutustu",-13.089943885803224],["▁රමය",-13.08994483947754],["интерес",-13.089956283569336],["▁olemas",-13.089956283569336],["▁aandacht",-13.089961051940918],["▁научни",-13.089983940124512],["▁desain",-13.090018272399902],["adni",-13.090023040771484],["wur",-13.090024948120115],["▁стабил",-13.090059280395508],["TOK",-13.09006404876709],["енци",-13.090067863464355],["▁ruumi",-13.090068817138672],["త్తు",-13.09013557434082],["▁pisa",-13.09017562866211],["▁arbeider",-13.090177536010742],["ուժ",-13.09017848968506],["jumus",-13.090211868286133],["▁губи",-13.090215682983398],["▁estiver",-13.090219497680664],["▁bedrijfs",-13.090222358703612],["▁nude",-13.090245246887209],["kerne",-13.090246200561523],["けて",-13.090254783630373],["▁Morgan",-13.09027099609375],["ล้าง",-13.0902738571167],["▁ਖ਼",-13.09028434753418],["▁යු",-13.090287208557127],["ough",-13.090291023254396],["▁paid",-13.090301513671877],["rommet",-13.090310096740724],["▁לחץ",-13.09031581878662],["▁takiego",-13.090347290039062],["▁Tó",-13.09035301208496],["▁users",-13.090354919433594],["rët",-13.090370178222656],["あった",-13.090370178222656],["Alt",-13.090384483337402],["▁ګډون",-13.09044075012207],["▁біля",-13.090446472167969],["▁kiriye",-13.090458869934082],["ਰੋ",-13.090459823608398],["rijk",-13.090490341186523],["▁defect",-13.090490341186523],["əri",-13.090508460998535],["▁인간",-13.090526580810549],["מוש",-13.090529441833496],["▁wandel",-13.090534210205078],["▁тых",-13.09059238433838],["▁Police",-13.090614318847656],["▁inkl",-13.09064769744873],["sial",-13.090662956237791],["ўскі",-13.090697288513184],["ruo",-13.090737342834473],["дэж",-13.090747833251951],["▁Seda",-13.090764999389648],["ساء",-13.090771675109863],["▁okup",-13.090774536132812],["▁ಕರೆ",-13.090797424316406],["вале",-13.090801239013672],["▁있었",-13.09082317352295],["élet",-13.09082412
7197266],["टिंग",-13.090825080871582],["▁fines",-13.090835571289062],["▁vaga",-13.09087085723877],["Ξ",-13.090876579284668],["▁Deli",-13.090885162353516],["▁halinde",-13.090897560119627],["▁ilości",-13.09091567993164],["nością",-13.090924263000488],["เจ้าของ",-13.090925216674805],["благодар",-13.090928077697754],["سط",-13.090928077697754],["▁starting",-13.090944290161133],["いている",-13.090947151184082],["▁Village",-13.090959548950195],["фри",-13.090962409973145],["▁брод",-13.090970039367676],["保險",-13.090980529785156],["▁Jaa",-13.090987205505373],["▁KUR",-13.09101104736328],["▁апреля",-13.091020584106444],["▁ਕੈਪਟਨ",-13.091020584106444],["▁خواهید",-13.091021537780762],["▁تغيير",-13.091022491455078],["▁තිබූ",-13.091022491455078],["▁štýl",-13.09103012084961],["▁ഏറെ",-13.09103012084961],["▁дейді",-13.091066360473633],["verdi",-13.091069221496582],["▁Freitag",-13.091082572937012],["▁பக்கம்",-13.091118812561035],["▁(2013)",-13.09112548828125],["▁болгох",-13.091127395629885],["世代",-13.091142654418944],["漫画",-13.091176986694336],["▁ئۇنى",-13.091196060180664],["וכ",-13.091209411621094],["▁Reform",-13.091216087341309],["来源",-13.091225624084473],["▁Fili",-13.091227531433104],["дже",-13.091230392456056],["▁Teng",-13.091236114501951],["▁συνε",-13.09123992919922],["▁naudas",-13.091240882873535],["ให้บริการ",-13.091249465942385],["ശ്ര",-13.091275215148926],["▁Sze",-13.091289520263672],["ších",-13.091293334960938],["▁قوات",-13.091328620910645],["▁암",-13.091331481933594],["▁ນາ",-13.09134578704834],["nian",-13.091348648071287],["▁armas",-13.091350555419922],["ovni",-13.09135913848877],["hatatlan",-13.091367721557615],["▁ಗಳ",-13.091376304626465],["дил",-13.091381072998049],["▁suspens",-13.091395378112791],["▁cogn",-13.091412544250488],["▁Крем",-13.091434478759766],["▁sjö",-13.091444969177246],["▁मिले",-13.091477394104004],["▁virk",-13.091490745544434],["▁mundur",-13.091500282287598],["▁фо",-13.09151554107666],["▁SIN",-13.09156322479248],["想起",-13.091580390930176],["▁ziman",-13.09162712097168],["ความเป็น",-13.091654777526855],["▁מיני",-13.091656684875488],["шение",-13.091662406921388],["▁следните",-13.091696739196776],["▁Sinne",-13.091708183288574],["▁hizi",-13.09170913696289],["овую",-13.09172248840332],["▁Tod",-13.09172248840332],["Qarabağ",-13.091752052307127],["▁Sec",-13.09181308746338],["▁жүк",-13.091825485229492],["▁tende",-13.091835021972656],["opis",-13.091862678527832],["ജ്ജ",-13.091876983642578],["▁kropp",-13.091894149780272],["▁aru",-13.091898918151855],["▁tantra",-13.091914176940918],["▁loko",-13.091923713684082],["48)",-13.091937065124512],["▁даа",-13.09194564819336],["▁ທັງ",-13.0919771194458],["เกร",-13.0919828414917],["продовж",-13.092010498046877],["▁stuur",-13.092020988464355],["ишта",-13.092035293579102],["を感じ",-13.092040061950684],["ssimo",-13.092082977294922],["▁yilda",-13.092089653015137],["▁primero",-13.092130661010742],["ising",-13.09213924407959],["▁espai",-13.0921630859375],["▁บาง",-13.09217643737793],["▁wykonywa",-13.092195510864258],["ຊາດ",-13.092204093933104],["邀请",-13.09223461151123],["▁қызметі",-13.09223747253418],["風險",-13.092238426208496],["▁отбасы",-13.09224796295166],["▁kwietnia",-13.092256546020508],["▁আগামী",-13.092256546020508],["▁সুন্দর",-13.092257499694824],["▁ಪ್ರೀತಿ",-13.092257499694824],["แชร์",-13.09226131439209],["▁društvo",-13.09226131439209],["つまり",-13.09226131439209],["▁कानुन",-13.092276573181152],["ფინანს",-13.092283248901367],["▁কাছে",-13.09228515625],["▁রু",-13.092304229736328],["▁Participa",-13.09232234954834],["▁orë",-13.092323303222656],["▁tụ",-13.0923376083374
02],["▁Nell",-13.09235668182373],["▁यसरी",-13.09235668182373],["stans",-13.092367172241213],["▁امتیاز",-13.092367172241213],["▁Сон",-13.092379570007324],["▁חודשים",-13.092387199401855],["▁Péter",-13.092406272888184],["▁nuclear",-13.092406272888184],["ፈረ",-13.092416763305664],["ικοί",-13.092440605163574],["▁Giang",-13.092453956604004],["roy",-13.092463493347168],["▁ພ້ອມ",-13.092473983764648],["▁جيڪا",-13.09249782562256],["浙江",-13.092507362365724],["▁dëm",-13.09250831604004],["عبد",-13.09251594543457],["ksu",-13.092516899108888],["turile",-13.092557907104492],["vaz",-13.092578887939451],["▁объектов",-13.09260082244873],["▁בצ",-13.092605590820312],["▁knih",-13.092612266540527],["變得",-13.092613220214844],["▁mato",-13.092623710632324],["▁začet",-13.092631340026855],["▁लेने",-13.092646598815918],["idega",-13.092686653137209],["▁پہ",-13.092696189880373],["▁consigli",-13.092705726623535],["▁yes",-13.092705726623535],["▁судов",-13.092708587646484],["away",-13.09276294708252],["▁Simba",-13.092763900756836],["▁legen",-13.092774391174316],["០",-13.092790603637695],["▁berria",-13.09279441833496],["教育部",-13.092812538146973],["▁streng",-13.092815399169922],["▁កូន",-13.092820167541504],["▁професионал",-13.092843055725098],["▁ding",-13.092848777770996],["▁Біл",-13.092851638793944],["▁അല",-13.092853546142578],["不久",-13.0928955078125],["催",-13.0928955078125],["KAR",-13.092942237854004],["toimisto",-13.092952728271484],["▁olursa",-13.0929536819458],["intele",-13.092989921569824],["ප්ප",-13.092991828918455],["坊",-13.093000411987305],["โปรโมชั่น",-13.09303092956543],["Box",-13.093052864074709],["ປະຕິບັດ",-13.093055725097656],["▁kadib",-13.093055725097656],["▁პრემიერ",-13.093098640441896],["▁پاڻي",-13.093119621276855],["saare",-13.093141555786133],["▁hayatını",-13.093157768249512],["tória",-13.093158721923828],["foni",-13.093177795410156],["▁movies",-13.093207359313965],["▁состояние",-13.09323787689209],["Yan",-13.093242645263672],["▁வரி",-13.093247413635254],["▁צור",-13.093263626098633],["▁ගහ",-13.093304634094238],["▁pikë",-13.093306541442873],["▁viermi",-13.09331512451172],["▁kahju",-13.093337059020996],["浦",-13.093343734741213],["φασ",-13.093353271484377],["▁videó",-13.093358993530272],["صير",-13.093360900878906],["▁والش",-13.093379020690918],["▁yordam",-13.09338092803955],["▁kokos",-13.093382835388184],["▁meningkat",-13.093384742736816],["עץ",-13.093400955200195],["దం",-13.093401908874512],["▁ولد",-13.093425750732422],["slēgt",-13.0934419631958],["plast",-13.093448638916016],["嫁",-13.093470573425291],["不够",-13.093475341796877],["▁venter",-13.093480110168455],["▁gefa",-13.09348201751709],["▁crítica",-13.09348487854004],["▁MAFRA",-13.093494415283203],["▁jóvenes",-13.093494415283203],["▁müasir",-13.093494415283203],["▁капацитет",-13.093494415283203],["▁torpaq",-13.09349536895752],["▁segítségével",-13.093499183654783],["▁последние",-13.093499183654783],["▁Tirkiyeyê",-13.09350872039795],["▁Грција",-13.093513488769531],["▁نقاط",-13.093513488769531],["lenia",-13.09351634979248],["▁pôvod",-13.09351634979248],["▁telefona",-13.093525886535645],["ичните",-13.093531608581545],["tdi",-13.093535423278809],["მეტ",-13.093536376953123],["▁సేవ",-13.093544960021973],["ljed",-13.093546867370604],["▁padres",-13.093551635742188],["▁measure",-13.093562126159668],["nenud",-13.093585014343262],["▁Minggu",-13.093598365783691],["▁transporta",-13.09360408782959],["▁leef",-13.09361171722412],["▁modello",-13.093613624572754],["▁عاشق",-13.093620300292969],["▁животни",-13.093623161315918],["▁française",-13.09364128112793],["ТАР",-13.0936498
64196776],["▁végig",-13.09366226196289],["գար",-13.093663215637209],["세요",-13.093680381774902],["▁silti",-13.093708992004396],["▁لڑکی",-13.093714714050291],["временно",-13.093721389770508],["дөр",-13.093750953674316],["▁load",-13.093750953674316],["▁euskaraz",-13.093756675720217],["န္႔",-13.093778610229492],["suh",-13.093783378601074],["▁došlo",-13.09378433227539],["▁tineri",-13.093791961669922],["▁rakan",-13.093792915344238],["報道",-13.093819618225098],["ачи",-13.093820571899414],["途中",-13.09384536743164],["▁directamente",-13.093849182128906],["▁År",-13.093871116638184],["ístico",-13.093873977661133],["▁úloh",-13.093889236450195],["法國",-13.093889236450195],["▁benefits",-13.093890190124512],["▁ძ",-13.093928337097168],["द्द",-13.093958854675291],["▁αδ",-13.093968391418455],["公安",-13.093972206115724],["▁vähem",-13.093974113464355],["▁stanov",-13.09401035308838],["ுங்கள்",-13.094073295593262],["पनि",-13.094077110290527],["▁ചേ",-13.094080924987791],["▁2022",-13.094115257263184],["▁2012-",-13.094170570373535],["gência",-13.094196319580078],["помага",-13.094219207763672],["▁upravlja",-13.094242095947266],["▁gear",-13.094253540039062],["▁erməni",-13.094285011291504],["▁late",-13.094289779663086],["xil",-13.0942964553833],["awal",-13.094311714172363],["כור",-13.094321250915527],["ulation",-13.094348907470703],["▁Šiuo",-13.094399452209473],["▁irrita",-13.094401359558104],["estat",-13.0944185256958],["▁dərəcə",-13.094422340393066],["▁मुख",-13.094482421875],["▁स्रोत",-13.094493865966797],["სამართ",-13.094494819641112],["▁lyf",-13.09449577331543],["-31",-13.094517707824709],["▁syd",-13.094523429870604],["ුම",-13.094525337219238],["▁आफू",-13.094552040100098],["▁چهارم",-13.094552993774414],["▁vũ",-13.094574928283691],["îye",-13.094596862792969],["pika",-13.094598770141602],["оча",-13.094616889953612],["▁Verk",-13.094658851623535],["▁ٻيو",-13.0946626663208],["畢業",-13.094677925109863],["ຫົວ",-13.094679832458496],["bhail",-13.094685554504396],["աջ",-13.09470272064209],["佔",-13.094709396362305],["frica",-13.094719886779783],["▁návštěv",-13.094731330871582],["▁bersifat",-13.094733238220217],["▁ମୁଖ୍ୟ",-13.094735145568848],["▁przedmiot",-13.09473705291748],["▁Επίσης",-13.09473705291748],["▁ձեզ",-13.09473705291748],["▁PayPal",-13.09473991394043],["▁बच्चे",-13.09474277496338],["▁앞",-13.09481143951416],["▁зерттеу",-13.09482192993164],["▁reći",-13.094822883605955],["▁депозит",-13.094842910766602],["▁بگ",-13.094853401184082],["▁ystävä",-13.09485912322998],["跟我",-13.094865798950195],["▁ledelse",-13.09486961364746],["▁DP",-13.094873428344728],["▁Heil",-13.094886779785156],["▁पीएम",-13.094888687133787],["จุ",-13.094914436340332],["▁ใคร",-13.094922065734863],["fangen",-13.094950675964355],["▁ахуйн",-13.094955444335938],["लेली",-13.094965934753418],["▁شك",-13.094972610473633],["▁بز",-13.094978332519531],["▁öldür",-13.094983100891112],["▁тука",-13.094983100891112],["▁közé",-13.09499740600586],["▁hovorí",-13.095008850097656],["▁Nissan",-13.095012664794922],["schule",-13.095036506652832],["▁drugiej",-13.095041275024414],["▁εκατ",-13.095048904418944],["ാക്കി",-13.095056533813477],["▁boas",-13.095056533813477],["▁കേ",-13.09506607055664],["变成",-13.095088005065918],["chwy",-13.09511661529541],["viser",-13.095128059387209],["▁वीर",-13.095163345336914],["▁smukke",-13.095165252685549],["▁praktika",-13.095199584960938],["▁получил",-13.095252990722656],["škega",-13.095271110534668],["ирането",-13.095271110534668],["▁птиц",-13.095273971557615],["▁Vos",-13.0952787399292],["▁запит",-13.095282554626465],["ଗତ",-13.095290184020996],["▁криз
а",-13.095293045043944],["Дж",-13.095293998718262],["ارى",-13.095294952392578],["igra",-13.09530258178711],["▁Hid",-13.095317840576172],["▁israel",-13.095318794250488],["握",-13.095319747924805],["▁մարդու",-13.095330238342283],["KAL",-13.095345497131348],["ෙන්නේ",-13.095345497131348],["NÁ",-13.095369338989258],["kvæ",-13.095373153686523],["овна",-13.095394134521484],["Media",-13.095431327819824],["▁перш",-13.095441818237305],["aru",-13.09544849395752],["gju",-13.09545612335205],["▁muiden",-13.095460891723633],["หน้าที่",-13.09546184539795],["වෙලා",-13.09549045562744],["сход",-13.095494270324709],["byrå",-13.095504760742188],["在北京",-13.095507621765137],["▁چم",-13.095518112182615],["ง่ายๆ",-13.095526695251465],["▁Влас",-13.095538139343262],["luğun",-13.095540046691896],["CIÓ",-13.095541954040527],["סוף",-13.095562934875488],["pem",-13.09558391571045],["但我",-13.095589637756348],["▁záp",-13.09559440612793],["▁Општина",-13.095603942871094],["▁moim",-13.095610618591309],["▁astro",-13.095620155334473],["▁болов",-13.0956449508667],["وست",-13.09569263458252],["3.5",-13.095697402954102],["▁muassa",-13.09570026397705],["antan",-13.09571933746338],["nıza",-13.095738410949709],["טרה",-13.095752716064451],["▁спільно",-13.095759391784668],["мос",-13.095767974853516],["ອີ",-13.095775604248049],["elä",-13.095778465270996],["▁మనం",-13.095802307128906],["▁виді",-13.095810890197754],["ውል",-13.095819473266602],["yasa",-13.095823287963867],["▁ανέ",-13.095824241638184],["▁տակ",-13.095829963684082],["普及",-13.09583568572998],["▁deireadh",-13.095894813537598],["stveni",-13.095907211303713],["傻",-13.095947265625],["朝鲜",-13.095948219299316],["accia",-13.095949172973633],["▁Zusammen",-13.09595775604248],["īm",-13.095970153808594],["▁другими",-13.09597110748291],["▁beispielsweise",-13.09597396850586],["▁쉽게",-13.09597396850586],["▁Elizabeth",-13.095974922180176],["▁uwezo",-13.095980644226074],["▁нийгмийн",-13.095980644226074],["收藏",-13.095980644226074],["▁easily",-13.095983505249023],["කන්",-13.09598445892334],["▁مخکې",-13.09598445892334],["▁війни",-13.095985412597656],["▁نبی",-13.096002578735352],["भारत",-13.0960111618042],["izany",-13.09601593017578],["▁оборот",-13.096020698547363],["斯特",-13.096022605895996],["byl",-13.096030235290527],["▁राशि",-13.09603214263916],["▁మీడియా",-13.09604835510254],["▁erreicht",-13.096057891845703],["cham",-13.096063613891602],["▁२६",-13.096067428588867],["▁שאין",-13.096070289611816],["▁المنزل",-13.096084594726562],["▁facto",-13.096095085144045],["வான",-13.096110343933104],["EPA",-13.096116065979004],["▁გვი",-13.09611701965332],["shire",-13.096117973327637],["mæl",-13.09611988067627],["▁ਕੈ",-13.096125602722168],["ለች",-13.096128463745115],["чої",-13.096136093139648],["▁verbeter",-13.09613800048828],["▁morda",-13.096142768859863],["▁ponuja",-13.096160888671877],["ປະຈໍາ",-13.096186637878418],["zii",-13.096189498901367],["▁גוט",-13.096216201782228],["itum",-13.096224784851074],["ជិត",-13.096224784851074],["▁Klicka",-13.09626007080078],["▁vont",-13.09626579284668],["▁Luo",-13.096266746520996],["Kor",-13.096280097961426],["学科",-13.09628200531006],["skatt",-13.096309661865234],["▁Galeri",-13.096321105957031],["▁ئۈ",-13.096358299255373],["mpon",-13.09636688232422],["▁יכולה",-13.096409797668455],["ДР",-13.096435546875],["▁Хол",-13.096452713012695],["▁نہیں۔",-13.096454620361328],["▁intereses",-13.096467971801758],["▁வாழ்",-13.096467971801758],["ਕਮ",-13.096470832824709],["חזק",-13.096490859985352],["日上午",-13.096513748168944],["▁Belg",-13.096516609191896],["▁یافته",-13.096521377563477],["▁hombres",-13.
09653663635254],["vajo",-13.096537590026855],["▁چىق",-13.096537590026855],["barca",-13.096558570861816],["しまう",-13.096574783325195],["▁Bole",-13.096612930297852],["▁បើក",-13.096635818481444],["▁Bird",-13.09664535522461],["يىلى",-13.09666633605957],["бін",-13.09667682647705],["rô",-13.096693992614746],["▁Ψ",-13.096713066101074],["йце",-13.09671688079834],["▁uza",-13.09673023223877],["▁ഉത്തര",-13.096735954284668],["结婚",-13.096735954284668],["νικ",-13.096739768981934],["▁כשה",-13.09674835205078],["पत",-13.096771240234377],["▁gazdasági",-13.096786499023438],["▁Nigeria",-13.096796989440918],["▁Søg",-13.096829414367676],["▁এটি",-13.096830368041992],["تور",-13.096848487854004],["▁fakat",-13.096856117248535],["▁Coca",-13.09688663482666],["▁Meet",-13.096896171569824],["bren",-13.09691333770752],["▁madu",-13.096927642822266],["േണ്ടി",-13.096936225891112],["▁срце",-13.096942901611328],["צור",-13.096953392028809],["▁दश",-13.096960067749023],["czną",-13.096968650817873],["шып",-13.096975326538086],["TÄ",-13.096989631652832],["▁بهذا",-13.096997261047363],["▁формы",-13.097000122070312],["▁වැඩක්",-13.097005844116213],["rador",-13.09700870513916],["уйте",-13.097010612487791],["ΕΡ",-13.09701442718506],["ເງິນ",-13.097015380859377],["ttuja",-13.097039222717283],["▁ائين",-13.097049713134766],["▁Yıl",-13.097065925598145],["olva",-13.097085952758787],["▁الشرق",-13.097089767456056],["вена",-13.097091674804688],["▁אה",-13.0971097946167],["▁၉",-13.097111701965332],["▁голема",-13.09714698791504],["货币",-13.097160339355469],["иха",-13.097167015075684],["ርሱ",-13.097172737121582],["ப்பட்டு",-13.097173690795898],["▁ບັນ",-13.09719181060791],["▁amino",-13.097195625305176],["チーム",-13.097197532653809],["巷",-13.097200393676758],["ຕ່າງ",-13.097207069396973],["▁největší",-13.097216606140137],["▁дұрыс",-13.097216606140137],["앤",-13.097216606140137],["▁inklusive",-13.097217559814451],["▁personenbezogen",-13.097217559814451],["▁اڳواڻ",-13.097217559814451],["▁નીચે",-13.097219467163086],["▁облыстық",-13.097225189208984],["▁עובד",-13.0972261428833],["सोबत",-13.097228050231934],["▁paligid",-13.09722900390625],["stico",-13.097233772277832],["▁Petersburg",-13.097237586975098],["▁Així",-13.097238540649414],["▁დაცულია",-13.097238540649414],["▁تحفظ",-13.097309112548828],["▁мечта",-13.097310066223145],["▁تقریب",-13.097382545471191],["▁blad",-13.09738540649414],["▁ratkaisu",-13.09738826751709],["▁мережі",-13.097393989562988],["▁गति",-13.097411155700684],["▁зүй",-13.097421646118164],["hatóság",-13.097463607788086],["▁quadro",-13.097489356994627],["▁Soy",-13.097539901733398],["▁клуба",-13.097549438476562],["баса",-13.09755039215088],["iani",-13.09755802154541],["▁조사",-13.097564697265623],["▁limon",-13.097583770751951],["၀၀",-13.09760284423828],["▁Rup",-13.097603797912598],["▁determin",-13.097607612609863],["zgar",-13.097625732421877],["éből",-13.09764575958252],["shot",-13.09765338897705],["▁trabalhar",-13.097671508789062],["▁ministeri",-13.097694396972656],["▁taille",-13.097712516784668],["barang",-13.097761154174805],["පති",-13.097772598266602],["ٹری",-13.097779273986816],["▁hebat",-13.097790718078612],["▁xiriir",-13.097795486450195],["ຕ້ອງ",-13.097801208496094],["あって",-13.097806930541992],["▁üretim",-13.097821235656738],["▁духовно",-13.097825050354004],["kutana",-13.09785270690918],["▁luma",-13.097862243652344],["طق",-13.097870826721191],["▁toy",-13.09787940979004],["ሾ",-13.097881317138672],["▁torne",-13.097923278808594],["▁ответил",-13.09798812866211],["▁fatti",-13.09799861907959],["▁মুখ",-13.098003387451172],["ahana",-13.098011016845703],[
"▁hop",-13.09802532196045],["ደረ",-13.098036766052246],["▁tofauti",-13.098052024841309],["স্",-13.098071098327637],["▁බං",-13.098101615905762],["▁prostat",-13.098108291625977],["▁146",-13.098109245300291],["ភូមិ",-13.09811305999756],["ಜು",-13.09811782836914],["▁plantas",-13.098122596740724],["▁орж",-13.098129272460938],["TW",-13.098133087158203],["▁estudios",-13.09814167022705],["▁Rao",-13.09818172454834],["仅仅",-13.098203659057615],["േറ്റ്",-13.098217964172363],["чева",-13.098223686218262],["▁KP",-13.09828758239746],["▁పర",-13.098291397094728],["▁eers",-13.09829807281494],["▁адже",-13.09829807281494],["संबंधी",-13.09830093383789],["▁Quint",-13.098320960998535],["▁õnne",-13.098326683044434],["▁реформа",-13.098342895507812],["▁protagonist",-13.098345756530762],["שתי",-13.098358154296877],["▁өзгер",-13.098363876342772],["▁bangunan",-13.09837245941162],["ગત",-13.09840488433838],["观察",-13.09842300415039],["чень",-13.098437309265137],["aktion",-13.09844207763672],["▁sosiale",-13.098448753356934],["▁રાખ",-13.098455429077148],["漲",-13.098455429077148],["▁Rotterdam",-13.09846019744873],["▁cœur",-13.09846019744873],["▁nodrošināt",-13.09846019744873],["▁rượu",-13.09846019744873],["▁पहिले",-13.09846019744873],["▁physical",-13.098461151123049],["▁રૂપિયા",-13.09846305847168],["▁हवे",-13.098464012145996],["▁deosebit",-13.098471641540527],["▁Pria",-13.098472595214844],["oši",-13.098478317260742],["▁учреждения",-13.098488807678224],["▁امیدوار",-13.09848976135254],["80%",-13.09849452972412],["يطان",-13.09850025177002],["▁કરતાં",-13.098502159118652],["బు",-13.098509788513184],["viy",-13.098515510559082],["▁berätta",-13.098519325256348],["▁norway",-13.098523139953612],["▁օ",-13.098529815673828],["dotto",-13.098532676696776],["▁zajedno",-13.098562240600586],["नंतर",-13.098569869995115],["rodu",-13.09857177734375],["▁참고",-13.098577499389648],["经济发展",-13.09857940673828],["▁ຮຽນ",-13.098580360412598],["▁পোস্ট",-13.098590850830078],["▁میشود",-13.098593711853027],["ইন",-13.098602294921877],["ίκη",-13.09860610961914],["▁ערב",-13.09864330291748],["1-2",-13.098648071289062],["bjerg",-13.09864902496338],["▁Formula",-13.098657608032228],["kirkju",-13.09867000579834],["/2006",-13.09868049621582],["▁хотите",-13.09868621826172],["▁ଥିଲା",-13.098692893981934],["▁откако",-13.09870147705078],["аве",-13.098711013793944],["▁Eri",-13.09873867034912],["▁musste",-13.098743438720703],["▁қызметтер",-13.09878635406494],["▁продав",-13.098797798156738],["▁razred",-13.098803520202637],["▁slow",-13.098811149597168],["▁GRA",-13.098824501037598],["▁Fü",-13.09884548187256],["εβ",-13.098858833312988],["ведения",-13.098861694335938],["▁hizmetleri",-13.09887409210205],["▁ច្បាប់",-13.098884582519531],["▁вироб",-13.098885536193848],["入住",-13.09890365600586],["▁Meter",-13.0989351272583],["▁Cour",-13.0989408493042],["▁gorî",-13.098973274230955],["campur",-13.098995208740234],["ជាង",-13.099003791809082],["ிருக்கும்",-13.09900951385498],["▁visos",-13.09902000427246],["▁즉",-13.099027633666992],["▁Than",-13.099043846130373],["yahan",-13.09905242919922],["قسم",-13.099054336547852],["ダー",-13.099064826965332],["TÓ",-13.099075317382812],["מדינה",-13.09912109375],["प्य",-13.099153518676758],["▁richtige",-13.099164009094238],["owanym",-13.09917163848877],["▁zaczyna",-13.099172592163086],["婦",-13.099175453186035],["▁velger",-13.099197387695312],["▁anseo",-13.09920883178711],["චාර",-13.099210739135742],["▁kåt",-13.099217414855955],["▁випуск",-13.0992431640625],["astus",-13.099275588989258],["Укр",-13.099275588989258],["Dia",-13.099279403686523],["▁mát",-13.099283
218383787],["▁záhrad",-13.099302291870115],["אופ",-13.099315643310549],["ដើម្បី",-13.099339485168455],["的服務",-13.099350929260254],["leyici",-13.099371910095217],["መለስ",-13.099373817443848],["▁solicit",-13.09938144683838],["IYO",-13.09943389892578],["▁រដ្ឋ",-13.099452018737791],["řad",-13.099491119384766],["AX",-13.099529266357422],["▁roma",-13.099529266357422],["▁continent",-13.099541664123535],["男孩",-13.0995512008667],["meetri",-13.099552154541016],["▁टो",-13.09957504272461],["▁lost",-13.09959316253662],["klon",-13.099600791931152],["▁행",-13.09964656829834],["She",-13.099655151367188],["▁bloed",-13.099658966064451],["鼓勵",-13.099664688110352],["▁କାରଣ",-13.099691390991213],["▁Мэдээ",-13.099700927734377],["▁Saturday",-13.099705696105955],["▁područja",-13.099705696105955],["▁ΣΥΡΙΖΑ",-13.099705696105955],["▁бұрын",-13.099705696105955],["▁ਤਰ੍ਹਾਂ",-13.099705696105955],["hb",-13.099711418151855],["lcev",-13.09971523284912],["▁ніхто",-13.09971523284912],["razil",-13.099722862243652],["▁heyəti",-13.099722862243652],["▁ହୋଇଥିଲା",-13.099723815917969],["▁rencana",-13.099733352661133],["▁grudnia",-13.099739074707031],["razione",-13.09974193572998],["▁سرکاری",-13.09974193572998],["▁pozitív",-13.099754333496094],["▁Conseil",-13.099759101867676],["▁۱۴",-13.09976577758789],["▁چن",-13.099771499633787],["hama",-13.099772453308104],["วิ่ง",-13.099787712097168],["▁(27)",-13.099788665771484],["▁Тай",-13.099794387817385],["บุ",-13.099813461303713],["▁gewesen",-13.09981632232666],["лога",-13.099827766418455],["▁చేయడం",-13.099827766418455],["ಳ್ಳ",-13.099836349487305],["▁ขอบคุณ",-13.099848747253418],["▁mohl",-13.099860191345217],["▁tàu",-13.099860191345217],["ტორი",-13.099872589111328],["▁(!)",-13.099881172180176],["▁Hæ",-13.099900245666504],["QA",-13.099902153015137],["▁yö",-13.099902153015137],["▁Stück",-13.09992504119873],["▁jihar",-13.099933624267578],["TTI",-13.099937438964844],["transport",-13.099937438964844],["▁Чер",-13.099946022033691],["▁musique",-13.099953651428224],["တိုင်း",-13.099955558776855],["шую",-13.099964141845703],["▁Kultura",-13.099964141845703],["लाइन",-13.099968910217283],["ို",-13.099977493286133],["▁новите",-13.099985122680664],["▁piek",-13.10002613067627],["▁Saul",-13.100068092346191],["▁ត្រូវបាន",-13.100074768066406],["するのは",-13.100077629089355],["lovi",-13.100104331970217],["ଜୁ",-13.100110054016112],["▁rubrik",-13.100112915039062],["множ",-13.100131034851074],["▁pārstāv",-13.10016632080078],["likle",-13.10019302368164],["▁demokraci",-13.100200653076172],["社会主义",-13.100204467773438],["jš",-13.100210189819336],["視頻",-13.100225448608398],["必然",-13.10023593902588],["▁ofera",-13.100238800048828],["▁illərdə",-13.100245475769045],["▁privata",-13.100255966186523],["▁KER",-13.100276947021484],["▁sula",-13.10031509399414],["zv",-13.10035514831543],["نتظر",-13.10036277770996],["ļaut",-13.100377082824709],["ريل",-13.100380897521973],["▁ទំព័រ",-13.100408554077148],["ľu",-13.100433349609377],["明天",-13.100439071655272],["ијски",-13.100460052490234],["▁terrasse",-13.100476264953612],["▁cali",-13.10049057006836],["аются",-13.100539207458496],["aethau",-13.100564002990724],["▁Age",-13.10057544708252],["ppan",-13.100608825683594],["ଛ",-13.100640296936035],["wab",-13.100653648376465],["▁granic",-13.100682258605955],["пър",-13.100689888000488],["alna",-13.10069179534912],["▁लिएर",-13.100696563720703],["▁Makanan",-13.100709915161133],["情感",-13.100727081298828],["▁solidar",-13.10072898864746],["Um",-13.100763320922852],["▁Disa",-13.100793838500977],["▁Им",-13.100823402404783],["ánk",-13.100839614868164],["▁ພ",-
13.100852012634276],["▁மாவட்ட",-13.100868225097656],["▁hadis",-13.100872039794922],["▁servo",-13.100872039794922],["复杂",-13.100884437561035],["决策",-13.100893020629885],["ЭС",-13.100899696350098],["▁0,1",-13.100902557373049],["tlich",-13.100908279418944],["借款",-13.10093879699707],["▁අගමැති",-13.100948333740234],["▁realitza",-13.10094928741455],["▁đứa",-13.1009521484375],["▁ھەققىدە",-13.1009521484375],["▁ସମସ୍ୟା",-13.1009521484375],["โทษ",-13.100953102111816],["▁iTunes",-13.100954055786133],["主持",-13.10096549987793],["▁tools",-13.100969314575195],["▁ምክር",-13.10097599029541],["ڀ",-13.100988388061523],["ඇ",-13.100994110107422],["▁Familia",-13.101001739501951],["运输",-13.101017951965332],["▁atendimento",-13.101021766662598],["▁poslije",-13.101021766662598],["▁València",-13.101057052612305],["ናቸው",-13.101078033447266],["ируют",-13.101091384887695],["chor",-13.101106643676758],["▁04.",-13.101119995117188],["نظام",-13.101123809814451],["elka",-13.10112476348877],["estima",-13.101158142089844],["▁tulemus",-13.101168632507324],["llus",-13.101174354553224],["ထပ္",-13.101207733154297],["uksissa",-13.101253509521484],["▁stessi",-13.101326942443848],["▁tanul",-13.101329803466797],["গ্রাম",-13.10135555267334],["▁នាំ",-13.1013765335083],["▁gasi",-13.10140609741211],["▁kaliteli",-13.10141372680664],["ํา",-13.1014404296875],["ેક",-13.101444244384766],["δύ",-13.101449966430664],["▁опита",-13.101458549499512],["▁አሉ",-13.101468086242676],["ಸಂ",-13.101490020751951],["κει",-13.101496696472168],["caj",-13.101545333862305],["2.0",-13.10156536102295],["▁inaan",-13.101591110229492],["▁разно",-13.101666450500488],["▁您",-13.101680755615234],["▁Valencia",-13.101686477661133],["▁Petri",-13.10170078277588],["ගි",-13.101712226867676],["▁fajn",-13.101713180541992],["▁jaka",-13.10173225402832],["▁aurait",-13.101738929748535],["客房",-13.101738929748535],["ලය",-13.10178279876709],["のを",-13.101806640625],["▁সংখ্যা",-13.10184383392334],["▁Pep",-13.101844787597656],["Ana",-13.101855278015137],["▁Män",-13.10185718536377],["▁Össze",-13.10186004638672],["▁Sinn",-13.101874351501465],["មនុស្ស",-13.101884841918944],["siooni",-13.101903915405272],["ruit",-13.101937294006348],["โปรด",-13.101951599121094],["妇",-13.102005004882812],["▁interventi",-13.102070808410645],["▁әл",-13.10208225250244],["ubo",-13.102092742919922],["▁adresi",-13.102094650268556],["▁deberá",-13.102096557617188],["▁životu",-13.102099418640137],["മാണ",-13.102134704589844],["дой",-13.102142333984377],["▁rizik",-13.102153778076172],["енный",-13.102157592773438],["товар",-13.102160453796388],["絵",-13.102174758911133],["▁ضلعو",-13.102184295654297],["▁notes",-13.102185249328612],["▁Poate",-13.10219383239746],["▁encontrado",-13.10219383239746],["ကြောင့်",-13.102200508117676],["▁Sehingga",-13.102200508117676],["▁insbesondere",-13.102200508117676],["▁ਮੇਰੇ",-13.102200508117676],["٫",-13.102201461791992],["▁կիրառ",-13.102201461791992],["▁Stavanger",-13.102202415466309],["▁මුහුණ",-13.102202415466309],["▁ovláda",-13.10220432281494],["▁johon",-13.102208137512209],["▁عملی",-13.102208137512209],["▁ഉണ്ട",-13.102214813232422],["▁strumenti",-13.10222339630127],["cjach",-13.102228164672852],["▁የተለያዩ",-13.1022310256958],["▁CAP",-13.102231979370115],["▁aspecto",-13.102253913879396],["[11]",-13.102272033691406],["ស្រី",-13.102279663085938],["यन",-13.102298736572266],["▁האדם",-13.10230827331543],["▁کلاس",-13.102340698242188],["▁177",-13.102341651916504],["▁실시",-13.102368354797363],["eaux",-13.10236930847168],["▁grupp",-13.102374076843262],["▁امتحان",-13.102380752563477],["▁গত",-13.1024141311645
5],["waz",-13.102420806884766],["బంధ",-13.102425575256348],["▁gốc",-13.102431297302246],["▁खाने",-13.102447509765623],["▁καλή",-13.10244846343994],["▁بهبود",-13.102457046508787],["érték",-13.102476119995115],["tner",-13.102479934692385],["▁രോഗ",-13.10251522064209],["▁logge",-13.102540969848633],["再度",-13.102551460266112],["လေ",-13.102561950683594],["▁చూస",-13.102561950683594],["▁iklan",-13.102601051330566],["▁صدای",-13.102603912353516],["lman",-13.102604866027832],["▁סע",-13.10260772705078],["▁izmed",-13.10262680053711],["زده",-13.102630615234377],["▁Анти",-13.102643013000488],["▁tijela",-13.10265064239502],["▁Kenapa",-13.102659225463867],["▁δημοσ",-13.102680206298828],["ସନ୍",-13.102690696716309],["認",-13.102690696716309],["▁politiet",-13.10269832611084],["▁sedaj",-13.102702140808104],["▁incur",-13.10272979736328],["▁бог",-13.102741241455078],["љено",-13.102767944335938],["▁Naslov",-13.102770805358888],["stave",-13.102778434753418],["▁Product",-13.10278606414795],["zelf",-13.102789878845217],["▁üzvü",-13.102801322937012],["▁نیروی",-13.102824211120604],["ത്തേ",-13.102828025817873],["▁kjempe",-13.102864265441896],["przy",-13.102869033813477],["▁Soft",-13.102887153625488],["demokrat",-13.102897644042969],["▁atmosfera",-13.102898597717283],["zami",-13.102914810180664],["▁ಬಳಸ",-13.102989196777344],["▁leita",-13.103007316589355],["▁rikt",-13.103010177612305],["▁zene",-13.103078842163086],["▁អាយុ",-13.103079795837402],["▁ulaz",-13.103084564208984],["лөө",-13.103089332580566],["vallen",-13.10313320159912],["状",-13.10314655303955],["©",-13.1031494140625],["▁விழா",-13.103153228759766],["▁Clean",-13.103154182434082],["ុយ",-13.103198051452637],["ача",-13.10323429107666],["ומ",-13.10323429107666],["▁Kür",-13.103251457214355],["▁Constantin",-13.103303909301758],["▁spekul",-13.103321075439451],["xico",-13.103325843811035],["alny",-13.10333251953125],["▁ਵਧ",-13.103358268737791],["tario",-13.10336208343506],["реза",-13.103363037109377],["▁табли",-13.10336685180664],["▁Salva",-13.103367805480955],["▁tuum",-13.103376388549805],["斑",-13.103376388549805],["▁Barbara",-13.10338020324707],["knya",-13.10339069366455],["▁παιχνίδι",-13.103394508361816],["▁Bá",-13.103413581848145],["СР",-13.10342502593994],["▁басшысы",-13.103425979614258],["▁milyar",-13.103428840637209],["▁πλαίσιο",-13.103450775146484],["▁дүгээр",-13.103450775146484],["▁сургуулийн",-13.103450775146484],["▁تقرير",-13.103450775146484],["▁वितरण",-13.103450775146484],["▁pelaksanaan",-13.1034517288208],["▁Крим",-13.103452682495115],["bble",-13.1034574508667],["▁Ciudad",-13.103461265563965],["▁Aarhus",-13.103468894958496],["▁Esper",-13.103475570678713],["▁ಪಡೆದ",-13.103498458862305],["▁כזה",-13.10352897644043],["sena",-13.103543281555176],["▁galvo",-13.103543281555176],["▁Perfect",-13.103549003601074],["▁volgen",-13.10355281829834],["▁Dodoma",-13.103557586669922],["▁berharap",-13.103560447692873],["৬",-13.10356616973877],["▁156",-13.103567123413086],["žné",-13.103570938110352],["▁kapcsolatban",-13.103583335876465],["▁arka",-13.10361385345459],["യോഗ",-13.103625297546388],["gazdaság",-13.103629112243652],["▁застав",-13.103629112243652],["▁Карл",-13.103642463684082],["▁Quản",-13.103643417358398],["▁mbl",-13.103647232055664],["一直在",-13.10365104675293],["aani",-13.10365390777588],["▁dovoljno",-13.10370922088623],["mýš",-13.103715896606444],["▁وتم",-13.103729248046877],["лда",-13.103731155395508],["케",-13.10374641418457],["aalne",-13.103798866271973],["álta",-13.103840827941896],["▁آئي",-13.10384464263916],["ਦਰ",-13.103846549987791],["▁modifi",-13.103852272033691]
,["▁vegar",-13.103861808776855],["δικά",-13.103869438171388],["گۈ",-13.103877067565918],["▁കാണുന്ന",-13.103891372680664],["▁WEB",-13.103897094726562],["▁eat",-13.103897094726562],["の中に",-13.10390281677246],["▁občas",-13.10390567779541],["▁vore",-13.10393238067627],["كين",-13.103936195373535],["rao",-13.103937149047852],["Pol",-13.103952407836914],["▁атмосфер",-13.10397720336914],["▁genug",-13.103979110717772],["▁Paula",-13.103988647460938],["▁FN",-13.103999137878418],["ÜL",-13.10400104522705],["סטי",-13.104018211364746],["▁લેવા",-13.104028701782228],["質量",-13.104031562805176],["oordeel",-13.104036331176758],["▁prä",-13.104045867919922],["▁iných",-13.104070663452148],["ஹா",-13.104073524475098],["行って",-13.104073524475098],["▁Bruno",-13.104085922241213],["▁utilizzare",-13.104138374328612],["▁parlamentar",-13.104172706604004],["▁नये",-13.10417652130127],["ställd",-13.104181289672852],["▁svara",-13.104186058044434],["▁बोले",-13.104193687438965],["▁krit",-13.10421657562256],["▁jesteś",-13.104243278503418],["▁نصف",-13.104252815246582],["▁сек",-13.10427188873291],["ต่างประเทศ",-13.104273796081545],["▁avio",-13.104273796081545],["▁máš",-13.104292869567873],["▁информ",-13.104292869567873],["-90",-13.104314804077148],["▁kabar",-13.104314804077148],["▁axuda",-13.10431957244873],["ขับ",-13.104375839233398],["­­",-13.104376792907717],["This",-13.10439682006836],["▁전화",-13.10440444946289],["▁χορ",-13.104413986206056],["áte",-13.104418754577637],["նելու",-13.104421615600586],["▁၈",-13.104421615600586],["▁જીવ",-13.104439735412598],["시설",-13.104447364807127],["-2011",-13.104472160339355],["zeli",-13.104484558105469],["注重",-13.104499816894531],["▁प्रतियोगिता",-13.104524612426758],["▁والق",-13.10454273223877],["▁מאל",-13.104601860046388],["▁Kontakta",-13.104628562927246],["smål",-13.104634284973145],["▁állás",-13.104639053344728],["▁Бре",-13.104653358459473],["紧张",-13.10466766357422],["siva",-13.104673385620115],["▁skje",-13.104681968688965],["▁elnök",-13.104687690734863],["HC",-13.104696273803713],["▁Airbnb",-13.10470199584961],["▁القاهرة",-13.104702949523926],["▁Sekarang",-13.104703903198242],["▁предпочита",-13.104705810546877],["▁평균",-13.104705810546877],["▁ushbu",-13.104706764221191],["▁українського",-13.104711532592772],["▁Daarnaast",-13.10471248626709],["▁distancia",-13.104713439941406],["▁admir",-13.10472011566162],["▁kontinent",-13.104724884033203],["时刻",-13.10473918914795],["ခံစား",-13.104741096496582],["شاء",-13.104766845703123],["▁पाच",-13.104780197143556],["▁Stol",-13.104793548583984],["▁влиза",-13.104817390441896],["▁автомобили",-13.104819297790527],["▁semaines",-13.10482406616211],["Су",-13.104840278625488],["kës",-13.10485553741455],["▁saída",-13.104859352111816],["łość",-13.104870796203612],["▁ره",-13.104872703552246],["▁poucos",-13.10487461090088],["▁Balti",-13.10488224029541],["▁Ured",-13.10491943359375],["▁altfel",-13.104928016662598],["▁183",-13.10492992401123],["▁livros",-13.104930877685549],["ινε",-13.104934692382812],["▁arro",-13.104949951171877],["▁เพิ่ม",-13.104954719543455],["▁נשים",-13.104981422424316],["興味",-13.10499382019043],["▁ნე",-13.104999542236328],["พระราช",-13.105010986328123],["▁Virtual",-13.105035781860352],["makta",-13.10508632659912],["sıyla",-13.105110168457031],["▁คํา",-13.10511589050293],["▁zgodnie",-13.10511875152588],["юється",-13.10519027709961],["права",-13.10520076751709],["▁основни",-13.105216979980469],["▁المط",-13.105229377746582],["▁והמ",-13.105243682861328],["▁impar",-13.105257987976074],["▁yidhi",-13.105311393737791],["▁almaq",-13.105320930480955],["▁وخ",-1
3.105329513549805],["ctura",-13.10533046722412],["▁роден",-13.105348587036133],["είας",-13.105351448059082],["▁ძალა",-13.10536289215088],["▁samtal",-13.105377197265623],["▁ለማስ",-13.105391502380373],["▁hina",-13.105399131774902],["▁bawang",-13.105405807495115],["▁selfs",-13.105466842651367],["fett",-13.105497360229492],["▁Dyna",-13.1055326461792],["▁grens",-13.105534553527832],["məz",-13.105554580688477],["▁закону",-13.105563163757324],["▁선정",-13.10558032989502],["▁noho",-13.105586051940918],["▁Ministri",-13.105610847473145],["▁ගමන",-13.105610847473145],["▁çay",-13.105626106262209],["gewicht",-13.105720520019531],["ไม่ต้อง",-13.105721473693848],["▁ಸಿಗ",-13.105722427368164],["▁الثالث",-13.105727195739746],["▁impre",-13.105735778808594],["kain",-13.105757713317873],["▁explain",-13.105762481689451],["▁съдържа",-13.105764389038086],["▁والي",-13.10576629638672],["ខេត្ត",-13.105785369873049],["▁setzen",-13.105801582336426],["Des",-13.105836868286133],["▁Táto",-13.105842590332031],["irlər",-13.105847358703612],["turen",-13.105849266052246],["▁nen",-13.105849266052246],["三大",-13.105857849121094],["építés",-13.10587215423584],["▁កាល",-13.105912208557127],["▁თვალ",-13.10591983795166],["琴",-13.105928421020508],["▁Equip",-13.10593032836914],["▁програмата",-13.10593605041504],["▁Jensen",-13.10594367980957],["どちら",-13.105948448181152],["▁forældre",-13.105955123901367],["▁съвсем",-13.105955123901367],["▁کمیسیون",-13.105955123901367],["▁گردشگری",-13.105956077575684],["▁matumizi",-13.105961799621582],["▁जहां",-13.105964660644531],["▁ઘણા",-13.105964660644531],["▁ಅದನ್ನು",-13.105965614318848],["▁Samstag",-13.105985641479492],["▁Umwelt",-13.105985641479492],["หัวใจ",-13.10599136352539],["▁Visual",-13.10599136352539],["▁червня",-13.105999946594238],["▁olmalıdır",-13.106013298034668],["ברי",-13.106026649475098],["▁İcra",-13.106035232543944],["bahagi",-13.10610580444336],["▁হলো",-13.106118202209473],["▁σα",-13.106128692626951],["iej",-13.10613250732422],["עפ",-13.1061372756958],["▁gentil",-13.106161117553713],["▁parlare",-13.106241226196287],["▁czar",-13.10633659362793],["▁pierdut",-13.106337547302246],["LANG",-13.10634422302246],["▁Gruppen",-13.10634708404541],["▁kunagi",-13.106363296508787],["▁autorit",-13.106369972229004],["▁spécial",-13.106382369995115],["▁tør",-13.10639190673828],["▁реализира",-13.106405258178713],["▁prijavi",-13.10643196105957],["▁Зато",-13.1064453125],["asso",-13.106451988220217],["verne",-13.106452941894531],["▁باسم",-13.106491088867188],["▁Nice",-13.106502532958984],["▁badania",-13.106538772583008],["tető",-13.106569290161133],["▁Jobs",-13.106610298156738],["godišnje",-13.106623649597168],["▁službe",-13.106623649597168],["▁animale",-13.10666561126709],["ektiv",-13.106701850891112],["▁tenke",-13.106730461120604],["▁Fakat",-13.106731414794922],["▁Brez",-13.106738090515137],["▁החי",-13.106739044189451],["ക്കിയ",-13.106765747070312],["▁Ђ",-13.106767654418944],["▁Barri",-13.106805801391602],["யுடன்",-13.106826782226562],["▁geg",-13.10682773590088],["▁anzu",-13.106881141662598],["▁Palace",-13.10690212249756],["▁பிரி",-13.10690212249756],["הו",-13.106910705566406],["20)",-13.106924057006836],["▁fios",-13.10695743560791],["高级",-13.106958389282228],["▁processos",-13.106992721557615],["▁BMT",-13.106998443603516],["▁συγκ",-13.10700225830078],["▁Stri",-13.107008934020996],["类似",-13.107015609741213],["cava",-13.10701847076416],["terület",-13.107019424438477],["ющее",-13.10703182220459],["чыцца",-13.107050895690918],["▁ছিল",-13.107100486755373],["▁opening",-13.10714054107666],["patia",-13.10714626312256
],["SIM",-13.107151985168455],["▁Dhu",-13.107154846191406],["▁Holding",-13.107163429260254],["▁noves",-13.107163429260254],["పోయింది",-13.107166290283203],["▁chciał",-13.10716724395752],["細胞",-13.107172966003418],["▁اغ",-13.107173919677734],["▁Agama",-13.107187271118164],["▁بط",-13.107189178466797],["๔",-13.107210159301758],["ສັງຄົມ",-13.107210159301758],["▁δουλειά",-13.107210159301758],["▁сияқты",-13.107210159301758],["▁먼저",-13.107210159301758],["ไกล",-13.107213020324709],["▁pelayanan",-13.107213020324709],["▁लगातार",-13.107213973999023],["Qur",-13.107222557067873],["▁nevie",-13.107222557067873],["iuose",-13.107232093811035],["▁лютого",-13.107254028320312],["▁Львові",-13.107256889343262],["▁महिलाओं",-13.107263565063477],["▁nějaké",-13.10728931427002],["खण्ड",-13.107295036315918],["jest",-13.10731315612793],["▁sikur",-13.10731315612793],["▁Decreto",-13.107322692871094],["▁namų",-13.107327461242676],["ठे",-13.107337951660156],["▁mik",-13.107339859008787],["▁meş",-13.107359886169434],["തൃ",-13.107366561889648],["▁დო",-13.107369422912598],["公寓",-13.107391357421877],["会员",-13.107409477233888],["▁takrat",-13.107421875],["▁наличие",-13.10743236541748],["▁hartzen",-13.10745334625244],["▁propis",-13.10745906829834],["▁habitantes",-13.107492446899414],["ثي",-13.107500076293944],["یسی",-13.107504844665527],["▁decor",-13.107521057128906],["▁Nemo",-13.10755729675293],["▁javnosti",-13.107574462890623],["▁ਵਾਲਾ",-13.107584953308104],["kán",-13.107596397399902],["▁austral",-13.107596397399902],["Mark",-13.10763168334961],["компенс",-13.107646942138672],["קור",-13.10765266418457],["▁reten",-13.107666015625],["ადა",-13.107699394226074],["身边",-13.107699394226074],["šiť",-13.10771656036377],["▁مالي",-13.107731819152832],["▁الشهر",-13.107758522033691],["NF",-13.107797622680664],["▁edició",-13.107803344726562],["▁അന",-13.107898712158203],["▁onkin",-13.107924461364746],["мель",-13.107977867126465],["▁Dire",-13.107980728149414],["▁beteken",-13.108028411865234],["పోత",-13.108046531677246],["ที่ดิน",-13.108052253723145],["TÜ",-13.108062744140623],["▁astea",-13.108102798461914],["尋找",-13.108111381530762],["Kr",-13.108115196228027],["▁kohti",-13.10811996459961],["▁거의",-13.10815143585205],["يكا",-13.108162879943848],["▁Transfer",-13.10816478729248],["hud",-13.108176231384276],["ਸਲ",-13.108189582824709],["▁hő",-13.10819149017334],["▁Hör",-13.108197212219238],["▁ծրագիր",-13.10821533203125],["สํา",-13.108222007751465],["▁chud",-13.108254432678224],["▁এম",-13.108254432678224],["▁делает",-13.108281135559082],["▁०",-13.108293533325195],["▁тухайн",-13.108302116394045],["邮",-13.108344078063965],["હાર",-13.108353614807127],["▁લી",-13.108383178710938],["浸",-13.108424186706545],["▁Məhkəmə",-13.108466148376465],["▁înseamnă",-13.108466148376465],["▁քրեական",-13.108466148376465],["▁ରାଶିଫଳ",-13.108466148376465],["▁प्रणाली",-13.10846710205078],["▁diumenge",-13.108468055725098],["▁працэс",-13.108470916748049],["оўна",-13.10847282409668],["▁žmogus",-13.10847282409668],["▁пользу",-13.108478546142578],["ტყვი",-13.108479499816896],["▁Pflege",-13.108491897583008],["ύν",-13.108492851257324],["▁Kapoor",-13.10849666595459],["stjóri",-13.108497619628906],["▁membayar",-13.10849952697754],["▁చర్చ",-13.10854721069336],["▁aliis",-13.108561515808104],["MY",-13.108565330505373],["▁somente",-13.108567237854004],["▁المشروع",-13.1085786819458],["▁Mans",-13.10859203338623],["wp",-13.108598709106444],["▁البحر",-13.108598709106444],["zwy",-13.108644485473633],["▁Rek",-13.108652114868164],["03.2018",-13.108668327331545],["-2010",-13.108677864074709],["▁самы
е",-13.108718872070312],["▁أمس",-13.108736991882324],["▁Veli",-13.108755111694336],["▁ilişki",-13.108758926391602],["kiai",-13.108762741088867],["▁félags",-13.108778953552246],["▁ٻن",-13.10879898071289],["本站",-13.108803749084473],["▁venn",-13.108817100524902],["იშ",-13.108819007873535],["steig",-13.108826637268066],["▁daŭr",-13.10883331298828],["নু",-13.108863830566406],["▁продажба",-13.10887050628662],["▁Бургас",-13.108895301818848],["▁conoscere",-13.108901023864746],["▁لڳي",-13.108906745910645],["▁sortie",-13.108936309814451],["▁Giu",-13.108946800231934],["▁smer",-13.108948707580566],["▁ruin",-13.10895824432373],["錄",-13.108966827392578],["▁denke",-13.108972549438477],["issimi",-13.108979225158691],["ИХ",-13.10900592803955],["ازی",-13.109012603759766],["▁ಬಳಿಕ",-13.10902214050293],["▁القرار",-13.109033584594728],["objectif",-13.109050750732422],["▁जमा",-13.109084129333496],["τυ",-13.109103202819824],["▁2005,",-13.10910701751709],["በላ",-13.109126091003418],["▁језику",-13.10914421081543],["▁lopen",-13.109163284301758],["ყოფილ",-13.109164237976074],["▁නායක",-13.109173774719238],["blé",-13.109183311462402],["යන",-13.109197616577148],["ողական",-13.109210968017578],["▁مهاجر",-13.10922622680664],["饰",-13.109238624572754],["▁веду",-13.10925579071045],["сақ",-13.109302520751951],["▁verilen",-13.109310150146484],["▁познати",-13.109312057495115],["مية",-13.109389305114746],["▁Въ",-13.109415054321287],["yad",-13.10943603515625],["▁Comuni",-13.1094388961792],["況",-13.109503746032717],["anju",-13.109516143798828],["ραν",-13.10951805114746],["▁ფი",-13.109522819519045],["▁gemi",-13.10952854156494],["זוג",-13.109530448913574],["▁ڪيس",-13.109533309936523],["플레이",-13.109570503234863],["▁текста",-13.109572410583496],["ाली",-13.10958480834961],["坚",-13.10958766937256],["▁kaysa",-13.10959815979004],["▁miljoner",-13.109604835510254],["▁Охрид",-13.10965061187744],["ијом",-13.109657287597656],["▁Тус",-13.109660148620604],["芝",-13.109663963317873],["âld",-13.109676361083984],["濕",-13.10968017578125],["යකු",-13.109724044799805],["▁Pilipinas",-13.109724044799805],["▁కనిపించ",-13.109724044799805],["▁κέντρο",-13.10972499847412],["▁ନବୀନ",-13.10972499847412],["▁ዓይነት",-13.10972499847412],["▁подготовка",-13.109726905822754],["▁ٻاهر",-13.109726905822754],["▁Challenge",-13.109729766845703],["▁მაქვს",-13.109731674194336],["▁არამედ",-13.109732627868652],["▁الطفل",-13.10973834991455],["▁hjälper",-13.109747886657717],["▁यस्ता",-13.10975170135498],["▁adevarat",-13.109779357910156],["ჟი",-13.109780311584473],["▁Carmen",-13.109783172607422],["▁numurs",-13.109786033630373],["▁аудитор",-13.109786033630373],["▁tapasztalat",-13.1097993850708],["▁Comisión",-13.109801292419434],["েন",-13.109823226928713],["▁પો",-13.109825134277344],["▁تعرض",-13.109880447387695],["▁친구",-13.109896659851074],["▁максимал",-13.109899520874023],["مول",-13.109905242919922],["▁удобно",-13.109923362731934],["▁seguida",-13.109930992126465],["reform",-13.109942436218262],["პრო",-13.10995388031006],["▁දෙනා",-13.109967231750488],["▁moeder",-13.109968185424805],["▁login",-13.10997486114502],["▁zdravil",-13.109989166259766],["øst",-13.11001968383789],["ობაში",-13.110031127929688],["▁happened",-13.110036849975586],["ինգ",-13.110045433044434],["ENDE",-13.110050201416016],["▁bordo",-13.110089302062988],["日益",-13.110102653503418],["▁aspek",-13.110103607177734],["فون",-13.110116004943848],["สอบถาม",-13.110127449035645],["mning",-13.110132217407228],["Vol",-13.110135078430176],["▁მეც",-13.110136985778809],["▁fada",-13.110183715820312],["▁ລັດ",-13.110194206237791],["был",-13.
110198974609377],["utama",-13.11024284362793],["▁Bó",-13.110243797302246],["▁你",-13.11025047302246],["خلف",-13.11026382446289],["టానికి",-13.110270500183104],["込",-13.1102933883667],["までの",-13.110296249389648],["▁баары",-13.110316276550291],["љив",-13.11033535003662],["išča",-13.110366821289062],["▁meja",-13.11036777496338],["▁tumbuh",-13.11038875579834],["▁ಸಂಸ್ಥೆ",-13.110395431518556],["DAS",-13.110400199890137],["▁vermek",-13.110401153564451],["nõu",-13.11044216156006],["▁көк",-13.110458374023438],["मेव",-13.110459327697754],["▁lietu",-13.110466957092283],["▁tölt",-13.110477447509766],["քում",-13.110486030578612],["zı",-13.110499382019045],["▁buni",-13.110541343688965],["▁دە",-13.11054229736328],["ולם",-13.110546112060549],["新增",-13.11056900024414],["▁చేశా",-13.110595703125],["▁réaliser",-13.110610961914062],["▁ислам",-13.110626220703123],["fire",-13.110629081726074],["▁مدارس",-13.110629081726074],["օր",-13.110631942749023],["▁kitob",-13.110636711120604],["▁Rayon",-13.110640525817873],["єте",-13.110647201538086],["Benz",-13.110708236694336],["▁horario",-13.110713005065918],["mağı",-13.110742568969728],["▁proiecte",-13.110747337341309],["▁женщина",-13.110751152038574],["tkin",-13.110782623291016],["提出了",-13.11079216003418],["▁beren",-13.110806465148926],["▁vini",-13.110806465148926],["▁Dazu",-13.110833168029783],["សារ",-13.110847473144531],["▁facere",-13.110864639282228],["bric",-13.110872268676758],["礦",-13.110947608947754],["vuo",-13.110952377319336],["ският",-13.110977172851562],["ธนาคาร",-13.11098289489746],["▁bhliain",-13.110983848571776],["▁kedudukan",-13.110983848571776],["▁tökéletes",-13.110983848571776],["▁ಕಥೆ",-13.110983848571776],["▁ειναι",-13.110984802246094],["▁நடிகர்",-13.110984802246094],["▁Jesús",-13.110987663269045],["▁бодлого",-13.11098861694336],["ູ້",-13.110998153686523],["▁sampeyan",-13.111004829406738],["▁képzés",-13.111008644104004],["▁ئېلىپ",-13.11101245880127],["反應",-13.111014366149902],["▁oferă",-13.111034393310549],["▁있어서",-13.111040115356444],["läggning",-13.111042022705078],["▁nowo",-13.111059188842772],["▁används",-13.111066818237305],["▁słowa",-13.111074447631836],["▁سپاه",-13.111075401306152],["ىس",-13.111079216003418],["▁Filme",-13.111084938049316],["щего",-13.111106872558594],["▁alimentare",-13.11110782623291],["ério",-13.111119270324709],["▁Instrument",-13.111124038696287],["▁Bing",-13.11114501953125],["▁لق",-13.111153602600098],["▁Hvem",-13.111163139343262],["▁CF",-13.111167907714844],["保罗",-13.111167907714844],["▁درون",-13.11117172241211],["▁танилцуул",-13.111180305480955],["▁اصلا",-13.111200332641602],["▁szt",-13.111224174499512],["各项",-13.111234664916992],["▁tuma",-13.1112642288208],["ეკ",-13.111278533935549],["▁fós",-13.111289024353027],["▁کسانی",-13.111292839050291],["▁ভাষা",-13.11129379272461],["kuna",-13.111298561096191],["पटक",-13.111302375793455],["အတြင္း",-13.111366271972656],["ყე",-13.111383438110352],["▁sorti",-13.111455917358398],["▁selleks",-13.111468315124512],["▁медицински",-13.111473083496094],["ට්ටු",-13.111480712890623],["▁посад",-13.111491203308104],["▁Administración",-13.111512184143066],["stoffen",-13.111515998840332],["▁hujan",-13.111530303955078],["▁будто",-13.111539840698242],["layo",-13.111547470092772],["▁رسم",-13.11155605316162],["▁menee",-13.111557006835938],["niekiem",-13.111589431762695],["▁Boo",-13.111590385437012],["▁צום",-13.111594200134276],["▁себеп",-13.111611366271973],["▁بع",-13.111618995666504],["satser",-13.111638069152832],["ayat",-13.111648559570312],["대출",-13.111688613891602],["▁traktor",-13.111692428588867],["な
のです",-13.11172580718994],["▁boeken",-13.111726760864258],["▁ciri",-13.11172866821289],["venir",-13.111737251281738],["▁Inge",-13.111771583557127],["бъ",-13.111776351928713],[".12.2018",-13.111780166625977],["კლი",-13.11178493499756],["μος",-13.111807823181152],["▁alipo",-13.111807823181152],["▁ھن",-13.111821174621582],["▁Мала",-13.111834526062012],["ъб",-13.111844062805176],["▁đám",-13.111845970153809],["హి",-13.111848831176758],["სე",-13.111851692199709],["வம்",-13.111858367919922],["▁ياش",-13.11186695098877],["▁صادر",-13.111900329589844],["euro",-13.11190128326416],["▁comportament",-13.11191463470459],["▁ומו",-13.111937522888184],["لىم",-13.11195182800293],["ただし",-13.111953735351562],["▁enpresa",-13.111984252929688],["گیری",-13.111988067626951],["ļauj",-13.11199951171875],["ຄ່າ",-13.112038612365724],["▁miljard",-13.112083435058594],["rrean",-13.112085342407228],["▁oferi",-13.112099647521973],["▁construit",-13.112112045288086],["ڻو",-13.112119674682615],["ităţi",-13.112122535705566],["▁сър",-13.112127304077148],["▁Politic",-13.112144470214844],["环节",-13.112168312072754],["▁prioritet",-13.11217212677002],["▁Savo",-13.112180709838867],["▁نهاد",-13.112188339233398],["▁Zir",-13.112192153930664],["▁situacija",-13.112192153930664],["कों",-13.11219310760498],["駐",-13.11220645904541],["▁Esa",-13.112215995788574],["▁dettagli",-13.112224578857422],["拓展",-13.112236976623535],["qy",-13.112244606018066],["▁exercício",-13.112244606018066],["▁ημέρες",-13.112244606018066],["▁ਸਮੇਂ",-13.112244606018066],["▁সড়ক",-13.112245559692385],["▁спокойно",-13.1122465133667],["▁veröffentlicht",-13.112247467041016],["መጥ",-13.112258911132812],["▁සියල්ල",-13.112258911132812],["▁senki",-13.112264633178713],["▁آدرس",-13.112264633178713],["▁միշտ",-13.112286567687988],["usios",-13.112290382385254],["вій",-13.112295150756836],["Ok",-13.112302780151367],["દેશ",-13.112303733825684],["ನೂ",-13.112305641174316],["mije",-13.11232566833496],["▁गरिने",-13.11232566833496],["▁ವರ್ಷದ",-13.112327575683594],["▁(2014)",-13.112346649169922],["▁smije",-13.112354278564451],["teren",-13.112367630004885],["▁jep",-13.11239242553711],["ล์",-13.112396240234377],["സോ",-13.11240005493164],["▁fyrirtæki",-13.112401008605955],["▁विमान",-13.112407684326172],["help",-13.112438201904297],["▁ciała",-13.112439155578612],["▁shacabka",-13.112444877624512],["▁соответствующи",-13.112465858459473],["▁Nema",-13.112483978271484],["▁दर्शन",-13.112483978271484],["▁destin",-13.112486839294434],["രണ",-13.112515449523926],["▁rè",-13.11253547668457],["▁కుమార్",-13.112540245056152],["▁Utili",-13.112555503845217],["▁გოგო",-13.11256217956543],["▁លេខ",-13.11256504058838],["▁Cash",-13.112577438354492],["▁חדשים",-13.112584114074709],["▁aval",-13.112592697143556],["▁startup",-13.112594604492188],["聯合",-13.112597465515137],["▁alamat",-13.112604141235352],["chatting",-13.1126127243042],["▁jud",-13.112622261047363],["ուզ",-13.112628936767578],["▁ката",-13.112628936767578],["▁ترکیب",-13.11264991760254],["▁Bada",-13.112663269042969],["λευ",-13.112675666809082],["bilen",-13.112676620483398],["▁аўтар",-13.112692832946776],["▁Dėl",-13.112706184387209],["▁orgasm",-13.11270809173584],["gtig",-13.112709999084473],["掲載",-13.112754821777344],["fers",-13.112756729125977],["▁ଜଳ",-13.112757682800291],["▁diferen",-13.112760543823242],["▁دندان",-13.112770080566406],["▁ಯು",-13.112783432006836],["Super",-13.11279010772705],["▁videa",-13.11279296875],["▁ун",-13.112825393676758],["בלו",-13.112833023071287],["▁proteg",-13.11284351348877],["▁Canal",-13.112850189208984],["началь",-13.112869262695312],[
"▁opleiding",-13.112874031066896],["▁yoku",-13.112895965576172],["▁paciente",-13.11290454864502],["▁Antes",-13.112905502319336],["▁kvalitets",-13.112942695617676],["▁plāno",-13.112951278686523],["▁ٹو",-13.112970352172852],["▁બીજા",-13.1129789352417],["▁wymieni",-13.11298942565918],["▁शक",-13.112992286682127],["▁sergi",-13.112995147705078],["ੇਰ",-13.113021850585938],["παρ",-13.113022804260254],["▁দিতে",-13.11304759979248],["▁piena",-13.11305046081543],["mania",-13.11307144165039],["ภัย",-13.113089561462402],["▁سامان",-13.113102912902832],["▁autoriza",-13.113112449645996],["butan",-13.113119125366213],["▁205",-13.113119125366213],["▁начале",-13.113131523132324],["▁данной",-13.11313533782959],["▁розум",-13.113136291503906],["▁lidhur",-13.11314868927002],["»;",-13.113176345825195],["▁شرم",-13.113189697265623],["▁knihy",-13.113195419311523],["▁kracht",-13.113208770751951],["ποτε",-13.11321258544922],["▁aran",-13.11322784423828],["▁ရွိ",-13.113232612609863],["▁للإ",-13.113268852233888],["▁ጸ",-13.11330509185791],["▁dominant",-13.113306045532228],["▁forex",-13.113358497619627],["▁bombard",-13.113362312316896],["▁nagrad",-13.113386154174805],["værelse",-13.113398551940918],["க்கப்பட்ட",-13.1134033203125],["▁esine",-13.113450050354004],["▁городе",-13.113465309143066],["ீர்கள்",-13.113479614257812],["حف",-13.113496780395508],["▁imidlertid",-13.113507270812988],["▁имущества",-13.113507270812988],["▁хариуцлага",-13.113507270812988],["▁todavía",-13.113508224487305],["▁אותך",-13.113508224487305],["▁पछिल्लो",-13.11350917816162],["▁електричн",-13.113513946533203],["▁політики",-13.11351490020752],["▁newspaper",-13.11352252960205],["▁Giới",-13.113523483276367],["▁تجاری",-13.113525390625],["▁pillanat",-13.113526344299316],["▁tänker",-13.11353588104248],["೪",-13.113544464111328],["СТВО",-13.113554954528809],["▁Roy",-13.113558769226074],["▁гишүүд",-13.113558769226074],["▁tref",-13.11355972290039],["▁maamulka",-13.113561630249023],["XC",-13.113567352294922],["▁deren",-13.113577842712402],["▁шақыр",-13.113582611083984],["труд",-13.11358642578125],["ිකාව",-13.1135892868042],["▁veiklos",-13.113603591918944],["klubb",-13.113604545593262],["▁dispozici",-13.11361026763916],["▁обучения",-13.113630294799805],["ကေလး",-13.113633155822754],["ですから",-13.113635063171388],["▁évvel",-13.11364459991455],["▁jednego",-13.113670349121094],["▁theme",-13.113672256469728],["▁farà",-13.113683700561523],["иќ",-13.113697052001951],["▁fres",-13.113698959350586],["▁Krieg",-13.113741874694824],["elektr",-13.11374282836914],["هنن",-13.113763809204102],["越來越",-13.11378002166748],["▁upplevelse",-13.113781929016112],["▁ספק",-13.113795280456545],["ىيەت",-13.11379623413086],["▁эд",-13.113798141479492],["十五",-13.113808631896973],["онд",-13.113814353942873],["▁此外",-13.113815307617188],["▁brzo",-13.113858222961426],["една",-13.113862991333008],["àtica",-13.113869667053224],["זא",-13.113880157470703],["▁vyst",-13.113882064819336],["സൗ",-13.113895416259766],["▁пикир",-13.11390209197998],["โป",-13.113906860351562],["▁edilmesi",-13.113909721374512],["pallo",-13.11391544342041],["▁փողոց",-13.113924980163574],["▁massimo",-13.11392879486084],["▁دعم",-13.113936424255373],["▁والف",-13.113967895507812],["▁dejavnosti",-13.114005088806152],["ියා",-13.11403465270996],["生的",-13.114054679870604],["ναν",-13.114070892333984],["жел",-13.1140775680542],["Aku",-13.114105224609377],["▁intrat",-13.114108085632324],["▁installer",-13.114110946655272],["▁رسمي",-13.114136695861816],["riigi",-13.11415195465088],["pka",-13.114161491394045],["búnað",-13.114176750183104],["▁ବିଭାଗ
",-13.114185333251951],["▁Bybel",-13.1141996383667],["▁maamul",-13.114201545715332],["▁አድር",-13.11420440673828],["viete",-13.114224433898926],["▁ಖ",-13.1142578125],["mət",-13.114263534545898],["▁mundësi",-13.114279747009276],["programma",-13.114280700683594],["▁ulei",-13.114288330078123],["▁ගන්නා",-13.114290237426758],["pustil",-13.114309310913086],["▁mna",-13.114324569702148],["abel",-13.114344596862791],["tland",-13.114360809326172],["▁पड़",-13.114365577697754],["▁хүч",-13.114367485046388],["reste",-13.114395141601562],["▁razgovor",-13.11440086364746],["▁ভোট",-13.114452362060549],["><",-13.114456176757812],["igheter",-13.114471435546877],["▁fiscale",-13.114471435546877],["▁maktab",-13.11447525024414],["▁закри",-13.11447525024414],["▁ਯੂ",-13.114497184753418],["▁Sache",-13.114501953125],["\"),",-13.114506721496582],["▁بتا",-13.114509582519531],["zył",-13.114513397216797],["▁Member",-13.114517211914062],["▁Afrikaans",-13.114527702331545],["лігін",-13.114542007446287],["निर्",-13.114578247070312],["▁ohjelma",-13.114580154418944],["▁sanoi",-13.114612579345703],["▁platí",-13.114668846130373],["ცემა",-13.114675521850586],["ezen",-13.114680290222168],["▁sentral",-13.114680290222168],["urra",-13.114686965942385],["▁kapacitet",-13.114712715148926],["不動産",-13.11475658416748],["lex",-13.11476707458496],["▁Baarlamaanka",-13.114771842956545],["▁Jahrhundert",-13.114771842956545],["▁Nẵng",-13.114771842956545],["▁Després",-13.11478042602539],["連結",-13.114787101745604],["▁ဘယ္",-13.114794731140137],["▁ಮಾತು",-13.114805221557615],["▁בעיקר",-13.11484718322754],["▁komunumo",-13.114848136901855],["hatra",-13.114849090576172],["вича",-13.114864349365234],["▁practic",-13.114877700805664],["ушу",-13.11488151550293],["▁stronę",-13.114892959594728],["▁விலை",-13.114916801452637],["機器",-13.114937782287598],["▁javob",-13.11494255065918],["bije",-13.114948272705078],["ుతూ",-13.114949226379396],["▁tvrdi",-13.114949226379396],["▁statistika",-13.114957809448242],["▁убеди",-13.11496353149414],["hnung",-13.114968299865724],["iniais",-13.114989280700684],["▁erabilera",-13.114998817443848],["tumis",-13.115032196044922],["▁objektu",-13.115046501159668],["▁کلیدی",-13.11506462097168],["ológiai",-13.115082740783691],["捷",-13.115097045898438],["▁miliona",-13.115103721618652],["▁페이지",-13.115105628967283],["▁Fiat",-13.115141868591309],["▁bane",-13.11514663696289],["rece",-13.115152359008787],["▁Debat",-13.115185737609863],["▁Bakit",-13.115242958068848],["▁기준",-13.115253448486328],["▁ಆದ",-13.115257263183594],["▁Մա",-13.115270614624023],["▁Pauli",-13.115281105041504],["▁varer",-13.115283012390137],["ාත්මක",-13.115288734436035],["▁پڑھیں",-13.115311622619627],["▁aprel",-13.115339279174805],["VID",-13.115360260009766],["▁partij",-13.115371704101562],["▁bizony",-13.115378379821776],["▁دید",-13.115415573120115],["▁رأي",-13.115419387817385],["▁déco",-13.115422248840332],["▁احد",-13.115422248840332],["pravi",-13.115448951721191],["▁ddefnyddio",-13.115459442138672],["▁gà",-13.11546516418457],["▁bangla",-13.115474700927734],["▁משו",-13.115506172180176],["▁cala",-13.11551570892334],["λαμ",-13.11553955078125],["rezza",-13.115598678588867],["▁(28)",-13.115598678588867],["polit",-13.115602493286133],["▁تال",-13.115636825561523],["▁beneficiar",-13.115638732910156],["▁приход",-13.115647315979004],["stöð",-13.115659713745115],["보고",-13.115668296813965],["▁længe",-13.11566925048828],["▁tona",-13.115687370300291],["เกิดขึ้น",-13.11568832397461],["äinen",-13.115693092346191],["▁ທະ",-13.115699768066406],["bril",-13.115714073181152],["ніше",-13.115714073181
152],["▁edirik",-13.115726470947266],["▁தேர்வு",-13.115744590759276],["▁nowy",-13.115745544433594],["diet",-13.115751266479492],["ுடைய",-13.115755081176758],["▁lenni",-13.11575984954834],["▁Hud",-13.115767478942873],["▁ati",-13.115790367126465],["▁scegliere",-13.115802764892578],["▁خلک",-13.115809440612791],["ျပင္",-13.115830421447754],["በሩ",-13.115842819213867],["messa",-13.1158447265625],["cyjnego",-13.115852355957031],["▁kert",-13.115856170654297],["тесь",-13.11586570739746],["▁mandag",-13.115872383117676],["▁Susi",-13.115878105163574],["्रो",-13.115890502929688],["ກາ",-13.11590576171875],["▁SOS",-13.11591339111328],["▁ubu",-13.115918159484863],["▁туризм",-13.115922927856444],["▁데이터",-13.115925788879396],["▁naravno",-13.115942001342772],["rann",-13.11596393585205],["迁",-13.11598777770996],["俱乐部",-13.115989685058594],["ตน",-13.116011619567873],["articolo",-13.116015434265137],["▁Ime",-13.116023063659668],["▁достатъчно",-13.116037368774414],["▁относно",-13.116037368774414],["▁кримінальн",-13.11603832244873],["▁ডেস্ক",-13.116039276123049],["セット",-13.116043090820312],["▁Javascript",-13.116044044494627],["▁ፍርድ",-13.116044998168944],["▁निकाय",-13.116046905517578],["▁عثمان",-13.11605167388916],["▁tsi",-13.11605739593506],["▁kömək",-13.116058349609377],["ชิ้น",-13.116087913513184],["vogn",-13.116090774536133],["▁pseudo",-13.116090774536133],["▁aproximativ",-13.116092681884766],["▁Republika",-13.116095542907717],["▁Servis",-13.116107940673828],["期望",-13.11610984802246],["▁باشیم",-13.116133689880373],["▁боллоо",-13.116134643554688],["▁फर्क",-13.116137504577637],["ונו",-13.116178512573242],["වේද",-13.116178512573242],["▁lämm",-13.11617946624756],["▁képek",-13.11619472503662],["▁જુઓ",-13.116206169128418],["▁tana",-13.11620807647705],["▁загуби",-13.116251945495604],["objet",-13.11626434326172],["▁talian",-13.116266250610352],["▁مساء",-13.11630630493164],["▁формування",-13.116315841674805],["▁raga",-13.116320610046388],["广东",-13.11634635925293],["▁вашето",-13.116357803344728],["▁ያሉት",-13.116363525390623],["స్తూ",-13.11636447906494],["▁CAS",-13.116373062133787],["▁излезе",-13.116384506225586],["▁कदम",-13.116385459899902],["▁emerge",-13.116409301757812],["اگر",-13.116415977478027],["▁deciso",-13.11642074584961],["▁ചെറിയ",-13.116425514221191],["API",-13.116426467895508],["▁Олон",-13.116436958312988],["▁pemilik",-13.116450309753418],["▁shko",-13.116451263427734],["▁kæmpe",-13.11650276184082],["▁ಆರಂಭ",-13.116509437561035],["ларына",-13.116514205932615],["▁Mass",-13.116570472717283],["íc",-13.11659049987793],["KH",-13.116596221923828],["ခြ",-13.116607666015623],["lnie",-13.11660861968994],["▁පහ",-13.116615295410156],["▁సాగ",-13.11665153503418],["▁altına",-13.116669654846191],["પો",-13.11667537689209],["အေရး",-13.116677284240724],["▁uży",-13.116703987121582],["РС",-13.11673641204834],["▁질",-13.116755485534668],["ோர்",-13.116758346557615],["▁aşkar",-13.116758346557615],["▁Ec",-13.116761207580566],["▁вимог",-13.116764068603516],["guen",-13.116783142089844],["бите",-13.11683750152588],["▁التق",-13.116839408874512],["▁forsøk",-13.116861343383787],["▁Pagina",-13.116869926452637],["وتر",-13.116881370544434],["▁стаж",-13.116896629333496],["gb",-13.116905212402344],["▁ঘটনা",-13.116937637329102],["▁apunta",-13.116939544677734],["▁slučaj",-13.11695957183838],["▁ceart",-13.116973876953123],["▁toote",-13.11701488494873],["Tri",-13.117016792297363],["handler",-13.117018699645996],["▁beras",-13.11703109741211],["моћ",-13.11704158782959],["▁feet",-13.117043495178224],["מוד",-13.11704444885254],["▁hí",-13.11704921722412],["
▁haq",-13.117059707641602],["▁الرو",-13.1170654296875],["स्या",-13.117069244384766],["▁жакын",-13.117069244384766],["▁ანა",-13.117080688476562],["حصل",-13.117093086242676],["▁ettevõtte",-13.117136001586914],["gaj",-13.117181777954102],["ОУ",-13.117186546325684],["ādī",-13.11720085144043],["ນັກ",-13.117207527160645],["▁Must",-13.117212295532228],["▁tuvo",-13.117213249206545],["mys",-13.117234230041504],["▁tui",-13.117270469665527],["▁Alberto",-13.117287635803224],["짜",-13.117291450500488],["მტკიცე",-13.11729335784912],["овіч",-13.117294311523438],["▁Reich",-13.117295265197754],["▁qism",-13.117300987243652],["▁Mashariki",-13.117304801940918],["▁conèixer",-13.117304801940918],["▁erforderlich",-13.117304801940918],["▁wszyscy",-13.117304801940918],["▁ఏర్పాటు",-13.117304801940918],["▁തിരുവനന്തപുരം",-13.117304801940918],["▁Colombia",-13.117305755615234],["▁امريکا",-13.117305755615234],["▁விமர்சனம்",-13.11730670928955],["▁விஜய்",-13.1173095703125],["▁అందుకే",-13.1173095703125],["▁milijard",-13.117310523986816],["血液",-13.11731243133545],["▁aşağıdakı",-13.117314338684082],["▁ນາງ",-13.11732006072998],["▁съюз",-13.117321968078612],["▁hetkellä",-13.11733055114746],["▁Secretaria",-13.117348670959473],["▁dz",-13.117350578308104],["▁यौन",-13.117387771606444],["▁notebook",-13.117393493652344],["▁pequeño",-13.11740779876709],["▁отримати",-13.117411613464355],["▁הסרט",-13.11741828918457],["▁خدمة",-13.117419242858888],["▁skandal",-13.117423057556152],["▁eko",-13.117435455322266],["kao",-13.117460250854492],["▁দুর্",-13.11752986907959],["即将",-13.117538452148438],["▁അവന്",-13.117548942565918],["募集",-13.117558479309082],["porte",-13.117563247680664],["▁intézmény",-13.117568016052246],["▁tanaman",-13.117576599121094],["經常",-13.11759090423584],["ങ്ങിയ",-13.117596626281738],["SG",-13.11760139465332],["農業",-13.117618560791016],["かもしれませんが",-13.117619514465332],["▁nanging",-13.117640495300291],["▁необходимости",-13.117661476135254],["▁көлемі",-13.117674827575684],["Esta",-13.117677688598633],["47)",-13.117690086364746],["▁ספרים",-13.117711067199709],["▁elokuva",-13.11771297454834],["fier",-13.117715835571287],["ደረጉ",-13.11772346496582],["▁haza",-13.117796897888184],["出来的",-13.117801666259766],["▁reserv",-13.117804527282717],["▁النظر",-13.117815971374512],["▁loja",-13.117820739746094],["НТ",-13.117831230163574],["▁নির্",-13.117849349975586],["▁būdu",-13.117873191833496],["▁сих",-13.117884635925291],["▁pensando",-13.11789894104004],["▁concreto",-13.117907524108888],["ực",-13.117927551269531],["▁stads",-13.117929458618164],["▁спал",-13.11793327331543],["ಸರ್",-13.117942810058594],["össä",-13.117944717407228],["मर",-13.117979049682615],["ဆိုင်",-13.117999076843262],["▁Cornel",-13.118002891540527],["▁Tali",-13.118009567260742],["шылар",-13.118038177490234],["pene",-13.118048667907717],["▁GS",-13.118075370788574],["▁visada",-13.118090629577637],["▁सम्बन्धित",-13.11811351776123],["▁suurema",-13.118115425109863],["rle",-13.118117332458496],["ніца",-13.11815357208252],["▁escrita",-13.118154525756836],["krom",-13.118186950683594],["Тү",-13.118220329284668],["▁звіт",-13.118231773376465],["ွာ",-13.118234634399414],["taal",-13.118239402770996],["NAK",-13.118250846862791],["▁سردار",-13.118252754211426],["▁проце",-13.118261337280272],["kkur",-13.118264198303224],["▁klassisk",-13.118266105651855],["▁javë",-13.11834716796875],["▁kirjoitta",-13.118353843688965],["上班",-13.118375778198242],["▁baño",-13.11838436126709],["paper",-13.118395805358888],["▁normál",-13.118438720703123],["жав",-13.118456840515137],["insta",-13.118465423583984],["▁
acciones",-13.118468284606934],["يې",-13.1184720993042],["oloog",-13.118480682373049],["timet",-13.118496894836426],["▁Cym",-13.118496894836426],["тельные",-13.118545532226562],["atın",-13.118549346923828],["▁חוב",-13.118556022644045],["ショップ",-13.118557929992676],["▁enfin",-13.118562698364258],["រៀង",-13.11856460571289],["▁igri",-13.118569374084473],["▁яким",-13.118571281433104],["ର୍ଣ୍ଣ",-13.118573188781738],["คะแนน",-13.118573188781738],["▁kuitenkaan",-13.118574142456056],["▁ngắn",-13.118574142456056],["▁împreună",-13.118574142456056],["▁պատգամավոր",-13.118574142456056],["▁کامپیوتر",-13.118574142456056],["▁होईल",-13.118574142456056],["▁տնտեսական",-13.118575096130373],["▁Wis",-13.118577003479004],["nīcā",-13.11857795715332],["▁മറ്റൊരു",-13.118579864501951],["▁ولسوالۍ",-13.118581771850586],["▁жумуш",-13.118582725524902],["▁пачатку",-13.118584632873535],["▁Được",-13.118590354919434],["▁시대",-13.11859130859375],["ເສຍ",-13.118592262268066],["▁Swe",-13.118612289428713],["▁canon",-13.118622779846191],["৪",-13.118630409240724],["إجراء",-13.118635177612305],["▁එදා",-13.118635177612305],["▁olhos",-13.11864948272705],["▁версия",-13.118671417236328],["▁برخورد",-13.11867332458496],["▁задржани",-13.11870002746582],["ágy",-13.118735313415527],["▁заңды",-13.118736267089844],["▁dle",-13.118745803833008],["▁lær",-13.118768692016602],["▁akcija",-13.11878776550293],["▁suunnittelu",-13.11881160736084],["úp",-13.118855476379396],["▁istehsal",-13.118914604187012],["ဘက်",-13.118947982788086],["virti",-13.118948936462402],["више",-13.118968963623049],["▁bevis",-13.118998527526855],["nize",-13.119027137756348],["▁najlepšie",-13.119029998779297],["anci",-13.119033813476562],["▁ත්",-13.11905574798584],["▁fiche",-13.11907196044922],["▁гэтыя",-13.119078636169434],["ڈر",-13.119081497192385],["rės",-13.119112968444824],["▁sortiment",-13.119125366210938],["gta",-13.119126319885254],["▁ghé",-13.119144439697266],["▁घो",-13.119178771972656],["modo",-13.119182586669922],["▁პე",-13.11918830871582],["หยุด",-13.11921501159668],["bū",-13.119221687316896],["▁alıyor",-13.119227409362791],["▁લેખ",-13.119232177734377],["▁teko",-13.119264602661133],["▁ಪ್ರದೇಶ",-13.119290351867676],["ბული",-13.119293212890623],["をご",-13.11931610107422],["▁kultūr",-13.119319915771484],["▁inguru",-13.1193208694458],["vó",-13.119325637817385],["ବର",-13.11937141418457],["▁ശരി",-13.119379043579102],["žite",-13.119406700134276],["ledare",-13.119415283203123],["▁iekš",-13.11941909790039],["▁иску",-13.11941909790039],["▁jobs",-13.11945343017578],["dē",-13.119461059570312],["▁زدن",-13.11948299407959],["▁thù",-13.119492530822754],["▁citat",-13.11950969696045],["▁حدث",-13.11951732635498],["kkar",-13.119542121887209],["取引",-13.119548797607422],["▁เธ",-13.119555473327637],["▁kori",-13.119556427001951],["ઓને",-13.1195650100708],["zala",-13.119571685791016],["▁kuhu",-13.119582176208496],["▁кіно",-13.11959171295166],["jiet",-13.119593620300291],["▁helyett",-13.119604110717772],["▁matar",-13.119608879089355],["▁calcula",-13.119613647460938],["ਚਾ",-13.119623184204102],["▁ଦ୍ୱାରା",-13.119637489318848],["▁וק",-13.119675636291504],["bringen",-13.119686126708984],["記得",-13.119707107543944],["▁รายการ",-13.119709968566896],["▁مخالفت",-13.119715690612791],["▁skaidr",-13.119726181030272],["▁نظرات",-13.119730949401855],["一半",-13.119752883911133],["▁człowiek",-13.119773864746094],["мой",-13.119781494140623],["ysk",-13.11978244781494],["▁cria",-13.119797706604004],["黒",-13.119827270507812],["ठी",-13.119830131530762],["▁cene",-13.11983871459961],["카톡",-13.119843482971191],["▁Cliath",-
13.119844436645508],["▁czerwca",-13.119844436645508],["▁selanjutnya",-13.119844436645508],["▁είμαι",-13.119844436645508],["▁București",-13.119845390319824],["▁ભારતીય",-13.119848251342772],["▁оствари",-13.119854927062988],["vaat",-13.11985969543457],["▁පෙළ",-13.11985969543457],["▁Ресей",-13.119863510131836],["▁juridisk",-13.11987018585205],["esperienza",-13.11987590789795],["ส่งเสริม",-13.119879722595217],["rih",-13.119894981384276],["▁سرویس",-13.11990451812744],["▁شهادت",-13.11990451812744],["▁réussi",-13.119905471801758],["ичних",-13.11992073059082],["▁gelo",-13.119935989379885],["▁संबंधित",-13.119935989379885],["▁गाडी",-13.119962692260742],["▁үс",-13.119991302490234],["▁Spirit",-13.11999225616455],["▁주요",-13.119999885559082],["ಿಗ",-13.12003231048584],["▁zaken",-13.12004280090332],["▁ყოველ",-13.120043754577637],["覚",-13.1200532913208],["▁całej",-13.120072364807127],["▁Bakanı",-13.120073318481444],["ebilecek",-13.120076179504396],["▁meeldi",-13.120091438293455],["▁harapan",-13.120135307312012],["▁تخت",-13.120140075683594],["어서",-13.12015438079834],["▁Lakini",-13.120165824890137],["▁Sonder",-13.120182037353516],["▁چک",-13.120209693908691],["րթ",-13.120224952697754],["▁Mẹ",-13.120229721069336],["ပု",-13.120250701904297],["trol",-13.12025260925293],["dija",-13.120278358459473],["▁Головн",-13.120279312133787],["СО",-13.12028980255127],["▁ျပန္",-13.1203031539917],["меша",-13.120305061340332],["▁dipakai",-13.12030792236328],["▁Km",-13.120348930358888],["âge",-13.120351791381836],["▁Nid",-13.120355606079102],["লের",-13.120359420776367],["▁ull",-13.120362281799316],["▁плану",-13.120363235473633],["▁maraqlı",-13.120366096496582],["▁relato",-13.120368003845217],["▁хүс",-13.120370864868164],["▁앞으로",-13.120381355285645],["▁llawer",-13.12039852142334],["▁1、",-13.120408058166504],["▁дъ",-13.12041473388672],["▁Egun",-13.120426177978516],["tryck",-13.120431900024414],["fus",-13.120444297790527],["▁biskup",-13.120445251464844],["照顧",-13.120447158813477],["ừng",-13.12048053741455],["деу",-13.12049674987793],["▁новом",-13.12051773071289],["▁Իր",-13.120518684387209],["лични",-13.120523452758787],["ดีกว่า",-13.12053108215332],["▁Inici",-13.120543479919434],["▁Hå",-13.12055778503418],["▁haf",-13.12058448791504],["ไท",-13.120594024658203],["▁наем",-13.12060260772705],["▁డా",-13.120620727539062],["不然",-13.12062168121338],["ayn",-13.12066650390625],["ਹਾਰ",-13.120667457580566],["▁препо",-13.120668411254885],["ਿਸ",-13.120671272277832],["▁Sey",-13.120692253112791],["ылі",-13.120708465576172],["▁AE",-13.12071132659912],["ägg",-13.120718955993652],["▁Лес",-13.120725631713867],["▁rezult",-13.120770454406738],["▁Meine",-13.120798110961914],["DUN",-13.120809555053713],["▁posición",-13.120816230773926],["▁լր",-13.120829582214355],["▁ημέρα",-13.120834350585938],["кура",-13.12086296081543],["▁bär",-13.120872497558594],["ລາຍ",-13.120888710021973],["▁koulutus",-13.12090301513672],["▁róla",-13.12092399597168],["▁мг",-13.120929718017578],["▁ciekaw",-13.12094020843506],["▁qas",-13.12094020843506],["روح",-13.120951652526855],["▁kár",-13.120975494384766],["▁SRI",-13.120986938476562],["▁fout",-13.120994567871094],["▁τρι",-13.12099552154541],["▁Valmi",-13.120997428894045],["▁agre",-13.12101936340332],["▁jugar",-13.12103271484375],["▁കഴിയ",-13.121041297912598],["ėmis",-13.121044158935549],["ייב",-13.121050834655762],["潔",-13.12106990814209],["戸",-13.12108325958252],["▁Кто",-13.121086120605469],["σου",-13.121098518371582],["▁ücret",-13.121099472045898],["アクセス",-13.12110996246338],["plek",-13.121113777160645],["▁López",-13.12111663818359
4],["▁कारोबार",-13.12111759185791],["▁általános",-13.121118545532228],["▁böcker",-13.121126174926758],["▁సినీ",-13.121152877807615],["無理",-13.121159553527832],["▁patīk",-13.121170043945312],["▁এবার",-13.121170043945312],["مىسى",-13.121170997619627],["▁ਖੇਡ",-13.121177673339844],["vell",-13.12118148803711],["▁მოს",-13.121230125427246],["patikana",-13.121243476867676],["могу",-13.12126636505127],["ხო",-13.121268272399902],["letes",-13.121277809143066],["्यं",-13.121278762817385],["siko",-13.121328353881836],["ຈີນ",-13.121345520019531],["温度",-13.121345520019531],["▁promove",-13.121379852294922],["jąca",-13.12138557434082],["ുന്നുണ്ട്",-13.121404647827148],["▁Kanal",-13.12142276763916],["▁Method",-13.121440887451172],["терди",-13.121442794799805],["▁spomin",-13.12145709991455],["▁teeb",-13.121493339538574],["miš",-13.121496200561523],["子女",-13.121500015258787],["▁Rex",-13.121524810791016],["▁øst",-13.121525764465332],["▁ջր",-13.121574401855469],["▁ויש",-13.121576309204102],["▁Keski",-13.121583938598633],["evan",-13.121600151062012],["▁ipsi",-13.12160301208496],["▁ស៊ី",-13.121627807617188],["▁značk",-13.12163257598877],["АВ",-13.12163543701172],["ብራ",-13.121644020080566],["▁ahí",-13.121657371520996],["▁Nove",-13.121658325195312],["▁Suma",-13.121658325195312],["▁negativa",-13.121679306030272],["▁ইউ",-13.12168025970459],["▁systemu",-13.121686935424805],["hini",-13.121713638305664],["–19",-13.121726036071776],["▁ote",-13.121734619140623],["তর",-13.121747970581056],["▁napr",-13.1217679977417],["شنبه",-13.121777534484863],["знаём",-13.121779441833496],["▁perd",-13.121822357177734],["guda",-13.121871948242188],["▁viernes",-13.121886253356934],["▁NAV",-13.121923446655272],["▁ਐਸ",-13.121926307678224],["цка",-13.121942520141602],["▁פה",-13.121944427490234],["ისი",-13.121954917907717],["▁Phó",-13.121959686279297],["jskim",-13.12196445465088],["▁ኢየሱስ",-13.121986389160156],["▁կո",-13.121992111206056],["▁20.000",-13.12199592590332],["▁lait",-13.121996879577637],["aquí",-13.122005462646484],["▁Moje",-13.12204647064209],["namh",-13.122047424316406],["kalau",-13.12205410003662],["▁البط",-13.12205410003662],["gény",-13.12205696105957],["ched",-13.122071266174316],["▁сталі",-13.122072219848633],["▁Daarom",-13.122077941894531],["▁základe",-13.122088432312012],["daran",-13.122099876403809],["руг",-13.122114181518556],["▁rritje",-13.122136116027832],["▁ferme",-13.122137069702148],["▁ცხ",-13.122152328491213],["▁sell",-13.12215805053711],["己",-13.12224292755127],["▁aproxima",-13.122328758239746],["▁xor",-13.122339248657228],["▁कृत",-13.12234592437744],["明顯",-13.122360229492188],["▁svarbu",-13.122374534606934],["ညီ",-13.12238311767578],["▁ಅರ್ಜಿ",-13.122390747070312],["▁Teknoloji",-13.122391700744627],["▁Whatsapp",-13.122391700744627],["▁kunnskap",-13.122394561767578],["▁Benjamin",-13.122395515441896],["▁regelmäßig",-13.122396469116213],["▁Prev",-13.122398376464844],["▁црква",-13.122406005859377],["▁jederzeit",-13.122413635253906],["▁dokáže",-13.122414588928224],["▁beğen",-13.122422218322754],["▁poboación",-13.1224365234375],["▁hensyn",-13.122451782226562],["idhe",-13.122471809387209],["▁concernant",-13.12248420715332],["▁тегло",-13.122516632080078],["િલ",-13.122522354125977],["hirap",-13.12252712249756],["▁consistent",-13.122535705566406],["▁Amen",-13.122547149658203],["▁Boa",-13.122552871704102],["▁fotografii",-13.122554779052734],["▁Alfred",-13.122563362121582],["ивање",-13.1226224899292],["toxic",-13.122650146484377],["▁дала",-13.122660636901855],["溝通",-13.122666358947754],["▁kompromis",-13.122687339782717],["▁kó",-1
3.122694969177246],["iště",-13.122695922851562],["▁կազմ",-13.122697830200195],["buat",-13.12270736694336],["▁таком",-13.122709274291992],["▁географ",-13.122715950012209],["▁минь",-13.122719764709473],["▁अज",-13.122722625732422],["ระยะ",-13.12272834777832],["▁Soomaali",-13.12272834777832],["▁langit",-13.122743606567385],["▁அல்ல",-13.122747421264648],["چىلىك",-13.122750282287598],["▁fera",-13.122756958007812],["уусу",-13.122759819030762],["දායක",-13.122762680053713],["▁Consiglio",-13.122777938842772],["▁Vēl",-13.122777938842772],["56)",-13.122783660888672],["8/",-13.12278938293457],["▁உடல்",-13.12282371520996],["ფა",-13.122855186462402],["还没有",-13.122861862182615],["▁కూ",-13.122875213623049],["▁आस",-13.12290096282959],["▁سين",-13.122932434082031],["កែ",-13.12294864654541],["▁မိ",-13.122995376586914],["▁досяг",-13.123001098632812],["emon",-13.123011589050291],["▁ridicat",-13.123011589050291],["modell",-13.123013496398926],["▁ኣብ",-13.12303066253662],["ერა",-13.123069763183594],["прост",-13.123071670532228],["נוע",-13.123072624206545],["реден",-13.123091697692873],["▁приложение",-13.123096466064451],["▁popra",-13.123113632202148],["▁યાદ",-13.123114585876465],["බී",-13.123123168945312],["▁Faktor",-13.123126029968262],["▁anty",-13.123128890991213],["▁pikiran",-13.123156547546388],["ქარ",-13.12316608428955],["▁sejam",-13.123180389404297],["▁හද",-13.123188018798828],["▁credo",-13.123193740844728],["▁접",-13.123197555541992],["を確認",-13.12320613861084],["verge",-13.12324333190918],["-2013",-13.123247146606444],["▁ยัง",-13.123248100280762],["वचन",-13.123268127441406],["GIA",-13.123296737670898],["илися",-13.123298645019531],["เช้า",-13.12335205078125],["dano",-13.123353958129885],["went",-13.123383522033691],["ОГ",-13.123388290405272],["▁kuiva",-13.123397827148438],["ZP",-13.123425483703612],["čnom",-13.123440742492676],["▁Divers",-13.12344455718994],["▁navnet",-13.1234769821167],["▁skjul",-13.123483657836914],["▁Tots",-13.123485565185549],["▁געש",-13.123492240905762],["لین",-13.123539924621582],["▁תמונות",-13.123542785644531],["▁tiap",-13.123543739318848],["▁nello",-13.123552322387695],["avam",-13.123557090759276],["▁restaurants",-13.123559951782228],["מק",-13.12356948852539],["▁Lucas",-13.123576164245604],["ไร้",-13.123577117919922],["▁kip",-13.123580932617188],["ഒ",-13.123584747314451],["▁мін",-13.123584747314451],["▁mainit",-13.123587608337402],["тады",-13.123590469360352],["▁refuz",-13.123597145080566],["▁▶",-13.123607635498049],["展开",-13.123618125915527],["▁uç",-13.12362289428711],["रह",-13.123629570007324],["▁makro",-13.123638153076172],["▁करो",-13.123638153076172],["oksid",-13.123652458190918],["みて",-13.12366008758545],["▁టీడీపీ",-13.123665809631348],["▁Country",-13.123666763305664],["▁natomiast",-13.123666763305664],["▁γυναίκα",-13.123666763305664],["▁інформація",-13.123666763305664],["▁jeżeli",-13.123668670654297],["▁ଯଦି",-13.123668670654297],["▁varu",-13.12367820739746],["▁विकल्प",-13.123685836791992],["▁పోటీ",-13.123689651489258],["▁Начало",-13.123695373535156],["▁minska",-13.123698234558104],["▁mechan",-13.123712539672852],["▁Praxis",-13.123716354370115],["alba",-13.123722076416016],["说话",-13.123724937438965],["农民",-13.12372589111328],["ແບບ",-13.123734474182127],["▁pracuje",-13.12374210357666],["▁phiên",-13.123745918273926],["məsinə",-13.123766899108888],["▁основных",-13.12379264831543],["ထက်",-13.123854637145996],["▁odprt",-13.123882293701172],["ups",-13.12392234802246],["▁root",-13.123923301696776],["▁garan",-13.12393856048584],["୍ଵ",-13.123942375183104],["▁úprav",-13.1239595413208],["▁பட
்ட",-13.12397003173828],["▁شناس",-13.123971939086914],["▁plato",-13.123983383178713],["▁மாற்ற",-13.123988151550291],["▁Nata",-13.123992919921877],["▁लिखा",-13.124006271362305],["яването",-13.124018669128418],["▁población",-13.124022483825684],["▁пролет",-13.12403392791748],["▁အိမ္",-13.124038696289062],["ckiego",-13.12405776977539],["kür",-13.124058723449709],["ковски",-13.124067306518556],["▁ჯი",-13.1240873336792],["▁bảng",-13.124107360839844],["▁Петров",-13.124122619628906],["▁ухаан",-13.124127388000488],["▁όσα",-13.12413215637207],["mille",-13.124164581298828],["▁Maan",-13.124170303344728],["▁સમજ",-13.124178886413574],["ssos",-13.124211311340332],["▁эконом",-13.124213218688965],["▁전자",-13.124223709106444],["rabia",-13.12423324584961],["▁bestuur",-13.124238014221191],["tiedot",-13.124261856079102],["ZS",-13.124265670776367],["likt",-13.12428092956543],["▁Orange",-13.124282836914062],["時間を",-13.12428379058838],["▁nah",-13.12428855895996],["▁titi",-13.124296188354492],["▁shart",-13.124305725097656],["▁Коре",-13.124305725097656],["年輕",-13.124306678771973],["ساز",-13.124313354492188],["industrie",-13.124329566955566],["למד",-13.124340057373049],["ssystem",-13.124347686767578],["▁Suite",-13.124408721923828],["▁roze",-13.124420166015623],["▁Београда",-13.12443733215332],["現金",-13.124469757080078],["▁تبدیلی",-13.124475479125977],["ន្",-13.124478340148926],["დენი",-13.124484062194824],["▁reduc",-13.12453269958496],["▁grosse",-13.124534606933594],["pain",-13.124549865722656],["▁المخ",-13.124560356140137],["▁Terre",-13.124566078186035],["▁අඩවිය",-13.124574661254885],["▁floor",-13.124582290649414],["нява",-13.12458610534668],["사가",-13.124591827392578],["רחוב",-13.124594688415527],["▁Ευρώπη",-13.124661445617676],["》。",-13.12468433380127],["▁occur",-13.124695777893066],["▁번째",-13.124709129333496],["▁hadiah",-13.124722480773926],["▁directo",-13.124732971191406],["sidi",-13.124812126159668],["▁ബാല",-13.124815940856934],["ಪೆ",-13.12481689453125],["▁cabe",-13.124832153320312],["▁Програм",-13.124836921691896],["鎖",-13.12486457824707],["▁әсер",-13.124868392944336],["▁Tark",-13.1248779296875],["csoport",-13.12488842010498],["крив",-13.124892234802246],["統",-13.124897956848145],["қтар",-13.12489891052246],["ႏိုင္ငံ",-13.124918937683104],["▁સૌ",-13.124938011169434],["▁priklauso",-13.124939918518066],["▁lényeg",-13.124943733215332],["▁Παρασκευή",-13.124943733215332],["▁παραπάνω",-13.124943733215332],["▁помеѓу",-13.124943733215332],["▁საინტერესო",-13.124943733215332],["▁pegawai",-13.124945640563965],["▁업무",-13.124957084655762],["▁costum",-13.124959945678713],["▁fekete",-13.12498378753662],["▁коргоо",-13.124985694885254],["ỡ",-13.12498664855957],["▁జీవిత",-13.124996185302734],["▁hommes",-13.12499713897705],["eachd",-13.12501335144043],["▁leor",-13.125019073486328],["tulong",-13.125048637390137],["често",-13.125060081481934],["▁vroeg",-13.125066757202148],["▁gone",-13.125079154968262],["mpana",-13.12510585784912],["/20",-13.125109672546388],["▁конференция",-13.125110626220703],["▁پرې",-13.125115394592283],["အို",-13.1251220703125],["▁Tengah",-13.125123977661133],["那個",-13.125164031982422],["நிலை",-13.125174522399902],["▁otvara",-13.125214576721191],["BM",-13.125234603881836],["▁ଡି",-13.125271797180176],["เปิดตัว",-13.125293731689451],["▁анги",-13.125298500061035],["ได้รับการ",-13.125308990478516],["ทน",-13.125310897827148],["▁technika",-13.125314712524414],["▁geheel",-13.125328063964844],["▁palabra",-13.125360488891602],["▁ryhmä",-13.12538719177246],["ଥା",-13.125399589538574],["▁ਸੈ",-13.125408172607422],["après",-
13.125411987304688],["▁личните",-13.125411987304688],["стос",-13.125421524047852],["▁squadra",-13.125426292419434],["▁djur",-13.12543773651123],["ທັງ",-13.125450134277344],["loko",-13.125454902648926],["álni",-13.12547206878662],["zej",-13.125473022460938],["عتقد",-13.125505447387695],["▁најголем",-13.12551212310791],["psis",-13.125515937805176],["▁nauti",-13.12559986114502],["цей",-13.125614166259766],["▁mester",-13.125661849975586],["▁0,3",-13.125666618347168],["▁rezultati",-13.125680923461914],["idhi",-13.125682830810549],["▁naší",-13.125691413879396],["▁realizza",-13.125702857971191],["▁henge",-13.125732421875],["四个",-13.125734329223633],["тица",-13.12574577331543],["keri",-13.125752449035645],["했고",-13.125776290893556],["▁Мол",-13.125778198242188],["▁दिख",-13.12578296661377],["ছিল",-13.125786781311035],["皆さん",-13.12579345703125],["▁근",-13.125812530517578],["тет",-13.12582015991211],["▁EG",-13.12583827972412],["▁iskolá",-13.125840187072754],["▁atrodo",-13.1258544921875],["কের",-13.125856399536133],["▁реклама",-13.125863075256348],["▁señor",-13.125866889953612],["▁creati",-13.125868797302246],["▁४०",-13.125873565673828],["▁darīt",-13.125896453857422],["נתי",-13.125911712646484],["▁SEC",-13.125911712646484],["ønn",-13.125913619995115],["erweise",-13.125916481018066],["áře",-13.12595272064209],["vimo",-13.125978469848633],["▁FR",-13.125993728637695],["▁teslim",-13.125995635986328],["ujesz",-13.12599754333496],["طه",-13.126026153564451],["ရုံ",-13.12603759765625],["▁compa",-13.126039505004885],["ಬೇಕಾದ",-13.12605094909668],["▁onko",-13.126078605651855],["pne",-13.126087188720703],["▁രാ",-13.126128196716309],["▁NAM",-13.126129150390623],["VB",-13.126133918762209],["▁moći",-13.126141548156738],["▁Gees",-13.126147270202637],["▁наоѓа",-13.126147270202637],["即時",-13.126163482666016],["昂",-13.126185417175291],["alem",-13.126194953918455],["▁PlayStation",-13.126222610473633],["▁paslaugos",-13.126222610473633],["▁такъв",-13.126222610473633],["▁мамиле",-13.126224517822266],["▁ਨਾਨਕ",-13.126227378845217],["▁аюулгүй",-13.12623405456543],["満足",-13.126235008239746],["ઇન",-13.126235961914062],["▁الساعة",-13.12623691558838],["▁शाखा",-13.12623691558838],["▁सम्मेलन",-13.12625217437744],["▁predstavnik",-13.126266479492188],["▁गैर",-13.12627410888672],["▁կյանքի",-13.126280784606934],["İR",-13.12628936767578],["▁неа",-13.126307487487791],["▁অনলাইন",-13.126328468322754],["▁вплив",-13.126334190368652],["▁lance",-13.126349449157717],["▁deliver",-13.126359939575195],["▁தொடர்ந்து",-13.12636661529541],["чара",-13.126379013061523],["EUR",-13.126392364501951],["[8]",-13.126397132873535],["taler",-13.126404762268066],["▁muista",-13.12641716003418],["▁támogat",-13.12641716003418],["halen",-13.126428604125977],["▁huaj",-13.12643814086914],["▁전에",-13.12644386291504],["ເຈົ້າ",-13.126466751098633],["▁რათა",-13.126472473144531],["▁pictures",-13.126504898071287],["نر",-13.126537322998049],["▁labo",-13.126547813415527],["▁penetra",-13.126579284667969],["ढी",-13.126593589782717],["بك",-13.126596450805664],["▁سامانه",-13.126605033874512],["将于",-13.126612663269045],["خطط",-13.126646995544434],["▁konum",-13.126646995544434],["▁गेल्या",-13.12664794921875],["▁hl",-13.126651763916016],["▁Seimo",-13.126679420471191],["▁græ",-13.126713752746582],["rique",-13.126733779907228],["▁рассматрива",-13.126745223999023],["ամաս",-13.12674617767334],["▁актер",-13.126751899719238],["▁setzt",-13.12678337097168],["多种",-13.126798629760742],["文学",-13.126815795898438],["gau",-13.126842498779297],["▁χειρ",-13.126858711242676],["▁распо",-13.126859664916992
],["ający",-13.126867294311523],["▁කොහොම",-13.126924514770508],["▁agrega",-13.126925468444824],["▁Azad",-13.126965522766112],["▁command",-13.12699031829834],["▁Partido",-13.126995086669922],["▁ráno",-13.127013206481934],["▁كلمة",-13.1270170211792],["▁колкото",-13.127018928527832],["的感觉",-13.127033233642578],["ΠΟ",-13.127059936523438],["んですが",-13.127071380615234],["▁ods",-13.127082824707031],["▁شە",-13.12711238861084],["▁ادعا",-13.127124786376951],["יכה",-13.12714385986328],["▁Talk",-13.127147674560549],["علامه",-13.127158164978027],["▁Neue",-13.12716579437256],["ሥራ",-13.127167701721191],["ੰਤ",-13.127189636230469],["▁બાળકો",-13.127204895019531],["▁Cui",-13.127208709716797],["▁సమాచారం",-13.127209663391112],["ებულია",-13.127214431762695],["▁있다고",-13.127215385437012],["▁imagem",-13.127251625061035],["រក",-13.1272611618042],["▁mhaith",-13.127279281616213],["кта",-13.12728500366211],["▁luftë",-13.127311706542969],["▁energii",-13.127324104309082],["▁téh",-13.127334594726562],["▁mno",-13.127355575561523],["▁ಮರ",-13.12739372253418],["ध्व",-13.12742805480957],["拳",-13.127429962158203],["주는",-13.127474784851074],["ሰባ",-13.12747573852539],["的想法",-13.127488136291504],["▁tl",-13.12748908996582],["訳",-13.127493858337402],["処理",-13.12749481201172],["在美国",-13.127498626708984],["▁വ്യക്തി",-13.127501487731934],["▁septiembre",-13.12750244140625],["▁마지막",-13.12750244140625],["▁ଏଥି",-13.127503395080566],["▁nebuvo",-13.127504348754885],["▁જણાવ્યું",-13.127507209777832],["▁दूध",-13.127516746520996],["▁שירותי",-13.127522468566896],["▁ഓഫ്",-13.127523422241213],["▁Парламент",-13.127533912658691],["▁kelio",-13.127546310424805],["▁한번",-13.127572059631348],["▁człowieka",-13.127574920654297],["▁zasluž",-13.127595901489258],["▁gati",-13.127596855163574],["▁gdyż",-13.127598762512209],["▁животот",-13.127598762512209],["lerne",-13.127607345581056],["▁यांची",-13.12761402130127],["▁Çocuk",-13.127628326416016],["ject",-13.127657890319824],["▁prisen",-13.127666473388672],["сексуал",-13.127676963806152],["▁تىل",-13.127679824829102],["▁Kesk",-13.127686500549316],["GV",-13.127715110778809],["ସ୍ଥ",-13.127715110778809],["▁erabil",-13.127753257751465],["▁playing",-13.127760887145996],["▁Meu",-13.127769470214844],["▁прати",-13.12781047821045],["тарға",-13.127815246582031],["小学",-13.127819061279297],["άδα",-13.127850532531738],["▁terminar",-13.12786102294922],["▁секоја",-13.127881050109863],["ماق",-13.127936363220217],["น่าจะ",-13.127937316894531],["▁משל",-13.127992630004885],["▁lijek",-13.12799835205078],["▁stranice",-13.128015518188477],["▁moramo",-13.12804889678955],["▁vendim",-13.128049850463867],["你要",-13.128055572509766],["தன",-13.128073692321776],["▁device",-13.12808609008789],["lokal",-13.128128051757812],["ượng",-13.128131866455078],["phon",-13.128134727478027],["зак",-13.12814235687256],["дөгү",-13.12816047668457],["▁బిగ్",-13.128213882446287],["ออกแบบ",-13.128219604492188],["▁tanca",-13.12825870513916],["概",-13.128273010253906],["▁হে",-13.128296852111816],["▁temo",-13.128304481506348],["▁Protest",-13.128336906433104],["▁کانال",-13.12836456298828],["очно",-13.128378868103027],["▁စစ္",-13.12838363647461],["▁нашай",-13.12839412689209],["▁dersom",-13.128398895263672],["▁received",-13.128417015075684],["截至",-13.128417015075684],["kub",-13.128421783447266],["pę",-13.128425598144531],["现象",-13.128439903259276],["▁nechá",-13.128448486328123],["▁SAT",-13.128451347351074],["ুক",-13.128467559814451],["▁Primo",-13.128470420837402],["▁ຖືກ",-13.128499984741213],["жні",-13.128507614135742],["▁naha",-13.128507614135742],["▁چینی",-13.12852001
1901855],["▁pomer",-13.128528594970703],["▁verlo",-13.12855052947998],["▁réseau",-13.128609657287598],["▁posting",-13.128610610961914],["起了",-13.128692626953123],["▁تشدد",-13.128721237182615],["热情",-13.128732681274414],["中国人",-13.12873649597168],["venta",-13.128748893737791],["経営",-13.128764152526855],["▁grupy",-13.128765106201172],["放送",-13.128769874572754],["凱",-13.128780364990234],["ױ",-13.128783226013184],["▁öyrən",-13.1287841796875],["촌",-13.1287841796875],["▁Tarragona",-13.128785133361816],["▁dilaksanakan",-13.128785133361816],["▁febbraio",-13.128785133361816],["▁pembelajaran",-13.128785133361816],["▁заўсёды",-13.128785133361816],["▁помещения",-13.128785133361816],["▁اقوام",-13.128785133361816],["▁मोबाईल",-13.128785133361816],["▁তৈরি",-13.128785133361816],["▁যোগাযোগ",-13.128785133361816],["▁ấm",-13.128785133361816],["▁maksimum",-13.128786087036133],["▁akşam",-13.12878704071045],["▁lângă",-13.128787994384766],["▁komentāru",-13.128789901733398],["▁lördag",-13.128790855407717],["▁లేకుండా",-13.12879467010498],["▁പുറത്ത",-13.12879467010498],["▁పోస్ట్",-13.128796577453612],["▁τρεις",-13.12880039215088],["▁кеңеш",-13.12880039215088],["▁ಹೆಸರು",-13.128811836242676],["▁çevir",-13.12881851196289],["▁කොල්ල",-13.128824234008787],["▁izboljša",-13.128827095031738],["▁Друг",-13.128828048706056],["ahir",-13.128850936889648],["▁събира",-13.128859519958496],["▁የወ",-13.128859519958496],["▁dicen",-13.128890991210938],["▁mengajar",-13.128913879394531],["▁azokat",-13.12892723083496],["▁vyksta",-13.128955841064451],["加強",-13.12896728515625],["▁தமிழ",-13.129026412963867],["pisy",-13.129033088684082],["▁shpall",-13.129058837890623],["პრ",-13.129082679748535],["▁साउन",-13.129093170166016],["▁뜻",-13.129122734069824],["καν",-13.129138946533203],["▁пристап",-13.12914752960205],["▁жең",-13.129150390625],["ðan",-13.129158973693848],["引发",-13.129158973693848],["▁yoktur",-13.129164695739746],["kommun",-13.129179954528809],["▁európai",-13.129209518432615],["▁hundred",-13.129210472106934],["▁asiat",-13.129216194152832],["ути",-13.129239082336426],["ួល",-13.12924575805664],["ënë",-13.129258155822754],["▁කියන්නෙ",-13.129258155822754],["rose",-13.129261016845703],["уля",-13.129270553588867],["▁razı",-13.129289627075195],["קער",-13.129301071166992],["৮",-13.129302978515623],["ចម្",-13.1293363571167],["antu",-13.129358291625977],["▁põhjus",-13.12936782836914],["▁defa",-13.129368782043455],["穩",-13.129392623901367],["成員",-13.12941074371338],["CY",-13.12941551208496],["ေသ",-13.129419326782228],["▁печ",-13.129420280456545],["▁сваки",-13.129446983337402],["සම්",-13.129451751708984],["▁apsaugos",-13.1294527053833],["▁للع",-13.12945556640625],["▁igenom",-13.1294584274292],["注目",-13.129459381103516],["▁descobrir",-13.129460334777832],["者が",-13.12946605682373],["▁sorprend",-13.129475593566896],["▁університет",-13.129497528076172],["▁උනා",-13.129520416259766],["▁rene",-13.129524230957031],["யார்",-13.129542350769045],["ရုံး",-13.129545211791992],["▁Албан",-13.129563331604004],["的公司",-13.129573822021484],["байт",-13.12957763671875],["▁aprob",-13.129595756530762],["▁भाजप",-13.129596710205078],["اظ",-13.129603385925291],["ميز",-13.129611015319824],["אַמ",-13.129612922668455],["katzen",-13.12962245941162],["▁perdere",-13.129633903503418],["entitat",-13.12965202331543],["하신",-13.129685401916504],["ēšanu",-13.129705429077148],["▁wefan",-13.12970733642578],["شقا",-13.129714012145996],["ΗΣ",-13.129765510559082],["▁গু",-13.129772186279297],["νέ",-13.129782676696776],["ЧА",-13.129810333251951],["ragon",-13.129825592041016],["സേ",-13.1298666000
3662],["tepe",-13.12987232208252],["تری",-13.129948616027832],["▁항",-13.129974365234377],["Apple",-13.129976272583008],["▁радикал",-13.129984855651855],["▁persoana",-13.130008697509766],["赚",-13.130016326904297],["碗",-13.13002586364746],["▁Quant",-13.13003635406494],["▁економски",-13.130040168762209],["▁Lat",-13.13006591796875],["▁kebanyakan",-13.1300687789917],["▁తొలి",-13.1300687789917],["作り",-13.130069732666016],["▁అతని",-13.130070686340332],["▁છીએ",-13.130072593688965],["▁rëndësishme",-13.130075454711914],["▁Airlines",-13.13009262084961],["▁pistol",-13.130097389221191],["▁domenica",-13.130107879638672],["▁තමන්ගේ",-13.130111694335938],["▁Πώς",-13.130120277404783],["By",-13.130134582519531],["▁Hope",-13.130134582519531],["▁piyasa",-13.130135536193848],["▁kampuni",-13.130149841308594],["▁Petit",-13.130152702331545],["▁කොටස",-13.13018798828125],["▁gỗ",-13.13019847869873],["▁trgov",-13.13021755218506],["▁استقبال",-13.13021755218506],["▁başına",-13.130223274230955],["▁പറയുന്നത്",-13.130226135253906],["Zu",-13.130231857299805],["ጣል",-13.130247116088867],["▁tierra",-13.130252838134766],["▁tovaru",-13.130252838134766],["▁မွာ",-13.13030242919922],["▁Sumber",-13.130304336547852],["▁Dalka",-13.130316734313965],["▁hiljem",-13.130319595336914],["แผ่น",-13.130331039428713],["รร",-13.13033390045166],["▁evitare",-13.130348205566406],["▁अर्",-13.130359649658203],["▁tvár",-13.13036823272705],["▁fayda",-13.13037395477295],["▁ଓଡ଼ିଆ",-13.130379676818848],["▁خارجہ",-13.13038444519043],["▁Kepala",-13.130398750305176],["לס",-13.130406379699709],["Online",-13.13041877746582],["bruik",-13.13041877746582],["▁registrering",-13.130444526672363],["▁глу",-13.130456924438477],["▁sommige",-13.13046169281006],["طبع",-13.130473136901855],["dajte",-13.13049030303955],["▁دچار",-13.130491256713867],["ülü",-13.130508422851562],["wb",-13.130515098571776],["▁vanha",-13.13051700592041],["▁площад",-13.130537986755373],["▁Helt",-13.130544662475586],["▁lewer",-13.13058376312256],["ក្ខ",-13.130586624145508],["▁(*",-13.130586624145508],["▁cheann",-13.130596160888672],["▁wilaya",-13.130603790283203],["▁jeux",-13.130608558654783],["▁risulta",-13.130610466003418],["inud",-13.130630493164062],["▁قطاع",-13.130640029907228],["▁의해",-13.130644798278809],["บาท",-13.130648612976074],["▁шаары",-13.1306791305542],["▁involved",-13.130680084228516],["мок",-13.13070583343506],["▁inner",-13.130729675292969],["ಕೊ",-13.130738258361816],["▁Wasiirka",-13.130755424499512],["arsi",-13.130757331848145],["▁سوز",-13.130764961242676],["▁vzťah",-13.130769729614258],["▁заказ",-13.130770683288574],["ыў",-13.13079833984375],["▁biblio",-13.130803108215332],["owali",-13.130805015563965],["আপ",-13.130807876586914],["чулар",-13.130824089050291],["დელ",-13.13086986541748],["ΣΕ",-13.130885124206545],["▁ನಿರ್",-13.130887985229492],["▁мужчин",-13.130897521972656],["▁Ек",-13.130908012390137],["buran",-13.13090991973877],["▁ब्रह्म",-13.130910873413086],["Kİ",-13.130921363830566],["▁تربیت",-13.130928993225098],["ਿੰਦਰ",-13.13096809387207],["▁Fac",-13.131014823913574],["пос",-13.131023406982422],["▁Lern",-13.131036758422852],["riai",-13.131038665771484],["▁rząd",-13.131046295166016],["ที่มา",-13.131058692932127],["▁опер",-13.13107967376709],["▁ተስፋ",-13.131109237670898],["的感覺",-13.131117820739746],["▁отговорност",-13.13113784790039],["म्य",-13.13115406036377],["▁alone",-13.131159782409668],["usio",-13.13116455078125],["cule",-13.131183624267578],["▁المو",-13.131183624267578],["工人",-13.131184577941896],["▁sociali",-13.13119888305664],["▁koor",-13.13121509552002],["ឹ",-13.1312189
10217283],["▁stai",-13.131226539611816],["töltés",-13.131240844726562],["cycle",-13.131244659423828],["▁califica",-13.131254196166992],["至於",-13.131277084350586],["▁වික්",-13.13127899169922],["▁따",-13.1312894821167],["遵守",-13.131304740905762],["▁consult",-13.13131046295166],["▁mời",-13.13132095336914],["▁Matka",-13.13132667541504],["分類",-13.131335258483888],["кне",-13.13133716583252],["▁nossas",-13.131343841552734],["▁muutama",-13.131352424621582],["พันธุ์",-13.131353378295898],["ล่ะ",-13.131353378295898],["▁अच्छी",-13.131353378295898],["▁duomenų",-13.131357192993164],["assurance",-13.131362915039062],["▁Gminy",-13.13136386871338],["▁הייתי",-13.131390571594238],["▁دېگەن",-13.131399154663086],["izados",-13.131400108337402],["▁cámara",-13.131406784057615],["▁न्यूज़",-13.131413459777832],["ruth",-13.131433486938477],["▁sentiasa",-13.131433486938477],["uksiin",-13.131440162658691],["▁بودجه",-13.131446838378906],["▁mož",-13.131471633911133],["▁goût",-13.13148593902588],["▁позволява",-13.131509780883787],["ປາ",-13.131512641906738],["▁živo",-13.13151741027832],["▁קיבל",-13.131526947021484],["▁مكتب",-13.13153076171875],["▁தங்க",-13.131546974182127],["нням",-13.131553649902344],["yey",-13.131556510925291],["▁depozit",-13.131558418273926],["▁tyyli",-13.131573677062988],["кога",-13.13160228729248],["▁červen",-13.131611824035645],["▁Tinggi",-13.131632804870604],["▁pusti",-13.131637573242188],["ార",-13.131669044494627],["ողների",-13.13167953491211],["ដើម",-13.131689071655272],["rente",-13.131690979003906],["▁Níl",-13.131694793701172],["▁aquellos",-13.131698608398438],["買い",-13.131706237792969],["వారి",-13.131708145141602],["▁16.00",-13.13176155090332],["▁évben",-13.131800651550291],["гроз",-13.131803512573242],["hair",-13.131828308105469],["حمد",-13.13183307647705],["▁imprese",-13.13183879852295],["▁rô",-13.131858825683594],["▁Bee",-13.131861686706545],["häng",-13.13188648223877],["랜",-13.131897926330566],["▁Однак",-13.131912231445312],["ικαν",-13.131916046142578],["tande",-13.131916999816896],["▁Jeigu",-13.131924629211426],["友達",-13.131933212280272],["▁කොහොමද",-13.131935119628906],["▁гэ",-13.131938934326172],["▁Helen",-13.131953239440918],["αγωγή",-13.131976127624512],["onas",-13.13198947906494],["▁åben",-13.131990432739258],["▁recog",-13.132009506225586],["▁oplossing",-13.132019996643066],["▁साधन",-13.132020950317385],["▁CNN",-13.132047653198242],["▁1,000",-13.132084846496582],["▁kuvaa",-13.132085800170898],["▁IE",-13.132095336914062],["▁споразум",-13.13210678100586],["▁korištenje",-13.13211727142334],["stās",-13.13213062286377],["▁ugovor",-13.132142066955566],["▁пять",-13.132153511047363],["▁কর্ম",-13.13219165802002],["▁شعار",-13.132211685180664],["▁concerto",-13.132245063781738],["өж",-13.132277488708496],["Ай",-13.13230323791504],["▁ซึ่งเป็น",-13.132322311401367],["私人",-13.13234043121338],["▁Kv",-13.132342338562012],["▁notifica",-13.132359504699709],["పెట్ట",-13.132406234741213],["▁прете",-13.13244342803955],["▁garn",-13.132444381713867],["uck",-13.13247299194336],["arb",-13.132529258728027],["γλ",-13.132551193237305],["▁ທໍາ",-13.132579803466797],["▁państw",-13.13258457183838],["BET",-13.132587432861328],["悠",-13.132594108581545],["▁нашия",-13.13259506225586],["▁raun",-13.132601737976074],["鄭",-13.132609367370604],["somme",-13.132635116577148],["▁Persekutuan",-13.132640838623049],["▁официально",-13.13264274597168],["▁jedną",-13.132650375366213],["ружа",-13.132655143737791],["▁Gaeltachta",-13.132657051086426],["▁assistent",-13.13265895843506],["▁Distribu",-13.132660865783691],["▁situació",-13.132665
634155272],["▁thở",-13.132684707641602],["▁հայտարար",-13.132688522338867],["▁erbyn",-13.132694244384766],["▁வை",-13.132698059082031],["hip",-13.132699012756348],["בער",-13.132702827453612],["▁Виктор",-13.13271427154541],["一本",-13.132715225219728],["ੇਟ",-13.13272190093994],["▁(2012)",-13.13272762298584],["▁fü",-13.132731437683104],["▁aspecte",-13.13273811340332],["ึง",-13.132747650146484],["▁создания",-13.13275146484375],["weithio",-13.13276195526123],["事務",-13.13277816772461],["▁produkcji",-13.13278102874756],["WC",-13.132798194885254],["ាស់",-13.132806777954102],["لىنىش",-13.132823944091797],["▁دليل",-13.132829666137695],["▁ഒറ്റ",-13.132837295532228],["hawa",-13.1328706741333],["▁прашања",-13.132880210876465],["สต",-13.132917404174805],["▁cauta",-13.132917404174805],["▁pude",-13.132928848266602],["деш",-13.132930755615234],["▁מאז",-13.13293170928955],["yarak",-13.132940292358398],["տում",-13.13296127319336],["▁팔",-13.132964134216309],["▁vlád",-13.13300323486328],["...]",-13.133004188537598],["▁lehtë",-13.133028984069824],["▁faqat",-13.133039474487305],["▁powodu",-13.13308048248291],["дерге",-13.133096694946287],["講座",-13.133108139038086],["LJ",-13.133119583129885],["язково",-13.133124351501465],["mäe",-13.133134841918944],["▁dátum",-13.13314437866211],["quest",-13.133150100708008],["iswa",-13.13315486907959],["▁ಎನ್",-13.133163452148438],["▁חברות",-13.133170127868652],["prog",-13.133193969726562],["▁masukkan",-13.133197784423828],["▁infect",-13.133198738098145],["▁가능한",-13.133217811584473],["สถาน",-13.133231163024902],["▁Lec",-13.133233070373535],["▁tái",-13.133243560791016],["吸收",-13.133281707763672],["▁Plant",-13.133296012878418],["之旅",-13.13331699371338],["▁diversaj",-13.133356094360352],["▁banner",-13.13338851928711],["jimas",-13.133397102355955],["▁erő",-13.133398056030272],["▁liikme",-13.13339900970459],["font",-13.133430480957031],["家長",-13.133442878723145],["▁Produktion",-13.133455276489258],["«.",-13.133458137512209],["itten",-13.133475303649902],["▁rezervate",-13.13348388671875],["▁rheol",-13.133495330810549],["是一家",-13.133498191833496],["▁تلویزیون",-13.133504867553713],["▁Wax",-13.13354778289795],["▁மத",-13.133566856384276],["▁Rez",-13.133569717407228],["▁ຊາດ",-13.133587837219238],["maš",-13.13359546661377],["▁nastav",-13.133596420288086],["▁máli",-13.133609771728516],["ീന",-13.133625030517578],["tezza",-13.13369846343994],["ပါဘူး။",-13.133743286132812],["想象",-13.133748054504396],["▁ដែរ",-13.13377857208252],["▁сне",-13.133785247802734],["בחר",-13.13378620147705],["▁odgovori",-13.133808135986328],["▁portar",-13.133808135986328],["▁welches",-13.133843421936035],["▁៧",-13.13387680053711],["ສໍາຄັນ",-13.13392734527588],["▁εικόνα",-13.133929252624512],["▁ശബരിമല",-13.133929252624512],["▁යටතේ",-13.133929252624512],["▁propo",-13.133930206298828],["▁δυνατότητα",-13.133930206298828],["▁संसार",-13.133950233459473],["▁εγκ",-13.133955001831056],["▁капітал",-13.133957862854004],["▁беларускага",-13.13398265838623],["▁gestão",-13.13398551940918],["▁Bangkok",-13.133991241455078],["▁имени",-13.133992195129396],["▁lex",-13.133993148803713],["▁bè",-13.134015083312988],["▁سوا",-13.134037971496582],["▁growth",-13.134039878845217],["一方面",-13.13404369354248],["▁Será",-13.13405704498291],["▁človeka",-13.134075164794922],["▁06.",-13.134078979492188],["▁teden",-13.134100914001465],["▁liberdade",-13.134108543395996],["▁advies",-13.134121894836426],["▁Saba",-13.134122848510742],["▁jaunie",-13.134160995483398],["▁merit",-13.134177207946776],["אהבה",-13.134200096130373],["▁hypo",-13.134206771850586],["Twitter",
-13.13421630859375],["▁Нека",-13.134230613708496],["▁visible",-13.134242057800291],["▁കൊടുക്ക",-13.134244918823242],["chou",-13.134252548217772],["belt",-13.13425350189209],["liğin",-13.134258270263672],["▁пълно",-13.134262084960938],["欢",-13.134315490722656],["STAN",-13.13434886932373],["▁säkert",-13.134350776672363],["▁мое",-13.134357452392578],["дается",-13.134434700012209],["▁jesen",-13.134439468383787],["想法",-13.13445281982422],["ANTI",-13.134467124938965],["▁الشا",-13.134467124938965],["hic",-13.13447093963623],["拍摄",-13.13447380065918],["▁fəaliyyəti",-13.13449001312256],["▁alas",-13.134490966796877],["ከብ",-13.134496688842772],["иите",-13.13449764251709],["▁fersk",-13.13450527191162],["▁tempoh",-13.13452434539795],["▁Utara",-13.13454532623291],["▁kisi",-13.134559631347656],["▁abin",-13.134591102600098],["ँड",-13.134607315063477],["▁duniani",-13.134623527526855],["kkaus",-13.134636878967283],["▁Senhor",-13.1346435546875],["▁circum",-13.134645462036133],["▁къде",-13.134649276733398],["▁Ağ",-13.13465404510498],["▁Мур",-13.134688377380373],["▁skrá",-13.13469409942627],["お客様の",-13.134697914123535],["▁ចង់",-13.13470458984375],["▁ദി",-13.134726524353027],["ાળ",-13.134733200073242],["▁проч",-13.134733200073242],["▁Kid",-13.134739875793455],["▁alınması",-13.13474178314209],["▁hieronta",-13.134767532348633],["ṃ",-13.134780883789062],["▁نکن",-13.13478183746338],["▁ανε",-13.134818077087402],["▁جلو",-13.134825706481934],["เกษตร",-13.134827613830566],["štvo",-13.134843826293944],["▁ಅದ",-13.13485050201416],["▁evt",-13.134859085083008],["▁Helse",-13.134905815124512],["▁zob",-13.134913444519045],["▁ital",-13.134921073913574],["▁laufen",-13.134967803955078],["▁высоко",-13.134970664978027],["რთული",-13.13499927520752],["impa",-13.135001182556152],["▁cíl",-13.135005950927734],["krank",-13.135040283203123],["تقال",-13.13504409790039],["▁primum",-13.13506031036377],["▁аромат",-13.135066986083984],["▁bezpečnost",-13.135103225708008],["▁humanitar",-13.135114669799805],["صحة",-13.135119438171388],["領導",-13.135129928588867],["▁Espa",-13.135132789611816],["▁incl",-13.135132789611816],["jamos",-13.135174751281738],["экс",-13.135193824768066],["▁Titan",-13.135194778442385],["▁Cysylltu",-13.13521957397461],["▁сожалению",-13.13521957397461],["▁թույլ",-13.13521957397461],["▁اشتباه",-13.13521957397461],["▁मंसिर",-13.13521957397461],["▁হিসেবে",-13.13521957397461],["▁පැහැදිලි",-13.13521957397461],["▁indiferent",-13.135220527648926],["▁חשבון",-13.135220527648926],["▁பயன்படுத்த",-13.135220527648926],["▁laakiin",-13.135224342346191],["▁egoera",-13.13523292541504],["rət",-13.13524055480957],["▁zones",-13.135249137878418],["več",-13.135260581970217],["▁කරුණු",-13.135275840759276],["▁Beck",-13.135276794433594],["▁yeterli",-13.135276794433594],["▁akad",-13.135294914245604],["ধান",-13.135297775268556],["reichen",-13.135302543640137],["▁공유",-13.135306358337402],["▁ನ್ಯೂಸ್",-13.13530731201172],["റിന്റെ",-13.1353120803833],["▁جدی",-13.135348320007324],["ivel",-13.135351181030272],["anana",-13.135361671447754],["▁valsti",-13.135368347167969],["sjef",-13.13539981842041],["▁teigia",-13.13539981842041],["αίρ",-13.13540267944336],["▁vintage",-13.135409355163574],["▁Хе",-13.135419845581056],["▁تنفيذ",-13.13546657562256],["anggap",-13.135470390319824],["κτο",-13.13547706604004],["visão",-13.135478019714355],["▁захист",-13.135492324829102],["▁Təhsil",-13.13554859161377],["ရွင္း",-13.135558128356934],["tické",-13.135574340820312],["▁falsa",-13.135578155517578],["ляти",-13.135663032531738],["čnú",-13.135677337646484],["Tele",-13.13568401336
67],["▁nasc",-13.135685920715332],["เธ",-13.13568878173828],["▁писател",-13.135714530944824],["iglia",-13.135750770568848],["▁nenhum",-13.135771751403809],["oče",-13.135777473449709],["diko",-13.135786056518556],["国家的",-13.135805130004885],["▁brugt",-13.135845184326172],["ьной",-13.13587760925293],["▁prezidenta",-13.135879516601562],["ਰਤ",-13.135882377624512],["▁एल",-13.135894775390623],["一批",-13.135894775390623],["▁Resultat",-13.135920524597168],["▁biografi",-13.135942459106444],["rías",-13.135947227478027],["▁Bili",-13.13595485687256],["▁годината",-13.135991096496582],["面向",-13.136024475097656],["لاب",-13.136080741882324],["றிய",-13.136089324951172],["▁ئەل",-13.136098861694336],["▁indem",-13.136099815368652],["▁діє",-13.13613224029541],["மணி",-13.136136054992676],["▁reply",-13.136143684387209],["▁Fik",-13.13614559173584],["报名",-13.136187553405762],["▁לוח",-13.136190414428713],["ಂಟ",-13.13619613647461],["行き",-13.13620376586914],["▁nabo",-13.13620948791504],["енка",-13.136212348937988],["sasa",-13.136219024658203],["和其他",-13.136229515075684],["droom",-13.136231422424316],["ლობის",-13.136258125305176],["▁Христа",-13.136258125305176],["則是",-13.136275291442873],["▁leda",-13.136280059814451],["▁מוס",-13.136322975158691],["▁goob",-13.136358261108398],["▁दुख",-13.136370658874512],["▁надеж",-13.136372566223145],["▁niemal",-13.136383056640623],["▁diya",-13.1364107131958],["诊",-13.136454582214355],["▁хав",-13.136466026306152],["▁voluntat",-13.136475563049316],["களே",-13.136483192443848],["ाइड",-13.136487007141112],["リンク",-13.13650608062744],["〉",-13.136509895324709],["▁vergangenen",-13.13651180267334],["▁ଗିରଫ",-13.13651180267334],["▁учасників",-13.136513710021973],["▁สมัครสมาชิก",-13.136514663696287],["▁ነገሮች",-13.136516571044922],["▁జరిగింది",-13.136517524719238],["▁compliment",-13.136518478393556],["廠商",-13.136520385742188],["růst",-13.136528968811035],["▁Xosé",-13.136531829833984],["доле",-13.136544227600098],["ຂັ້ນ",-13.136558532714844],["sulta",-13.136569023132324],["▁felicit",-13.13656997680664],["ūnas",-13.136578559875488],["კოს",-13.136578559875488],["▁কর",-13.136578559875488],["▁Lep",-13.136595726013184],["▁Hør",-13.136597633361816],["▁spremlja",-13.13660717010498],["▁menolak",-13.136632919311523],["觀眾",-13.136636734008787],["▁Αρχική",-13.136666297912598],["द्वारा",-13.136672973632812],["▁قوش",-13.136673927307127],["ဘယ်",-13.136698722839355],["uvad",-13.13676929473877],["▁završi",-13.136770248413086],["▁Fő",-13.136773109436035],["▁dobili",-13.136783599853516],["ಧಾರ",-13.136804580688477],["▁آگے",-13.13680648803711],["▁trúc",-13.13680934906006],["僕",-13.136829376220703],["גת",-13.13683032989502],["ΕΠ",-13.136836051940918],["folge",-13.136865615844728],["▁staf",-13.136865615844728],["ေနာက္",-13.136870384216309],["▁וע",-13.13687515258789],["▁ਈ",-13.136886596679688],["진다",-13.136927604675291],["▁sõit",-13.136940956115724],["▁číta",-13.13694667816162],["▁અનુ",-13.136951446533203],["ኝነት",-13.136971473693848],["jcie",-13.136974334716797],["▁pleno",-13.136975288391112],["▁водата",-13.136978149414062],["vania",-13.137001991271973],["fuata",-13.137019157409668],["▁айт",-13.137022972106934],["മന്",-13.137036323547363],["▁peal",-13.137039184570312],["▁вери",-13.137072563171388],["suoja",-13.137084007263184],["▁хришћан",-13.137084007263184],["▁Skor",-13.137093544006348],["ರ್ಡ್",-13.137109756469728],["ੱਸ",-13.137114524841309],["చర్",-13.13713550567627],["يۇ",-13.137151718139648],["▁Αλλά",-13.137168884277344],["ូល",-13.137174606323242],["▁svobod",-13.137174606323242],["▁Pazar",-13.13719367980957],["AGE",-13
.137205123901367],["▁نړیوال",-13.137231826782228],["инки",-13.137232780456545],["▁nurody",-13.13724136352539],["حات",-13.137247085571287],["▁created",-13.137261390686035],["▁שאלות",-13.137274742126465],["▁हरेक",-13.13727569580078],["▁enkrat",-13.137276649475098],["▁alternat",-13.137303352355955],["▁بطور",-13.137325286865234],["γιο",-13.137328147888184],["ولت",-13.137377738952637],["▁جلوگیر",-13.13737964630127],["▁dwu",-13.13739776611328],["හැ",-13.137398719787598],["▁θυμ",-13.137444496154783],["▁മാറ്റി",-13.137450218200684],["▁representantes",-13.137459754943848],["ளம்",-13.137486457824709],["▁tocar",-13.137493133544922],["теле",-13.137503623962402],["▁하면",-13.13752269744873],["▁besed",-13.137524604797363],["▁navrh",-13.137555122375488],["暑",-13.137572288513184],["▁продукта",-13.137587547302246],["▁keta",-13.13760471343994],["▁Innova",-13.137609481811523],["▁observar",-13.137677192687988],["eihin",-13.137690544128418],["▁viitor",-13.137694358825684],["▁Աս",-13.137709617614746],["▁крају",-13.137714385986328],["વાનો",-13.137722969055176],["▁factura",-13.137722969055176],["▁Условия",-13.137723922729492],["edik",-13.137725830078123],["▁డ",-13.137727737426758],["faglig",-13.13773250579834],["κή",-13.137737274169922],["۱۳",-13.137738227844238],["spēj",-13.137742042541504],["▁stream",-13.137746810913086],["-300",-13.137770652770996],["ລວມ",-13.13780403137207],["▁spännande",-13.137804985046388],["▁ٹیکنالوجی",-13.137804985046388],["▁ڏانهن",-13.137804985046388],["▁తమిళ",-13.137804985046388],["▁Сабақтың",-13.137812614440918],["zida",-13.137816429138184],["เน้น",-13.13783359527588],["▁keur",-13.137839317321776],["▁उद्",-13.137845993041992],["tags",-13.137853622436523],["▁Велики",-13.137856483459473],["▁zorunda",-13.137871742248535],["▁स्वीकार",-13.137872695922852],["▁ఎంతో",-13.1378755569458],["▁deputado",-13.137883186340332],["draž",-13.13788890838623],["▁rigor",-13.137907028198242],["▁પુ",-13.137928009033203],["педи",-13.137971878051758],["møte",-13.137984275817873],["ାନ୍ତ",-13.138002395629885],["▁глуп",-13.1380033493042],["▁абал",-13.138017654418944],["BAT",-13.13803005218506],["atia",-13.13803005218506],["▁મી",-13.138038635253906],["ടിയ",-13.13805103302002],["vur",-13.138053894042969],["▁samkvæmt",-13.13807201385498],["▁praksis",-13.138075828552246],["▁užíva",-13.13808250427246],["namen",-13.13808822631836],["TEK",-13.138089179992676],["capaci",-13.13809871673584],["▁Boc",-13.138107299804688],["ților",-13.138117790222168],["▁Marcel",-13.138148307800291],["▁өтө",-13.138179779052734],["๑",-13.13818645477295],["gleda",-13.13819408416748],["▁Iga",-13.13819408416748],["▁roa",-13.138203620910645],["▁बजार",-13.138212203979492],["▁Parque",-13.138248443603516],["▁najveći",-13.138261795043944],["▁yeh",-13.13827419281006],["▁әрекет",-13.138282775878906],["▁પોલીસ",-13.138283729553224],["ÜN",-13.138286590576172],["▁dveh",-13.13828945159912],["她们",-13.13829517364502],["boa",-13.138301849365234],["két",-13.138317108154297],["ያስ",-13.138319969177246],["▁فيلم",-13.138364791870115],["▁Эх",-13.13837718963623],["יחס",-13.138384819030762],["▁материалы",-13.138404846191406],["▁feminin",-13.13843822479248],["看到了",-13.138446807861328],["▁trimis",-13.138453483581545],["læs",-13.138521194458008],["▁platforma",-13.138522148132324],["▁wujud",-13.13852596282959],["ਿਊ",-13.138538360595703],["aysay",-13.13853931427002],["▁lubi",-13.13853931427002],["年轻人",-13.138544082641602],["▁fabricant",-13.1385498046875],["▁୮",-13.138567924499512],["յո",-13.13858413696289],["орган",-13.138595581054688],["▁премиер",-13.138635635375977],["▁मुक्त
",-13.138667106628418],["▁Juga",-13.138704299926758],["кът",-13.13872528076172],["хол",-13.138751029968262],["▁alanı",-13.138758659362791],["▁ರೂಪ",-13.13877296447754],["▁padi",-13.138785362243652],["▁skool",-13.138785362243652],["ວິ",-13.138797760009766],["lesi",-13.138798713684082],["შენ",-13.138802528381348],["▁هسته",-13.138805389404297],["▁kuya",-13.138808250427246],["▁tábor",-13.138840675354004],["▁තරුණ",-13.138859748840332],["mbr",-13.13893222808838],["бры",-13.138951301574709],["ëse",-13.138970375061035],["=3",-13.138980865478516],["▁bergan",-13.139007568359377],["mysle",-13.139010429382324],["naire",-13.139012336730955],["▁savivaldybės",-13.13902187347412],["▁materiaal",-13.139034271240234],["疼",-13.139037132263184],["éir",-13.139043807983398],["gulo",-13.139060974121094],["igazgató",-13.139066696166992],["ppä",-13.139068603515623],["欣",-13.139074325561523],["▁søk",-13.139081954956056],["стік",-13.139092445373535],["साइ",-13.1390962600708],["٩",-13.139097213745115],["仕事を",-13.139098167419434],["หลากหลาย",-13.13909912109375],["▁կառուց",-13.13909912109375],["Ճ",-13.139101028442385],["▁Gobolka",-13.139101028442385],["▁Klaipėdos",-13.139101028442385],["▁երկրորդ",-13.139101028442385],["rák",-13.1391019821167],["tamento",-13.1391019821167],["▁bitcoin",-13.139104843139648],["▁ಸೇವೆ",-13.13910675048828],["▁1100",-13.139110565185549],["zīmē",-13.13911247253418],["צב",-13.13911247253418],["▁министер",-13.139140129089355],["▁intampla",-13.13914394378662],["нап",-13.139179229736328],["▁günlük",-13.139198303222656],["თქვა",-13.139209747314451],["abril",-13.139220237731934],["sib",-13.139230728149414],["▁maintain",-13.13924789428711],["ljive",-13.139264106750488],["▁innovation",-13.139265060424805],["▁приятно",-13.139274597167969],["▁అయ్య",-13.139328002929688],["▁direitos",-13.13934326171875],["▁valoare",-13.139351844787598],["▁gerakan",-13.13935375213623],["वीं",-13.139361381530762],["▁αυτήν",-13.139362335205078],["Pen",-13.139373779296877],["▁Theater",-13.13938808441162],["▁жалға",-13.13939380645752],["▁Andrej",-13.139413833618164],["▁Shqipëria",-13.139421463012695],["▁159",-13.13943099975586],["专业的",-13.139431953430176],["nenie",-13.139455795288086],["▁органы",-13.139479637145996],["▁Coin",-13.139495849609377],["えない",-13.139497756958008],["▁trval",-13.139524459838867],["▁устройства",-13.13954257965088],["▁خش",-13.139552116394045],["伤害",-13.1395845413208],["USB",-13.139589309692385],["▁mogla",-13.139589309692385],["enos",-13.139595985412598],["コース",-13.139596939086914],["▁skla",-13.139606475830078],["▁extraordinar",-13.13965129852295],["▁tarka",-13.139655113220217],["▁нақты",-13.139660835266112],["▁etj",-13.13967227935791],["ဆာ",-13.13968563079834],["пора",-13.139704704284668],["▁Barack",-13.139708518981934],["▁الطب",-13.139766693115234],["▁이에",-13.139772415161133],["▁actie",-13.139788627624512],["jec",-13.139809608459473],["▁Prag",-13.139845848083496],["▁melko",-13.139847755432127],["९",-13.139897346496582],["▁макар",-13.139915466308594],["▁unum",-13.139942169189451],["่ํา",-13.139999389648438],["▁मं",-13.140008926391602],["▁(29",-13.140031814575195],["scul",-13.140034675598145],["raden",-13.140050888061523],["▁síðu",-13.140053749084473],["▁Кирил",-13.140066146850586],["ରରେ",-13.14007568359375],["▁invata",-13.140101432800291],["деление",-13.140108108520508],["ambiente",-13.14011573791504],["▁अरु",-13.14013671875],["▁Гри",-13.140141487121582],["kasin",-13.14014720916748],["▁jangka",-13.140151977539062],["▁فری",-13.140153884887695],["ਗੋ",-13.140176773071287],["suse",-13.1401948928833],["ግል",-13.14
0206336975098],["อร",-13.140254974365234],["▁થવા",-13.140265464782717],["ņēmu",-13.140266418457031],["1]",-13.140291213989258],["യാള",-13.140300750732422],["امر",-13.14030933380127],["FX",-13.140310287475586],["▁құжаттар",-13.140314102172852],["隐",-13.140324592590332],["戀",-13.14032745361328],["בוא",-13.14037799835205],["扭",-13.140381813049316],["ေက်ာင္း",-13.140387535095217],["σία",-13.140390396118164],["फि",-13.140395164489746],["แบรนด์",-13.140396118164062],["▁esfuerzo",-13.140398025512695],["▁helikopter",-13.140398025512695],["▁Bucureşti",-13.140399932861328],["▁Både",-13.140400886535645],["▁Prokuror",-13.140402793884276],["vezetés",-13.140403747558594],["องค์กร",-13.140403747558594],["▁ТЕ",-13.14040470123291],["▁باندې",-13.140408515930176],["▁పొందండి",-13.140413284301758],["ffy",-13.140416145324709],["ზუ",-13.140420913696287],["elde",-13.14043426513672],["▁기존",-13.14043426513672],["▁gives",-13.14044952392578],["ոշ",-13.14047622680664],["...!!!",-13.140501022338867],["аза",-13.14051628112793],["▁Austri",-13.140545845031738],["▁bohat",-13.140555381774902],["▁ఎక్కడ",-13.14055633544922],["antza",-13.140568733215332],["記錄",-13.14057445526123],["▁prec",-13.140583992004396],["нную",-13.140584945678713],["▁Erd",-13.140612602233888],["告知",-13.140623092651367],["▁állat",-13.140637397766112],["▁pobyt",-13.140661239624023],["▁аутор",-13.140674591064451],["▁કમ",-13.140677452087402],["ָה",-13.140679359436035],["▁öll",-13.14069366455078],["▁пости",-13.140695571899414],["▁Τσ",-13.140697479248049],["ေတြ႕",-13.140704154968262],["მად",-13.140708923339844],["▁◆",-13.140729904174805],["力度",-13.14073085784912],["ddin",-13.140748023986816],["2°",-13.14076328277588],["pump",-13.140792846679688],["▁визначен",-13.140796661376951],["with",-13.140830039978027],["▁презента",-13.14083480834961],["▁탈",-13.14083766937256],["정책",-13.14084243774414],["▁Ideen",-13.140869140625],["kow",-13.140871047973633],["OUR",-13.140878677368164],["▁япон",-13.140892028808594],["verunt",-13.1409330368042],["▁xanım",-13.1409330368042],["▁Grill",-13.140941619873049],["үүдийн",-13.140947341918944],["щем",-13.140966415405272],["▁honlap",-13.14098072052002],["▁importe",-13.140981674194336],["ਸੂ",-13.140983581542969],["▁pályá",-13.141003608703612],["နေရာ",-13.141009330749512],["▁Kontra",-13.141051292419434],["▁КП",-13.141056060791016],["pane",-13.141073226928713],["தீ",-13.14107608795166],["▁बुझ",-13.14107608795166],["▁తీస",-13.14108180999756],["dakah",-13.141083717346191],["람",-13.141094207763672],["▁tanács",-13.141098022460938],["▁suốt",-13.141136169433594],["预期",-13.141148567199709],["▁ujar",-13.141154289245604],["▁skam",-13.14116668701172],["▁científica",-13.141189575195312],["▁ସେହି",-13.141213417053224],["▁Fundación",-13.141234397888184],["kale",-13.141261100769045],["▁آنجا",-13.141273498535156],["▁Mauri",-13.141277313232422],["век",-13.141279220581056],["▁бумаг",-13.141304969787598],["▁aarde",-13.141314506530762],["ում՝",-13.141324043273926],["onde",-13.141331672668455],["▁ulang",-13.141342163085938],["▁Wam",-13.141348838806152],["zama",-13.141357421875],["departementet",-13.141366004943848],["tycznych",-13.141411781311035],["▁سیم",-13.141443252563477],["لول",-13.141449928283691],["▁148",-13.14146614074707],["▁Divi",-13.14146614074707],["▁каталог",-13.14146900177002],["▁siti",-13.141472816467283],["▁Gio",-13.141477584838867],["파이",-13.14148235321045],["▁oktatás",-13.141483306884766],["▁월",-13.14149570465088],["▁ventil",-13.14151668548584],["bygd",-13.14153003692627],["▁dovrebbe",-13.141549110412598],["▁Lao",-13.14157009124756],["▁Род"
,-13.141586303710938],["小说",-13.141650199890137],["▁понимаю",-13.1416597366333],["例えば",-13.14167594909668],["▁miała",-13.14168643951416],["dino",-13.141693115234377],["▁đợi",-13.141695976257324],["▁зрабіць",-13.141695976257324],["▁მოქალაქე",-13.141695976257324],["▁Trata",-13.141701698303224],["▁bowiem",-13.141701698303224],["▁rendezvény",-13.141706466674805],["출장",-13.141716957092283],["▁स्प",-13.141724586486816],["▁odnosi",-13.141731262207031],["ügyi",-13.141738891601562],["▁questão",-13.141745567321776],["▁ماحول",-13.141756057739258],["律师",-13.141761779785156],["▁Thư",-13.141765594482422],["▁اكثر",-13.141796112060549],["▁نحوه",-13.141809463500977],["▁fó",-13.141813278198242],["gora",-13.14182186126709],["ပတ်",-13.14182186126709],["▁medlemmar",-13.141846656799316],["▁taruhan",-13.141855239868164],["▁табыс",-13.141864776611328],["▁хүчин",-13.141883850097656],["zep",-13.14193058013916],["▁materials",-13.14194393157959],["▁podlagi",-13.141949653625488],["▁पेट",-13.141963005065918],["▁decis",-13.141974449157717],["dhé",-13.141983032226562],["▁Aw",-13.142011642456056],["▁අයි",-13.142045974731444],["مدينة",-13.142051696777344],["010",-13.14205837249756],["oke",-13.142064094543455],["▁plug",-13.142064094543455],["gelse",-13.14210605621338],["▁sheeg",-13.14212131500244],["▁मानिस",-13.142145156860352],["дво",-13.142154693603516],["สํานักงาน",-13.14216136932373],["ொரு",-13.142167091369627],["▁Basis",-13.142207145690918],["ング",-13.14222240447998],["▁spera",-13.14222812652588],["▁{{",-13.142239570617676],["ИП",-13.14224624633789],["▁Använd",-13.142254829406738],["▁kinne",-13.142258644104004],["ويد",-13.142301559448242],["รีบ",-13.142327308654783],["放心",-13.142337799072266],["▁Zona",-13.142342567443848],["عق",-13.142372131347656],["спи",-13.142373085021973],["нду",-13.142377853393556],["▁Хам",-13.142410278320312],["វាយ",-13.142417907714844],["flor",-13.142450332641602],["▁искам",-13.142457962036133],["гун",-13.142497062683104],["▁պաշտպանության",-13.14251708984375],["äre",-13.142537117004396],["ЈЕ",-13.142547607421877],["▁newid",-13.142553329467772],["ຄືນ",-13.14259147644043],["▁включи",-13.14260959625244],["වයි",-13.142613410949709],["ZZ",-13.142623901367188],["▁marin",-13.142704010009766],["▁naba",-13.142704010009766],["شين",-13.142717361450195],["lania",-13.142724990844728],["▁miser",-13.142725944519045],["غۇچى",-13.142744064331056],["▁Veranstaltung",-13.142744064331056],["▁Cala",-13.142760276794434],["▁vägen",-13.142812728881836],["▁tartó",-13.142821311950684],["ජීව",-13.142833709716797],["▁lokalit",-13.142861366271973],["ciją",-13.142866134643556],["udin",-13.14286994934082],["歌手",-13.142876625061035],["Го",-13.142882347106934],["▁conseils",-13.1428861618042],["▁खेलमा",-13.14292049407959],["订",-13.142922401428224],["▁Palestin",-13.142924308776855],["▁salat",-13.142951965332031],["▁Kristen",-13.142959594726562],["▁මෙහි",-13.142963409423828],["▁မီး",-13.14297580718994],["欄",-13.142977714538574],["▁હે",-13.142979621887209],["尺寸",-13.142983436584473],["▁σημαίνει",-13.142996788024902],["外交",-13.142998695373535],["顆",-13.142998695373535],["▁लोकप्रिय",-13.142999649047852],["ことがあります",-13.142999649047852],["▁Välkommen",-13.143000602722168],["▁बच्चा",-13.143014907836914],["类型",-13.14301586151123],["▁sveikatos",-13.143017768859863],["かかる",-13.143019676208496],["▁እንደተ",-13.143020629882812],["▁accidente",-13.143031120300291],["▁influencia",-13.14303207397461],["▁Newyddion",-13.143033981323242],["▁индекс",-13.143037796020508],["▁waarmee",-13.14303970336914],["ଚା",-13.143044471740724],["▁hardware",-13.14305019378
662],["▁ಏನು",-13.14305305480957],["父親",-13.143054962158203],["▁жели",-13.143060684204102],["创建",-13.143067359924316],["▁ሲል",-13.143073081970217],["▁Ага",-13.14309310913086],["▁siri",-13.14311695098877],["▁المؤ",-13.143136024475098],["▁გახდა",-13.143136978149414],["▁Cover",-13.143138885498049],["ително",-13.143168449401855],["▁eredmény",-13.14317226409912],["▁jorden",-13.143196105957031],["NÉ",-13.143216133117676],["tekst",-13.143244743347168],["▁forsknings",-13.14325714111328],["▁автомобиль",-13.143272399902344],["нская",-13.143291473388672],["amentos",-13.14329433441162],["▁153",-13.143338203430176],["▁aydın",-13.143362045288086],["含有",-13.1433687210083],["▁Μο",-13.143403053283691],["▁սեր",-13.143409729003906],["▁Чар",-13.143410682678224],["▁철",-13.143410682678224],["▁leit",-13.143415451049805],["冠军",-13.14342212677002],["▁војни",-13.143433570861816],["▁ИН",-13.143500328063965],["▁Chinese",-13.14350414276123],["▁sedia",-13.143511772155762],["▁sonda",-13.143536567687988],["стати",-13.143537521362305],["ਮੇਲ",-13.143540382385254],["▁benytte",-13.143577575683594],["maları",-13.143585205078123],["▁ارت",-13.143589973449709],["àmbit",-13.143606185913086],["skapet",-13.143611907958984],["▁ühis",-13.143671989440918],["نیا",-13.14367389678955],["بق",-13.143674850463867],["▁მონაწილე",-13.143680572509766],["▁додатков",-13.143681526184082],["▁adını",-13.143691062927246],["▁bawat",-13.14371109008789],["▁slechts",-13.14372730255127],["elve",-13.143746376037598],["tuo",-13.143779754638672],["өп",-13.143790245056152],["▁soát",-13.143790245056152],["BK",-13.143797874450684],["▁henni",-13.14381217956543],["schied",-13.143839836120604],["تيا",-13.143844604492188],["rétti",-13.143860816955566],["▁simili",-13.14386749267578],["▁إيران",-13.143878936767578],["▁বাস",-13.143902778625488],["▁sprav",-13.143908500671388],["ãeste",-13.143918991088867],["preč",-13.1439208984375],["datud",-13.143945693969728],["▁ruu",-13.143980026245115],["初期",-13.144004821777344],["ರುವ",-13.144052505493164],["▁случају",-13.144062042236328],["▁ёс",-13.144079208374023],["▁גבוה",-13.144149780273438],["ତମ",-13.144157409667969],["▁fjor",-13.144161224365234],["▁illər",-13.144172668457031],["▁להג",-13.144176483154297],["▁Vezi",-13.144179344177246],["▁berdi",-13.144186973571776],["tzu",-13.144197463989258],["льних",-13.144203186035156],["梯",-13.144232749938965],["buz",-13.144243240356444],["▁မင္း",-13.144244194030762],["hallen",-13.144248008728027],["tajn",-13.14426326751709],["ぼ",-13.14426326751709],["mr",-13.144264221191406],["鍵",-13.144272804260254],["eachta",-13.144274711608888],["ホームページ",-13.144287109375],["▁Pàgina",-13.144298553466797],["▁împotriva",-13.144298553466797],["▁କେଉଁ",-13.144298553466797],["▁ಮುಖ್ಯಮಂತ್ರಿ",-13.144298553466797],["▁үүрэг",-13.144299507141112],["▁липня",-13.14430046081543],["▁پالیسی",-13.144302368164062],["▁klasy",-13.144309997558594],["▁gangguan",-13.144325256347656],["▁ushqim",-13.144336700439451],["kväll",-13.144341468811035],["▁dokumen",-13.144350051879885],["▁ՀՀԿ",-13.144360542297363],["▁Nutzer",-13.14439582824707],["年齢",-13.144404411315918],["▁Пат",-13.144417762756348],["▁ramp",-13.14441967010498],["▁παιδί",-13.144434928894045],["▁تجهیزات",-13.14444065093994],["્રી",-13.144464492797852],["▁일이",-13.144465446472168],["▁मनो",-13.144486427307127],["▁भव",-13.144492149353027],["▁systems",-13.144527435302734],["▁lägenhet",-13.144533157348633],["ጠበቅ",-13.144539833068848],["طال",-13.144559860229492],["▁Одоо",-13.144567489624023],["▁koto",-13.144577980041504],["▁zákazník",-13.144577980041504],["אש",-13.144613265991213],
["ថ្",-13.144634246826172],["မု",-13.144647598266602],["▁besked",-13.14466381072998],["还能",-13.14466953277588],["320",-13.144683837890623],["▁प्रवास",-13.14469051361084],["europe",-13.144692420959473],["່ອງ",-13.144716262817385],["▁غل",-13.14472198486328],["老公",-13.144742965698242],["լին",-13.14475917816162],["▁общего",-13.14476490020752],["behandling",-13.144782066345217],["အခ်ိန္",-13.144798278808594],["aysan",-13.144808769226074],["▁cir",-13.144850730895996],["▁екипа",-13.144862174987791],["▁hanes",-13.144875526428224],["1993",-13.144882202148438],["▁چپ",-13.144882202148438],["됐",-13.144889831542969],["▁föl",-13.144906044006348],["▁celého",-13.144911766052246],["1991",-13.144941329956056],["▁ကျ",-13.144949913024902],["▁Daug",-13.144954681396484],["▁maszyn",-13.144963264465332],["▁δυσ",-13.144965171813965],["avez",-13.144967079162598],["▁Once",-13.145009994506836],["ാണി",-13.145025253295898],["▁מיי",-13.145027160644531],["ვები",-13.145033836364746],["Ez",-13.145034790039062],["kanlah",-13.14505672454834],["bilitat",-13.145057678222656],["▁отбор",-13.145057678222656],["ស៊",-13.145068168640137],["絕對",-13.145085334777832],["ீர்",-13.145086288452148],["▁Pasal",-13.145102500915527],["▁perto",-13.145112991333008],["▁Alpha",-13.145119667053224],["▁Радо",-13.14512825012207],["gato",-13.14516544342041],["étique",-13.145167350769045],["大学生",-13.14517879486084],["▁sistemos",-13.145180702209473],["▁kertas",-13.145196914672852],["bent",-13.14520263671875],["بوت",-13.145234107971191],["▁мисле",-13.145236015319824],["ਹੁ",-13.145244598388672],["▁svojega",-13.145256996154783],["τας",-13.145261764526367],["sug",-13.145270347595217],["اية",-13.145302772521973],["▁பிறகு",-13.145303726196287],["fjord",-13.145309448242188],["ଆଇ",-13.145336151123049],["▁barnen",-13.145356178283691],["▁Бен",-13.145371437072754],["dret",-13.145421981811523],["力的",-13.145442962646484],["блок",-13.14545726776123],["љива",-13.145487785339355],["▁Welche",-13.145503997802734],["▁mevcut",-13.145523071289062],["gesloten",-13.145524978637695],["aab",-13.14552879333496],["公众",-13.14556121826172],["衝",-13.145564079284668],["腕",-13.1455659866333],["ਡੇ",-13.145586967468262],["ませんでした",-13.145586967468262],["に対する",-13.145588874816896],["▁təhlükə",-13.145600318908691],["▁këtyre",-13.145602226257324],["▁marrëveshje",-13.145602226257324],["▁асабліва",-13.145602226257324],["▁асуудлыг",-13.145602226257324],["▁гроші",-13.145602226257324],["▁зыходнік",-13.145602226257324],["▁тэрбум",-13.14560317993164],["▁huyết",-13.145607948303224],["▁nostalgi",-13.14560890197754],["▁हासिल",-13.14561367034912],["▁نمایشگاه",-13.145625114440918],["▁background",-13.145627975463867],["▁حیات",-13.145636558532717],["matka",-13.145662307739258],["▁tibbi",-13.145666122436523],["▁സമൂഹ",-13.145668029785156],["ايات",-13.145695686340332],["▁201",-13.14570426940918],["cked",-13.145708084106444],["▁εργασίας",-13.145722389221191],["▁معیار",-13.145724296569824],["▁vekt",-13.145766258239746],["▁tāda",-13.145769119262695],["estiu",-13.145793914794922],["設立",-13.145794868469238],["ujúci",-13.145798683166504],["వె",-13.14579963684082],["设立",-13.1458158493042],["עני",-13.14582061767578],["ንን",-13.145830154418944],["oxy",-13.145832061767578],["পত্র",-13.145859718322754],["▁chambre",-13.145889282226562],["▁ہا",-13.145896911621094],["▁incluye",-13.14590835571289],["▁Scar",-13.1459321975708],["▁malý",-13.145936965942385],["▁azonnal",-13.145959854125977],["нню",-13.145967483520508],["▁naturali",-13.145967483520508],["▁Phát",-13.14601230621338],["▁participació",-13.146031379699709],["පත",-13.
146076202392578],["εων",-13.146085739135742],["▁Wonder",-13.146088600158691],["▁עצמי",-13.14609718322754],["▁Projekte",-13.146100044250488],["▁چاہیے",-13.146124839782717],["▁comunidades",-13.14614200592041],["▁இன்",-13.146151542663574],["▁کیسے",-13.146187782287598],["▁الكويت",-13.14621925354004],["гоор",-13.146225929260254],["▁spletnih",-13.146232604980469],["▁masiv",-13.146244049072266],["uté",-13.146255493164062],["▁követően",-13.146273612976074],["ाशी",-13.146275520324709],["▁utiliser",-13.14628791809082],["但他",-13.146294593811035],["▁divulga",-13.146309852600098],["▁kompiuter",-13.146312713623049],["lusta",-13.146319389343262],["▁निजी",-13.146319389343262],["▁현장",-13.146331787109377],["KIA",-13.146334648132324],["▁vadina",-13.146342277526855],["είου",-13.146352767944336],["▁पौ",-13.146360397338867],["▁Reino",-13.146363258361816],["пише",-13.146376609802246],["ঙ",-13.146432876586914],["মী",-13.146442413330078],["▁Deput",-13.14645004272461],["KOM",-13.146454811096191],["▁уст",-13.146475791931152],["▁sinal",-13.146478652954102],["melt",-13.146479606628418],["▁recette",-13.146489143371582],["那就",-13.146492004394531],["▁ойло",-13.14649486541748],["▁snap",-13.146501541137695],["ဆိုင်ရာ",-13.146541595458984],["ooyin",-13.146562576293944],["tagen",-13.146573066711426],["zené",-13.146586418151855],["▁skuteczn",-13.146586418151855],["▁çağır",-13.146586418151855],["▁javne",-13.146599769592283],["▁պատմ",-13.146615028381348],["ošo",-13.1466646194458],["কম",-13.146697998046877],["نامہ",-13.146703720092772],["碰",-13.146717071533203],["▁pila",-13.146740913391112],["▁taký",-13.146746635437012],["්නේ",-13.146757125854492],["kön",-13.146771430969238],["▁spad",-13.146775245666504],["itāti",-13.146783828735352],["▁יצא",-13.1467924118042],["▁Agoda",-13.146805763244627],["▁नर",-13.146832466125488],["▁nasz",-13.14684009552002],["豬",-13.146848678588867],["했는데",-13.1468505859375],["▁Chance",-13.146851539611816],["▁Druck",-13.146876335144045],["겠습니다",-13.146878242492676],["ダイエット",-13.146892547607422],["▁extern",-13.14689826965332],["寶寶",-13.14689826965332],["칼",-13.146905899047852],["▁Verarbeitung",-13.146907806396484],["▁współpracy",-13.146907806396484],["▁Спасибо",-13.146907806396484],["▁ଶିଶୁ",-13.146907806396484],["▁შესაძლოა",-13.146907806396484],["▁Soome",-13.146912574768066],["▁בגלל",-13.146913528442385],["▁ପରିବାର",-13.146915435791016],["▁შეგიძლიათ",-13.146918296813965],["▁countries",-13.146923065185549],["▁mtoto",-13.146924018859863],["▁Rita",-13.146926879882812],["verð",-13.146933555603027],["▁دوربین",-13.146933555603027],["mentul",-13.146934509277344],["ਸ਼ੀ",-13.146937370300291],["▁rôle",-13.146957397460938],["▁Vergleich",-13.146978378295898],["▁əsasında",-13.14698314666748],["▁Jie",-13.146984100341797],["viera",-13.146991729736328],["ენა",-13.147006034851074],["▁புது",-13.147025108337402],["िड",-13.147028923034668],["▁המח",-13.147043228149414],["▁galay",-13.147053718566896],["ANTE",-13.14706039428711],["▁ਰੱਖ",-13.147066116333008],["ನಿಗೆ",-13.147074699401855],["▁cloud",-13.147085189819336],["▁njemu",-13.147085189819336],["0°",-13.147101402282717],["zve",-13.147109031677246],["еса",-13.14711856842041],["▁лав",-13.147130966186523],["tajien",-13.147146224975586],["യില്ല",-13.14718532562256],["▁Үй",-13.147205352783203],["▁relationship",-13.147214889526367],["▁மற்ற",-13.14722728729248],["▁menar",-13.147231101989746],["▁portu",-13.147234916687012],["wany",-13.14723777770996],["ล้าน",-13.147246360778809],["szedł",-13.147255897521973],["▁явля",-13.147272109985352],["теги",-13.14728832244873],["рды",-13.1472930908
20312],["▁ሁ",-13.147295951843262],["▁उसका",-13.147315979003906],["▁klasi",-13.14732265472412],["▁köl",-13.147326469421388],["▁цвят",-13.147339820861816],["▁মহা",-13.147342681884766],["EET",-13.147364616394045],["▁ਕੌਰ",-13.14736557006836],["▁ووٹ",-13.147369384765623],["のため",-13.14737033843994],["Cat",-13.147394180297852],["נאַ",-13.147394180297852],["▁ADA",-13.147411346435549],["γό",-13.147451400756836],["ደራ",-13.14747142791748],["▁മരണ",-13.147482872009276],["AVI",-13.147489547729492],["いって",-13.147489547729492],["ASE",-13.14749240875244],["▁исход",-13.14749813079834],["▁ವಾರ",-13.147507667541504],["何も",-13.14751148223877],["▁കൂട",-13.147513389587402],["이란",-13.147520065307615],["▁paties",-13.147538185119627],["▁vhodný",-13.14755153656006],["мк",-13.147610664367676],["ИЛИ",-13.147638320922852],["▁Tuan",-13.147686958312988],["▁مم",-13.147722244262695],["▁δει",-13.147741317749023],["энт",-13.147748947143556],["▁пуш",-13.147771835327148],["的使用",-13.147771835327148],["▁밤",-13.147838592529297],["brati",-13.14784336090088],["▁čast",-13.147846221923828],["たくさんの",-13.14785861968994],["と思っています",-13.147887229919434],["▁الاول",-13.14788818359375],["ტორ",-13.147893905639648],["▁امنیتي",-13.14790153503418],["ốn",-13.147908210754396],["▁vë",-13.147926330566406],["▁azienda",-13.147929191589355],["▁உதவி",-13.14793872833252],["▁description",-13.147956848144531],["▁ЄС",-13.147963523864746],["▁Лю",-13.147969245910645],["▁πω",-13.147992134094238],["▁sāk",-13.147994041442873],["▁darbe",-13.148019790649414],["▁Roinn",-13.148028373718262],["Masukkan",-13.14803409576416],["እስ",-13.148046493530272],["▁familjen",-13.148070335388184],["▁ऐन",-13.14808464050293],["ساب",-13.148089408874512],["දෙ",-13.14809226989746],["stopp",-13.148114204406738],["▁неке",-13.148116111755373],["▁왕",-13.148116111755373],["▁انتظام",-13.1481294631958],["▁patologi",-13.148149490356444],["▁Centra",-13.148153305053713],["baren",-13.148164749145508],["▁ბიზნეს",-13.148175239562988],["醉",-13.148180961608888],["傑",-13.14818286895752],["pán",-13.148184776306152],["ķē",-13.148200035095217],["郡",-13.148202896118164],["▁ସରକାରଙ୍କ",-13.14820384979248],["▁Naam",-13.14820671081543],["▁fita",-13.148210525512695],["▁Gürcüstan",-13.148215293884276],["▁çünkü",-13.148215293884276],["▁Αυτή",-13.148215293884276],["▁καλύτερο",-13.148215293884276],["▁подкрепа",-13.148215293884276],["▁свайго",-13.148215293884276],["▁হোসেন",-13.148215293884276],["▁Kaliforni",-13.148216247558594],["▁फुटबल",-13.148218154907228],["▁daughter",-13.148219108581545],["▁leabhar",-13.14822006225586],["▁فیصد",-13.148226737976074],["gede",-13.148249626159668],["Мы",-13.148250579833984],["▁במסגרת",-13.148256301879885],["وخ",-13.148269653320312],["▁ഉണ്ടാക",-13.148273468017578],["▁retrouver",-13.148277282714844],["指示",-13.148286819458008],["▁Србију",-13.14828872680664],["kizun",-13.148293495178224],["▁୯",-13.148295402526855],["▁Dí",-13.14830207824707],["дължи",-13.14832592010498],["▁ഭൂമി",-13.148329734802246],["▁ceste",-13.148333549499512],["לל",-13.148353576660156],["▁derbas",-13.148357391357422],["ildiği",-13.148366928100586],["vertrag",-13.148396492004396],["seura",-13.148398399353027],["▁часом",-13.148405075073242],["ラー",-13.148445129394531],["▁десе",-13.14849090576172],["reve",-13.148494720458984],["▁reserve",-13.148497581481934],["▁විතර",-13.148502349853516],["▁informací",-13.148504257202148],["▁funkcija",-13.148509979248049],["idele",-13.14851188659668],["▁фонда",-13.14852237701416],["▁zice",-13.148527145385742],["ছা",-13.1485595703125],["▁acima",-13.148563385009766],["▁thí",-13.148563385009
766],["ţiuni",-13.148571014404297],["bericht",-13.148584365844728],["▁Datum",-13.148587226867676],["▁výš",-13.148600578308104],["▁Osmanlı",-13.14863395690918],["▁‡",-13.148648262023926],["▁konec",-13.148653030395508],["ပါတီ",-13.148659706115724],["▁Studien",-13.148675918579102],["▁clases",-13.148683547973633],["ண்ட்",-13.148700714111328],["▁window",-13.148707389831545],["▁rovno",-13.14876651763916],["gok",-13.148773193359377],["▁खाली",-13.14879035949707],["確",-13.148818969726562],["ылуы",-13.148822784423828],["▁rajo",-13.148832321166992],["▁недавно",-13.148837089538574],["▁virksomhed",-13.148838996887209],["πολιτ",-13.14885139465332],["▁shtetit",-13.148902893066406],["▁rimane",-13.148918151855469],["わけです",-13.148932456970217],["ujúce",-13.14893627166748],["wazi",-13.148968696594238],["ကတော့",-13.148972511291504],["virus",-13.148974418640137],["հեռ",-13.14899730682373],["GRE",-13.149006843566896],["▁кый",-13.149012565612791],["▁augu",-13.149041175842283],["▁Volks",-13.149053573608398],["▁najbolji",-13.149056434631348],["▁saavad",-13.149097442626951],["▁seko",-13.149107933044434],["▁hóp",-13.14911651611328],["රේ",-13.149131774902344],["▁toege",-13.149151802062988],["▁2025",-13.149155616760254],["liyin",-13.149162292480469],["▁பதில்",-13.14916706085205],["▁முடிவு",-13.149194717407228],["▁آبان",-13.149197578430176],["вую",-13.149200439453123],["▁വായിക്ക",-13.149210929870604],["リング",-13.149215698242188],["▁viku",-13.149216651916504],["▁donosi",-13.149225234985352],["naviy",-13.149231910705566],["▁Snow",-13.149237632751465],["▁läheb",-13.14923858642578],["▁afya",-13.149242401123049],["非洲",-13.14926052093506],["▁namanya",-13.149266242980955],["ഭി",-13.149276733398438],["▁характеристики",-13.149300575256348],["▁ehdot",-13.149336814880373],["以往",-13.149343490600586],["▁මග",-13.1493501663208],["еў",-13.149353981018066],["▁használt",-13.149378776550291],["▁телефону",-13.149399757385254],["باز",-13.149409294128418],["▁dheer",-13.14944076538086],["▁ужива",-13.149447441101074],["brauc",-13.149455070495604],["рали",-13.149483680725098],["▁ajut",-13.149486541748049],["ၚ",-13.149511337280272],["สิทธิ",-13.14951515197754],["▁نخست",-13.149518966674805],["推廣",-13.14952278137207],["ក្មេង",-13.149523735046388],["▁Každý",-13.149524688720703],["▁ہندوستان",-13.149530410766602],["▁אַלע",-13.1495361328125],["▁numéro",-13.149541854858398],["▁יוצא",-13.149541854858398],["▁ගොස්",-13.149559020996094],["▁Poleg",-13.149563789367676],["▁zoveel",-13.14957046508789],["▁سياست",-13.149593353271484],["▁pause",-13.149601936340332],["ወደ",-13.149639129638672],["▁razvija",-13.14964199066162],["▁yeye",-13.149645805358888],["maskin",-13.149650573730469],["▁እነዚህ",-13.149662017822266],["deira",-13.149678230285645],["قابل",-13.14968204498291],["ėjai",-13.149682998657228],["▁કેવી",-13.14969539642334],["Mak",-13.149697303771973],["▁prochaine",-13.149703025817873],["▁Consum",-13.149703979492188],["реєстр",-13.149723052978516],["▁берип",-13.149723052978516],["túra",-13.14972686767578],["▁CB",-13.14972686767578],["лати",-13.14975929260254],[".05.2018",-13.14976692199707],["▁прийом",-13.14980697631836],["ນິ",-13.149807929992676],["▁какое",-13.149808883666992],["न्तु",-13.149820327758787],["арни",-13.149827003479004],["▁Овој",-13.149828910827637],["▁આમ",-13.149855613708496],["pedia",-13.14986515045166],["▁fremme",-13.149916648864746],["мети",-13.149917602539062],["▁5.000",-13.14993953704834],["▁kanila",-13.149946212768556],["ພັນ",-13.149980545043944],["קפה",-13.149981498718262],["▁afdeling",-13.14999008178711],["▁Один",-13.14999771118164],["▁uc
uz",-13.150004386901855],["vasti",-13.150018692016602],["▁Дуб",-13.150074005126951],["hihi",-13.150076866149902],["based",-13.150080680847168],["▁ହେ",-13.150084495544434],["▁ING",-13.1500883102417],["▁έχεις",-13.150105476379396],["▁ভাল",-13.15011501312256],["▁Tör",-13.150116920471191],["▁පැමිණ",-13.15013027191162],["▁סא",-13.150147438049316],["▁حلق",-13.150151252746582],["▁ΤΟΥ",-13.150191307067873],["▁puse",-13.150206565856934],["ເກມ",-13.150213241577148],["▁Sit",-13.150238037109377],["энні",-13.15025234222412],["▁investment",-13.150259017944336],["▁ट्रेन",-13.150276184082031],["۰",-13.15028476715088],["▁krep",-13.150297164916992],["▁håll",-13.15030288696289],["ِّ",-13.150303840637209],["▁referente",-13.150309562683104],["▁душе",-13.150314331054688],["▁hoffe",-13.15034008026123],["▁stylu",-13.150341033935549],["▁өнер",-13.150361061096191],["▁hrane",-13.150364875793455],["informati",-13.15037441253662],["චි",-13.150376319885254],["▁○",-13.150386810302734],["árny",-13.150397300720217],["也不是",-13.15040111541748],["▁Dib",-13.150416374206545],["▁lì",-13.150455474853516],["▁കൂ",-13.15045928955078],["▁abil",-13.150519371032717],["лерді",-13.150550842285156],["▁respons",-13.150552749633787],["▁өнім",-13.150577545166016],["tämään",-13.150588035583496],["pind",-13.15064811706543],["▁보호",-13.15068817138672],["ന്ത്",-13.15072536468506],["▁invite",-13.150738716125488],["▁zašto",-13.150749206542969],["▁enero",-13.150768280029297],["cias",-13.15078353881836],["zok",-13.150823593139648],["▁الكو",-13.150827407836914],["▁oes",-13.150830268859863],["▁menyampaikan",-13.150835037231444],["▁président",-13.150835037231444],["▁власності",-13.150835037231444],["▁علاقوں",-13.150835037231444],["▁тартып",-13.150836944580078],["▁दाहाल",-13.15084171295166],["▁μέλη",-13.150853157043455],["▁síðustu",-13.15085792541504],["▁danger",-13.150858879089355],["বু",-13.150863647460938],["▁onderwerp",-13.150872230529783],["▁կարծիք",-13.150875091552734],["▁gammal",-13.150893211364746],["▁կապված",-13.150909423828123],["▁portret",-13.150918006896973],["▁Bulan",-13.150923728942873],["▁GOD",-13.150936126708984],["▁можаце",-13.1509428024292],["▁Fateh",-13.150946617126465],["▁ilçe",-13.15095043182373],["▁storm",-13.15095043182373],["▁neį",-13.150973320007324],["▁اشرف",-13.15097999572754],["▁бүс",-13.15101146697998],["вак",-13.15102767944336],["▁якога",-13.151028633117676],["▁ulterior",-13.151041984558104],["...!!",-13.151087760925291],["▁líf",-13.15108871459961],["itāte",-13.151123046875],["▁ଶେଷ",-13.151129722595217],["posit",-13.151134490966797],["があれば",-13.15114402770996],["ဆရာ",-13.151145935058594],["▁మై",-13.15121364593506],["▁මල්",-13.15121364593506],["▁determinado",-13.151226043701172],["▁مرور",-13.151227951049805],["ტრ",-13.151233673095703],["▁अंग",-13.151256561279297],["▁gainera",-13.151265144348145],["キー",-13.151291847229004],["fyr",-13.151323318481444],["▁hetero",-13.151330947875977],["▁arama",-13.151349067687988],["แต่ละ",-13.151363372802734],["▁облыс",-13.15139389038086],["িম",-13.151405334472656],["▁conversation",-13.151407241821287],["▁էջ",-13.151429176330566],["ατο",-13.151451110839844],["▁එරෙහිව",-13.15145492553711],["▁ប្រធាន",-13.151455879211426],["bukan",-13.151469230651855],["007",-13.151482582092283],["▁NIE",-13.151482582092283],["ောင်",-13.151484489440918],["▁බැහැ",-13.151490211486816],["ትም",-13.151496887207031],["ஐ",-13.151497840881348],["▁nedir",-13.15149974822998],["Mİ",-13.151500701904297],["▁τιμή",-13.151501655578612],["▁ბინა",-13.151554107666016],["učili",-13.151576042175291],["▁geniet",-13.151578903198242],["ci
jski",-13.151581764221191],["давати",-13.151591300964355],["▁vətəndaş",-13.151592254638672],["χρ",-13.151605606079102],["▁hezar",-13.151640892028809],["▁выпуск",-13.15164279937744],["▁jäm",-13.151656150817873],["▁renda",-13.151662826538086],["цям",-13.151683807373049],["▁mogućnosti",-13.15169620513916],["▁другу",-13.151703834533691],["▁бө",-13.151735305786133],["τηση",-13.151747703552246],["▁Brod",-13.151756286621094],["▁mieh",-13.15182876586914],["▁handen",-13.151860237121582],["▁יפה",-13.151863098144531],["sjes",-13.15188980102539],["▁MES",-13.151907920837402],["การทํา",-13.151914596557615],["▁rebel",-13.151925086975098],["áir",-13.15192985534668],["オー",-13.151931762695312],["기술",-13.151941299438477],["▁përgjigje",-13.15195369720459],["ሞች",-13.151963233947754],["▁droits",-13.15196704864502],["▁obten",-13.152061462402344],["▁सिर",-13.152064323425291],["ฟา",-13.152076721191406],["▁Litt",-13.152082443237305],["▁Fax",-13.15208339691162],["擔任",-13.15211009979248],["▁Zuid",-13.15211296081543],["വരുടെ",-13.152121543884276],["ശ്വ",-13.152121543884276],["▁kvöld",-13.152145385742188],["กว้าง",-13.152146339416504],["▁προκειμένου",-13.15214729309082],["▁تشخیص",-13.15214729309082],["▁ঘোষণা",-13.15214729309082],["▁ඉතිං",-13.15214729309082],["▁επιλογή",-13.152148246765137],["▁ତଥା",-13.152152061462402],["▁skôr",-13.152155876159668],["▁Հայոց",-13.152155876159668],["▁Follow",-13.15218448638916],["▁പൊലീസ്",-13.15218734741211],["▁slog",-13.152199745178224],["▁samedi",-13.152201652526855],["▁אודות",-13.15220832824707],["▁ошондой",-13.152209281921388],["▁piu",-13.152213096618652],["▁Nil",-13.152215957641602],["υδ",-13.152217864990234],["蘋果",-13.152219772338867],["▁sesong",-13.152228355407717],["▁svojem",-13.152234077453612],["▁звон",-13.15224552154541],["ରାଜ",-13.152265548706056],["සන්",-13.15226936340332],["ート",-13.152284622192385],["▁kararı",-13.15232753753662],["▁Strom",-13.152332305908203],["▁einzige",-13.152334213256836],["▁אדער",-13.152336120605469],["ערט",-13.15234088897705],["▁قربانی",-13.152363777160645],["▁ลูก",-13.152372360229492],["▁potra",-13.152389526367188],["▁szél",-13.15239143371582],["duct",-13.152395248413086],["▁диета",-13.152410507202148],["蛋糕",-13.152420997619627],["▁Turun",-13.152430534362791],["▁institucion",-13.15244197845459],["▁තේ",-13.152448654174805],["▁aftale",-13.152466773986816],["▁חדשה",-13.152473449707031],["▁brasileiro",-13.152482986450195],["▁बाँ",-13.15249252319336],["უსი",-13.15250015258789],["▁gaude",-13.152518272399902],["▁tapahtuma",-13.152521133422852],["дневни",-13.15255355834961],["▁വിധി",-13.152568817138672],["▁סוף",-13.152581214904783],["Azər",-13.152603149414062],["▁Гора",-13.152623176574709],["▁உயர்",-13.152624130249023],["4/",-13.152647972106934],["лерін",-13.152653694152832],["trieb",-13.152654647827148],["▁ընկեր",-13.152654647827148],["▁esate",-13.152658462524414],["球迷",-13.15266227722168],["▁یقین",-13.152725219726562],["▁одговори",-13.152742385864258],["▁debemos",-13.152749061584473],["▁näe",-13.152750015258787],["ovka",-13.15275764465332],["iinta",-13.152767181396484],["кво",-13.152789115905762],["▁Kush",-13.15280055999756],["这就是",-13.152830123901367],["▁boom",-13.152836799621582],["▁asistent",-13.152843475341797],["▁спра",-13.152847290039062],["ገረ",-13.152859687805176],["yari",-13.152867317199709],["非法",-13.152870178222656],["▁pink",-13.152891159057615],["ько",-13.15289306640625],["▁ehitus",-13.152897834777832],["▁побач",-13.153017044067385],["▁симптом",-13.153019905090332],["ACIÓN",-13.15302562713623],["▁शान्ति",-13.153038024902344],["배포",-13.1530408859252
91],["▁Estamos",-13.153043746948242],["houd",-13.153066635131836],["зат",-13.153082847595217],["sanya",-13.15312385559082],["▁bismo",-13.153136253356934],["▁засе",-13.153138160705566],["▁הדבר",-13.153157234191896],["ạo",-13.15317726135254],["▁propune",-13.153178215026855],["akku",-13.153185844421388],["ältä",-13.153249740600586],["ဆန္",-13.153281211853027],["▁annual",-13.15331745147705],["▁следва",-13.153319358825684],["▁zako",-13.153325080871582],["▁Tama",-13.153332710266112],["▁sistēmas",-13.153334617614746],["λεγ",-13.153335571289062],["▁visst",-13.153366088867188],["▁කල්",-13.15337371826172],["▁Kent",-13.153374671936035],["▁historier",-13.153393745422363],["威胁",-13.153406143188477],["wirtschaft",-13.153420448303224],["mitta",-13.15342617034912],["▁Vat",-13.153427124023438],["▁Тако",-13.15343952178955],["▁σύν",-13.153448104858398],["▁BC",-13.153450965881348],["ไก่",-13.15345859527588],["▁verildi",-13.15345859527588],["▁ընդհանուր",-13.153461456298828],["▁ಧನ್ಯವಾದಗಳು",-13.153465270996094],["▁මධ්",-13.153465270996094],["torial",-13.15346622467041],["▁انتقاد",-13.153467178344728],["▁atbilst",-13.15346908569336],["шните",-13.153470993041992],["▁služieb",-13.153472900390623],["ไทยรัฐ",-13.153478622436523],["▁کمیٹی",-13.153480529785156],["ीक",-13.153491020202637],["▁decisão",-13.15350341796875],["[12]",-13.153531074523926],["▁وفق",-13.15354061126709],["łego",-13.153552055358888],["▁קטן",-13.153552055358888],["ახლ",-13.15356731414795],["▁фаза",-13.15359115600586],["▁Šajā",-13.153594970703123],["▁पहुंच",-13.153597831726074],["láš",-13.15361785888672],["▁чын",-13.153618812561035],["▁включа",-13.153636932373049],["นาม",-13.153642654418944],["▁haline",-13.15366554260254],["▁હાલ",-13.153668403625488],["▁Idea",-13.15369987487793],["шими",-13.153708457946776],["▁kjøre",-13.153717041015623],["▁형",-13.153718948364258],["▁പുറത്തു",-13.153727531433104],["▁بودم",-13.153736114501951],["▁bedst",-13.153738021850586],["▁مرات",-13.153748512268066],["xana",-13.153756141662598],["κρατ",-13.153806686401367],["olas",-13.153813362121582],["等の",-13.153815269470217],["▁viaggi",-13.153820991516112],["▁првиот",-13.153830528259276],["▁кеш",-13.15386199951172],["▁magpa",-13.153875350952148],["NOV",-13.153892517089844],["ඩිය",-13.153904914855955],["▁новий",-13.153935432434082],["▁останов",-13.153943061828612],["atum",-13.15394401550293],["▁ਰਿ",-13.15394687652588],["jmo",-13.153953552246094],["kushoto",-13.15396213531494],["▁مجازی",-13.153971672058104],["的老",-13.153990745544434],["▁правіць",-13.153992652893066],["▁2/3",-13.153996467590332],["▁врло",-13.154004096984863],["sunuz",-13.15401840209961],["▁piedi",-13.15402126312256],["▁quà",-13.154032707214355],["▁Lewis",-13.154035568237305],["▁ister",-13.154035568237305],["▁erikois",-13.154037475585938],["ОЗ",-13.154067039489746],["олошки",-13.154129028320312],["▁Julai",-13.154142379760742],["▁variable",-13.154150009155272],["开心",-13.154150009155272],["ქართულად",-13.15415859222412],["▁Kontrol",-13.154167175292969],["ផ្ទះ",-13.154169082641602],["▁idée",-13.154175758361816],["පන්",-13.154176712036133],["сок",-13.15419578552246],["126",-13.154196739196776],["▁Prov",-13.154224395751951],["▁ponos",-13.154232025146484],["zier",-13.15423583984375],["▁matrimonio",-13.154264450073242],["▁discurso",-13.154291152954102],["▁ricorda",-13.154300689697266],["wati",-13.154304504394531],["\\\\\"",-13.154351234436035],["▁הדי",-13.154378890991213],["▁ирэх",-13.154379844665527],["第一个",-13.154397010803224],["▁Miet",-13.15440559387207],["luku",-13.154415130615234],["تواصل",-13.15442943572998],["ষ্ঠ",-1
3.154455184936523],["していました",-13.154463768005373],["▁열린",-13.154470443725586],["▁Undang",-13.154471397399902],["יסה",-13.154540061950684],["लेख",-13.15455150604248],["थी",-13.154579162597656],["▁sektör",-13.154616355895996],["▁रोजगार",-13.154624938964844],["tunt",-13.15463638305664],["ttavaa",-13.154644966125488],["قود",-13.154669761657717],["▁toh",-13.154678344726562],["seite",-13.154685020446776],["▁Kritik",-13.15470027923584],["▁૧",-13.154706954956056],["แล้วก็",-13.15472412109375],["缘",-13.154739379882812],["▁odpowiada",-13.154776573181152],["▁ի՞նչ",-13.154776573181152],["▁انځور",-13.154776573181152],["▁ಇತ್ತೀಚಿನ",-13.154776573181152],["▁ಮೊಬೈಲ್",-13.154776573181152],["▁받은",-13.154776573181152],["▁vorhanden",-13.154777526855469],["▁əməkdaşlıq",-13.154777526855469],["▁ముఖ్యమంత్రి",-13.154777526855469],["▁niestety",-13.154778480529783],["▁আহত",-13.154786109924316],["▁smal",-13.15480613708496],["▁облик",-13.154826164245604],["▁основании",-13.154831886291504],["duction",-13.15484619140625],["▁odmah",-13.154848098754885],["ीला",-13.154854774475098],["BV",-13.15485668182373],["▁vurdering",-13.154877662658691],["▁resultatet",-13.154884338378906],["▁närheten",-13.154885292053224],["▁7,5",-13.154900550842283],["▁Köln",-13.154905319213867],["▁gern",-13.15492057800293],["▁sadar",-13.154956817626951],["▁стреми",-13.154959678649902],["▁Kendi",-13.154993057250977],["ກອນ",-13.15501594543457],["ຄິດ",-13.155024528503418],["gesetz",-13.155037879943848],["1992",-13.155083656311035],["▁Mynd",-13.155121803283691],["▁kule",-13.155122756958008],["ได",-13.15514087677002],["બે",-13.155146598815918],["▁одговорност",-13.155155181884766],["▁තැ",-13.155158042907717],["ggia",-13.15516185760498],["greifen",-13.155163764953612],["тельный",-13.155165672302246],["cî",-13.155221939086914],["ામાં",-13.155223846435549],["reid",-13.155234336853027],["▁kocha",-13.15524196624756],["sunu",-13.155250549316406],["▁concerne",-13.155256271362305],["不上",-13.155256271362305],["ლან",-13.155281066894531],["▁groen",-13.15530014038086],["wasser",-13.155303955078123],["▁инвалид",-13.15530490875244],["▁preguntas",-13.15530776977539],["च्",-13.155346870422363],["စစ်",-13.155359268188477],["全省",-13.155363082885742],["כך",-13.155391693115234],["▁10000",-13.15541648864746],["▁Emo",-13.155439376831056],["▁មើល",-13.155450820922852],["agoa",-13.155485153198242],["іцы",-13.155526161193848],["▁łącz",-13.155532836914062],["vlad",-13.155543327331545],["হু",-13.155545234680176],["▁wilde",-13.155545234680176],["▁서로",-13.155574798583984],["▁proved",-13.1555757522583],["▁ലാ",-13.155576705932615],["▁tekstu",-13.15558910369873],["heni",-13.155606269836426],["するために",-13.155611991882324],["▁çû",-13.15561294555664],["búð",-13.155614852905272],["▁влезе",-13.155620574951172],["maksā",-13.155633926391602],["otus",-13.155633926391602],["بعث",-13.15563678741455],["ভার",-13.15565299987793],["▁បទ",-13.155653953552246],["▁kazandı",-13.155656814575195],["▁говорили",-13.155668258666992],["▁породи",-13.155668258666992],["答え",-13.15567111968994],["મ્પ",-13.155675888061523],["▁байгуул",-13.155683517456056],["▁Bald",-13.15570068359375],["▁fiesta",-13.155704498291016],["▁qiz",-13.15573787689209],["▁dydd",-13.15574550628662],["のご",-13.155759811401367],["是個",-13.155776023864746],["▁Moda",-13.155784606933594],["vní",-13.15578556060791],["▁روک",-13.155790328979492],["ացում",-13.155795097351074],["емые",-13.155804634094238],["▁នៅក្នុង",-13.155810356140137],["▁xer",-13.155831336975098],["▁trä",-13.155851364135742],["▁wypad",-13.1558837890625],["वस",-13.155898094177246],["▁rozumie"
,-13.155917167663574],["දිය",-13.155953407287598],["以後",-13.155975341796877],["▁Fins",-13.15598201751709],["▁працює",-13.15599250793457],["邀",-13.156023979187012],["បី",-13.15603256225586],["চল",-13.156036376953123],["▁вредности",-13.156046867370604],["滅",-13.156052589416504],["▁tw",-13.156062126159668],["燕",-13.156063079833984],["atin",-13.156065940856934],["祈",-13.156067848205566],["牆",-13.156072616577148],["公园",-13.15607738494873],["了我",-13.156087875366213],["0)",-13.156088829040527],["▁مباراة",-13.156094551086426],["▁చరిత్ర",-13.156094551086426],["▁ಪ್ರಶಸ್ತಿ",-13.156094551086426],["▁ଦୂର",-13.156095504760742],["▁ملاتړ",-13.15609645843506],["▁രാത്രി",-13.15609645843506],["เอกสาร",-13.156099319458008],["▁aldrei",-13.156107902526855],["▁ihme",-13.156107902526855],["▁Polícia",-13.15611743927002],["▁ഞങ്ങള്",-13.156118392944336],["▁ngjarje",-13.156121253967283],["▁Anwendung",-13.15614128112793],["lecti",-13.156153678894045],["협회",-13.15616512298584],["▁markedet",-13.156170845031738],["▁opäť",-13.156174659729004],["▁nagusi",-13.156188011169434],["▁minit",-13.156194686889648],["едно",-13.156200408935549],["▁spielt",-13.156210899353027],["▁തന്നെയാണ്",-13.15621280670166],["*)",-13.156234741210938],["▁вредност",-13.156251907348633],["▁ước",-13.156286239624023],["▁propus",-13.156294822692873],["▁адамның",-13.156305313110352],["ਚੇ",-13.156332969665527],["sung",-13.156347274780272],["וה",-13.156357765197754],["ための",-13.156360626220703],["▁نامزد",-13.156365394592283],["▁aperta",-13.156414031982422],["amor",-13.156414985656738],["▁smerte",-13.156427383422852],["▁Европск",-13.1564302444458],["▁получава",-13.156436920166016],["▁жерге",-13.156441688537598],["міна",-13.156454086303713],["▁દો",-13.15645980834961],["स्ति",-13.15646743774414],["ráid",-13.15648365020752],["▁talous",-13.156501770019531],["спіль",-13.15651512145996],["ဘဝ",-13.15652561187744],["▁அப்படி",-13.156530380249023],["▁ចំពោះ",-13.156533241271973],["▁хоол",-13.15654754638672],["▁psykolog",-13.156548500061035],["ನಗರ",-13.156556129455566],["▁higher",-13.156644821166992],["▁ସମୟ",-13.156646728515623],["vél",-13.156658172607422],["▁сможете",-13.156699180603027],["▁приступ",-13.15670108795166],["いつ",-13.156707763671877],["▁vänta",-13.156716346740724],["▁üldse",-13.156718254089355],["▁Leng",-13.156756401062012],["百度",-13.156765937805176],["▁electro",-13.15677261352539],["▁dramat",-13.15679931640625],["เสื้อ",-13.156845092773438],["▁hota",-13.15684700012207],["室內",-13.156853675842283],["เซอร์",-13.15688705444336],["ספק",-13.15691375732422],["▁farge",-13.156933784484863],["siyalar",-13.156939506530762],["▁fabrika",-13.156941413879396],["▁ମୃତ",-13.15695095062256],["▁Христ",-13.156951904296877],["▁artikolon",-13.156968116760254],["keh",-13.156973838806152],["▁Revolu",-13.156984329223633],["▁lă",-13.15698528289795],["▁мес",-13.156988143920898],["plaat",-13.157015800476074],["どの",-13.157023429870604],["▁Boj",-13.157055854797363],["▁линия",-13.15705680847168],["▁Kết",-13.157068252563477],["ייע",-13.157123565673828],["▁рахунок",-13.157148361206056],["▁፤",-13.15715789794922],["cionais",-13.157158851623535],["רית",-13.157160758972168],["▁(29)",-13.1571683883667],["▁Matti",-13.15720558166504],["▁Кос",-13.157224655151367],["ئس",-13.1572265625],["離開",-13.15729522705078],["áš",-13.157310485839844],["▁komor",-13.15732765197754],["▁Conce",-13.157337188720703],["▁MH",-13.157342910766602],["▁anderem",-13.1573486328125],["sulat",-13.157356262207031],["滴",-13.157365798950195],["▁භාවිතා",-13.157367706298828],["▁ਅਕਾਲੀ",-13.157382011413574],["利润",-13.157384872436523],["▁
resultaten",-13.157402992248535],["ვია",-13.157408714294434],["▁tắc",-13.157413482666016],["▁рівень",-13.157413482666016],["▁غواړي",-13.157413482666016],["▁ਤਿੰਨ",-13.157413482666016],["▁فراموش",-13.157414436340332],["אמנות",-13.157415390014648],["▁କନକ",-13.157416343688965],["▁වාහන",-13.157416343688965],["▁આવ્યું",-13.157419204711914],["ratzen",-13.157421112060549],["▁kaam",-13.157421112060549],["▁حداقل",-13.157426834106444],["▁amatør",-13.157427787780762],["elte",-13.157428741455078],["▁Tahir",-13.157430648803713],["▁aproximadamente",-13.157431602478027],["▁Belle",-13.157432556152344],["▁iarraidh",-13.157434463500977],["▁અંગે",-13.157447814941406],["▁kitą",-13.157458305358888],["▁त्यसैले",-13.157466888427734],["ٽا",-13.157496452331545],["666",-13.157508850097656],["▁інф",-13.157508850097656],["▁землі",-13.157565116882324],["▁oczy",-13.15758228302002],["▁Григор",-13.15758228302002],["кото",-13.15760898590088],["▁māc",-13.157614707946776],["▁συμφωνία",-13.157615661621094],["▁enti",-13.157625198364258],["▁amfani",-13.157632827758787],["izate",-13.157671928405762],["باس",-13.157679557800291],["praviť",-13.157684326171877],["▁projects",-13.157696723937988],["dziej",-13.157709121704102],["jelo",-13.157710075378418],["▁chwil",-13.157722473144531],["▁pidä",-13.157748222351074],["▁owoc",-13.157756805419922],["▁Source",-13.1577787399292],["etet",-13.157803535461426],["やり",-13.157828330993652],["▁ເລ",-13.157844543457031],["▁memori",-13.157896041870115],["▁pomen",-13.157920837402344],["ις",-13.157929420471191],["▁Second",-13.157934188842772],["▁prezzi",-13.157936096191406],["▁حساس",-13.15793800354004],["imize",-13.157955169677734],["τουρ",-13.15796184539795],["▁ξανα",-13.157967567443848],["جامع",-13.15797233581543],["▁गिर",-13.157992362976074],["▁संप",-13.15800952911377],["▁bagong",-13.158031463623049],["▁Details",-13.158037185668944],["حكم",-13.158041954040527],["▁имао",-13.158045768737791],["kasti",-13.15805435180664],["kora",-13.15806770324707],["grip",-13.15809154510498],["ခ်ိန္",-13.15809726715088],["▁fundo",-13.158100128173828],["ंप",-13.158123016357422],["▁Perse",-13.158180236816406],["çon",-13.158194541931152],["▁ଜି",-13.158201217651367],["▁GM",-13.158207893371582],["28)",-13.158225059509276],["вались",-13.158236503601074],["▁ensi",-13.158263206481934],["jeng",-13.158272743225098],["συν",-13.158303260803224],["▁نتيجة",-13.158353805541992],["bæk",-13.158382415771484],["▁batez",-13.158398628234863],["▁button",-13.158422470092772],["▁popull",-13.158437728881836],["▁finally",-13.158461570739746],["▁tenia",-13.158465385437012],["▁wichtige",-13.15848159790039],["kull",-13.158512115478516],["▁ПА",-13.158517837524414],["▁zagotavlja",-13.15853214263916],["▁parasti",-13.15853786468506],["▁rodzaj",-13.158544540405272],["▁vendosur",-13.158559799194336],["▁நில",-13.158571243286133],["ਹਿਰ",-13.158580780029297],["▁повідом",-13.158589363098145],["▁század",-13.158616065979004],["νοι",-13.158644676208496],["кування",-13.158658981323242],["ఎ",-13.158658981323242],["▁Datenschutz",-13.15868091583252],["株",-13.15868091583252],["▁Congress",-13.158681869506836],["▁yürü",-13.158702850341797],["shmëri",-13.158708572387695],["yish",-13.158717155456545],["▁המע",-13.158719062805176],["▁Ministério",-13.158735275268556],["▁Ontmoet",-13.158735275268556],["▁jednocześnie",-13.158735275268556],["▁mākslas",-13.158735275268556],["▁vogël",-13.158735275268556],["▁Лукашэнка",-13.158735275268556],["▁төлбөр",-13.158735275268556],["▁कोशिश",-13.158735275268556],["▁δυο",-13.158737182617188],["▁εφαρμογή",-13.158737182617188],["▁кылмыш",-13
.158737182617188],["मस",-13.158747673034668],["▁msingi",-13.158753395080566],["▁даргын",-13.158754348754885],["성이",-13.158756256103516],["سبب",-13.158773422241213],["حقق",-13.158781051635742],["▁Regierung",-13.158784866333008],["▁vykonáva",-13.15879249572754],["▁facilmente",-13.158807754516602],["▁Bă",-13.158817291259766],["▁чынам",-13.158819198608398],["▁الرجل",-13.158838272094728],["suhte",-13.15884494781494],["سس",-13.158856391906738],["pokoj",-13.158862113952637],["▁Dik",-13.158875465393066],["▁innehåller",-13.158913612365724],["▁දුර",-13.158916473388672],["▁совершен",-13.158926963806152],["▁fuente",-13.158927917480469],["▁Melaka",-13.158929824829102],["▁fell",-13.158954620361328],["ឹម",-13.158964157104492],["▁baile",-13.158975601196287],["▁излиза",-13.158981323242188],["▁jah",-13.15898895263672],["▁leidžia",-13.159024238586426],["▁harakat",-13.15903663635254],["▁SAW",-13.159046173095703],["ита",-13.15904712677002],["suf",-13.159055709838867],["▁कट",-13.159069061279297],["irane",-13.159079551696776],["rista",-13.159102439880373],["▁ಓದ",-13.159112930297852],["erant",-13.1591157913208],["بون",-13.1591215133667],["▁japan",-13.159131050109863],["▁تقل",-13.159139633178713],["▁нощ",-13.159147262573242],["▁чыгар",-13.15917682647705],["▁חת",-13.159213066101074],["▁vorbei",-13.159215927124023],["▁domain",-13.159217834472656],["▁multum",-13.159219741821287],["ວຽກງານ",-13.15922737121582],["請求",-13.159239768981934],["əmə",-13.15924072265625],["▁बिग",-13.159250259399414],["▁novim",-13.159260749816896],["▁znova",-13.15929889678955],["тарының",-13.159299850463867],["یئر",-13.159335136413574],["▁рыб",-13.159342765808104],["▁allir",-13.159348487854004],["▁آدمی",-13.159354209899902],["▁جهاد",-13.15939235687256],["▁olaj",-13.159408569335938],["▁romp",-13.159418106079102],["▁kakhulu",-13.159423828125],["▁אידישע",-13.159432411193848],["▁premijer",-13.159466743469238],["ЕЙ",-13.1594820022583],["▁единствено",-13.159494400024414],["▁Зв",-13.159501075744627],["こちらの",-13.159512519836426],["▁opleve",-13.159525871276855],["▁terse",-13.159525871276855],["ආ",-13.159528732299805],["高效",-13.159540176391602],["কৃত",-13.159542083740234],["▁müşteri",-13.159555435180664],["▁مختلفة",-13.15955924987793],["▁hakimiyyəti",-13.15958023071289],["▁дей",-13.1596097946167],["▁11.00",-13.159612655639648],["▁секрет",-13.159615516662598],["▁республика",-13.159631729125977],["▁demon",-13.159634590148926],["▁ĉiam",-13.159639358520508],["▁Universit",-13.159640312194824],["عالم",-13.159656524658203],["āka",-13.159669876098633],["ರಾಗಿ",-13.159673690795898],["hidrat",-13.159706115722656],["ັ່ງ",-13.159708976745604],["▁מהם",-13.159725189208984],["ေတြက",-13.159727096557615],["▁държава",-13.159747123718262],["ORG",-13.159750938415527],["▁bless",-13.159754753112791],["▁cesty",-13.159810066223145],["▁договори",-13.15981674194336],["▁dvor",-13.159819602966309],["▁හු",-13.159835815429688],["ნებს",-13.159878730773926],["▁потпис",-13.159893035888672],["▁사용자",-13.15990161895752],["ტს",-13.159953117370604],["49)",-13.159955978393556],["▁thịt",-13.159955978393556],["▁9)",-13.159958839416504],["▁katru",-13.15998649597168],["闪",-13.159997940063477],["繪",-13.160004615783691],["יעה",-13.160018920898438],["陶",-13.1600341796875],["抹",-13.160037994384766],["rée",-13.160038948059082],["漆",-13.160048484802246],["มาตรฐาน",-13.160057067871094],["▁février",-13.16005802154541],["▁قېتىم",-13.160058975219728],["▁настоящее",-13.160059928894045],["▁Zweck",-13.160063743591309],["▁акционер",-13.160063743591309],["▁မိမိ",-13.160066604614258],["▁membentuk",-13.160071372985
84],["▁Tháng",-13.160075187683104],["▁menutup",-13.160076141357422],["өй",-13.160089492797852],["pune",-13.160126686096191],["▁бич",-13.160134315490724],["▁rodiny",-13.160143852233888],["▁stim",-13.160143852233888],["sola",-13.160151481628418],["▁chtěl",-13.16015911102295],["▁չէր",-13.16015911102295],["fry",-13.160167694091797],["▁desidera",-13.160175323486328],["هڻ",-13.160183906555176],["▁වූයේ",-13.160201072692873],["ቻል",-13.16020679473877],["▁programmer",-13.16021728515625],["▁bitamina",-13.160223960876465],["▁जाऊन",-13.16023063659668],["▁зро",-13.160233497619627],["▁znalazł",-13.160235404968262],["ေဖ",-13.16024684906006],["▁ખાસ",-13.160290718078612],["▁420",-13.160292625427246],["όνι",-13.16031265258789],["▁bezig",-13.16031265258789],["ឯក",-13.160348892211914],["▁پابندی",-13.16036605834961],["ക്കുന്നത്",-13.160377502441406],["▁appella",-13.160405158996582],["لاك",-13.160432815551758],["▁KAL",-13.160438537597656],["▁meerdere",-13.160455703735352],["▁произведен",-13.1604585647583],["deng",-13.160470962524414],["▁hre",-13.160479545593262],["▁liha",-13.160480499267578],["▁پارس",-13.160483360290527],["▁Сьогодні",-13.160496711730955],["遭遇",-13.160505294799805],["▁Библи",-13.160513877868652],["▁кампа",-13.160513877868652],["支出",-13.160542488098145],["▁erant",-13.16054630279541],["▁vendas",-13.160561561584473],["▁Bağ",-13.160567283630373],["▁Give",-13.160589218139648],["▁Mono",-13.160601615905762],["čet",-13.160606384277344],["иц",-13.160608291625977],["▁Сим",-13.160615921020508],["▁بیرونی",-13.16062068939209],["▁अग",-13.160625457763672],["▁សារ",-13.160625457763672],["λαν",-13.160627365112305],["▁weerar",-13.160676956176758],["Top",-13.16068172454834],["▁folklor",-13.160686492919922],["▁trauma",-13.160700798034668],["▁сөздер",-13.160704612731934],["чные",-13.160722732543944],["мерно",-13.160725593566896],["▁പെണ്",-13.160775184631348],["▁bhr",-13.160804748535156],["▁választás",-13.160804748535156],["▁tulisi",-13.160811424255373],["▁покрет",-13.160834312438965],["▁صندوق",-13.160877227783203],["▁Решение",-13.160881996154783],["ነበ",-13.160906791687012],["stani",-13.160910606384276],["▁sampun",-13.160913467407228],["▁ocean",-13.16092014312744],["▁Mission",-13.160933494567873],["暴力",-13.160959243774414],["▁Yani",-13.160967826843262],["▁Исус",-13.16098403930664],["▁такую",-13.161003112792969],["▁espace",-13.161035537719728],["▁Siapa",-13.161070823669434],["▁кир",-13.16107177734375],["▁plec",-13.161075592041016],["▁Վա",-13.161076545715332],["▁колон",-13.161078453063965],["▁fjern",-13.161087989807127],["▁sepak",-13.16111660003662],["▁ranking",-13.16111946105957],["ाको",-13.161133766174316],["▁स्टार",-13.161139488220217],["▁görüşü",-13.16114330291748],["pastu",-13.161144256591797],["▁starší",-13.161200523376465],["▁kern",-13.161216735839844],["찾",-13.161219596862791],["εφ",-13.161221504211426],["▁juca",-13.161255836486816],["עשי",-13.16127586364746],["მები",-13.161280632019045],["▁Innovation",-13.161295890808104],["sygdom",-13.16130256652832],["▁miksi",-13.161310195922852],["straff",-13.161314964294434],["▁Проф",-13.161325454711914],["atmosf",-13.16134262084961],["kanje",-13.161357879638672],["▁україн",-13.161357879638672],["崩",-13.16136646270752],["▁çətin",-13.161375045776367],["ዥ",-13.16137981414795],["▁nhiễm",-13.161381721496582],["▁серпня",-13.161381721496582],["▁یونیورسٹی",-13.161381721496582],["▁গ্রেফতার",-13.161381721496582],["▁እንቅስቃሴ",-13.161381721496582],["οιραστείτε",-13.161382675170898],["▁appreciate",-13.161382675170898],["▁универзитет",-13.161382675170898],["▁पसंद",-13.161382675170898],["▁ev
idence",-13.161383628845217],["▁Међутим",-13.161383628845217],["▁රුපියල්",-13.161385536193848],["▁تکمیل",-13.161386489868164],["▁ระบบ",-13.161386489868164],["ຣາ",-13.161388397216797],["▁լավագույն",-13.161388397216797],["▁shkruan",-13.161389350891112],["▁загально",-13.161392211914062],["▁qeydiyyat",-13.161395072937012],["▁Xarici",-13.16140079498291],["▁ochrany",-13.161407470703123],["▁қалып",-13.161423683166504],["▁Tep",-13.16143798828125],["▁ktorom",-13.161442756652832],["înî",-13.16146755218506],["زید",-13.161471366882324],["▁terwyl",-13.16150951385498],["ultima",-13.161511421203612],["▁പണി",-13.16152000427246],["▁теб",-13.161527633666992],["▁Česká",-13.16153621673584],["▁tamamen",-13.161538124084473],["▁nk",-13.161539077758787],["▁Vala",-13.161541938781738],["▁الجميع",-13.161551475524902],["▁ഒന്ന്",-13.161572456359863],["▁없어",-13.16158390045166],["appar",-13.161584854125977],["▁requisitos",-13.161643028259276],["CON",-13.16164493560791],["powiedz",-13.16165828704834],["ytt",-13.16166877746582],["▁gejala",-13.161677360534668],["形势",-13.161697387695312],["▁למשל",-13.161698341369627],["▁មន្ត្រី",-13.16172695159912],["▁Uber",-13.161755561828612],["윤",-13.161779403686523],["စိုး",-13.161787986755373],["▁tôn",-13.161816596984863],["deur",-13.161835670471191],["▁etmə",-13.161846160888672],["▁studiu",-13.161850929260254],["ುತ್ತವೆ",-13.161861419677734],["▁dolgoz",-13.161888122558594],["▁näky",-13.161908149719238],["mizde",-13.161924362182615],["кей",-13.161937713623049],["▁tartalmaz",-13.161954879760742],["▁stali",-13.16195583343506],["▁вопросов",-13.161957740783691],["▁जस्तो",-13.16197109222412],["டம்",-13.161974906921388],["оля",-13.162013053894045],["ански",-13.162014961242676],["▁ହାତ",-13.162047386169434],["▁tvær",-13.162049293518066],["сьць",-13.162075996398926],["шілер",-13.162105560302734],["的一些",-13.162147521972656],["▁Pizza",-13.162154197692873],["språk",-13.162156105041504],["stöðu",-13.162165641784668],["കൊണ്ട",-13.162174224853516],["gyn",-13.162184715270996],["മീ",-13.162189483642578],["tiyê",-13.162217140197754],["▁sank",-13.16221809387207],["▁cazuri",-13.162224769592283],["▁تصویری",-13.162233352661133],["క్కు",-13.162236213684082],["aldeko",-13.162239074707031],["σιμο",-13.16224479675293],["▁hue",-13.162280082702637],["▁HDZ",-13.162310600280762],["▁SPR",-13.16233730316162],["▁زو",-13.16234016418457],["szko",-13.162354469299316],["руша",-13.162354469299316],["▁Marija",-13.162373542785645],["▁ulama",-13.16239070892334],["ämme",-13.162400245666504],["▁λειτουργ",-13.162436485290527],["競",-13.162452697753906],["▁ponudi",-13.162457466125488],["แมว",-13.162470817565918],["правіць",-13.162477493286133],["▁andar",-13.162477493286133],["▁म्या",-13.162481307983398],["▁kanthi",-13.162487030029297],["▁محرم",-13.16249656677246],["▁പറ്റ",-13.162520408630373],["오피",-13.162528038024902],["▁kogemus",-13.162534713745115],["▁ATM",-13.162549018859863],["snes",-13.162557601928713],["▁Verbindung",-13.162561416625977],["▁സന്തോഷ",-13.162574768066406],["▁инте",-13.162589073181152],["tetään",-13.162592887878418],["▁Krim",-13.162614822387695],["▁diyor",-13.16262149810791],["▁rəhbərliyi",-13.162656784057615],["▁ειδικ",-13.162662506103516],["muni",-13.162664413452148],["იყვან",-13.162673950195312],["考察",-13.162689208984377],["退休",-13.16269588470459],["▁ख़",-13.162701606750488],["٧",-13.162708282470703],["▁Goomegle",-13.162708282470703],["▁बताउनुभयो",-13.162708282470703],["▁സെക്രട്ടറി",-13.162708282470703],["값",-13.162708282470703],["▁kesihatan",-13.162710189819336],["▁sähköposti",-13.162711143493652],["▁žádné",-1
3.162715911865234],["▁žiadne",-13.16271686553955],["▁stránku",-13.1627197265625],["▁dimanche",-13.162725448608398],["▁रुपए",-13.162726402282717],["▁Photoshop",-13.16273307800293],["▁नंबर",-13.162734031677246],["▁vysoký",-13.162734985351562],["missão",-13.162737846374512],["▁वर्षीय",-13.162750244140623],["▁canta",-13.162753105163574],["▁obal",-13.162755012512209],["▁našu",-13.162761688232422],["▁ለውጥ",-13.16277313232422],["人と",-13.162793159484863],["▁pakka",-13.162801742553713],["▁karam",-13.162819862365724],["▁rôzne",-13.16282558441162],["конкрет",-13.162829399108888],["ڙن",-13.162833213806152],["▁उम्र",-13.162837028503418],["▁برج",-13.162851333618164],["▁Kyllä",-13.162858963012695],["▁بده",-13.162869453430176],["▁intent",-13.162887573242188],["▁شاگرد",-13.162903785705566],["osság",-13.162930488586426],["▁እንዳለ",-13.162935256958008],["▁sylw",-13.162938117980955],["▁Piet",-13.162956237792969],["ហា",-13.16296100616455],["▁troca",-13.162981033325195],["▁farlig",-13.162992477416992],["ជំ",-13.163005828857422],["აციის",-13.163015365600586],["▁टेस्ट",-13.163020133972168],["▁قور",-13.163021087646484],["▁טי",-13.163055419921877],["▁njo",-13.163066864013672],["▁Wissen",-13.163068771362305],["▁meninggal",-13.16306972503662],["kapital",-13.163076400756836],["▁requi",-13.1630859375],["▁udvikle",-13.163092613220217],["▁Funktion",-13.16309642791748],["▁motion",-13.163115501403809],["▁vietą",-13.163124084472656],["▁stemme",-13.163125038146973],["▁praza",-13.163158416748049],["▁Сре",-13.163161277770996],["間に",-13.163167953491213],["▁قبر",-13.163168907165527],["诉",-13.163171768188477],["xwaz",-13.16318416595459],["කරන",-13.163189888000488],["Tha",-13.16321849822998],["▁слуха",-13.163220405578612],["nee",-13.163248062133787],["▁dret",-13.163263320922852],["▁аудар",-13.163296699523926],["▁klicka",-13.163311958312988],["▁kandid",-13.163312911987305],["▁کامیابی",-13.163322448730469],["▁kumu",-13.163374900817873],["holdt",-13.163375854492188],["▁campus",-13.163376808166504],["▁Sven",-13.163394927978516],["生態",-13.163429260253906],["▁познат",-13.163432121276855],["▁факты",-13.163433074951172],["дија",-13.163450241088867],["▁snabba",-13.163454055786133],["▁ჩემ",-13.163475036621094],["rakstu",-13.1635160446167],["▁үч",-13.163540840148926],["▁Μαρ",-13.163564682006836],["▁УН",-13.163575172424316],["以降",-13.16357707977295],["ຶກ",-13.16359519958496],["▁szerepel",-13.163606643676758],["årige",-13.163636207580566],["▁аналіз",-13.163654327392578],["யல்",-13.163681030273438],["▁назива",-13.16368579864502],["ੈਂ",-13.163710594177246],["▁นัก",-13.163711547851562],["▁dostat",-13.1637601852417],["▁sellega",-13.16379165649414],["▁børne",-13.16380214691162],["στό",-13.16380500793457],["ਾਏ",-13.163811683654783],["▁Elke",-13.163850784301758],["▁језика",-13.163870811462402],["ופה",-13.163914680480955],["▁अं",-13.16397190093994],["咬",-13.16398811340332],["▁ponudb",-13.164002418518066],["▁जांच",-13.164013862609863],["▁რუს",-13.164017677307127],["▁можливо",-13.164026260375977],["សម្រេច",-13.16403579711914],["▁Gàidhlig",-13.164036750793455],["▁Każdy",-13.164036750793455],["▁тэмцээн",-13.164036750793455],["▁আলোচনা",-13.164036750793455],["▁મદદ",-13.164036750793455],["▁그렇게",-13.164036750793455],["▁വാഹന",-13.164037704467772],["indirizzo",-13.16403865814209],["خفض",-13.16404151916504],["▁मैदान",-13.164044380187988],["▁असोज",-13.164053916931152],["▁Njegov",-13.164071083068848],["▁마련",-13.164072036743164],["▁đoán",-13.164079666137695],["▁Jude",-13.16417407989502],["nish",-13.1641845703125],["νών",-13.164188385009766],["▁Kamer",-13.16421604156494
],["謝謝",-13.164230346679688],["▁นอกจากนี้",-13.164233207702637],["▁ရဲ",-13.164236068725586],["▁vendredi",-13.164255142211914],["ДАН",-13.164257049560549],["▁ადრე",-13.164259910583496],["mish",-13.16427230834961],["▁zestaw",-13.164291381835938],["▁peč",-13.164339065551758],["▁अधिकृत",-13.164350509643556],["▁Ellen",-13.164352416992188],["ስር",-13.16436767578125],["mär",-13.164374351501465],["▁بينما",-13.16437530517578],["hlen",-13.164380073547363],["▁Ryan",-13.164384841918944],["▁takia",-13.164406776428224],["▁භ",-13.164421081542969],["patu",-13.164424896240234],["▁consente",-13.164440155029297],["▁ninh",-13.16444969177246],["走到",-13.164456367492676],["▁ສ່ວນ",-13.164458274841309],["▁saattaa",-13.164461135864258],["▁британ",-13.164462089538574],["крива",-13.164487838745115],["ύμ",-13.164510726928713],["▁Biblio",-13.16451644897461],["чного",-13.164590835571287],["▁भएकाले",-13.164615631103516],["▁хап",-13.164623260498049],["▁uod",-13.164633750915527],["ประจําปี",-13.164642333984377],["take",-13.164677619934082],["▁beveik",-13.16468620300293],["▁emoci",-13.16468620300293],["▁jooga",-13.164694786071776],["āru",-13.164715766906738],["▁ଥିଲେ",-13.164719581604004],["▁gorau",-13.164728164672852],["れている",-13.1647310256958],["▁ընդ",-13.164772987365724],["▁бая",-13.164790153503418],["▁gegeben",-13.164803504943848],["ատես",-13.164825439453123],["▁ändra",-13.16482925415039],["▁SWT",-13.164844512939451],["нина",-13.164847373962402],["къ",-13.164856910705566],["▁galo",-13.16486358642578],["fälle",-13.164897918701172],["▁makt",-13.164897918701172],["работка",-13.164902687072754],["▁fikri",-13.16490650177002],["▁ਬਰ",-13.164908409118652],["▁ಕೇಳ",-13.16491985321045],["seadme",-13.164939880371094],["▁Farma",-13.164958000183104],["ntino",-13.164959907531738],["▁Mã",-13.164960861206056],["▁સર",-13.164997100830078],["ត្រូវបាន",-13.16502857208252],["▁പിന്",-13.165045738220217],["▁bashku",-13.16505241394043],["тава",-13.165053367614746],["ஜ்",-13.165093421936035],["सम्",-13.165138244628906],["现在的",-13.165156364440918],["▁našli",-13.16516399383545],["59)",-13.165176391601562],["▁බුදු",-13.165217399597168],["ांसाठी",-13.165238380432127],["▁simpel",-13.165246963500977],["ნს",-13.165276527404783],["▁הגיע",-13.165284156799316],["▁bonito",-13.16529655456543],["帅",-13.165305137634276],["6000",-13.165306091308594],["ბას",-13.165307998657228],["▁Mwana",-13.165311813354492],["▁materie",-13.165313720703123],["的所有",-13.165334701538086],["stare",-13.1653470993042],["▁yhteen",-13.165361404418944],["ពិសេស",-13.165365219116213],["▁سقوط",-13.165366172790527],["궁",-13.165366172790527],["▁peníze",-13.16536808013916],["▁गुजरात",-13.165377616882324],["▁ಲ",-13.16538143157959],["ພຣະ",-13.16538429260254],["▁подальш",-13.165385246276855],["▁ביז",-13.16539192199707],["▁místě",-13.165392875671388],["hill",-13.165393829345703],["▁keuken",-13.16539478302002],["▁Respublika",-13.165401458740234],["▁atveju",-13.1654052734375],["▁ақша",-13.165431022644045],["альным",-13.16543674468994],["▁jesteśmy",-13.165438652038574],["▁Vind",-13.165483474731444],["HH",-13.165509223937988],["▁Alm",-13.165521621704102],["▁участников",-13.165522575378418],["▁боље",-13.165528297424316],["check",-13.16553020477295],["▁Apart",-13.165535926818848],["▁סוג",-13.165549278259276],["▁devrait",-13.16555881500244],["еге",-13.16556167602539],["▁купить",-13.16556167602539],["сій",-13.165570259094238],["1500",-13.165581703186035],["▁دسترسی",-13.1655855178833],["▁additional",-13.165596961975098],["▁loved",-13.165597915649414],["▁folder",-13.16559886932373],["ങ്",-13.165618896484377],
["▁ООН",-13.165619850158691],["▁الفلسطينية",-13.16562271118164],["երեւ",-13.165630340576172],["▁stál",-13.165637969970703],["▁Quý",-13.165650367736816],["▁മുറി",-13.16566562652588],["素材",-13.16566562652588],["▁pirat",-13.165678977966309],["မရှိ",-13.16568088531494],["▁байқа",-13.165693283081056],["▁Horn",-13.165706634521484],["proses",-13.165745735168455],["▁принят",-13.165745735168455],["▁gegnum",-13.165748596191406],["▁krypt",-13.165753364562988],["▁huden",-13.165802001953123],["▁raba",-13.165821075439451],["解放",-13.16582202911377],["कर्मी",-13.165836334228516],["▁تحریر",-13.16588020324707],["▁Devi",-13.165895462036133],["▁leje",-13.16590976715088],["▁turne",-13.165942192077637],["▁Handels",-13.165947914123535],["▁gare",-13.165952682495115],["िः",-13.166020393371582],["▁faham",-13.166021347045898],["haba",-13.166022300720217],["חול",-13.166022300720217],["JAN",-13.16604995727539],["▁Alisema",-13.166080474853516],["▁wedstrijd",-13.16608715057373],["áciou",-13.166088104248049],["▁كثيرا",-13.16609001159668],["▁gogo",-13.166101455688477],["▁природни",-13.166111946105955],["▁allein",-13.16612434387207],["hafa",-13.166125297546388],["▁lucrurile",-13.166139602661133],["ခြား",-13.166144371032717],["ეში",-13.166150093078612],["लाय",-13.166172981262209],["▁tehnik",-13.166226387023926],["රුව",-13.16622829437256],["▁Olet",-13.166251182556152],["kowych",-13.166264533996582],["▁असार",-13.166264533996582],["రోజు",-13.166267395019531],["▁لیست",-13.16629123687744],["▁청소년",-13.166295051574709],["▁Penge",-13.166306495666504],["ØR",-13.166308403015137],["続け",-13.166308403015137],["▁layak",-13.166318893432615],["▁běž",-13.16633415222168],["▁bày",-13.166372299194336],["▁پې",-13.166375160217283],["▁haga",-13.16637897491455],["위를",-13.16641330718994],["viä",-13.166417121887209],["▁случая",-13.166427612304688],["leiding",-13.16643524169922],["เรีย",-13.16643524169922],["miga",-13.166437149047852],["▁162",-13.166443824768066],["▁रस",-13.166446685791016],["▁luta",-13.166455268859863],["teitä",-13.166463851928713],["▁158",-13.16646957397461],["▁Libro",-13.166474342346191],["விட்டு",-13.16648006439209],["▁недела",-13.166496276855469],["ಜನ",-13.166501998901367],["▁skola",-13.16650676727295],["ồi",-13.16652011871338],["තන",-13.16652774810791],["pose",-13.166531562805176],["究",-13.166539192199709],["▁klara",-13.166542053222656],["taro",-13.16655158996582],["▁duro",-13.166556358337402],["▁വിഷയ",-13.166590690612791],["ène",-13.166629791259766],["ặ",-13.166638374328612],["▁drugega",-13.166643142700195],["ጠና",-13.166668891906738],["▁Հայաստանը",-13.166668891906738],["▁bát",-13.166671752929688],["▁إضافة",-13.166675567626951],["▁التاريخ",-13.16667652130127],["อีกด้วย",-13.166681289672852],["▁chvíli",-13.16669750213623],["▁encuentro",-13.16669750213623],["▁кожного",-13.16669750213623],["▁Բոլոր",-13.16669750213623],["▁ಅಭಿವೃದ್ಧಿ",-13.16669750213623],["▁täytyy",-13.166702270507812],["▁ପୁଅ",-13.166703224182127],["▁tecnica",-13.166712760925291],["▁అమ్మాయి",-13.16671371459961],["▁ypač",-13.166717529296877],["▁іншыя",-13.166722297668455],["▁dažnai",-13.166726112365724],["▁гэмт",-13.166726112365724],["▁निधन",-13.166728019714355],["ນໍ້າ",-13.166728973388672],["▁viesnīcas",-13.166733741760254],["▁корпорац",-13.166740417480469],["▁egz",-13.166742324829102],["▁luksus",-13.166769981384276],["▁crescita",-13.16678237915039],["▁igazán",-13.166784286499023],["▁Salt",-13.166827201843262],["▁رکھ",-13.166827201843262],["raidh",-13.166830062866213],["▁Tramp",-13.166839599609377],["▁μπορείτε",-13.166845321655272],["▁permettre",-13.16685390472412],["
раду",-13.166875839233398],["ਾਲ",-13.166885375976562],["średni",-13.166895866394045],["▁Item",-13.166898727416992],["▁yola",-13.166926383972168],["▁neljä",-13.166934967041016],["▁Loch",-13.166939735412598],["▁ianao",-13.166973114013672],["▁fight",-13.166997909545898],["жек",-13.167034149169922],["▁Iako",-13.167064666748049],["▁хамгаалах",-13.167067527770996],["▁Vương",-13.167078018188477],["kaza",-13.167080879211426],["แห",-13.167091369628906],["โรง",-13.167096138000488],["dham",-13.167102813720703],["▁клин",-13.167112350463867],["USE",-13.167126655578612],["מורה",-13.167126655578612],["▁těch",-13.167129516601562],["mbaj",-13.167142868041992],["ສາຍ",-13.167144775390623],["▁ເຂດ",-13.16716766357422],["styret",-13.167171478271484],["住了",-13.167194366455078],["heer",-13.167216300964355],["▁خاصی",-13.167226791381836],["ωσης",-13.167231559753418],["ikud",-13.167243003845217],["vais",-13.16724681854248],["നിന്നു",-13.167256355285645],["цар",-13.167272567749023],["تحويل",-13.167313575744627],["Col",-13.167320251464844],["ably",-13.16733455657959],["▁Tito",-13.167339324951172],["▁чужд",-13.167370796203612],["▁Darba",-13.167375564575195],["決め",-13.16738224029541],["▁сонс",-13.167407035827637],["▁olie",-13.167412757873535],["▁tarina",-13.1674165725708],["▁λεπτά",-13.167430877685549],["▁связан",-13.167455673217772],["kosi",-13.16746711730957],["ІВ",-13.167512893676758],["▁kehitys",-13.16754150390625],["▁faa",-13.167555809020996],["▁juaj",-13.167562484741213],["▁Schw",-13.16758632659912],["спри",-13.167609214782717],["høj",-13.167620658874512],["▁Respublikos",-13.167620658874512],["▁lõi",-13.16763687133789],["opter",-13.167644500732422],["ncin",-13.167656898498535],["▁тел",-13.167667388916016],["▁Besucher",-13.167692184448242],["ურის",-13.167723655700684],["▁мове",-13.16774559020996],["▁vairs",-13.167774200439451],["työn",-13.16778564453125],["▁lacag",-13.167797088623049],["▁මාව",-13.167829513549805],["strøm",-13.167834281921388],["▁vedo",-13.167837142944336],["▁takoj",-13.167854309082031],["direkt",-13.167898178100586],["▁versio",-13.167922973632812],["▁18+",-13.16793441772461],["沖",-13.167954444885254],["▁curent",-13.167957305908203],["▁Antal",-13.167963981628418],["統計",-13.167980194091797],["優秀",-13.168011665344238],["acağız",-13.168025016784668],["▁bilgisayar",-13.168030738830566],["▁będę",-13.168030738830566],["▁žmogaus",-13.168030738830566],["▁байгууллагын",-13.168031692504885],["▁మహేష్",-13.168031692504885],["▁ಕ್ಲಿಕ್",-13.168031692504885],["▁password",-13.168039321899414],["▁Warum",-13.168045997619627],["▁повинен",-13.168048858642578],["▁барысында",-13.168050765991213],["▁μήνα",-13.168063163757324],["រៀបចំ",-13.16806411743164],["▁இசை",-13.16806411743164],["▁Nasıl",-13.168070793151855],["▁повећа",-13.168073654174805],["▁추진",-13.168086051940918],["▁sociis",-13.16809368133545],["▁dema",-13.168098449707031],["▁Vsak",-13.16810131072998],["▁Også",-13.168110847473145],["สีดํา",-13.16811752319336],["▁belső",-13.168126106262209],["▁държави",-13.168150901794434],["védelem",-13.168153762817385],["กรม",-13.168185234069824],["▁يد",-13.168185234069824],["▁වර්ග",-13.168185234069824],["▁етеді",-13.168187141418455],["▁Pakatan",-13.168192863464355],["க்கோ",-13.168204307556152],["▁trudno",-13.168208122253418],["▁اندازی",-13.168211936950684],["▁09:00",-13.16821575164795],["▁оти",-13.168231010437012],["一般的",-13.168232917785645],["▁totalt",-13.168235778808594],["▁собра",-13.168255805969238],["名稱",-13.168256759643556],["▁ulica",-13.16827392578125],["▁normaal",-13.168277740478516],["▁KOR",-13.16828441619873],["ତ୍ତ",-13.
168285369873049],["▁Quang",-13.168296813964844],["▁vc",-13.168296813964844],["▁longue",-13.168313026428224],["teiro",-13.168354034423828],["▁leath",-13.16836166381836],["▁частина",-13.16837215423584],["брал",-13.168405532836914],["▁palavras",-13.168405532836914],["▁accent",-13.168421745300291],["аран",-13.168438911437988],["▁trus",-13.16849136352539],["▁finibus",-13.16849422454834],["▁kelgan",-13.168496131896973],["▁ആക",-13.168502807617188],["klen",-13.168508529663086],["▁holda",-13.1685209274292],["到的",-13.168522834777832],["▁suferi",-13.16854763031006],["iako",-13.168557167053224],["▁controlar",-13.16856575012207],["一定的",-13.168584823608398],["黄金",-13.168588638305664],["▁oferte",-13.16860294342041],["▁laiką",-13.168603897094728],["▁masturb",-13.168611526489258],["Shi",-13.168612480163574],["▁Джо",-13.168644905090332],["паў",-13.168657302856444],["▁horrek",-13.16866397857666],["▁núi",-13.16869831085205],["Met",-13.168709754943848],["▁конф",-13.16872215270996],["▁brain",-13.168734550476074],["▁وبين",-13.168743133544922],["▁قیام",-13.168752670288086],["全て",-13.16880226135254],["▁stok",-13.1688232421875],["inek",-13.168838500976562],["▁konven",-13.168865203857422],["▁rodin",-13.168869018554688],["▁놀",-13.16889476776123],["ற்றி",-13.168903350830078],["godišnji",-13.168927192687988],["▁levert",-13.168951988220217],["تۇرۇش",-13.16895866394043],["ियाँ",-13.16896152496338],["▁សម",-13.168968200683594],["לימוד",-13.16897201538086],["▁alveg",-13.16900634765625],["ЕНИ",-13.169011116027832],["ਦੇਵ",-13.169013023376465],["▁Drugi",-13.169034004211426],["▁ಅರ",-13.16903591156006],["Ji",-13.169090270996094],["ېدو",-13.16909122467041],["szę",-13.169135093688965],["▁dedicato",-13.16915225982666],["▁Archi",-13.169157028198242],["▁157",-13.16916275024414],["▁согласи",-13.169211387634276],["іліп",-13.16923999786377],["minum",-13.169249534606934],["▁kí",-13.169254302978516],["▁tersi",-13.169267654418944],["▁verilməsi",-13.169299125671388],["▁желе",-13.169309616088867],["tirib",-13.169319152832031],["パソコン",-13.169344902038574],["агы",-13.169357299804688],["▁குடும்ப",-13.169363021850586],["ਯੋਗ",-13.169365882873535],["▁pasaulyje",-13.169365882873535],["▁फैसला",-13.169365882873535],["▁ಲೇಖನ",-13.169365882873535],["▁тобто",-13.169366836547852],["▁ಧರ್ಮ",-13.169366836547852],["▁aslında",-13.1693696975708],["гү",-13.169374465942385],["ໄທ",-13.169376373291016],["▁پرونده",-13.169390678405762],["▁gəlib",-13.169404983520508],["▁studija",-13.169411659240724],["▁risposta",-13.169413566589355],["▁ifadə",-13.169418334960938],["▁خرچ",-13.169434547424316],["▁Când",-13.169442176818848],["lianza",-13.16944694519043],["▁эсепте",-13.16944694519043],["▁folos",-13.169447898864746],["实验",-13.169447898864746],["▁Hidup",-13.16945457458496],["MOR",-13.16946029663086],["нство",-13.16946506500244],["▁պայմաններ",-13.169468879699709],["▁enw",-13.16947078704834],["leka",-13.169480323791504],["外部",-13.169480323791504],["お気軽に",-13.169507026672363],["▁твој",-13.169513702392578],["àries",-13.169530868530272],["▁підвищення",-13.169537544250488],["▁expon",-13.169538497924805],["▁książki",-13.169540405273438],["ຫາຍ",-13.169546127319336],["▁5,5",-13.169556617736816],["agd",-13.169557571411133],["ტების",-13.16955852508545],["▁കൊച്ചി",-13.16956615447998],["▁Ever",-13.169567108154297],["לם",-13.169580459594728],["▁cilvēka",-13.169585227966309],["יזם",-13.169597625732422],["ведува",-13.16960334777832],["▁આગળ",-13.169612884521484],["▁anna",-13.169615745544434],["▁Kdo",-13.169625282287598],["▁هتي",-13.16962718963623],["▁шир",-13.169631004333496],["▁1⁄2",-13.169
633865356444],["nimada",-13.169636726379396],["▁Where",-13.169638633728027],["▁zorgt",-13.169638633728027],["▁cima",-13.169677734375],["päť",-13.169683456420898],["▁αυτούς",-13.16969871520996],["primer",-13.169733047485352],["▁изви",-13.1697359085083],["▁مغز",-13.169739723205566],["▁spontan",-13.169744491577148],["▁نداشته",-13.169763565063477],["▁trevlig",-13.16977310180664],["Tan",-13.169782638549805],["▁shan",-13.169811248779297],["▁இடம்",-13.169814109802246],["নায়",-13.169816970825195],["▁дроб",-13.16982078552246],["▁போன",-13.169827461242676],["мба",-13.169841766357422],["Plan",-13.169854164123535],["▁pita",-13.169856071472168],["husika",-13.169862747192385],["קאַ",-13.169867515563965],["▁recently",-13.169869422912598],["fart",-13.169870376586914],["▁енергия",-13.169878959655762],["切り",-13.169880867004396],["▁experts",-13.169883728027344],["▁எண்ண",-13.169886589050291],["▁(=",-13.169907569885254],["RW",-13.169914245605469],["amies",-13.169940948486328],["▁ହଜାର",-13.169955253601074],["ピー",-13.169960021972656],["▁urbano",-13.169962882995604],["ქცე",-13.169988632202148],["xio",-13.169999122619627],["grense",-13.170001983642578],["تبع",-13.170010566711426],["儲",-13.17001247406006],["linger",-13.17003345489502],["▁Diploma",-13.170037269592283],["GL",-13.1700439453125],["១១",-13.17004680633545],["▁buong",-13.170069694519045],["išką",-13.170071601867676],["ίστηκε",-13.170093536376951],["мати",-13.170096397399902],["Ś",-13.17009735107422],["▁دعو",-13.17011547088623],["▁қабылдау",-13.170144081115724],["▁východ",-13.170148849487305],["айыр",-13.170196533203123],["fere",-13.170198440551758],["▁Dah",-13.170202255249023],["▁конкурса",-13.1702241897583],["▁느",-13.170231819152832],["▁olgan",-13.170286178588867],["tzailea",-13.17029857635498],["▁სარ",-13.170317649841309],["हन",-13.170321464538574],["影像",-13.17034149169922],["boll",-13.170394897460938],["▁Ža",-13.170402526855469],["▁seviye",-13.170458793640137],["lét",-13.170472145080566],["▁Siis",-13.170476913452148],["rase",-13.170501708984377],["▁universo",-13.170514106750488],["ದಾರ",-13.170531272888184],["▁produkti",-13.170544624328612],["▁кетти",-13.170584678649902],["▁proche",-13.170594215393066],["▁trovato",-13.170598983764648],["▁kasutaja",-13.170634269714355],["▁Muse",-13.170646667480469],["▁prosper",-13.170653343200684],["▁csökkent",-13.170660972595217],["▁چور",-13.170662879943848],["▁Sandra",-13.170672416687012],["▁밀",-13.170676231384276],["坪",-13.170676231384276],["▁ishla",-13.170681953430176],["──",-13.170689582824709],["▁Каз",-13.170697212219238],["เจริญ",-13.170702934265137],["▁Warszawie",-13.170702934265137],["▁гораздо",-13.170702934265137],["▁ਅਸੀਂ",-13.170702934265137],["▁ശരീര",-13.170702934265137],["▁tècnic",-13.170703887939451],["▁தனது",-13.170703887939451],["▁Struktur",-13.17070484161377],["▁[+]",-13.17070484161377],["▁döyüş",-13.17070484161377],["▁сюжет",-13.170706748962402],["▁Câmara",-13.170708656311035],["▁Från",-13.170710563659668],["▁valmistu",-13.170716285705566],["ពិធី",-13.170722007751465],["▁עושה",-13.170732498168944],["▁ඊයේ",-13.170732498168944],["oze",-13.170734405517578],["▁bulshada",-13.170735359191896],["▁balo",-13.170737266540527],["▁σημείο",-13.170737266540527],["▁Prozess",-13.170745849609377],["▁رسائی",-13.170753479003906],["đenje",-13.170761108398438],["▁ତାଙ୍କୁ",-13.17076587677002],["▁Conserva",-13.170782089233398],["sztó",-13.170795440673828],["▁Јас",-13.170795440673828],["▁oldin",-13.170801162719728],["padne",-13.170806884765623],["▁Білім",-13.170806884765623],["▁dostaw",-13.170808792114258],["kirî",-13.1708240509
0332],["▁integrat",-13.170866012573242],["znaj",-13.170868873596191],["▁eficient",-13.170872688293455],["▁לאתר",-13.17088222503662],["نە",-13.170894622802734],["▁låne",-13.170896530151367],["тельного",-13.170942306518556],["isana",-13.1709566116333],["MES",-13.170965194702148],["▁Solu",-13.170978546142578],["▁Praze",-13.170980453491213],["▁$2",-13.170988082885742],["数学",-13.171016693115234],["▁Oggi",-13.171031951904297],["▁biste",-13.171045303344728],["伸",-13.171058654785156],["▁ποι",-13.171059608459473],["ေမး",-13.171070098876951],["▁mengaku",-13.171070098876951],["єкт",-13.171072959899902],["▁einzu",-13.171097755432127],["050",-13.171103477478027],["قدس",-13.17110538482666],["ecký",-13.171106338500977],["▁Jav",-13.171110153198242],["▁Math",-13.17112159729004],["看见",-13.171149253845217],["▁Osim",-13.171150207519531],["luas",-13.17115592956543],["▁artikkel",-13.171161651611328],["▁Није",-13.171245574951172],["▁ծրագրի",-13.17125415802002],["眼前",-13.171256065368652],["лини",-13.171276092529297],["▁народження",-13.17130184173584],["▁плана",-13.17131805419922],["▁түс",-13.171331405639648],["▁begira",-13.171339988708496],["▁kolekci",-13.17136001586914],["▁اصلي",-13.171374320983888],["▁tampa",-13.171380043029783],["ทิ้ง",-13.171394348144531],["▁Andersen",-13.171416282653809],["▁Gross",-13.171419143676758],["▁өнгө",-13.171422958374023],["จะทําให้",-13.17143726348877],["تغير",-13.171460151672363],["▁MAC",-13.171467781066896],["▁stanno",-13.171483993530272],["ICH",-13.171488761901855],["▁pry",-13.17149543762207],["mediği",-13.17149829864502],["قوى",-13.171509742736816],["ДО",-13.171510696411133],["riana",-13.171534538269045],["ТОР",-13.171541213989258],["тельная",-13.171576499938965],["後の",-13.171619415283203],["▁SOL",-13.171621322631836],["▁cuvinte",-13.171624183654783],["▁ვინც",-13.171632766723633],["▁pater",-13.17164421081543],["▁svých",-13.171676635742188],["عج",-13.17168140411377],["ھل",-13.171697616577148],["▁മാന",-13.17170524597168],["▁tişt",-13.171749114990234],["▁πρω",-13.171753883361816],["τούν",-13.17176342010498],["▁திற",-13.171774864196776],["ငယ္",-13.171783447265623],["▁nafar",-13.17178726196289],["▁hout",-13.171804428100586],["▁ћемо",-13.171812057495115],["завис",-13.171833038330078],["ພາ",-13.171849250793455],["▁prispe",-13.171853065490724],["ေကာင္",-13.171856880187988],["▁istu",-13.171856880187988],["▁څوک",-13.17187213897705],["giz",-13.171890258789062],["▁məsələlər",-13.171897888183594],["٢",-13.171907424926758],["▁puncte",-13.171954154968262],["ATO",-13.17195987701416],["▁sice",-13.17196273803711],["йшли",-13.171963691711426],["പോലെ",-13.17196559906006],["verte",-13.171974182128906],["覆盖",-13.171977043151855],["▁destination",-13.171979904174805],["▁vises",-13.17198371887207],["▁સૂ",-13.17198371887207],["gp",-13.171990394592283],["سمع",-13.171998977661133],["▁ferro",-13.172006607055664],["▁potencia",-13.172019004821776],["வெ",-13.172024726867676],["ΐ",-13.172040939331056],["▁Maendeleo",-13.172041893005373],["▁стоимости",-13.172041893005373],["▁տարեկան",-13.172041893005373],["▁이메일",-13.172041893005373],["казала",-13.172042846679688],["▁հատուկ",-13.17204475402832],["▁வகை",-13.172045707702637],["lämpö",-13.17204761505127],["▁laquelle",-13.172051429748535],["▁لوی",-13.172051429748535],["/07",-13.1720552444458],["▁ücretsiz",-13.172061920166016],["▁تحديد",-13.172064781188965],["▁אמ",-13.172067642211914],["▁örök",-13.17207145690918],["▁hücum",-13.172073364257812],["▁Prae",-13.17208194732666],["▁dəyər",-13.172090530395508],["▁δανε",-13.17209243774414],["▁ናቸው።",-13.172094345092772],["▁algún
s",-13.172099113464355],["▁แม่",-13.17210578918457],["▁दलित",-13.172107696533203],["▁ديگر",-13.17210865020752],["തിനെ",-13.172114372253418],["▁скандал",-13.172117233276367],["▁följande",-13.172124862670898],["▁Nicola",-13.172127723693848],["ለህ",-13.172147750854492],["▁privé",-13.172179222106934],["▁interneto",-13.172201156616213],["museum",-13.172205924987791],["▁فلسطين",-13.172229766845703],["ειο",-13.172235488891602],["▁igazi",-13.17223834991455],["слід",-13.172248840332031],["▁Koz",-13.172264099121094],["▁최고의",-13.172270774841309],["нау",-13.17227840423584],["туры",-13.172280311584473],["▁widzi",-13.172283172607422],["▁مارچ",-13.17228889465332],["▁Глас",-13.172292709350586],["دفع",-13.17230224609375],["▁Grab",-13.172310829162598],["▁huske",-13.172310829162598],["▁dhá",-13.172327041625977],["▁студентів",-13.172341346740724],["▁अति",-13.172362327575684],["७५",-13.172412872314451],["▁piz",-13.17241668701172],["▁ऐक",-13.17243766784668],["▁bole",-13.172443389892578],["Ant",-13.172456741333008],["▁ਪੁ",-13.172468185424805],["لەرگە",-13.17246913909912],["PJ",-13.172483444213867],["▁barat",-13.172483444213867],["▁světa",-13.172508239746094],["மீ",-13.172526359558104],["▁Курс",-13.172536849975586],["ksin",-13.17253875732422],["שימה",-13.17255401611328],["▁ฮ",-13.172563552856444],["▁interesgarri",-13.17259120941162],["ଣ୍ଡା",-13.172595024108888],["зія",-13.172601699829102],["писати",-13.172636985778809],["▁گران",-13.172640800476074],["▁तय",-13.172640800476074],["▁затова",-13.172645568847656],["чени",-13.172662734985352],["▁Гру",-13.172670364379885],["यता",-13.172673225402832],["▁ٻار",-13.172675132751465],["ይት",-13.172710418701172],["▁případ",-13.172724723815918],["▁joo",-13.17275047302246],["▁ברי",-13.17276382446289],["vásár",-13.172794342041016],["jian",-13.172796249389648],["Shop",-13.172801971435549],["▁))",-13.172809600830078],["winkel",-13.17282772064209],["ზია",-13.172844886779783],["דיו",-13.172846794128418],["▁жағдайлар",-13.172861099243164],["retter",-13.172863960266112],["РЕД",-13.17289924621582],["योजन",-13.172903060913086],["▁cair",-13.17291259765625],["▁analyser",-13.172922134399414],["▁ಚಿತ್ರದ",-13.172945022583008],["čanje",-13.17294692993164],["1994",-13.172966957092283],["όρα",-13.172972679138184],["▁interact",-13.172974586486816],["▁verbal",-13.172978401184082],["yım",-13.172983169555664],["▁ставки",-13.17298984527588],["▁деді",-13.17301082611084],["▁ПРЕ",-13.173014640808104],["▁ගි",-13.173030853271484],["ĝi",-13.173067092895508],["trik",-13.17310619354248],["▁თემა",-13.173112869262695],["홍",-13.173117637634276],["▁ಮಹ",-13.173124313354492],["mom",-13.173141479492188],["jie",-13.1731538772583],["panga",-13.173171043395996],["▁řád",-13.173185348510742],["▁güvenlik",-13.17319393157959],["ໄລ",-13.173194885253906],["άκ",-13.173225402832031],["ლას",-13.173242568969728],["▁آقا",-13.173251152038574],["OTE",-13.173255920410156],["▁welat",-13.173261642456056],["යකින්",-13.173272132873535],["▁degusta",-13.173274040222168],["▁MIT",-13.17328643798828],["日下午",-13.173288345336914],["有一些",-13.173311233520508],["借り",-13.173317909240724],["▁weld",-13.173324584960938],["လည္",-13.173330307006836],["煤",-13.173333168029783],["习近平",-13.173344612121582],["撒",-13.173346519470217],["▁వెళ్ల",-13.17337417602539],["▁berfungsi",-13.173381805419922],["▁qualcuno",-13.173381805419922],["▁Σεπτεμβρίου",-13.173381805419922],["▁₪",-13.173383712768556],["▁જન્મ",-13.173385620117188],["▁fortsätta",-13.173391342163086],["▁сообщил",-13.173392295837402],["▁erityisesti",-13.17339324951172],["▁perioden",-13.173395156860352],["▁
двигател",-13.173398971557615],["ുന്നില്ല",-13.173402786254885],["▁ছাত্র",-13.173402786254885],["▁התובע",-13.173406600952148],["▁Микола",-13.17340850830078],["flan",-13.17341136932373],["grupper",-13.17341136932373],["▁출시",-13.17341136932373],["▁vegades",-13.173417091369627],["飲み",-13.173418998718262],["▁кылуу",-13.173421859741213],["▁دینی",-13.173432350158691],["▁koristiti",-13.173439979553224],["▁latihan",-13.173449516296388],["▁आमा",-13.1734619140625],["▁хэмжээний",-13.173465728759766],["▁ଶା",-13.173468589782717],["GF",-13.173470497131348],["▁zondag",-13.173479080200195],["▁îți",-13.173481941223145],["еров",-13.17349338531494],["▁regim",-13.173500061035156],["▁anatin",-13.173503875732422],["ällä",-13.1735200881958],["▁roba",-13.173531532287598],["▁logis",-13.17353343963623],["ဆံ",-13.173550605773926],["▁ගත්තා",-13.17355728149414],["▁eliminar",-13.173566818237305],["▁koster",-13.173566818237305],["▁Economic",-13.173588752746582],["បន្ត",-13.173591613769531],["treff",-13.173602104187012],["▁კან",-13.17360782623291],["▁nogal",-13.173608779907228],["iyaha",-13.173630714416504],["ÃO",-13.173653602600098],["▁Malo",-13.173657417297363],["▁መሪ",-13.173661231994627],["▁ξ",-13.173677444458008],["針",-13.173678398132324],["HN",-13.173704147338867],["विध",-13.17371940612793],["▁разные",-13.173726081848145],["▁Stalin",-13.173772811889648],["▁soldi",-13.173785209655762],["▁méret",-13.173799514770508],["▁Олег",-13.173813819885254],["的名字",-13.173840522766112],["▁Etiketler",-13.173858642578123],["Как",-13.173940658569336],["习",-13.173995018005373],["৭",-13.173999786376951],["▁kommunikation",-13.174002647399902],["АЖ",-13.174015998840332],["▁তাই",-13.174032211303713],["▁trik",-13.174059867858888],["염",-13.174065589904783],["▁फु",-13.174073219299316],["علی",-13.174074172973633],["ολό",-13.174077033996582],["▁basın",-13.174077033996582],["welt",-13.174083709716797],["ელო",-13.174083709716797],["▁ነገ",-13.174108505249023],["的安全",-13.174108505249023],["▁međunarodn",-13.174112319946287],["▁klikke",-13.174114227294922],["▁dosť",-13.174164772033691],["▁Ах",-13.17418384552002],["loze",-13.174184799194336],["▁ऊपर",-13.174205780029297],["▁сард",-13.174222946166992],["▁частка",-13.174222946166992],["បុក",-13.174233436584473],["▁meklē",-13.174235343933104],["брани",-13.174261093139648],["әу",-13.174264907836914],["▁दिएका",-13.17426586151123],["▁Fryske",-13.17426872253418],["▁ಭಾರತದ",-13.174270629882812],["runk",-13.17429542541504],["אמא",-13.174299240112305],["▁lyh",-13.174301147460938],["ληψη",-13.174304962158203],["анні",-13.17433261871338],["▁bero",-13.174341201782228],["girê",-13.174345970153809],["類型",-13.174388885498049],["NDE",-13.174395561218262],["▁condiciona",-13.174398422241213],["ŵr",-13.17441463470459],["bildung",-13.174418449401855],["一是",-13.174447059631348],["▁schoon",-13.17446517944336],["▁durée",-13.174467086791992],["kaardi",-13.174508094787598],["▁создан",-13.17451000213623],["▁Dünyanı",-13.174510955810549],["▁četvrt",-13.17451286315918],["▁процеси",-13.174522399902344],["raamat",-13.174525260925291],["▁ដឹង",-13.174542427062988],["▁болу",-13.174543380737305],["prob",-13.174549102783203],["▁aracı",-13.174549102783203],["ದಂತೆ",-13.17455005645752],["лку",-13.17455768585205],["▁palīdz",-13.17456340789795],["▁productes",-13.174568176269531],["▁अमर",-13.174568176269531],["oppi",-13.17457389831543],["▁paraqit",-13.174609184265137],["▁వాళ్ళ",-13.174612998962402],["▁увидел",-13.17461395263672],["▁förra",-13.174616813659668],["ເຫ",-13.174617767333984],["diye",-13.174627304077148],["▁teña",-13.17463493347168],["
▁Pilot",-13.174652099609377],["浓",-13.174662590026855],["grand",-13.174665451049805],["წერა",-13.174697875976562],["拒绝",-13.17471981048584],["▁પસંદ",-13.174720764160156],["▁ծառայության",-13.174724578857422],["밥",-13.174724578857422],["▁pàgina",-13.174729347229004],["▁importância",-13.174732208251951],["▁geste",-13.174734115600586],["▁izmantojot",-13.174736976623535],["tuotteet",-13.174737930297852],["еси",-13.174745559692385],["▁करिब",-13.174747467041016],["▁ಮೈ",-13.174753189086914],["▁3.000",-13.174763679504396],["▁Makan",-13.174764633178713],["▁турмуш",-13.174765586853027],["ehto",-13.17476749420166],["▁birkaç",-13.174806594848633],["▁نهایت",-13.174824714660645],["▁могуће",-13.174829483032228],["กําจัด",-13.174845695495604],["▁لینے",-13.174847602844238],["▁камера",-13.17485523223877],["▁vəziyyət",-13.174859046936035],["我知道",-13.174882888793944],["▁individuals",-13.174890518188477],["▁иск",-13.174921989440918],["▁dodatno",-13.17495822906494],["▁Овај",-13.174959182739258],["▁presidenti",-13.174962043762209],["вэр",-13.174971580505373],["▁пресс",-13.174978256225586],["▁бою",-13.175030708312988],["ovic",-13.175033569335938],["בנות",-13.175048828125],["nggal",-13.175057411193848],["活躍",-13.175061225891112],["▁adamı",-13.17506504058838],["是指",-13.175084114074709],["▁vrsta",-13.175092697143556],["▁Komisija",-13.175093650817873],["ფე",-13.175143241882324],["▁नप",-13.175151824951172],["▁állít",-13.175152778625488],["▁capacitat",-13.17515468597412],["ottaa",-13.175161361694336],["ေျဖ",-13.175179481506348],["▁болчу",-13.17518138885498],["▁Нийслэлий",-13.17519474029541],["védő",-13.175213813781738],["▁tsara",-13.175220489501951],["▁Berna",-13.17523193359375],["▁segona",-13.17526912689209],["УЛ",-13.175275802612305],["▁godini",-13.175281524658203],["▁ກໍາ",-13.175301551818848],["▁lieta",-13.175312042236328],["▁temper",-13.175312042236328],["pří",-13.175322532653809],["υρο",-13.175326347351074],["▁Area",-13.175326347351074],["ম্ব",-13.175329208374023],["▁ťa",-13.17534065246582],["యర్",-13.175369262695312],["▁sonuç",-13.175374031066896],["▁dodal",-13.17539119720459],["持有",-13.175408363342283],["▁الفا",-13.17543888092041],["▁کامیاب",-13.175457000732422],["ிற்கு",-13.17546558380127],["▁postavi",-13.175474166870115],["шие",-13.175508499145508],["▁varsin",-13.175529479980469],["的中",-13.175538063049316],["▁brune",-13.175544738769531],["▁Орган",-13.175559043884276],["▁görme",-13.175564765930176],["ларга",-13.175570487976074],["វ៉ា",-13.17559814453125],["職場",-13.17560577392578],["個性",-13.175633430480955],["▁impedi",-13.175637245178224],["ாது",-13.17564296722412],["▁למרות",-13.175657272338867],["▁карап",-13.175662994384766],["▁цены",-13.175671577453612],["▁atopa",-13.175676345825195],["▁tester",-13.175676345825195],["เหล่านี้",-13.175681114196776],["زيد",-13.17568302154541],["lacak",-13.175684928894045],["▁Frem",-13.175703048706056],["▁уулу",-13.175714492797852],["▁Folke",-13.175715446472168],["▁momenti",-13.175719261169434],["▁convertir",-13.175721168518066],["▁Стру",-13.175756454467772],["нскі",-13.175758361816406],["עש",-13.17581844329834],["לוי",-13.175856590270996],["▁borð",-13.17587184906006],["體育",-13.175891876220703],["нај",-13.175901412963867],["shka",-13.17592430114746],["▁കൊണ്ടു",-13.175960540771484],["圆",-13.175987243652344],["▁Хай",-13.17599105834961],["▁skj",-13.176010131835938],["凌",-13.176025390625],["bunt",-13.17603874206543],["▁förbättra",-13.176068305969238],["▁ئاپتونوم",-13.176068305969238],["▁внутри",-13.176069259643556],["▁राशिफल",-13.176071166992188],["▁दुवै",-13.17607593536377],["guzi",-1
3.176076889038086],["▁Chennai",-13.176077842712402],["▁itibaren",-13.17607879638672],["▁हमारी",-13.17607879638672],["▁vertical",-13.176082611083984],["сии",-13.176084518432615],["▁екінші",-13.176085472106934],["▁դրանց",-13.176092147827148],["ಗ್ಗ",-13.17609691619873],["▁محت",-13.17609977722168],["▁Interview",-13.176119804382324],["▁projetos",-13.176121711730955],["▁Inhalt",-13.176127433776855],["▁llogari",-13.176137924194336],["torius",-13.1761474609375],["▁ಇನ್ನು",-13.176156044006348],["prak",-13.176167488098145],["atutako",-13.17616844177246],["▁جشن",-13.176177024841309],["уваме",-13.17618465423584],["▁Punjab",-13.176197052001951],["▁tattoo",-13.176198959350586],["ੰਮ",-13.176230430603027],["luh",-13.176236152648926],["▁2011-",-13.17625617980957],["▁kawaida",-13.176260948181152],["▁Arthur",-13.176262855529783],["▁başkan",-13.176273345947266],["▁trao",-13.176274299621582],["▁વધ",-13.176342010498049],["▁шара",-13.176359176635742],["▁Спас",-13.17636775970459],["öğretim",-13.176393508911133],["▁ڈالر",-13.176403999328612],["zdan",-13.17642879486084],["ndeki",-13.176438331604004],["▁Universidad",-13.17643928527832],["▁deilig",-13.176451683044434],["是我们",-13.176474571228027],["irii",-13.176494598388672],["viku",-13.176498413085938],["▁بريد",-13.176511764526367],["ਵੈ",-13.176527976989746],["bling",-13.176528930664062],["老婆",-13.176528930664062],["▁نگر",-13.176563262939451],["▁говорим",-13.176605224609377],["▁chamada",-13.17661190032959],["▁นาที",-13.176636695861816],["▁ganas",-13.176652908325195],["▁tunggu",-13.176655769348145],["▁університету",-13.176666259765623],["▁콘텐츠",-13.176685333251951],["▁Yên",-13.17668628692627],["▁жұ",-13.176695823669434],["▁выгод",-13.176716804504396],["хал",-13.176719665527344],["▁număr",-13.17672061920166],["▁Мил",-13.17674160003662],["▁ostalo",-13.176755905151367],["▁string",-13.176765441894531],["▁traja",-13.17678928375244],["wienia",-13.176797866821287],["▁380",-13.17681121826172],["全く",-13.1768159866333],["▁arengu",-13.176828384399414],["▁ochii",-13.176843643188477],["ciente",-13.176881790161133],["▁dát",-13.176887512207031],["▁ciencia",-13.17690658569336],["▁NON",-13.17691421508789],["▁Sund",-13.176953315734863],["tema",-13.176957130432127],["▁mbr",-13.176981925964355],["▁fundet",-13.177009582519531],["▁Нэг",-13.177010536193848],["яд",-13.177061080932615],["цкая",-13.177067756652832],["▁keyin",-13.17707347869873],["faidh",-13.177081108093262],["▁వారికి",-13.177083969116213],["▁النساء",-13.177088737487791],["学者",-13.17709255218506],["níky",-13.177103996276855],["▁مدیران",-13.177116394042969],["▁Niin",-13.177127838134766],["▁többi",-13.177132606506348],["écoute",-13.17713737487793],["▁takové",-13.177141189575195],["बाबत",-13.177181243896484],["moi",-13.177186012268066],["▁sabit",-13.177190780639648],["PON",-13.177204132080078],["▁puntu",-13.17724609375],["▁vorher",-13.177248001098633],["جماع",-13.177249908447266],["raient",-13.177252769470217],["▁Ges",-13.177253723144531],["తులు",-13.177255630493164],["ക്കുള്ള",-13.177279472351074],["▁gomb",-13.17728042602539],["▁tulos",-13.177281379699709],["nous",-13.177295684814451],["▁സത്യ",-13.177313804626465],["▁2,3",-13.177346229553224],["▁Suites",-13.177346229553224],["▁141",-13.177348136901855],["▁അംഗ",-13.177359580993652],["റ്റു",-13.177362442016602],["▁atac",-13.177370071411133],["▁atlieka",-13.177374839782717],["lh",-13.17740535736084],["▁tatil",-13.17740535736084],["▁Rodríguez",-13.177414894104004],["▁thuyết",-13.177414894104004],["▁جعفر",-13.177414894104004],["▁کشاورزی",-13.177414894104004],["▁सपना",-13.177414894104004]
,["▁സമീപ",-13.177414894104004],["▁campanha",-13.177416801452637],["▁nîşan",-13.177417755126951],["▁Атамбаев",-13.177420616149902],["ေပါက္",-13.177422523498535],["▁જણાવ",-13.177422523498535],["ித்த",-13.177427291870115],["მეცნიერ",-13.17742919921875],["▁ambula",-13.177433013916016],["เอเชีย",-13.177434921264648],["▁izvēlēties",-13.17744255065918],["▁سوشل",-13.177443504333496],["▁degree",-13.177451133728027],["▁ciidamada",-13.17745304107666],["▁önnur",-13.177454948425291],["▁Тук",-13.177470207214355],["tieve",-13.17747688293457],["▁presiden",-13.177488327026367],["▁Suo",-13.177515029907228],["▁szobá",-13.177515983581545],["▁مث",-13.17752456665039],["▁درخت",-13.177541732788086],["sūtīt",-13.177550315856934],["ิว",-13.177552223205566],["著作",-13.177556037902832],["ذك",-13.177562713623049],["▁valta",-13.17764663696289],["āl",-13.17768669128418],["▁buss",-13.177688598632812],["bait",-13.177689552307127],["vann",-13.177709579467772],["▁آيا",-13.177719116210938],["▁سهم",-13.17772388458252],["▁Վեր",-13.177745819091797],["owanego",-13.17776107788086],["かれ",-13.17776870727539],["рэч",-13.177775382995604],["▁ilan",-13.177778244018556],["▁डर",-13.177816390991213],["▁ober",-13.177834510803224],["▁dispo",-13.177858352661133],["▁voie",-13.177861213684082],["০",-13.177865982055664],["nggo",-13.177875518798828],["huizen",-13.177881240844728],["▁keele",-13.177889823913574],["▁balas",-13.17789077758789],["▁Klikk",-13.17789363861084],["▁určitě",-13.177907943725586],["ברים",-13.177910804748535],["家中",-13.177922248840332],["گیر",-13.177923202514648],["արտադր",-13.17792797088623],["▁criança",-13.177938461303713],["▁viikon",-13.17794418334961],["▁Summer",-13.17795181274414],["▁især",-13.177953720092772],["▁ಇದ",-13.17801570892334],["▁თავად",-13.178019523620604],["▁teikt",-13.178022384643556],["▁Bilgi",-13.178030014038086],["▁લાગ",-13.178034782409668],["▁Ŝ",-13.178060531616213],["部长",-13.178060531616213],["qlar",-13.178065299987791],["minlash",-13.178068161010742],["▁Preise",-13.178071975708008],["▁arian",-13.178075790405272],["meleri",-13.178083419799805],["▁draw",-13.178092956542969],["СВ",-13.178094863891602],["▁નવા",-13.178144454956056],["TECH",-13.178162574768066],["gener",-13.178166389465332],["▁deniz",-13.178175926208496],["究竟",-13.178187370300291],["▁Совета",-13.178192138671877],["▁proiektu",-13.178210258483888],["▁۱۶",-13.178247451782228],["▁رضي",-13.178263664245604],["ιδ",-13.178269386291504],["▁ondersteun",-13.178278923034668],["▁Był",-13.17828369140625],["จ้าง",-13.178333282470703],["的水",-13.178339004516602],["ળો",-13.178339958190918],["▁arg",-13.17835521697998],["▁sulm",-13.178356170654297],["▁MEM",-13.17836570739746],["▁Бек",-13.178409576416016],["වාද",-13.178418159484863],["குறி",-13.178459167480469],["▁žá",-13.178461074829102],["marked",-13.178504943847656],["կաց",-13.178513526916504],["casa",-13.178524017333984],["खी",-13.178536415100098],["▁канча",-13.178543090820312],["நி",-13.17856216430664],["▁विद्या",-13.17856216430664],["▁berikan",-13.178582191467283],["▁szex",-13.178590774536133],["ແທ",-13.178604125976562],["▁ścian",-13.17863941192627],["▁acel",-13.178666114807127],["muna",-13.178675651550291],["▁Cook",-13.178678512573242],["巴西",-13.178680419921877],["ੜ੍ਹ",-13.17869758605957],["▁ഉണ്ടായ",-13.178705215454102],["ította",-13.178711891174316],["▁cyfle",-13.178715705871582],["▁غوښت",-13.17874526977539],["ก็ไม่",-13.178746223449709],["孫",-13.178747177124023],["sona",-13.178751945495604],["serien",-13.178753852844238],["不多",-13.178753852844238],["แข็ง",-13.178754806518556],["閃",-13.178756713867188],["ຂ
້າພະເຈົ້າ",-13.178759574890137],["▁ಸುಮಾರು",-13.178762435913086],["▁penderita",-13.17876434326172],["▁ովքեր",-13.17876434326172],["▁coffee",-13.178765296936035],["▁ਨਵੀਂ",-13.178768157958984],["▁ყოველთვის",-13.178770065307615],["🏻",-13.178772926330566],["▁خاتون",-13.178781509399414],["▁idatzi",-13.178799629211426],["レベル",-13.178802490234377],["ndere",-13.17880916595459],["▁vize",-13.178854942321776],["▁පස්සෙ",-13.17886447906494],["▁influence",-13.178898811340332],["▁සංවිධානය",-13.178915977478027],["teman",-13.178933143615724],["ಚ್ಚಿ",-13.178933143615724],["▁ಪಟ್ಟಿ",-13.178947448730469],["▁Circ",-13.178960800170898],["▁dalawang",-13.178963661193848],["▁nabad",-13.17897129058838],["多家",-13.17898941040039],["▁rendere",-13.178995132446287],["увам",-13.179003715515137],["▁NAS",-13.179004669189451],["▁عالية",-13.179009437561035],["Mat",-13.179032325744627],["باحث",-13.179041862487791],["ÄÄ",-13.179068565368652],["▁нарушения",-13.179096221923828],["מידה",-13.179121971130373],["ारी",-13.179129600524902],["punan",-13.179131507873535],["આઈ",-13.17914581298828],["כתב",-13.179173469543455],["▁основних",-13.179187774658203],["ümüz",-13.179195404052734],["▁satelit",-13.179210662841797],["▁ପଦ",-13.179229736328123],["siä",-13.179232597351074],["දුරු",-13.179244995117188],["▁maska",-13.17924976348877],["▁पुढे",-13.179262161254885],["▁অভিযোগ",-13.179272651672363],["▁üzvləri",-13.17927360534668],["▁literatur",-13.179277420043944],["▁Віт",-13.179282188415527],["▁širok",-13.179286003112791],["ksjon",-13.179312705993652],["▁نج",-13.179346084594728],["hell",-13.17937183380127],["▁કાર્ય",-13.179373741149902],["දීම",-13.179398536682127],["تكون",-13.17940902709961],["▁investimento",-13.179412841796877],["▁шаш",-13.179513931274414],["▁jegy",-13.179516792297363],["Көл",-13.179536819458008],["▁fluid",-13.179536819458008],["▁страшно",-13.17954444885254],["▁maila",-13.179597854614258],["▁yık",-13.179612159729004],["rogen",-13.179615020751951],["▁okus",-13.179615020751951],["储",-13.179615020751951],["पाई",-13.179620742797852],["ичної",-13.179633140563965],["▁Жы",-13.17963981628418],["цями",-13.179668426513672],["▁लड़",-13.179683685302734],["estas",-13.179697036743164],["▁Jord",-13.179707527160645],["▁المل",-13.17970848083496],["दास",-13.179733276367188],["▁الجز",-13.179741859436035],["▁چاهي",-13.1797456741333],["støtte",-13.179774284362791],["▁مشاور",-13.179795265197754],["▁dụ",-13.179798126220703],["Kal",-13.179811477661133],["▁утра",-13.179832458496094],["同一",-13.179862022399902],["▁készített",-13.179879188537598],["▁Дня",-13.17991542816162],["bauen",-13.179923057556152],["cyj",-13.179929733276367],["▁같다",-13.179965019226074],["▁Yak",-13.179988861083984],["يست",-13.18000316619873],["▁ఆశ",-13.180008888244627],["壹",-13.180036544799805],["棄",-13.18005084991455],["▁بهم",-13.180057525634766],["ckar",-13.18006420135498],["的技术",-13.18007755279541],["柯",-13.180089950561523],["▁पर्यटक",-13.180098533630373],["อังกฤษ",-13.180109024047852],["▁Đầu",-13.1801118850708],["▁తీవ్ర",-13.1801118850708],["を持つ",-13.1801118850708],["▁låter",-13.180112838745115],["▁때문이다",-13.180121421813965],["▁தேதி",-13.180134773254396],["▁zač",-13.180139541625977],["वट",-13.180145263671877],["Vir",-13.180147171020508],["ผัก",-13.180155754089355],["▁Charlotte",-13.180164337158203],["ΑΛ",-13.180171966552734],["▁Hình",-13.180201530456545],["▁موج",-13.18020725250244],["▁نئين",-13.18020725250244],["այում",-13.18021297454834],["一只",-13.180216789245604],["▁мак",-13.180256843566896],["▁prekės",-13.180264472961426],["уюць",-13.180274963378906],["▁conflict",-13.180274963
378906],["СТО",-13.18029499053955],["महा",-13.180304527282717],["▁Еще",-13.18031883239746],["▁Much",-13.18033504486084],["▁ಇತರ",-13.180335998535156],["กก",-13.180336952209473],["ଗୁ",-13.180340766906738],["рип",-13.180354118347168],["▁mừng",-13.180355072021484],["ここで",-13.180371284484863],["▁Michal",-13.180397987365724],["▁lide",-13.180415153503418],["▁ಸರಕಾರ",-13.180419921875],["▁lemn",-13.180447578430176],["رسال",-13.180453300476074],["handle",-13.180468559265137],["баатар",-13.180469512939451],["▁Już",-13.1804780960083],["▁QR",-13.180479049682615],["▁Photos",-13.180503845214844],["▁वर्षको",-13.18052101135254],["wyk",-13.18052577972412],["iseer",-13.180561065673828],["▁somit",-13.18057632446289],["▁chwili",-13.180580139160156],["▁fantast",-13.18058967590332],["பால",-13.180591583251951],["▁گئی۔",-13.180593490600586],["▁ٿر",-13.180604934692385],["▁Linn",-13.180641174316406],["▁nét",-13.180645942687988],["▁itp",-13.180652618408203],["▁бойы",-13.180655479431152],["ਹੇ",-13.180658340454102],["cand",-13.180663108825684],["▁заједно",-13.180688858032228],["▁nevis",-13.18069076538086],["dør",-13.180693626403809],["▁كىشى",-13.180696487426758],["dosh",-13.180703163146973],["▁панел",-13.180706024169922],["ренд",-13.180710792541504],["▁болница",-13.180733680725098],["▁raggiungere",-13.180757522583008],["èl",-13.18075942993164],["▁jednoducho",-13.180760383605955],["gegangen",-13.180814743041992],["nasta",-13.18082332611084],["▁rút",-13.180831909179688],["sidor",-13.180835723876951],["ワー",-13.180848121643066],["сред",-13.180855751037598],["▁devido",-13.180867195129396],["▁indar",-13.180910110473633],["であった",-13.180919647216797],["▁biler",-13.180932998657228],["▁réir",-13.18093967437744],["▁ಪ್ರಧಾನಿ",-13.18093967437744],["יבי",-13.18094539642334],["▁správne",-13.180965423583984],["төө",-13.180985450744627],["2,000",-13.180989265441896],["கார",-13.180994033813477],["▁व्ह",-13.180999755859377],["▁சொன்ன",-13.181008338928224],["るのは",-13.181095123291016],["▁doğum",-13.181108474731444],["▁Princip",-13.181109428405762],["ឈ",-13.18111801147461],["▁estás",-13.181126594543455],["ಮೇ",-13.181144714355469],["ражда",-13.181170463562012],["▁స్ప",-13.181172370910645],["дері",-13.18117904663086],["▁oleme",-13.18117904663086],["▁عربي",-13.181204795837402],["лаже",-13.181221961975098],["▁ହା",-13.18122386932373],["ご紹介",-13.181270599365234],["▁معامل",-13.181281089782717],["ète",-13.181285858154297],["▁Meli",-13.181296348571776],["▁從",-13.18129825592041],["овал",-13.181299209594728],["▁futbal",-13.181312561035156],["▁Австрали",-13.181331634521484],["▁برند",-13.1813383102417],["▁karton",-13.18134593963623],["▁прима",-13.181351661682127],["▁misterio",-13.181358337402344],["▁Ramon",-13.18143367767334],["ಿಗೂ",-13.181442260742188],["лист",-13.181448936462402],["▁Kanton",-13.181456565856934],["▁Константин",-13.181462287902832],["▁reivindica",-13.181463241577148],["▁арнайы",-13.181463241577148],["▁შედეგად",-13.181463241577148],["▁almindelig",-13.181464195251465],["▁리뷰",-13.181467056274414],["▁téléphone",-13.18146800994873],["▁đức",-13.181477546691896],["▁sõnul",-13.181480407714844],["▁भवति",-13.181485176086426],["▁huidige",-13.181512832641602],["ทู",-13.181517601013184],["▁dnevno",-13.18152141571045],["▁öde",-13.181524276733398],["▁क्लब",-13.181571006774902],["▁proračun",-13.181573867797852],["▁الطعام",-13.181573867797852],["خور",-13.181588172912598],["▁gelin",-13.18166446685791],["palvelut",-13.18171501159668],["▁Rado",-13.181750297546388],["င့္",-13.181785583496094],["нскага",-13.181792259216309],["▁李",-13.181793212890623],["▁രണ്ട",-1
3.18183422088623],["ขั้นตอน",-13.181846618652344],["▁ನೀರು",-13.18186378479004],["рушен",-13.181876182556152],["▁Berufs",-13.181876182556152],["▁recol",-13.181894302368164],["▁edition",-13.181901931762695],["▁alıb",-13.181922912597656],["nięcie",-13.181924819946287],["▁वी",-13.181943893432615],["avis",-13.181955337524414],["▁Ninh",-13.181978225708008],["▁ওয়া",-13.18198299407959],["ίτ",-13.181988716125488],["انى",-13.182036399841309],["▁storico",-13.18203830718994],["▁tappa",-13.182052612304688],["เป็นคน",-13.1820707321167],["▁lisaks",-13.18210220336914],["кового",-13.18212890625],["▁malik",-13.182140350341797],["saday",-13.18214511871338],["▁putut",-13.182161331176758],["▁հիմա",-13.1821928024292],["▁mush",-13.182196617126465],["하였",-13.182223320007324],["▁obrazov",-13.18222999572754],["▁ruk",-13.18222999572754],["▁umumnya",-13.182283401489258],["eigen",-13.182313919067385],["דיר",-13.182320594787598],["▁dollari",-13.182324409484863],["▁күш",-13.182332038879396],["▁maí",-13.18235206604004],["▁Pilihan",-13.18235683441162],["▁можеш",-13.182368278503418],["▁Maksim",-13.18237018585205],["▁Malam",-13.182427406311035],["zeni",-13.182442665100098],["report",-13.1824951171875],["ykite",-13.18250846862793],["sää",-13.18252658843994],["υμα",-13.182527542114258],["рке",-13.182557106018066],["▁amateur",-13.182558059692385],["▁физически",-13.182578086853027],["▁نعم",-13.182600021362305],["โห",-13.18261432647705],["ോള",-13.182634353637695],["▁biridir",-13.182639122009276],["▁uživatel",-13.182649612426758],["▁anumite",-13.182660102844238],["▁ತಂಡ",-13.182662010192873],["▁vanliga",-13.182674407958984],["▁Gat",-13.182682037353516],["▁worked",-13.182696342468262],["▁zoeken",-13.18270492553711],["स्त्र",-13.182729721069336],["▁вядома",-13.182730674743652],["УТ",-13.182738304138184],["vahetus",-13.182751655578612],["рії",-13.182790756225586],["瑪",-13.182790756225586],["GET",-13.182793617248535],["▁아직",-13.18280029296875],["째",-13.182806968688965],["벽",-13.182811737060549],["▁propias",-13.182814598083496],["ចំណុច",-13.182815551757812],["▁Congreso",-13.182816505432127],["▁Iglesia",-13.182816505432127],["▁Vielleicht",-13.182816505432127],["▁rozpoczę",-13.182816505432127],["▁Δεκεμβρίου",-13.182816505432127],["▁χώρες",-13.182816505432127],["▁خراسان",-13.182816505432127],["▁مسئله",-13.182816505432127],["▁يۇقىرى",-13.182816505432127],["▁ժողովրդի",-13.182819366455078],["▁harmadik",-13.182820320129396],["▁mbeidh",-13.18282985687256],["▁sobotu",-13.182835578918455],["▁İkinci",-13.182836532592772],["▁politikai",-13.18285083770752],["stiku",-13.182852745056152],["▁Medicina",-13.182861328125],["▁Berat",-13.182862281799316],["新疆",-13.182865142822266],["▁නියම",-13.182870864868164],["▁upošteva",-13.18287181854248],["融入",-13.182878494262695],["▁փաստ",-13.182896614074709],["▁սահմանված",-13.182899475097656],["ずっと",-13.182900428771973],["▁efforts",-13.182909965515137],["▁مرتب",-13.182920455932615],["▁القيام",-13.182936668395996],["神秘",-13.182945251464844],["▁бүтээл",-13.182984352111816],["▁امله",-13.182988166809082],["▁overzicht",-13.182991981506348],["spring",-13.183012008666992],["ຮູບ",-13.18302059173584],["▁Eugen",-13.18303394317627],["▁કરવાની",-13.183059692382812],["ЗИ",-13.183069229125977],["შვილი",-13.183077812194824],["▁වුනා",-13.183096885681152],["ਵਾਨ",-13.183133125305176],["▁સમ",-13.183160781860352],["瞬間",-13.183173179626465],["▁skoða",-13.183175086975098],["▁Duration",-13.183185577392578],["ğe",-13.183189392089844],["ගැ",-13.183197021484377],["▁шалга",-13.18320369720459],["▁manche",-13.183216094970703],["färd",-13.1832189
55993652],["▁małe",-13.183245658874512],["ously",-13.183268547058104],["▁atribu",-13.183268547058104],["liner",-13.1832914352417],["▁immobili",-13.1832914352417],["▁choisir",-13.183297157287598],["VG",-13.183306694030762],["▁esiste",-13.183316230773926],["agens",-13.18333625793457],["Out",-13.183341979980469],["▁soleil",-13.183354377746582],["▁gestió",-13.183378219604492],["▁Londra",-13.183384895324709],["স্থান",-13.183385848999023],["▁ମନ୍ତ୍ରୀ",-13.183385848999023],["oszt",-13.183398246765137],["▁সভা",-13.183411598205566],["াপ",-13.183425903320312],["▁भावना",-13.183432579040527],["ស់",-13.183438301086426],["▁rezerva",-13.183438301086426],["elewa",-13.183439254760742],["▁Käy",-13.18344497680664],["▁Світ",-13.183446884155272],["▁художник",-13.183472633361816],["etxea",-13.183486938476562],["▁1.8",-13.183490753173828],["▁տարբերակ",-13.183513641357422],["▁heta",-13.18352508544922],["▁Kato",-13.183531761169434],["简单的",-13.18353271484375],["▁romantic",-13.183533668518066],["ČE",-13.18354320526123],["чээ",-13.183547019958496],["▁IG",-13.18355655670166],["▁үнэн",-13.183563232421877],["▁располага",-13.183566093444824],["▁Hə",-13.18358325958252],["▁refugi",-13.18359375],["▁käyttäjä",-13.183605194091797],["tär",-13.183618545532228],["رود",-13.183670043945312],["▁schreiben",-13.18367862701416],["▁stek",-13.183682441711426],["▁kontakta",-13.183703422546388],["elé",-13.18372631072998],["▁20-30",-13.183743476867676],["▁ମିଳି",-13.183743476867676],["▁langue",-13.183761596679688],["▁შეს",-13.183770179748535],["▁կմ",-13.183786392211914],["사의",-13.183807373046877],["ထဲမွာ",-13.183819770812988],["▁äta",-13.183826446533203],["▁farið",-13.18382740020752],["▁raid",-13.183834075927734],["にする",-13.183834075927734],["▁сок",-13.183847427368164],["עשו",-13.183857917785645],["▁verstaan",-13.183876037597656],["ISH",-13.183886528015137],["面積",-13.183910369873049],["চিত",-13.183941841125488],["▁Unde",-13.183964729309082],["шения",-13.183982849121094],["▁bayar",-13.183985710144045],["לח",-13.183988571166992],["不懂",-13.184017181396484],["ŝt",-13.184019088745115],["▁សុខ",-13.184026718139648],["▁окре",-13.184045791625977],["szint",-13.18405532836914],["侵",-13.18406867980957],["缓",-13.184081077575684],["▁siyasəti",-13.184117317199709],["▁orqanları",-13.184139251708984],["ämne",-13.18414306640625],["▁Saha",-13.18414306640625],["▁Evrópu",-13.184170722961426],["▁ರಾಜಕೀಯ",-13.184170722961426],["▁awesome",-13.184171676635742],["▁ditemukan",-13.184171676635742],["▁کانپوء",-13.184171676635742],["▁धोरण",-13.184171676635742],["▁პროგრამა",-13.184171676635742],["▁sanggup",-13.18417263031006],["▁эсвэл",-13.184176445007324],["▁මෛත්",-13.18417739868164],["▁rozwiązanie",-13.18418312072754],["▁හමුව",-13.184185028076172],["▁yaradılması",-13.184195518493652],["Kat",-13.184223175048828],["गन",-13.184276580810549],["哪里",-13.184281349182127],["ydelse",-13.184284210205078],["▁realizzazione",-13.184303283691406],["ခြင့္",-13.18430995941162],["واقع",-13.184310913085938],["けば",-13.18436336517334],["▁lupta",-13.184370040893556],["▁Magnus",-13.184370994567873],["דק",-13.184377670288086],["▁Днес",-13.184378623962402],["▁សង្កាត់",-13.184398651123049],["acją",-13.184420585632324],["▁աճ",-13.18442440032959],["▁Banda",-13.184438705444336],["nskih",-13.18447208404541],["赛季",-13.184477806091309],["pension",-13.18448543548584],["▁Rhy",-13.184527397155762],["影响力",-13.184532165527344],["▁proud",-13.184575080871582],["▁שנת",-13.184581756591797],["▁dziw",-13.184602737426758],["▁маж",-13.184609413146973],["▁정보를",-13.184622764587402],["▁igrač",-13.184639930725098],["▁일
정",-13.184639930725098],["কাল",-13.18465805053711],["력을",-13.184659004211426],["arat",-13.184662818908691],["▁заключ",-13.184666633605955],["▁Президента",-13.18467903137207],["▁dejtingsajter",-13.18469524383545],["वर्",-13.184698104858398],["▁curios",-13.18470859527588],["▁başlat",-13.184741973876951],["味道",-13.184757232666016],["▁그의",-13.184762954711914],["rín",-13.184771537780762],["stasjon",-13.184776306152344],["לין",-13.18480396270752],["▁קורס",-13.184825897216797],["htm",-13.18486213684082],["wic",-13.18486213684082],["dzien",-13.18486785888672],["▁rule",-13.184879302978516],["▁Vesel",-13.18488311767578],["▁ತಪ್ಪ",-13.184893608093262],["פרט",-13.184914588928224],["▁EE",-13.184943199157717],["▁plyn",-13.184955596923828],["isuutta",-13.184978485107422],["▁bulunuyor",-13.184998512268066],["▁dimaksud",-13.185003280639648],["ټا",-13.18500518798828],["無論",-13.185015678405762],["▁հր",-13.185019493103027],["的設計",-13.185029983520508],["shën",-13.185038566589355],["prote",-13.18504524230957],["sata",-13.185070037841797],["▁Lock",-13.185070037841797],["回去",-13.185091018676758],["▁viagem",-13.185117721557615],["சாமி",-13.1851224899292],["leges",-13.185123443603516],["क्षण",-13.18512725830078],["▁پیشرفت",-13.185128211975098],["Bro",-13.185138702392578],["ՐԱ",-13.18515396118164],["▁mír",-13.18519401550293],["▁behandeling",-13.185195922851562],["▁jsi",-13.18520736694336],["iaus",-13.18522834777832],["术",-13.18522834777832],["▁176",-13.18523120880127],["étől",-13.185245513916016],["▁жест",-13.185245513916016],["▁(*)",-13.185267448425291],["ELLE",-13.185283660888672],["▁Oda",-13.185290336608888],["▁xen",-13.185297012329102],["▁kerület",-13.185307502746582],["ந்தா",-13.185309410095217],["▁नभए",-13.18532943725586],["▁RP",-13.185348510742188],["▁작업",-13.185361862182615],["▁costat",-13.185365676879885],["▁připrav",-13.185368537902832],["တက်",-13.185385704040527],["бору",-13.185399055480955],["▁едни",-13.185401916503906],["យប់",-13.185407638549805],["▁Belo",-13.185409545898438],["▁Hlavní",-13.185415267944336],["metre",-13.185423851013184],["▁povr",-13.185434341430664],["▁صبر",-13.185442924499512],["迹",-13.18545913696289],["houden",-13.185486793518066],["rebbero",-13.185486793518066],["▁Policy",-13.185518264770508],["ภาพยนตร์",-13.185526847839355],["▁Gospodar",-13.185526847839355],["٨",-13.185527801513672],["▁Москвы",-13.185527801513672],["基準",-13.185527801513672],["▁moeilijk",-13.185528755187988],["▁necessità",-13.185528755187988],["▁tilfælde",-13.185528755187988],["▁Trabalho",-13.18553066253662],["▁ଏଭଳି",-13.185532569885254],["▁először",-13.18553352355957],["▁vackra",-13.185537338256836],["▁ମନ",-13.185537338256836],["▁Χρ",-13.185540199279783],["▁المياه",-13.185548782348633],["▁ఉంటే",-13.185551643371582],["▁twa",-13.185562133789062],["hout",-13.185577392578123],["▁Pret",-13.185585021972656],["▁chiếu",-13.185589790344238],["▁gangen",-13.185589790344238],["▁svojoj",-13.185595512390137],["plně",-13.185623168945312],["अप",-13.185628890991213],["▁dico",-13.18563461303711],["▁sempurna",-13.18564796447754],["一座",-13.185681343078612],["امة",-13.18569564819336],["▁അവർ",-13.185698509216309],["нко",-13.18571949005127],["рід",-13.18571949005127],["ítear",-13.185728073120115],["▁समान",-13.185729026794434],["CAR",-13.185751914978027],["▁акции",-13.185755729675291],["ුන",-13.185765266418455],["▁árum",-13.185765266418455],["عتبر",-13.185785293579102],["▁ਵਰ",-13.185819625854492],["▁makna",-13.185830116271973],["ktör",-13.18584442138672],["эц",-13.18584442138672],["▁Sac",-13.185864448547363],["▁تقویت",-13.185874938964844],["
▁முதல",-13.18588161468506],["▁pilota",-13.185905456542969],["▁teater",-13.185946464538574],["لاس",-13.185954093933104],["TIS",-13.185958862304688],["гаас",-13.185983657836914],["вэл",-13.185989379882812],["▁nehéz",-13.18599796295166],["ಯೆ",-13.186026573181152],["▁framtid",-13.186034202575684],["▁heat",-13.186039924621582],["分配",-13.18604564666748],["jdu",-13.186098098754885],["▁mạch",-13.186108589172363],["▁Hemen",-13.186115264892578],["▁Kép",-13.18612003326416],["▁reducir",-13.186147689819336],["的日子",-13.186151504516602],["▁पॉ",-13.18616008758545],["lukan",-13.186182975769045],["▁yarım",-13.186185836791992],["▁ข้อ",-13.186185836791992],["▁literat",-13.186202049255373],["▁जेठ",-13.186235427856444],["▁NEW",-13.186278343200684],["▁Baraza",-13.18630313873291],["Ва",-13.186327934265137],["KIT",-13.186345100402832],["▁စကား",-13.18634796142578],["▁આજ",-13.186365127563477],["izy",-13.186409950256348],["▁jouer",-13.186412811279297],["yczną",-13.186447143554688],["ološki",-13.186450004577637],["▁مسلح",-13.18645191192627],["ನೆಯ",-13.186474800109863],["▁Edu",-13.18649959564209],["▁тағы",-13.186511993408203],["▁zásad",-13.186515808105469],["))))",-13.186546325683594],["бара",-13.186546325683594],["▁pence",-13.186575889587402],["▁dibawa",-13.18659496307373],["statistik",-13.186606407165527],["▁يەر",-13.186616897583008],["▁varem",-13.18661880493164],["lį",-13.186634063720703],["LUM",-13.186660766601562],["▁tulang",-13.186670303344728],["▁hälsa",-13.186671257019045],["してる",-13.186699867248535],["▁معدن",-13.186705589294434],["ยังคง",-13.186711311340332],["xus",-13.186720848083496],["ვლი",-13.186723709106444],["وطن",-13.186741828918455],["ვილ",-13.186744689941406],["áticos",-13.186745643615724],["▁seneste",-13.186812400817873],["מרים",-13.186822891235352],["▁బాస్",-13.186844825744627],["▁የለም",-13.18686866760254],["▁télé",-13.186877250671388],["වය",-13.186880111694336],["▁Oczywiście",-13.18688678741455],["▁movimiento",-13.18688678741455],["▁Φεβρουαρίου",-13.18688678741455],["▁કોંગ્રેસ",-13.18688678741455],["▁ପୋଲିସ",-13.186887741088867],["査定",-13.186887741088867],["▁исчез",-13.186888694763184],["▁ልጆች",-13.186890602111816],["domā",-13.186894416809082],["mönnum",-13.186896324157717],["▁שהת",-13.186901092529297],["▁மிகவும்",-13.186902046203612],["Fu",-13.18690586090088],["ਫਾ",-13.186918258666992],["ością",-13.18694305419922],["幸せ",-13.186948776245115],["▁ಭಾಷೆ",-13.1869535446167],["▁берди",-13.186959266662598],["▁사건",-13.186969757080078],["مىن",-13.18699836730957],["▁כיום",-13.18702220916748],["Bel",-13.187029838562012],["▁ögon",-13.187063217163086],["▁Rü",-13.1870698928833],["▁tonn",-13.18708038330078],["ที่เกี่ยวข้อง",-13.18710231781006],["រឿង",-13.18710708618164],["▁ડો",-13.187131881713867],["漂亮",-13.187134742736816],["elmi",-13.187156677246094],["পাল",-13.187156677246094],["▁tijelo",-13.187173843383787],["▁Серед",-13.187173843383787],["▁матеріал",-13.187204360961914],["▁dhow",-13.187231063842772],["▁kava",-13.187251091003418],["▁procedur",-13.187253952026367],["ት፣",-13.187280654907228],["強化",-13.187283515930176],["▁පිරිසක්",-13.187311172485352],["▁menet",-13.187315940856934],["▁HK",-13.187323570251465],["▁इच्छा",-13.187352180480955],["ാല",-13.187417030334473],["▁நம்",-13.18742561340332],["▁4.3",-13.18743133544922],["▁nismo",-13.187435150146484],["linin",-13.1874418258667],["▁Ся",-13.187447547912598],["▁Ĉe",-13.187459945678713],["▁šią",-13.18746852874756],["竞争力",-13.187469482421877],["euros",-13.18748378753662],["▁optimale",-13.18748378753662],["のない",-13.18748378753662],["▁oportun",-13.187495231628418],["▁siir
ty",-13.187508583068848],["▁tunnel",-13.187529563903809],["▁judicial",-13.187542915344238],["▁érdekes",-13.187543869018556],["▁තියෙන්නෙ",-13.187552452087402],["▁Australi",-13.1875581741333],["şü",-13.187561988830566],["ရိုး",-13.18757152557373],["▁мэнд",-13.18757152557373],["abban",-13.187576293945312],["HOR",-13.187578201293944],["▁azul",-13.187588691711426],["czony",-13.187610626220703],["▁ફરી",-13.187614440917969],["▁Дода",-13.18763542175293],["▁öv",-13.187640190124512],["тић",-13.187651634216309],["ପତି",-13.187660217285156],["ghan",-13.187673568725586],["▁buscando",-13.18767547607422],["▁inguruan",-13.187681198120115],["▁umri",-13.187689781188965],["▁പഴയ",-13.187703132629396],["▁ninguna",-13.187705039978027],["▁Block",-13.18773365020752],["▁מיין",-13.187759399414062],["орт",-13.187764167785645],["▁useful",-13.18776512145996],["రించ",-13.187777519226074],["ഴ്",-13.187788009643556],["▁vendet",-13.187800407409668],["页",-13.187803268432615],["nutie",-13.187824249267578],["▁қ",-13.187829971313477],["▁далей",-13.187847137451172],["έτα",-13.187851905822754],["სულ",-13.187867164611816],["▁discipline",-13.187867164611816],["▁തെറ്റ",-13.18789005279541],["▁computador",-13.18789291381836],["asach",-13.18790054321289],["ńcz",-13.187904357910156],["▁মান",-13.187911987304688],["▁ενδιαφέρον",-13.187936782836914],["zde",-13.187952041625977],["▁contamina",-13.187955856323242],["građ",-13.187960624694824],["huur",-13.187982559204102],["butik",-13.187999725341797],["retning",-13.188006401062012],["пед",-13.188013076782228],["▁acte",-13.188065528869627],["ække",-13.188072204589844],["-200",-13.18811321258545],["قبال",-13.188157081604004],["覆",-13.18816375732422],["ดําเนินการ",-13.1881685256958],["טון",-13.1881742477417],["▁arran",-13.1881742477417],["නෝ",-13.188176155090332],["▁humanit",-13.18817901611328],["枝",-13.18819808959961],["尝试",-13.188201904296877],["▁Фил",-13.18820571899414],["fråga",-13.188236236572266],["torie",-13.188240051269531],["gori",-13.188244819641112],["릭",-13.188244819641112],["틴",-13.188244819641112],["▁Hezkuntza",-13.188247680664062],["▁augusztus",-13.188247680664062],["▁kuukautta",-13.188247680664062],["▁mendukung",-13.188247680664062],["▁perspective",-13.188247680664062],["▁vienkārši",-13.188247680664062],["▁ytterligare",-13.188247680664062],["▁τίτλο",-13.188247680664062],["▁згідно",-13.188247680664062],["▁ಪ್ರಮುಖ",-13.188251495361328],["▁dejtingsidor",-13.188252449035645],["▁قانۇن",-13.188252449035645],["▁ਵੱਧ",-13.18825340270996],["▁الصفحة",-13.188254356384276],["▁ક્યાં",-13.188257217407228],["▁isteyen",-13.188261985778809],["▁vorige",-13.188261985778809],["▁söndag",-13.188268661499023],["▁Αλ",-13.188276290893556],["▁przecież",-13.188277244567873],["െങ്കിൽ",-13.188284873962402],["▁гео",-13.18829345703125],["▁Shopping",-13.188295364379885],["▁publish",-13.1882963180542],["▁հայտնել",-13.1882963180542],["客服",-13.188311576843262],["תכנית",-13.188329696655272],["▁готовы",-13.188329696655272],["▁Ticket",-13.18835735321045],["▁усилия",-13.188361167907717],["▁જશે",-13.188365936279297],["▁Jedi",-13.18836784362793],["▁3.3",-13.188376426696776],["▁Reklam",-13.18839168548584],["▁संस्कृति",-13.188392639160156],["▁iako",-13.188408851623535],["tempat",-13.18842315673828],["▁umowy",-13.188426971435549],["▁Philips",-13.18842887878418],["▁ქართულ",-13.188433647155762],["▁gris",-13.188488006591797],["▁Эн",-13.188488006591797],["▁देखने",-13.188504219055176],["▁оказа",-13.18850803375244],["rasti",-13.188520431518556],["ೋ",-13.188530921936035],["▁ወይ",-13.188558578491213],["nele",-13.188559532165527],["ଫା
",-13.188569068908691],["附近的",-13.188624382019045],["▁saking",-13.188628196716309],["▁kliendi",-13.188632011413574],["▁живо",-13.188645362854004],["年来",-13.188652992248535],["▁indtil",-13.188671112060549],["▁genieten",-13.18870449066162],["▁병",-13.18870735168457],["▁השבוע",-13.188709259033203],["▁சேர்ந்த",-13.18871021270752],["λές",-13.188725471496582],["目に",-13.188799858093262],["ടുത്ത",-13.188833236694336],["▁215",-13.188844680786133],["▁Τη",-13.188876152038574],["dıkları",-13.18889331817627],["▁Magyarországon",-13.188894271850586],["డంతో",-13.188899993896484],["▁innovative",-13.188899993896484],["▁teatral",-13.188899993896484],["▁Κατα",-13.188922882080078],["דירות",-13.188939094543455],["230",-13.188940048217772],["ειδ",-13.188942909240724],["▁координ",-13.188958168029783],["การพัฒนา",-13.188977241516112],["▁ժամ",-13.188983917236328],["doni",-13.188989639282228],["▁capitale",-13.188990592956545],["▁Liza",-13.189007759094238],["▁kuće",-13.1890287399292],["▁docent",-13.189037322998049],["▁fælles",-13.189040184020996],["▁sham",-13.189042091369627],["полі",-13.189061164855955],["leriniz",-13.189066886901855],["amon",-13.189069747924805],["▁Jama",-13.1890869140625],["▁20.00",-13.189109802246094],["▁ວິ",-13.189117431640623],["▁formação",-13.189156532287598],["▁мову",-13.189156532287598],["သတင္း",-13.189217567443848],["▁каманд",-13.18922233581543],["czas",-13.189230918884276],["▁beleid",-13.18924045562744],["▁bawer",-13.189306259155272],["▁См",-13.189312934875488],["ût",-13.189339637756348],["ਰਮ",-13.189342498779297],["即將",-13.189363479614258],["ส่วนตัว",-13.189391136169434],["▁zyrtar",-13.189398765563965],["▁Size",-13.18940544128418],["▁Dak",-13.189411163330078],["▁Serbia",-13.189419746398926],["pion",-13.18942165374756],["▁Бара",-13.18942165374756],["install",-13.189430236816406],["ตกแต่ง",-13.189445495605469],["▁නේ",-13.189455032348633],["▁یمن",-13.189461708068848],["வன",-13.189467430114746],["▁príjem",-13.189472198486328],["4,5",-13.189476013183594],["▁Liel",-13.189481735229492],["▁utilizator",-13.189501762390137],["▁Cancel",-13.1895112991333],["▁מזה",-13.189520835876465],["徑",-13.189542770385742],["हरुमा",-13.189549446105955],["▁talla",-13.189549446105955],["解决方案",-13.18959617614746],["วิดีโอ",-13.18960952758789],["▁campaign",-13.18960952758789],["▁caratteristiche",-13.18960952758789],["▁έρευνα",-13.18960952758789],["▁Սակայն",-13.18960952758789],["곤",-13.18960952758789],["▁nyhetsbrev",-13.189610481262209],["▁जिन",-13.189613342285156],["无论是",-13.189619064331056],["▁Yusuf",-13.189620018005373],["▁साधारण",-13.18962287902832],["▁നഗര",-13.18962574005127],["licita",-13.189631462097168],["▁Goed",-13.189638137817385],["▁असतो",-13.189645767211914],["▁مونږ",-13.189647674560549],["▁ವರ್ಷಗಳ",-13.189653396606444],["▁있지만",-13.189653396606444],["▁ученика",-13.189657211303713],["▁завтра",-13.189658164978027],["▁przygotowan",-13.189671516418455],["▁kuhakikisha",-13.189674377441406],["опис",-13.18968105316162],["▁dezembro",-13.189689636230469],["▁stránek",-13.189697265625],["хой",-13.189702987670898],["▁goste",-13.189705848693848],["shma",-13.189716339111328],["ಫಿ",-13.189720153808594],["▁changer",-13.18973159790039],["▁mutlaka",-13.189737319946287],["▁يجعل",-13.189749717712402],["lds",-13.189753532409668],["▁politiker",-13.189756393432615],["▁никаких",-13.189757347106934],["▁Limited",-13.189762115478516],["▁batas",-13.18977165222168],["▁gari",-13.189790725708008],["▁कहाँ",-13.189793586730955],["▁głównie",-13.189813613891602],["trekker",-13.18983268737793],["▁қоры",-13.189833641052246],["तू",-13.189834594726
562],["ჭი",-13.189837455749512],["▁Uw",-13.18984031677246],["▁תנאי",-13.189858436584473],["▁jezika",-13.189859390258787],["▁deydi",-13.189891815185549],["午",-13.189937591552734],["▁szerel",-13.189940452575684],["▁számos",-13.189942359924316],["▁স্ট",-13.18995761871338],["▁Georg",-13.18997859954834],["▁medij",-13.189990997314451],["stellingen",-13.189995765686035],["פּר",-13.190007209777832],["▁212",-13.190059661865234],["ይን",-13.19007682800293],["▁hayot",-13.19008445739746],["റോ",-13.190122604370115],["▁ilumin",-13.190122604370115],["▁færdig",-13.190125465393066],["▁Meld",-13.190128326416016],["▁14.00",-13.190145492553713],["▁Evropsk",-13.190149307250977],["bų",-13.190167427062988],["▁امان",-13.190192222595217],["distrikt",-13.190195083618164],["▁merki",-13.190195083618164],["▁имя",-13.190228462219238],["ючись",-13.190251350402832],["▁Bona",-13.190253257751465],["▁çalışmalar",-13.190267562866213],["이는",-13.190271377563477],["▁செல்ல",-13.190275192260742],["ରାମ",-13.190281867980955],["исте",-13.190285682678224],["UKU",-13.190301895141602],["▁根据",-13.190301895141602],["▁стрес",-13.19031047821045],["SAR",-13.190320014953612],["θεση",-13.190332412719728],["▁bukanlah",-13.190350532531738],["▁Obra",-13.19035816192627],["▁پرست",-13.190361976623535],["▁Pengu",-13.190382957458496],["uliwa",-13.190386772155762],["▁होना",-13.190412521362305],["▁atyre",-13.19041347503662],["▁scoil",-13.190414428710938],["▁Glu",-13.190417289733888],["попул",-13.190448760986328],["▁Walter",-13.19048309326172],["▁сві",-13.190484046936035],["应对",-13.19050407409668],["▁honey",-13.190521240234377],["▁крст",-13.190529823303224],["▁ກະຊວງ",-13.190540313720703],["▁Muk",-13.190559387207031],["▁yanvar",-13.19056224822998],["хом",-13.19056797027588],["oane",-13.190608024597168],["شخص",-13.190618515014648],["mızın",-13.190635681152344],["ительно",-13.190641403198242],["当天",-13.190641403198242],["୍ୟୁ",-13.190649032592772],["▁කාල",-13.190664291381836],["BEN",-13.190665245056152],["▁regal",-13.190686225891112],["▁participe",-13.190707206726074],["▁гради",-13.190720558166504],["zero",-13.190731048583984],["▁kruh",-13.190732955932615],["rikan",-13.190790176391602],["▁Øst",-13.19080638885498],["मौ",-13.190825462341309],["式的",-13.190863609313965],["▁luci",-13.190866470336914],["பாடு",-13.190875053405762],["ଚି",-13.19089126586914],["ېږي",-13.190895080566406],["▁ბოლოს",-13.190897941589355],["▁jaké",-13.190898895263672],["夾",-13.190937995910645],["▁ignora",-13.19094944000244],["텔",-13.190951347351074],["薩",-13.19096851348877],["▁ਵਿਅਕਤੀ",-13.190973281860352],["▁Fómhair",-13.190974235534668],["▁Gebäude",-13.190974235534668],["▁cyntaf",-13.190974235534668],["▁ensimmäinen",-13.190974235534668],["▁właściciel",-13.190974235534668],["▁большинство",-13.190974235534668],["▁जम्मू",-13.190974235534668],["▁ඇවිත්",-13.190974235534668],["굴",-13.190974235534668],["▁berdewam",-13.190975189208984],["▁nửa",-13.190975189208984],["▁эмгек",-13.190975189208984],["为主",-13.190975189208984],["▁srednje",-13.190983772277832],["▁ಇದನ್ನು",-13.190984725952148],["▁만든",-13.190990447998049],["▁diferença",-13.191004753112791],["▁απλά",-13.19100856781006],["▁օգն",-13.19101619720459],["▁shpesh",-13.19101905822754],["▁hoá",-13.191022872924805],["▁ជួយ",-13.19102668762207],["چو",-13.191044807434082],["kang",-13.191057205200195],["▁liksom",-13.191061973571776],["▁කළේය",-13.191075325012209],["▁වග",-13.191080093383787],["▁майка",-13.191091537475586],["▁bildirir",-13.1910982131958],["▁proxectos",-13.191105842590332],["▁екенін",-13.19113063812256],["▁първата",-13.19113063812256],["監督",-1
3.191139221191406],["▁straight",-13.191144943237305],["からです",-13.191161155700684],["▁tammikuuta",-13.19116497039795],["حوا",-13.191167831420898],["▁sawa",-13.19117259979248],["▁tarehe",-13.191184043884276],["▁весьма",-13.191184043884276],["▁Dekor",-13.191201210021973],["▁hed",-13.191201210021973],["▁numara",-13.19121265411377],["▁Âu",-13.191227912902832],["▁karier",-13.19123363494873],["тті",-13.19124984741211],["▁Coco",-13.191262245178224],["▁gerecht",-13.191262245178224],["▁ingår",-13.191292762756348],["▁evin",-13.191296577453612],["▁అంతే",-13.19130516052246],["الك",-13.19131851196289],["କରେ",-13.191336631774902],["biec",-13.191338539123535],["direct",-13.191339492797852],["▁کين",-13.191340446472168],["▁garden",-13.191368103027344],["▁KS",-13.19136905670166],["urri",-13.191386222839355],["▁맛",-13.19140338897705],["ნად",-13.19142246246338],["▁UL",-13.191431999206545],["▁filtre",-13.191441535949709],["▁stane",-13.191452026367188],["▁tesz",-13.191466331481934],["arım",-13.19147777557373],["▁Віктор",-13.19155216217041],["▁ခုနှစ်",-13.191553115844728],["▁чадвар",-13.19156265258789],["order",-13.19160270690918],["▁atrod",-13.19160270690918],["-70",-13.19166088104248],["១៥",-13.191678047180176],["▁příprav",-13.191679954528809],["▁disko",-13.191680908203123],["tuje",-13.191693305969238],["▁juist",-13.191701889038086],["▁slet",-13.191702842712402],["ологи",-13.19170379638672],["▁набор",-13.191707611083984],["▁Publica",-13.19177532196045],["▁siglo",-13.191795349121094],["▁Bod",-13.19179630279541],["▁γράφ",-13.191804885864258],["miti",-13.19183349609375],["佈",-13.191842079162598],["▁italiane",-13.191847801208496],["соф",-13.191849708557127],["новић",-13.191890716552734],["ПИ",-13.191904067993164],["▁objek",-13.191935539245604],["▁nekad",-13.191943168640137],["ελε",-13.191946029663086],["حدد",-13.191946983337402],["▁Fern",-13.191957473754885],["▁Иде",-13.191977500915527],["▁žup",-13.191991806030272],["복지",-13.19200038909912],["▁mistä",-13.192011833190918],["▁նիստ",-13.19201946258545],["▁zakaj",-13.192051887512209],["άλα",-13.1920747756958],["▁אוטובוס",-13.192091941833496],["▁yönetim",-13.192095756530762],["▁μπα",-13.19210147857666],["▁milliard",-13.192124366760254],["DZ",-13.19214153289795],["▁organizacij",-13.192142486572266],["slēdz",-13.192150115966797],["vízi",-13.192155838012695],["▁Beograd",-13.19217014312744],["▁AFP",-13.192193984985352],["▁Dzi",-13.192214012145996],["▁Henri",-13.192214012145996],["ovém",-13.192221641540527],["▁cirk",-13.192231178283691],["▁мандат",-13.192235946655272],["▁sätta",-13.19223690032959],["▁Karte",-13.192240715026855],["▁Gest",-13.192255973815918],["▁очите",-13.192255973815918],["▁ಶೇ",-13.192255973815918],["▁действу",-13.192262649536133],["AMU",-13.192264556884766],["քն",-13.192273139953612],["▁2,4",-13.19228744506836],["fallen",-13.192298889160156],["pine",-13.192301750183104],["剑",-13.192307472229004],["繼",-13.192307472229004],["ھو",-13.192322731018066],["▁Rhe",-13.192325592041016],["봐",-13.192326545715332],["▁Mohammed",-13.192339897155762],["▁indispensable",-13.192339897155762],["▁wspomni",-13.192339897155762],["▁yerləşən",-13.192339897155762],["▁đẳng",-13.192339897155762],["▁1911",-13.192342758178713],["▁Sf",-13.192342758178713],["▁ዋጋ",-13.192342758178713],["▁dibêje",-13.19234561920166],["▁gælder",-13.19234561920166],["▁pamamaga",-13.192346572875977],["▁uliopita",-13.192346572875977],["▁присуство",-13.19234848022461],["آل",-13.19235897064209],["▁ուզում",-13.192360877990724],["▁provok",-13.192378997802734],["▁opreme",-13.192381858825684],["▁навчальних",-13.192383
766174316],["▁otevře",-13.19238567352295],["gràfic",-13.192388534545898],["▁Важно",-13.192395210266112],["ptu",-13.192400932312012],["▁دغو",-13.192408561706545],["▁ילד",-13.19241428375244],["▁बच",-13.192421913146973],["வேண்டும்",-13.192429542541504],["stest",-13.192434310913086],["▁당시",-13.192439079284668],["៕",-13.192447662353516],["▁장소",-13.192461967468262],["▁strateji",-13.192469596862791],["过了",-13.192480087280272],["▁Düz",-13.19249439239502],["▁qiymət",-13.19251537322998],["▁przekona",-13.192527770996094],["▁byw",-13.192559242248535],["基因",-13.192590713500977],["▁ಪುಟ",-13.19259548187256],["คดี",-13.192625045776367],["▁zweiten",-13.192649841308594],["▁있기",-13.19265365600586],["▁état",-13.19265842437744],["上の",-13.192682266235352],["عقد",-13.19272804260254],["▁özelliği",-13.192734718322754],["vante",-13.192757606506348],["▁directa",-13.192758560180664],["▁lateral",-13.192761421203612],["AZA",-13.192769050598145],["দান",-13.192769050598145],["११",-13.192779541015623],["▁гаргаж",-13.192795753479004],["▁എഴുതിയ",-13.192826271057127],["▁მივ",-13.192827224731444],["作成",-13.192827224731444],["▁前",-13.192842483520508],["▁nung",-13.192878723144531],["▁մասնակց",-13.192896842956545],["▁граница",-13.19290256500244],["▁Proje",-13.192913055419922],["நே",-13.19294261932373],["▁меры",-13.19294261932373],["こちら",-13.192951202392578],["▁began",-13.192954063415527],["▁adus",-13.19295597076416],["▁позна",-13.192956924438477],["▁Automobil",-13.192962646484377],["▁ուժ",-13.192970275878906],["bridge",-13.192973136901855],["▁کجا",-13.192983627319336],["▁سعید",-13.193011283874512],["▁grant",-13.19302463531494],["▁padeda",-13.193058013916016],["bied",-13.19308090209961],["жела",-13.193113327026367],["▁رسیدن",-13.193113327026367],["▁aca",-13.193153381347656],["▁закони",-13.193187713623049],["зира",-13.193201065063477],["▁mehan",-13.193219184875488],["▁хотели",-13.19324779510498],["▁kub",-13.193248748779297],["▁mjeseca",-13.193260192871094],["▁Cell",-13.193262100219728],["ಿಸುತ್ತ",-13.193292617797852],["▁അര്",-13.193344116210938],["▁tekem",-13.193351745605469],["▁Nech",-13.19338321685791],["▁Sofia",-13.193391799926758],["▁järel",-13.193442344665527],["demo",-13.193446159362791],["▁เพื่อให้",-13.193449020385742],["▁öröm",-13.19344997406006],["metod",-13.19348430633545],["▁universitat",-13.193504333496094],["អូ",-13.193517684936523],["▁hung",-13.193533897399902],["▁artikolo",-13.193557739257812],["▁કુ",-13.193558692932127],["予以",-13.193559646606444],["▁ún",-13.19357204437256],["▁भूकम्प",-13.193605422973633],["▁paru",-13.193625450134276],["▁ప్రశ్న",-13.193626403808594],["▁ඔහුට",-13.193638801574709],["boot",-13.19364070892334],["▁ನಿಜ",-13.193641662597656],["▁төгрөг",-13.193645477294922],["شل",-13.193666458129885],["▁nauja",-13.193687438964844],["休閒",-13.19370460510254],["▁Tiểu",-13.193708419799805],["▁bądź",-13.193708419799805],["▁Тогда",-13.193708419799805],["▁велосипед",-13.193708419799805],["宣傳",-13.193708419799805],["extérieur",-13.19370937347412],["▁dhaoine",-13.19370937347412],["▁mieszkańców",-13.19371223449707],["▁prilikom",-13.193713188171388],["▁ಹಿರಿಯ",-13.193713188171388],["▁Brussel",-13.193714141845703],["▁महीने",-13.193714141845703],["▁যাচ্ছে",-13.193714141845703],["▁anat",-13.193720817565918],["▁хотелось",-13.19372272491455],["▁Zusammenarbeit",-13.193724632263184],["▁indeholder",-13.1937255859375],["▁exhibi",-13.193730354309082],["▁czemu",-13.19373607635498],["resc",-13.193743705749512],["風景",-13.193747520446776],["身心",-13.193769454956056],["▁biện",-13.193791389465332],["եցի",-13.19381046295166],["ВК",-13.19
3811416625977],["▁plasma",-13.193817138671877],["▁سلطان",-13.193839073181152],["พุทธ",-13.193845748901367],["▁Fed",-13.193853378295898],["出し",-13.19387435913086],["สถานี",-13.19389533996582],["дру",-13.193897247314451],["rød",-13.19390106201172],["ницы",-13.193915367126465],["▁população",-13.193920135498049],["▁Konstitu",-13.193928718566896],["▁Пасля",-13.193931579589844],["充满",-13.193963050842283],["tiye",-13.193964004516602],["igitaj",-13.19398021697998],["▁किसान",-13.19398021697998],["решт",-13.19399070739746],["▁jaksa",-13.193992614746094],["▁атом",-13.194005966186523],["▁പറയുന്ന",-13.19400691986084],["▁принять",-13.194010734558104],["▁kojih",-13.19402313232422],["▁liggen",-13.194025039672852],["▁kono",-13.194048881530762],["▁jugo",-13.19406509399414],["▁üçüncü",-13.194083213806152],["▁Fabi",-13.194085121154783],["▁않는다",-13.194087982177734],["ការងារ",-13.194090843200684],["kocsi",-13.194092750549316],["▁imao",-13.194113731384276],["ख्",-13.19412612915039],["▁liikku",-13.194133758544922],["▁نماید",-13.194144248962402],["▁veten",-13.194146156311035],["▁walba",-13.19416046142578],["书记",-13.194161415100098],["▁గత",-13.194167137145996],["▁Vald",-13.19419765472412],["▁statistic",-13.194202423095703],["▁pair",-13.19423007965088],["သိန္း",-13.194231986999512],["iteten",-13.194236755371094],["▁פרו",-13.194247245788574],["ДАР",-13.194318771362305],["▁balcon",-13.194327354431152],["▁Lust",-13.194331169128418],["radu",-13.1943359375],["▁TC",-13.194388389587402],["▁lazima",-13.194388389587402],["افي",-13.19438934326172],["27)",-13.194461822509766],["ପରେ",-13.194462776184082],["▁določi",-13.194482803344728],["▁dünyaya",-13.194488525390623],["цвет",-13.19448947906494],["Red",-13.194499015808104],["▁метра",-13.19454288482666],["▁elements",-13.19454574584961],["ALAN",-13.194579124450684],["ष्ण",-13.194584846496582],["▁šp",-13.194591522216797],["▁sjell",-13.19462776184082],["▁늘",-13.194653511047363],["리를",-13.194676399230955],["▁Христос",-13.194677352905272],["▁тарих",-13.194684028625488],["▁ayı",-13.194707870483398],["▁fører",-13.194713592529297],["潛",-13.194750785827637],["▁SQL",-13.1947660446167],["ట్ర",-13.194774627685549],["عهد",-13.194804191589355],["▁Aici",-13.19480800628662],["▁kleuren",-13.194811820983888],["άζεται",-13.194839477539062],["ပါသည်။",-13.19485569000244],["▁Mudah",-13.194864273071287],["▁залуу",-13.194896697998049],["accordo",-13.194900512695312],["▁abonnement",-13.194940567016602],["▁napada",-13.194942474365234],["▁ആരോഗ്യ",-13.194945335388184],["▁bando",-13.194984436035156],["▁Наши",-13.195005416870115],["გრძელ",-13.195028305053713],["▁बिहान",-13.195028305053713],["肺",-13.195032119750977],["柏",-13.19503402709961],["סטע",-13.195051193237305],["много",-13.195069313049316],["ွယ်",-13.195072174072266],["▁nguyện",-13.195077896118164],["▁schedule",-13.19507884979248],["▁वरिष्ठ",-13.195079803466797],["▁మరింత",-13.195088386535645],[".06.2018",-13.195116996765137],["▁Rev",-13.1951265335083],["▁ранее",-13.195141792297363],["kát",-13.195143699645996],["▁suht",-13.195161819458008],["အာ",-13.195178031921388],["▁shakl",-13.195192337036133],["彼女",-13.195204734802246],["▁எனது",-13.19521427154541],["ائح",-13.19521713256836],["puoli",-13.195220947265623],["नीय",-13.19524383544922],["▁gars",-13.195252418518066],["จู",-13.19526195526123],["▁tồn",-13.19528865814209],["▁natoque",-13.195318222045898],["Far",-13.195340156555176],["श्य",-13.195383071899414],["▁webshop",-13.195389747619627],["blað",-13.19542407989502],["▁najde",-13.195435523986816],["teikt",-13.195441246032717],["▁بھیج",-13.195446968078612],
["▁нос",-13.195454597473145],["▁manos",-13.195459365844728],["မ်ားကို",-13.195462226867676],["ကူ",-13.195465087890623],["▁Ինչ",-13.19546890258789],["▁jiems",-13.195472717285156],["▁cola",-13.195496559143066],["▁קצר",-13.19550323486328],["▁chair",-13.195523262023926],["▁նշանակ",-13.195526123046877],["▁asiakas",-13.19554042816162],["llaan",-13.195541381835938],["▁veien",-13.195558547973633],["vning",-13.195563316345217],["faz",-13.19558334350586],["▁Gór",-13.195601463317873],["▁سد",-13.195603370666504],["▁ginawa",-13.195616722106934],["वरील",-13.19561767578125],["▁definition",-13.19563102722168],["▁테",-13.195634841918944],["▁উঠ",-13.195643424987791],["▁εκείνο",-13.19566249847412],["region",-13.19567584991455],["▁vehicle",-13.195695877075195],["arnir",-13.19569969177246],["အဖြဲ႔",-13.195707321166992],["حرص",-13.195719718933104],["басы",-13.195741653442385],["▁amelyet",-13.195785522460938],["▁Rol",-13.195791244506836],["స్తున్న",-13.195792198181152],["▁maladie",-13.195793151855469],["sweise",-13.19580078125],["▁vatn",-13.195801734924316],["▁دقیق",-13.19582462310791],["▁våren",-13.195831298828123],["▁ihmiset",-13.195842742919922],["▁pivo",-13.195859909057615],["▁อยาก",-13.195868492126465],["졌",-13.195871353149414],["▁своє",-13.195884704589844],["▁технически",-13.195886611938477],["▁مبلغ",-13.195915222167969],["iac",-13.195928573608398],["▁løsninger",-13.195930480957031],["ഭാവ",-13.195942878723145],["▁איינ",-13.195950508117676],["sön",-13.195953369140623],["▁яам",-13.195964813232422],["ginti",-13.19597053527832],["θέσεις",-13.195972442626951],["▁فرار",-13.195990562438965],["▁دين",-13.196009635925291],["▁قرض",-13.196030616760254],["μισθ",-13.196032524108888],["brit",-13.196037292480469],["▁Uzun",-13.196045875549316],["ential",-13.196049690246582],["▁vërtet",-13.196050643920898],["كور",-13.196069717407228],["▁Dahil",-13.196087837219238],["чила",-13.196107864379885],["іон",-13.196134567260742],["▁себи",-13.19615077972412],["▁풍",-13.196152687072754],["▁Morav",-13.196154594421388],["▁कर्",-13.196172714233398],["ዋን",-13.19620418548584],["▁centrs",-13.19620418548584],["▁нада",-13.196208953857422],["▁proposa",-13.19621467590332],["▁telefone",-13.19621467590332],["bygning",-13.196216583251951],["▁मगर",-13.196252822875977],["ΡΟ",-13.196273803710938],["АРИ",-13.196285247802734],["▁असर",-13.196288108825684],["stej",-13.196290969848633],["ပါတယ်",-13.196309089660645],["本書",-13.196310997009276],["▁dönüş",-13.19631290435791],["▁career",-13.196318626403809],["▁højt",-13.196337699890137],["▁Kram",-13.196346282958984],["▁Câu",-13.196370124816896],["▁харилцаа",-13.196371078491213],["に行って",-13.196389198303224],["▁Müdürü",-13.19639015197754],["▁Bericht",-13.196418762207031],["ماني",-13.19642734527588],["▁müqavilə",-13.196449279785156],["▁μήνυμα",-13.196449279785156],["▁կոդը",-13.196449279785156],["▁vazirligi",-13.196450233459473],["▁اسفند",-13.196452140808104],["▁Թուրքիայի",-13.196454048156738],["▁během",-13.19645881652832],["喜愛",-13.1964693069458],["▁Państwa",-13.19648265838623],["▁pwede",-13.19649600982666],["▁naopak",-13.196516036987305],["տե",-13.196542739868164],["ရက်နေ့",-13.196548461914062],["▁экран",-13.19654941558838],["▁bam",-13.196589469909668],["шкен",-13.196595191955566],["paar",-13.196613311767578],["teesta",-13.19665241241455],["▁Люди",-13.196654319763184],["周期",-13.196654319763184],["▁שונות",-13.196683883666992],["лят",-13.196694374084473],["ಕಲ್",-13.1967134475708],["▁στρατ",-13.196717262268066],["alainen",-13.196731567382812],["▁rinj",-13.196735382080078],["สต์",-13.196754455566406],["▁írta",-13.19675
9223937988],["▁skrb",-13.196765899658203],["▁ഊ",-13.196770668029783],["▁ფარ",-13.19677448272705],["▁imkanı",-13.196792602539062],["▁ವಿಶ್ವ",-13.19680118560791],["▁التع",-13.196812629699709],["gjer",-13.196816444396973],["спортивн",-13.196824073791504],["▁eurų",-13.196831703186035],["▁Gusto",-13.19683837890625],["▁pravila",-13.19683837890625],["の上",-13.19683837890625],["1.3",-13.196869850158691],["▁البد",-13.196874618530272],["と思い",-13.196879386901855],["▁உட",-13.19688320159912],["קום",-13.196885108947754],["▁vinner",-13.196905136108398],["していない",-13.196913719177246],["▁자동차",-13.196924209594728],["এর",-13.19692611694336],["▁eszközök",-13.196959495544434],["▁ಹಾಕಿ",-13.196967124938965],["න්නට",-13.19696807861328],["▁potrafi",-13.196969985961914],["▁گزر",-13.196986198425291],["കല",-13.197039604187012],["szej",-13.19704246520996],["дий",-13.197062492370604],["▁Jor",-13.197068214416504],["▁Monaten",-13.197115898132324],["▁еске",-13.197136878967283],["▁tafel",-13.197144508361816],["▁هيئة",-13.197148323059082],["▁Igor",-13.19716453552246],["ப்பது",-13.197169303894045],["好友",-13.197171211242676],["▁ОС",-13.197195053100586],["▁relaciona",-13.197200775146484],["ΟΙ",-13.197203636169434],["▁Γκ",-13.197209358215332],["ўра",-13.197232246398926],["acció",-13.19723892211914],["വേഷ",-13.197245597839355],["▁Cię",-13.197257041931152],["达成",-13.197267532348633],["▁otsa",-13.19731616973877],["tiniai",-13.197335243225098],["▁АТО",-13.197354316711426],["▁tegel",-13.19736671447754],["▁presque",-13.197372436523438],["▁ອົງການ",-13.197382926940918],["ОЙ",-13.197395324707031],["ለማ",-13.197399139404297],["▁povinn",-13.197409629821776],["▁ног",-13.197429656982422],["ਕੋ",-13.197434425354004],["▁näyttä",-13.197437286376951],["运用",-13.197442054748535],["ندگی",-13.197453498840332],["منى",-13.197469711303713],["ค์",-13.197494506835938],["▁खरीद",-13.197515487670898],["meh",-13.197522163391112],["▁Diya",-13.19756317138672],["▁Feld",-13.197582244873049],["grænse",-13.19759750366211],["▁alanında",-13.197601318359377],["▁Θε",-13.197602272033691],["კეთე",-13.19761562347412],["▁frys",-13.19761848449707],["მანი",-13.197646141052246],["ნთ",-13.197653770446776],["▁سار",-13.197681427001951],["▁روستا",-13.19768238067627],["ပေ",-13.197684288024902],["▁sorgen",-13.197689056396484],["▁intentar",-13.197696685791016],["▁9:00",-13.197720527648926],["▁strict",-13.197724342346191],["▁vay",-13.19773006439209],["▁նպատակով",-13.197766304016112],["▁Registrer",-13.19779109954834],["stamise",-13.197813987731934],["▁HC",-13.197821617126465],["ទូរស័ព្ទ",-13.19782257080078],["▁Niestety",-13.197823524475098],["▁véhicule",-13.197823524475098],["▁પ્રથમ",-13.197823524475098],["▁urządzenia",-13.197824478149414],["▁conoscenza",-13.197833061218262],["▁chặt",-13.197843551635742],["▁vôbec",-13.197843551635742],["普段",-13.19784450531006],["▁त्यहाँ",-13.197845458984377],["▁Hunde",-13.19786548614502],["▁کمیشن",-13.19789218902588],["▁Esperant",-13.197894096374512],["▁suficient",-13.19791030883789],["שווי",-13.19791316986084],["▁fortæller",-13.197925567626951],["▁RC",-13.197930335998535],["csú",-13.197957992553713],["▁الداخلية",-13.197968482971191],["코리아",-13.197982788085938],["▁1908",-13.197985649108888],["uqa",-13.197997093200684],["▁wedding",-13.198010444641112],["ផ្លូវ",-13.198016166687012],["▁licenc",-13.198026657104492],["lanka",-13.198030471801758],["▁aveti",-13.198031425476074],["යේදී",-13.198050498962402],["▁pressure",-13.198050498962402],["▁bear",-13.198057174682615],["ဲ့",-13.198075294494627],["▁okrem",-13.198113441467283],["abileceği",-13.198122024536133],["
jenja",-13.198124885559082],["▁պատճառով",-13.198127746582031],["▁الحج",-13.198131561279297],["▁سیر",-13.198140144348145],["ард",-13.198149681091309],["▁silnik",-13.198150634765623],["▁mob",-13.19815158843994],["մեն",-13.198161125183104],["torum",-13.198164939880373],["сүн",-13.19820499420166],["korea",-13.198213577270508],["▁Finance",-13.198232650756836],["▁Lehen",-13.198238372802734],["▁consiglia",-13.198254585266112],["▁generell",-13.198282241821287],["▁centras",-13.198291778564451],["حام",-13.1983003616333],["▁hoteli",-13.198334693908691],["▁datter",-13.198335647583008],["telje",-13.198346138000488],["ωμ",-13.198351860046388],["▁целиот",-13.198354721069336],["▁spilleautomater",-13.198357582092283],["▁rasanya",-13.198390007019045],["iams",-13.198400497436523],["KOL",-13.198402404785156],["▁SPORT",-13.198402404785156],["▁ואי",-13.198413848876951],["tigt",-13.19841480255127],["parād",-13.198416709899902],["▁primele",-13.198418617248535],["informa",-13.19842529296875],["しよう",-13.198436737060549],["▁Hrvat",-13.198445320129396],["ుకుంటున్న",-13.19845485687256],["дож",-13.19845962524414],["атора",-13.19847297668457],["▁pâ",-13.198505401611328],["▁паведам",-13.19851779937744],["▁చూసి",-13.19853401184082],["mbala",-13.198537826538086],["ຊີ",-13.198586463928224],["近年",-13.198589324951172],["▁հաս",-13.198622703552246],["ಪತಿ",-13.198633193969728],["▁ਵਿਸ਼ਵ",-13.198641777038574],["▁dør",-13.19864559173584],["คอย",-13.19866943359375],["▁korra",-13.198678970336914],["▁Porsche",-13.19869613647461],["▁अड",-13.198708534240724],["դեպի",-13.19871711730957],["▁కావ",-13.198744773864746],["▁여기",-13.19874668121338],["▁Order",-13.198756217956545],["▁Industrial",-13.198758125305176],["▁փոփոխություններ",-13.198772430419922],["ουργ",-13.198779106140137],["▁Владе",-13.198797225952148],["skt",-13.198830604553224],["▁Mult",-13.198838233947754],["HCM",-13.198843002319336],["▁txa",-13.19886589050293],["▁meli",-13.198895454406738],["▁뿐",-13.198908805847168],["eerimine",-13.198930740356444],["AMP",-13.198936462402344],["▁جمعیت",-13.198999404907228],["▁Carrer",-13.199015617370604],["లుగా",-13.19902229309082],["▁შემდეგი",-13.19902229309082],["▁spazi",-13.199049949645996],["的功能",-13.19905948638916],["▁2017)",-13.199064254760742],["ဖတ္",-13.199067115783691],["▁ചോദ്യ",-13.199068069458008],["tiene",-13.199081420898438],["szerzés",-13.19909381866455],["▁અવ",-13.199098587036133],["īts",-13.199102401733398],["ముఖ",-13.199113845825195],["▁lansat",-13.199124336242676],["価",-13.199124336242676],["▁nuolat",-13.199152946472168],["ตู้",-13.199164390563965],["▁Trav",-13.199170112609863],["▁klubb",-13.199170112609863],["▁บน",-13.19919204711914],["บันทึก",-13.19919776916504],["՜",-13.199198722839355],["▁Thánh",-13.199198722839355],["▁stycznia",-13.199198722839355],["▁zabezpiecz",-13.199198722839355],["▁따르면",-13.199198722839355],["▁બાબત",-13.199199676513672],["▁علامت",-13.199204444885254],["▁Пера",-13.199211120605469],["▁informazio",-13.199213027954102],["▁удалось",-13.199214935302734],["▁plkst",-13.19922161102295],["▁ardal",-13.199224472045898],["وقت",-13.19923496246338],["▁ڏسي",-13.199237823486328],["▁falla",-13.199243545532228],["▁kliknite",-13.199249267578123],["▁അവള്",-13.199254035949709],["くなって",-13.199262619018556],["เชื่อ",-13.199274063110352],["也可",-13.199280738830566],["▁yaptır",-13.199283599853516],["▁katër",-13.199296951293944],["▁หลังจาก",-13.19931983947754],["▁Norden",-13.199336051940918],["▁conduce",-13.199337005615234],["хана",-13.199345588684082],["▁cî",-13.199353218078612],["▁comunicació",-13.19935703277588],["ေနတဲ့",-13.19
9359893798828],["▁жүйесі",-13.199363708496094],["▁תורה",-13.19937801361084],["▁marraskuuta",-13.199384689331056],["▁ability",-13.199386596679688],["▁dhib",-13.199408531188965],["▁thoát",-13.199414253234863],["▁نسبة",-13.199418067932127],["コー",-13.19944667816162],["ếp",-13.199455261230469],["ాయని",-13.199456214904783],["Next",-13.199469566345217],["▁förstå",-13.19948959350586],["prendre",-13.199492454528809],["▁VG",-13.19951057434082],["๒",-13.199542045593262],["▁minulle",-13.199544906616213],["▁ورتو",-13.199545860290527],["mówi",-13.199573516845703],["▁mesmos",-13.199575424194336],["对我",-13.199578285217283],["掛け",-13.199586868286133],["рік",-13.199588775634766],["திர",-13.199596405029297],["▁ಪಾತ್ರ",-13.199600219726562],["▁كۆ",-13.199606895446776],["▁ነው፤",-13.199610710144045],["▁такава",-13.199628829956056],["▁kohal",-13.199655532836914],["soma",-13.19968318939209],["▁rute",-13.199702262878418],["zimmer",-13.199721336364746],["▁ellátás",-13.199732780456545],["čkoj",-13.199803352355955],["թե",-13.199814796447754],["रम",-13.199814796447754],["পার",-13.199830055236816],["▁iPod",-13.199848175048828],["▁gösteri",-13.199857711791992],["schuld",-13.19991970062256],["▁qolgan",-13.199929237365724],["▁safari",-13.199934005737305],["ياد",-13.199957847595217],["▁درجة",-13.199958801269531],["▁kato",-13.199987411499023],["▁Кө",-13.199995040893556],["▁السوق",-13.199996948242188],["▁Boden",-13.20000171661377],["expert",-13.20005989074707],["隨時",-13.200060844421388],["▁одред",-13.200071334838867],["中美",-13.200080871582031],["▁ilgis",-13.20010471343994],["▁راج",-13.200105667114258],["પત્ર",-13.200153350830078],["wały",-13.200173377990724],["stag",-13.20017910003662],["درس",-13.200182914733888],["hwy",-13.200201988220217],["▁کسانو",-13.200230598449709],["一群",-13.200236320495604],["меры",-13.200241088867188],["logia",-13.200243949890137],["nifica",-13.200244903564451],["ឌ",-13.200252532958984],["miste",-13.200264930725098],["▁koruma",-13.200264930725098],["▁ελληνική",-13.200302124023438],["束",-13.20030689239502],["▁famosa",-13.20031452178955],["▁imagens",-13.200332641601562],["▁පෝ",-13.20034885406494],["▁bakom",-13.200390815734863],["дити",-13.200399398803713],["▁härlig",-13.20041847229004],["ңіз",-13.200419425964355],["uudised",-13.200429916381836],["▁ഒന്നു",-13.200430870056152],["▁Κου",-13.20046615600586],["邪",-13.200501441955566],["رجع",-13.200531959533691],["▁объяс",-13.200547218322754],["ກົດ",-13.20054817199707],["輝",-13.2005615234375],["спі",-13.200568199157717],["ສູນ",-13.200573921203612],["delik",-13.200575828552246],["မြောက်",-13.200575828552246],["▁kvadrat",-13.200575828552246],["▁میباشد",-13.200575828552246],["▁ኃይል",-13.200575828552246],["▁Infantil",-13.200576782226562],["▁τελικά",-13.200576782226562],["วัยรุ่น",-13.200581550598145],["▁ඇත්තටම",-13.200583457946776],["▁பார்வை",-13.20058822631836],["▁xung",-13.200591087341309],["▁Cent",-13.200599670410156],["▁domácí",-13.200599670410156],["pflicht",-13.200600624084473],["შუ",-13.200610160827637],["▁Oku",-13.200613021850586],["גני",-13.20061492919922],["ოვანი",-13.200624465942385],["ộc",-13.200627326965332],["արժեք",-13.200631141662598],["▁स्वतः",-13.200638771057127],["▁台灣",-13.200642585754396],["▁ظرفیت",-13.20065212249756],["小說",-13.20065212249756],["veren",-13.200654029846191],["ਾਸ",-13.200661659240724],["▁vdekje",-13.200668334960938],["lón",-13.200682640075684],["▁ឆ្នាំ២០១៨",-13.20068645477295],["▁154",-13.200695037841797],["▁মানুষের",-13.200695991516112],["ίο",-13.200701713562012],["níku",-13.20071506500244],["▁företaget",-13.200716018676758],["研究生",
-13.200721740722656],["лыс",-13.200729370117188],["▁גבוהה",-13.20073699951172],["தற்கு",-13.200740814208984],["fă",-13.200762748718262],["lås",-13.20077133178711],["▁реда",-13.20079517364502],["kjø",-13.200798034667969],["ới",-13.200818061828612],["▁Cet",-13.200825691223145],["버스",-13.200840950012209],["عال",-13.200872421264648],["▁serangan",-13.200891494750977],["▁кух",-13.200910568237305],["ន៍",-13.200912475585938],["ດິນ",-13.20092487335205],["jährige",-13.201031684875488],["▁aluminium",-13.20105266571045],["步行",-13.201056480407717],["▁розділ",-13.201058387756348],["block",-13.201067924499512],["▁raus",-13.20107078552246],["▁२९",-13.201086044311523],["▁влез",-13.201107025146484],["▁publiek",-13.2011079788208],["▁טרי",-13.20112133026123],["books",-13.201132774353027],["すべて",-13.201178550720217],["အဖြစ်",-13.201183319091797],["▁souhaitez",-13.20119571685791],["isessä",-13.201210975646973],["κρα",-13.201213836669922],["▁Julian",-13.201214790344238],["cén",-13.201258659362791],["tery",-13.201272010803224],["ຫນ",-13.20127773284912],["になると",-13.20127773284912],["▁познава",-13.201301574707031],["פורט",-13.20130443572998],["▁தின",-13.201333045959473],["ények",-13.201343536376951],["▁heerlijk",-13.20134449005127],["ત્વ",-13.20136833190918],["trici",-13.201376914978027],["成功的",-13.20137882232666],["▁సరి",-13.20138454437256],["улар",-13.201391220092772],["▁Hausa",-13.20139217376709],["Free",-13.201414108276367],["▁Viva",-13.201435089111328],["GRA",-13.20144271850586],["▁hə",-13.201455116271973],["iņā",-13.201467514038086],["líf",-13.201480865478516],["取得了",-13.201557159423828],["▁דאָ",-13.201587677001951],["១៣",-13.20160675048828],["RUM",-13.201610565185549],["øve",-13.201614379882812],["▁simplemente",-13.201631546020508],["▁contacter",-13.20164394378662],["▁provider",-13.201668739318848],["▁جایی",-13.201669692993164],["▁godziny",-13.201680183410645],["▁TIN",-13.201700210571287],["▁Ovaj",-13.201708793640137],["רך",-13.2017240524292],["raw",-13.201729774475098],["▁igår",-13.201735496520996],["▁Spika",-13.201744079589844],["▁konca",-13.201753616333008],["▁uji",-13.201760292053224],["vitamin",-13.201767921447754],["ింగ్",-13.201780319213867],["▁конца",-13.201781272888184],["▁çalışıyor",-13.201786994934082],["verb",-13.201800346374512],["فصل",-13.201822280883787],["قم",-13.201828002929688],["palo",-13.201837539672852],["töku",-13.201848983764648],["Kau",-13.201858520507812],["▁фота",-13.20186996459961],["nera",-13.201870918273926],["▁ඉල්ල",-13.20187282562256],["▁হত্যা",-13.20187759399414],["▁economica",-13.201885223388672],["認證",-13.201913833618164],["▁stilling",-13.201927185058594],["▁நடத்த",-13.201927185058594],["▁ମହ",-13.201942443847656],["負担",-13.201945304870604],["សិក្សា",-13.20195484161377],["ѣ",-13.201955795288086],["▁najlepsze",-13.201955795288086],["▁ہمیشہ",-13.201955795288086],["▁დაკავშირებით",-13.201955795288086],["▁cluiche",-13.201956748962402],["▁προσπάθεια",-13.20195770263672],["▁английски",-13.201960563659668],["▁जाहिरात",-13.201960563659668],["▁eesmärk",-13.2019624710083],["▁экономики",-13.20196533203125],["人工智能",-13.201971054077148],["▁amerikansk",-13.201988220214844],["▁ovoga",-13.201995849609377],["▁नीचे",-13.20200538635254],["▁berpikir",-13.202007293701172],["▁ആത്മ",-13.202013969421388],["▁glæde",-13.202025413513184],["▁سعيد",-13.2020263671875],["ඩා",-13.202028274536133],["▁Līdz",-13.20202922821045],["▁pile",-13.202045440673828],["заслуж",-13.202046394348145],["▁terbesar",-13.202049255371094],["▁fakult",-13.202067375183104],["폐",-13.202094078063965],["▁Risiko",-13.202095985412598],["
▁закладів",-13.202107429504396],["▁comunicar",-13.202110290527344],["接觸",-13.202113151550291],["▁प्रत्यक्ष",-13.202116012573242],["▁በር",-13.202131271362305],["▁শু",-13.202133178710938],["▁يعرف",-13.202134132385254],["▁trzeci",-13.202140808105469],["▁vois",-13.20217514038086],["▁ಹೊಂದ",-13.202186584472656],["ФЗ",-13.202211380004885],["▁عملي",-13.202228546142578],["ພວກ",-13.20223331451416],["▁թվում",-13.202240943908691],["▁accessible",-13.202245712280272],["klud",-13.20224952697754],["matkan",-13.20226764678955],["▁zilei",-13.202278137207031],["▁terça",-13.202282905578612],["hæng",-13.202288627624512],["▁rhannu",-13.202292442321776],["容量",-13.202296257019045],["லர்",-13.202301025390623],["▁دخل",-13.202309608459473],["ஞ",-13.202310562133787],["▁צילום",-13.202317237854004],["ावा",-13.202327728271484],["▁účel",-13.202345848083496],["оно",-13.20237636566162],["▁Лазар",-13.202394485473633],["▁vala",-13.202423095703123],["▁учини",-13.202441215515137],["держать",-13.20246410369873],["aketa",-13.202473640441896],["vazi",-13.202479362487791],["▁Sca",-13.202497482299805],["▁utför",-13.20249843597412],["ሩን",-13.202499389648438],["▁привлече",-13.202499389648438],["▁கட்டுரை",-13.20250415802002],["▁Toma",-13.202507019042969],["tatott",-13.202507972717283],["cenie",-13.202523231506348],["nging",-13.202532768249512],["следовател",-13.202539443969728],["သေဘာ",-13.20254898071289],["▁приятели",-13.202552795410156],["තුව",-13.202571868896484],["ઝા",-13.20258903503418],["▁ustawy",-13.202593803405762],["▁Aktion",-13.20259952545166],["▁جوانان",-13.20260524749756],["▁катастроф",-13.202617645263672],["рение",-13.20270824432373],["olon",-13.20272445678711],["▁ilustr",-13.20272445678711],["▁имот",-13.202725410461426],["▁qüvvə",-13.202728271484377],["▁намери",-13.20273494720459],["Љ",-13.202746391296388],["▁ಮೂರು",-13.202760696411133],["ວດ",-13.202768325805664],["▁ఫి",-13.202787399291992],["หวย",-13.20279026031494],["▁lành",-13.202807426452637],["▁felső",-13.20281219482422],["imat",-13.202815055847168],["▁15.00",-13.202836990356444],["kapa",-13.202847480773926],["ласан",-13.20285415649414],["කල",-13.202862739562988],["hetnek",-13.202874183654783],["กุ",-13.202874183654783],["endre",-13.202886581420898],["▁Хто",-13.20291519165039],["▁ಕ್ರ",-13.202921867370604],["ສະພາ",-13.202922821044922],["▁Rac",-13.202929496765137],["▁Lihat",-13.202947616577148],["συ",-13.20294952392578],["▁nikada",-13.202961921691896],["▁Guardian",-13.202969551086426],["لق",-13.20298957824707],["▁kazao",-13.2030029296875],["ค้นหา",-13.20300579071045],["▁sert",-13.203006744384766],["▁megtalál",-13.203025817871094],["用心",-13.203031539916992],["▁ఆడ",-13.2030668258667],["▁Zone",-13.203100204467772],["▁Ў",-13.203118324279783],["▁tirk",-13.203129768371582],["▁Reka",-13.203160285949709],["▁दम",-13.20320987701416],["ošā",-13.203221321105955],["▁врши",-13.203238487243652],["▁discret",-13.203251838684082],["▁ہو۔",-13.20325756072998],["闭",-13.203269004821776],["乔",-13.203269958496094],["▁motivi",-13.20327091217041],["õp",-13.203302383422852],["▁Durant",-13.203306198120115],["谢谢",-13.203327178955078],["արդար",-13.203328132629396],["粉絲",-13.203328132629396],["납",-13.203332901000977],["ジュ",-13.203333854675291],["▁Tất",-13.203336715698242],["▁tegelikult",-13.203336715698242],["▁zituzten",-13.203336715698242],["▁Үндэсний",-13.203336715698242],["▁በሙሉ",-13.203336715698242],["▁בנוסף",-13.203338623046877],["▁ଗାଁ",-13.203339576721191],["▁ලේ",-13.203341484069824],["運営",-13.20334529876709],["▁Meclis",-13.20335292816162],["▁funguje",-13.203357696533203],["▁penatibus",-13.203357
696533203],["▁proprietà",-13.203362464904783],["▁Thương",-13.203363418579102],["▁pH",-13.20337200164795],["bben",-13.203375816345217],["▁سخن",-13.203375816345217],["展現",-13.203376770019531],["ТВ",-13.203378677368164],["ទាំងអស់",-13.203387260437012],["▁adeeg",-13.203389167785645],["▁amore",-13.20339298248291],["▁scheint",-13.203408241271973],["▁աշխարհ",-13.203408241271973],["▁màxim",-13.203417778015137],["યે",-13.203433990478516],["گران",-13.203445434570312],["▁Екс",-13.20345401763916],["เศรษฐกิจ",-13.203482627868652],["hö",-13.203489303588867],["изован",-13.203495979309082],["▁förstår",-13.203497886657717],["▁ಅನ್ನು",-13.203499794006348],["▁ገንዘብ",-13.20350170135498],["▁рассказал",-13.20352268218994],["▁Proces",-13.203524589538574],["kortti",-13.20353889465332],["တန္း",-13.203540802001951],["▁obecnie",-13.20357608795166],["▁Ege",-13.203584671020508],["▁својих",-13.20359706878662],["招生",-13.203618049621582],["要是",-13.203638076782228],["ენის",-13.203641891479492],["финанс",-13.20366382598877],["▁hatta",-13.203685760498049],["▁bermula",-13.203686714172363],["▁салбар",-13.20368766784668],["रुप",-13.203707695007324],["▁dhis",-13.203715324401855],["gawe",-13.203726768493652],["▁būtina",-13.2037353515625],["▁układ",-13.20373821258545],["▁Money",-13.203749656677246],["▁agir",-13.203774452209473],["▁নিজ",-13.203776359558104],["▁zeleni",-13.203801155090332],["▁trwa",-13.203845977783203],["দ্",-13.203847885131836],["ភាសា",-13.203848838806152],["ovky",-13.20387077331543],["ຄະ",-13.20388412475586],["▁mandato",-13.20388412475586],["▁Revista",-13.203904151916504],["nista",-13.20391845703125],["terv",-13.203923225402832],["▁Soc",-13.203946113586426],["▁ಆಗಿ",-13.204012870788574],["即便",-13.204023361206056],["▁Жол",-13.204025268554688],["不管是",-13.204026222229004],["▁cuốn",-13.204044342041016],["▁cerve",-13.204056739807127],["▁karbon",-13.204065322875977],["▁0,2",-13.204068183898926],["▁нуди",-13.204071044921877],["▁செய்யப்பட்ட",-13.204110145568848],["▁arts",-13.204126358032228],["▁вези",-13.204205513000488],["សី",-13.204214096069336],["stuksen",-13.204232215881348],["▁Համ",-13.204233169555664],["▁apk",-13.204245567321776],["IFA",-13.204253196716309],["▁அட",-13.204258918762209],["▁அவன்",-13.204286575317385],["विषयी",-13.20431423187256],["▁laver",-13.20431900024414],["▁سوق",-13.204328536987305],["ចេញ",-13.204340934753418],["▁ഹോ",-13.204358100891112],["امن",-13.204360961914062],["tase",-13.20436954498291],["▁Tiger",-13.20437240600586],["▁seun",-13.204379081726074],["▁romantisk",-13.20439338684082],["▁опа",-13.204405784606934],["▁Dù",-13.20441436767578],["▁модерн",-13.204440116882324],["▁обмен",-13.204440116882324],["liegt",-13.204448699951172],["▁dll",-13.20447063446045],["▁napis",-13.204479217529297],["സര്",-13.204489707946776],["▁таңда",-13.204497337341309],["▁ΜΕ",-13.204498291015623],["▁gratul",-13.204545021057127],["▁atent",-13.20455265045166],["▁wakil",-13.204575538635254],["▁patienter",-13.204598426818848],["znie",-13.20462131500244],["ခ်င္း",-13.204632759094238],["▁informacion",-13.2046537399292],["▁Recept",-13.204654693603516],["▁0.5",-13.204663276672363],["ټو",-13.204679489135742],["▁quota",-13.20469093322754],["념",-13.204696655273438],["försäkring",-13.204719543457031],["▁wrinkles",-13.204719543457031],["▁отношений",-13.204719543457031],["▁Lưu",-13.204721450805664],["▁spotkania",-13.20472240447998],["ម្ចាស់",-13.204723358154297],["▁विश्वकप",-13.204723358154297],["שיתוף",-13.204724311828612],["▁შესა",-13.204730987548828],["▁промяна",-13.204744338989258],["▁teszi",-13.204750061035156],["▁દૂર",-13.204751968
383787],["หนู",-13.204752922058104],["▁возила",-13.204754829406738],["▁என்றால்",-13.204754829406738],["▁rokmi",-13.204755783081056],["▁चुकी",-13.204760551452637],["▁trakcie",-13.2047700881958],["▁Şubat",-13.204771041870115],["▁növény",-13.20478630065918],["TEC",-13.204791069030762],["▁그녀",-13.204792976379396],["▁внесен",-13.204815864562988],["▁(2011)",-13.20481777191162],["▁quiz",-13.204837799072266],["୍ମ",-13.204843521118164],["▁hakuna",-13.204845428466797],["▁oblast",-13.204875946044922],["▁escolha",-13.204901695251465],["▁כה",-13.204904556274414],["128",-13.204916954040527],["तक",-13.204919815063477],["明日",-13.204928398132324],["κων",-13.204941749572754],["▁fantastiske",-13.204949378967283],["izacija",-13.204984664916992],["▁dün",-13.205002784729004],["cţia",-13.205020904541016],["▁Sprache",-13.205020904541016],["▁partager",-13.205026626586914],["▁대한민국",-13.205032348632812],["▁Sme",-13.205035209655762],["YU",-13.205038070678713],["▁ഭാഷ",-13.20504379272461],["▁ürünler",-13.205049514770508],["See",-13.205073356628418],["計画",-13.205077171325684],["▁linggo",-13.205086708068848],["ரில்",-13.205101013183594],["americano",-13.205103874206545],["▁berean",-13.205105781555176],["▁боја",-13.205105781555176],["▁kelib",-13.205124855041504],["▁kalbos",-13.205169677734377],["kadang",-13.205171585083008],["▁geçme",-13.20517635345459],["▁вроде",-13.20518970489502],["واء",-13.205194473266602],["▁dipa",-13.205206871032717],["▁gering",-13.205212593078612],["љења",-13.205219268798828],["LN",-13.20522117614746],["▁greutate",-13.205225944519045],["стта",-13.205251693725586],["riq",-13.205257415771484],["▁гала",-13.2052640914917],["▁sånn",-13.205284118652344],["OSS",-13.205296516418455],["pored",-13.205309867858888],["好多",-13.205316543579102],["▁nạ",-13.205358505249023],["арын",-13.205360412597656],["▁опор",-13.205368995666504],["▁అయినా",-13.205370903015137],["▁ढ",-13.205388069152832],["ntiems",-13.205389022827148],["▁engage",-13.205395698547363],["▁награди",-13.205415725708008],["hány",-13.205421447753906],["เลีย",-13.205422401428224],["▁intermedi",-13.205425262451172],["лердің",-13.205430030822754],["mobiliar",-13.205451011657717],["musi",-13.205452919006348],["▁fonds",-13.205470085144045],["▁spozna",-13.205473899841309],["bica",-13.2055082321167],["▁8000",-13.20551872253418],["posición",-13.205549240112305],["▁gore",-13.205561637878418],["ובר",-13.20556354522705],["lingan",-13.20556926727295],["How",-13.205574035644531],["Var",-13.20559787750244],["▁photograph",-13.205599784851074],["שן",-13.20561695098877],["▁1,0",-13.205625534057615],["မန္",-13.205636978149414],["グラ",-13.205665588378906],["ംഗ്",-13.205670356750488],["parol",-13.205684661865234],["▁Denk",-13.20568561553955],["▁Генерал",-13.20568561553955],["ىڭىز",-13.205689430236816],["दं",-13.20570182800293],["▁በደ",-13.205706596374512],["▁Joh",-13.205735206604004],["Pal",-13.205737113952637],["▁myndir",-13.20574951171875],["▁election",-13.2057523727417],["▁acusa",-13.205760955810549],["смо",-13.205767631530762],["▁Reng",-13.205785751342772],["▁Banka",-13.20578670501709],["aleko",-13.205790519714355],["▁nią",-13.205790519714355],["ค้า",-13.205791473388672],["Мен",-13.205798149108888],["▁yaşadığı",-13.205799102783203],["▁berisi",-13.20585823059082],["▁სტა",-13.205869674682615],["的核心",-13.205870628356934],["▁brott",-13.205894470214844],["ίκ",-13.205930709838867],["вою",-13.205944061279297],["▁دس",-13.20595932006836],["ספר",-13.20596694946289],["の人",-13.205977439880373],["वर्ण",-13.205979347229004],["▁oferuje",-13.206003189086914],["▁Comunica",-13.206048011779
783],["ແລ",-13.2060546875],["▁akka",-13.206066131591797],["信仰",-13.206076622009276],["▁έχετε",-13.20610523223877],["▁είσαι",-13.20610523223877],["▁τίποτα",-13.20610523223877],["▁помочь",-13.20610523223877],["▁تفاوت",-13.20610523223877],["▁wyłącznie",-13.206106185913086],["▁старонкі",-13.206111907958984],["க்காக",-13.206114768981934],["▁chaud",-13.20611572265625],["▁ერთხელ",-13.206116676330566],["▁stránok",-13.2061185836792],["▁איינער",-13.206132888793944],["▁люблю",-13.206149101257324],["▁ساعات",-13.206151008605955],["可惜",-13.206154823303224],["ējas",-13.206168174743652],["ieť",-13.206172943115234],["▁ospita",-13.206172943115234],["▁ratu",-13.20617389678955],["▁attempt",-13.206180572509766],["விட்டது",-13.206189155578612],["▁શકો",-13.206205368041992],["..........",-13.206206321716309],["▁gelme",-13.206214904785156],["žė",-13.206219673156738],["szło",-13.20623016357422],["rinë",-13.20623779296875],["▁timbul",-13.206241607666016],["وڈ",-13.206242561340332],["▁בלתי",-13.206247329711914],["ဆိုး",-13.206254959106444],["▁educação",-13.206262588500977],["▁safety",-13.206263542175291],["▁طبي",-13.206269264221191],["台南",-13.206273078918455],["▁POST",-13.206289291381836],["အားလုံး",-13.206304550170898],["▁wzor",-13.206304550170898],["▁Рі",-13.206310272216797],["▁welcome",-13.206311225891112],["▁teria",-13.206316947937012],["ี้ย",-13.206320762634276],["了他",-13.20633602142334],["▁പ്",-13.206342697143556],["▁തിരിച്ചു",-13.206343650817873],["▁മൂന്നു",-13.2063570022583],["cando",-13.206369400024414],["▁32.",-13.206375122070312],["▁ispit",-13.206377983093262],["▁sonrası",-13.206378936767578],["▁ஆண்",-13.206379890441896],["▁түүх",-13.206382751464844],["ほうが",-13.20638942718506],["manns",-13.206395149230955],["▁đón",-13.206395149230955],["▁chuť",-13.206401824951172],["ђењ",-13.20643138885498],["יפות",-13.206440925598145],["▁ediblər",-13.206459999084473],["▁ungdom",-13.206460952758787],["čným",-13.206462860107422],["▁Sof",-13.20647144317627],["െത്തിയ",-13.206486701965332],["▁ٿيندي",-13.206510543823242],["▁نوشت",-13.20651149749756],["メニュー",-13.206530570983888],["▁Fehler",-13.206538200378418],["لۈك",-13.20656681060791],["▁māk",-13.20659065246582],["▁Pastor",-13.20659351348877],["▁mėn",-13.206600189208984],["▁женщины",-13.2066011428833],["irat",-13.20660400390625],["chon",-13.206615447998049],["12.2017",-13.206632614135742],["▁viisi",-13.20668125152588],["mult",-13.20669174194336],["贫困",-13.206695556640623],["fres",-13.20669937133789],["▁듯",-13.20669937133789],["oida",-13.206707000732422],["▁Mio",-13.206719398498535],["▁samle",-13.20676040649414],["חוש",-13.206761360168455],["иск",-13.206767082214355],["უზ",-13.206775665283203],["▁smaak",-13.206787109375],["په",-13.206794738769531],["주택",-13.206794738769531],["මිනි",-13.206836700439451],["▁organizacije",-13.206847190856934],["▁تعلقات",-13.20686149597168],["▁oddi",-13.206913948059082],["▁линии",-13.206913948059082],["▁sebagainya",-13.20691967010498],["entzako",-13.20693588256836],["▁serien",-13.206939697265623],["▁potreba",-13.206944465637209],["▁гроз",-13.206951141357422],["▁pikk",-13.206971168518066],["աբեր",-13.207021713256836],["zenia",-13.207037925720217],["እኔ",-13.207048416137695],["къде",-13.20705223083496],["▁البا",-13.207056045532228],["▁щось",-13.207087516784668],["langkah",-13.207098960876465],["ୀଙ୍କ",-13.207115173339844],["sakyti",-13.20711898803711],["▁另外",-13.207120895385742],["▁viatge",-13.20712661743164],["▁exercise",-13.20713233947754],["▁perfecto",-13.207157135009766],["▁Ска",-13.20716667175293],["lardı",-13.207176208496094],["▁halua",-13.2071886
06262209],["อส",-13.207209587097168],["▁нарушение",-13.207209587097168],["▁latina",-13.207230567932127],["▁မှာ",-13.20727825164795],["control",-13.20730209350586],["▁වයස",-13.207322120666504],["▁Roj",-13.2073392868042],["ຖືກ",-13.20736026763916],["▁sueño",-13.20736312866211],["▁cle",-13.207366943359377],["λού",-13.20738124847412],["▁hyväksy",-13.20738410949707],["nemen",-13.207387924194336],["▁payment",-13.207395553588867],["писали",-13.207420349121094],["କ୍ସ",-13.207448959350586],["▁kalender",-13.207459449768066],["▁потпуно",-13.207459449768066],["▁moraju",-13.2074613571167],["▁ಮಂದಿ",-13.20748233795166],["เจ็บ",-13.20748805999756],["▁Nit",-13.207489013671877],["ongoza",-13.207490921020508],["чаны",-13.207490921020508],["সম্পাদনা",-13.207491874694824],["▁Cộng",-13.207491874694824],["▁Sûriyê",-13.207491874694824],["▁fondamentale",-13.207491874694824],["▁означает",-13.207491874694824],["▁ביטוח",-13.207491874694824],["▁جذاب",-13.207491874694824],["▁ধর্ম",-13.207491874694824],["▁പരാതി",-13.207491874694824],["롭",-13.207491874694824],["▁مبتلا",-13.20749282836914],["陌生",-13.20749282836914],["▁Saga",-13.207494735717772],["▁kematian",-13.207494735717772],["▁Києва",-13.20749568939209],["▁ситуација",-13.20749568939209],["▁вайны",-13.207500457763672],["▁Spot",-13.20751667022705],["▁разполага",-13.20751953125],["旗下",-13.20752239227295],["▁אותנו",-13.207537651062012],["▁וועגן",-13.207538604736328],["▁لکه",-13.207538604736328],["▁pracowników",-13.207552909851074],["တစ်ဦး",-13.207562446594238],["▁Bekijk",-13.207579612731934],["▁köt",-13.20758056640625],["▁recurso",-13.207592964172363],["63)",-13.207599639892578],["▁Jelen",-13.207601547241213],["をした",-13.207612991333008],["▁फरक",-13.20762062072754],["▁erakunde",-13.207622528076172],["ອີກ",-13.207633018493652],["▁укук",-13.207642555236816],["Commerce",-13.20765495300293],["▁бисте",-13.20766258239746],["▁vizuri",-13.207669258117676],["▁сьв",-13.207672119140623],["▁zapad",-13.207681655883787],["▁Eski",-13.207685470581056],["त्ती",-13.207686424255373],["ÁT",-13.207698822021484],["▁kdyby",-13.207703590393066],["▁വിവര",-13.207715034484863],["ycznym",-13.207724571228027],["▁murit",-13.207764625549316],["▁Atlant",-13.20778751373291],["eissa",-13.207797050476074],["лос",-13.207822799682615],["lõ",-13.207839012145996],["▁articolo",-13.207844734191896],["▁важни",-13.207846641540527],["▁Ramazan",-13.207855224609377],["▁گئے۔",-13.207857131958008],["circ",-13.20786190032959],["▁atšķir",-13.207863807678224],["asyonu",-13.207868576049805],["▁vizual",-13.207883834838867],["▁پوري",-13.207894325256348],["▁SEX",-13.207908630371094],["▁aufgrund",-13.20793628692627],["ക്ഷി",-13.207940101623535],["нац",-13.207958221435549],["▁stanovanj",-13.207965850830078],["▁главата",-13.207975387573242],["▁Departamento",-13.207979202270508],["▁celebración",-13.20799446105957],["▁Tarix",-13.20801830291748],["▁Consulta",-13.208020210266112],["▁Quid",-13.208023071289062],["impression",-13.20802402496338],["▁LP",-13.208027839660645],["добра",-13.208053588867188],["▁tespit",-13.208073616027832],["▁họp",-13.208075523376465],["▁Constru",-13.208078384399414],["▁recebe",-13.208120346069336],["▁نمای",-13.208120346069336],["▁laag",-13.208126068115234],["▁силно",-13.208126068115234],["ginn",-13.208131790161133],["▁stranu",-13.208131790161133],["▁sensible",-13.208142280578612],["▁කතාවක්",-13.208145141601562],["▁Camping",-13.208158493041992],["▁técnicos",-13.208168029785156],["bib",-13.20820140838623],["▁legg",-13.208221435546877],["كيم",-13.208266258239746],["arski",-13.208348274230955],["dīju",-13.2083921
43249512],["доў",-13.20839786529541],["ԵՆ",-13.20840072631836],["▁Kär",-13.20842742919922],["▁شفاف",-13.20842742919922],["قوا",-13.208447456359863],["一篇",-13.20844841003418],["ಡುವ",-13.208457946777344],["บิน",-13.20846939086914],["ٺي",-13.20849323272705],["jab",-13.20850658416748],["culi",-13.208520889282228],["ថ្នាក់",-13.208520889282228],["цией",-13.208521842956545],["▁മാസ",-13.208532333374023],["▁podróż",-13.208540916442873],["narod",-13.20854377746582],["anud",-13.208547592163086],["သစ်",-13.208553314208984],["▁szcze",-13.208571434020996],["できた",-13.208580017089844],["▁contesta",-13.20859146118164],["▁arritur",-13.208600044250488],["▁corresponde",-13.208614349365234],["▁faaliyet",-13.208614349365234],["ЛЬ",-13.208626747131348],["▁frekar",-13.20863151550293],["jós",-13.208710670471191],["▁gyv",-13.208715438842772],["ышы",-13.20872688293457],["मन्त्र",-13.208765983581545],["ષ્ટ",-13.208768844604492],["Кон",-13.208806037902832],["▁roko",-13.208820343017578],["担任",-13.208826065063477],["počet",-13.20883846282959],["不明",-13.208839416503906],["тээ",-13.208852767944336],["vena",-13.20887565612793],["र्ण",-13.20887565612793],["ႇ",-13.20887851715088],["▁Allerdings",-13.208880424499512],["▁dzisiaj",-13.208880424499512],["▁дүүргийн",-13.208880424499512],["▁представлява",-13.208880424499512],["▁توصیه",-13.208880424499512],["▁बाहेर",-13.208880424499512],["▁მიუხედავად",-13.208880424499512],["ပထမ",-13.208881378173828],["▁Ancaq",-13.208881378173828],["▁keseluruhan",-13.208881378173828],["▁shunday",-13.208881378173828],["▁médecin",-13.208882331848145],["▁pilsētā",-13.20888328552246],["▁Herêma",-13.208884239196776],["▁alrededor",-13.20888614654541],["▁roślin",-13.208887100219728],["裝置",-13.208889961242676],["▁그냥",-13.208894729614258],["▁सुबह",-13.208895683288574],["▁প্রকাশিত",-13.208897590637209],["كتور",-13.208916664123535],["عم",-13.208930015563965],["▁అసలు",-13.208934783935549],["▁Gesamt",-13.20895290374756],["浪漫",-13.208958625793455],["▁Ngân",-13.20896339416504],["Gar",-13.20897102355957],["▁Mexico",-13.20897388458252],["我们在",-13.208985328674316],["▁الجسم",-13.20899772644043],["صاد",-13.209001541137695],["Bil",-13.209007263183594],["ико",-13.209023475646973],["我和",-13.209030151367188],["▁پهرين",-13.209038734436035],["他也",-13.209059715270996],["▁Кай",-13.209075927734377],["▁reveni",-13.209081649780272],["▁социальной",-13.20908260345459],["ဘက္",-13.209087371826172],["▁مص",-13.20911979675293],["ickou",-13.20914077758789],["▁Tutti",-13.209158897399902],["▁zilnic",-13.209162712097168],["▁εθνικ",-13.209166526794434],["▁namna",-13.209171295166016],["lové",-13.209181785583496],["▁ср",-13.209195137023926],["дердің",-13.209209442138672],["▁fati",-13.20921516418457],["▁दर्द",-13.209229469299316],["▁данным",-13.209246635437012],["vakt",-13.209250450134276],["▁magasin",-13.209258079528809],["▁ଆପଣଙ୍କ",-13.209266662597656],["▁Nek",-13.209288597106934],["usių",-13.20930004119873],["gast",-13.209303855895996],["▁നടി",-13.209307670593262],["▁berge",-13.209312438964844],["▁სისტემა",-13.20932674407959],["트리",-13.209327697753906],["koodi",-13.209331512451172],["▁салып",-13.209376335144045],["pēt",-13.209393501281738],["āks",-13.209393501281738],["▁راپور",-13.209415435791016],["ставете",-13.209424018859863],["の方は",-13.209444046020508],["▁Lion",-13.209449768066406],["سائل",-13.209476470947266],["▁professionale",-13.209478378295898],["But",-13.209482192993164],["nič",-13.20948600769043],["▁તેમાં",-13.209495544433594],["▁náklad",-13.209497451782228],["▁كۆز",-13.209500312805176],["▁amerikanske",-13.209506034851074],["erken",-
13.209516525268556],["▁السودان",-13.20956325531006],["ंसाठी",-13.20957851409912],["▁meid",-13.209587097167969],["▁samfund",-13.209589004516602],["▁huid",-13.209640502929688],["▁Pole",-13.209650039672852],["arto",-13.209665298461914],["▁उम्मेदवार",-13.20969009399414],["pasta",-13.209697723388672],["หิน",-13.209717750549316],["▁ପଡି",-13.209721565246582],["▁exit",-13.209728240966797],["▁سبق",-13.209739685058594],["դա",-13.209755897521973],["yeti",-13.20981788635254],["ያዘ",-13.209820747375488],["۔۔",-13.209839820861816],["▁դիմ",-13.209840774536133],["▁Nieder",-13.209848403930664],["▁allmän",-13.209877014160156],["▁それ",-13.209877967834473],["▁βοηθ",-13.20988655090332],["yys",-13.209891319274902],["▁kanser",-13.209896087646484],["▁банки",-13.209917068481444],["▁।”",-13.209924697875977],["▁gida",-13.209925651550291],["ваться",-13.209933280944824],["▁tempora",-13.20994472503662],["▁دادند",-13.20994472503662],["ird",-13.20995044708252],["Euro",-13.209951400756836],["krá",-13.209986686706545],["رسل",-13.210000038146973],["▁Вес",-13.210015296936035],["тог",-13.210018157958984],["▁Кој",-13.2100248336792],["핑",-13.21006965637207],["▁branco",-13.21008014678955],["▁2、",-13.21012020111084],["▁Apostol",-13.210142135620115],["▁vlastne",-13.21015453338623],["▁Rog",-13.210161209106444],["okról",-13.21019458770752],["憑",-13.210220336914062],["烧",-13.210227966308594],["猪",-13.210234642028809],["ТЕЛ",-13.210235595703123],["罰",-13.21023654937744],["連續",-13.210247039794922],["圧",-13.210251808166504],["Ậ",-13.210271835327148],["▁MATOKEO",-13.210271835327148],["▁hamkorlik",-13.210271835327148],["▁књига",-13.210271835327148],["▁հանդես",-13.210271835327148],["▁शनिवार",-13.210271835327148],["▁ಅಭ್ಯರ್ಥಿ",-13.210271835327148],["▁ተብሎ",-13.210271835327148],["▁знову",-13.210274696350098],["▁raspuns",-13.21027946472168],["vereniging",-13.21029281616211],["stjóra",-13.210293769836426],["▁homens",-13.210298538208008],["超越",-13.210315704345703],["▁intelectual",-13.210323333740234],["▁한편",-13.21035385131836],["κοινων",-13.210360527038574],["▁مدرس",-13.210368156433104],["▁anaye",-13.210369110107422],["▁ziemlich",-13.2103853225708],["▁bibin",-13.210402488708496],["ໄວ",-13.210427284240724],["▁cháu",-13.210451126098633],["▁הדרך",-13.210463523864746],["▁Serokê",-13.21047306060791],["2.8",-13.210474014282228],["▁mudar",-13.210492134094238],["▁lisan",-13.210495948791504],["▁ludi",-13.210503578186035],["▁परेका",-13.210554122924805],["ჭა",-13.21055507659912],["▁पठ",-13.210600852966309],["UDA",-13.210601806640623],["▁جسے",-13.210606575012209],["времено",-13.210609436035156],["▁дожд",-13.210618019104004],["문의",-13.21062183380127],["▁బాధ",-13.210627555847168],["ចិន",-13.2106294631958],["實在",-13.210646629333496],["▁YOU",-13.21066188812256],["ակա",-13.210691452026367],["▁اسپ",-13.21069622039795],["▁yili",-13.210699081420898],["zahlen",-13.210708618164062],["кош",-13.210732460021973],["▁taklif",-13.210760116577148],["▁марка",-13.21077823638916],["низ",-13.21078109741211],["▁vpliva",-13.210783004760742],["▁aspetti",-13.21078395843506],["领域的",-13.210792541503906],["▁ගියේ",-13.210793495178224],["ŠT",-13.210800170898438],["▁krog",-13.210810661315918],["▁Maji",-13.210855484008787],["▁таң",-13.210878372192385],["kmetij",-13.210886001586914],["ጊ",-13.210887908935549],["ància",-13.210899353027344],["▁தமிழர்",-13.21090030670166],["aldean",-13.210926055908203],["▁dri",-13.210939407348633],["▁നഷ്ട",-13.210939407348633],["▁dunque",-13.21095085144043],["chop",-13.210960388183594],["▁ödül",-13.210960388183594],["יקים",-13.210963249206545],["ينو",-13.210971832
27539],["käyt",-13.210975646972656],["સ્પ",-13.210975646972656],["OF",-13.210976600646973],["▁gió",-13.210977554321287],["0-1",-13.210991859436035],["▁Slim",-13.211017608642578],["ीका",-13.21102237701416],["▁malas",-13.211029052734377],["nskog",-13.211087226867676],["▁acestuia",-13.211103439331056],["▁avrebbe",-13.211105346679688],["BF",-13.211132049560549],["mamış",-13.211133003234863],["▁dereito",-13.211140632629396],["▁Таа",-13.211143493652344],["由此",-13.211148262023926],["੍ਹ",-13.211158752441406],["▁Više",-13.211159706115724],["▁الحب",-13.21116542816162],["▁մաս",-13.211166381835938],["▁кылган",-13.211181640625],["▁édes",-13.211190223693848],["Iz",-13.211207389831545],["▁컴퓨터",-13.211207389831545],["▁позволя",-13.211220741271973],["▁Bukit",-13.21124267578125],["▁cache",-13.211263656616213],["▁Cle",-13.21127700805664],["▁току",-13.211284637451172],["▁الفلسطيني",-13.211292266845703],["人氣",-13.211298942565918],["▁divin",-13.211368560791016],["▁ανάπτυξη",-13.211381912231444],["lup",-13.211400985717772],["▁Czas",-13.211400985717772],["ുണ്ട",-13.211418151855469],["▁PV",-13.21142864227295],["▁zatem",-13.211435317993164],["ידה",-13.211443901062012],["▁زى",-13.211492538452148],["ंडा",-13.21150016784668],["▁Երեւանում",-13.211509704589844],["▁ئۆي",-13.211518287658691],["▁vöru",-13.211536407470703],["tīv",-13.211557388305664],["までに",-13.211560249328612],["страш",-13.211573600769045],["മുണ്ട്",-13.211578369140623],["przed",-13.211585998535156],["مؤسس",-13.211603164672852],["cius",-13.211609840393066],["▁عبارت",-13.21161937713623],["兆",-13.211630821228027],["málum",-13.211652755737305],["▁dance",-13.21165370941162],["▁بلوچ",-13.21165657043457],["ផ្ទាល់",-13.211664199829102],["▁intéressant",-13.211664199829102],["▁mahasiswa",-13.211664199829102],["▁ακριβώς",-13.211664199829102],["▁Վերջին",-13.211664199829102],["▁տոկոս",-13.211664199829102],["▁kollektiv",-13.211668014526367],["▁prijedlog",-13.211668968200684],["▁szereplő",-13.211671829223633],["▁бағалау",-13.211675643920898],["ičke",-13.211685180664062],["经理",-13.211685180664062],["čnega",-13.211687088012695],["▁Nová",-13.211699485778809],["▁цело",-13.21170425415039],["▁Германија",-13.211705207824709],["▁vrijdag",-13.21172332763672],["sminister",-13.211724281311035],["▁visiškai",-13.211737632751465],["▁163",-13.211739540100098],["▁개선",-13.211740493774414],["▁likevel",-13.211745262145996],["▁выступа",-13.211756706237791],["▁Presse",-13.211777687072754],["May",-13.2117919921875],["▁යුතුයි",-13.211804389953612],["▁Sean",-13.211807250976562],["yiz",-13.211834907531738],["▁problemlər",-13.21187973022461],["▁currently",-13.211909294128418],["▁എന്നാണ്",-13.21191120147705],["▁тарых",-13.211923599243164],["▁операции",-13.21193504333496],["▁obvi",-13.21194839477539],["▁상대",-13.211950302124023],["▁نيز",-13.211970329284668],["▁məqsədi",-13.211973190307615],["▁informaciją",-13.21197509765625],["▁estaban",-13.211993217468262],["▁آبی",-13.211996078491213],["▁acompaña",-13.212027549743652],["乃",-13.2120361328125],["▁ogląda",-13.21205997467041],["シー",-13.212060928344728],["führen",-13.21206283569336],["▁បង្ហាញ",-13.21206760406494],["impegno",-13.212071418762209],["tkazish",-13.212072372436523],["▁팬",-13.21207332611084],["ဆိုင္",-13.212075233459473],["▁וכו",-13.212077140808104],["ીના",-13.212081909179688],["▁partai",-13.212090492248535],["hata",-13.212100982666016],["3,4",-13.212121963500977],["kanal",-13.212122917175291],["ural",-13.21212387084961],["рут",-13.212139129638672],["▁ನಿನ್ನ",-13.21214485168457],["에서의",-13.212159156799316],["▁Nomor",-13.212162971496582],["▁дожи
в",-13.212203025817873],["ırken",-13.21221160888672],["▁العالمي",-13.212224006652832],["▁звяр",-13.21224594116211],["三個",-13.21224880218506],["▁автора",-13.212250709533691],["▁ബെ",-13.212251663208008],["hea",-13.21231746673584],["▁rokoch",-13.212319374084473],["▁ආව",-13.212345123291016],["▁finance",-13.212355613708496],["▁নিন",-13.212359428405762],["▁елдер",-13.212367057800291],["▁सिं",-13.212372779846191],["бина",-13.21237564086914],["ozo",-13.21238136291504],["▁imena",-13.212393760681152],["нського",-13.21240234375],["ndak",-13.212421417236328],["总体",-13.212434768676758],["▁않았",-13.212445259094238],["setzt",-13.212449073791504],["▁derzeit",-13.21246337890625],["▁सोड",-13.212477684020996],["kilala",-13.21249294281006],["стите",-13.212516784667969],["▁fundi",-13.212522506713867],["ατε",-13.21253776550293],["가요",-13.212549209594728],["▁Marine",-13.212569236755373],["▁Хө",-13.212589263916016],["▁умер",-13.212591171264648],["▁չափ",-13.212594985961914],["▁staten",-13.212597846984863],["▁1998.",-13.21262550354004],["IWA",-13.212638854980469],["ಿದರೆ",-13.212638854980469],["▁сим",-13.21264934539795],["ólogo",-13.212650299072266],["公路",-13.212652206420898],["რდება",-13.21265983581543],["khona",-13.212661743164062],["▁økt",-13.212693214416504],["empresa",-13.21269989013672],["天氣",-13.21270751953125],["heb",-13.212709426879885],["▁parere",-13.21274757385254],["▁arată",-13.212778091430664],["▁zavar",-13.212788581848145],["▁öneri",-13.212790489196776],["toris",-13.212799072265623],["▁Rain",-13.2128267288208],["▁Monta",-13.212835311889648],["IPA",-13.212862014770508],["lanishi",-13.21286392211914],["▁مفهوم",-13.212881088256836],["leo",-13.212889671325684],["опи",-13.212894439697266],["▁Bomb",-13.212902069091797],["▁râ",-13.212910652160645],["പന",-13.21291446685791],["▁artigos",-13.21291446685791],["geme",-13.21291732788086],["净",-13.212931632995604],["識",-13.212937355041504],["▁GAL",-13.21294403076172],["▁ପୁଲିସ",-13.212955474853516],["▁చెప్పిన",-13.212966918945312],["锁",-13.212968826293944],["ँदा",-13.212989807128906],["စံု",-13.212992668151855],["ذار",-13.213000297546388],["▁Portu",-13.213006019592283],["▁Milo",-13.213006973266602],["führung",-13.213010787963867],["深化",-13.213027000427246],["▁సమస్య",-13.213032722473145],["欺",-13.21303367614746],["纽约",-13.21304416656494],["アニメ",-13.213047981262209],["▁gebe",-13.213050842285156],["▁töötaja",-13.213056564331056],["▁chuyến",-13.213059425354004],["▁wzrost",-13.213059425354004],["▁ಕುಟುಂಬ",-13.213059425354004],["▁šiandien",-13.21306037902832],["▁ଦାବି",-13.21306037902832],["▁ترسره",-13.213074684143066],["ได้ว่า",-13.213080406188965],["▁nedá",-13.213090896606444],["▁legale",-13.21314811706543],["▁الدفاع",-13.21314811706543],["▁বিএনপির",-13.21316623687744],["▁riserva",-13.213172912597656],["▁ئالدى",-13.213193893432615],["▁kva",-13.213210105895996],["▁иргэн",-13.213242530822754],["▁तपाईको",-13.213242530822754],["▁habere",-13.21324634552002],["чкој",-13.213254928588867],["▁ย",-13.213258743286133],["▁Ogni",-13.213266372680664],["▁Егер",-13.21328067779541],["▁Защо",-13.213281631469728],["rosa",-13.213292121887209],["аюцца",-13.213303565979004],["ėjus",-13.213311195373535],["▁חלו",-13.213323593139648],["▁asam",-13.213333129882812],["主体",-13.213335990905762],["▁disposa",-13.213356971740724],["▁самим",-13.213377952575684],["▁vilkår",-13.21338939666748],["▁Alternativ",-13.213421821594238],["ліст",-13.2134428024292],["ፈን",-13.213449478149414],["▁საკუთარ",-13.213494300842283],["▁хил",-13.21351146697998],["▁इत्य",-13.213520050048828],["▁literar",-13.213526725769045],["סטער
",-13.213549613952637],["▁komentár",-13.213558197021484],["קבע",-13.213563919067385],["คอม",-13.213570594787598],["suku",-13.21357536315918],["▁Nation",-13.213578224182127],["hava",-13.213581085205078],["▁nápad",-13.213586807250977],["▁هيء",-13.213586807250977],["юся",-13.21359157562256],["uș",-13.213601112365724],["▁элек",-13.21362590789795],["▁Wales",-13.21363925933838],["▁Jet",-13.213667869567873],["▁వాళ్ల",-13.213685035705566],["▁ከፍ",-13.21369457244873],["▁edita",-13.213715553283691],["▁දාන්න",-13.213744163513184],["▁umbes",-13.21375560760498],["ančio",-13.21376132965088],["นํามา",-13.213784217834473],["▁kêm",-13.213786125183104],["▁இணைய",-13.21379566192627],["მას",-13.213802337646484],["(?)",-13.21381664276123],["战斗",-13.21381950378418],["▁unhas",-13.213829040527344],["▁fungera",-13.213841438293455],["▁رسیده",-13.21384334564209],["რიგ",-13.21384620666504],["▁zapal",-13.21384620666504],["▁reakcij",-13.213855743408203],["ляется",-13.21386432647705],["▁културно",-13.213871955871582],["▁нэгж",-13.213875770568848],["▁مف",-13.213878631591797],["џе",-13.213887214660645],["afodd",-13.213889122009276],["ਦੀਪ",-13.213902473449709],["▁canción",-13.213909149169922],["▁predavanj",-13.21391487121582],["ශා",-13.21392059326172],["▁भनेको",-13.213921546936035],["SSA",-13.2139253616333],["pärast",-13.21392822265625],["odat",-13.2139310836792],["▁ação",-13.213936805725098],["▁őr",-13.213945388793944],["೨",-13.213994979858398],["hine",-13.214018821716309],["ави",-13.214045524597168],["▁sille",-13.214048385620115],["tager",-13.214064598083496],["▁Vater",-13.214068412780762],["▁Batman",-13.21407985687256],["▁përdorim",-13.21409511566162],["▁bloga",-13.214096069335938],["▁permitir",-13.21410083770752],["▁๒",-13.214105606079102],["▁zadnji",-13.21411418914795],["▁αμ",-13.214119911193848],["▁лед",-13.214120864868164],["动作",-13.214126586914062],["▁arrin",-13.214137077331545],["mmat",-13.214141845703123],["▁promotion",-13.214200019836426],["іж",-13.214204788208008],["ULT",-13.214238166809082],["▁کہہ",-13.21425437927246],["▁कॅ",-13.21425724029541],["▁آور",-13.214285850524902],["四川",-13.214320182800291],["lılar",-13.21434211730957],["▁damage",-13.21435260772705],["▁הקל",-13.21435832977295],["▁судалгаа",-13.214380264282228],["merkt",-13.21438694000244],["ୱା",-13.214388847351074],["▁humil",-13.214396476745604],["▁mač",-13.214397430419922],["▁prochain",-13.214399337768556],["尺",-13.214404106140137],["▁Gon",-13.214411735534668],["▁értékelés",-13.21441650390625],["▁केपी",-13.21442413330078],["▁корен",-13.214427947998049],["肖",-13.214430809020996],["▁მოე",-13.214433670043944],["▁జన",-13.214439392089844],["▁Kebangsaan",-13.214455604553224],["▁Phần",-13.214455604553224],["▁pomembno",-13.214455604553224],["▁предприятий",-13.214455604553224],["▁корпоратив",-13.21445655822754],["▁ողջ",-13.214459419250488],["សាស្ត្រ",-13.214462280273438],["楽しい",-13.214462280273438],["▁гадзін",-13.214473724365234],["кв",-13.21447467803955],["▁engkau",-13.2144775390625],["▁memandang",-13.21448040008545],["▁своєї",-13.214491844177246],["nbsp",-13.21450424194336],["▁מנו",-13.21450901031494],["เกมส์",-13.214520454406738],["▁первого",-13.214520454406738],["人文",-13.214526176452637],["5.00",-13.21453857421875],["▁получа",-13.214539527893066],["▁argent",-13.214548110961914],["▁αυτόν",-13.214553833007812],["▁conoce",-13.214561462402344],["bentuk",-13.214566230773926],["▁nostrum",-13.214576721191406],["▁sabato",-13.214591026306152],["▁sken",-13.214591026306152],["क्षित",-13.214594841003418],["▁efektif",-13.214613914489746],["▁fizet",-13.21462631225586],["▁
promoción",-13.214649200439451],["ເນ",-13.214666366577148],["сө",-13.214669227600098],["▁المد",-13.214675903320312],["▁आपले",-13.214693069458008],["▁സമയ",-13.214704513549805],["မယ်",-13.214716911315918],["▁රෙ",-13.214733123779297],["▁оған",-13.214747428894045],["▁хур",-13.214754104614258],["▁maali",-13.214765548706056],["jén",-13.214768409729004],["MON",-13.214784622192385],["▁مخې",-13.214787483215332],["▁کارشناسی",-13.214810371398926],["ුරු",-13.214818000793455],["άνα",-13.214834213256836],["▁মিল",-13.214834213256836],["▁FEL",-13.214835166931152],["шай",-13.214855194091797],["▁schaffen",-13.21487045288086],["oksia",-13.214881896972656],["▁প্রতিষ্ঠান",-13.214900970458984],["▁trg",-13.214911460876465],["▁моно",-13.21491241455078],["▁органами",-13.214947700500488],["рія",-13.21494960784912],["▁Kral",-13.21496868133545],["▁କରିଥିବା",-13.214978218078612],["▁જેમાં",-13.21497917175293],["▁padėti",-13.215020179748535],["▁φωτ",-13.2150297164917],["zahlung",-13.215049743652344],["τσ",-13.215059280395508],["▁vinde",-13.215075492858888],["tyje",-13.215110778808594],["▁suke",-13.215143203735352],["인의",-13.21514892578125],["▁mediji",-13.21515655517578],["▁myśli",-13.215176582336426],["▁Dell",-13.215185165405272],["۲۰",-13.215204238891602],["▁kosmet",-13.215224266052246],["ТС",-13.215238571166992],["▁Ukraine",-13.21524143218994],["няй",-13.215252876281738],["同时也",-13.215261459350586],["िना",-13.215265274047852],["▁mamlakat",-13.215271949768066],["▁domicili",-13.2152738571167],["▁Фран",-13.215275764465332],["▁βαρ",-13.215282440185549],["ілетін",-13.215290069580078],["terà",-13.215291023254396],["▁discuti",-13.21530055999756],["市委",-13.215324401855469],["shiq",-13.215330123901367],["▁laguntza",-13.215338706970217],["κί",-13.215343475341797],["ుతోంది",-13.21535301208496],["ılıyor",-13.21535587310791],["▁సభ",-13.21535873413086],["ോട",-13.215372085571287],["▁әке",-13.215383529663086],["nienia",-13.215384483337402],["▁publiku",-13.21538543701172],["▁component",-13.215412139892578],["rinti",-13.215415954589844],["▁परत",-13.21541690826416],["▁Након",-13.21542739868164],["Пе",-13.21543788909912],["▁jirta",-13.215444564819336],["▁ifrån",-13.215445518493652],["tični",-13.215449333190918],["ଝ",-13.215457916259766],["▁intervención",-13.215471267700195],["หลังจาก",-13.21548557281494],["former",-13.215511322021484],["telo",-13.215532302856444],["▁جائے۔",-13.215551376342772],["▁designed",-13.215561866760254],["▁radīt",-13.215584754943848],["bývá",-13.215595245361328],["▁wens",-13.21561336517334],["yız",-13.215620994567873],["овања",-13.21562385559082],["▁Büro",-13.21562671661377],["▁нече",-13.215639114379885],["ԱՀ",-13.215642929077148],["хээ",-13.215673446655272],["ҮЙ",-13.215675354003906],["▁хайр",-13.215696334838867],["grond",-13.21572208404541],["gekomen",-13.21574878692627],["klus",-13.215752601623535],["▁뭐",-13.215773582458496],["▁imparti",-13.215781211853027],["雖",-13.21578311920166],["▁തെ",-13.21580696105957],["мілі",-13.215808868408203],["▁zvyk",-13.215808868408203],["▁presti",-13.215814590454102],["▁הישראלי",-13.21581745147705],["▁mövcud",-13.215826034545898],["kových",-13.21583080291748],["▁ascend",-13.215831756591797],["РБ",-13.215832710266112],["▁człon",-13.215845108032228],["😊",-13.215853691101074],["▁Димитров",-13.21585464477539],["▁нескольких",-13.21585464477539],["▁өзгөчө",-13.21585464477539],["▁ցույց",-13.21585464477539],["▁कृपया",-13.21585464477539],["▁შეუძლია",-13.21585464477539],["몬",-13.21585464477539],["▁výběr",-13.215856552124023],["▁ಅವಕಾಶ",-13.215856552124023],["▁मिडिया",-13.215858459472656],["▁ха
риу",-13.215859413146973],["អ៊ី",-13.215860366821287],["▁бебе",-13.21588134765625],["▁týden",-13.215886116027832],["лице",-13.215887069702148],["ráðherra",-13.215890884399414],["▁বিষয়",-13.215904235839844],["▁Animal",-13.215912818908691],["▁درک",-13.215935707092283],["▁윤",-13.215937614440918],["▁వున్న",-13.215965270996094],["1:",-13.215977668762209],["▁nasjonal",-13.215984344482422],["jede",-13.215986251831056],["▁coñecemento",-13.215991973876951],["▁মতো",-13.21599292755127],["ạp",-13.215996742248535],["ോടെ",-13.216005325317385],["▁végez",-13.216012001037598],["▁départ",-13.216020584106444],["tocht",-13.216031074523926],["▁Նրա",-13.21603298187256],["▁brus",-13.216033935546877],["▁copiilor",-13.21604824066162],["5,00",-13.216063499450684],["24)",-13.216066360473633],["meng",-13.216084480285645],["ਜ਼ੀ",-13.216096878051758],["▁ક્લિક",-13.21611499786377],["▁اقتدار",-13.216142654418944],["рз",-13.216176986694336],["▁شدہ",-13.216184616088867],["▁якое",-13.216206550598145],["▁трансформ",-13.21620750427246],["▁предполага",-13.216214179992676],["mbara",-13.21621799468994],["ဘ၀",-13.21622085571289],["▁принципи",-13.216222763061523],["▁Մեծ",-13.216225624084473],["▁акты",-13.21623420715332],["▁Hd",-13.216246604919434],["▁minuman",-13.216263771057127],["▁bygga",-13.216272354125977],["▁mikrofon",-13.21628761291504],["▁5.5",-13.216300964355469],["▁qalib",-13.216303825378418],["рабат",-13.216327667236328],["▁கூ",-13.21634578704834],["sniedz",-13.216353416442873],["▁lược",-13.216355323791504],["tæki",-13.216361045837402],["▁ស្លាប់",-13.216371536254885],["▁Beidh",-13.216383934020996],["と思いました",-13.216387748718262],["ónica",-13.21640396118164],["▁تغییرات",-13.21645450592041],["▁algunes",-13.216471672058104],["▁далее",-13.216485977172852],["▁отговори",-13.216495513916016],["▁érkez",-13.216506004333496],["▁kisha",-13.216523170471191],["▁случаев",-13.21652603149414],["idest",-13.216533660888672],["-45",-13.216565132141112],["反对",-13.216580390930176],["▁Oba",-13.216583251953123],["פח",-13.21664810180664],["خف",-13.216670036315918],["▁filhos",-13.216675758361816],["▁табыл",-13.216704368591309],["▁Kişi",-13.216726303100586],["▁Uten",-13.216757774353027],["зије",-13.216758728027344],["▁kamid",-13.21680736541748],["تطوير",-13.21682834625244],["▁노동",-13.2168550491333],["ியல்",-13.216861724853516],["▁எஸ்",-13.216874122619627],["assurer",-13.216875076293944],["inak",-13.21688461303711],["▁ซี",-13.21690845489502],["▁cresc",-13.216913223266602],["напред",-13.216925621032717],["ჯო",-13.216938972473145],["起到",-13.216961860656738],["ෙක",-13.2169771194458],["मू",-13.216988563537598],["▁sanoa",-13.217001914978027],["경기",-13.217005729675291],["▁சார்",-13.217007637023926],["സമ്മ",-13.217018127441406],["▁създава",-13.217026710510254],["▁peur",-13.217041015625],["дю",-13.217048645019531],["▁zakład",-13.217048645019531],["▁ఫ్ర",-13.217059135437012],["ciri",-13.21706199645996],["ตรา",-13.21707820892334],["шок",-13.217087745666504],["▁ЈЕ",-13.217087745666504],["▁PET",-13.217090606689451],["▁якій",-13.21709442138672],["кр",-13.217096328735352],["੍",-13.217098236083984],["▁სახელ",-13.21711540222168],["▁Oj",-13.21713161468506],["שׁ",-13.217144012451172],["തിരുത്തുക",-13.217168807983398],["契",-13.21717357635498],["alala",-13.217187881469728],["wiedzi",-13.217193603515623],["moli",-13.217196464538574],["障",-13.217201232910156],["的行为",-13.217203140258787],["ਦਿਆਂ",-13.217206001281738],["攻击",-13.217209815979004],["mū",-13.21721076965332],["debat",-13.217211723327637],["▁Ás",-13.217223167419434],["吊",-13.2172269821167],["▁사용할",-13.2172422409
05762],["▁vén",-13.217246055603027],["▁bicicleta",-13.217254638671877],["▁najlepiej",-13.217254638671877],["▁weltweit",-13.217254638671877],["▁පුංචි",-13.217254638671877],["▁incomp",-13.217255592346191],["▁смартфон",-13.217255592346191],["▁ਸਮਝ",-13.217256546020508],["▁cümlədən",-13.217259407043455],["▁ഓഫീസ",-13.217259407043455],["▁ibn",-13.21726417541504],["▁தொடர்பு",-13.217265129089355],["▁آھن",-13.217286109924316],["▁Նախ",-13.217289924621582],["▁ప్లే",-13.217289924621582],["▁vip",-13.217291831970217],["▁видимо",-13.217330932617188],["▁paziņo",-13.21734619140625],["▁brother",-13.21735668182373],["▁gabi",-13.21735668182373],["▁페",-13.217365264892578],["ደረሰ",-13.21737575531006],["salg",-13.217412948608398],["▁لغو",-13.217419624328612],["▁حركة",-13.21742820739746],["连接",-13.217440605163574],["▁abia",-13.21744155883789],["▁urusan",-13.217459678649902],["▁Campus",-13.217462539672852],["enfant",-13.217464447021484],["сим",-13.217464447021484],["▁испо",-13.217487335205078],["▁قاتل",-13.217503547668455],["وسي",-13.21753978729248],["▁আন",-13.21754264831543],["givning",-13.21755027770996],["▁teie",-13.217558860778809],["▁өн",-13.21760082244873],["▁ಬಳಿ",-13.217604637145996],["▁bepaalde",-13.21761703491211],["▁Ruh",-13.217628479003906],["മ്പി",-13.217631340026855],["▁ניו",-13.217642784118652],["▁bringt",-13.21765422821045],["▁unes",-13.217658042907717],["▁Gaya",-13.217660903930664],["▁rodič",-13.217689514160156],["เราจะ",-13.217693328857422],["▁כללי",-13.217693328857422],["▁подели",-13.217703819274902],["klassen",-13.217705726623535],["平时",-13.2177152633667],["▁primeros",-13.21774673461914],["тес",-13.21778678894043],["良かった",-13.217792510986328],["▁Short",-13.21784782409668],["▁तरफ",-13.217876434326172],["▁stond",-13.217905044555664],["mäng",-13.217912673950195],["ጠን",-13.217912673950195],["▁ولايت",-13.217913627624512],["▁환",-13.217914581298828],["atii",-13.217942237854004],["▁zaujíma",-13.21794891357422],["форма",-13.217951774597168],["▁hada",-13.217958450317385],["▁للح",-13.217961311340332],["▁Ulu",-13.217966079711914],["▁ទាក់ទង",-13.21798038482666],["▁cristal",-13.217981338500977],["kokemus",-13.217984199523926],["ሰጠው",-13.217984199523926],["▁udhë",-13.217988967895508],["ثبت",-13.21799373626709],["▁Hindu",-13.21802043914795],["▁SSD",-13.218029022216797],["льная",-13.218072891235352],["çə",-13.218094825744627],["▁молоді",-13.218094825744627],["τερα",-13.218109130859377],["▁بارش",-13.218120574951172],["▁Євро",-13.21815013885498],["تېر",-13.218164443969728],["np",-13.218168258666992],["▁sportive",-13.218183517456056],["ودی",-13.218234062194824],["▁господин",-13.218239784240724],["αγορ",-13.218254089355469],["▁blik",-13.218257904052734],["▁rude",-13.21826457977295],["ෂණය",-13.218290328979492],["▁Супер",-13.218299865722656],["язані",-13.218329429626465],["▁lunes",-13.218363761901855],["ქე",-13.218412399291992],["ტექ",-13.218420028686523],["▁שכל",-13.218422889709473],["説",-13.218422889709473],["стали",-13.218450546264648],["▁begeleid",-13.218450546264648],["▁Cosa",-13.21845245361328],["▁მთ",-13.21848201751709],["UV",-13.218482971191406],["▁veti",-13.218504905700684],["▁შა",-13.218542098999023],["▁combinatie",-13.218554496765137],["▁Keep",-13.21855640411377],["▁Ennek",-13.21858024597168],["▁მიმდინარე",-13.21865463256836],["หมู่",-13.218657493591309],["▁Można",-13.218657493591309],["▁ijtimoiy",-13.218657493591309],["▁keperluan",-13.218657493591309],["▁Οκτωβρίου",-13.218657493591309],["▁आंदोलन",-13.218657493591309],["▁kebenaran",-13.218658447265623],["▁отметить",-13.218658447265623],["▁ಹಲವು",-13.21866130828
8574],["▁հետեւյալ",-13.218663215637209],["▁Guest",-13.218666076660156],["▁bulunmaktadır",-13.218667030334473],["▁đỡ",-13.218667984008787],["abbat",-13.218668937683104],["▁ЖА",-13.218668937683104],["÷",-13.218670845031738],["νια",-13.218674659729004],["▁bağlantı",-13.218679428100586],["▁редовно",-13.218688011169434],["▁epidemi",-13.218698501586914],["สั้น",-13.218703269958496],["mienia",-13.218705177307127],["▁פיר",-13.21871566772461],["boga",-13.218722343444824],["▁deixou",-13.218722343444824],["▁கைது",-13.21872329711914],["መም",-13.218730926513672],["яч",-13.218738555908203],["▁heima",-13.218750953674316],["▁Lịch",-13.218754768371582],["dati",-13.218761444091797],["▁müzik",-13.218762397766112],["▁گزینه",-13.218780517578123],["▁భార్య",-13.218791961669922],["الو",-13.21880054473877],["etê",-13.218807220458984],["▁average",-13.218812942504885],["IAS",-13.218816757202148],["σμό",-13.218819618225098],["▁ନିଜର",-13.218820571899414],["bolaget",-13.218822479248049],["▁FBI",-13.218823432922363],["yaz",-13.21884822845459],["▁ವಿವರ",-13.218853950500488],["领先",-13.21886920928955],["▁трош",-13.218883514404297],["▁ناصر",-13.218890190124512],["▁accès",-13.218896865844728],["▁पाल",-13.218900680541992],["ნეთ",-13.218939781188965],["▁áhuga",-13.21896266937256],["▁ngũ",-13.219016075134276],["лине",-13.219037055969238],["▁poziva",-13.219091415405272],["▁pemerintahan",-13.219123840332031],["Jehova",-13.219148635864258],["կո",-13.219202995300291],["▁Lager",-13.219213485717772],["ກິດ",-13.21921443939209],["▁nenas",-13.21921730041504],["▁djeca",-13.219226837158203],["▁kształt",-13.219233512878418],["амын",-13.21923828125],["قرب",-13.219257354736328],["▁6,5",-13.219263076782228],["▁בראש",-13.219274520874023],["มหาวิทยาลัย",-13.219281196594238],["alach",-13.21929168701172],["▁Stud",-13.219322204589844],["▁Mikä",-13.219337463378906],["▁ئاز",-13.219367980957031],["ины",-13.21937370300293],["▁contenuti",-13.219400405883787],["▁않을",-13.219407081604004],["енным",-13.219409942626951],["发行",-13.219414710998535],["▁Lleida",-13.21945095062256],["▁შეხვედრა",-13.21945095062256],["▁sapa",-13.219456672668455],["/100",-13.219460487365724],["일까지",-13.219488143920898],["▁externa",-13.219545364379885],["▁Јер",-13.21955108642578],["rühm",-13.219584465026855],["امت",-13.219615936279297],["キャ",-13.219621658325195],["▁pares",-13.219626426696776],["ерге",-13.219627380371094],["或許",-13.219636917114258],["صلاح",-13.219672203063965],["lád",-13.219717979431152],["još",-13.21972370147705],["tsika",-13.219733238220217],["▁0%",-13.219744682312012],["իան",-13.219748497009276],["ķu",-13.21977996826172],["尤",-13.219788551330566],["▁тонн",-13.219833374023438],["▁strona",-13.21983528137207],["aíonn",-13.219840049743652],["▁ጥሩ",-13.219858169555664],["▁şəbəkə",-13.219861030578612],["спорт",-13.219862937927246],["▁99%",-13.219879150390623],["▁hetke",-13.219893455505373],["となって",-13.219893455505373],["quita",-13.219919204711914],["aaaaa",-13.21994400024414],["အဲ",-13.21994972229004],["▁Jom",-13.219961166381836],["▁zehn",-13.219962120056152],["▁hapur",-13.219966888427734],["ittu",-13.219972610473633],["τια",-13.220008850097656],["▁kapı",-13.220008850097656],["ร้อย",-13.220038414001465],["ώσουν",-13.22003936767578],["▁درې",-13.22004508972168],["褲",-13.220060348510742],["ကြည့်",-13.22006130218506],["▁antibiotik",-13.220062255859377],["▁imágenes",-13.220062255859377],["▁wanawake",-13.220062255859377],["▁řešení",-13.220062255859377],["▁грађана",-13.220062255859377],["▁ପର୍ଯ୍ୟନ୍ତ",-13.220062255859377],["▁vitam",-13.220063209533691],["▁psykisk",-13.22006511688232
4],["hany",-13.220075607299805],["▁თვით",-13.22007656097412],["ေအး",-13.220084190368652],["▁ડ",-13.220085144042969],["▁наводи",-13.2200927734375],["ιστικά",-13.22012424468994],["กลัว",-13.220125198364258],["육",-13.220135688781738],["मेर",-13.220142364501951],["nejší",-13.220197677612305],["נדער",-13.220200538635254],["кажу",-13.220205307006836],["▁launch",-13.220206260681152],["▁Груз",-13.220211029052734],["▁چکا",-13.220223426818848],["▁спеціально",-13.220234870910645],["جور",-13.220237731933594],["▁지나",-13.22023868560791],["▁comum",-13.220243453979492],["秘密",-13.22025203704834],["▁maraming",-13.220263481140137],["▁علماء",-13.220263481140137],["уде",-13.2202730178833],["▁মত",-13.220293998718262],["्ती",-13.220300674438477],["▁показва",-13.220330238342283],["строить",-13.220364570617676],["şın",-13.220365524291992],["dydd",-13.220366477966309],["▁Grafik",-13.22038745880127],["lassa",-13.220392227172852],["▁ಮುಂದಿನ",-13.220402717590332],["onej",-13.220429420471191],["nosci",-13.220458984375],["대표",-13.220468521118164],["▁Katso",-13.22046947479248],["يغ",-13.220491409301758],["▁kinnita",-13.220494270324709],["▁loita",-13.220523834228516],["ਖਾ",-13.220525741577148],["▁consiglio",-13.220525741577148],["▁သတင္း",-13.220542907714844],["▁ವ್ಯವಸ್ಥೆ",-13.22055435180664],["sættelse",-13.220558166503906],["▁లోని",-13.220566749572754],["ประชุม",-13.220582962036133],["ginta",-13.220586776733398],["▁Bade",-13.220593452453612],["rovi",-13.22062873840332],["▁locales",-13.220637321472168],["▁Česko",-13.220657348632812],["VII",-13.220666885375977],["▁λε",-13.220672607421877],["▁Ero",-13.220703125],["▁البيت",-13.220705032348633],["▁Quad",-13.220711708068848],["ГИ",-13.220715522766112],["▁пътя",-13.220715522766112],["▁Chemi",-13.22071647644043],["lēm",-13.22074031829834],["ाउँछ",-13.22075080871582],["▁algun",-13.22075080871582],["باء",-13.220766067504885],["▁brauchen",-13.220767974853516],["ーン",-13.220772743225098],["▁aktivitas",-13.220775604248049],["NYE",-13.220792770385742],["ٹن",-13.220794677734377],["▁realitzar",-13.220794677734377],["▁Slo",-13.22079849243164],["▁समज",-13.220858573913574],["▁පැවැත්",-13.220861434936523],["إعداد",-13.22086238861084],["തിനു",-13.22086238861084],["▁concreta",-13.22087860107422],["▁යෙද",-13.22091007232666],["▁الإن",-13.220917701721191],["ווים",-13.220939636230469],["lépés",-13.220942497253418],["▁Avan",-13.220965385437012],["feder",-13.22096824645996],["mui",-13.22098445892334],["▁shqipe",-13.220985412597656],["▁ұйымдар",-13.220993995666504],["▁þing",-13.221001625061035],["▁vee",-13.221035957336426],["vriendelijk",-13.221043586730955],["bility",-13.221062660217283],["pî",-13.221070289611816],["šal",-13.221076011657717],["ັດ",-13.221083641052246],["▁อ่าน",-13.22110366821289],["▁problematik",-13.221161842346191],["▁Tato",-13.221243858337402],["▁Books",-13.221269607543944],["▁општи",-13.22127628326416],["▁SAP",-13.221307754516602],["▁түсі",-13.221308708190918],["køb",-13.221323013305664],["▁dawa",-13.22132396697998],["发出",-13.22134017944336],["▁මොන",-13.22134494781494],["ίδης",-13.22134780883789],["魂",-13.221417427062988],["▁감독",-13.221418380737305],["依据",-13.221426963806152],["▁tvoje",-13.221429824829102],["modul",-13.22143268585205],["▁прод",-13.221461296081545],["▁Ağustos",-13.221467971801758],["▁Warszawa",-13.221467971801758],["▁większe",-13.221467971801758],["▁środków",-13.221467971801758],["▁μέλλον",-13.22146987915039],["▁wysokości",-13.221473693847656],["▁بشأن",-13.221475601196287],["▁қолдау",-13.221478462219238],["ذب",-13.22148323059082],["▁гадна",-13.221489906311035],["▁pos
siamo",-13.221491813659668],["▁nevojë",-13.221512794494627],["▁предложи",-13.221512794494627],["自宅",-13.221515655517578],["ಸ್ವಾಮಿ",-13.221524238586426],["▁samla",-13.22154140472412],["願意",-13.221555709838867],["▁vismaz",-13.221571922302246],["▁ନେତା",-13.221574783325195],["▁Raport",-13.22158432006836],["▁funktioniert",-13.221597671508787],["▁mismos",-13.221598625183104],["▁menjual",-13.22161102294922],["▁Nghị",-13.221612930297852],["दन",-13.22161865234375],["රට",-13.221622467041016],["▁македонскиот",-13.22164535522461],["යෙ",-13.221657752990724],["ሯ",-13.221684455871582],["પુર",-13.221692085266112],["▁местного",-13.221692085266112],["▁intensa",-13.221694946289062],["▁autom",-13.221696853637695],["▁Staats",-13.22171688079834],["▁Telekom",-13.221741676330566],["▁verste",-13.221752166748049],["▁زال",-13.221766471862791],["LIM",-13.22176742553711],["▁fax",-13.221768379211426],["么",-13.221796035766602],["▁Siva",-13.221796989440918],["▁njeri",-13.221811294555664],["▁ข้อมูล",-13.221826553344728],["▁determine",-13.221827507019045],["▁keçmiş",-13.221856117248535],["▁Bên",-13.221863746643066],["▁өргөн",-13.221879005432127],["ကိုယ္",-13.221911430358888],["issant",-13.22192096710205],["▁nettstedet",-13.221927642822266],["geli",-13.22194004058838],["▁публикува",-13.22194766998291],["▁estimula",-13.221952438354492],["▁զբաղ",-13.221952438354492],["是以",-13.221956253051758],["▁חג",-13.221965789794922],["▁वेद",-13.222002983093262],["▁ಒಂದೇ",-13.222016334533691],["を出",-13.22202205657959],["お願い",-13.22205638885498],["▁긴",-13.22206211090088],["▁മാറ",-13.222064971923828],["▁kronik",-13.222079277038574],["▁patrí",-13.222084045410156],["▁dugun",-13.222089767456056],["▁izba",-13.22211456298828],["▁معلم",-13.222115516662598],["▁166",-13.222128868103027],["▁lenger",-13.22213363647461],["▁అధికార",-13.222143173217772],["▁준",-13.222169876098633],["▁پوځ",-13.222175598144531],["▁робот",-13.22218132019043],["▁vél",-13.22219467163086],["▁게시물",-13.222198486328123],["▁шай",-13.222227096557615],["▁aizsardzības",-13.22223949432373],["▁Singer",-13.222268104553224],["▁numri",-13.222277641296388],["▁muga",-13.222281455993652],["▁Hồng",-13.222291946411133],["eedka",-13.22229290008545],["मंत्री",-13.222298622131348],["▁отырған",-13.22231101989746],["▁клиенти",-13.222312927246094],["▁Wet",-13.222317695617676],["▁perigo",-13.222370147705078],["akor",-13.22239589691162],["▁गण",-13.22239589691162],["▁Hög",-13.222406387329102],["▁rendi",-13.222421646118164],["▁दिसत",-13.22242832183838],["ର୍ଯ୍ୟ",-13.222441673278809],["必要な",-13.222467422485352],["nassa",-13.222475051879885],["bigay",-13.222489356994627],["▁אוכל",-13.222524642944336],["▁súd",-13.22254467010498],["▁փակ",-13.222551345825195],["wcze",-13.2225923538208],["чана",-13.222620964050291],["▁ажиллагааг",-13.222671508789062],["▁tysk",-13.222676277160645],["▁clair",-13.22267723083496],["त्रा",-13.222692489624023],["▁yaşama",-13.22270679473877],["▁زيات",-13.2227201461792],["▁реформи",-13.222745895385742],["государственного",-13.222750663757324],["смотри",-13.222783088684082],["ούμενο",-13.222793579101562],["▁дешев",-13.222793579101562],["▁Dollar",-13.222814559936523],["▁ظهور",-13.222823143005373],["▁Haram",-13.222826957702637],["▁Ayu",-13.22282886505127],["තුන්",-13.222829818725586],["екты",-13.222830772399902],["爸",-13.222834587097168],["یتی",-13.222838401794434],["ticos",-13.2228422164917],["វិធី",-13.222867965698242],["▁पोलिस",-13.222872734069824],["ທັງຫມົດ",-13.222875595092772],["७०",-13.22287654876709],["อร่อย",-13.22287654876709],["▁Müdafiə",-13.22287654876709],["▁mazingira",-13.2228
7654876709],["▁pembayaran",-13.22287654876709],["▁сакавіка",-13.22287654876709],["▁शुभकामना",-13.22287654876709],["짐",-13.222877502441406],["▁perbuatan",-13.22287940979004],["schijn",-13.222880363464355],["▁ажлыг",-13.222890853881836],["▁Rosen",-13.222896575927734],["▁сүрөт",-13.222896575927734],["▁شریکولو",-13.222901344299316],["▁ಇದರ",-13.222908020019531],["дена",-13.222908973693848],["LAD",-13.222915649414062],["▁władz",-13.222920417785645],["▁skommel",-13.222922325134276],["構成",-13.22292423248291],["▁Maybe",-13.222926139831545],["▁леко",-13.222941398620604],["▁नारायण",-13.222975730895996],["▁sjó",-13.222978591918944],["▁Nors",-13.22301197052002],["▁მოსა",-13.223012924194336],["▁huhtikuuta",-13.22304916381836],["kald",-13.223063468933104],["▁joulukuuta",-13.223085403442385],["zun",-13.223105430603027],["ETS",-13.223108291625977],["רכב",-13.223109245300291],["▁Môže",-13.223125457763672],["нформац",-13.223126411437988],["▁musiał",-13.223132133483888],["▁upozna",-13.223136901855469],["▁joves",-13.223140716552734],["▁curi",-13.223189353942873],["▁მუ",-13.22319221496582],[")」",-13.223193168640137],["親子",-13.22319793701172],["▁ark",-13.223207473754885],["tya",-13.22321319580078],["▁Bide",-13.22321319580078],["今后",-13.223226547241213],["▁وف",-13.223237037658691],["धु",-13.22323989868164],["▁nepie",-13.223240852355955],["▁ovim",-13.22325611114502],["▁komentarz",-13.223259925842283],["วิน",-13.223268508911133],["▁հոգ",-13.22327709197998],["entrada",-13.22328281402588],["▁різні",-13.223305702209473],["妝",-13.223310470581056],["▁arren",-13.22331714630127],["ائها",-13.223328590393066],["▁Konst",-13.223334312438965],["▁PKR",-13.223341941833496],["▁gozd",-13.223348617553713],["чната",-13.223379135131836],["▁sizlere",-13.22341251373291],["▁hồn",-13.223414421081545],["siri",-13.22342014312744],["▁زما",-13.22343635559082],["ပြန်",-13.22344207763672],["▁İmam",-13.223456382751465],["▁bestehen",-13.223474502563477],["▁allí",-13.223505020141602],["▁globo",-13.2235107421875],["▁থাকে",-13.223514556884766],["iečių",-13.223522186279297],["▁تھیں",-13.223522186279297],["umine",-13.223535537719728],["▁Generation",-13.223544120788574],["ฝน",-13.223548889160156],["▁സ്ഥാന",-13.223562240600586],["▁سمجه",-13.223592758178713],["pako",-13.223600387573242],["▁portare",-13.22366428375244],["▁typisk",-13.223666191101074],["▁repede",-13.22366714477539],["houder",-13.223671913146973],["认可",-13.22368049621582],["▁2020.",-13.223689079284668],["▁hjul",-13.223690032958984],["▁potrà",-13.223697662353516],["▁pripravljen",-13.223706245422363],["čica",-13.223727226257324],["bilang",-13.223736763000488],["▁Ordu",-13.223749160766602],["parken",-13.223776817321776],["▁forsøg",-13.223777770996094],["▁janji",-13.223793029785156],["▁slår",-13.22380256652832],["▁niño",-13.2238130569458],["caso",-13.22384262084961],["在地",-13.223854064941406],["▁பக்க",-13.22387409210205],["▁konsumen",-13.223934173583984],["▁ஆம்",-13.223938941955566],["▁folyamat",-13.223970413208008],["vonal",-13.223976135253906],["▁commerciale",-13.223978996276855],["ೇನು",-13.224011421203612],["лично",-13.224037170410156],["▁Instal",-13.224040031433104],["▁arquivo",-13.224041938781738],["▁ojos",-13.224044799804688],["▁delas",-13.22404670715332],["cić",-13.22412109375],["▁الكل",-13.22412109375],["хоп",-13.224127769470217],["▁Opis",-13.224151611328123],["教练",-13.224188804626465],["▁Sut",-13.224191665649414],["шее",-13.22421169281006],["CIO",-13.224234580993652],["▁Teori",-13.224242210388184],["▁forskjell",-13.224247932434082],["▁усі",-13.22427463531494],["▁খু",-13.22427463531494]
,["▁penu",-13.224276542663574],["ขัด",-13.224284172058104],["▁càrrec",-13.224287033081056],["▁ନୁହେଁ",-13.224287033081056],["▁ଭିତରେ",-13.224287033081056],["▁గుర్తు",-13.224287033081056],["▁सुन्दर",-13.224287986755373],["▁gelukkig",-13.224288940429688],["▁ରାସ୍ତା",-13.224288940429688],["antara",-13.224292755126951],["▁thậm",-13.224294662475586],["▁ಸೀ",-13.224297523498535],["▁término",-13.224303245544434],["▁Mataifa",-13.22430419921875],["▁presença",-13.22430419921875],["▁પ્રતિ",-13.224309921264648],["▁poist",-13.224316596984863],["▁úti",-13.224318504333496],["стары",-13.224324226379396],["▁herzlich",-13.224337577819824],["▁Çox",-13.22434139251709],["量的",-13.224342346191406],["▁bëjë",-13.224347114562988],["記念",-13.224356651306152],["▁Carlo",-13.224358558654783],["ستي",-13.224387168884276],["▁Rusijos",-13.224388122558594],["呈现",-13.224398612976074],["ûl",-13.22439956665039],["▁CHE",-13.22441577911377],["▁construção",-13.224419593811035],["▁printr",-13.224419593811035],["▁Эта",-13.224428176879885],["▁Mig",-13.224448204040527],["▁vodil",-13.224448204040527],["▁Všeobecné",-13.224461555480955],["▁անվտանգության",-13.224486351013184],["▁مباشرة",-13.224488258361816],["▁цагийн",-13.224489212036133],["醫生",-13.224504470825195],["”،",-13.22450828552246],["▁Inhalte",-13.224529266357422],["▁ακολουθ",-13.224532127380373],["တတ်",-13.22453498840332],["izay",-13.224535942077637],["▁Nazi",-13.224539756774902],["▁Sah",-13.22455596923828],["▁íslensku",-13.224557876586914],["▁candidatura",-13.224573135375977],["▁çmim",-13.224574089050291],["รายได้",-13.224605560302734],["▁empre",-13.22460651397705],["ékony",-13.224611282348633],["ថ្មីៗ",-13.224611282348633],["▁Avui",-13.224623680114746],["▁rozwija",-13.224624633789062],["回收",-13.22464370727539],["ැයි",-13.224644660949709],["ваат",-13.224653244018556],["שמה",-13.224663734436035],["▁texnologiya",-13.22467041015625],["▁الاجتماعية",-13.22468090057373],["▁Infra",-13.22468376159668],["▁가진",-13.224750518798828],["chir",-13.22478485107422],["▁اخر",-13.22482204437256],["▁малку",-13.224831581115724],["▁uvedl",-13.224833488464355],["▁රජ",-13.224854469299316],["ോടു",-13.224889755249023],["▁araba",-13.224919319152832],["▁২৪",-13.224960327148438],["▁получите",-13.224961280822754],["▁energetik",-13.224981307983398],["▁ተሰ",-13.224982261657717],["મત",-13.22498607635498],["ोन",-13.225021362304688],["▁Gericht",-13.225055694580078],["ไปที่",-13.22506046295166],["նող",-13.225061416625977],["▁inak",-13.225090026855469],["สํานัก",-13.225119590759276],["▁കിട്ടിയ",-13.225122451782228],["ご相談",-13.225130081176758],["الأ",-13.225131034851074],["धो",-13.225131034851074],["exp",-13.225144386291504],["kaka",-13.225150108337402],["ប្",-13.225164413452148],["▁పిల్లల",-13.225176811218262],["▁สํานักงาน",-13.22520637512207],["ulması",-13.225207328796388],["كاف",-13.225234031677246],["anaya",-13.225236892700195],["▁ኤርትራ",-13.225257873535156],["▁millət",-13.225263595581056],["▁சம",-13.225263595581056],["ruši",-13.225268363952637],["▁aberta",-13.225275039672852],["▁recenz",-13.225318908691406],["үс",-13.225322723388672],["ipari",-13.225327491760254],["▁traffic",-13.22534465789795],["▁титул",-13.225346565246582],["▁crescut",-13.225350379943848],["▁контроля",-13.22535514831543],["gehend",-13.225358963012695],["▁década",-13.225377082824709],["▁ເມື່ອ",-13.225409507751465],["第三方",-13.225414276123049],["▁Тал",-13.2254638671875],["▁मू",-13.225510597229004],["기간",-13.225510597229004],["▁īpašu",-13.22553539276123],["zkou",-13.225564002990724],["▁smje",-13.225580215454102],["▁toplo",-13.225589752197266],["bakken",
-13.225601196289062],["ఎల్",-13.225601196289062],["มู",-13.22562026977539],["▁gastro",-13.225643157958984],["▁retur",-13.225643157958984],["រាជ",-13.225663185119627],["ពេញ",-13.225664138793944],["тил",-13.225665092468262],["柳",-13.225680351257324],["▁تبلیغات",-13.225689888000488],["균",-13.225696563720703],["▁μπροστά",-13.225699424743652],["▁மட்டுமே",-13.225699424743652],["▁ಕೇವಲ",-13.225699424743652],["▁Language",-13.225700378417969],["▁zupełnie",-13.225700378417969],["▁Címkék",-13.225701332092283],["▁ধরে",-13.225714683532717],["▁아침",-13.225722312927246],["▁مجله",-13.225726127624512],["▁Cop",-13.22572898864746],["пољ",-13.225730895996094],["▁blomster",-13.22573471069336],["▁zástup",-13.225763320922852],["▁länder",-13.22576904296875],["▁улсад",-13.225787162780762],["ହାର",-13.225799560546877],["fig",-13.225834846496582],["oppa",-13.22584342956543],["όνια",-13.225845336914062],["ซอย",-13.22586154937744],["▁усім",-13.225871086120604],["当日",-13.225889205932615],["▁عربی",-13.225896835327148],["▁Mum",-13.225899696350098],["29)",-13.225902557373049],["▁გას",-13.225907325744627],["อล",-13.225926399230955],["▁физическо",-13.225936889648438],["कर्म",-13.22596263885498],["▁krwi",-13.22600555419922],["▁бут",-13.226008415222168],["▁juri",-13.226016998291016],["▁involve",-13.226025581359863],["▁холод",-13.22602653503418],["สําหรับการ",-13.22605037689209],["▁ಬೀ",-13.226055145263672],["▁Dî",-13.226092338562012],["▁mõni",-13.22610855102539],["▁dröm",-13.226109504699709],["▁заявил",-13.226150512695312],["▁Пала",-13.226151466369627],["▁odbywa",-13.22617244720459],["正确",-13.226213455200195],["▁yaran",-13.22622776031494],["范围内",-13.226238250732422],["▁woorde",-13.226239204406738],["แท",-13.226274490356444],["೧",-13.226279258728027],["orten",-13.22628402709961],["▁attīstības",-13.22629451751709],["іти",-13.226332664489746],["▁nemzeti",-13.226364135742188],["▁وهم",-13.226381301879885],["ອດ",-13.226388931274414],["▁непро",-13.22645664215088],["▁sisään",-13.226462364196776],["▁jugak",-13.226465225219728],["ກໍາລັງ",-13.226469993591309],["▁կատարել",-13.226475715637209],["പെട്ട",-13.226476669311523],["ដ៏",-13.226484298706056],["平安",-13.226487159729004],["生き",-13.2264986038208],["▁енерг",-13.226511001586914],["개발",-13.226524353027344],["czym",-13.226527214050291],["▁kirjan",-13.226529121398926],["zgl",-13.226533889770508],["▁діти",-13.226571083068848],["▁usul",-13.22657299041748],["▁плата",-13.226577758789062],["ители",-13.226644515991213],["▁फ्ल",-13.22665023803711],["breek",-13.226689338684082],["▁испит",-13.226723670959473],["തല്ല",-13.226726531982422],["▁izlož",-13.226727485656738],["umuz",-13.226757049560549],["▁هؤلاء",-13.226759910583496],["▁dispositivos",-13.22679615020752],["نتج",-13.226818084716797],["▁tòa",-13.226828575134276],["▁მარტო",-13.226879119873049],["canta",-13.22689151763916],["▁આપવા",-13.226896286010742],["verein",-13.226901054382324],["▁ေလး",-13.22691822052002],["▁pasiūly",-13.226933479309082],["▁yığ",-13.226936340332031],["▁માંગ",-13.226947784423828],["▁andro",-13.226956367492676],["▁Đa",-13.226959228515623],["▁materiálu",-13.226961135864258],["▁อัน",-13.226988792419434],["መጨረሻ",-13.226991653442385],["ceļ",-13.227001190185549],["▁гаргах",-13.227012634277344],["NR",-13.22702693939209],["лис",-13.227046012878418],["-400",-13.227063179016112],["▁ēd",-13.227078437805176],["▁pema",-13.227089881896973],["另一个",-13.227100372314451],["รัฐบาล",-13.227112770080566],["▁yhteydessä",-13.227113723754885],["▁आफ्नै",-13.227113723754885],["▁मौजूद",-13.227113723754885],["▁warsztat",-13.2271146774292],["▁zamestna",-13
.2271146774292],["▁алкохол",-13.2271146774292],["▁ਪ੍ਰਾਪਤ",-13.2271146774292],["▁Alkohol",-13.227115631103516],["▁narzędzi",-13.227117538452148],["▁арттыру",-13.22711944580078],["▁оценки",-13.227121353149414],["므로",-13.227127075195312],["▁غزة",-13.227134704589844],["▁исследования",-13.227139472961426],["▁공부",-13.22715187072754],["දර",-13.227154731750488],["▁Puerto",-13.22716236114502],["▁parlar",-13.227164268493652],["▁diamant",-13.22718906402588],["▁suivre",-13.22718906402588],["▁samstarf",-13.227212905883787],["部份",-13.227213859558104],["▁weder",-13.22723388671875],["▁trodde",-13.227234840393066],[".1.1",-13.227238655090332],["структур",-13.227242469787598],["▁Tình",-13.227246284484863],["▁мына",-13.227265357971191],["▁kujdes",-13.227293014526367],["Ĉ",-13.227322578430176],["▁القدس",-13.227327346801758],["▁сина",-13.227368354797363],["▁hujjatlar",-13.227371215820312],["аги",-13.227389335632324],["的主",-13.227389335632324],["▁švent",-13.227407455444336],["▁отримання",-13.22741413116455],["▁पर्छ",-13.227437019348145],["cile",-13.227449417114258],["▁សម្តេច",-13.227461814880373],["ОО",-13.227462768554688],["▁стратегия",-13.227478981018066],["▁регистрира",-13.227492332458496],["वर्ष",-13.227545738220217],["ವಾರ",-13.227551460266112],["▁emlék",-13.227556228637695],["aminen",-13.227560997009276],["▁stranden",-13.227572441101074],["▁yaradı",-13.227601051330566],["▁jakość",-13.227609634399414],["▁tietää",-13.227612495422363],["zünk",-13.22762966156006],["מנות",-13.227631568908691],["юють",-13.22764492034912],["▁никада",-13.227657318115234],["ogram",-13.227666854858398],["luč",-13.22767448425293],["▁Həmin",-13.22768211364746],["▁amade",-13.227683067321776],["▁minima",-13.227705001831056],["▁רצו",-13.227718353271484],["affi",-13.227728843688965],["سيد",-13.227737426757812],["▁regeringen",-13.22780704498291],["▁колеги",-13.227808952331545],["▁Мари",-13.227811813354492],["▁השת",-13.227843284606934],["teka",-13.227846145629885],["▁телефона",-13.227856636047363],["▁возраста",-13.22785758972168],["▁Tabi",-13.227866172790527],["▁свако",-13.227879524230955],["▁واردات",-13.227900505065918],["▁alten",-13.22792911529541],["▁тура",-13.22795581817627],["▁policia",-13.227956771850586],["▁устройство",-13.227965354919434],["ведение",-13.2279691696167],["▁sklopu",-13.227972984313965],["리스",-13.22797679901123],["باع",-13.22797966003418],["▁röd",-13.22797966003418],["▁legat",-13.227986335754396],["lå",-13.22799301147461],["▁Powered",-13.22802734375],["▁Тол",-13.228034019470217],["那就是",-13.22804069519043],["▁تعد",-13.228049278259276],["▁dimiliki",-13.228059768676758],["ագետ",-13.228143692016602],["▁3/4",-13.2281494140625],["inchi",-13.228158950805664],["▁такими",-13.22815990447998],["REM",-13.228178024291992],["▁sartu",-13.228190422058104],["ovića",-13.228204727172852],["马上",-13.228233337402344],["▁ստաց",-13.228264808654783],["▁chyb",-13.22826862335205],["▁soient",-13.22828769683838],["lce",-13.228316307067873],["väst",-13.228328704833984],["나는",-13.228373527526855],["நாட்டு",-13.228401184082031],["▁traf",-13.228421211242676],["▁Ваши",-13.228423118591309],["▁سۈر",-13.228426933288574],["▁muli",-13.228429794311523],["▁dedicata",-13.228435516357422],["▁sélection",-13.228455543518066],["lıkları",-13.228471755981444],["▁Stjórn",-13.228471755981444],["mehr",-13.22847843170166],["▁უკან",-13.228486061096191],["胖",-13.228490829467772],["▁strach",-13.228503227233888],["▁(50",-13.228513717651367],["ຝ່າຍ",-13.228529930114746],["▁chụp",-13.228530883789062],["▁izdelkov",-13.228530883789062],["▁nadzieję",-13.228530883789062],["▁събития
",-13.228530883789062],["▁مسابقات",-13.228530883789062],["▁ವಾಣಿಜ್ಯ",-13.228530883789062],["▁واکنش",-13.228532791137695],["▁csatlakoz",-13.228535652160645],["▁jednotlivých",-13.228543281555176],["▁असताना",-13.228560447692873],["▁기억",-13.228564262390137],["▁بذلك",-13.228590965270996],["ညာ",-13.228592872619627],["ल्य",-13.228598594665527],["▁Берлин",-13.228608131408691],["▁indicado",-13.228611946105955],["દર્શ",-13.228622436523438],["▁השימוש",-13.228630065917969],["▁didin",-13.228659629821776],["▁Movies",-13.228679656982422],["▁viis",-13.228692054748535],["▁algoritm",-13.228693008422852],["▁በርካታ",-13.228727340698242],["рэй",-13.228729248046877],["▁tushun",-13.22876262664795],["▁тапсырма",-13.228774070739746],["▁geplant",-13.22877597808838],["76)",-13.228795051574709],["លំ",-13.228797912597656],["▁jeres",-13.228797912597656],["▁رول",-13.228804588317873],["▁Vive",-13.228829383850098],["tamalla",-13.22884464263916],["હો",-13.228861808776855],["чатков",-13.228866577148438],["▁минерал",-13.228901863098145],["კავ",-13.228927612304688],["▁[5]",-13.22893238067627],["ക്കാര്",-13.22894287109375],["корисни",-13.228944778442385],["▁demander",-13.228957176208496],["▁polityk",-13.228958129882812],["▁debatt",-13.228963851928713],["▁ఉన్నా",-13.22898769378662],["INDA",-13.229015350341797],["▁anao",-13.22902488708496],["생활",-13.229033470153809],["▁ಟ್ರ",-13.22903823852539],["▁চাই",-13.229056358337402],["▁gola",-13.2290620803833],["▁Yli",-13.229066848754885],["▁åka",-13.229092597961426],["lerinizi",-13.229158401489258],["ံု",-13.22916316986084],["▁светлина",-13.229175567626951],["▁stadium",-13.229182243347168],["戻",-13.229185104370115],["▁szolgál",-13.229222297668455],["▁bildirdi",-13.229241371154783],["andet",-13.229251861572266],["նակ",-13.229255676269531],["овдун",-13.229270935058594],["zijn",-13.229284286499023],["járó",-13.229307174682615],["алар",-13.229330062866213],["▁Tilaa",-13.229333877563477],["drau",-13.22934627532959],["长的",-13.22942352294922],["▁школьн",-13.229435920715332],["pini",-13.22944450378418],["▁Zero",-13.229447364807127],["MAX",-13.22947883605957],["ಪು",-13.229480743408203],["prøve",-13.22949504852295],["▁anlam",-13.229522705078123],["▁κινητ",-13.229537963867188],["skur",-13.229559898376465],["นาที",-13.229573249816896],["▁୧୨",-13.229601860046388],["ຽ",-13.229636192321776],["שא",-13.229660987854004],["▁dostępne",-13.22966766357422],["▁róż",-13.22972011566162],["▁ආදරය",-13.229801177978516],["▁араб",-13.229828834533691],["VIL",-13.229843139648438],["agent",-13.229862213134766],["▁நிறுவன",-13.229864120483398],["▁dienst",-13.22989559173584],["▁දු",-13.229897499084473],["රාජ",-13.229903221130373],["▁Regione",-13.229907035827637],["▁nyní",-13.229925155639648],["fnið",-13.22994613647461],["▁disediakan",-13.22994899749756],["▁doświadczenie",-13.22994899749756],["▁nämlich",-13.22994899749756],["▁ವಿವಿಧ",-13.22994899749756],["保養",-13.22994899749756],["θήκη",-13.229950904846191],["▁Þeir",-13.229950904846191],["▁څلور",-13.229950904846191],["มั่นใจ",-13.229957580566406],["▁अझै",-13.229962348937988],["▁Париж",-13.22996425628662],["▁Luz",-13.229965209960938],["▁retten",-13.229965209960938],["ೊಂದ",-13.229984283447266],["▁चयन",-13.229984283447266],["ဂါ",-13.229985237121582],["▁giusto",-13.229991912841797],["диш",-13.22999382019043],["▁pokazal",-13.23000144958496],["ጠቃ",-13.230016708374023],["▁Certifica",-13.230018615722656],["dzin",-13.230023384094238],["▁mulla",-13.230024337768556],["▁düzey",-13.23003101348877],["▁მცირე",-13.2300386428833],["▁ამბობს",-13.230052947998049],["δου",-13.230055809020996],["▁ресу
рси",-13.23007583618164],["▁:)))",-13.230095863342283],["▁Tuna",-13.230096817016602],["るように",-13.23010540008545],["▁snö",-13.230117797851562],["nička",-13.23011875152588],["▁Една",-13.230119705200195],["▁dış",-13.23012924194336],["▁suun",-13.230133056640623],["▁konkrete",-13.230142593383787],["▁FÖR",-13.230143547058104],["▁Díky",-13.23015594482422],["声明",-13.23016357421875],["▁деклар",-13.230167388916016],["▁Nové",-13.230179786682127],["▁geschrieben",-13.230186462402344],["▁считает",-13.23020839691162],["▁Joy",-13.230215072631836],["▁করবেন",-13.230246543884276],["ション",-13.230263710021973],["ybių",-13.230267524719238],["Представ",-13.230289459228516],["▁دائم",-13.230297088623049],["▁తెలుస",-13.230308532714844],["aaa",-13.230345726013184],["▁OT",-13.230354309082031],["▁자체",-13.230358123779297],["▁ympäri",-13.230365753173828],["▁Liit",-13.2304048538208],["▁توقع",-13.23040771484375],["▁Ази",-13.230412483215332],["▁POP",-13.23041820526123],["▁iubit",-13.23043441772461],["▁Podobn",-13.230440139770508],["▁Առ",-13.230450630187988],["▁pierwszej",-13.230451583862305],["▁вуз",-13.230461120605469],["ിലാണ്",-13.23046875],["▁πατ",-13.230478286743164],["ىمى",-13.230512619018556],["образи",-13.230533599853516],["▁Orde",-13.230545997619627],["lasti",-13.230572700500488],["Sur",-13.230624198913574],["талған",-13.230634689331056],["▁משת",-13.230669975280762],["▁Monate",-13.230690956115724],["▁Maamulka",-13.23070240020752],["▁limited",-13.230710983276367],["▁fals",-13.230717658996582],["▁आपने",-13.230721473693848],["▁ученик",-13.230740547180176],["gitt",-13.230741500854492],["ҮН",-13.23077392578125],["▁Betrieb",-13.230780601501465],["▁boz",-13.230783462524414],["▁מאי",-13.230783462524414],["wój",-13.230791091918944],["▁медици",-13.230791091918944],["▁жоспары",-13.230793952941896],["ећа",-13.230796813964844],["▁Phen",-13.230798721313477],["▁வார",-13.230854034423828],["▁الدین",-13.230876922607422],["▁strikke",-13.230879783630373],["ათი",-13.230883598327637],["▁የነበረው",-13.23092555999756],["▁œuvre",-13.230931282043455],["▁መሆኑ",-13.230938911437988],["▁Nasi",-13.23095703125],["Char",-13.230962753295898],["▁eskola",-13.230986595153809],["▁одлуч",-13.230989456176758],["▁belki",-13.231001853942873],["的中国",-13.231008529663086],["▁aeroport",-13.2310209274292],["ም፤",-13.23102569580078],["▁dokona",-13.231045722961426],["▁شهدا",-13.231069564819336],["▁kõigi",-13.23109531402588],["סביר",-13.23110294342041],["▁dynamic",-13.23110580444336],["しまいました",-13.231117248535156],["ssimi",-13.231130599975586],["▁Тэд",-13.231136322021484],["▁nuna",-13.231146812438965],["▁Church",-13.23114776611328],["▁furniz",-13.231157302856444],["▁llibres",-13.23116970062256],["▁פיל",-13.231189727783203],["▁hökumət",-13.231193542480469],["biet",-13.231212615966797],["mhair",-13.23122215270996],["▁വിവാഹ",-13.231240272521973],["▁Moni",-13.231273651123049],["urd",-13.231287956237791],["▁Inicia",-13.23131275177002],["▁жаңалықтар",-13.231321334838867],["▁Mek",-13.231325149536133],["▁помил",-13.231352806091309],["िमा",-13.23135757446289],["ງາມ",-13.231366157531738],["▁ۋا",-13.231366157531738],["▁umožňuje",-13.231369018554688],["▁Жээнбеков",-13.231369018554688],["▁فهرست",-13.231369018554688],["▁ಪ್ರಶ್ನೆ",-13.231369018554688],["▁ನಿರ್ದೇಶಕ",-13.231369972229004],["▁Wunsch",-13.23137092590332],["▁soggiorno",-13.23137092590332],["▁профессор",-13.231375694274902],["▁sammenligne",-13.23137664794922],["▁қазіргі",-13.231380462646484],["▁организован",-13.231383323669434],["▁trädgård",-13.23139762878418],["込んで",-13.231400489807127],["▁fogo",-13.23140811920166],["▁sözlərin
ə",-13.231409072875977],["вече",-13.231416702270508],["演奏",-13.231425285339355],["▁båda",-13.231431007385254],["võtja",-13.231437683105469],["চ্ছ",-13.231450080871582],["χαν",-13.231453895568848],["일부터",-13.231464385986328],["pasi",-13.231491088867188],["dağ",-13.23149585723877],["osis",-13.231502532958984],["▁способности",-13.231504440307615],["▁srce",-13.231511116027832],["▁mój",-13.231513023376465],["מציאות",-13.23151969909668],["力を",-13.231529235839844],["▁noku",-13.231532096862791],["чені",-13.231544494628906],["бац",-13.231553077697754],["▁önünde",-13.231562614440918],["▁Prøv",-13.231573104858398],["▁Level",-13.23161506652832],["▁alanda",-13.23161506652832],["国際",-13.231634140014648],["MIN",-13.231721878051758],["▁બનાવ",-13.23173999786377],["▁pomocou",-13.2317476272583],[":))",-13.231752395629885],["▁Πο",-13.2317533493042],["▁quelqu",-13.231757164001465],["▁BS",-13.231773376464844],["▁conjunt",-13.231773376464844],["прат",-13.231778144836426],["طرف",-13.231794357299805],["▁μυ",-13.23179817199707],["qil",-13.231802940368652],["▁krijg",-13.231815338134766],["гуул",-13.23182773590088],["▁refleks",-13.231828689575195],["واجه",-13.231829643249512],["▁pieaug",-13.231831550598145],["试验",-13.231836318969728],["فيد",-13.231839179992676],["▁Communi",-13.231843948364258],["аф",-13.231863021850586],["лв",-13.231863975524902],["ACT",-13.231892585754396],["लोक",-13.231958389282228],["▁Følg",-13.231998443603516],["χων",-13.23200225830078],["გონ",-13.232004165649414],["▁muro",-13.232011795043944],["▁дотор",-13.232026100158691],["▁nopea",-13.232038497924805],["▁dezelfde",-13.232040405273438],["▁kjenner",-13.232074737548828],["σκα",-13.232078552246094],["▁artistic",-13.232098579406738],["▁ज़",-13.232129096984863],["కింగ్",-13.232149124145508],["ٹو",-13.232157707214355],["истика",-13.232189178466797],["klare",-13.232206344604492],["KAS",-13.232207298278809],["出现了",-13.232221603393556],["▁سنا",-13.232227325439451],["に参加",-13.23224925994873],["түр",-13.232293128967283],["▁geometri",-13.232309341430664],["▁etkisi",-13.232322692871094],["▁Vig",-13.232355117797852],["▁tys",-13.2323637008667],["▁समुदाय",-13.23237133026123],["▁Skri",-13.232376098632812],["estate",-13.232388496398926],["▁toplu",-13.232439041137695],["▁цій",-13.232446670532228],["▁ເອົາ",-13.232463836669922],["べき",-13.232465744018556],["▁கொள்",-13.232468605041504],["事项",-13.232474327087402],["gados",-13.232479095458984],["алтай",-13.232484817504885],["▁gær",-13.232503890991213],["▁ھە",-13.23257064819336],["사는",-13.232592582702637],["bow",-13.232595443725586],["▁ból",-13.23260498046875],["讓我們",-13.232658386230469],["▁TIP",-13.232662200927734],["▁7-8",-13.232665061950684],["首都",-13.232677459716797],["гэн",-13.23267936706543],["iyeen",-13.23268985748291],["▁ಓದಿ",-13.232693672180176],["▁선수",-13.232707977294922],["▁পরীক্ষা",-13.23271369934082],["حفظ",-13.23274040222168],["▁изисква",-13.23275661468506],["जय",-13.232769966125488],["緊張",-13.23277473449707],["▁Ιανουαρίου",-13.232791900634766],["▁неговиот",-13.232791900634766],["▁चितवन",-13.232791900634766],["▁ውሳኔ",-13.232791900634766],["▁dieselfde",-13.232792854309082],["▁состојба",-13.232792854309082],["▁случва",-13.232793807983398],["▁पीछे",-13.232795715332031],["▁Црква",-13.232796669006348],["▁යොදා",-13.232800483703612],["▁غریب",-13.232803344726562],["▁samenwerking",-13.232805252075195],["şma",-13.23282527923584],["cic",-13.232837677001951],["▁respecta",-13.232840538024902],["▁ممنوع",-13.232854843139648],["과정",-13.232870101928713],["ىر",-13.23287868499756],["▁caracteriza",-13.23289394378662],["পথ",-13.
232912063598633],["▁aulas",-13.23292064666748],["▁rekrut",-13.232939720153809],["▁ورځې",-13.232961654663086],["vella",-13.23297882080078],["friend",-13.23302936553955],["▁Бан",-13.233036041259766],["▁рождения",-13.233074188232422],["▁الأل",-13.233075141906738],["▁одржи",-13.23307991027832],["šov",-13.23308277130127],["▁따라서",-13.233124732971191],["▁Six",-13.23313331604004],["▁Sonnen",-13.233146667480469],["▁הצי",-13.233146667480469],["▁בב",-13.233148574829102],["▁Ravi",-13.23317050933838],["▁kwart",-13.233171463012695],["▁Тие",-13.233196258544922],["▁Iba",-13.233213424682615],["реш",-13.23321533203125],["ชี",-13.23321533203125],["▁motiu",-13.233230590820312],["▁naroda",-13.233230590820312],["▁znacznie",-13.23324489593506],["ورن",-13.23325538635254],["▁dulce",-13.233267784118652],["linga",-13.233296394348145],["තම",-13.233301162719728],["মন",-13.233305931091309],["▁kæreste",-13.23330783843994],["▁хичээл",-13.233317375183104],["▁могао",-13.2333345413208],["صم",-13.233348846435549],["▁ભા",-13.233360290527344],["eithiau",-13.23336124420166],["ช่วยให้",-13.233384132385254],["wî",-13.233400344848633],["▁tuan",-13.23341464996338],["▁принадлеж",-13.23343276977539],["яті",-13.233449935913086],["yekî",-13.233450889587402],["▁begynner",-13.233463287353516],["▁preparado",-13.233469009399414],["▁מצוי",-13.233489990234377],["▁úst",-13.233502388000488],["givare",-13.233508110046388],["▁Франц",-13.233540534973145],["англ",-13.233559608459473],["hã",-13.233572959899902],["fried",-13.233580589294434],["שלח",-13.233583450317385],["▁vélemény",-13.233583450317385],["十一",-13.23361873626709],["Ջ",-13.233622550964355],["ԱՄ",-13.233646392822266],["▁Panel",-13.233654975891112],["راغ",-13.233678817749023],["System",-13.233702659606934],["ійськ",-13.233720779418944],["▁டா",-13.233724594116213],["ssez",-13.233755111694336],["værende",-13.233784675598145],["macam",-13.233810424804688],["ขั้น",-13.233824729919434],["▁అనేక",-13.233854293823242],["kynn",-13.233859062194824],["рга",-13.233860969543455],["τρε",-13.233872413635254],["њата",-13.233898162841797],["▁निर",-13.233899116516112],["ovaním",-13.233938217163086],["▁pirmā",-13.23395538330078],["66)",-13.233962059020996],["▁familj",-13.233972549438477],["▁aikuis",-13.233973503112791],["▁paika",-13.233975410461426],["srecht",-13.23402214050293],["▁Сай",-13.234044075012209],["▁Outro",-13.2340669631958],["▁aquela",-13.2340669631958],["קרי",-13.234086990356444],["▁مض",-13.234088897705078],["▁груд",-13.23409938812256],["ellement",-13.234115600585938],["丰富的",-13.23412036895752],["▁వేల",-13.234126091003418],["yske",-13.23414134979248],["發揮",-13.234156608581545],["DG",-13.234163284301758],["旋",-13.234179496765137],["ŝo",-13.234189987182615],["жете",-13.234208106994627],["ประหยัด",-13.234216690063477],["▁článok",-13.234216690063477],["▁স্কুল",-13.234216690063477],["▁இங்கே",-13.234216690063477],["▁რომელმაც",-13.234216690063477],["▁rơi",-13.234217643737791],["▁اداروں",-13.23421859741211],["▁знайти",-13.234219551086426],["▁kèm",-13.234223365783691],["▁Sheekh",-13.234228134155272],["▁personnage",-13.234243392944336],["▁veiligheid",-13.234248161315918],["ityksen",-13.23426628112793],["▁اللجنة",-13.23426914215088],["▁rayonunun",-13.234278678894045],["குமார்",-13.234288215637209],["▁zanimiv",-13.234294891357422],["икалык",-13.234304428100586],["չի",-13.23432445526123],["▁Saw",-13.234334945678713],["ницима",-13.234335899353027],["▁שפ",-13.23434829711914],["歲的",-13.234360694885254],["नाश",-13.234363555908203],["可能會",-13.234366416931152],["رحل",-13.234370231628418],["▁näitä",-13.23437309265
1367],["▁ถนน",-13.234383583068848],["▁Deste",-13.234400749206545],["▁زيادة",-13.234403610229492],["ក៏",-13.234437942504885],["▁போய்",-13.234451293945312],["▁comercio",-13.234457969665527],["▁Avo",-13.234461784362791],["第二次",-13.234466552734377],["▁ових",-13.234484672546388],["▁erotic",-13.234490394592283],["▁actualmente",-13.234527587890623],["VIS",-13.234538078308104],["▁Juu",-13.23454761505127],["▁acha",-13.23454761505127],["▁Opin",-13.234551429748535],["▁łatwo",-13.234560012817385],["▁hiel",-13.234563827514648],["▁setor",-13.234570503234863],["▁Ля",-13.234588623046877],["▁nessa",-13.23462963104248],["▁nár",-13.234630584716797],["స్టర్",-13.234654426574709],["▁regret",-13.234655380249023],["राग",-13.234660148620604],["▁темата",-13.234663009643556],["▁നാം",-13.2346830368042],["▁bestimmt",-13.234688758850098],["izing",-13.234692573547363],["löst",-13.234697341918944],["▁kitabı",-13.234745025634766],["▁Dön",-13.234752655029297],["운동",-13.234759330749512],["โหลด",-13.234763145446776],["gami",-13.234764099121094],["▁sajt",-13.23477268218994],["лығын",-13.23477554321289],["▁persönlich",-13.234786033630373],["▁monster",-13.234796524047852],["▁Lav",-13.234810829162598],["▁саат",-13.23481559753418],["▁العملية",-13.234827041625977],["high",-13.234835624694824],["träff",-13.234835624694824],["▁soirée",-13.234854698181152],["▁призначення",-13.234854698181152],["▁pako",-13.23486328125],["▁नेपालका",-13.234864234924316],["សេ",-13.234865188598633],["owości",-13.23486614227295],["ואה",-13.23487377166748],["▁móti",-13.234882354736328],["▁BEN",-13.234886169433594],["ઉ",-13.234917640686035],["ிற",-13.234920501708984],["的目标",-13.234930992126465],["▁ovih",-13.23495864868164],["▁suurem",-13.23495864868164],["▁dolara",-13.234962463378906],["▁Hogyan",-13.23496913909912],["▁Ghe",-13.234979629516602],["▁Kend",-13.235002517700195],["▁kì",-13.235005378723145],["▁طبقه",-13.23501205444336],["মু",-13.2350435256958],["▁gjith",-13.235045433044434],["▁Semi",-13.2350492477417],["σαμε",-13.235065460205078],["▁bura",-13.235065460205078],["▁xul",-13.235069274902344],["риг",-13.23507595062256],["ржан",-13.235078811645508],["▁فارم",-13.23508071899414],["▁pavar",-13.235101699829102],["▁10,000",-13.235115051269531],["▁contingut",-13.235119819641112],["▁وغيره",-13.235135078430176],["▁taktik",-13.235137939453123],["သင်",-13.235152244567873],["fish",-13.2351713180542],["▁luck",-13.235187530517578],["▁clique",-13.235204696655272],["ovaného",-13.23522663116455],["LLE",-13.235257148742676],["ിയും",-13.235258102416992],["▁बढ़ा",-13.23526382446289],["klan",-13.235292434692385],["▁vides",-13.23530387878418],["▁ค้นหา",-13.235311508178713],["▁hüquqi",-13.235313415527344],["▁सच",-13.235326766967772],["▁nærmere",-13.235329627990724],["▁భావ",-13.23533535003662],["▁enfant",-13.235350608825684],["mmelse",-13.235413551330566],["▁dubl",-13.235472679138184],["▁دهه",-13.235487937927246],["▁inilah",-13.235498428344728],["镜",-13.235498428344728],["768",-13.23550796508789],["▁မိုး",-13.235544204711914],["▁bemutat",-13.235557556152344],["▁क्रम",-13.235563278198242],["բան",-13.235567092895508],["▁costs",-13.235569953918455],["rance",-13.23558521270752],["记忆",-13.235594749450684],["▁dici",-13.23561668395996],["プラン",-13.235621452331545],["觀察",-13.235640525817873],["😍",-13.235641479492188],["▁నిర్మాత",-13.235642433166504],["▁melibatkan",-13.23564338684082],["▁pertumbuhan",-13.23564338684082],["▁Ըստ",-13.23564338684082],["▁बुधबार",-13.23564338684082],["▁सिनेमा",-13.23564338684082],["▁ಟೆಸ್ಟ್",-13.23564338684082],["▁ສປປ",-13.23564338684082],["▁istəyən",-13.2356
4624786377],["▁въпреки",-13.23564624786377],["▁אָדער",-13.23564624786377],["▁jarcento",-13.23564910888672],["ลับ",-13.23565673828125],["▁dewleta",-13.23565673828125],["▁касније",-13.23565673828125],["▁argitaratu",-13.235657691955566],["▁Jehovah",-13.235658645629885],["▁quoniam",-13.235658645629885],["▁thận",-13.23566436767578],["ରୂପ",-13.235678672790527],["daginn",-13.235697746276855],["кров",-13.235698699951172],["ถูกต้อง",-13.235698699951172],["▁סיפור",-13.235698699951172],["▁podla",-13.23570156097412],["▁mesecev",-13.235732078552246],["▁ରୋ",-13.23573398590088],["相关的",-13.235737800598145],["ทันที",-13.235739707946776],["▁fourni",-13.235742568969728],["▁capta",-13.235756874084473],["▁нийтийн",-13.235770225524902],["▁वर्षे",-13.23580551147461],["udun",-13.235806465148926],["▁శివ",-13.235807418823242],["▁bếp",-13.235837936401367],["qir",-13.235892295837402],["了一些",-13.235912322998049],["▁Tiêu",-13.235913276672363],["čite",-13.235918045043944],["semi",-13.235919952392578],["बद्ध",-13.23592758178711],["การจัด",-13.235931396484377],["▁momentu",-13.23593521118164],["тичні",-13.23594856262207],["stieg",-13.235960960388184],["Finan",-13.23599338531494],["र्च",-13.235994338989258],["മ്പ്",-13.235997200012209],["חבר",-13.236005783081056],["ాలంటే",-13.236029624938965],["▁bilməz",-13.236044883728027],["гнат",-13.236045837402344],["▁tunni",-13.236069679260254],["こそ",-13.236108779907228],["▁રમત",-13.23611068725586],["▁شهرداری",-13.236116409301758],["▁korpus",-13.23612117767334],["plats",-13.236125946044922],["▁verano",-13.236141204833984],["▁fahren",-13.23614501953125],["▁barrio",-13.23617172241211],["παν",-13.236175537109377],["▁rendel",-13.236186027526855],["rør",-13.236199378967283],["▁ನಿಂದ",-13.236199378967283],["алык",-13.236202239990234],["▁Veliko",-13.236230850219728],["bách",-13.236236572265623],["▁Lær",-13.236261367797852],["▁изрази",-13.236263275146484],["▁Dialog",-13.236273765563965],["nende",-13.236288070678713],["▁Cổ",-13.23629379272461],["ရယ္",-13.23629665374756],["▁Kare",-13.236334800720217],["կն",-13.23633861541748],["履行",-13.236340522766112],["▁zdi",-13.236369132995604],["ത്രി",-13.23639678955078],["entibus",-13.23640251159668],["нскай",-13.23642635345459],["سري",-13.23643684387207],["▁completar",-13.236445426940918],["ителна",-13.236456871032717],["▁luxe",-13.236461639404297],["fólk",-13.236495971679688],["kları",-13.236504554748535],["▁curat",-13.2365083694458],["▁കലാ",-13.23652172088623],["▁Kommentarer",-13.236573219299316],["▁последовател",-13.236590385437012],["▁трен",-13.23660659790039],["▁አብይ",-13.236618995666504],["还可以",-13.236628532409668],["effekt",-13.236648559570312],["ନ୍ନ",-13.236648559570312],["ÅR",-13.236659049987791],["cert",-13.236663818359377],["malu",-13.236676216125488],["▁holat",-13.236692428588867],["▁колдон",-13.236706733703612],["súly",-13.236738204956056],["interès",-13.236745834350586],["▁haal",-13.236754417419434],["▁tärkeä",-13.236760139465332],["▁competente",-13.23678970336914],["▁Vari",-13.23679542541504],["▁alınan",-13.236833572387695],["იფ",-13.236838340759276],["閱",-13.236857414245604],["▁yapar",-13.236886978149414],["אַנט",-13.23691177368164],["▁ilaç",-13.23691463470459],["▁પગ",-13.236915588378906],["▁нарын",-13.236927032470703],["▁balan",-13.236946105957031],["ancien",-13.23696231842041],["一つ",-13.236963272094728],["▁করবে",-13.236976623535156],["▁eskort",-13.236988067626951],["▁ข",-13.23698902130127],["▁החוק",-13.237025260925291],["楚",-13.23702907562256],["ówka",-13.237045288085938],["▁Maqedoni",-13.237046241760254],["φει",-13.237061500549316],["넷",-13
.237070083618164],["▁బీజేపీ",-13.23707103729248],["▁Derneği",-13.237071990966797],["▁дзяржавы",-13.237071990966797],["샤",-13.237071990966797],["▁Surabaya",-13.237074851989746],["immagine",-13.237075805664062],["▁ලේකම්",-13.23707675933838],["▁엄마",-13.237077713012695],["လြန္",-13.237078666687012],["▁Bonjour",-13.237080574035645],["▁acoperi",-13.237091064453123],["▁Tud",-13.23709774017334],["▁parfait",-13.237104415893556],["ៀង",-13.237105369567873],["▁Proc",-13.237106323242188],["▁کندهار",-13.237133026123049],["ставка",-13.237135887145996],["▁Sài",-13.237146377563477],["▁реша",-13.237150192260742],["▁voorval",-13.237174034118652],["▁ontvangen",-13.237175941467283],["▁книг",-13.237176895141602],["pensa",-13.237180709838867],["▁forces",-13.237183570861816],["▁HDMI",-13.237186431884766],["▁vašem",-13.237186431884766],["▁jamiyat",-13.237207412719728],["▁användning",-13.237228393554688],["有著",-13.237229347229004],["건설",-13.23723602294922],["▁වසරේ",-13.2372465133667],["hotmail",-13.237263679504396],["▁មេ",-13.237318992614746],["▁directement",-13.237327575683594],["▁چونڊ",-13.237342834472656],["▁cazare",-13.237343788146973],["ເຊື່ອ",-13.237350463867188],["▁yetiş",-13.237367630004885],["шаралар",-13.23742961883545],["-80",-13.237441062927246],["▁градина",-13.237444877624512],["▁cháy",-13.237467765808104],["▁uppdrag",-13.237470626831056],["▁ಕ್ರಮ",-13.237470626831056],["▁нашиот",-13.237491607666016],["vlada",-13.237496376037598],["יפים",-13.237505912780762],["▁lapsen",-13.237505912780762],["▁ndiyo",-13.237537384033203],["לוג",-13.237561225891112],["▁жоба",-13.237592697143556],["ӨЛ",-13.237595558166504],["еното",-13.23759651184082],["▁KAS",-13.237641334533691],["다면",-13.237641334533691],["પિ",-13.23764419555664],["▁Naka",-13.237651824951172],["evento",-13.23766040802002],["▁تبادل",-13.237668991088867],["入り",-13.2376708984375],["▁පැත්ත",-13.237675666809082],["▁Script",-13.23768138885498],["iĝ",-13.23768424987793],["ขนาดเล็ก",-13.23769760131836],["篇文章",-13.23770809173584],["OSI",-13.23771858215332],["▁costume",-13.237723350524902],["Хо",-13.23772430419922],["▁Alhamdulillah",-13.237767219543455],["רוג",-13.237789154052734],["いただけます",-13.23782444000244],["▁спорта",-13.23784637451172],["▁حامل",-13.237872123718262],["▁예정",-13.237872123718262],["ringar",-13.237878799438477],["īnu",-13.237900733947754],["▁musliman",-13.23790168762207],["рба",-13.23791790008545],["▁Lega",-13.237924575805664],["▁రోజుల",-13.237944602966309],["intu",-13.23795223236084],["▁kuvi",-13.238018989562988],["▁verrà",-13.238018989562988],["▁okazis",-13.238032341003418],["BRO",-13.238039016723633],["▁mintha",-13.238093376159668],["▁lemah",-13.238107681274414],["▁dirba",-13.238114356994627],["▁lete",-13.238120079040527],["タイ",-13.238129615783691],["CF",-13.238139152526855],["▁Diri",-13.238165855407717],["အိမ္",-13.23819637298584],["bonus",-13.238201141357422],["▁กลุ่ม",-13.238234519958496],["ለቀ",-13.238277435302734],["▁potrzeby",-13.238279342651367],["▁bedrag",-13.238286972045898],["▁sõidu",-13.238314628601074],["▁tilaa",-13.238314628601074],["ビー",-13.238314628601074],["kens",-13.23833179473877],["▁otroke",-13.238358497619627],["वंत",-13.23837184906006],["мут",-13.238381385803224],["▁recorda",-13.238389015197754],["▁Ին",-13.238395690917969],["▁curta",-13.23840618133545],["380",-13.238418579101562],["▁causas",-13.238421440124512],["ിപ്പ",-13.238429069519045],["етесь",-13.23843002319336],["差距",-13.23843479156494],["失败",-13.238443374633787],["▁خىزمەت",-13.238468170166016],["▁Куб",-13.238484382629396],["詳しく",-13.238492965698242],["ฆ",-13.238493919
37256],["▁Сондықтан",-13.238502502441406],["▁मुस्लिम",-13.238502502441406],["▁ଶିକ୍ଷା",-13.238502502441406],["▁Muhammed",-13.238503456115724],["▁kondiĉoj",-13.238503456115724],["博物館",-13.23850917816162],["▁सड़क",-13.238511085510254],["▁Gallery",-13.23852252960205],["▁багыт",-13.23852252960205],["ჩინ",-13.238547325134276],["▁матэрыялаў",-13.238551139831545],["▁ඔවුන්ගේ",-13.238565444946287],["▁палат",-13.238566398620604],["▁Sungai",-13.238570213317873],["▁اولیه",-13.238574028015137],["有機會",-13.238574028015137],["▁warunki",-13.238585472106934],["▁prokur",-13.238646507263184],["▁paremmin",-13.238651275634766],["▁voort",-13.238655090332031],["▁тә",-13.238669395446776],["▁Voj",-13.238673210144045],["robo",-13.238689422607422],["▁senin",-13.238691329956056],["▁இவர்",-13.238703727722168],["▁Щоб",-13.23870849609375],["▁הילדים",-13.238714218139648],["ლეთ",-13.238755226135254],["▁Varför",-13.23875904083252],["▁Spain",-13.238765716552734],["ჩერ",-13.238773345947266],["▁pastāv",-13.238791465759276],["▁грамот",-13.238807678222656],["▁менеджер",-13.238815307617188],["ędzi",-13.238845825195312],["▁diretor",-13.23887062072754],["▁mennä",-13.238871574401855],["▁tõus",-13.238871574401855],["ांवर",-13.238872528076172],["▁cinq",-13.238937377929688],["▁expected",-13.23894500732422],["▁Styr",-13.238984107971191],["кевіч",-13.238990783691406],["▁Duy",-13.238991737365724],["▁kläder",-13.23901653289795],["▁боло",-13.23905086517334],["▁contiene",-13.23906421661377],["▁mè",-13.239072799682615],["▁ақ",-13.239116668701172],["▁գործընթաց",-13.239134788513184],["▁mēr",-13.239171028137209],["ပီ",-13.239173889160156],["▁Mich",-13.239178657531738],["bolt",-13.239222526550291],["▁mile",-13.23922634124756],["▁razum",-13.239229202270508],["քով",-13.239252090454102],["langen",-13.239295959472656],["▁adultos",-13.239320755004885],["▁radar",-13.239336013793944],["▁1991.",-13.239339828491213],["69)",-13.239389419555664],["▁provincial",-13.239419937133787],["▁gestor",-13.239426612854004],["▁떠",-13.23943328857422],["▁hotela",-13.2394437789917],["▁súťaž",-13.239465713500977],["ッド",-13.239492416381836],["▁мэндийн",-13.239514350891112],["ATION",-13.239542961120604],["▁вера",-13.239583015441896],["▁ब्या",-13.239588737487791],["▁Holz",-13.239594459533691],["verit",-13.239596366882324],["ıyorum",-13.23960304260254],["март",-13.239604949951172],["νους",-13.239612579345703],["▁didalam",-13.239624977111816],["jiem",-13.23967456817627],["▁Tres",-13.239713668823242],["▁dár",-13.23972511291504],["▁edizione",-13.239726066589355],["▁Ελλην",-13.239744186401367],["▁پہنچا",-13.239745140075684],["▁einzelnen",-13.239758491516112],["▁condicións",-13.23976230621338],["▁separate",-13.23978042602539],["▁իրավիճակ",-13.239801406860352],["▁вярва",-13.239806175231934],["▁qoldi",-13.23982048034668],["iranju",-13.23983097076416],["로부터",-13.23983669281006],["ୁଛି",-13.239837646484377],["isola",-13.239855766296388],["▁bici",-13.239875793457031],["▁आत",-13.23990249633789],["▁магчыма",-13.23991584777832],["тих",-13.239928245544434],["▁Dywedodd",-13.239934921264648],["▁keselamatan",-13.239934921264648],["▁lahatsoratra",-13.239934921264648],["▁memutuskan",-13.239934921264648],["▁thừa",-13.239934921264648],["▁відбувається",-13.239934921264648],["▁դոլար",-13.239934921264648],["▁චෝදනා",-13.239935874938965],["▁dětí",-13.23993682861328],["▁zagotovi",-13.23993682861328],["▁ڪوشش",-13.239937782287598],["▁возможны",-13.23993968963623],["ကု",-13.239940643310549],["▁ટકા",-13.239954948425291],["▁gyógyszer",-13.239956855773926],["在家",-13.239959716796877],["別の",-13.23996353149414],["▁
sokongan",-13.239968299865724],["mpää",-13.23997402191162],["▁тийм",-13.239978790283203],["▁Corporation",-13.23999309539795],["▁ზე",-13.239994049072266],["▁опять",-13.239995956420898],["natural",-13.239996910095217],["▁смерти",-13.24001407623291],["▁ଦେବା",-13.24003791809082],["njima",-13.24004364013672],["▁wybiera",-13.2400484085083],["▁Systems",-13.240068435668944],["▁Hoch",-13.240077018737791],["▁vyombo",-13.240079879760742],["▁býva",-13.240110397338867],["▁muzyk",-13.2401123046875],["▁poziomie",-13.240155220031738],["▁دنیای",-13.240166664123535],["ଏସ୍",-13.24017333984375],["能在",-13.240191459655762],["cone",-13.240195274353027],["ṛ",-13.240195274353027],["▁ലെ",-13.24019718170166],["▁poika",-13.240219116210938],["▁жасап",-13.240225791931152],["▁yararlan",-13.240230560302734],["▁lignende",-13.240242004394531],["▁வேறு",-13.240249633789062],["▁lô",-13.24025058746338],["▁darbą",-13.240251541137695],["▁делови",-13.240254402160645],["TÖ",-13.240262985229492],["▁الآخر",-13.240269660949709],["をクリック",-13.240270614624023],["▁لاعب",-13.240275382995604],["塑",-13.2402925491333],["▁Boek",-13.240311622619627],["qiq",-13.240317344665527],["▁यात",-13.240410804748535],["ంటు",-13.240419387817385],["▁τσ",-13.24043083190918],["▁sisteme",-13.240434646606444],["▁പ്രാ",-13.240458488464355],["▁نوي",-13.240469932556152],["▁வாய்ப்பு",-13.240480422973633],["heil",-13.240488052368164],["▁placere",-13.240503311157228],["▁piec",-13.240506172180176],["▁Baku",-13.240509033203123],["jával",-13.240530014038086],["сле",-13.24053192138672],["▁दुःख",-13.240535736083984],["ိုင္",-13.240538597106934],["ΜΕ",-13.240553855895996],["▁حو",-13.240560531616213],["ನೀ",-13.240580558776855],["▁reçu",-13.24059772491455],["ukų",-13.240618705749512],["ంతా",-13.240622520446776],["▁21:00",-13.240633964538574],["在他",-13.240638732910156],["▁පවා",-13.24064826965332],["▁गोली",-13.24065113067627],["天天",-13.24066162109375],["▁ogled",-13.240689277648926],["stres",-13.24070644378662],["▁Черн",-13.240752220153809],["▁piene",-13.240761756896973],["▁ຊີ",-13.240761756896973],["▁Хр",-13.240762710571287],["▁ফা",-13.240775108337402],["yip",-13.240777015686035],["ĝu",-13.2407808303833],["diyo",-13.240785598754885],["▁164",-13.240785598754885],["▁luminos",-13.240808486938477],["▁пайдалан",-13.240808486938477],["▁صورتی",-13.240819931030272],["ээгүй",-13.240822792053224],["เรื่องนี้",-13.240822792053224],["▁Συμ",-13.240829467773438],["ຣິ",-13.2408447265625],["mesini",-13.240863800048828],["▁பற்றிய",-13.240866661071776],["άνει",-13.240887641906738],["сіць",-13.24090576171875],["▁dambe",-13.240907669067385],["▁Cze",-13.240930557250977],["▁rahul",-13.24098777770996],["▁realitat",-13.240994453430176],["hæð",-13.241000175476074],["▁ಗ್ರಾಮ",-13.241019248962402],["▁જેવા",-13.241025924682615],["λιά",-13.241031646728516],["ძლიერ",-13.241037368774414],["ဘုရား",-13.241045951843262],["▁Въз",-13.24106216430664],["▁हुँदैन",-13.241068840026855],["ടക്ക",-13.24110984802246],["一百",-13.24112033843994],["▁долларов",-13.241127014160156],["▁véd",-13.24114990234375],["ଶୀ",-13.241158485412598],["▁Dot",-13.241168975830078],["дір",-13.24119472503662],["▁Община",-13.241209030151367],["▁أكد",-13.241215705871582],["▁какая",-13.241222381591797],["ಬಿಟ್ಟ",-13.24123191833496],["▁Dema",-13.241239547729492],["▁tria",-13.241243362426758],["mainen",-13.241247177124023],["▁facil",-13.241256713867188],["osz",-13.241259574890137],["▁ദാ",-13.241265296936035],["ສັດ",-13.241272926330566],["▁حافظ",-13.241276741027832],["▁nyumba",-13.241277694702148],["OŠ",-13.241286277770996],["▁konkurent",-13.241291046142
578],["▁விடு",-13.241291046142578],["spett",-13.24129581451416],["毅",-13.241315841674805],["▁berani",-13.241320610046388],["ແມ່",-13.241321563720703],["▁ಇಲಾಖೆ",-13.241329193115234],["猜",-13.2413330078125],["▁ತಿಂಗಳ",-13.241333961486816],["▁mayores",-13.241339683532717],["ნერ",-13.24134635925293],["価値",-13.241348266601562],["▁tyto",-13.241351127624512],["彭",-13.241352081298828],["▁көмек",-13.24135684967041],["▁മോഹന്",-13.241362571716309],["▁ગાંધી",-13.24136734008789],["▁magnifique",-13.241369247436523],["▁மனைவி",-13.241369247436523],["▁отговаря",-13.24137020111084],["▁муниципального",-13.241372108459473],["▁phiếu",-13.241374015808104],["▁अगदी",-13.241378784179688],["疑問",-13.241384506225586],["▁երեք",-13.241389274597168],["▁כולם",-13.241399765014648],["▁ΓΙΑ",-13.241400718688965],["▁trùng",-13.241415023803713],["▁придоби",-13.241418838500977],["▁gehören",-13.241424560546877],["▁نظارت",-13.241446495056152],["עיצוב",-13.241451263427734],["▁ימי",-13.241456985473633],["▁prijatelji",-13.241461753845217],["▁износ",-13.241467475891112],["árov",-13.241473197937012],["JER",-13.241493225097656],["▁rekomend",-13.241507530212402],["▁coração",-13.241519927978516],["رار",-13.24153995513916],["spør",-13.241549491882324],["овому",-13.241595268249512],["▁organizar",-13.241597175598145],["▁340",-13.24160861968994],["世界的",-13.241617202758787],["stāv",-13.241622924804688],["tahun",-13.24164581298828],["▁förening",-13.24168300628662],["明確",-13.241697311401367],["▁وغيرها",-13.241726875305176],["▁herba",-13.241729736328123],["▁Giovanni",-13.24173355102539],["ંજ",-13.241735458374023],["тры",-13.24173641204834],["тури",-13.24174690246582],["多く",-13.241762161254885],["▁искуство",-13.241796493530272],["कम",-13.241806030273438],["▁такви",-13.241806030273438],["šću",-13.241819381713867],["▁Хүн",-13.241836547851562],["geving",-13.24183750152588],["▁Ühe",-13.24184799194336],["ушылар",-13.241849899291992],["▁Част",-13.24185562133789],["ብት",-13.24185848236084],["ΛΗ",-13.241862297058104],["▁формата",-13.24189281463623],["NZ",-13.24193000793457],["▁प्रकरण",-13.241954803466797],["▁Üz",-13.241971969604492],["kwenza",-13.241973876953123],["▁дайында",-13.241982460021973],["ounce",-13.241985321044922],["▁Opel",-13.242000579833984],["▁väärtus",-13.2420015335083],["▁operatori",-13.242012977600098],["▁tette",-13.242029190063477],["évolution",-13.242039680480955],["▁wyci",-13.242074966430664],["▁vaadata",-13.242080688476562],["的声音",-13.24208641052246],["▁truth",-13.242090225219728],["▁Итали",-13.242137908935549],["rén",-13.242138862609863],["hug",-13.242149353027344],["richtung",-13.24216365814209],["▁Gəncə",-13.242164611816406],["▁మార్",-13.242170333862305],["▁Tipps",-13.24217414855957],["▁tangu",-13.242197036743164],["▁средно",-13.24220371246338],["фф",-13.242213249206545],["▁उतर",-13.242237091064451],["▁հիմն",-13.24224090576172],["▁ਵੱਖ",-13.242246627807615],["íoch",-13.242298126220703],["▁პროფ",-13.242315292358398],["කිරීම",-13.242344856262209],["پورت",-13.242349624633787],["▁medel",-13.242359161376951],["▁Dost",-13.242389678955078],["मार्फत",-13.24245834350586],["larımızın",-13.242483139038086],["AW",-13.242484092712402],["▁rov",-13.24250602722168],["▁κρίση",-13.242524147033691],["다운",-13.242524147033691],["▁SEL",-13.24253273010254],["YR",-13.24255657196045],["tyt",-13.24256706237793],["▁hiki",-13.242583274841309],["▁نشد",-13.242597579956056],["▁prevoz",-13.242609024047852],["ਗਿ",-13.242636680603027],["▁تريد",-13.24264144897461],["▁सोमवार",-13.242648124694824],["▁폐",-13.24266242980957],["▁бого",-13.242695808410645],["▁soggetti",-13.2
4269676208496],["▁gizon",-13.242721557617188],["ımın",-13.242724418640137],["▁169",-13.242734909057615],["▁kw",-13.242740631103516],["▁दाखव",-13.242759704589844],["บรร",-13.24276065826416],["صيد",-13.242779731750488],["biljež",-13.242786407470703],["teist",-13.242794036865234],["▁Šu",-13.242803573608398],["▁εμφαν",-13.242803573608398],["พยาธิ",-13.242805480957031],["▁Bedeutung",-13.242806434631348],["▁nauczyciel",-13.242806434631348],["▁parturient",-13.242806434631348],["▁անձանց",-13.242806434631348],["▁चुदाई",-13.242806434631348],["▁bietjie",-13.24280834197998],["▁Said",-13.24281120300293],["▁ਸ਼ਹਿਰ",-13.242812156677246],["▁tirsdag",-13.24282455444336],["▁taqdim",-13.242833137512209],["▁પહેલાં",-13.24284553527832],["▁yolda",-13.242856979370115],["▁medewerkers",-13.242860794067385],["[13]",-13.242863655090332],["▁Бро",-13.242865562438965],["▁شاد",-13.242865562438965],["▁мнению",-13.24288272857666],["▁juridic",-13.242886543273926],["超級",-13.242887496948242],["▁известны",-13.242895126342772],["▁vastavalt",-13.242902755737305],["أفراد",-13.242905616760254],["▁கொடுக்க",-13.242907524108888],["jh",-13.242914199829102],["▁ممكن",-13.242921829223633],["▁апартамент",-13.242923736572266],["▁contatto",-13.242926597595217],["▁kralj",-13.242941856384276],["נוח",-13.24294376373291],["uras",-13.24294662475586],["▁verloren",-13.242960929870604],["▁travailler",-13.242963790893556],["▁работников",-13.242968559265137],["ğun",-13.24298095703125],["▁scias",-13.242981910705566],["不停",-13.242984771728516],["▁oikeus",-13.242986679077148],["salamat",-13.242995262145996],["▁Bravo",-13.243002891540527],["ssar",-13.243019104003906],["prije",-13.243026733398438],["实行",-13.24306869506836],["▁Бей",-13.24307632446289],["▁tuttu",-13.243077278137209],["اسم",-13.243083953857422],["▁රාත්",-13.243084907531738],["▁κό",-13.243103981018066],["тс",-13.243114471435549],["▁gioca",-13.243128776550291],["ակը",-13.243135452270508],["▁asiakkai",-13.243165969848633],["köszön",-13.243182182312012],["▁dauden",-13.243182182312012],["ບັນຫາ",-13.243191719055176],["оцін",-13.243229866027832],["▁dins",-13.243244171142578],["友好",-13.243247985839844],["ियर",-13.243252754211426],["▁Season",-13.243252754211426],["▁छुट",-13.243257522583008],["▁дает",-13.243294715881348],["▁باقي",-13.243294715881348],["▁الإعلام",-13.24329948425293],["ζει",-13.24330997467041],["▁lez",-13.24332046508789],["▁takiej",-13.24332046508789],["▁شهيد",-13.243332862854004],["▁शास्त्र",-13.243372917175291],["▁avval",-13.243374824523926],["UNE",-13.243402481079102],["最も",-13.243404388427734],["那里",-13.243408203125],["ट्टा",-13.243422508239746],["▁డీ",-13.243422508239746],["▁supuesto",-13.243441581726074],["▁നോട്ട",-13.243473052978516],["▁शिक",-13.243474006652832],["▁Laika",-13.24347972869873],["▁Schau",-13.24348258972168],["▁Unë",-13.243501663208008],["▁کنی",-13.243508338928224],["▁наприклад",-13.243521690368652],["▁مسئولیت",-13.243531227111816],["दल",-13.243542671203612],["▁Fotograf",-13.24354648590088],["▁അഭിപ്രായ",-13.243563652038574],["▁väljer",-13.243566513061523],["ნელი",-13.24357795715332],["平等",-13.243579864501951],["▁napja",-13.243587493896484],["▁ڈھ",-13.243595123291016],["▁earn",-13.243597030639648],["▁океан",-13.243610382080078],["▁खुश",-13.243612289428713],["▁ಸೇರಿ",-13.243618965148926],["aint",-13.243642807006836],["街道",-13.243667602539062],["▁documental",-13.243675231933594],["▁qayda",-13.243698120117188],["▁morale",-13.24370002746582],["▁الکترونیک",-13.24372386932373],["▁dijual",-13.24372673034668],["არის",-13.243729591369627],["▁keby",-13.243739128112791],["েট",-13.
243741035461426],["▁Ау",-13.243748664855955],["▁पत्रिका",-13.24375820159912],["паѓа",-13.24377155303955],["રૂ",-13.243778228759766],["▁Cart",-13.24382209777832],["ናዊ",-13.243823051452637],["óns",-13.243834495544434],["▁uğra",-13.24383544921875],["▁Roland",-13.243850708007812],["▁Воз",-13.243871688842772],["▁külasta",-13.243884086608888],["опол",-13.243891716003418],["▁internete",-13.243900299072266],["▁bebek",-13.243901252746582],["▁AUTO",-13.243911743164062],["▁பிடித்த",-13.243911743164062],["icama",-13.243945121765137],["▁өөрөө",-13.243951797485352],["pisane",-13.2439546585083],["▁กระเป๋า",-13.244003295898438],["▁ajánlat",-13.244014739990234],["▁Vivi",-13.244026184082031],["▁Klin",-13.24403190612793],["quil",-13.244053840637209],["очку",-13.244053840637209],["▁Stress",-13.24405574798584],["▁AY",-13.244074821472168],["ფეხ",-13.244089126586914],["▁девет",-13.244093894958496],["აძის",-13.244114875793455],["▁допуска",-13.24412727355957],["▁төл",-13.24413013458252],["拿出",-13.244134902954102],["AKU",-13.24413776397705],["skriften",-13.244215965270996],["gee",-13.244219779968262],["▁ræða",-13.244222640991213],["risht",-13.244232177734377],["พิจารณา",-13.244243621826172],["▁ხელმძღვანელ",-13.244244575500488],["▁González",-13.244245529174805],["▁bashkëpunim",-13.244245529174805],["▁cắt",-13.244245529174805],["▁peningkatan",-13.244245529174805],["▁өнөөдөр",-13.244247436523438],["▁શહેર",-13.24424934387207],["tider",-13.244255065917969],["▁ಪ್ರವಾಸ",-13.244256973266602],["▁बुधवार",-13.244263648986816],["▁Quanto",-13.244272232055664],["▁jälleen",-13.244277000427246],["▁විතරයි",-13.24428939819336],["▁samarbete",-13.244304656982422],["▁fatal",-13.24431037902832],["แฮ",-13.244324684143066],["▁رضی",-13.24433135986328],["方の",-13.24435043334961],["ிக்",-13.244356155395508],["▁ఎవరి",-13.244383811950684],["▁Sela",-13.24438762664795],["ūras",-13.244390487670898],["ا۔",-13.244439125061035],["▁vuokra",-13.244450569152832],["▁tả",-13.244463920593262],["põe",-13.244479179382324],["▁trim",-13.244514465332031],["▁Venemaa",-13.244522094726562],["▁бүл",-13.244552612304688],["▁Budi",-13.244582176208496],["енного",-13.24459171295166],["▁മാറി",-13.24459457397461],["▁hof",-13.244623184204102],["▁सवारी",-13.244624137878418],["▁第二",-13.244644165039062],["▁venres",-13.24467945098877],["कुल",-13.244715690612791],["వాస",-13.244765281677246],["лює",-13.24478530883789],["▁обща",-13.244792938232422],["▁ቤቱ",-13.24479866027832],["▁yapılması",-13.244812965393066],["живе",-13.244851112365724],["▁但是",-13.24485969543457],["▁Freone",-13.244878768920898],["涨",-13.244908332824709],["kjent",-13.244948387145996],["▁príbeh",-13.244958877563477],["▁Ema",-13.244976997375488],["▁Ց",-13.244998931884766],["紀念",-13.245012283325195],["ायची",-13.245020866394045],["ڭلار",-13.245028495788574],["venuto",-13.24502944946289],["▁сопствен",-13.24506950378418],["문학",-13.245077133178713],["tettava",-13.245094299316406],["▁Shk",-13.245101928710938],["நிதி",-13.245121002197266],["шего",-13.245142936706545],["▁ນະ",-13.245153427124023],["werte",-13.245155334472656],["▁McC",-13.24517059326172],["ậu",-13.245192527770996],["andó",-13.245223999023438],["казывать",-13.245224952697754],["▁Før",-13.245226860046388],["▁کوه",-13.245259284973145],["当年",-13.24527645111084],["Ди",-13.245287895202637],["▁जुड़े",-13.245288848876951],["▁găsi",-13.245327949523926],["▁izrazi",-13.24533462524414],["ತ್ರಿ",-13.245359420776367],["▁hồng",-13.245365142822266],["ሄድ",-13.245368003845217],["ແຫ່ງ",-13.245373725891112],["പ്പോഴ",-13.24538803100586],["ejšie",-13.245394706726074],["▁أمن",-13.245
423316955566],["▁øge",-13.245429039001465],["член",-13.24543571472168],["▁Astro",-13.245442390441896],["▁imposto",-13.245458602905272],["▁ondan",-13.245468139648438],["ઘા",-13.245475769042969],["▁TG",-13.245478630065918],["ljuje",-13.245482444763184],["▁externe",-13.24549961090088],["ירי",-13.245500564575195],["▁സഹായ",-13.245500564575195],["▁қар",-13.24551486968994],["واسط",-13.245556831359863],["▁Herria",-13.245565414428713],["▁passione",-13.245569229125977],["nictwa",-13.245572090148926],["ઠા",-13.245587348937988],["▁قاتارلىق",-13.245591163635254],["▁Trên",-13.245615005493164],["▁yönet",-13.245622634887695],["▁близо",-13.24563217163086],["hran",-13.245634078979492],["رصد",-13.245635032653809],["LAP",-13.245638847351074],["▁illegal",-13.245651245117188],["▁Hawa",-13.245659828186035],["▁авион",-13.245664596557615],["уулах",-13.24567985534668],["▁Europäische",-13.245686531066896],["▁gynnwys",-13.245686531066896],["▁জুলাই",-13.245686531066896],["▁সমস্যা",-13.245686531066896],["▁ମନ୍ଦିର",-13.245686531066896],["▁lưng",-13.245687484741213],["▁ජාත්",-13.245687484741213],["▁straf",-13.245689392089844],["▁πρόεδρος",-13.245689392089844],["▁loạn",-13.245691299438477],["▁කුඩා",-13.245691299438477],["▁widać",-13.24570655822754],["▁zaś",-13.245716094970703],["ግር",-13.24575138092041],["stipr",-13.245756149291992],["ផលិត",-13.245757102966309],["▁Stö",-13.24575901031494],["▁שיעור",-13.245759963989258],["▁etkili",-13.24576473236084],["▁Iesu",-13.24579620361328],["misjon",-13.245797157287598],["▁1980-",-13.245805740356444],["▁ਵੇਖ",-13.245808601379396],["▁определени",-13.245838165283203],["skoðun",-13.245840072631836],["ਵੇਂ",-13.245848655700684],["stron",-13.245857238769531],["▁kaudu",-13.245858192443848],["▁پت",-13.245893478393556],["▁carry",-13.245898246765137],["▁Funda",-13.245928764343262],["夢想",-13.245967864990234],["▁ଫ",-13.24599838256836],["▁funktioner",-13.246018409729004],["▁амаль",-13.246021270751951],["ጡት",-13.24602222442627],["▁besch",-13.24604320526123],["▁poesia",-13.246053695678713],["▁толук",-13.246065139770508],["iunile",-13.246088027954102],["大多",-13.246092796325684],["▁ഉള്",-13.246110916137695],["▁Engel",-13.24614429473877],["▁poziv",-13.246146202087402],["背后",-13.246146202087402],["▁нужд",-13.246152877807615],["عاطف",-13.24616813659668],["інг",-13.246171951293944],["zü",-13.246190071105955],["▁MAX",-13.246216773986816],["▁ښو",-13.246231079101562],["▁Гара",-13.246234893798828],["▁fuisse",-13.246241569519045],["мн",-13.246268272399902],["ព័ត៌មាន",-13.2462739944458],["▁Прес",-13.246274948120115],["▁pleca",-13.246283531188965],["▁točk",-13.24628734588623],["ālais",-13.246295928955078],["▁కాల",-13.246305465698242],["▁байгуулалт",-13.24631404876709],["USI",-13.246330261230469],["▁책임",-13.246336936950684],["▁Folge",-13.246367454528809],["▁покажа",-13.246367454528809],["▁Mám",-13.246383666992188],["▁jeni",-13.246384620666504],["▁garis",-13.24642276763916],["▁banii",-13.246434211730955],["+3",-13.246451377868652],["650",-13.246469497680664],["ದೇಶ",-13.24647045135498],["jskog",-13.246472358703612],["оры",-13.246475219726562],["▁creative",-13.246509552001951],["havn",-13.246545791625977],["▁Danes",-13.246548652648926],["▁ಬರೆದ",-13.246572494506836],["panda",-13.246578216552734],["▁dönemde",-13.246599197387695],["▁పెంచ",-13.24661636352539],["យើង",-13.246634483337402],["▁מגיע",-13.246660232543944],["క్రమ",-13.246744155883787],["ciler",-13.246789932250977],["шыл",-13.246796607971191],["ხელ",-13.246806144714355],["26)",-13.246818542480469],["فز",-13.246834754943848],["▁kvant",-13.246834754943848],["systeem
",-13.246856689453123],["▁qiy",-13.246875762939451],["▁እዚህ",-13.246907234191896],["▁туда",-13.24691390991211],["னில்",-13.246919631958008],["▁diş",-13.246922492980955],["▁الطريق",-13.246930122375488],["▁Ondo",-13.246936798095703],["CAD",-13.246942520141602],["▁đổ",-13.246953010559082],["▁Romana",-13.246954917907717],["ទទួល",-13.24696159362793],["lutning",-13.246987342834473],["avant",-13.246991157531738],["375",-13.247004508972168],["reš",-13.24701976776123],["▁bixi",-13.247028350830078],["▁სოფელ",-13.247048377990724],["▁lake",-13.247061729431152],["▁kran",-13.247076988220217],["szé",-13.247093200683594],["▁заборон",-13.24709415435791],["聯盟",-13.247111320495604],["老闆",-13.247116088867188],["蛇",-13.247124671936035],["ಗ್ರ",-13.247126579284668],["핵",-13.247126579284668],["ประธาน",-13.2471284866333],["▁khuyến",-13.247129440307615],["▁գործունեության",-13.247129440307615],["▁көңіл",-13.247130393981934],["▁געווארן",-13.24713134765625],["▁ίσως",-13.247132301330566],["▁Dort",-13.247136116027832],["▁splendid",-13.247142791748049],["▁Ngọc",-13.247146606445312],["▁vērā",-13.247148513793944],["▁Podczas",-13.247149467468262],["▁ਵਿਚਾਰ",-13.247162818908691],["▁תוכלו",-13.247170448303224],["▁місці",-13.247172355651855],["▁trồng",-13.247178077697754],["▁gedacht",-13.247194290161133],["ექტ",-13.247234344482422],["▁Mihai",-13.247237205505373],["▁früher",-13.247237205505373],["הא",-13.247246742248535],["▁compras",-13.247252464294434],["用途",-13.247259140014648],["群体",-13.24726104736328],["száll",-13.247268676757812],["DOS",-13.247271537780762],["▁жанр",-13.247276306152344],["ສອນ",-13.247282028198242],["▁maaliskuuta",-13.247308731079102],["▁diyaar",-13.247309684753418],["▁Щ",-13.247319221496582],["▁מלון",-13.247322082519531],["▁древн",-13.247325897216797],["ئەت",-13.247339248657228],["kawan",-13.247352600097656],["▁abre",-13.247355461120604],["▁გაგ",-13.247377395629885],["▁bohater",-13.247418403625488],["CEL",-13.247430801391602],["▁Grada",-13.247431755065918],["ገብ",-13.247442245483398],["▁बुद्ध",-13.247453689575195],["▁vurdere",-13.247455596923828],["▁разделе",-13.247461318969728],["▁kín",-13.247468948364258],["రియా",-13.247482299804688],["kuju",-13.24748420715332],["▁основна",-13.247486114501951],["▁develop",-13.2475004196167],["▁produz",-13.24752712249756],["▁vastaava",-13.247530937194824],["▁کارهای",-13.247536659240724],["化工",-13.247541427612305],["हम",-13.247546195983888],["eysan",-13.247549057006836],["没想到",-13.247553825378418],["кових",-13.247628211975098],["並不",-13.247653007507324],["▁kedvenc",-13.247669219970703],["မြို့နယ်",-13.247678756713867],["訂房",-13.247705459594728],["▁tűz",-13.247709274291992],["▁sefer",-13.247711181640623],["laufen",-13.247729301452637],["भू",-13.247756004333496],["▁Ժ",-13.247774124145508],["▁turno",-13.24777603149414],["Ven",-13.247790336608888],["cento",-13.247790336608888],["▁κοιν",-13.247804641723633],["▁Belum",-13.247820854187012],["▁verstehen",-13.247830390930176],["غير",-13.247844696044922],["▁витамин",-13.247845649719238],["しっかりと",-13.247849464416504],["23)",-13.247852325439451],["verz",-13.24785327911377],["▁Anderson",-13.247854232788086],["▁साथी",-13.247857093811035],["▁Джон",-13.247867584228516],["▁државата",-13.247896194458008],["▁hack",-13.247904777526855],["▁миний",-13.247928619384766],["historia",-13.247955322265623],["▁ճ",-13.248040199279783],["▁itali",-13.248055458068848],["哈哈",-13.248072624206545],["▁Peli",-13.248149871826172],["▁Tani",-13.248153686523438],["▁Tabel",-13.24817943572998],["▁Def",-13.248188972473145],["▁آي",-13.248188972473145],["▁urdu",-13.2482089
99633787],["tuju",-13.248225212097168],["▁viņam",-13.248234748840332],["valent",-13.248266220092772],["นําเสนอ",-13.248273849487305],["▁වැඩේ",-13.248279571533203],["দী",-13.24828815460205],["▁ANC",-13.24830722808838],["කදී",-13.248326301574709],["atsiya",-13.248347282409668],["▁erinevate",-13.24835205078125],["▁luxus",-13.248357772827148],["וץ",-13.248368263244627],["غرب",-13.248370170593262],["▁교회",-13.24839210510254],["▁القطاع",-13.24840259552002],["▁ተፈ",-13.248412132263184],["▁giảng",-13.248433113098145],["▁ТОВ",-13.248446464538574],["BG",-13.248448371887209],["▁Mambo",-13.248465538024902],["онос",-13.248469352722168],["▁Malta",-13.248469352722168],["▁бей",-13.248477935791016],["▁Lux",-13.248479843139648],["▁කිසි",-13.248506546020508],["▁۱۷",-13.24850845336914],["迟",-13.24852180480957],["盆",-13.248543739318848],["聴",-13.248552322387695],["披露",-13.248560905456545],["検査",-13.24856948852539],["▁inseamna",-13.248574256896973],["▁mencegah",-13.248574256896973],["▁încât",-13.248574256896973],["▁επίπεδο",-13.248574256896973],["▁Кост",-13.248574256896973],["▁павінны",-13.248574256896973],["▁повітря",-13.248574256896973],["▁मौसम",-13.248574256896973],["▁Kjøp",-13.248577117919922],["▁споделя",-13.248586654663086],["▁nettdating",-13.24858856201172],["▁бази",-13.24858856201172],["▁رکھتے",-13.24858856201172],["▁Deshalb",-13.248589515686035],["▁serious",-13.2485990524292],["▁повечето",-13.248602867126465],["▁ಹೇಳಿದರು",-13.248604774475098],["▁kjøpt",-13.248610496520996],["▁mütləqdir",-13.248612403869627],["▁Hersteller",-13.248614311218262],["▁unna",-13.248617172241213],["▁essential",-13.24862003326416],["▁viðskipta",-13.248624801635742],["▁שהיה",-13.24862575531006],["▁කිසිම",-13.248647689819336],["▁ganda",-13.248649597167969],["▁тъ",-13.248668670654297],["▁שמע",-13.248692512512209],["▁plaća",-13.24873161315918],["▁luotto",-13.248761177062988],["▁Αντι",-13.248771667480469],["Uni",-13.24878215789795],["联赛",-13.24878215789795],["おすすめ",-13.248790740966797],["▁chcesz",-13.24881362915039],["▁oriental",-13.248849868774414],["pohja",-13.248858451843262],["लेला",-13.248858451843262],["▁Կապ",-13.248865127563477],["▁주장",-13.248872756958008],["islav",-13.248889923095703],["ແຜນ",-13.248907089233398],["isay",-13.24891471862793],["▁samtale",-13.24892520904541],["▁Jason",-13.248946189880373],["▁heims",-13.248979568481444],["▁Rapport",-13.24898624420166],["▁activitate",-13.248992919921877],["▁parceria",-13.248998641967772],["▁працювати",-13.248998641967772],["▁doldur",-13.249025344848633],["会计",-13.249037742614746],["▁générale",-13.249038696289062],["▁മഹാ",-13.249052047729492],["▁garde",-13.249061584472656],["▁pomi",-13.24909782409668],["აზი",-13.249104499816896],["▁Torino",-13.249106407165527],["ταρ",-13.249130249023438],["▁عادت",-13.24915599822998],["▁μεγαλ",-13.249168395996094],["▁uzņēm",-13.249175071716309],["▁természetes",-13.249180793762209],["ድም",-13.24918270111084],["▁testament",-13.249221801757812],["▁növ",-13.249241828918455],["▁тэд",-13.249241828918455],["▁հանգ",-13.249248504638672],["▁მეგობ",-13.249263763427734],["▁૩",-13.249272346496582],["▁كتب",-13.249276161193848],["rapport",-13.249281883239746],["而不",-13.249302864074709],["▁miklu",-13.249350547790527],["▁läst",-13.249363899230955],["▁باشقا",-13.24936580657959],["▁военно",-13.249380111694336],["▁davet",-13.24940586090088],["▁bereik",-13.249418258666992],["▁informații",-13.249428749084473],["Мар",-13.249465942382812],["ଟର",-13.249465942382812],["žiť",-13.249473571777344],["▁corrente",-13.249478340148926],["▁دوام",-13.24949836730957],["ಂತಹ",-13.249515533447
266],["لىغان",-13.249541282653809],["▁sustava",-13.249566078186035],["▁végén",-13.24957275390625],["▁כלי",-13.249585151672363],["уста",-13.24959659576416],["ხაზ",-13.249605178833008],["rili",-13.249608039855955],["▁gjin",-13.249608039855955],["▁Danh",-13.249613761901855],["ԵՎ",-13.249614715576172],["再生",-13.249632835388184],["τώ",-13.2496337890625],["▁tudni",-13.249634742736816],["ሳይ",-13.249655723571776],["ക്കേ",-13.2496919631958],["struktūr",-13.249696731567385],["ETT",-13.24970531463623],["▁табу",-13.249736785888672],["▁prolong",-13.249750137329102],["▁BAL",-13.249753952026367],["▁individuell",-13.249798774719238],["▁Пов",-13.249801635742188],["▁πρωτο",-13.249805450439451],["gjort",-13.249836921691896],["▁flux",-13.249842643737791],["▁pasir",-13.249849319458008],["levy",-13.24985122680664],["有名",-13.24985122680664],["▁avia",-13.249868392944336],["strum",-13.2498779296875],["▁коса",-13.249896049499512],["īvs",-13.249906539916992],["▁Potreb",-13.249914169311523],["لدى",-13.249930381774902],["▁treat",-13.249960899353027],["▁mast",-13.24997329711914],["cute",-13.24997615814209],["versió",-13.249987602233888],["聯絡",-13.249995231628418],["biztosítás",-13.249998092651367],["▁bevat",-13.250005722045898],["▁نالي",-13.25001335144043],["▁Wszystkie",-13.25002098083496],["▁możliwe",-13.25002098083496],["▁строительства",-13.25002098083496],["▁ബിജെപി",-13.25002098083496],["▁physique",-13.250021934509276],["▁Інтернет",-13.250021934509276],["▁atbalsta",-13.250022888183594],["▁frumoase",-13.250025749206545],["▁ಸಿದ್ಧ",-13.250029563903809],["▁πλευρά",-13.250039100646973],["▁კაცი",-13.250041961669922],["▁tālāk",-13.250044822692873],["▁кухня",-13.250057220458984],["有力",-13.250060081481934],["▁stör",-13.25006103515625],["▁KUALA",-13.250062942504885],["▁Güzel",-13.250078201293944],["produk",-13.250093460083008],["lupa",-13.250097274780272],["▁бывает",-13.250100135803224],["INS",-13.250103950500488],["▁רחב",-13.25010871887207],["▁ζω",-13.250115394592283],["▁zrak",-13.250126838684082],["▁dibandingkan",-13.250134468078612],["▁містить",-13.25014877319336],["▁아니다",-13.250155448913574],["▁ఎన్",-13.250157356262209],["рған",-13.250167846679688],["▁čisti",-13.250176429748535],["▁domestic",-13.250192642211914],["▁robiť",-13.250205039978027],["apport",-13.250237464904783],["▁posjet",-13.250238418579102],["▁రామ్",-13.250258445739746],["▁随着",-13.250272750854492],["▁ROM",-13.250277519226074],["дзіце",-13.250300407409668],["یزی",-13.250324249267578],["▁Rail",-13.250324249267578],["ัย",-13.250356674194336],["urnar",-13.250368118286133],["▁თავისუფლება",-13.250368118286133],["▁දිග",-13.25037670135498],["▁пераклад",-13.250386238098145],["ONS",-13.250389099121094],["মো",-13.250401496887209],["סון",-13.25041389465332],["▁poem",-13.250414848327637],["ရတာ",-13.250423431396484],["▁авиа",-13.250438690185549],["▁ຕ້ອງ",-13.250470161437988],["▁Lingua",-13.250495910644531],["ırı",-13.250500679016112],["riy",-13.250505447387695],["▁ekzistas",-13.250510215759276],["▁kesin",-13.250510215759276],["▁votar",-13.25051212310791],["▁الأمم",-13.25051212310791],["▁тыя",-13.25051498413086],["▁parima",-13.250527381896973],["▁مائ",-13.250527381896973],["▁ٺ",-13.250534057617188],["ılacak",-13.250542640686035],["▁صادرات",-13.250553131103516],["▁گام",-13.250555992126465],["call",-13.250557899475098],["ევა",-13.250645637512209],["▁إليها",-13.250666618347168],["▁Taiwan",-13.250679969787598],["▁vix",-13.25069808959961],["נייה",-13.250715255737305],["▁(60",-13.25074863433838],["▁უცხო",-13.250761985778809],["sagt",-13.250765800476074],["▁darbus",-13.25076770782
4709],["お問い合わせ",-13.250795364379885],["聞き",-13.250795364379885],["安定",-13.250802040100098],["▁leida",-13.250809669494627],["▁suasana",-13.250824928283691],["▁mentres",-13.250829696655272],["bug",-13.250849723815918],["▁momente",-13.25086498260498],["pone",-13.250865936279297],["▁ouder",-13.250895500183104],["▁فهي",-13.25092601776123],["▁ዐ",-13.250946044921877],["▁jazdy",-13.250947952270508],["▁mandar",-13.250972747802734],["េច",-13.250988960266112],["pici",-13.25100326538086],["ifier",-13.251004219055176],["▁qaab",-13.251004219055176],["▁minuts",-13.251006126403809],["▁aromat",-13.25100803375244],["ဓ",-13.25102996826172],["在這裡",-13.251049995422363],["▁בעיר",-13.251060485839844],["kaasi",-13.25113296508789],["一段时间",-13.251154899597168],["த்துக்",-13.251173973083496],["köy",-13.25120449066162],["▁zadnje",-13.251220703125],["▁krooni",-13.251222610473633],["ndawo",-13.251243591308594],["假期",-13.251246452331545],["จํากัด",-13.251273155212402],["▁Barbie",-13.251298904418944],["▁Gora",-13.251315116882324],["每月",-13.251319885253906],["▁έκ",-13.251343727111816],["▁আয়",-13.251349449157717],["▁bazén",-13.25137424468994],["围绕",-13.25137710571289],["▁നടത്തി",-13.25137996673584],["สถาบัน",-13.251388549804688],["▁организма",-13.25139331817627],["练",-13.25139331817627],["▁érez",-13.251401901245115],["상을",-13.25140380859375],["▁तत्र",-13.2514066696167],["dán",-13.251415252685549],["ūl",-13.251419067382812],["▁mərkəzi",-13.251425743103027],["အချိန်",-13.251428604125977],["▁napaka",-13.25143337249756],["▁돈",-13.25143337249756],["▁animals",-13.251436233520508],["▁2017:",-13.251441955566406],["諮詢",-13.251444816589355],["▁landoj",-13.251461029052734],["เชื้อ",-13.25146770477295],["ปรากฏ",-13.251468658447266],["ଯାତ୍ରା",-13.251470565795898],["▁ανάγκη",-13.251470565795898],["▁δήλωσε",-13.251470565795898],["▁μόλις",-13.251470565795898],["▁όνομα",-13.251470565795898],["▁ପଟ୍ଟନାୟକ",-13.251470565795898],["▁көңүл",-13.251471519470217],["▁esetében",-13.251472473144531],["▁keuangan",-13.251472473144531],["▁ਮੌਕੇ",-13.251472473144531],["▁خوبصورت",-13.251474380493164],["คณะกรรมการ",-13.25147533416748],["▁хэргийн",-13.251500129699709],["印刷膠帶",-13.251500129699709],["nieuw",-13.251505851745604],["kirjandus",-13.251511573791504],["▁الفاظ",-13.251511573791504],["▁تجاوز",-13.251511573791504],["▁scopo",-13.251520156860352],["▁səfər",-13.25152587890625],["เสื้อผ้า",-13.25153636932373],["활동",-13.251537322998049],["▁Chap",-13.251538276672363],["educació",-13.251541137695312],["▁alerta",-13.251543998718262],["▁행복",-13.251551628112791],["ГЕ",-13.251553535461426],["▁עסק",-13.25155544281006],["▁nəzərə",-13.251562118530272],["täht",-13.25156307220459],["น้ําหนัก",-13.25156307220459],["mord",-13.251566886901855],["▁መልካም",-13.251572608947754],["ringan",-13.25158405303955],["▁njihova",-13.25160312652588],["▁vztah",-13.251605033874512],["▁szy",-13.251614570617676],["▁акцент",-13.25161838531494],["▁Karlov",-13.251622200012209],["▁хэрэгжүүлэх",-13.251633644104004],["▁готово",-13.251639366149902],["▁Барлық",-13.251641273498535],["▁これは",-13.251643180847168],["gjöf",-13.251646041870115],["▁Ilmu",-13.251646995544434],["ляў",-13.251662254333496],["▁milijon",-13.251665115356444],["▁худ",-13.251667976379396],["▁Curs",-13.251696586608888],["▁nhắc",-13.25169849395752],["▁2050",-13.251705169677734],["▁mellora",-13.251716613769531],["NAT",-13.251723289489746],["▁вызвал",-13.2517671585083],["给了",-13.251771926879885],["regler",-13.251788139343262],["密切",-13.251797676086426],["▁nylig",-13.251808166503906],["▁کف",-13.251808166503906],["մե",-13.251809120178224]
,["mando",-13.251816749572754],["èh",-13.251829147338867],["ेस्ट",-13.251829147338867],["ეულ",-13.2518310546875],["▁TEM",-13.25183391571045],["ствовать",-13.251856803894045],["▁کروڑ",-13.25186538696289],["布局",-13.25186538696289],["▁Karta",-13.251872062683104],["▁დამა",-13.251880645751951],["▁удзел",-13.251930236816406],["▁Ван",-13.251945495605469],["ٻ",-13.251946449279783],["▁Драган",-13.251946449279783],["轻松",-13.251959800720217],["लाल",-13.251960754394531],["hoek",-13.252016067504885],["?????",-13.252035140991213],["▁нашег",-13.252035140991213],["žiti",-13.252038955688477],["▁actitud",-13.252041816711426],["▁edusta",-13.252042770385742],["ጥፋት",-13.25204372406006],["hovo",-13.25204849243164],["▁Ала",-13.25207233428955],["numero",-13.252083778381348],["बो",-13.252086639404297],["على",-13.252087593078612],["▁предмети",-13.252095222473145],["▁ગીત",-13.25211238861084],["▁শি",-13.252132415771484],["pewa",-13.252141952514648],["ಗಾರ",-13.252157211303713],["grein",-13.252161979675291],["▁virág",-13.252166748046877],["Yehova",-13.252177238464355],["sports",-13.252184867858888],["▁finom",-13.252188682556152],["▁נייעס",-13.252198219299316],["▁opas",-13.252202033996582],["▁ബ്ര",-13.252203941345217],["▁غريب",-13.252236366271973],["▁પ્રો",-13.252239227294922],["▁thème",-13.252243995666504],["ימו",-13.25224494934082],["的角度",-13.2522554397583],["ദ്ദ",-13.252256393432615],["▁efe",-13.252300262451172],["fæl",-13.25230598449707],["▁палі",-13.252317428588867],["▁wangu",-13.25233268737793],["гір",-13.25235366821289],["▁današnj",-13.252366065979004],["ütt",-13.252373695373535],["ération",-13.252387046813965],["▁новым",-13.25242805480957],["▁Bibliotek",-13.252443313598633],["▁Allahu",-13.25245761871338],["▁Lia",-13.252471923828123],["لک",-13.252483367919922],["▁keskkonna",-13.252486228942873],["СТАН",-13.252495765686035],["irkan",-13.252496719360352],["məyi",-13.252497673034668],["ACA",-13.252518653869627],["乘客",-13.252528190612791],["▁Nový",-13.252533912658691],["▁Minä",-13.252534866333008],["дарга",-13.252538681030272],["తను",-13.252558708190918],["▁engelska",-13.252573013305664],["▁విజయ",-13.252583503723145],["nine",-13.25259017944336],["ακού",-13.252591133117676],["діл",-13.252613067626951],["▁formada",-13.252657890319824],["ുന്നത",-13.252660751342772],["▁Bazar",-13.252665519714355],["▁머리",-13.252683639526367],["ستخدام",-13.252715110778809],["აფრ",-13.252716064453123],["▁aduc",-13.252728462219238],["▁plaça",-13.252735137939451],["▁träning",-13.252752304077148],["▁ensimmäis",-13.252760887145996],["วร",-13.252801895141602],["▁материалов",-13.252829551696776],["நீ",-13.25283432006836],["ответ",-13.252838134765623],["▁айл",-13.252842903137209],["▁valgus",-13.252862930297852],["▁bahagia",-13.25287628173828],["辛苦",-13.252881050109863],["毁",-13.25288200378418],["salt",-13.252883911132812],["▁creme",-13.25289249420166],["▁тады",-13.25290298461914],["샵",-13.252909660339355],["mell",-13.252911567687988],["▁సినిమాలో",-13.252912521362305],["ґ",-13.252922058105469],["▁József",-13.252922058105469],["▁vững",-13.252922058105469],["▁ιδιαίτερα",-13.252922058105469],["▁गंभीर",-13.252922058105469],["▁দক্ষিণ",-13.252922058105469],["▁dweud",-13.252923011779783],["▁rozwiązania",-13.252923011779783],["▁vaihtoehto",-13.252923011779783],["▁مضبوط",-13.252923011779783],["▁يۈز",-13.252923011779783],["▁सोमबार",-13.252923965454102],["▁ಗಂಟೆ",-13.252923965454102],["▁jeweils",-13.252925872802734],["▁февраля",-13.252925872802734],["▁kaikkea",-13.2529296875],["▁가슴",-13.2529296875],["▁uudelleen",-13.252933502197266],["▁لع",-13.252935409545898],
["▁estudiantes",-13.252939224243164],["ხან",-13.252943992614746],["▁مهارت",-13.252944946289062],["cero",-13.252946853637695],["▁perusteella",-13.252947807312012],["قانون",-13.25295352935791],["เชิง",-13.25295352935791],["总理",-13.252959251403809],["航班",-13.25296115875244],["▁биздин",-13.252972602844238],["サン",-13.252974510192873],["▁மாதிரி",-13.25297737121582],["▁Lice",-13.252992630004885],["▁ООО",-13.252997398376465],["▁चंद्र",-13.253005981445312],["▁написал",-13.253012657165527],["▁Telegram",-13.253026962280272],["▁ഉണ്ട്",-13.253043174743652],["▁המידע",-13.253059387207031],["▁ചോദ്യം",-13.25306224822998],["较大",-13.25306224822998],["▁activiteiten",-13.253096580505373],["▁helmikuuta",-13.253096580505373],["رضا",-13.25313663482666],["▁горада",-13.253138542175291],["▁گوشت",-13.25316333770752],["েজ",-13.253217697143556],["engan",-13.25322437286377],["غن",-13.253232955932615],["▁CPU",-13.253238677978516],["▁Karen",-13.253238677978516],["ాడ",-13.25324821472168],["ИИ",-13.253250122070312],["выя",-13.253265380859377],["依照",-13.253301620483398],["ΕΝ",-13.253307342529297],["ಕ್ಷಣ",-13.253314018249512],["ejte",-13.253326416015623],["▁sunku",-13.253326416015623],["magn",-13.25333023071289],["▁анх",-13.253337860107422],["▁Роб",-13.253351211547852],["▁administration",-13.253378868103027],["スク",-13.253382682800291],["zaji",-13.25338363647461],["▁խորհրդի",-13.25338649749756],["▁վերջ",-13.253388404846191],["უბრ",-13.25340175628662],["сах",-13.25341796875],["سەن",-13.253423690795898],["klassi",-13.253425598144531],["έρα",-13.253426551818848],["▁ظ",-13.253439903259276],["▁jiyana",-13.253442764282228],["▁starta",-13.253450393676758],["arba",-13.25345230102539],["messig",-13.253478050231934],["セン",-13.253482818603516],["▁propostas",-13.253496170043944],["ravi",-13.253534317016602],["פול",-13.253558158874512],["▁თქვენს",-13.253559112548828],["ಲಿಲ್ಲ",-13.253580093383787],["▁będziemy",-13.253583908081056],["▁tanım",-13.253595352172852],["▁310",-13.253617286682127],["تحليل",-13.253621101379396],["beth",-13.253628730773926],["▁aiški",-13.253633499145508],["▁намерен",-13.25364589691162],["▁Kaca",-13.253646850585938],["▁휴",-13.253654479980469],["Mere",-13.253656387329102],["▁34.",-13.253657341003418],["▁mãos",-13.253674507141112],["なん",-13.253677368164062],["▁ПРАВ",-13.25367832183838],["▁delovanje",-13.25368881225586],["▁aliquo",-13.253700256347656],["▁مقامات",-13.253705978393556],["kanten",-13.253716468811035],["▁rys",-13.253731727600098],["ственно",-13.253735542297363],["lula",-13.253747940063477],["▁เด็ก",-13.253753662109377],["▁Alte",-13.253786087036133],["▁Vergi",-13.253793716430664],["▁2004,",-13.253799438476562],["▁operativo",-13.253801345825195],["▁Manda",-13.253802299499512],["λους",-13.25381565093994],["լո",-13.253820419311523],["▁appare",-13.253832817077637],["▁เว็บ",-13.253843307495115],["▁يک",-13.2538480758667],["▁lë",-13.253851890563965],["▁punts",-13.25385570526123],["ΤΗ",-13.253860473632812],["社会的",-13.253881454467772],["▁භාර",-13.253884315490724],["ńskiej",-13.253887176513672],["▁Private",-13.253891944885254],["▁суп",-13.253914833068848],["▁Gav",-13.25394058227539],["ฤ",-13.25396728515625],["▁objem",-13.25397491455078],["ความรัก",-13.253984451293944],["▁saisi",-13.253984451293944],["ცენ",-13.253987312316896],["▁zic",-13.253994941711426],["▁Clear",-13.253995895385742],["▁malalt",-13.254009246826172],["جت",-13.254016876220703],["വസ്",-13.25401782989502],["Ev",-13.254030227661133],["կային",-13.254043579101562],["▁dimostra",-13.254053115844728],["▁aktar",-13.25405979156494],["ntibus",-13.25408172607422],["▁
kontaktu",-13.254084587097168],["ซุ",-13.254127502441406],["▁beden",-13.254135131835938],["pathi",-13.254146575927734],["▁pecca",-13.254151344299316],["▁компанија",-13.254155158996582],["▁কোটি",-13.254158020019531],["▁உன்",-13.25417709350586],["▁mjesec",-13.25418472290039],["משך",-13.254194259643556],["leşme",-13.254199981689451],["▁zaudē",-13.254206657409668],["أطفال",-13.254210472106934],["१२",-13.254213333129885],["▁ஆகிய",-13.254222869873049],["اوی",-13.254228591918944],["jóð",-13.254243850708008],["▁сделал",-13.254244804382324],["rack",-13.254264831542969],["▁propra",-13.254267692565918],["сели",-13.254294395446776],["▁վստահ",-13.254307746887209],["heri",-13.254313468933104],["▁மருத்துவ",-13.254345893859863],["妥",-13.25434684753418],["▁loku",-13.254348754882812],["敷",-13.254351615905762],["▁하나님",-13.254366874694824],["ដំបូង",-13.254374504089355],["▁ymchwil",-13.254374504089355],["컵",-13.254374504089355],["▁mengurangi",-13.254375457763672],["▁Използва",-13.254375457763672],["▁законопроект",-13.254375457763672],["▁неабходна",-13.254375457763672],["▁לקרוא",-13.254375457763672],["▁مراکز",-13.254375457763672],["▁நடிகை",-13.254375457763672],["▁uređaj",-13.254376411437988],["▁проверки",-13.254377365112305],["▁አስተዳደር",-13.254379272460938],["▁PornHub",-13.254380226135254],["▁wants",-13.254380226135254],["▁süd",-13.254383087158203],["▁نویسنده",-13.254383087158203],["▁matériel",-13.25438404083252],["▁Hussain",-13.254385948181152],["τός",-13.254388809204102],["▁هوشمند",-13.254398345947266],["▁ඉස්සර",-13.254400253295898],["истички",-13.254405975341797],["appareil",-13.254411697387695],["▁امسال",-13.254411697387695],["oppgave",-13.25441551208496],["▁ترجمه",-13.254416465759276],["గ్రా",-13.254417419433594],["▁כתובת",-13.25442886352539],["▁asioita",-13.254429817199709],["▁ngunit",-13.254436492919922],["▁عائد",-13.254440307617188],["▁erotiske",-13.25444221496582],["▁vivienda",-13.25444221496582],["▁بہترین",-13.254448890686035],["▁वहीं",-13.254453659057615],["▁имоти",-13.254457473754885],["▁Spanien",-13.254462242126465],["▁හමුදා",-13.25446319580078],["▁Bac",-13.254465103149414],["▁qoru",-13.254470825195312],["▁capacité",-13.254474639892578],["▁պաշտոնական",-13.25447940826416],["▁പത്ര",-13.254494667053224],["▁nascetur",-13.25450038909912],["グループ",-13.254509925842283],["▁sols",-13.254535675048828],["عکس",-13.254549980163574],["रेट",-13.254551887512209],["▁Fokus",-13.2545804977417],["▁Freude",-13.254595756530762],["▁reglament",-13.254599571228027],["▁cikgu",-13.254624366760254],["▁provocar",-13.254650115966797],["▁franca",-13.254681587219238],["מרו",-13.254683494567873],["صنف",-13.254693031311035],["▁kunden",-13.254695892333984],["▁Араб",-13.2547025680542],["tenant",-13.254706382751465],["▁okna",-13.254706382751465],["▁الأر",-13.25475025177002],["акт",-13.254765510559082],["ပို႔",-13.25477695465088],["▁gisa",-13.25478744506836],["ивает",-13.254790306091309],["▁ಕಲ",-13.254794120788574],["▁الصورة",-13.254817008972168],["عطي",-13.254826545715332],["▁placer",-13.254834175109863],["▁skillnad",-13.254843711853027],["▁земјата",-13.25484848022461],["▁პოლიტიკური",-13.25487995147705],["▁Николай",-13.254891395568848],["▁වෙන්",-13.254924774169922],["heed",-13.25497055053711],["brio",-13.254971504211426],["годишни",-13.25498104095459],["▁2009-",-13.25498390197754],["카페",-13.25498390197754],["▁ဧ",-13.255005836486816],["ბრა",-13.255035400390623],["用了",-13.25506591796875],["▁MINI",-13.25507640838623],["იპ",-13.255077362060549],["▁වීමට",-13.255084991455078],["seo",-13.255087852478027],["vke",-13.255106925964355],["]]",-13.25
5120277404783],["తన",-13.255121231079102],["▁Besi",-13.255126953125],["stått",-13.255173683166504],["▁ساد",-13.255186080932615],["search",-13.255199432373049],["▁qala",-13.255199432373049],["ωμένη",-13.255204200744627],["▁Toimi",-13.255205154418944],["▁repres",-13.255215644836426],["▁Baix",-13.25521755218506],["BANG",-13.255231857299805],["▁voorkomen",-13.255242347717283],["▁dollars",-13.255244255065918],["▁novamente",-13.255248069763184],["তের",-13.255250930786133],["상품",-13.255253791809082],["▁Angebote",-13.255273818969728],["elako",-13.255294799804688],["▁megy",-13.25529670715332],["▁vinte",-13.255298614501951],["▁flet",-13.255304336547852],["▁добие",-13.255316734313965],["▁адчу",-13.255335807800291],["▁ziņo",-13.255340576171877],["דומה",-13.255355834960938],["也没",-13.255376815795898],["▁valgte",-13.255404472351074],["▁Nikol",-13.255411148071287],["මෝ",-13.255441665649414],["▁vân",-13.255451202392578],["ητική",-13.255463600158691],["▁කාලේ",-13.255468368530272],["▁wijze",-13.255486488342283],["ກົມ",-13.255500793457031],["ర్స్",-13.25551414489746],["ฟิ",-13.255522727966309],["的关系",-13.25553035736084],["ireachta",-13.255537033081056],["▁pell",-13.255547523498535],["▁ingyenes",-13.255550384521484],["▁shughuli",-13.2555513381958],["▁орталық",-13.255559921264648],["▁çoğu",-13.255560874938965],["▁inlägget",-13.255584716796877],["ነሱ",-13.255586624145508],["Date",-13.255587577819824],["▁зборува",-13.255589485168455],["رث",-13.255592346191406],["▁pārvald",-13.255624771118164],["▁គួរ",-13.255627632141112],["▁בעת",-13.255644798278809],["▁മാത്രമല്ല",-13.255647659301758],["▁anzi",-13.255651473999023],["▁لیک",-13.255672454833984],["▁دام",-13.255681037902832],["▁Hogy",-13.255681991577148],["بح",-13.25568389892578],["▁etorri",-13.255692481994627],["▁igång",-13.25575351715088],["uosius",-13.255760192871094],["নির",-13.255770683288574],["вера",-13.25579833984375],["畅",-13.255799293518066],["▁Gesch",-13.255820274353027],["烏",-13.255821228027344],["ແນວ",-13.255825996398926],["▁korábban",-13.255830764770508],["▁pewnością",-13.255830764770508],["▁τελευταίο",-13.255830764770508],["▁बॉलीवुड",-13.255830764770508],["▁শিক্ষক",-13.255830764770508],["▁lejlighed",-13.25583267211914],["▁वर्तमान",-13.255834579467772],["▁garanci",-13.25583553314209],["▁ഒക്കെ",-13.255836486816406],["▁powinna",-13.255840301513672],["▁ஒரே",-13.255840301513672],["これが",-13.25584602355957],["▁бөлім",-13.255854606628418],["▁ගෑ",-13.255861282348633],["▁наконец",-13.255864143371582],["▁ljep",-13.255868911743164],["働き",-13.255873680114746],["misestä",-13.255882263183594],["▁በአዲስ",-13.255884170532228],["▁제목",-13.255885124206545],["▁confiance",-13.255888938903809],["▁kände",-13.255892753601074],["ರಂದು",-13.255898475646973],["人民政府",-13.255899429321287],["▁Gjermani",-13.255915641784668],["سیم",-13.255929946899414],["▁halb",-13.255932807922363],["▁გავა",-13.25593376159668],["▁громад",-13.255945205688477],["▁znanje",-13.255948066711426],["▁kiệm",-13.25595474243164],["▁प्रधान",-13.255985260009766],["▁hú",-13.255990028381348],["rone",-13.255990982055664],["rrera",-13.256000518798828],["▁кары",-13.256003379821776],["▁जागा",-13.256006240844728],["▁छात्र",-13.256007194519045],["▁adag",-13.256021499633787],["▁መል",-13.256032943725586],["▁busy",-13.256049156188965],["▁tänä",-13.256057739257812],["▁participação",-13.256058692932127],["▁lipsa",-13.25607967376709],["มัก",-13.25608730316162],["▁ல",-13.256108283996582],["▁않아",-13.256123542785645],["▁కలిగి",-13.25612449645996],["љена",-13.256131172180176],["▁წამო",-13.256136894226074],["▁Máis",-13.256158828735352],["
▁halal",-13.256171226501465],["ających",-13.256175994873049],["ξα",-13.256176948547363],["▁rannsókn",-13.25617790222168],["▁ţin",-13.256178855895996],["valle",-13.25618839263916],["▁slovenske",-13.256192207336426],["▁diritti",-13.256196975708008],["FK",-13.256214141845703],["▁tutan",-13.25622844696045],["kø",-13.256229400634766],["Kami",-13.256245613098145],["ζη",-13.256267547607422],["▁کنندگان",-13.256290435791016],["▁verilir",-13.256293296813965],["▁Bax",-13.256340026855469],["hitra",-13.256369590759276],["▁Ore",-13.256403923034668],["kvo",-13.2564058303833],["▁procedimento",-13.25640869140625],["▁dijete",-13.256413459777832],["tuste",-13.256451606750488],["story",-13.256465911865234],["多種",-13.256471633911133],["লার",-13.25652027130127],["▁resolución",-13.25653076171875],["▁Provincia",-13.256532669067385],["ನ್ನೂ",-13.256546020507812],["دات",-13.256567001342772],["abay",-13.25657844543457],["iral",-13.256582260131836],["▁今回は",-13.256591796875],["onnan",-13.25660800933838],["▁سرور",-13.25661277770996],["▁opção",-13.25662612915039],["▁sør",-13.25663948059082],["nisse",-13.256691932678224],["ዚያ",-13.256695747375488],["▁أب",-13.256714820861816],["▁քաղաքի",-13.256726264953612],["VÉ",-13.256759643554688],["грэ",-13.256765365600586],["▁eier",-13.2567720413208],["▁ලොව",-13.25677490234375],["▁معرض",-13.25678539276123],["ssima",-13.256791114807127],["+++",-13.25681972503662],["▁의원",-13.256848335266112],["ۇس",-13.256855010986328],["▁nemt",-13.256884574890137],["РАН",-13.25688648223877],["▁sekci",-13.256896018981934],["▁Patienten",-13.25690746307373],["▁ପୋ",-13.25694179534912],["σχε",-13.256953239440918],["▁Danas",-13.257010459899902],["▁aloqa",-13.25704288482666],["ຂໍ",-13.257067680358888],["になっています",-13.257072448730469],["▁penya",-13.25707721710205],["ÜR",-13.257089614868164],["hodné",-13.257092475891112],["▁Hora",-13.257131576538086],["▁voordat",-13.257145881652832],["▁kahvi",-13.257163047790527],["▁gözü",-13.257165908813477],["stream",-13.257176399230955],["ខាង",-13.257200241088867],["致力于",-13.257211685180664],["玛",-13.257224082946776],["▁vigila",-13.25722885131836],["ismul",-13.25723934173584],["允许",-13.257246971130373],["▁гаспадар",-13.25724983215332],["▁پلی",-13.25725269317627],["▁व्यवसायी",-13.25725555419922],["niveau",-13.25727653503418],["ศักดิ์",-13.257287979125977],["▁Mallorca",-13.257288932800291],["▁মঙ্গলবার",-13.257288932800291],["▁মার্চ",-13.257288932800291],["▁වැරදි",-13.257288932800291],["▁ვიდრე",-13.257288932800291],["რეგიონ",-13.257290840148926],["▁ਸੱਚ",-13.257290840148926],["▁كەلگەن",-13.257291793823242],["▁ٹرمپ",-13.25729274749756],["▁Matukio",-13.257293701171877],["任務",-13.257294654846191],["▁Jonathan",-13.257295608520508],["▁hubungi",-13.25729751586914],["▁völlig",-13.25729751586914],["▁tärkei",-13.257301330566406],["▁korku",-13.257315635681152],["▁ప్రపంచ",-13.25732135772705],["ולים",-13.257323265075684],["▁Richtung",-13.25733470916748],["▁өгч",-13.257341384887695],["▁പിടി",-13.257343292236328],["▁קודם",-13.257362365722656],["цит",-13.257367134094238],["itou",-13.25737762451172],["▁udało",-13.257381439208984],["▁objeto",-13.257404327392578],["▁mito",-13.257411003112791],["65)",-13.257413864135742],["▁síðar",-13.257420539855955],["▁Leer",-13.257429122924805],["▁후보",-13.25743579864502],["quez",-13.257436752319336],["нава",-13.257475852966309],["kirinê",-13.25749683380127],["▁35%",-13.257519721984863],["คลอง",-13.257532119750977],["όλ",-13.25760555267334],["▁доставка",-13.257606506347656],["\\\\'",-13.257609367370604],["्टर",-13.257620811462402],["၀ါ",-13.257630348205566],["▁vb",
-13.2576322555542],["▁missa",-13.257640838623049],["▁quantidade",-13.257641792297363],["▁Weiter",-13.25764274597168],["▁engang",-13.25764274597168],["▁markaana",-13.257647514343262],["▁ശന",-13.257651329040527],["ાની",-13.257659912109377],["▁Tug",-13.257660865783691],["▁അല്",-13.257699966430664],["তন",-13.257702827453612],["চি",-13.257708549499512],["▁noir",-13.25771141052246],["的学生",-13.257718086242676],["▁подумал",-13.257720947265623],["▁수상",-13.257728576660156],["▁fotografije",-13.257737159729004],["▁Unión",-13.257770538330078],["▁인정",-13.25778102874756],["ക്രമ",-13.257789611816406],["ከት",-13.25779628753662],["მოდ",-13.257817268371582],["യ്ക്കു",-13.257834434509276],["قية",-13.257841110229492],["▁hạt",-13.25788402557373],["▁হল",-13.257884979248049],["▁ვებ",-13.257891654968262],["مرد",-13.257892608642578],["▁טיפ",-13.257912635803224],["▁obrat",-13.257917404174805],["▁storlek",-13.25791835784912],["▁δική",-13.257928848266602],["▁rre",-13.2579345703125],["▁алуға",-13.257938385009766],["▁Fakultet",-13.257943153381348],["▁গো",-13.25800609588623],["公式",-13.25803279876709],["wini",-13.25804042816162],["▁.........",-13.258048057556152],["міш",-13.258051872253418],["▁dras",-13.25805377960205],["▁baskı",-13.258068084716797],["▁kunci",-13.258087158203123],["väline",-13.258091926574709],["▁Talent",-13.258101463317873],["buy",-13.258105278015137],["▁wielki",-13.2581148147583],["ଢ",-13.258122444152832],["▁ပညာ",-13.258132934570312],["tax",-13.258173942565918],["rush",-13.25817584991455],["▁Franci",-13.258191108703612],["▁Тор",-13.258191108703612],["ურა",-13.258216857910156],["ляють",-13.25826644897461],["ვება",-13.258288383483888],["▁olanlar",-13.258288383483888],["名的",-13.258298873901367],["▁Omega",-13.258305549621582],["გის",-13.258315086364746],["daftar",-13.258319854736328],["▁åren",-13.258346557617188],["▁necessidade",-13.258349418640137],["દર",-13.25835132598877],["იდე",-13.25835132598877],["▁Entrada",-13.25835418701172],["▁ihtiyacı",-13.258360862731934],["229",-13.258371353149414],["ίστε",-13.258402824401855],["้ว",-13.258402824401855],["▁professionnelle",-13.258414268493652],["cane",-13.258435249328612],["عوا",-13.258500099182127],["辦法",-13.258508682250977],["▁अंतर",-13.258517265319824],["▁tanong",-13.258520126342772],["ัว",-13.258529663085938],["▁DOS",-13.258537292480469],["▁sungguh",-13.258543014526367],["▁गि",-13.258548736572266],["ம்மா",-13.258554458618164],["▁comunità",-13.258560180664062],["ുമെന്ന",-13.258573532104492],["▁255",-13.25857925415039],["▁urang",-13.258600234985352],["σέ",-13.258602142333984],["▁سبيل",-13.258625984191896],["خصوص",-13.258633613586426],["▁дзяржаўны",-13.258637428283691],["ತೀ",-13.258642196655272],["▁Jego",-13.258644104003906],["บริหาร",-13.258646965026855],["буз",-13.258654594421388],["▁makaka",-13.258660316467283],["batur",-13.258661270141602],["▁alerg",-13.258707046508787],["害怕",-13.258722305297852],["差不多",-13.25874137878418],["▁návštev",-13.258746147155762],["ລະບົບ",-13.258748054504396],["ሔ",-13.258748054504396],["▁nyhedsbrev",-13.258749008178713],["▁शुक्रवार",-13.258749008178713],["▁ຕິດຕໍ່ພວກເຮົາ",-13.258749008178713],["▁ممتاز",-13.258749961853027],["hova",-13.258752822875977],["▁widoczny",-13.258753776550291],["▁Palvelu",-13.258756637573242],["corp",-13.258758544921877],["▁1992.",-13.258767127990724],["▁kullanılan",-13.258776664733888],["▁đèn",-13.258776664733888],["▁uker",-13.258781433105469],["▁beginning",-13.258783340454102],["สาน",-13.258795738220217],["icí",-13.258796691894531],["▁प्रचार",-13.25879955291748],["▁स्थानमा",-13.25881290435791],["▁chợ",-13.25
8817672729492],["▁szü",-13.258817672729492],["不大",-13.258838653564451],["▁רבה",-13.258848190307615],["▁தள",-13.258880615234377],["▁منتظر",-13.25888729095459],["▁٢",-13.258893966674805],["▁معاملات",-13.258907318115234],["▁flow",-13.258917808532717],["▁adatai",-13.258935928344728],["разум",-13.258949279785156],["ଧି",-13.258959770202637],["▁meile",-13.258988380432127],["χθεί",-13.258995056152344],["géné",-13.259021759033203],["കളിൽ",-13.259031295776367],["нський",-13.259032249450684],["▁патриот",-13.259050369262695],["▁прыклад",-13.259065628051758],["▁têk",-13.259093284606934],["iškų",-13.2590970993042],["▁atua",-13.259108543395996],["▁aconteceu",-13.259130477905272],["Mit",-13.25915241241455],["▁දිනය",-13.25916862487793],["▁Меди",-13.25917911529541],["▁общей",-13.25918674468994],["arrêt",-13.259188652038574],["▁shum",-13.259191513061523],["▁cinque",-13.259196281433104],["iều",-13.259221076965332],["linis",-13.259238243103027],["▁зале",-13.259239196777344],["▁есепте",-13.259255409240724],["maailma",-13.259269714355469],["mpaa",-13.25927734375],["▁ума",-13.25929069519043],["ęcia",-13.2593412399292],["办公",-13.25935173034668],["zük",-13.259358406066896],["▁постој",-13.25937557220459],["ებაში",-13.25937843322754],["▁ашық",-13.259395599365234],["Бел",-13.2593994140625],["ट्टी",-13.259410858154297],["път",-13.259432792663574],["上涨",-13.259440422058104],["桃園",-13.259443283081056],["▁nëpër",-13.259464263916016],["ológia",-13.259516716003418],["ஜா",-13.259517669677734],["ίζοντας",-13.259526252746582],["▁yrittä",-13.259546279907228],["▁Nico",-13.259549140930176],["seksuel",-13.25955295562744],["ကေန",-13.25957202911377],["έχει",-13.259573936462402],["▁časov",-13.259575843811035],["▁ending",-13.259596824645996],["σσα",-13.259598731994627],["លក់",-13.25960636138916],["▁notar",-13.259638786315918],["ięciu",-13.259658813476562],["▁አመ",-13.259661674499512],["▁postane",-13.259662628173828],["▁оконча",-13.25968074798584],["▁Нико",-13.259685516357422],["screen",-13.25969123840332],["▁šum",-13.25969409942627],["▁loss",-13.259705543518066],["▁Чет",-13.259706497192385],["Ла",-13.259711265563965],["▁акта",-13.259721755981444],["that",-13.259727478027344],["úra",-13.259783744812012],["iding",-13.259790420532228],["delle",-13.259821891784668],["റില്",-13.259831428527832],["▁zarur",-13.259832382202148],["ஞ்சி",-13.25983428955078],["លាយ",-13.259841918945312],["▁seeing",-13.259886741638184],["▁શો",-13.25990104675293],["▁بەت",-13.25990867614746],["▁ಸಿಕ್ಕ",-13.259909629821776],["▁applications",-13.259917259216309],["▁Marvel",-13.259933471679688],["serve",-13.259934425354004],["спар",-13.259949684143066],["რძ",-13.259990692138672],["▁talu",-13.259995460510254],["▁görür",-13.260010719299316],["▁өкмөт",-13.260018348693848],["итесь",-13.26002025604248],["στρα",-13.260022163391112],["ക്കൂ",-13.260047912597656],["▁машини",-13.260049819946287],["▁FREE",-13.260068893432615],["iausiai",-13.260075569152832],["encji",-13.260082244873049],["ræði",-13.260082244873049],["▁күндө",-13.260087013244627],["▁значај",-13.260117530822754],["▁Тре",-13.260130882263184],["武汉",-13.260153770446776],["▁günün",-13.260162353515623],["中华人民共和国",-13.260173797607422],["軸",-13.260181427001951],["孝",-13.260189056396484],["inize",-13.260196685791016],["ጢ",-13.260210037231444],["▁ကၽြန္ေတာ္",-13.260210037231444],["▁odporúča",-13.260210990905762],["▁propósito",-13.260210990905762],["▁инцидент",-13.260210990905762],["แห้ง",-13.260212898254396],["▁lékař",-13.26021957397461],["လောက်",-13.260220527648926],["▁결국",-13.260229110717772],["телните",-13.260231018066406],[
"▁момант",-13.26023769378662],["හො",-13.260250091552734],["พลาด",-13.26025104522705],["▁channel",-13.260251998901367],["▁특별",-13.260252952575684],["▁NOT",-13.260254859924316],["▁έξω",-13.260259628295898],["РМ",-13.260276794433594],["▁загуба",-13.26028537750244],["▁vua",-13.260293006896973],["▁giết",-13.260293960571287],["线上",-13.260300636291504],["▁свобода",-13.260305404663086],["שער",-13.260315895080566],["▁פעילות",-13.260319709777832],["▁clau",-13.260320663452148],["ffet",-13.26034164428711],["isena",-13.260355949401855],["isyys",-13.260367393493652],["ంజ",-13.260374069213867],["ေျခ",-13.260374069213867],["▁UNA",-13.260387420654297],["λοι",-13.260401725769045],["▁nødvendig",-13.260411262512209],["▁nakne",-13.260458946228027],["▁مریم",-13.260472297668455],["▁Halu",-13.260488510131836],["เสนอ",-13.260491371154783],["▁sokat",-13.26051139831543],["▁Ljud",-13.26051425933838],["▁preži",-13.260525703430176],["▁keçid",-13.26053524017334],["▁גדולה",-13.26054859161377],["铁路",-13.260567665100098],["▁Şah",-13.260579109191896],["▁laboratori",-13.260590553283691],["mäßig",-13.260592460632324],["▁administrative",-13.260594367980955],["▁selline",-13.26059627532959],["▁отпад",-13.260619163513184],["stoffer",-13.260653495788574],["▁gold",-13.260656356811523],["▁Penis",-13.260658264160156],["▁Stellen",-13.260671615600586],["...........",-13.2606782913208],["يث",-13.260683059692385],["▁superiore",-13.260690689086914],["▁rivi",-13.260708808898926],["▁дуго",-13.26072120666504],["▁Option",-13.260743141174316],["▁tisk",-13.260761260986328],["▁지도",-13.26076889038086],["enhet",-13.260774612426758],["▁प्रभावित",-13.260783195495604],["▁Էլ",-13.260785102844238],["▁ով",-13.26078987121582],["▁uste",-13.26081657409668],["פוליטי",-13.260826110839844],["луж",-13.260836601257324],["▁VW",-13.260845184326172],["র্ত",-13.260915756225586],["▁১৮",-13.260980606079102],["▁jarang",-13.260985374450684],["никами",-13.26098918914795],["▁דק",-13.261011123657228],["မှတ်",-13.261015892028809],["▁contribuir",-13.261040687561035],["▁versão",-13.261043548583984],["▁tõe",-13.261067390441896],["शर",-13.261080741882324],["slaan",-13.261089324951172],["▁corp",-13.2611722946167],["▁numit",-13.261186599731444],["▁YE",-13.261201858520508],["▁sprzedaż",-13.261202812194824],["вав",-13.261208534240724],["▁እነ",-13.26120948791504],["نها",-13.261211395263672],["▁сервер",-13.261231422424316],["มุม",-13.261236190795898],["▁Baada",-13.261247634887695],["amore",-13.261260986328123],["又有",-13.261268615722656],["▁นํา",-13.261312484741213],["ရက်",-13.261322021484377],["ávky",-13.261323928833008],["▁pū",-13.261353492736816],["的数据",-13.261354446411133],["▁Evropske",-13.261358261108398],["ക്കം",-13.26137351989746],["▁иста",-13.261404037475586],["missi",-13.261407852172852],["▁Кыргызстанда",-13.261407852172852],["venti",-13.261415481567385],["▁تفصیلات",-13.261425971984863],["ဘူး။",-13.261428833007812],["달러",-13.261441230773926],["вън",-13.26144313812256],["▁santa",-13.261455535888672],["▁kauppa",-13.261466026306152],["▁gina",-13.261478424072266],["▁erf",-13.261484146118164],["▁বিশ্ববিদ্যালয়",-13.26149082183838],["ZY",-13.261515617370604],["▁silmä",-13.26152801513672],["▁giàu",-13.261542320251465],["▁zahrad",-13.261567115783691],["(3",-13.261576652526855],["gott",-13.261590957641602],["▁früh",-13.261591911315918],["▁grze",-13.261594772338867],["mobile",-13.26159954071045],["▁ಕೆಲ",-13.261630058288574],["▁ekonomisk",-13.261649131774902],["─",-13.26166820526123],["٦",-13.261674880981444],["▁agréable",-13.261675834655762],["shimcha",-13.261682510375977],["▁ሽ",-13.261
690139770508],["▁posibilitatea",-13.26170253753662],["usias",-13.261713981628418],["▁defender",-13.261728286743164],["уды",-13.261735916137695],["▁طرز",-13.261740684509276],["ډو",-13.261752128601074],["▁décidé",-13.261752128601074],["jooks",-13.261754989624023],["▁ozdob",-13.261761665344238],["▁jatko",-13.261768341064451],["циите",-13.261795043945312],["▁Cristina",-13.261807441711426],["responsabilidade",-13.261820793151855],["▁tillfälle",-13.261823654174805],["▁постоје",-13.261823654174805],["Кар",-13.26183032989502],["▁виклик",-13.261832237243652],["▁рос",-13.261834144592283],["錯誤",-13.26183795928955],["▁сазна",-13.261850357055664],["一支",-13.26185131072998],["ınızı",-13.261852264404297],["▁hề",-13.261860847473145],["ՈՒԹՅՈՒՆ",-13.261865615844728],["▁pilt",-13.261878967285156],["▁jaga",-13.261885643005373],["bók",-13.26190185546875],["▁kitos",-13.261920928955078],["▁използват",-13.261933326721191],["▁klubi",-13.261945724487305],["▁solve",-13.26194667816162],["▁झालेल्या",-13.261957168579102],["▁ej",-13.262027740478516],["▁license",-13.262030601501465],["▁judici",-13.26203155517578],["ချင်",-13.262057304382324],["skudd",-13.262075424194336],["▁ਆਮ",-13.262080192565918],["loom",-13.262102127075195],["▁الوطن",-13.262104988098145],["▁نجات",-13.262115478515623],["▁கண்ட",-13.262124061584473],["▁müügi",-13.262129783630373],["खंड",-13.262158393859863],["▁Apoi",-13.262165069580078],["▁vời",-13.26217269897461],["▁Πρω",-13.262173652648926],["nızı",-13.26218318939209],["▁siang",-13.26218318939209],["▁અનેક",-13.262227058410645],["▁Raza",-13.26223373413086],["▁ლუ",-13.262250900268556],["овао",-13.262258529663086],["▁fonctionne",-13.262264251708984],["οικ",-13.262271881103516],["▁mày",-13.262277603149414],["▁социјал",-13.262290954589844],["▁പാല",-13.26229190826416],["▁Regul",-13.262304306030272],["▁imagin",-13.262320518493652],["年以上",-13.262332916259766],["今は",-13.262359619140623],["▁לכן",-13.262367248535156],["ติดตั้ง",-13.262372970581056],["ṇa",-13.262380599975586],["▁gizli",-13.262391090393066],["draw",-13.262392044067385],["▁daba",-13.26240062713623],["▁Bureau",-13.262411117553713],["თავს",-13.262425422668455],["▁Ergebnis",-13.262439727783203],["▁terenu",-13.262441635131836],["▁RAIS",-13.262442588806152],["▁بېرىش",-13.262460708618164],["ացված",-13.26248836517334],["▁Franz",-13.262490272521973],["無線",-13.262490272521973],["▁siaj",-13.262495040893556],["▁dõi",-13.26250171661377],["solve",-13.262503623962402],["▁متحد",-13.262506484985352],["VT",-13.2625150680542],["▁റാ",-13.262518882751465],["ाँ",-13.26251983642578],["ున్న",-13.262524604797363],["▁שנות",-13.262529373168944],["完整的",-13.262531280517578],["新时代",-13.262537002563477],["curso",-13.26254177093506],["▁klipp",-13.262560844421388],["ەس",-13.262606620788574],["телната",-13.26260757446289],["trico",-13.262618064880373],["ничка",-13.26263427734375],["mişdi",-13.262639045715332],["▁rustig",-13.262642860412598],["▁입장",-13.262660026550291],["بۇ",-13.262661933898926],["来た",-13.26267433166504],["ଦ୍ର",-13.262677192687988],["kkaat",-13.262685775756836],["▁ڪيل",-13.262691497802734],["▁slikt",-13.262697219848633],["▁مین",-13.262706756591797],["▁hotels",-13.26271152496338],["▁draud",-13.262717247009276],["hrung",-13.26271915435791],["bella",-13.262763023376465],["▁dudalen",-13.262764930725098],["posizione",-13.262784957885742],["▁heen",-13.262818336486816],["▁ଭି",-13.26282787322998],["▁процесса",-13.262847900390623],["▁вашем",-13.262853622436523],["▁actuación",-13.262856483459473],["▁veral",-13.262876510620115],["▁bào",-13.262877464294434],["▁Кой",-13.2628803253
17385],["cáil",-13.26288604736328],["▁استاندارد",-13.262892723083496],["的照片",-13.262893676757812],["ကျွန်",-13.262909889221191],["▁Mura",-13.26292610168457],["▁Կա",-13.262946128845217],["▁cake",-13.262951850891112],["svið",-13.262962341308594],["不住",-13.26296329498291],["legal",-13.262993812561035],["ക്കായി",-13.2629976272583],["▁کشی",-13.263007164001465],["▁erdi",-13.26301383972168],["▁пок",-13.26306438446045],["▁অফিস",-13.26309299468994],["▁دینا",-13.263097763061523],["▁ļauj",-13.263121604919434],["桂",-13.26312255859375],["▁ရန္ကုန္",-13.263138771057127],["▁Browser",-13.263140678405762],["▁Cynulliad",-13.263140678405762],["ທະວີ",-13.263141632080078],["▁դեսպան",-13.263141632080078],["▁మార్కెట్",-13.263141632080078],["▁ridiculus",-13.263142585754396],["▁perkataan",-13.26314926147461],["▁desktop",-13.263150215148926],["▁Barzanî",-13.263151168823242],["▁വിവിധ",-13.26316261291504],["使う",-13.263164520263672],["▁nejsou",-13.26316738128662],["tuba",-13.263175010681152],["▁nazorat",-13.263178825378418],["▁आशा",-13.26319694519043],["▁middle",-13.263230323791504],["▁వెళ్ళ",-13.26323413848877],["óireacht",-13.263237953186035],["ոյի",-13.263237953186035],["sinya",-13.2632417678833],["သေး",-13.26325798034668],["▁thread",-13.263260841369627],["▁weiterhin",-13.263265609741213],["xê",-13.263270378112791],["▁kasutus",-13.26329231262207],["ब्ब",-13.263297080993652],["▁القوات",-13.263310432434082],["▁koron",-13.263327598571776],["▁контакти",-13.263347625732422],["▁Alka",-13.263350486755373],["▁gündə",-13.263365745544434],["結局",-13.263382911682127],["▁следи",-13.263384819030762],["▁Төр",-13.263388633728027],["▁167",-13.263397216796877],["▁осозна",-13.263398170471191],["▁ຮອງ",-13.263407707214355],["▁поставя",-13.263415336608888],["▁Мінску",-13.263423919677734],["▁tár",-13.263428688049316],["սն",-13.263453483581545],["ນາມ",-13.263461112976074],["▁άρ",-13.263463973999023],["праш",-13.263476371765137],["▁поводом",-13.263490676879885],["▁ledig",-13.263513565063477],["нген",-13.2635498046875],["крас",-13.263572692871094],["▁deficit",-13.263577461242676],["▁innego",-13.263578414916992],["▁λογ",-13.263589859008787],["▁پوشش",-13.263593673706056],["▁Staat",-13.263595581054688],["▁မွ",-13.263616561889648],["▁kedves",-13.263628959655762],["▁Усі",-13.263632774353027],["▁paradis",-13.26365566253662],["▁Pasak",-13.26365852355957],["▁titik",-13.26365852355957],["mesema",-13.263659477233888],["▁بالس",-13.26366901397705],["▁сведения",-13.263710021972656],["șa",-13.263724327087402],["mona",-13.263731956481934],["frau",-13.263733863830566],["ίρ",-13.26374626159668],["▁देवी",-13.263755798339844],["▁síos",-13.26376247406006],["▁gauti",-13.263812065124512],["নগর",-13.263822555541992],["union",-13.263826370239258],["▁Vamos",-13.263829231262209],["▁poslal",-13.263843536376951],["▁ଭା",-13.263854026794434],["▁VN",-13.263866424560549],["▁Mene",-13.263877868652344],["▁енгіз",-13.263880729675291],["67)",-13.26389980316162],["▁путь",-13.263908386230469],["▁Pomoc",-13.263933181762695],["morf",-13.26395320892334],["▁ଘର",-13.263954162597656],["pasang",-13.263978004455566],["▁Merkez",-13.263994216918944],["▁saham",-13.263994216918944],["▁խմբագրել",-13.26400375366211],["▁Think",-13.264029502868652],["▁AMD",-13.264031410217283],["kumi",-13.264037132263184],["▁föräldrar",-13.26404094696045],["▁കേസ",-13.26408576965332],["▁اتي",-13.264117240905762],["levél",-13.264138221740724],["▁LRT",-13.26413917541504],["▁آتی",-13.264145851135254],["▁Medlem",-13.26414680480957],["▁tartalom",-13.264158248901367],["▁uredi",-13.264168739318848],["事物",-13.26417541503
9062],["عروض",-13.26421070098877],["овића",-13.264214515686035],["ljenja",-13.264220237731934],["UAL",-13.264232635498049],["▁دقت",-13.264232635498049],["▁dört",-13.26423454284668],["▁tokoh",-13.264238357543944],["▁ilişkiler",-13.264269828796388],["ရုပ်",-13.26427936553955],["▁শিল্প",-13.264280319213867],["▁50.000",-13.264284133911133],["▁attiva",-13.264296531677246],["കുമാര്",-13.264309883117676],["▁začala",-13.26431655883789],["▁hepsi",-13.264320373535156],["大切な",-13.264322280883787],["▁trein",-13.26434326171875],["▁жор",-13.26437759399414],["▁volunt",-13.264395713806152],["▁Pension",-13.264410972595217],["▁رام",-13.264410972595217],["יצור",-13.264427185058594],["ותה",-13.264445304870604],["täviä",-13.264472007751465],["▁Angst",-13.264473915100098],["▁hører",-13.26447582244873],["ኢህአዴግ",-13.264487266540527],["ոնի",-13.26452350616455],["▁requisit",-13.264527320861816],["▁кризис",-13.264533042907717],["崇",-13.264540672302246],["▁Мектеп",-13.264544486999512],["▁melk",-13.264549255371094],["ionis",-13.264562606811523],["bash",-13.264565467834473],["آت",-13.264570236206056],["ദ്യ",-13.264585494995115],["橫",-13.264591217041016],["ાત",-13.264599800109863],["▁администр",-13.264599800109863],["เพชร",-13.264607429504396],["ตั๋วเครื่องบิน",-13.264609336853027],["▁Böyük",-13.264609336853027],["▁Pohjois",-13.264609336853027],["▁Μαρτίου",-13.264609336853027],["▁Трябва",-13.264609336853027],["▁सांगितले",-13.264610290527344],["▁wythnos",-13.26461124420166],["▁മൂന്ന്",-13.264616012573242],["▁asilimia",-13.264618873596191],["▁popolnoma",-13.264619827270508],["▁કરશે",-13.264625549316406],["購物網",-13.264634132385254],["▁Bücher",-13.264636039733888],["▁frábær",-13.264636039733888],["Кыргыз",-13.26463794708252],["▁morbi",-13.264638900756836],["▁szintén",-13.264638900756836],["▁avtobus",-13.264644622802734],["တစ္ေယာက္",-13.264649391174316],["▁رہنے",-13.264657020568848],["▁sintomas",-13.26465892791748],["▁כמובן",-13.264659881591797],["уа",-13.264663696289062],["▁αυτοί",-13.264663696289062],["management",-13.264665603637695],["VAN",-13.264668464660645],["▁lliure",-13.264668464660645],["▁families",-13.264676094055176],["▁बाटो",-13.264683723449709],["▁edrych",-13.264711380004885],["ի՝",-13.264713287353516],["▁লোক",-13.264716148376465],["▁حکمت",-13.26471996307373],["▁Проблем",-13.264723777770996],["სას",-13.264730453491213],["تحدث",-13.264735221862791],["▁хочется",-13.264753341674805],["▁المواد",-13.264755249023438],["有没有",-13.264778137207031],["▁قوله",-13.264789581298828],["▁ريال",-13.264793395996094],["▁പരിപാടി",-13.26479721069336],["▁fejlesztés",-13.264811515808104],["▁банку",-13.264830589294434],["▁Alman",-13.264846801757812],["▁도움이",-13.264854431152344],["číta",-13.26486873626709],["▁habilita",-13.264883995056152],["▁локални",-13.26488971710205],["▁escenario",-13.264915466308594],["YI",-13.264917373657228],["▁Ziem",-13.264928817749023],["ography",-13.264938354492188],["ataj",-13.264959335327148],["▁Ирак",-13.264960289001465],["ელები",-13.264992713928224],["ória",-13.26499843597412],["bladet",-13.26503562927246],["로그",-13.265039443969728],["▁herstel",-13.265053749084473],["ženkl",-13.265058517456056],["лаган",-13.265082359313965],["פטר",-13.26508617401123],["▁올라",-13.26513671875],["bury",-13.265153884887695],["isieren",-13.265175819396973],["αρτ",-13.265176773071287],["सकेका",-13.265213012695312],["मण्डल",-13.265239715576172],["हाय",-13.265254974365234],["▁stö",-13.26526165008545],["diens",-13.265267372131348],["рвен",-13.26527500152588],["બલ",-13.265277862548828],["▁maximus",-13.265281677246094],["ሕዝ",-13.265286
445617676],["▁veit",-13.265286445617676],["▁Hed",-13.265288352966309],["▁දහ",-13.265310287475586],["▁menti",-13.2653169631958],["ುವುದ",-13.265334129333496],["▁7000",-13.265341758728027],["▁사항",-13.26534652709961],["▁teams",-13.265347480773926],["▁kayak",-13.265348434448242],["खल",-13.265371322631836],["▁لجنة",-13.26538372039795],["▁тұрғын",-13.265409469604492],["భి",-13.26541805267334],["кут",-13.265483856201172],["▁shirka",-13.265485763549805],["скъп",-13.265496253967283],["вне",-13.26551628112793],["▁iedere",-13.265531539916992],["▁imod",-13.265535354614258],["ARIA",-13.26553726196289],["დიდ",-13.265544891357422],["බල",-13.265560150146484],["▁ચાર",-13.265569686889648],["▁جیت",-13.265591621398926],["ያት",-13.265603065490724],["▁되고",-13.265609741210938],["SHO",-13.265647888183594],["▁bell",-13.26564884185791],["скер",-13.26567268371582],["▁punk",-13.265687942504885],["▁පහර",-13.265690803527832],["議員",-13.265698432922363],["▁komponen",-13.265711784362791],["ацией",-13.265748023986816],["▁орындау",-13.26576042175293],["▁seminari",-13.265789985656738],["cchio",-13.265793800354004],["▁сиз",-13.265820503234863],["સિ",-13.265851020812988],["▁turen",-13.265871047973633],["▁detaloj",-13.265913963317873],["ANZA",-13.26594066619873],["jörð",-13.265951156616213],["▁optimiza",-13.26595687866211],["ڈا",-13.26596736907959],["▁터",-13.265969276428224],["رته",-13.265976905822754],["损",-13.265979766845703],["wiel",-13.265983581542969],["▁şərait",-13.265992164611816],["▁2-4",-13.266008377075195],["什",-13.266008377075195],["ವಾಸ",-13.266016006469728],["தோ",-13.266024589538574],["▁gaji",-13.26604461669922],["▁עומד",-13.266058921813965],["▁न्यायालय",-13.266071319580078],["schrijving",-13.266079902648926],["▁καλύτερα",-13.266079902648926],["▁কার্যালয়",-13.266079902648926],["▁Ushbu",-13.266080856323242],["▁mụn",-13.26608180999756],["▁حقيقت",-13.26608180999756],["▁uspjeh",-13.266082763671877],["▁νερό",-13.266083717346191],["▁бюро",-13.266083717346191],["▁спосіб",-13.266085624694824],["▁فاصله",-13.266085624694824],["▁ಭಾರತೀಯ",-13.266085624694824],["तान",-13.266087532043455],["ப்படுகிறது",-13.26608943939209],["▁መልኩ",-13.266091346740724],["▁رڳو",-13.26609230041504],["▁jābūt",-13.266095161437988],["▁કહી",-13.266107559204102],["▁artikli",-13.266109466552734],["▁pozwoli",-13.266109466552734],["களுடன்",-13.266125679016112],["▁ଆମେ",-13.266132354736328],["dělá",-13.26614761352539],["▁Rezerv",-13.266149520874023],["▁mujibu",-13.266165733337402],["ចុះ",-13.266168594360352],["▁ေရႊ",-13.2661714553833],["ЗО",-13.266189575195312],["▁Agustus",-13.266209602355955],["▁necesare",-13.266209602355955],["▁posed",-13.26621150970459],["▁հակա",-13.266253471374512],["▁nacht",-13.26625633239746],["▁Forest",-13.26628303527832],["▁atras",-13.266318321228027],["▁கூட்ட",-13.266318321228027],["▁لاي",-13.266351699829102],["Ten",-13.266358375549316],["vito",-13.266386032104492],["թթ",-13.266396522521973],["balu",-13.266416549682615],["khabar",-13.266425132751465],["▁далеч",-13.266430854797363],["▁сярод",-13.26643180847168],["▁Only",-13.266435623168944],["▁landen",-13.266438484191896],["ाइल",-13.26644229888916],["▁ஷ",-13.266449928283691],["▁feladatok",-13.26645278930664],["▁japansk",-13.266467094421388],["έων",-13.266472816467283],["▁suurin",-13.26647663116455],["pjes",-13.26649284362793],["arit",-13.26649570465088],["▁njihovih",-13.266512870788574],["▁թիմ",-13.26651668548584],["▁dəyişikliklər",-13.266525268554688],["dymo",-13.266533851623535],["ѓ",-13.266544342041016],["isée",-13.266546249389648],["עבר",-13.266552925109863],["▁ári",-13.26659679412841
8],["מאָ",-13.26660442352295],["▁Tâm",-13.26660442352295],["ဆိုတဲ့",-13.266609191894531],["ರಾಜ",-13.266610145568848],["вшие",-13.266633987426758],["▁₹",-13.266637802124023],["ড়ি",-13.266640663146973],["▁והת",-13.266650199890137],["▁Denis",-13.266671180725098],["▁produktiv",-13.26667594909668],["oznám",-13.266717910766602],["▁упор",-13.266772270202637],["делен",-13.26678466796875],["▁శ్ర",-13.26678466796875],["ിനെതിരെ",-13.266789436340332],["▁ကြ",-13.266791343688965],["▁prossimo",-13.266796112060549],["▁Também",-13.266805648803713],["ඹු",-13.266806602478027],["▁dlm",-13.26682186126709],["▁zostało",-13.266822814941406],["ттер",-13.266844749450684],["នាយក",-13.26686668395996],["▁osjećaj",-13.266868591308594],["құ",-13.266892433166504],["Bur",-13.26693058013916],["▁ПР",-13.266950607299805],["▁justice",-13.26695156097412],["▁NOS",-13.266974449157717],["nutý",-13.267001152038574],["▁Лука",-13.26700496673584],["▁dolores",-13.267013549804688],["▁Trust",-13.267014503479004],["▁provides",-13.26701545715332],["▁uspe",-13.267030715942385],["▁Bác",-13.267033576965332],["▁laikas",-13.267047882080078],["▁добар",-13.267061233520508],["▁healthy",-13.26706886291504],["▁bizitza",-13.267085075378418],["▁соглас",-13.267096519470217],["▁১৩",-13.267109870910645],["▁dışı",-13.267129898071287],["▁първия",-13.267135620117188],["жена",-13.267152786254885],["artikel",-13.26715850830078],["555",-13.26716423034668],["cerca",-13.26716423034668],["tono",-13.267165184020996],["▁Pf",-13.267167091369627],["gué",-13.267172813415527],["▁гэнэ",-13.267217636108398],["▁заметил",-13.26722240447998],["▁dw",-13.26723575592041],["ወጡ",-13.267237663269045],["▁nimmt",-13.267237663269045],["▁медаль",-13.267260551452637],["məyib",-13.26726531982422],["edes",-13.2672758102417],["▁gondolat",-13.267290115356444],["▁Zak",-13.26729965209961],["алу",-13.26730728149414],["▁Posi",-13.267308235168455],["▁kapat",-13.267313957214355],["เช่า",-13.267328262329102],["čních",-13.267337799072266],["▁Change",-13.267358779907228],["▁مرت",-13.267363548278809],["▁Дзе",-13.26738166809082],["स्टे",-13.26742458343506],["內的",-13.267440795898438],["是你",-13.26744270324707],["▁απε",-13.267459869384766],["เช็ค",-13.267470359802246],["▁societate",-13.267475128173828],["▁minera",-13.26747703552246],["илось",-13.26750373840332],["▁remove",-13.267518043518066],["▁Reci",-13.267520904541016],["WD",-13.267522811889648],["▁драма",-13.267541885375977],["களையும்",-13.267542839050291],["股份",-13.267547607421877],["梨",-13.267548561096191],["▁përfshi",-13.26755142211914],["ជំងឺ",-13.267552375793455],["▁Fakültesi",-13.267552375793455],["▁Communication",-13.267553329467772],["▁miesięcy",-13.267553329467772],["▁trứng",-13.267553329467772],["▁اندروید",-13.267553329467772],["▁zemalja",-13.267555236816406],["▁حکام",-13.267563819885254],["▁úřad",-13.26756477355957],["だが",-13.267565727233888],["おか",-13.26756763458252],["▁హై",-13.267570495605469],["ಲಾಗುತ್ತದೆ",-13.267589569091797],["Օ",-13.26759147644043],["▁чаще",-13.267598152160645],["それぞれ",-13.267605781555176],["▁seguire",-13.267621040344238],["▁अथ",-13.26762866973877],["▁rozdiel",-13.267632484436035],["चित्र",-13.267634391784668],["▁Allen",-13.267641067504885],["▁பெண்கள்",-13.267660140991213],["vika",-13.267670631408691],["▁वाम",-13.267671585083008],["▁throughout",-13.26767349243164],["▁fjár",-13.267699241638184],["ЦА",-13.2677001953125],["▁trăm",-13.26771640777588],["▁Aaj",-13.26773166656494],["άτ",-13.267754554748535],["▁կառավարման",-13.267757415771484],["trix",-13.267765045166016],["▁Stal",-13.26776885986328],["racht",-13.267769813
537598],["▁Новы",-13.267779350280762],["▁месо",-13.267789840698242],["fero",-13.267804145812988],["▁apertura",-13.267805099487305],["▁ہوں۔",-13.267826080322266],["▁ďalších",-13.267845153808594],["gros",-13.267849922180176],["▁dde",-13.26785659790039],["steiger",-13.26785945892334],["▁exigi",-13.26789093017578],["▁fuerit",-13.267906188964844],["егі",-13.26791286468506],["עקט",-13.26791286468506],["▁ሲሉ",-13.267918586730955],["ส่วนใหญ่",-13.267942428588867],["도를",-13.267945289611816],["यम",-13.267946243286133],["▁Rasul",-13.267950057983398],["▁เซ",-13.267950057983398],["ുകളുടെ",-13.267951011657717],["▁가치",-13.267951011657717],["өс",-13.26795482635498],["▁තැන්",-13.267956733703612],["▁Partei",-13.268003463745115],["▁лако",-13.26803207397461],["Om",-13.26806640625],["韓国",-13.26807975769043],["▁poży",-13.268083572387695],["دب",-13.268099784851074],["▁ಅನೇಕ",-13.26811408996582],["有哪些",-13.268156051635742],["▁sổ",-13.268157958984377],["того",-13.268179893493652],["tress",-13.268184661865234],["yrði",-13.268220901489258],["hdin",-13.26826000213623],["▁rrit",-13.268295288085938],["▁vypl",-13.268310546875],["▁келді",-13.268318176269531],["▁noteikti",-13.268322944641112],["brau",-13.26833724975586],["என்",-13.268346786499023],["▁virka",-13.26836395263672],["▁правото",-13.26836395263672],["▁парламента",-13.268375396728516],["▁սր",-13.2684326171875],["▁felett",-13.268442153930664],["▁innovativ",-13.268449783325195],["▁officia",-13.268452644348145],["დაც",-13.26845932006836],["ोट",-13.268472671508787],["▁אינה",-13.268484115600586],["situ",-13.268485069274902],["▁టి",-13.268499374389648],["ствено",-13.268536567687988],["更大",-13.268542289733888],["हरूलाई",-13.268555641174316],["baut",-13.268562316894531],["▁Komunik",-13.268569946289062],["κατα",-13.268571853637695],["▁станува",-13.268573760986328],["اعات",-13.268590927124023],["▁موضوعات",-13.268613815307615],["syarat",-13.268614768981934],["▁direktoru",-13.268625259399414],["▁Európa",-13.268627166748049],["▁viri",-13.268632888793944],["▁Sweet",-13.268634796142578],["▁administrat",-13.268644332885742],["▁kriter",-13.268648147583008],["▁бүтээгдэхүүн",-13.268648147583008],["▁odkazy",-13.26867389678955],["өнгө",-13.268675804138184],["জান",-13.26869773864746],["いで",-13.268701553344728],["▁Bú",-13.268715858459473],["▁gamma",-13.268728256225586],["schloss",-13.268742561340332],["▁peau",-13.268762588500977],["▁الأمريكي",-13.268765449523926],["ධි",-13.268770217895508],["▁στόχο",-13.268781661987305],["▁여러분",-13.268784523010254],["▁Eko",-13.268787384033203],["▁dezake",-13.268805503845217],["▁מציע",-13.268810272216797],["▁ថ",-13.268832206726074],["ßt",-13.268843650817873],["balan",-13.26889991760254],["▁Dho",-13.26890754699707],["impi",-13.26891040802002],["▁maitse",-13.268913269042969],["əndə",-13.2689208984375],["bereit",-13.268962860107422],["▁Tac",-13.268985748291016],["内心",-13.268988609313965],["▁Consello",-13.268999099731444],["▁respekto",-13.269007682800291],["▁замет",-13.269010543823242],["▁Verder",-13.269018173217772],["▁қызметін",-13.269024848937988],["▁ajándék",-13.269027709960938],["▁histórica",-13.269027709960938],["▁niezwykle",-13.269027709960938],["▁отсутствие",-13.269027709960938],["▁فائرنگ",-13.269027709960938],["▁जिसमें",-13.269027709960938],["▁ಪ್ರಮಾಣ",-13.269027709960938],["▁afirmou",-13.269028663635254],["▁कमजोर",-13.269028663635254],["▁захворювання",-13.26902961730957],["▁മീഡിയ",-13.26902961730957],["▁معنوی",-13.269030570983888],["▁Сергей",-13.269031524658203],["▁državljan",-13.269035339355469],["талі",-13.269037246704102],["finans",-13.269048690795
898],["▁Sydney",-13.269048690795898],["▁πορεία",-13.269052505493164],["▁chyf",-13.26906394958496],["▁powinno",-13.26906681060791],["▁ellenére",-13.269084930419922],["▁बातें",-13.269092559814451],["▁sahne",-13.269098281860352],["▁любите",-13.269124031066896],["▁picioare",-13.26912784576416],["ziq",-13.269145011901855],["straž",-13.269145965576172],["რებ",-13.269149780273438],["▁eigi",-13.26917839050293],["▁ਸਭਾ",-13.26918125152588],["ivanja",-13.26918601989746],["ेन्ट",-13.269186973571776],["▁Vậy",-13.26919651031494],["ალურ",-13.26921272277832],["▁jeziku",-13.26921272277832],["▁nõud",-13.269224166870115],["▁نيو",-13.269234657287598],["▁zava",-13.269246101379396],["▁COL",-13.269247055053713],["▁Hazrat",-13.269247055053713],["▁օրվա",-13.269257545471191],["▁vääri",-13.269283294677734],["▁evrov",-13.269285202026367],["ೀರ",-13.269289016723633],["▁марки",-13.269298553466797],["▁صفح",-13.269306182861328],["▁kúpeľ",-13.269339561462402],["зды",-13.269351959228516],["▁Tử",-13.26935577392578],["▁מעבר",-13.269367218017578],["▁kratko",-13.269373893737791],["▁tiltak",-13.26937770843506],["▁וד",-13.269396781921388],["▁איין",-13.269407272338867],["▁terveys",-13.269442558288574],["▁liko",-13.269451141357422],["liance",-13.26947021484375],["bubu",-13.269471168518066],["ورس",-13.269471168518066],["hart",-13.269481658935549],["гл",-13.269512176513672],["▁Vaj",-13.269530296325684],["▁facta",-13.269551277160645],["moda",-13.269572257995604],["▁kurioje",-13.269574165344238],["▁появи",-13.269575119018556],["▁melawan",-13.269583702087402],["गण",-13.269598007202148],["▁True",-13.269603729248049],["júci",-13.26962661743164],["воде",-13.269636154174805],["ητικό",-13.269649505615234],["▁Čech",-13.269660949707031],["novice",-13.26966667175293],["▁olemassa",-13.26966953277588],["▁Вен",-13.269680976867676],["▁truly",-13.269698143005373],["జె",-13.269715309143066],["мъ",-13.269728660583496],["▁nul",-13.26975440979004],["▁4%",-13.26975917816162],["▁ekibi",-13.26976490020752],["РД",-13.269780158996582],["ztu",-13.26979160308838],["▁modelu",-13.26979923248291],["▁სახლში",-13.269805908203123],["▁Христов",-13.269810676574709],["ගල",-13.269838333129885],["인이",-13.269865036010742],["▁Sein",-13.269866943359377],["ναι",-13.269868850708008],["的速度",-13.269874572753906],["思って",-13.269875526428224],["ژه",-13.269878387451172],["ण्याच्या",-13.269887924194336],["▁karibu",-13.269896507263184],["ésének",-13.269902229309082],["▁Safari",-13.269902229309082],["▁루",-13.26991081237793],["▁нещата",-13.269930839538574],["▁trapp",-13.269951820373535],["location",-13.269959449768066],["▁hálózat",-13.269979476928713],["مارس",-13.269989967346191],["▁Dél",-13.270023345947266],["isio",-13.270026206970217],["הי",-13.270028114318848],["▁თო",-13.270071029663086],["IMU",-13.270084381103516],["▁Bununla",-13.27008819580078],["▁zapewni",-13.270101547241213],["▁التو",-13.270109176635742],["turer",-13.270111083984377],["▁חבר",-13.270122528076172],["учен",-13.270130157470703],["ляет",-13.270142555236816],["քին",-13.270148277282717],["좋",-13.27017593383789],["まま",-13.270185470581056],["まり",-13.270193099975586],["▁ఇచ్చిన",-13.270228385925291],["▁Observa",-13.270248413085938],["▁dole",-13.270268440246582],["▁Até",-13.270282745361328],["▁رائع",-13.270285606384276],["▁Xxx",-13.27032470703125],["tikas",-13.270343780517578],["ുളള",-13.270356178283691],["▁sechs",-13.270383834838867],["àtic",-13.270403861999512],["prowadzenie",-13.27041244506836],["тг",-13.270423889160156],["ಲ್ಲೇ",-13.2704439163208],["趨勢",-13.270447731018066],["õhtu",-13.270456314086914],["▁ТО",-13.27045822
1435549],["▁روي",-13.270462989807127],["▁organizmu",-13.270466804504396],["衰",-13.270466804504396],["ហេតុ",-13.270484924316406],["kaji",-13.270499229431152],["сію",-13.270503044128418],["▁Electronic",-13.27050495147705],["▁Uncategorized",-13.27050495147705],["▁alışveriş",-13.27050495147705],["▁tersenyum",-13.27050495147705],["▁مطمئن",-13.27050495147705],["▁पाहिजे",-13.27050495147705],["▁తక్కువ",-13.27050495147705],["▁אנדערע",-13.270505905151367],["▁Όροι",-13.270506858825684],["▁алғашқы",-13.2705078125],["▁derbarê",-13.270508766174316],["▁ਸੇਵਾ",-13.270509719848633],["▁vasitəsi",-13.270511627197266],["▁Farmaajo",-13.270512580871582],["▁ਪੜ੍ਹ",-13.270515441894531],["▁risorse",-13.270516395568848],["▁mümkündür",-13.270522117614746],["wear",-13.270537376403809],["▁saaks",-13.270537376403809],["エン",-13.270550727844238],["Þ",-13.270563125610352],["▁သူ႔",-13.270572662353516],["▁нічого",-13.270574569702148],["▁Prix",-13.27057933807373],["فیل",-13.270586967468262],["▁Tax",-13.270588874816896],["menet",-13.270594596862791],["pong",-13.270596504211426],["لیس",-13.270599365234377],["▁aceite",-13.27061653137207],["▁Kings",-13.270627975463867],["▁عروس",-13.2706298828125],["instrument",-13.27065372467041],["УЧ",-13.27065372467041],["መልክ",-13.27065658569336],["大丈夫",-13.2706937789917],["▁capable",-13.270700454711914],["▁खाना",-13.270713806152344],["▁снег",-13.27071475982666],["▁acea",-13.270718574523926],["oshi",-13.270743370056152],["▁fate",-13.270756721496582],["päi",-13.270781517028809],["▁Kommentar",-13.270791053771973],["▁පේ",-13.2708101272583],["▁yksin",-13.270825386047363],["ፈራ",-13.270833969116213],["▁bære",-13.270880699157717],["වුන්",-13.270883560180664],["▁своїм",-13.27089023590088],["ሆነ",-13.27090549468994],["ියේ",-13.27090835571289],["▁परिषद",-13.270928382873535],["▁tydlig",-13.270940780639648],["證明",-13.270946502685549],["পো",-13.270954132080078],["▁مصادر",-13.27097511291504],["ющихся",-13.270980834960938],["▁Trebuie",-13.270981788635254],["ගැනීම",-13.270983695983888],["▁تحقق",-13.27099323272705],["▁ಒಳ",-13.270998001098633],["▁mogućnost",-13.271014213562012],["▁نیول",-13.27103042602539],["▁Perlu",-13.271031379699709],["utili",-13.27103328704834],["ometr",-13.271052360534668],["▁panne",-13.271055221557615],["-05",-13.271065711975098],["لاحق",-13.27108383178711],["ိမ္",-13.271106719970703],["▁二",-13.27113151550293],["▁sağlıklı",-13.271137237548828],["▁ddydd",-13.271158218383787],["OOD",-13.271193504333496],["▁Lehr",-13.271196365356444],["▁razloga",-13.271224975585938],["▁문서",-13.271239280700684],["आय",-13.271241188049316],["အမှတ်",-13.271245002746582],["▁ქართველ",-13.27127170562744],["▁kedv",-13.271286010742188],["ината",-13.27129364013672],["আই",-13.271299362182615],["▁ഉണ്ടാക്ക",-13.27131175994873],["▁dual",-13.27134895324707],["ੇਸ਼ਨ",-13.27135181427002],["nggang",-13.271361351013184],["культ",-13.27138614654541],["غه",-13.271388053894045],["▁Fantas",-13.271391868591309],["▁Muzik",-13.271465301513672],["▁comunicare",-13.271465301513672],["ահան",-13.271466255187988],["مطار",-13.271474838256836],["經歷",-13.271479606628418],["▁Every",-13.271503448486328],["iyadda",-13.271513938903809],["▁Зараз",-13.271546363830566],["strå",-13.271553993225098],["▁मिळव",-13.271564483642578],["8000",-13.27159309387207],["ukra",-13.271601676940918],["аваныя",-13.271623611450195],["Զ",-13.271626472473145],["▁Ferra",-13.271635055541992],["τέλ",-13.271642684936523],["СБ",-13.271651268005373],["тых",-13.271655082702637],["▁mimpi",-13.2716703414917],["டைய",-13.271673202514648],["安心して",-13.271676063537598],["▁actos",-13.27169
6090698242],["▁Haki",-13.271742820739746],["นําไป",-13.271754264831545],["១៨",-13.27176570892334],["▁풀",-13.271788597106934],["▁тамак",-13.27179718017578],["ხატ",-13.271807670593262],["ვეს",-13.27181625366211],["▁njoh",-13.271821022033691],["▁plenty",-13.271870613098145],["▁niente",-13.271876335144045],["▁ڏس",-13.271918296813965],["skærm",-13.271928787231444],["ყენე",-13.271936416625977],["丢",-13.271939277648926],["▁ਨੌਜਵਾਨ",-13.27194595336914],["債",-13.271946907043455],["萊",-13.27196216583252],["തുടങ്ങ",-13.271984100341797],["▁chociaż",-13.271984100341797],["▁вобласці",-13.271984100341797],["▁بازگشت",-13.271984100341797],["▁सर्वोच्च",-13.271984100341797],["▁முயற்சி",-13.271984100341797],["▁ಸಂಖ್ಯೆ",-13.271984100341797],["▁റിപ്പോര്",-13.271984100341797],["휘",-13.271985054016112],["โพสต์",-13.271987915039062],["▁нравится",-13.271987915039062],["▁ब्रेकिंग",-13.271987915039062],["Adresse",-13.271997451782228],["ห้าม",-13.27199935913086],["▁социалдык",-13.27200412750244],["наж",-13.272007942199709],["▁осуществляется",-13.272011756896973],["▁Aydın",-13.27202606201172],["▁lucah",-13.272039413452148],["▁سيكون",-13.272042274475098],["▁preventiv",-13.272052764892578],["▁고민",-13.272075653076172],["▁भारी",-13.272092819213867],["▁ανθρώπους",-13.272107124328612],["▁dessen",-13.27210807800293],["လွန်",-13.272123336791992],["ಿಗಳು",-13.272127151489258],["▁взял",-13.272133827209473],["▁скры",-13.272137641906738],["▁skel",-13.272138595581056],["tinio",-13.272140502929688],["ศึก",-13.272140502929688],["aidhean",-13.27214527130127],["bı",-13.272150039672852],["▁әскери",-13.272159576416016],["▁წამ",-13.272178649902344],["وژ",-13.272188186645508],["▁bliain",-13.272188186645508],["▁پناه",-13.272189140319824],["მრ",-13.272197723388672],["▁კაც",-13.272205352783203],["▁Πλ",-13.272214889526367],["DAT",-13.272221565246582],["ረድ",-13.272228240966797],["▁ඉන්නේ",-13.272239685058594],["▁купити",-13.272255897521973],["mega",-13.27226448059082],["▁rop",-13.272269248962402],["isztika",-13.272273063659668],["▁sove",-13.272283554077148],["▁atividade",-13.27228546142578],["▁skaits",-13.272302627563477],["▁precies",-13.27231216430664],["Пре",-13.272316932678224],["稱為",-13.27232265472412],["alaba",-13.272337913513184],["ησης",-13.272372245788574],["▁lukt",-13.272372245788574],["▁начальник",-13.27238941192627],["atenció",-13.272401809692385],["দ্র",-13.272401809692385],["되었다",-13.2724027633667],["реп",-13.27241039276123],["▁Друго",-13.272422790527344],["▁Acesta",-13.27242374420166],["▁dekh",-13.272432327270508],["▁संत",-13.272439002990724],["дери",-13.272465705871582],["आं",-13.272469520568848],["▁sā",-13.272469520568848],["▁ruangan",-13.27247428894043],["nab",-13.272475242614746],["ผู้ใช้",-13.272480010986328],["絶",-13.272520065307615],["中学",-13.272539138793944],["▁атап",-13.27255153656006],["▁черт",-13.272558212280272],["▁infant",-13.27256202697754],["看見",-13.272564888000488],["▁kür",-13.272567749023438],["▁ತೆರೆ",-13.272573471069336],["øren",-13.272583961486816],["▁بیٹھ",-13.272603034973145],["▁વી",-13.272607803344728],["▁ਪੰ",-13.272615432739258],["ಿದ್ದರೆ",-13.272627830505373],["mõis",-13.27265167236328],["बाई",-13.272652626037598],["▁อําเภอ",-13.272662162780762],["▁løs",-13.272666931152344],["鬆",-13.272672653198242],["▁රන්",-13.272686958312988],["حظ",-13.27268886566162],["增長",-13.272720336914062],["jścia",-13.272741317749023],["▁בתל",-13.272744178771973],["sék",-13.272759437561035],["▁artis",-13.272760391235352],["▁fece",-13.272767066955566],["руб",-13.2727689743042],["▁tabia",-13.272802352905272],["▁chica",-13.27280616760254],["
71)",-13.27281379699707],["ั่ง",-13.272823333740234],["▁engine",-13.272832870483398],["piiri",-13.272856712341309],["▁duhov",-13.272859573364258],["▁Qaz",-13.272863388061523],["▁කරල",-13.272870063781738],["▁อยู่",-13.272879600524902],["▁ស្រ",-13.272897720336914],["▁lililo",-13.272903442382812],["▁веру",-13.272920608520508],["と思いますが",-13.272932052612305],["▁ቋንቋ",-13.272951126098633],["▁Satan",-13.273005485534668],["Ор",-13.273006439208984],["▁klav",-13.273018836975098],["▁စား",-13.273056983947754],["▁paska",-13.273061752319336],["wahl",-13.273075103759766],["схем",-13.273083686828612],["▁לז",-13.273111343383787],["Cap",-13.273114204406738],["пел",-13.273114204406738],["ებია",-13.273115158081056],["trait",-13.273119926452637],["▁በግ",-13.273122787475586],["เลยครับ",-13.273133277893066],["▁заявив",-13.273141860961914],["▁ପର",-13.273174285888672],["істи",-13.273175239562988],["▁binti",-13.273215293884276],["lıklar",-13.273237228393556],["ชู",-13.27324390411377],["zidi",-13.273253440856934],["▁calendar",-13.273290634155272],["งาม",-13.273313522338867],["장이",-13.273319244384766],["▁tù",-13.27333164215088],["▁hè",-13.273347854614258],["فوز",-13.273396492004396],["▁לאור",-13.273409843444824],["凝",-13.27342700958252],["mäki",-13.273429870605469],["କୃତ",-13.273451805114746],["▁adquirir",-13.273459434509276],["▁framkvæmd",-13.273465156555176],["▁खेलाडी",-13.273465156555176],["▁Justiça",-13.273466110229492],["▁Weihnachts",-13.273466110229492],["▁terwijl",-13.273466110229492],["▁εβδομάδα",-13.273466110229492],["▁Документ",-13.273466110229492],["▁ಅಗತ್ಯ",-13.273466110229492],["▁अंतिम",-13.273468017578123],["▁spektakl",-13.273472785949709],["▁ખબર",-13.273472785949709],["▁lova",-13.273473739624023],["裁判",-13.27347469329834],["▁tashmë",-13.273480415344238],["▁ජනප්",-13.273480415344238],["▁ધ્યાન",-13.273482322692873],["順利",-13.27348518371582],["▁بگیرید",-13.27348804473877],["▁минул",-13.273488998413086],["選び",-13.27349090576172],["▁partecipazione",-13.273492813110352],["▁wou",-13.27351188659668],["▁хувийн",-13.273514747619627],["በብ",-13.273515701293944],["▁GDPR",-13.273520469665527],["▁سعر",-13.27352237701416],["kseni",-13.273527145385742],["▁ممنون",-13.27352809906006],["▁تيار",-13.27353286743164],["▁ဇ",-13.27353572845459],["▁towar",-13.273539543151855],["▁comincia",-13.27354335784912],["owanej",-13.273553848266602],["▁башчысы",-13.27355670928955],["▁어린이",-13.273569107055664],["▁سلامتی",-13.27358055114746],["եթ",-13.273589134216309],["▁güclü",-13.273603439331056],["▁תשע",-13.273614883422852],["▁مرداد",-13.273616790771484],["▁ushtri",-13.27363109588623],["ادية",-13.27363395690918],["▁లి",-13.273638725280762],["▁Pand",-13.27365493774414],["▁શિ",-13.27366065979004],["▁кезек",-13.273690223693848],["үүдийг",-13.273700714111328],["▁الماء",-13.273707389831545],["løst",-13.273728370666504],["▁dificil",-13.27375316619873],["พลัง",-13.273758888244627],["수는",-13.27377223968506],["іны",-13.273822784423828],["न्द्र",-13.273823738098145],["ဗ်",-13.273839950561523],["▁vestido",-13.2738618850708],["hrani",-13.27387809753418],["▁falsch",-13.27389907836914],["гнут",-13.273900985717772],["αλλ",-13.273907661437988],["▁feno",-13.27392578125],["▁operacion",-13.273935317993164],["鼠",-13.273944854736328],["▁مسئولان",-13.273945808410645],["ajul",-13.273971557617188],["▁форума",-13.273996353149414],["▁ولس",-13.27399730682373],["▁လို",-13.274020195007324],["▁plage",-13.274022102355955],["▁rök",-13.274025917053224],["zili",-13.274040222167969],["timit",-13.274059295654297],["▁چکے",-13.274069786071776],["▁ତାଙ୍କର",-13.27407932281494],["▁Sa
nkt",-13.274090766906738],["ათვის",-13.2741060256958],["ଙ୍ଗା",-13.274110794067385],["чник",-13.274114608764648],["тні",-13.27411937713623],["innova",-13.274133682250977],["▁fitur",-13.274150848388672],["▁makine",-13.274166107177734],["▁slipper",-13.27420711517334],["▁plakat",-13.274208068847656],["▁hool",-13.274222373962402],["večer",-13.274226188659668],["堪",-13.2742280960083],["▁ית",-13.274230003356934],["ਕਿ",-13.274232864379885],["▁infeksi",-13.274250030517578],["▁kuća",-13.274272918701172],["▁ಇನ್ನೂ",-13.27431869506836],["▁seconde",-13.27432918548584],["▁yangu",-13.274344444274902],["세계",-13.27434539794922],["ജ്ഞ",-13.274358749389648],["▁Fuck",-13.274372100830078],["▁ujë",-13.274391174316406],["▁rabat",-13.274413108825684],["IBA",-13.274418830871582],["дэх",-13.274456024169922],["wesen",-13.274469375610352],["нош",-13.27447509765625],["▁проду",-13.274493217468262],["ΟΝ",-13.274495124816896],["▁věci",-13.27451515197754],["▁ਸਮ",-13.274539947509766],["وجه",-13.274559020996094],["▁lähte",-13.274591445922852],["▁veritat",-13.274617195129396],["우리",-13.274633407592772],["▁বাড়ি",-13.27465534210205],["▁Τουρκία",-13.274673461914062],["▁funcionar",-13.274677276611328],["▁Hé",-13.27467918395996],["ពិត",-13.274682998657228],["▁complementar",-13.274703979492188],["▁stadi",-13.2747163772583],["▁העו",-13.27471923828125],["jobb",-13.274727821350098],["▁lidi",-13.274754524230955],["▁Úvod",-13.27476692199707],["▁σφ",-13.27476692199707],["wort",-13.274778366088867],["记得",-13.274778366088867],["στεί",-13.274781227111816],["▁bale",-13.274785041809082],["ტანა",-13.274787902832031],["ENER",-13.274800300598145],["▁shqiptarët",-13.27480125427246],["▁metà",-13.274820327758787],["업체",-13.274825096130373],["▁FU",-13.274828910827637],["macht",-13.27483081817627],["▁iaith",-13.27485466003418],["▁Pozna",-13.27488136291504],["ەرى",-13.274898529052734],["шао",-13.274917602539062],["плащане",-13.274922370910645],["▁ataque",-13.274934768676758],["▁ព្រោះ",-13.274943351745604],["▁HABARI",-13.274949073791504],["▁einstakling",-13.274949073791504],["▁ninguém",-13.274949073791504],["▁μεγαλύτερο",-13.274949073791504],["▁мэргэжлийн",-13.274949073791504],["▁विश्वविद्यालय",-13.274949073791504],["▁सुप्रीम",-13.274949073791504],["▁ጉባኤ",-13.274949073791504],["โทรศัพท์",-13.27495002746582],["▁gadget",-13.274954795837402],["▁ilość",-13.274954795837402],["▁لحظه",-13.274954795837402],["▁انتہائی",-13.27495574951172],["▁чиновник",-13.274958610534668],["▁penyebab",-13.27496337890625],["▁पात्र",-13.27496337890625],["▁којим",-13.274965286254885],["▁сторінка",-13.274969100952148],["▁emateko",-13.274970054626465],["▁madde",-13.27497386932373],["▁naktsmītnes",-13.27497386932373],["▁ხო",-13.27497386932373],["▁kezel",-13.274975776672363],["▁ప్రజా",-13.274978637695312],["▁அரு",-13.274985313415527],["▁işaret",-13.274994850158691],["dinti",-13.27499771118164],["รองเท้า",-13.27501106262207],["▁domes",-13.275030136108398],["▁अस्",-13.275041580200195],["▁Domine",-13.27504539489746],["▁пространство",-13.275054931640623],["▁surpriz",-13.275060653686523],["stvom",-13.275062561035156],[".............",-13.275064468383787],["fläche",-13.275081634521484],["的价格",-13.275094032287598],["▁muzika",-13.275094985961914],["магазин",-13.275097846984863],["▁ullamco",-13.275108337402344],["priser",-13.27512264251709],["▁معتبر",-13.27513313293457],["▁IQ",-13.275140762329102],["▁bigarren",-13.275145530700684],["▁luister",-13.275165557861328],["▁৩০",-13.275171279907228],["看起来",-13.275172233581545],["унд",-13.275175094604492],["▁KAM",-13.275178909301758],["zlari",-13.275191
307067873],["답",-13.27519989013672],["210",-13.275203704833984],["хид",-13.275219917297363],["▁sidor",-13.275224685668944],["▁дирек",-13.275245666503906],["▁silikon",-13.27525520324707],["hå",-13.27525806427002],["ichi",-13.27528190612793],["avec",-13.275285720825195],["ավորման",-13.275296211242676],["74)",-13.275311470031738],["▁materijal",-13.275311470031738],["▁इसकी",-13.275311470031738],["Internet",-13.275346755981444],["▁hasiera",-13.27536392211914],["▁એના",-13.275378227233888],["▁Proč",-13.275379180908203],["▁situado",-13.27538776397705],["▁Saison",-13.275390625],["▁서비스를",-13.275399208068848],["▁Libre",-13.275402069091797],["▁Français",-13.275408744812012],["▁होतं",-13.275434494018556],["жили",-13.275443077087402],["▁ridic",-13.275447845458984],["▁యువ",-13.27545928955078],["ခဲ",-13.27547836303711],["ින",-13.27548599243164],["▁Pasta",-13.275492668151855],["▁Pärnu",-13.275495529174805],["▁пункта",-13.27551555633545],["▁Arabia",-13.275521278381348],["甚",-13.27554988861084],["▁ahhoz",-13.275568008422852],["ძის",-13.2755708694458],["šnji",-13.275598526000977],["▁snø",-13.275602340698242],["▁schimba",-13.27561378479004],["ຖື",-13.275625228881836],["▁تھ",-13.275632858276367],["IZA",-13.275633811950684],["▁kahden",-13.275664329528809],["▁프로젝트",-13.27567195892334],["▁nevím",-13.275677680969238],["tiivse",-13.275755882263184],["▁خورد",-13.275758743286133],["▁174",-13.275762557983398],["▁pubblici",-13.275829315185549],["▁страницата",-13.275856018066406],["ならない",-13.275887489318848],["▁Két",-13.275918006896973],["▁landets",-13.275936126708984],["గె",-13.275958061218262],["कट",-13.27597427368164],["偉",-13.275976181030272],["▁Василь",-13.275981903076172],["▁aplic",-13.275983810424805],["trafik",-13.275988578796388],["▁краја",-13.276000022888184],["alista",-13.276001930236816],["▁प्राण",-13.276015281677246],["▁macar",-13.276029586791992],["▁jump",-13.276034355163574],["▁Sekret",-13.27604866027832],["ацијата",-13.276050567626951],["▁duniya",-13.276061058044434],["▁ΝΑ",-13.276061058044434],["▁Aplica",-13.27609920501709],["▁lili",-13.276107788085938],["▁создать",-13.276108741760254],["▁boyut",-13.27613639831543],["是为了",-13.276159286499023],["▁කිව්ව",-13.276171684265137],["▁primis",-13.27620792388916],["▁okr",-13.276230812072754],["▁seren",-13.27623462677002],["ամե",-13.276235580444336],["▁ពួក",-13.2762451171875],["▁göm",-13.276248931884766],["下跌",-13.276277542114258],["▁жолы",-13.276280403137209],["ādes",-13.276286125183104],["▁ennast",-13.276305198669434],["▁بأنه",-13.276321411132812],["Plus",-13.276331901550291],["▁Pirma",-13.27635097503662],["mojo",-13.276351928710938],["▁Нор",-13.276361465454102],["sene",-13.276371002197266],["▁lokalne",-13.276375770568848],["培訓",-13.276381492614746],["嫩",-13.27638339996338],["LEX",-13.27638816833496],["вець",-13.276396751403809],["64)",-13.27640438079834],["اختبار",-13.27641773223877],["▁BG",-13.27641773223877],["▁Huy",-13.2764253616333],["ลักษณ์",-13.276433944702148],["컬",-13.276433944702148],["๖",-13.276434898376465],["▁άτομα",-13.276434898376465],["▁мероприятия",-13.276434898376465],["▁європейськ",-13.276434898376465],["▁ڪورٽ",-13.276434898376465],["▁받고",-13.276434898376465],["▁shëndet",-13.276436805725098],["ຫຼັກ",-13.276442527770996],["ချုပ်",-13.276446342468262],["σουμε",-13.276449203491213],["კას",-13.27645778656006],["▁Українська",-13.27645778656006],["▁kompetens",-13.276458740234377],["▁viral",-13.276458740234377],["▁महाराज",-13.276476860046388],["▁سڀني",-13.276490211486816],["ॉल",-13.276494979858398],["▁nöjd",-13.276500701904297],["▁fremtiden",-13.276512
145996094],["▁Stadium",-13.276524543762209],["▁ወንድ",-13.276555061340332],["▁калып",-13.276567459106444],["▁پورا",-13.276593208312988],["▁መታ",-13.276603698730469],["▁üzem",-13.276607513427734],["▁الذى",-13.276609420776367],["▁akaun",-13.276619911193848],["▁umma",-13.276619911193848],["▁автоном",-13.276637077331545],["▁ял",-13.276643753051758],["ическа",-13.27666187286377],["▁necesaria",-13.276663780212402],["väli",-13.276668548583984],["gyógy",-13.276726722717283],["▁bizonyos",-13.276740074157717],["ındaki",-13.276742935180664],["▁acompanha",-13.276747703552246],["▁ಗೊತ್ತಾ",-13.276752471923828],["▁sona",-13.276772499084473],["תות",-13.276800155639648],["şehir",-13.276811599731444],["pusz",-13.276812553405762],["▁फेरि",-13.27684211730957],["▁අනේ",-13.27684211730957],["▁Navn",-13.276846885681152],["▁desarrollar",-13.27685260772705],["▁stačí",-13.276878356933594],["ຕ້ອງການ",-13.276885986328123],["▁gehe",-13.27692985534668],["▁датум",-13.276966094970703],["liwe",-13.276972770690918],["そのまま",-13.277009963989258],["▁prende",-13.27701187133789],["▁ਹਾਲ",-13.277013778686523],["漫畫",-13.277019500732422],["-02",-13.277032852172852],["isean",-13.277040481567385],["▁ভুল",-13.277055740356444],["▁жастар",-13.277056694030762],["尚未",-13.277056694030762],["▁waxayna",-13.277066230773926],["どのような",-13.277077674865724],["▁built",-13.277088165283203],["्ह",-13.277094841003418],["▁izaten",-13.27710247039795],["现代化",-13.277161598205566],["▁varen",-13.277164459228516],["▁trick",-13.277170181274414],["ໂຕ",-13.277180671691896],["▁எடுத்து",-13.277180671691896],["oša",-13.27718734741211],["▁berhubungan",-13.277201652526855],["/19",-13.27720546722412],["▁chiede",-13.27723217010498],["▁نع",-13.277249336242676],["ινα",-13.277271270751951],["θή",-13.27727508544922],["▁autent",-13.277276992797852],["▁рідн",-13.277292251586914],["▁ћу",-13.277303695678713],["▁frakt",-13.277325630187988],["phen",-13.277328491210938],["▁zapravo",-13.27733325958252],["ысты",-13.277335166931152],["▁Reihe",-13.277351379394531],["štění",-13.277376174926758],["▁juhi",-13.27739143371582],["かに",-13.277399063110352],["จัดการ",-13.27742862701416],["▁ieško",-13.27743434906006],["▁Тези",-13.27744197845459],["▁ਚੋਣ",-13.27744197845459],["▁ziara",-13.277482986450195],["▁ترکی",-13.277501106262209],["▁autentic",-13.27752685546875],["▁vrijednosti",-13.2775297164917],["മുണ്ട",-13.277551651000977],["▁Minsk",-13.277551651000977],["હુ",-13.277563095092772],["ମ୍ବ",-13.277580261230469],["▁tulo",-13.27761173248291],["▁Gener",-13.277626037597656],["හොත්",-13.27764892578125],["▁kock",-13.27765655517578],["बोध",-13.277676582336426],["▁сваім",-13.27768325805664],["▁solum",-13.277700424194336],["▁fungo",-13.277713775634766],["สิว",-13.27772331237793],["▁Groot",-13.27773380279541],["టూ",-13.277746200561523],["▁dlh",-13.277748107910156],["▁articula",-13.277775764465332],["cipit",-13.277780532836914],["stīt",-13.277783393859863],["일보",-13.277795791625977],["▁വ്വ",-13.277827262878418],["უღ",-13.27784252166748],["dalo",-13.27785587310791],["工作中",-13.277860641479492],["토토",-13.27786350250244],["踢",-13.277871131896973],["襲",-13.27788257598877],["▁boja",-13.277889251708984],["NAR",-13.277899742126465],["пав",-13.277905464172363],["звон",-13.277910232543944],["svět",-13.27791690826416],["▁СДСМ",-13.277923583984377],["▁पृथ्वी",-13.277923583984377],["▁దీంతో",-13.277923583984377],["▁sporazum",-13.277924537658691],["▁zaštitu",-13.277924537658691],["▁आफ्ना",-13.277924537658691],["▁ответственности",-13.27792739868164],["▁navodi",-13.277928352355955],["▁ಜನ್ಮ",-13.27793025970459],["▁galimybė"
,-13.277931213378906],["▁ဟု",-13.277935028076172],["▁proovi",-13.277935981750488],["▁చూడండి",-13.277942657470703],["värt",-13.27794361114502],["小组",-13.277947425842283],["ظل",-13.277949333190918],["▁MAL",-13.277950286865234],["▁здоровья",-13.277956008911133],["▁gallwch",-13.277957916259766],["▁निवड",-13.277957916259766],["▁yhteyttä",-13.277961730957031],["▁તરફ",-13.27796459197998],["exposició",-13.27797508239746],["▁troš",-13.277981758117676],["▁jakin",-13.277998924255373],["hier",-13.27800178527832],["utis",-13.2780122756958],["ချင်း",-13.27801513671875],["▁erkennen",-13.278018951416016],["▁Marathi",-13.278019905090332],["mood",-13.278037071228027],["drive",-13.27803897857666],["▁факти",-13.278051376342772],["plattform",-13.278056144714355],["ຮັກ",-13.27805995941162],["▁Sõ",-13.278064727783203],["▁rovnako",-13.278082847595217],["▁पुर्",-13.278093338012695],["▁ನಟಿ",-13.278096199035645],["▁față",-13.278116226196287],["▁تاب",-13.278118133544922],["▁вашего",-13.278120040893556],["▁transaksi",-13.278122901916504],["▁luns",-13.27814769744873],["▁देखिएको",-13.278152465820312],["▁disseny",-13.278153419494627],["▁almenn",-13.278162002563477],["▁інститут",-13.278167724609377],["교통",-13.278172492980955],["▁ติดต่อ",-13.278213500976562],["▁piiri",-13.27821922302246],["▁ജീവിതം",-13.278227806091309],["▁можда",-13.278244018554688],["▁existencia",-13.278264045715332],["▁іншими",-13.278276443481444],["tavo",-13.278289794921877],["ेयर",-13.27829647064209],["ались",-13.278301239013672],["ومي",-13.278342247009276],["▁meille",-13.27835750579834],["ాల్సిన",-13.27837371826172],["تردد",-13.278393745422363],["▁Enfin",-13.278395652770996],["各式",-13.278395652770996],["ίτη",-13.278396606445312],["ksesi",-13.278409957885742],["▁чөлөө",-13.278423309326172],["▁delavnic",-13.278471946716309],["▁Ful",-13.278497695922852],["112",-13.27851104736328],["▁Folgen",-13.278514862060549],["ίζ",-13.27851676940918],["▁Active",-13.278541564941406],["抓住",-13.278553009033203],["▁кмет",-13.278573036193848],["پٹ",-13.27857780456543],["íti",-13.278578758239746],["mür",-13.27858543395996],["▁fiu",-13.278596878051758],["▁अग्र",-13.27861213684082],["▁Ergebnisse",-13.278635025024414],["▁matí",-13.278637886047363],["ográfico",-13.27863883972168],["▁એને",-13.278657913208008],["ሰል",-13.278668403625488],["▁Tentu",-13.278677940368652],["▁أق",-13.278679847717283],["▁дија",-13.278727531433104],["зін",-13.278736114501951],["▁vaikuttaa",-13.278745651245115],["▁ngôn",-13.27875518798828],["▁неба",-13.278767585754396],["ભર",-13.278841018676758],["eeseen",-13.278855323791504],["▁мэдэгд",-13.278874397277832],["зима",-13.278875350952148],["▁ਕੋਲ",-13.278879165649414],["shgan",-13.278883934020996],["不行",-13.278886795043944],["确认",-13.278888702392578],["/05/20",-13.278891563415527],["물을",-13.278891563415527],["444",-13.27890396118164],["▁Мінска",-13.27892780303955],["▁zinām",-13.278928756713867],["地点",-13.278937339782717],["▁nessuno",-13.278939247131348],["byggð",-13.278952598571776],["▁veste",-13.27895736694336],["▁размера",-13.278965950012209],["▁Беларуская",-13.278974533081056],["▁predsjednika",-13.278976440429688],["лсан",-13.27897834777832],["्नुहोस्",-13.278989791870115],["▁Legea",-13.278989791870115],["▁արդյունք",-13.279006958007812],["ақты",-13.279012680053713],["▁योगी",-13.279016494750977],["▁ferment",-13.27901840209961],["▁mező",-13.279023170471191],["สัง",-13.279048919677734],["▁frank",-13.279082298278809],["▁REAL",-13.279083251953123],["ર્ક",-13.279098510742188],["▁मोह",-13.279099464416504],["▁ਕਹਿ",-13.27910041809082],["لە",-13.279101371765137],["தா
னே",-13.279114723205566],["▁scén",-13.279129981994627],["▁εποχή",-13.279130935668944],["બો",-13.279135704040527],["barra",-13.279136657714844],["▁Kuma",-13.279150009155272],["▁slabo",-13.279163360595703],["▁Macron",-13.279166221618652],["RIT",-13.27921199798584],["명을",-13.279216766357422],["овни",-13.279219627380373],["LAK",-13.279229164123535],["▁الجر",-13.279230117797852],["▁эт",-13.279233932495115],["ells",-13.27923583984375],["▁Quest",-13.279241561889648],["▁نقشه",-13.279252052307127],["▁Svoj",-13.279253959655762],["▁zaraz",-13.279254913330078],["ΝΑ",-13.279255867004396],["อบรม",-13.279266357421877],["агч",-13.279279708862305],["ఫి",-13.279291152954102],["签证",-13.279314041137695],["izuar",-13.27932071685791],["考生",-13.279324531555176],["▁endroit",-13.279333114624023],["▁Freunde",-13.279340744018556],["▁వ్యాఖ్య",-13.279354095458984],["ettes",-13.279376983642578],["اسة",-13.279378890991213],["▁obiekt",-13.279382705688477],["culture",-13.27938461303711],["ינת",-13.279412269592283],["เหล็ก",-13.279413223266602],["ስፖርት",-13.279413223266602],["ដឹកនាំ",-13.279413223266602],["▁Δευτέρα",-13.279413223266602],["▁πρώην",-13.279413223266602],["▁poskytuje",-13.279414176940918],["▁ਅੰਮ੍ਰਿਤਸਰ",-13.279414176940918],["▁Erasmus",-13.279415130615234],["▁méthode",-13.279415130615234],["▁обезбеди",-13.279415130615234],["▁כרטיס",-13.279415130615234],["▁tanulmány",-13.279419898986816],["▁dokonce",-13.279422760009766],["▁internasional",-13.279427528381348],["▁meskipun",-13.279427528381348],["iškos",-13.279428482055664],["▁ünlü",-13.279428482055664],["▁filmes",-13.279430389404297],["▁zależy",-13.279430389404297],["▁Många",-13.279443740844728],["១២",-13.2794771194458],["▁significant",-13.279485702514648],["▁υπέρ",-13.279485702514648],["▁ਜਗ",-13.279501914978027],["▁싶은",-13.279518127441406],["▁nedenfor",-13.279521942138672],["øk",-13.279525756835938],["▁fóra",-13.279535293579102],["ిస్తూ",-13.279541015625],["சே",-13.27955722808838],["artig",-13.27956771850586],["▁wellness",-13.279568672180176],["therapie",-13.279582977294922],["ที่พัก",-13.27958869934082],["▁일을",-13.27959156036377],["▁toukokuuta",-13.279595375061035],["верт",-13.279596328735352],["▁ମାସ",-13.279605865478516],["▁хэмжээг",-13.279622077941896],["一杯",-13.279622077941896],["▁verjetno",-13.279643058776855],["▁perfecte",-13.279659271240234],["▁bavi",-13.27967357635498],["▁แถม",-13.27967357635498],["▁Tällä",-13.27967929840088],["henkilö",-13.27968406677246],["▁erkek",-13.279706001281738],["Прав",-13.279706954956056],["世界杯",-13.279707908630373],["▁puro",-13.279711723327637],["間違い",-13.27971649169922],["roko",-13.279718399047852],["üstü",-13.2797212600708],["▁Sales",-13.279725074768066],["一面",-13.279756546020508],["র্থ",-13.279779434204102],["60)",-13.279793739318848],["▁දැනට",-13.27979564666748],["▁داشتند",-13.27980899810791],["▁deluje",-13.279812812805176],["▁първите",-13.279827117919922],["▁corpus",-13.279850959777832],["▁ກໍ່",-13.279863357543944],["▁руу",-13.279887199401855],["سد",-13.279889106750488],["だったので",-13.279905319213867],["全市",-13.279912948608398],["▁հավաք",-13.279919624328612],["▁Вести",-13.27992057800293],["▁2016)",-13.279926300048828],["▁qismi",-13.279947280883787],["PART",-13.2799654006958],["▁վայր",-13.279983520507812],["ቸዉ",-13.279986381530762],["09)",-13.279993057250977],["▁χρησιμοποι",-13.280031204223633],["▁Defini",-13.280036926269531],["▁lukke",-13.28003978729248],["▁Wana",-13.280065536499023],["▁отвара",-13.280085563659668],["gradu",-13.280088424682615],["malle",-13.28009796142578],["ुभएको",-13.280110359191896],["▁sidder",-13.280114173
88916],["趣味",-13.280120849609377],["pyt",-13.280132293701172],["▁έγ",-13.280165672302246],["ადამიან",-13.280180931091309],["▁քայլ",-13.280183792114258],["▁przepis",-13.28018569946289],["▁quæ",-13.280186653137209],["▁ప్రి",-13.280195236206056],["135",-13.280200004577637],["▁арна",-13.28020191192627],["ražen",-13.28021240234375],["реже",-13.280219078063965],["▁venu",-13.280232429504396],["▁Literatura",-13.280261039733888],["280",-13.2802734375],["ьор",-13.280282974243164],["▁Wirtschafts",-13.280299186706545],["▁praeter",-13.28030014038086],["▁emisij",-13.280303001403809],["ويات",-13.28032112121582],["▁veselības",-13.2803316116333],["▁Tij",-13.280359268188477],["▁Hacı",-13.280377388000488],["ックス",-13.280381202697754],["▁recomenda",-13.28038501739502],["▁Eel",-13.28042984008789],["ເທ",-13.280488014221191],["▁calma",-13.280494689941406],["▁variante",-13.280494689941406],["▁mawr",-13.280498504638672],["▁ซ",-13.28053379058838],["▁Detail",-13.280537605285645],["▁6-7",-13.280548095703123],["veel",-13.280557632446287],["▁градот",-13.280566215515137],["▁Todas",-13.280574798583984],["▁выраз",-13.280587196350098],["▁господар",-13.280588150024414],["ಿನಿಂದ",-13.280607223510742],["лић",-13.280616760253906],["▁modèle",-13.280638694763184],["ไล",-13.280646324157717],["▁explicat",-13.280652046203612],["ñen",-13.280682563781738],["▁риба",-13.280706405639648],["▁potekal",-13.280725479125977],["קאָ",-13.280747413635254],["agam",-13.280753135681152],["▁kritika",-13.28075885772705],["▁გადმო",-13.280818939208984],["నున్న",-13.280826568603516],["മാക",-13.28085231781006],["獻",-13.280855178833008],["ţiilor",-13.28085994720459],["GD",-13.28086280822754],["▁peace",-13.280866622924805],["▁tanár",-13.280892372131348],["กรุง",-13.28090476989746],["เทคนิค",-13.28090476989746],["▁комісії",-13.280905723571776],["▁المشاركة",-13.280905723571776],["▁capítulo",-13.280906677246094],["▁నన్ను",-13.280906677246094],["กรุงเทพฯ",-13.280911445617676],["ပေါင်း",-13.280911445617676],["▁kõne",-13.280913352966309],["יבער",-13.28091526031494],["▁voulez",-13.280921936035156],["▁ډلې",-13.280924797058104],["▁утицај",-13.280925750732422],["px",-13.280929565429688],["▁setter",-13.280945777893066],["▁कपूर",-13.280949592590332],["▁آئین",-13.280950546264648],["▁ସେମାନେ",-13.28095817565918],["▁yabancı",-13.280961036682127],["▁bairro",-13.280965805053713],["сети",-13.280966758728027],["níkov",-13.280970573425291],["▁DAS",-13.280981063842772],["▁හරියට",-13.280982971191406],["▁couleurs",-13.28098487854004],["証",-13.280987739562988],["▁houdt",-13.281002044677734],["▁غونډه",-13.281007766723633],["ຄາ",-13.28102207183838],["▁ਜਨਮ",-13.28104019165039],["▁Nato",-13.281082153320312],["қызы",-13.281108856201172],["лөрү",-13.281119346618652],["▁дейности",-13.281120300292969],["suche",-13.281123161315918],["मक",-13.281124114990234],["▁அம்மா",-13.281142234802246],["▁غربی",-13.281145095825195],["vári",-13.28114891052246],["▁پام",-13.281163215637209],["алга",-13.281170845031738],["▁Riv",-13.281171798706056],["▁tādu",-13.281189918518066],["▁ազգ",-13.28119945526123],["ичке",-13.28120231628418],["▁gê",-13.28120231628418],["▁Tous",-13.281211853027344],["pros",-13.28121280670166],["▁เกม",-13.281214714050291],["▁kuriame",-13.281220436096191],["▁פנים",-13.281222343444824],["▁kvart",-13.281229972839355],["न्ट",-13.28124713897705],["תשובה",-13.28125286102295],["먹",-13.281323432922363],["▁експеримент",-13.281336784362791],["تعلم",-13.281354904174805],["ရင်း",-13.281362533569336],["▁menyang",-13.281365394592283],["πιστ",-13.281366348266602],["▁bestand",-13.281371116638184],["riz
a",-13.281390190124512],["▁zadnjih",-13.28139591217041],["dych",-13.281397819519045],["▁ішкі",-13.28139877319336],["▁kerusi",-13.281400680541992],["▁اجرایی",-13.281411170959473],["ഷന്",-13.281417846679688],["▁dešimt",-13.281418800354004],["Sz",-13.28142547607422],["▁قارى",-13.28142547607422],["▁tasu",-13.28144359588623],["鐘",-13.281449317932127],["▁suíomh",-13.281489372253418],["သန္း",-13.281497955322266],["(*",-13.281505584716797],["ىۋ",-13.281511306762695],["▁Клас",-13.281551361083984],["▁nowego",-13.281561851501465],["▁dobri",-13.281563758850098],["kompoz",-13.28157901763916],["雙方",-13.281596183776855],["فاد",-13.281610488891602],["baha",-13.281634330749512],["æder",-13.281635284423828],["▁العم",-13.28165054321289],["▁ವಿಭಾಗ",-13.28169059753418],["▁celkom",-13.28171157836914],["ezer",-13.281715393066406],["ራን",-13.281715393066406],["▁årlig",-13.281720161437988],["▁boutique",-13.281740188598633],["▁Назарбаев",-13.281767845153809],["▁गांव",-13.281781196594238],["груп",-13.281783103942873],["▁механізм",-13.281789779663086],["▁شدم",-13.2817964553833],["▁কৰি",-13.28179931640625],["▁fragt",-13.281805992126465],["▁utdanning",-13.281842231750488],["▁אמת",-13.2818603515625],["有趣的",-13.281868934631348],["ربع",-13.281871795654297],["리그",-13.28189468383789],["strādāt",-13.28189754486084],["збу",-13.281902313232422],["paro",-13.281906127929688],["▁निर्देश",-13.281916618347168],["▁quantitat",-13.281939506530762],["▁ماهی",-13.281977653503418],["场比赛",-13.281983375549316],["▁ဟာ",-13.28198528289795],["▁ประเภท",-13.281991004943848],["▁Loc",-13.282011985778809],["ชัน",-13.282037734985352],["▁الطا",-13.282052040100098],["volt",-13.282052993774414],["layır",-13.282086372375488],["Associació",-13.282102584838867],["▁hävi",-13.28210735321045],["βαση",-13.282111167907717],["ISE",-13.28211784362793],["jamas",-13.28214168548584],["kleding",-13.282164573669434],["ਯ",-13.282169342041016],["▁парите",-13.282190322875977],["▁Hakim",-13.28219985961914],["tiivi",-13.28221321105957],["▁jugador",-13.28221607208252],["▁ရေ",-13.282219886779783],["▁cijelo",-13.282228469848633],["▁miljoni",-13.28224277496338],["▁enfrenta",-13.28225326538086],["नम्",-13.282308578491213],["サー",-13.282309532165527],["なし",-13.28231430053711],["▁ယူ",-13.28233242034912],["ението",-13.28233528137207],["▁ਡੀ",-13.282337188720703],["▁Kura",-13.282341957092283],["▁ווייל",-13.282352447509766],["啥",-13.282374382019045],["ස්ත",-13.28237533569336],["▁berättar",-13.282379150390623],["▁هاڻي",-13.282381057739258],["▁ജാ",-13.282384872436523],["▁župan",-13.282389640808104],["お知らせ",-13.28239631652832],["กลิ่น",-13.282401084899902],["▁Dessutom",-13.282401084899902],["▁zasebnosti",-13.282401084899902],["▁αλήθεια",-13.282401084899902],["▁γύρω",-13.282401084899902],["▁μπορούσε",-13.282401084899902],["▁ребенок",-13.282401084899902],["▁Blut",-13.28240203857422],["▁መሰረት",-13.282403945922852],["▁можливості",-13.282411575317385],["ກວ່າ",-13.282421112060549],["▁piccola",-13.282442092895508],["▁منو",-13.282442092895508],["marks",-13.28244972229004],["ола",-13.282451629638672],["▁البعض",-13.282464027404783],["мия",-13.282466888427734],["▁Vlada",-13.28246784210205],["▁kalu",-13.282474517822266],["▁victime",-13.282475471496582],["▁cozinha",-13.282501220703123],["▁cih",-13.282506942749023],["▁Amikor",-13.282511711120604],["▁Coll",-13.282512664794922],["▁kii",-13.282526969909668],["▁birisi",-13.282532691955566],["▁Bela",-13.282533645629885],["▁አድርጎ",-13.282543182373049],["▁2003,",-13.282574653625488],["▁relació",-13.282584190368652],["мимо",-13.28259563446045],["▁besedil",-13.2826
02310180664],["▁पुल",-13.28260612487793],["MMA",-13.282630920410156],["▁공동",-13.282641410827637],["▁balki",-13.282651901245115],["saken",-13.28268337249756],["lulu",-13.282686233520508],["▁ერ",-13.282690048217772],["GN",-13.282694816589355],["▁Norm",-13.28269863128662],["sün",-13.282752990722656],["▁davr",-13.282769203186035],["▁ਲੋੜ",-13.282770156860352],["▁натуральн",-13.282779693603516],["ສົ່ງ",-13.282803535461426],["เพิ่มขึ้น",-13.28281593322754],["▁Vă",-13.282818794250488],["▁Shab",-13.282841682434082],["SIN",-13.282849311828612],["କ୍ର",-13.282852172851562],["desmit",-13.28286361694336],["▁ziemi",-13.282903671264648],["ОМ",-13.282953262329102],["▁نگران",-13.282957077026367],["▁IB",-13.282977104187012],["matica",-13.283002853393556],["▁Cumhuriyet",-13.28302764892578],["rania",-13.283042907714844],["sätter",-13.283055305480955],["▁అయి",-13.283056259155272],["▁Odense",-13.28305721282959],["▁cứng",-13.28306484222412],["▁ଟ",-13.283076286315918],["第十",-13.28308391571045],["▁لكي",-13.283085823059082],["riji",-13.283114433288574],["▁következ",-13.28311824798584],["riem",-13.28315258026123],["proces",-13.283164024353027],["▁najmniej",-13.28317165374756],["▁ພັນ",-13.28318214416504],["紀",-13.283184051513672],["▁Confira",-13.283195495605469],["▁prvé",-13.283195495605469],["▁sabo",-13.283196449279783],["▁түй",-13.283199310302734],["▁კრ",-13.283232688903809],["ивается",-13.283248901367188],["▁explore",-13.283252716064451],["▁ສະຫະລັດ",-13.283275604248049],["▁haluaa",-13.28329086303711],["나요",-13.28329372406006],["▁FAR",-13.283294677734377],["▁dop",-13.28331470489502],["▁hazırda",-13.283355712890623],["▁bekende",-13.283370018005373],["▁199",-13.283370971679688],["▁nelja",-13.28337574005127],["▁Tovább",-13.283376693725586],["▁coro",-13.28341293334961],["ច្រើន",-13.283416748046877],["▁ליד",-13.283424377441406],["▁lorg",-13.283425331115724],["جۇ",-13.28343105316162],["ていない",-13.28343105316162],["devu",-13.283432006835938],["▁ဆက္",-13.28343391418457],["ונד",-13.283453941345217],["國立",-13.283454895019531],["▁mängd",-13.28346061706543],["▁მოა",-13.283465385437012],["▁principios",-13.283470153808594],["▁2000,",-13.283475875854492],["▁Sare",-13.283479690551758],["▁לחי",-13.28353786468506],["490",-13.283567428588867],["ที่น่า",-13.283577919006348],["▁victoria",-13.28357982635498],["▁твърде",-13.283592224121094],["वाय",-13.283635139465332],["पक्ष",-13.283645629882812],["▁Vou",-13.28365421295166],["▁препозна",-13.283658981323242],["▁Atsi",-13.283687591552734],["▁päivänä",-13.283716201782228],["▁pú",-13.283750534057615],["▁Jedna",-13.283757209777832],["أربع",-13.283769607543944],["▁minket",-13.283769607543944],["MX",-13.283793449401855],["emper",-13.283821105957031],["▁svakom",-13.283828735351562],["25)",-13.283838272094728],["▁derrota",-13.283843040466309],["▁എന്നത്",-13.283856391906738],["ידע",-13.28386402130127],["GAL",-13.283865928649902],["▁financiare",-13.283869743347168],["կեր",-13.283884048461914],["▁Because",-13.28389835357666],["▁társadalmi",-13.28389835357666],["▁ಅಕ್ಷರ",-13.283899307250977],["▁жизнен",-13.283900260925291],["▁Natürlich",-13.28390121459961],["▁zaštite",-13.28390121459961],["▁consegna",-13.283903121948242],["júce",-13.28391170501709],["▁Одес",-13.28391170501709],["▁লাখ",-13.28391170501709],["▁confianza",-13.28393268585205],["▁Koordin",-13.283934593200684],["▁plăcut",-13.283952713012695],["▁Клуб",-13.283957481384276],["▁asosida",-13.283958435058594],["▁versucht",-13.283963203430176],["▁emotional",-13.283968925476074],["▁бригад",-13.283970832824709],["▁जीव",-13.283971786499023],["▁arvesta",-
13.283976554870604],["▁rempli",-13.283977508544922],["laitos",-13.283982276916504],["▁anjeun",-13.284049034118652],["▁Viņa",-13.2840576171875],["▁käydä",-13.284069061279297],["义务",-13.284070014953612],["harmoni",-13.28407096862793],["▁колдонуу",-13.284075736999512],["otin",-13.284096717834473],["▁Dinas",-13.284140586853027],["▁गये",-13.284153938293455],["▁orðið",-13.284168243408203],["feat",-13.284187316894531],["லுக்கு",-13.28419017791748],["▁భారత్",-13.284193992614746],["▁beragam",-13.284197807312012],["▁ആദ്യം",-13.284197807312012],["▁Lange",-13.284219741821287],["▁κατε",-13.284226417541504],["▁auttaa",-13.284234046936035],["▁Servicio",-13.284239768981934],["▁տալ",-13.284239768981934],["▁начать",-13.284258842468262],["▁Spiritu",-13.284268379211426],["ordningen",-13.28428554534912],["▁poche",-13.284317016601562],["▁2013)",-13.284318923950195],["▁module",-13.284334182739258],["▁حرم",-13.284358978271484],["▁sentrum",-13.284390449523926],["▁участва",-13.284390449523926],["으니",-13.284448623657228],["▁håper",-13.284462928771973],["▁Reco",-13.284467697143556],["شك",-13.284482955932615],["▁តំបន់",-13.284482955932615],["▁چاہ",-13.284486770629885],["▁kõiki",-13.2844877243042],["▁Пло",-13.284505844116213],["ვლ",-13.284516334533691],["গু",-13.284523010253906],["гласи",-13.284544944763184],["▁recrea",-13.284550666809082],["োৱা",-13.28455924987793],["อดีต",-13.28459644317627],["▁urgent",-13.284601211547852],["лима",-13.28460693359375],["▁Romano",-13.2846097946167],["▁надій",-13.284612655639648],["ئام",-13.284621238708496],["▁mirar",-13.284623146057127],["▁segon",-13.284642219543455],["დების",-13.284675598144531],["tém",-13.284689903259276],["SW",-13.284692764282228],["▁Ander",-13.284772872924805],["▁ព្រឹក",-13.284772872924805],["▁بورس",-13.284774780273438],["αστική",-13.28477668762207],["▁նշեց",-13.284809112548828],["▁tjej",-13.284852027893066],["▁रोजी",-13.284870147705078],["▁grū",-13.284875869750977],["რცხ",-13.28487777709961],["ត្រា",-13.284894943237305],["ръст",-13.28490161895752],["ziga",-13.284944534301758],["▁қос",-13.284953117370604],["▁производстве",-13.284955978393556],["▁Quick",-13.284958839416504],["に入って",-13.28496265411377],["tävää",-13.284968376159668],["▁osjeća",-13.284975051879885],["ਥੇ",-13.284977912902832],["▁ouders",-13.284997940063477],["ināja",-13.28500747680664],["nado",-13.285008430480955],["▁slovenski",-13.285014152526855],["orð",-13.285016059875488],["њен",-13.285037994384766],["▁բերել",-13.285037994384766],["▁Giám",-13.285058975219728],["scoil",-13.285061836242676],["عيش",-13.285093307495115],["▁очевидно",-13.28509521484375],["годишна",-13.285099029541016],["▁redact",-13.28510856628418],["▁visión",-13.285161972045898],["▁sì",-13.285168647766112],["ột",-13.285189628601074],["lope",-13.285192489624023],["▁افرادی",-13.285198211669922],["▁Баг",-13.285201072692873],["ባብ",-13.2852144241333],["▁வைக்க",-13.285228729248049],["ushi",-13.285232543945312],["▁Uk",-13.285258293151855],["▁nová",-13.285261154174805],["спу",-13.28526210784912],["▁kolektiv",-13.285266876220703],["蟲",-13.285283088684082],["wyll",-13.285292625427246],["▁എന്തു",-13.285292625427246],["BIT",-13.285300254821776],["ifique",-13.285322189331056],["败",-13.285335540771484],["▁správa",-13.285338401794434],["▁parecer",-13.2853422164917],["ЭМ",-13.285372734069824],["卢",-13.28537368774414],["kalender",-13.28537940979004],["▁प्रतिनिधि",-13.285395622253418],["▁ఎమ్మెల్యే",-13.285396575927734],["▁Mūsų",-13.28539752960205],["▁dumneavoastra",-13.28539752960205],["▁všichni",-13.28539752960205],["▁αγάπη",-13.28539752960205],["▁хуулиа
р",-13.28539752960205],["ฝ้ากระ",-13.285400390625],["▁හැකියාව",-13.285400390625],["▁реалізації",-13.285401344299316],["▁Gòn",-13.28540325164795],["▁luchd",-13.285411834716797],["▁рабіць",-13.285412788391112],["▁companii",-13.28541374206543],["▁хлеб",-13.28543472290039],["摘要",-13.285456657409668],["▁veterinar",-13.28547191619873],["▁Informations",-13.285500526428224],["▁pakub",-13.28550148010254],["▁ਬਾਦਲ",-13.285502433776855],["▁hanc",-13.285518646240234],["▁Â",-13.28553295135498],["▁ਬਾਹਰ",-13.285558700561523],["▁Басқа",-13.285587310791016],["YB",-13.285590171813965],["▁კრი",-13.28560733795166],["▁Tập",-13.285630226135254],["▁اساسی",-13.285634994506836],["ဳိ",-13.285650253295898],["pido",-13.285651206970217],["tool",-13.285664558410645],["▁temsil",-13.285666465759276],["עסק",-13.285672187805176],["▁kasnije",-13.285677909851074],["୧୮",-13.28567886352539],["prakt",-13.285679817199709],["учасник",-13.28571605682373],["▁משפט",-13.285717964172363],["美好的",-13.285730361938477],["▁kender",-13.285751342773438],["と一緒に",-13.285773277282717],["▁rodziny",-13.285776138305664],["态度",-13.28579044342041],["▁erotik",-13.285794258117676],["▁myslí",-13.285795211791992],["စံ",-13.285802841186523],["午後",-13.285808563232422],["▁erilaisia",-13.285816192626951],["”“",-13.28581714630127],["▁coeur",-13.285820960998535],["Է",-13.285822868347168],["فاف",-13.285840034484863],["방법",-13.28584098815918],["ിനും",-13.285843849182127],["▁malzeme",-13.285873413085938],["▁എനിക്ക",-13.285890579223633],["行的",-13.285903930664062],["콜",-13.285906791687012],["▁الأردن",-13.285955429077148],["гласува",-13.285959243774414],["▁yoshlar",-13.28597354888916],["▁հեռուստա",-13.285988807678224],["▁gweld",-13.28602695465088],["▁Frysk",-13.286039352416992],["ไม่รู้",-13.286040306091309],["▁ناز",-13.286060333251951],["▁determinar",-13.286087989807127],["▁opinii",-13.286117553710938],["▁حصل",-13.286145210266112],["70)",-13.286148071289062],["▁ಅಂದ",-13.286152839660645],["wujud",-13.286160469055176],["▁ڄڻ",-13.286172866821287],["▁budov",-13.286177635192873],["▁matéria",-13.28618621826172],["▁addig",-13.286192893981934],["▁wysz",-13.286200523376465],["sicher",-13.286226272583008],["▁Kirke",-13.286258697509766],["▁render",-13.286258697509766],["▁traditional",-13.286259651184082],["කය",-13.286285400390623],["▁kıl",-13.286288261413574],["▁kolik",-13.286304473876951],["▁prezen",-13.286307334899902],["我都",-13.28630828857422],["▁piata",-13.286312103271484],["彩票",-13.2863187789917],["▁tùy",-13.28635025024414],["▁Koti",-13.286371231079102],["▁családi",-13.286396026611328],["機関",-13.28644561767578],["ያዊ",-13.286446571350098],["▁Sicht",-13.28645133972168],["▁pand",-13.286459922790527],["ţional",-13.286473274230955],["▁Italian",-13.286487579345703],["ളം",-13.286502838134766],["ahidi",-13.286504745483398],["kezelő",-13.286510467529297],["▁retning",-13.286519050598145],["ؤمن",-13.28652000427246],["▁grá",-13.28653621673584],["みたいな",-13.28653621673584],["ערי",-13.286569595336914],["▁асуу",-13.286577224731444],["ந்",-13.286602020263672],["▁siguro",-13.28661823272705],["ାସ",-13.286660194396973],["ማዊ",-13.286663055419922],["▁EI",-13.286665916442873],["litz",-13.28667163848877],["▁הדר",-13.286698341369627],["▁الفكر",-13.28670597076416],["唇",-13.286715507507324],["▁తర",-13.286723136901855],["▁поезд",-13.28672695159912],["▁ராஜ",-13.286757469177246],["▁Dje",-13.286760330200195],["▁Misi",-13.28676700592041],["ለያ",-13.2868013381958],["▁felul",-13.28680419921875],["維護",-13.28680419921875],["炮",-13.286822319030762],["lnym",-13.286826133728027],["▁Lac",-13.286849975585938],["▁
carti",-13.286852836608888],["කර්",-13.28685474395752],["▁tworzy",-13.286855697631836],["▁Дев",-13.286869049072266],["गृह",-13.286873817443848],["ŷ",-13.286898612976074],["▁Malagasy",-13.286898612976074],["▁menciptakan",-13.286898612976074],["▁хиляди",-13.286898612976074],["▁յուրաքանչյուր",-13.286898612976074],["▁چیئرمین",-13.286898612976074],["▁অন্যান্য",-13.286898612976074],["▁উদ্ধার",-13.286898612976074],["▁ਵੱਡੀ",-13.286898612976074],["▁disiplin",-13.28690242767334],["▁fresco",-13.28690242767334],["▁negra",-13.28690242767334],["▁বলেছেন",-13.286910057067873],["▁کانفرنس",-13.286917686462402],["▁bevestig",-13.28691864013672],["▁تماما",-13.286937713623049],["▁Schön",-13.286952018737791],["▁hkrati",-13.28695297241211],["▁الاحتلال",-13.286956787109377],["▁vesh",-13.28698444366455],["▁rato",-13.28699779510498],["NEWS",-13.287002563476562],["▁politie",-13.287014961242676],["▁stole",-13.287014961242676],["▁पुगेको",-13.287030220031738],["SEO",-13.287033081054688],["▁המל",-13.287033081054688],["▁бөлігі",-13.287038803100586],["оца",-13.287053108215332],["ėlių",-13.287062644958496],["пить",-13.287062644958496],["ບານ",-13.287068367004396],["bər",-13.28707790374756],["▁специалистов",-13.28708267211914],["▁salvo",-13.287107467651367],["▁kide",-13.28711223602295],["▁tłumacz",-13.28711223602295],["▁курса",-13.287127494812012],["▁କହି",-13.287138938903809],["▁רמת",-13.28714084625244],["▁නොමැති",-13.287184715270996],["hýb",-13.287187576293944],["▁Virgin",-13.287188529968262],["▁BD",-13.287189483642578],["ozni",-13.287191390991213],["▁ସର୍ବ",-13.287193298339844],["قنا",-13.287195205688477],["▁gắng",-13.287203788757324],["▁apartamento",-13.287206649780272],["jėg",-13.28722095489502],["▁proposition",-13.287222862243652],["slova",-13.28724193572998],["موم",-13.28725242614746],["гом",-13.287281036376951],["малы",-13.287293434143066],["▁junta",-13.287302017211914],["μμ",-13.287304878234863],["▁hozir",-13.287304878234863],["▁sog",-13.287311553955078],["▁çıkma",-13.28731918334961],["▁поиск",-13.287322998046877],["Vil",-13.28732681274414],["▁қарым",-13.28734016418457],["ungkap",-13.287370681762695],["指数",-13.287405014038086],["▁bež",-13.2874116897583],["ಧ್",-13.28742218017578],["▁—",-13.28742790222168],["的東西",-13.287429809570312],["shar",-13.287434577941896],["▁Hát",-13.287455558776855],["▁Predstav",-13.287461280822754],["kande",-13.28746509552002],["▁odos",-13.287481307983398],["▁розв",-13.28748607635498],["feito",-13.287520408630373],["▁lielu",-13.287521362304688],["▁kupić",-13.287534713745115],["▁waiting",-13.28754425048828],["▁sete",-13.287545204162598],["▁групе",-13.287545204162598],["▁Dong",-13.287555694580078],["▁վարկ",-13.28756332397461],["▁realizzare",-13.287567138671877],["käy",-13.28758144378662],["▁планина",-13.287583351135254],["ட்ச",-13.287610054016112],["Eesti",-13.28764533996582],["▁Γεν",-13.287649154663086],["pić",-13.2876615524292],["aeg",-13.287668228149414],["▁ماذا",-13.287668228149414],["ïn",-13.287680625915527],["▁काट",-13.287693977355955],["하자",-13.287701606750488],["▁சிவ",-13.287714004516602],["ံုး",-13.287717819213867],["▁Panda",-13.287721633911133],["▁عم",-13.287748336791992],["๊อ",-13.287753105163574],["naigh",-13.287776947021484],["ался",-13.287787437438965],["ເຮັດ",-13.287799835205078],["▁rayonunda",-13.287819862365724],["lagay",-13.287823677062988],["▁أخ",-13.287829399108888],["kış",-13.28783130645752],["▁постійно",-13.287839889526367],["trica",-13.287840843200684],["மெ",-13.28786563873291],["▁تنو",-13.287877082824709],["机遇",-13.287981033325195],["▁שבה",-13.287982940673828],["▁auki",-13.2
88030624389648],["▁सेवन",-13.288061141967772],["▁masalalar",-13.28806209564209],["▁koho",-13.288065910339355],["▁সাধারণ",-13.288084983825684],["Arti",-13.28810977935791],["▁personaggi",-13.288115501403809],["▁mban",-13.288167953491213],["कम्",-13.28817653656006],["▁họa",-13.288188934326172],["تجار",-13.28819751739502],["▁Коле",-13.288223266601562],["sust",-13.28822898864746],["氧",-13.28824520111084],["วิธีการ",-13.288260459899902],["▁ليک",-13.28827667236328],["لاق",-13.28827953338623],["шћу",-13.28829288482666],["▁бейне",-13.28830623626709],["uudessa",-13.28831958770752],["ไต",-13.28833293914795],["▁terang",-13.288335800170898],["祝福",-13.288338661193848],["▁voglia",-13.288349151611328],["细节",-13.288359642028809],["扮演",-13.288371086120604],["▁Aq",-13.288372039794922],["囊",-13.288372039794922],["▁displej",-13.288374900817873],["سىن",-13.288385391235352],["玲",-13.288387298583984],["メンバー",-13.2883939743042],["สมบูรณ์",-13.28840160369873],["▁tecnologie",-13.288402557373049],["▁σχεδόν",-13.288402557373049],["▁руководство",-13.288402557373049],["โต๊ะ",-13.28840446472168],["▁unterstützt",-13.288407325744627],["▁ਤੌਰ",-13.288407325744627],["▁Tijdens",-13.288408279418944],["▁Međutim",-13.288410186767578],["▁priložnost",-13.288411140441896],["▁חינם",-13.288411140441896],["▁النفط",-13.288411140441896],["▁ගුරු",-13.288412094116213],["▁KATIKA",-13.288414001464844],["▁chiếm",-13.288414001464844],["เส้นทาง",-13.28841781616211],["▁senda",-13.288423538208008],["▁Begitu",-13.288427352905272],["▁menyukai",-13.288427352905272],["েক",-13.288430213928224],["▁madala",-13.28843116760254],["rbh",-13.288436889648438],["▁भण्डारी",-13.288437843322754],["▁Följ",-13.288492202758787],["▁الجهاز",-13.288492202758787],["▁svetainėje",-13.288500785827637],["▁เอา",-13.288538932800291],["▁Schwarz",-13.288555145263672],["โจ",-13.28858757019043],["▁mørke",-13.28859806060791],["▁matahari",-13.288606643676758],["คับ",-13.28861141204834],["▁սպան",-13.288617134094238],["▁Morten",-13.28862762451172],["▁Déan",-13.288640975952148],["▁jakim",-13.288654327392578],["▁빠",-13.28868579864502],["burð",-13.288688659667969],["とのこと",-13.288731575012209],["isyon",-13.288743019104004],["ต้า",-13.288747787475586],["▁reasons",-13.288758277893066],["ិច",-13.2887601852417],["pyr",-13.288773536682127],["▁واقعہ",-13.288779258728027],["▁құру",-13.288780212402344],["klinik",-13.288785934448242],["▁allar",-13.288785934448242],["▁стек",-13.288819313049316],["▁монет",-13.288830757141112],["▁tweet",-13.28883171081543],["▁сі",-13.288884162902832],["▁Helena",-13.288902282714844],["山东",-13.28890323638916],["▁Свето",-13.288912773132324],["▁Lúc",-13.288914680480955],["▁Salud",-13.288948059082031],["▁dure",-13.288949966430664],["▁Kusini",-13.289013862609863],["▁pwy",-13.289016723632812],["▁อย่า",-13.289051055908203],["algun",-13.289068222045898],["▁дать",-13.289074897766112],["್ಯೂ",-13.289102554321287],["▁공연",-13.289107322692873],["▁tampil",-13.289118766784668],["▁candidatos",-13.289142608642578],["▁ფორმა",-13.289156913757324],["▁ಇವರ",-13.289162635803224],["zino",-13.28918170928955],["09.20",-13.289203643798828],["▁Makamu",-13.289203643798828],["нзи",-13.289215087890623],["aðila",-13.289228439331056],["▁sekta",-13.28923511505127],["が出",-13.289237022399902],["▁propios",-13.289251327514648],["ਛ",-13.289278984069824],["▁বো",-13.289280891418455],["的一种",-13.289281845092772],["▁ambalo",-13.289297103881836],["▁አይነት",-13.289319038391112],["是一種",-13.28932285308838],["▁stāv",-13.28934383392334],["klat",-13.289372444152832],["دھی",-13.289390563964844],["၁၇",-13.28940486907959],[
"прияти",-13.289406776428224],["▁لیں",-13.289426803588867],["▁мање",-13.289434432983398],["▁ampliar",-13.289437294006348],["▁படை",-13.289457321166992],["▁апел",-13.289499282836914],["yttä",-13.289545059204102],["▁właściw",-13.28954792022705],["eczka",-13.28956699371338],["▁نموده",-13.289636611938477],["▁napoved",-13.28968906402588],["▁لکھا",-13.28972625732422],["▁arasındaki",-13.289770126342772],["킨",-13.28977394104004],["▁muutaman",-13.289780616760254],["▁belles",-13.289806365966797],["▁ცნობილი",-13.289813041687012],["來看",-13.289835929870604],["萌",-13.289837837219238],["▁inhoud",-13.28984832763672],["▁budi",-13.289860725402832],["▁зориул",-13.289889335632324],["▁aming",-13.28989028930664],["▁realizadas",-13.28989028930664],["▁کنه",-13.289894104003906],["ማው",-13.289904594421388],["▁üzlet",-13.289905548095703],["▁Работа",-13.289907455444336],["▁Obligatorisk",-13.289908409118652],["▁iedzīvotāju",-13.289908409118652],["▁oppdatert",-13.289908409118652],["▁zufrieden",-13.289908409118652],["▁πρόκειται",-13.289908409118652],["▁произошло",-13.289908409118652],["▁ಸಂಪರ್ಕ",-13.289908409118652],["▁гурван",-13.289909362792969],["▁стадион",-13.289909362792969],["▁সংসদ",-13.289909362792969],["▁cưới",-13.28991413116455],["▁פעולה",-13.28991413116455],["▁ትልቅ",-13.289916038513184],["▁навіны",-13.289918899536133],["▁Nielsen",-13.28991985321045],["▁والدین",-13.289923667907717],["יכות",-13.28992748260498],["ត្រី",-13.28993797302246],["永遠",-13.289939880371094],["▁ټکی",-13.289949417114258],["ύγ",-13.289957046508787],["▁ಜೊತೆಗೆ",-13.289959907531738],["▁그림",-13.290016174316406],["entorn",-13.29002857208252],["すでに",-13.29002857208252],["▁KW",-13.290051460266112],["更高",-13.290067672729492],["▁parmi",-13.290081977844238],["▁Corp",-13.290087699890137],["▁esimene",-13.29010009765625],["▁otti",-13.290109634399414],["▁especifica",-13.290120124816896],["▁везе",-13.290127754211426],["стане",-13.290128707885742],["▁පක්ෂ",-13.290139198303224],["西安",-13.290173530578612],["▁Aina",-13.29017448425293],["ంచి",-13.290178298950195],["▁Турция",-13.29018783569336],["▁chcą",-13.29019260406494],["ufa",-13.29019546508789],["▁acelasi",-13.290199279785156],["▁راحت",-13.290207862854004],["תינוק",-13.290234565734863],["▁relevan",-13.290242195129396],["▁Aar",-13.290255546569824],["もいい",-13.290271759033203],["▁Велика",-13.290282249450684],["ící",-13.290301322937012],["მით",-13.290301322937012],["▁komite",-13.290306091308594],["liyor",-13.290318489074709],["▁Трет",-13.2903413772583],["ុល",-13.29034423828125],["▁Domov",-13.290356636047363],["接到",-13.290358543395996],["kylä",-13.290377616882324],["▁felly",-13.290390014648438],["wada",-13.290443420410156],["▁Apps",-13.290474891662598],["organizzazione",-13.29047679901123],["▁දුන්",-13.290517807006836],["obla",-13.290521621704102],["llab",-13.29052734375],["شيد",-13.290528297424316],["▁Exam",-13.290535926818848],["▁König",-13.290555953979492],["yön",-13.290563583374023],["DON",-13.290570259094238],["供應",-13.290574073791504],["▁مىڭ",-13.290582656860352],["izao",-13.290605545043944],["▁partners",-13.29061508178711],["အရေး",-13.290620803833008],["дневна",-13.29063606262207],["6.000",-13.290637969970703],["?!?",-13.290647506713867],["ფო",-13.290672302246094],["LAB",-13.290714263916016],["ลล์",-13.290714263916016],["ψει",-13.290719985961914],["▁passou",-13.29075527191162],["▁säng",-13.290782928466797],["▁fully",-13.290815353393556],["ธา",-13.29081916809082],["▁էի",-13.290848731994627],["αίου",-13.290881156921388],["いれば",-13.290891647338867],["▁utstyr",-13.29089641571045],["tyksen",-13.290924072265623],["▁
bîr",-13.290924072265623],["▁Цей",-13.290937423706056],["▁kasoo",-13.290952682495115],["▁Алар",-13.29095458984375],["кани",-13.290961265563965],["ésre",-13.290969848632812],["▁ΑΠΟ",-13.29099464416504],["▁Woo",-13.290996551513672],["▁нож",-13.291010856628418],["▁licita",-13.29101276397705],["larna",-13.291013717651367],["▁ይሄ",-13.291021347045898],["▁Чы",-13.291043281555176],["▁причине",-13.29105281829834],["▁خارجه",-13.29108715057373],["lotte",-13.291123390197754],["▁TAL",-13.291126251220703],["իդ",-13.291136741638184],["คุณจะ",-13.291145324707031],["▁betekent",-13.291162490844728],["▁rekla",-13.291168212890623],["ոդ",-13.291175842285156],["utum",-13.291186332702637],["▁לקוחות",-13.291204452514648],["▁கவன",-13.291226387023926],["▁Ой",-13.291234016418455],["▁Англи",-13.291254043579102],["城市的",-13.29128646850586],["▁برگ",-13.291287422180176],["▁Skole",-13.291295051574709],["euses",-13.2913236618042],["ที่น่าสนใจ",-13.29133129119873],["▁tales",-13.291332244873049],["валіся",-13.291340827941896],["疏",-13.29135513305664],["▁Jr",-13.291363716125488],["italia",-13.291377067565918],["否则",-13.291386604309082],["坑",-13.291396141052246],["łożyć",-13.291404724121094],["▁Ergo",-13.291406631469728],["▁حڪم",-13.291407585144045],["ぐらい",-13.291415214538574],["Andriamanitra",-13.291417121887209],["▁penjelasan",-13.291417121887209],["▁رعایت",-13.291417121887209],["▁چارواکو",-13.291417121887209],["▁چوہدری",-13.291417121887209],["▁पहिल्या",-13.291417121887209],["▁Электрон",-13.291418075561523],["▁бірнеше",-13.291418075561523],["▁ಫೋಟೋ",-13.291418075561523],["▁genutzt",-13.291419982910156],["▁ખુબ",-13.291419982910156],["▁lửa",-13.291420936584473],["maken",-13.291424751281738],["tidak",-13.291431427001951],["▁kook",-13.2914400100708],["pozor",-13.291464805603027],["▁šalių",-13.291464805603027],["▁ናቸው፡፡",-13.291464805603027],["▁terkena",-13.29146957397461],["▁seguenti",-13.291481971740724],["▁MG",-13.291484832763672],["thala",-13.291491508483888],["▁سف",-13.29150390625],["▁negatif",-13.29150676727295],["▁симбол",-13.29150676727295],["verzekering",-13.291508674621582],["▁niềm",-13.291508674621582],["▁tehnyt",-13.291521072387695],["kleri",-13.291523933410645],["▁אולם",-13.291526794433594],["▁njimi",-13.291531562805176],["▁ರಂದು",-13.291536331176758],["▁уурхайн",-13.29155158996582],["폭",-13.29155445098877],["▁градови",-13.291568756103516],["▁fieri",-13.291587829589844],["κρά",-13.291593551635742],["поў",-13.291597366333008],["חוץ",-13.291608810424805],["▁Menga",-13.2916259765625],["ोज",-13.291638374328612],["▁Mey",-13.29164218902588],["▁осе",-13.291658401489258],["22)",-13.291672706604004],["▁segnala",-13.291672706604004],["တယ်လို့",-13.29167652130127],["ketju",-13.291680335998535],["tamista",-13.291687965393066],["▁mame",-13.291698455810549],["▁gitu",-13.29172134399414],["умен",-13.291736602783203],["▁Leonardo",-13.291742324829102],["cën",-13.291744232177734],["guar",-13.291748046875],["▁مدرسه",-13.291815757751465],["▁ән",-13.291831016540527],["▁Agenda",-13.291833877563477],["▁Bolo",-13.291833877563477],["▁Mies",-13.291866302490234],["viennent",-13.291884422302246],["▁статия",-13.29189682006836],["▁Nutri",-13.291906356811523],["▁şirkəti",-13.291925430297852],["▁innym",-13.29194164276123],["škem",-13.291949272155762],["话题",-13.291963577270508],["▁keret",-13.29197597503662],["▁κε",-13.29200553894043],["तै",-13.292020797729492],["▁Hui",-13.292048454284668],["▁снимка",-13.292113304138184],["▁yazılı",-13.2921142578125],["▁Mendi",-13.292116165161133],["▁يېزى",-13.29213809967041],["▁0.0",-13.292142868041992],["แปลก",-13.29214
9543762209],["▁paki",-13.292155265808104],["▁hayal",-13.292160987854004],["osabb",-13.292165756225586],["▁Kör",-13.292186737060549],["imizi",-13.29218864440918],["▁píše",-13.292192459106444],["ήσω",-13.29220485687256],["▁participan",-13.29222297668457],["ေလးေတြ",-13.292236328125],["▁Sharma",-13.292244911193848],["▁establece",-13.292248725891112],["കർ",-13.292250633239746],["▁akut",-13.292298316955566],["เรื่องราว",-13.292302131652832],["▁viņiem",-13.292319297790527],["izzata",-13.29232692718506],["▁kullanım",-13.292329788208008],["strom",-13.292332649230955],["▁процеса",-13.292332649230955],["之路",-13.292390823364258],["fore",-13.292396545410156],["മേഖല",-13.292397499084473],["▁Viol",-13.292399406433104],["▁мера",-13.292418479919434],["urban",-13.292426109313965],["▁նախարարության",-13.29245376586914],["▁rileva",-13.292455673217772],["шні",-13.292463302612305],["മായും",-13.29246425628662],["660",-13.292466163635254],["▁კოლ",-13.292471885681152],["upu",-13.29248046875],["ધુ",-13.292531967163086],["knek",-13.292537689208984],["անուն",-13.292550086975098],["▁Monster",-13.292561531066896],["▁focal",-13.292566299438477],["အသံ",-13.292572021484377],["▁Advent",-13.292572975158691],["▁beleza",-13.292587280273438],["▁colpi",-13.29259204864502],["▁Ձ",-13.292609214782717],["▁terapeut",-13.29265022277832],["只需",-13.29265308380127],["▁بەر",-13.292656898498535],["▁33.",-13.292659759521484],["▁Never",-13.29266357421875],["▁זר",-13.292665481567385],["手法",-13.292695045471191],["▁ääre",-13.292695999145508],["▁ويل",-13.292717933654783],["▁Bodi",-13.292733192443848],["▁Views",-13.292741775512695],["still",-13.29276180267334],["▁tvo",-13.292765617370604],["▁демонстра",-13.292766571044922],["иясы",-13.292768478393556],["▁нарича",-13.292803764343262],["▁السن",-13.292834281921388],["લાલ",-13.292844772338867],["▁اوپر",-13.292859077453612],["▁ruke",-13.29287052154541],["Fer",-13.292887687683104],["腾",-13.292895317077637],["涂",-13.292914390563965],["தல்",-13.29291820526123],["▁vizita",-13.292922019958496],["▁žmo",-13.292924880981444],["ถุง",-13.292925834655762],["ŕ",-13.292926788330078],["កំពូល",-13.292926788330078],["▁Yüksek",-13.292926788330078],["ေစ်း",-13.292927742004396],["▁indonesia",-13.292927742004396],["▁insgesamt",-13.292927742004396],["▁ترمنځ",-13.292927742004396],["▁مسئلہ",-13.292927742004396],["▁ऊर्जा",-13.292927742004396],["▁සල්ලි",-13.292928695678713],["▁સુંદર",-13.292930603027344],["▁lichaam",-13.292932510375977],["▁herêmê",-13.292933464050291],["▁chậm",-13.292940139770508],["▁veciños",-13.292943000793455],["▁참석",-13.292943000793455],["ęć",-13.292949676513672],["▁ansambl",-13.292949676513672],["▁Profesor",-13.292960166931152],["ેલી",-13.29296588897705],["▁drzwi",-13.292975425720217],["bán",-13.292976379394531],["不容易",-13.292978286743164],["▁valodā",-13.292983055114746],["▁वस्तु",-13.292997360229492],["▁Дэлхийн",-13.293002128601074],["んだけど",-13.293021202087402],["▁Birinci",-13.293023109436035],["00€",-13.293025016784668],["▁whatever",-13.2930269241333],["▁heyecan",-13.29302978515625],["époque",-13.293038368225098],["ታል፡፡",-13.293039321899414],["այլ",-13.293042182922363],["ন্না",-13.29304313659668],["راحة",-13.293070793151855],["▁напада",-13.29307460784912],["шлі",-13.293088912963867],["▁पाटील",-13.293095588684082],["▁എത്തിയ",-13.293095588684082],["▁baan",-13.293118476867676],["▁Perfekt",-13.293126106262209],["ស្តី",-13.293133735656738],["▁बेच",-13.293137550354004],["▁Яго",-13.29314136505127],["نغ",-13.29315948486328],["▁السم",-13.29318904876709],["▁klarer",-13.293193817138672],["▁επανα",-13.2932109832763
67],["▁រថយន្ត",-13.293218612670898],["משמע",-13.293221473693848],["▁Què",-13.29322624206543],["ത്തിലേക്ക്",-13.29324436187744],["▁testen",-13.293258666992188],["▁lago",-13.293261528015137],["▁Spiele",-13.293262481689451],["▁Дей",-13.293272018432615],["▁kion",-13.293313026428224],["▁distr",-13.293316841125488],["▁Clique",-13.293327331542969],["我会",-13.29333209991455],["▁жыць",-13.293339729309082],["▁Vincent",-13.293347358703612],["식품",-13.29343318939209],["ዐ",-13.29347324371338],["ବର୍ଷ",-13.293478965759276],["▁кров",-13.293486595153809],["▁173",-13.29349422454834],["bind",-13.293496131896973],["▁عهد",-13.293499946594238],["ήσετε",-13.293508529663086],["▁عذاب",-13.293530464172363],["▁органдары",-13.293535232543944],["ుని",-13.293545722961426],["▁широк",-13.29354763031006],["խմբագրել",-13.293550491333008],["▁წესი",-13.2935791015625],["▁porter",-13.29358196258545],["▁brud",-13.293588638305664],["ורד",-13.293618202209473],["cted",-13.293628692626951],["▁souhait",-13.29362964630127],["უტ",-13.293635368347168],["▁katu",-13.293638229370115],["▁მონა",-13.293681144714355],["▁skončil",-13.293731689453123],["עובד",-13.293733596801758],["▁Лет",-13.293739318847656],["▁Александра",-13.293745040893556],["కారం",-13.293747901916504],["▁Ավ",-13.293766021728516],["setting",-13.293784141540527],["▁fertig",-13.293785095214844],["▁rodzic",-13.293785095214844],["▁kunto",-13.293828964233398],["▁النار",-13.29386043548584],["ssem",-13.293864250183104],["ધિ",-13.293868064880373],["▁Tallinnas",-13.293868064880373],["▁niciun",-13.293904304504396],["▁фудбал",-13.29392433166504],["▁сабе",-13.293927192687988],["▁fhe",-13.293936729431152],["要有",-13.293937683105469],["▁melding",-13.29395580291748],["수를",-13.29395580291748],["▁Deutsch",-13.293974876403809],["▁musiqi",-13.293981552124023],["▁ecco",-13.293986320495604],["40%",-13.29401683807373],["▁CEN",-13.294034957885742],["ívá",-13.29404067993164],["UTU",-13.29405117034912],["▁Шинэ",-13.29405403137207],["▁целе",-13.294061660766602],["见到",-13.294078826904297],["보세요",-13.294089317321776],["▁Polen",-13.294096946716309],["வித",-13.2941255569458],["▁Blond",-13.294139862060549],["kreis",-13.294145584106444],["▁бағдарлама",-13.294178009033203],["▁cielo",-13.29419994354248],["▁දවස",-13.294208526611328],["▁natus",-13.294251441955566],["▁ĝia",-13.294260025024414],["३२",-13.294270515441896],["trekk",-13.29428768157959],["кладання",-13.29428768157959],["ndamise",-13.294296264648438],["▁zengin",-13.29432487487793],["▁график",-13.294333457946776],["▁nümayəndə",-13.294352531433104],["INN",-13.294379234313965],["憂",-13.294405937194824],["▁özellikleri",-13.294426918029783],["▁तस्वीर",-13.294428825378418],["広告",-13.294435501098633],["កីឡា",-13.294438362121582],["▁сілтеме",-13.294439315795898],["▁Sarajevu",-13.294440269470217],["▁Stuttgart",-13.294440269470217],["▁Νοεμβρίου",-13.294440269470217],["▁বৃহস্পতিবার",-13.294440269470217],["▁ସୃଷ୍ଟି",-13.294440269470217],["▁స్మార్ట్",-13.294440269470217],["▁ለማድረግ",-13.294440269470217],["ဆောင်ရွက်",-13.294441223144531],["▁këmbë",-13.294442176818848],["▁Nhiều",-13.29444408416748],["▁कॉलेज",-13.29444408416748],["วิทยาศาสตร์",-13.294445037841797],["▁بیمارستان",-13.294448852539062],["▁naršy",-13.294450759887695],["▁लहान",-13.294452667236328],["▁ఇలాంటి",-13.29445457458496],["годишен",-13.29446029663086],["▁musel",-13.29446506500244],["▁ႏိုင္ငံ",-13.294466018676758],["▁항상",-13.294471740722656],["▁Gwasanaethau",-13.29448413848877],["▁přístup",-13.294488906860352],["▁Две",-13.29449462890625],["vette",-13.294498443603516],["一場",-13.294522285461426],["▁messages"
,-13.29452896118164],["▁ჟ",-13.294532775878906],["▁Records",-13.294561386108398],["▁Electric",-13.29457950592041],["▁Sne",-13.294584274291992],["▁кезең",-13.294588088989258],["▁клиента",-13.294599533081056],["cred",-13.294601440429688],["日报",-13.2946138381958],["▁bygget",-13.294647216796877],["▁รวมถึง",-13.294682502746582],["▁Dagbladet",-13.29470157623291],["▁Эми",-13.294703483581545],["▁Szám",-13.294750213623049],["knut",-13.29477596282959],["tsar",-13.294779777526855],["צער",-13.294782638549805],["▁absolute",-13.29478359222412],["ŵ",-13.294790267944336],["ட்டில்",-13.294804573059082],["▁تری",-13.294805526733398],["▁مهدی",-13.294816970825195],["yttää",-13.294821739196776],["▁cahaya",-13.294844627380373],["▁dậy",-13.294855117797852],["天堂",-13.29486083984375],["لىدى",-13.2948637008667],["▁ఆలోచన",-13.2948637008667],["▁шляхом",-13.294866561889648],["▁Zbog",-13.294867515563965],["TIL",-13.29487419128418],["强烈",-13.294877052307127],["ің",-13.29490566253662],["▁propagand",-13.29493236541748],["▁తాను",-13.29493522644043],["▁않았다",-13.294943809509276],["ctio",-13.29495620727539],["▁Rydym",-13.294958114624023],["▁istilah",-13.294987678527832],["bilità",-13.294998168945312],["läge",-13.29500675201416],["▁សិទ្ធិ",-13.295084953308104],["ddwch",-13.295089721679688],["کشن",-13.295093536376951],["ပိုင်",-13.295116424560549],["▁адил",-13.295116424560549],["ճար",-13.295119285583496],["▁چين",-13.295120239257812],["▁podrás",-13.295132637023926],["▁Ministerul",-13.295146942138672],["▁ባይ",-13.295151710510254],["ീകരിച്ച",-13.29515266418457],["▁നവ",-13.295154571533203],["када",-13.295180320739746],["daug",-13.295190811157228],["มาแล้ว",-13.295210838317873],["bull",-13.295225143432615],["▁cais",-13.295228958129885],["კარ",-13.295244216918944],["▁هوای",-13.295245170593262],["▁والص",-13.295257568359377],["▁vetur",-13.295265197753906],["▁анда",-13.29527473449707],["有利",-13.295275688171388],["▁excess",-13.29528522491455],["▁ור",-13.295330047607422],["▁ड्र",-13.295336723327637],["▁avo",-13.295345306396484],["تمام",-13.295355796813965],["▁faites",-13.295357704162598],["▁Mentre",-13.295379638671877],["▁Zv",-13.295391082763672],["▁Kanun",-13.29541301727295],["tický",-13.295432090759276],["بخش",-13.29544448852539],["▁regla",-13.295449256896973],["▁politički",-13.29545783996582],["ציל",-13.295461654663086],["▁leitura",-13.295485496520996],["▁срока",-13.295515060424805],["ALU",-13.295517921447754],["▁झु",-13.295530319213867],["atau",-13.295562744140623],["гөө",-13.295568466186523],["▁ጨ",-13.295576095581056],["ټر",-13.295595169067385],["▁pieder",-13.295613288879396],["▁द्वि",-13.295619010925291],["nicze",-13.295625686645508],["ວົງ",-13.295645713806152],["かない",-13.295645713806152],["betul",-13.295665740966797],["▁sectores",-13.295671463012695],["▁σύ",-13.295680046081545],["▁televisión",-13.295695304870604],["وڑ",-13.295716285705566],["▁(32)",-13.295724868774414],["▁Любо",-13.295732498168944],["▁съдържание",-13.295759201049805],["▁ေမာင္",-13.29576015472412],["▁generar",-13.295763969421388],["んだよ",-13.295769691467283],["▁ඔව්",-13.295780181884766],["▁BM",-13.295809745788574],["▁опште",-13.295815467834473],["qî",-13.295846939086914],["▁ຈ",-13.29584789276123],["是中国",-13.29587173461914],["겠다",-13.29588222503662],["▁участка",-13.295890808105469],["твърд",-13.295900344848633],["▁Proses",-13.295917510986328],["솔",-13.295921325683594],["女兒",-13.295923233032228],["并非",-13.295924186706545],["閣",-13.29592514038086],["▁тэрыторыі",-13.295955657958984],["▁त्यस्तै",-13.295955657958984],["▁গল্প",-13.295955657958984],["▁ਪਰਿਵਾਰ",-13.29595565795898
4],["eynta",-13.295957565307615],["▁نظیر",-13.295958518981934],["▁결혼",-13.295960426330566],["▁spotkanie",-13.295963287353516],["▁қалыптастыру",-13.295964241027832],["▁tashqari",-13.295965194702148],["▁Glad",-13.295971870422363],["▁കാണാം",-13.295989990234377],["▁انھوں",-13.295992851257324],["▁ניהול",-13.295995712280272],["тимуть",-13.295998573303224],["▁Display",-13.296012878417969],["▁Bahan",-13.296016693115234],["▁płyn",-13.296034812927246],["▁областта",-13.296053886413574],["ుకోవడం",-13.296060562133787],["▁πάρει",-13.296080589294434],["ልና",-13.296120643615724],["▁Anfrage",-13.296127319335938],["▁учетом",-13.296131134033203],["деген",-13.296137809753418],["фини",-13.296138763427734],["▁бидний",-13.296162605285645],["▁техники",-13.296164512634276],["▁tường",-13.296170234680176],["▁அமைச்சர்",-13.29617691040039],["▁аген",-13.296187400817873],["▁Jocuri",-13.29621124267578],["▁Ronald",-13.296236038208008],["▁bakım",-13.296239852905272],["▁opinie",-13.296244621276855],["quat",-13.296268463134766],["сул",-13.29628086090088],["▁Nazir",-13.29629135131836],["ittely",-13.29630184173584],["ЛЯ",-13.296310424804688],["▁davom",-13.296314239501951],["▁milyard",-13.296320915222168],["▁Dzie",-13.2963228225708],["တည်",-13.296337127685549],["▁बांध",-13.296337127685549],["▁пароль",-13.29635524749756],["▁گذار",-13.29636573791504],["▁एड",-13.296371459960938],["▁තිබුන",-13.296372413635254],["זמן",-13.296375274658203],["▁línia",-13.29640769958496],["uncu",-13.296408653259276],["▁diversa",-13.296419143676758],["াদ",-13.296430587768556],["축제",-13.296475410461426],["▁imaxe",-13.29647731781006],["▁selber",-13.296479225158691],["žívá",-13.296493530273438],["▁ေန႔စဥ္",-13.296502113342283],["▁проник",-13.296507835388184],["עביר",-13.296510696411133],["▁կազմում",-13.29651927947998],["▁ไม่ต้อง",-13.296552658081056],["▁Ruhe",-13.29655647277832],["てる",-13.296561241149902],["もあり",-13.296565055847168],["▁începe",-13.29656982421875],["▁modest",-13.296587944030762],["▁começou",-13.296597480773926],["ğında",-13.296615600585938],["మై",-13.296615600585938],["▁Злат",-13.296615600585938],["ກ່າວ",-13.296631813049316],["▁કાર",-13.29665470123291],["zita",-13.296664237976074],["▁2,2",-13.296666145324709],["▁sección",-13.296710968017578],["ciendo",-13.296720504760742],["▁dejt",-13.296733856201172],["▁pese",-13.29673671722412],["ዮን",-13.296762466430664],["▁grå",-13.296765327453612],["▁meteen",-13.29676628112793],["▁ouvert",-13.296767234802246],["၀င္း",-13.296785354614258],["▁Gori",-13.296785354614258],["▁rigtige",-13.296791076660156],["▁vaikai",-13.296792984008787],["▁มหาวิทยาลัย",-13.296799659729004],["▁vaske",-13.296805381774902],["οτ",-13.296823501586914],["▁Laba",-13.296833038330078],["▁سنگین",-13.29685115814209],["▁მაღალი",-13.296855926513672],["▁Kiri",-13.296862602233888],["▁Engine",-13.296880722045898],["medlem",-13.296907424926758],["▁ፀ",-13.296908378601074],["商務",-13.296923637390137],["▁Жыл",-13.296935081481934],["രായ",-13.29694366455078],["gı",-13.296954154968262],["包装",-13.296977043151855],["▁Tina",-13.296979904174805],["arth",-13.296991348266602],["▁нити",-13.29699420928955],["▁بالإ",-13.2969970703125],["οβ",-13.297048568725586],["▁pojavi",-13.297053337097168],["▁මැර",-13.297056198120115],["▁dideli",-13.297088623046877],["event",-13.29709529876709],["▁ገብ",-13.297159194946287],["staande",-13.297164916992188],["▁ಮುಖ",-13.297207832336426],["▁tegele",-13.29720973968506],["吃的",-13.29721736907959],["▁FRA",-13.29725456237793],["▁фарма",-13.297264099121094],["קישור",-13.297306060791016],["▁келиш",-13.297323226928713],["▁halla",-13.29735
279083252],["्ट",-13.297356605529783],["▁հայտարարել",-13.297365188598633],["▁خالی",-13.29738426208496],["▁posibilidades",-13.297390937805176],["▁Poza",-13.297407150268556],["编辑",-13.297430038452148],["ಕರು",-13.297443389892578],["ႊ",-13.297462463378906],["▁wszelkie",-13.29747200012207],["ᄒᄒ",-13.297472953796388],["▁Kikwete",-13.297472953796388],["▁liebe",-13.297472953796388],["▁služeb",-13.297472953796388],["▁багатьох",-13.297472953796388],["▁पंजाब",-13.297472953796388],["▁नौकरी",-13.29747486114502],["▁Ийм",-13.297478675842283],["▁באזור",-13.297484397888184],["іўся",-13.2974853515625],["▁trotzdem",-13.297487258911133],["▁Slovenskej",-13.297490119934082],["2]",-13.297491073608398],["▁ślub",-13.297491073608398],["▁زړه",-13.297492027282717],["▁Poder",-13.297493934631348],["үлгөн",-13.297494888305664],["▁დამატებითი",-13.297497749328612],["▁keeles",-13.297505378723145],["▁végül",-13.297508239746094],["▁automobilių",-13.297513961791992],["▁басейн",-13.29751682281494],["▁Hace",-13.297526359558104],["▁Khách",-13.2975435256958],["ສິດ",-13.297545433044434],["▁élèves",-13.297551155090332],["▁mengubah",-13.297555923461914],["حاسب",-13.297564506530762],["▁tavoitt",-13.297564506530762],["▁soco",-13.297567367553713],["▁ഭരണ",-13.297569274902344],["▁आधी",-13.297577857971191],["skipulag",-13.297588348388672],["▁выполнен",-13.297607421875],["▁stål",-13.297650337219238],["gaba",-13.297656059265137],["repet",-13.297667503356934],["žnost",-13.2976713180542],["いただいて",-13.297673225402832],["▁सकिने",-13.297677040100098],["寻",-13.297677040100098],["▁Palo",-13.297684669494627],["▁Waren",-13.297723770141602],["▁ناس",-13.297728538513184],["▁Luther",-13.29773998260498],["ORO",-13.297755241394045],["▁lượt",-13.297759056091309],["▁яд",-13.297759056091309],["▁முடியும்",-13.297759056091309],["▁అతి",-13.297760009765623],["▁określ",-13.29777717590332],["ЭГ",-13.297794342041016],["▁xira",-13.297811508178713],["YD",-13.297813415527344],["▁české",-13.297813415527344],["▁исполни",-13.297821998596191],["▁азыр",-13.297831535339355],["გვა",-13.297834396362305],["approche",-13.297869682312012],["▁οργαν",-13.297879219055176],["▁justru",-13.297880172729492],["▁बनेको",-13.297900199890137],["▁retire",-13.297913551330566],["▁aklı",-13.297928810119627],["▁дро",-13.297938346862791],["▁жүрген",-13.297968864440918],["θώ",-13.298002243041992],["шката",-13.298004150390623],["▁погиб",-13.29801082611084],["పోయిన",-13.298032760620115],["▁ekonomiko",-13.29804229736328],["поло",-13.298052787780762],["▁tantas",-13.298065185546877],["なって",-13.298073768615724],["▁ceno",-13.298077583312988],["▁stanju",-13.298087120056152],["▁кожна",-13.298099517822266],["ÇÃO",-13.298105239868164],["▁teadus",-13.29810619354248],["▁fremtid",-13.298129081726074],["szego",-13.298151016235352],["जित",-13.298168182373049],["▁कक्षा",-13.298192024230955],["▁resultaat",-13.29822063446045],["▁hvit",-13.298221588134766],["продаж",-13.298233032226562],["ವಿಲ್ಲ",-13.298236846923828],["▁pohár",-13.298242568969728],["兒子",-13.298245429992676],["ျဖတ္",-13.2982759475708],["σπα",-13.29830551147461],["kologi",-13.29832649230957],["rygg",-13.298336029052734],["▁strength",-13.298344612121582],["▁ruchu",-13.298349380493164],["▁zouden",-13.29837131500244],["ိုး",-13.2983980178833],["stamine",-13.298416137695312],["TAI",-13.298421859741213],["▁didelė",-13.298437118530272],["łó",-13.298447608947754],["▁normalmente",-13.29847526550293],["▁Фер",-13.298481941223145],["▁valko",-13.29848575592041],["åring",-13.298500061035156],["▁Orden",-13.298513412475586],["▁szerző",-13.298523902893066],["▁የበ",-13.29
8523902893066],["▁maisto",-13.298532485961914],["▁elin",-13.29853343963623],["▁δημιουργία",-13.298543930053713],["jaid",-13.298554420471191],["▁Benz",-13.29858684539795],["തല",-13.298635482788086],["سعى",-13.298672676086426],["യല്ല",-13.298672676086426],["▁kellene",-13.298672676086426],["India",-13.29868507385254],["ว่าเป็น",-13.29868507385254],["լու",-13.298686981201172],["▁adapté",-13.298694610595703],["▁gund",-13.298694610595703],["▁asema",-13.298698425292969],["▁ברו",-13.29871654510498],["▁ОП",-13.298751831054688],["لها",-13.298754692077637],["rein",-13.298776626586914],["შავ",-13.298776626586914],["▁стои",-13.298794746398926],["▁ОР",-13.29880142211914],["抜",-13.298809051513672],["▁illis",-13.29882526397705],["▁മരിച്ച",-13.298836708068848],["▁آفر",-13.298852920532228],["!(",-13.298861503601074],["▁הנה",-13.298905372619627],["▁انکار",-13.298916816711426],["敲",-13.298922538757324],["bulu",-13.298934936523438],["纷纷",-13.298935890197754],["祥",-13.298951148986816],["oloogia",-13.298954963684082],["เติม",-13.298979759216309],["ვების",-13.298983573913574],["指标",-13.298983573913574],["materi",-13.298991203308104],["▁prostredníctvom",-13.298992156982422],["▁τουλάχιστον",-13.298992156982422],["▁χρειάζεται",-13.298992156982422],["▁මිනිසුන්",-13.298992156982422],["▁ሚሊዮን",-13.298992156982422],["▁υπηρεσίες",-13.298993110656738],["▁հաճախ",-13.298993110656738],["▁Sfânt",-13.298994064331056],["▁bambino",-13.298995018005373],["▁گهڻو",-13.298995018005373],["▁Riigikogu",-13.298995971679688],["▁försöka",-13.298995971679688],["ไฮ",-13.298999786376951],["▁مشابه",-13.29900074005127],["▁poprzedni",-13.29900360107422],["▁સર્વ",-13.299012184143066],["ប្រជាជន",-13.299020767211914],["▁ඉල්ලා",-13.299025535583496],["▁istiqamətində",-13.299026489257812],["къл",-13.29904079437256],["ព្យ",-13.299042701721191],["пісаў",-13.299043655395508],["▁hve",-13.299054145812988],["▁уулзалт",-13.29905891418457],["▁Στι",-13.299072265625],["▁dingin",-13.299083709716797],["▁zdravlje",-13.29908561706543],["▁مقبول",-13.299092292785645],["▁svæðinu",-13.29909324645996],["▁nakna",-13.299101829528809],["ච්ච",-13.299131393432615],["▁이날",-13.29916000366211],["▁Direktor",-13.299168586730955],["mæli",-13.299176216125488],["▁velikosti",-13.299184799194336],["세대",-13.299221992492676],["▁одобрен",-13.29923152923584],["дос",-13.299232482910156],["RING",-13.299243927001951],["kkeet",-13.299270629882812],["ਆਈ",-13.299274444580078],["▁hà",-13.29928970336914],["▁Bentuk",-13.299298286437988],["ЫМ",-13.29931640625],["cuba",-13.299321174621582],["▁есеп",-13.299334526062012],["▁dapatkan",-13.299341201782228],["▁affida",-13.29936981201172],["▁xaalad",-13.299372673034668],["▁branche",-13.299382209777832],["▁multimedia",-13.299398422241213],["▁সো",-13.299402236938477],["ensä",-13.29944133758545],["වින්",-13.299443244934082],["zeka",-13.299452781677246],["tično",-13.299453735351562],["▁यति",-13.299466133117676],["▁engagement",-13.299468994140623],["▁taste",-13.299473762512209],["▁Wochenende",-13.299483299255373],["ስና",-13.299485206604004],["▁skape",-13.299493789672852],["очек",-13.299514770507812],["▁సినిమాలు",-13.299518585205078],["▁Zoti",-13.299530029296877],["▁เนื้อ",-13.299535751342772],["лердин",-13.299537658691406],["▁Ελ",-13.299538612365724],["▁dastur",-13.299554824829102],["ثة",-13.299575805664062],["▁upaya",-13.299590110778809],["了吧",-13.29959487915039],["ilecek",-13.29960823059082],["▁hejm",-13.299612998962402],["▁Arī",-13.2996187210083],["▁ప్ల",-13.299622535705566],["▁كانوا",-13.299623489379885],["▁Över",-13.299642562866213],["▁rdeč",-13.299657821655
272],["▁mbështet",-13.29966163635254],["ਰਣ",-13.299677848815918],["▁친",-13.299684524536133],["ecer",-13.29969310760498],["▁čega",-13.299700736999512],["Ger",-13.299705505371094],["ಬೆ",-13.29970932006836],["550",-13.299723625183104],["មើល",-13.29973602294922],["ጀት",-13.299742698669434],["rojnë",-13.29975700378418],["▁meydan",-13.299758911132812],["لہ",-13.29976749420166],["pien",-13.299768447875977],["▁увер",-13.299768447875977],["بانی",-13.299802780151367],["▁проход",-13.299813270568848],["▁پلیس",-13.29983615875244],["▁ಅತಿ",-13.299842834472656],["ਊ",-13.299877166748049],["уванні",-13.299904823303224],["evne",-13.299909591674805],["стю",-13.299922943115234],["▁projectes",-13.299928665161133],["▁Gand",-13.29994010925293],["▁طفل",-13.299967765808104],["lenk",-13.299976348876951],["yki",-13.299991607666016],["也不会",-13.29999828338623],["▁ਹੇ",-13.300007820129396],["▁王",-13.300036430358888],["▁управо",-13.300052642822266],["▁عبادت",-13.300061225891112],["68)",-13.300065994262695],["STEM",-13.300086975097656],["▁صديق",-13.3001127243042],["▁aquel",-13.30011749267578],["▁podia",-13.300122261047363],["fiki",-13.300176620483398],["▁comprova",-13.300182342529297],["▁textu",-13.300195693969728],["▁ေရး",-13.300203323364258],["плату",-13.300206184387209],["▁φύ",-13.300212860107422],["abilidade",-13.300252914428713],["hear",-13.30026149749756],["▁Tym",-13.300264358520508],["ମ୍ଭ",-13.300280570983888],["මම",-13.300284385681152],["ನಿಯ",-13.300296783447266],["▁σκοπό",-13.30034351348877],["▁искат",-13.300347328186035],["nät",-13.300349235534668],["ረብ",-13.300359725952148],["藉由",-13.300359725952148],["shti",-13.300372123718262],["ებიც",-13.300384521484377],["▁composta",-13.30039405822754],["bore",-13.300397872924805],["▁bolt",-13.300418853759766],["၂၀",-13.30042839050293],["▁ambasador",-13.300434112548828],["culis",-13.300455093383787],["翔",-13.300463676452637],["မေ",-13.3004789352417],["ाज्या",-13.300480842590332],["▁Philip",-13.300480842590332],["ਮਨ",-13.30048370361328],["脳",-13.300508499145508],["หมวดหมู่",-13.300514221191406],["▁UNTUK",-13.300514221191406],["▁Директор",-13.300514221191406],["▁следующие",-13.300514221191406],["〈",-13.300514221191406],["ႆ",-13.300515174865724],["▁خواهیم",-13.300515174865724],["▁സംഘടന",-13.30051612854004],["▁претходно",-13.300517082214355],["▁peraturan",-13.30052089691162],["陷入",-13.300524711608888],["▁службу",-13.300529479980469],["▁այստեղ",-13.300530433654783],["▁ادارې",-13.300533294677734],["ຜິດ",-13.300539016723633],["▁гаргасан",-13.30053997039795],["طبيعة",-13.300551414489746],["▁தலைமை",-13.300551414489746],["▁heshiis",-13.300556182861328],["▁narsa",-13.300562858581545],["VET",-13.300566673278809],["rozum",-13.300570487976074],["▁vietoje",-13.30058479309082],["▁долоо",-13.30059051513672],["▁خوراک",-13.300593376159668],["▁Jessica",-13.300599098205566],["▁establecido",-13.300626754760742],["▁eşi",-13.300633430480955],["▁151",-13.300642013549805],["▁ستار",-13.300661087036133],["▁ണ്ണ",-13.300673484802246],["szki",-13.300679206848145],["手工",-13.300691604614258],["lön",-13.30070972442627],["þing",-13.300719261169434],["▁mənzil",-13.300721168518066],["▁ຟ",-13.3007230758667],["▁pute",-13.300732612609863],["▁bekommt",-13.300745964050291],["▁voltam",-13.300756454467772],["▁laimė",-13.300768852233888],["даване",-13.300821304321287],["▁shah",-13.300840377807615],["liğe",-13.30087184906006],["▁խոս",-13.300872802734377],["其次",-13.300888061523438],["تمتع",-13.30090045928955],["▁Sorry",-13.300932884216309],["▁ultimele",-13.30093765258789],["▁Кош",-13.300946235656738],["まい",-13.300967216491
7],["懂得",-13.300976753234863],["▁ਢ",-13.300984382629396],["isip",-13.30098819732666],["nięcia",-13.30098819732666],["▁سوار",-13.301000595092772],["▁próf",-13.301043510437012],["▁Official",-13.301051139831545],["▁nombro",-13.30105209350586],["▁Теле",-13.301055908203123],["▁Straße",-13.301061630249023],["▁Makedon",-13.301064491271973],["▁Vijay",-13.301069259643556],["▁відэа",-13.301072120666504],["อาชีพ",-13.30107879638672],["▁взять",-13.301103591918944],["ышкан",-13.301118850708008],["trend",-13.301133155822754],["텐",-13.301136016845703],["▁fett",-13.301172256469728],["▁Basta",-13.30117893218994],["▁ኢትዮጵያውያን",-13.301189422607422],["▁sijaan",-13.301190376281738],["▁ເດືອນ",-13.301196098327637],["▁pomocí",-13.301218032836914],["▁mielestä",-13.301226615905762],["виден",-13.301236152648926],["▁nějaký",-13.301239013671877],["常見",-13.301240921020508],["მბ",-13.301244735717772],["tyl",-13.301246643066406],["בלי",-13.30125904083252],["שרות",-13.301279067993164],["▁Rut",-13.30129337310791],["маа",-13.30130100250244],["▁IBM",-13.30130386352539],["▁planin",-13.301315307617188],["▁Ngô",-13.301339149475098],["laag",-13.301344871520996],["▁библиотека",-13.301376342773438],["möglich",-13.301395416259766],["▁나이",-13.301398277282717],["▁ਮੰ",-13.30141258239746],["ttänyt",-13.301419258117676],["ပါဘူး",-13.301443099975586],["ซะ",-13.301451683044434],["▁نیو",-13.30148220062256],["空氣",-13.301518440246582],["magyar",-13.301532745361328],["▁მთავარ",-13.30154514312744],["eysa",-13.301556587219238],["▁ٿيون",-13.301560401916504],["gaga",-13.30156135559082],["▁ketua",-13.301578521728516],["▁газета",-13.30160903930664],["ājs",-13.301609992980955],["δί",-13.30162525177002],["फ्रे",-13.301630020141602],["RUS",-13.301657676696776],["flokk",-13.301671028137209],["▁Juha",-13.301674842834473],["▁försöker",-13.301691055297852],["名单",-13.3016939163208],["▁நோ",-13.301695823669434],["вшего",-13.301702499389648],["▁سرو",-13.301714897155762],["▁pudo",-13.30174160003662],["▁نادي",-13.301749229431152],["остта",-13.301764488220217],["▁байгааг",-13.301767349243164],["▁problèmes",-13.301774978637695],["하지만",-13.301779747009276],["▁аднаго",-13.301786422729492],["▁рот",-13.30180549621582],["▁труп",-13.301807403564451],["▁검",-13.301830291748049],["▁lundi",-13.301840782165527],["道理",-13.301872253417969],["▁танд",-13.301873207092283],["▁negat",-13.30189323425293],["▁zatvor",-13.301913261413574],["献",-13.301920890808104],["▁поси",-13.30192756652832],["▁ချ",-13.301932334899902],["ūta",-13.301956176757812],["כיוון",-13.301958084106444],["▁uansett",-13.301960945129396],["▁படி",-13.301960945129396],["一口",-13.301980018615724],["▁prani",-13.301998138427734],["kash",-13.302002906799316],["▁mínima",-13.302007675170898],["▁VOL",-13.30202865600586],["▁trondheim",-13.30203914642334],["▁আন্তর্জাতিক",-13.30203914642334],["▁সোমবার",-13.30203914642334],["▁બંને",-13.30203914642334],["▁కాబట్టి",-13.30203914642334],["뜨",-13.30203914642334],["phakathi",-13.302040100097656],["សេរី",-13.302042007446287],["▁piškotkov",-13.302046775817873],["▁సందర్భంగా",-13.302046775817873],["鳳",-13.30205249786377],["▁,‬",-13.302056312561035],["▁Doanh",-13.302058219909668],["▁tatizo",-13.3020601272583],["▁төрлийн",-13.302064895629885],["愉快",-13.302067756652832],["▁Verkauf",-13.302098274230955],["玩具",-13.302117347717283],["สเต",-13.302129745483398],["▁мониторинг",-13.302145957946776],["▁sinds",-13.302159309387209],["▁дневно",-13.30216121673584],["▁კინო",-13.302166938781738],["▁قالت",-13.302170753479004],["çilik",-13.30217170715332],["ుకు",-13.30217456817627],["▁rinna",-13.30217456817
627],["മാന്",-13.30217742919922],["▁במשך",-13.302178382873535],["▁Bho",-13.302180290222168],["▁यश",-13.302184104919434],["给他",-13.30218505859375],["▁polecam",-13.302186965942385],["zara",-13.3021879196167],["▁aperto",-13.302188873291016],["▁Zahl",-13.302201271057127],["थि",-13.302248001098633],["▁relazione",-13.302255630493164],["Het",-13.30226707458496],["colare",-13.30226993560791],["▁mature",-13.302275657653809],["▁ਰੂਪ",-13.302278518676758],["才会",-13.302281379699709],["▁tờ",-13.302305221557615],["▁unidade",-13.302311897277832],["▁respondi",-13.30231761932373],["▁192",-13.302326202392578],["খন",-13.302353858947754],["▁окруж",-13.302355766296388],["▁Vuo",-13.302362442016602],["▁auditori",-13.302372932434082],["▁шийд",-13.302380561828612],["yksi",-13.302383422851562],["தற்காக",-13.302412986755373],["电池",-13.302416801452637],["▁שחור",-13.302420616149902],["▁sebenar",-13.3024320602417],["תחום",-13.302437782287598],["verband",-13.30244255065918],["იტა",-13.302444458007812],["тю",-13.302450180053713],["▁znal",-13.302454948425291],["▁избран",-13.302468299865724],["▁волн",-13.302485466003418],["▁ekte",-13.30250644683838],["یږي",-13.302528381347656],["ပစ္",-13.3025484085083],["▁لاړ",-13.302566528320312],["qab",-13.30258083343506],["ଦାନ",-13.30259132385254],["▁خبري",-13.30261516571045],["▁Elite",-13.302632331848145],["▁ostalih",-13.30263614654541],["ttavan",-13.302639961242676],["▁lugha",-13.30264377593994],["▁பை",-13.302647590637209],["▁Kabel",-13.302648544311523],["これを",-13.302699089050291],["▁कोटी",-13.302719116210938],["▁sông",-13.302730560302734],["ెంట్",-13.302799224853516],["лении",-13.302828788757324],["хте",-13.302828788757324],["▁Κυ",-13.302847862243652],["そこ",-13.302871704101562],["dís",-13.302884101867676],["▁spelare",-13.302898406982422],["րք",-13.302922248840332],["ັ້ນ",-13.302938461303713],["sija",-13.302945137023926],["▁Tilmeld",-13.302963256835938],["▁kerst",-13.302964210510254],["排放",-13.302976608276367],["ख्या",-13.302987098693848],["টাই",-13.30299472808838],["원을",-13.302997589111328],["pustit",-13.303006172180176],["▁island",-13.303037643432615],["ගී",-13.30304718017578],["delijk",-13.303054809570312],["▁considered",-13.303057670593262],["▁ម៉",-13.30306911468506],["▁дані",-13.303071975708008],["йни",-13.303107261657717],["որոշ",-13.303108215332031],["▁წყალ",-13.303114891052246],["▁Бела",-13.303131103515623],["▁Christen",-13.30313491821289],["▁sihat",-13.303171157836914],["▁обласн",-13.303174018859863],["▁banal",-13.303184509277344],["▁כח",-13.30321216583252],["pico",-13.303213119506836],["▁Josip",-13.30321979522705],["itvi",-13.30323600769043],["▁špa",-13.303239822387695],["▁المكان",-13.303239822387695],["ratta",-13.303253173828123],["išču",-13.30325984954834],["ичната",-13.303265571594238],["▁kū",-13.30329704284668],["ေနတာ",-13.303308486938477],["▁алыш",-13.30332374572754],["▁ensam",-13.303339958190918],["datum",-13.30334758758545],["▁standar",-13.303359985351562],["▁Καρ",-13.303362846374512],["▁persoonlijke",-13.303394317626951],["▁уақытта",-13.303394317626951],["კლა",-13.303399085998535],["▁rahu",-13.303407669067385],["▁accessori",-13.30341625213623],["ութիւն",-13.303444862365724],["湾",-13.303444862365724],["▁находя",-13.303454399108888],["▁тексту",-13.303470611572266],["▁Сиз",-13.30348300933838],["▁värme",-13.303524017333984],["лева",-13.303547859191896],["▁кордон",-13.303547859191896],["▁módszer",-13.30355453491211],["▁Šta",-13.303564071655272],["▁Cần",-13.303565979003906],["▁madagdagan",-13.303565979003906],["▁առողջ",-13.303565979003906],["▁kodėl",-13.303566932678224],["▁ب
رامج",-13.303566932678224],["▁سڄي",-13.303566932678224],["皆様",-13.303566932678224],["ເວົ້າ",-13.30356788635254],["▁мляко",-13.30356788635254],["▁تبديل",-13.30356788635254],["▁գալիս",-13.303571701049805],["▁ensuite",-13.303579330444336],["▁Ekspert",-13.303583145141602],["физ",-13.303585052490234],["▁العاده",-13.3035888671875],["▁अथवा",-13.303594589233398],["▁időpont",-13.303596496582031],["▁dobbelt",-13.303607940673828],["▁بست",-13.303614616394045],["再加上",-13.303633689880373],["▁දැඩි",-13.303647994995115],["אחר",-13.303651809692385],["▁философ",-13.30365753173828],["▁ساعة",-13.30365753173828],["▁الرحمن",-13.30366325378418],["▁schönen",-13.303667068481444],["ต่ํา",-13.303672790527344],["▁161",-13.30367374420166],["ಕಾಶ",-13.303674697875977],["▁здравствен",-13.303683280944824],["▁სიახლეები",-13.303683280944824],["▁يعتبر",-13.303688049316406],["▁கொடுத்த",-13.303690910339355],["▁результаты",-13.30370044708252],["▁лідер",-13.303701400756836],["▁tayyor",-13.303703308105469],["▁phóng",-13.303709030151367],["▁dhaq",-13.303736686706545],["▁аян",-13.303739547729492],["▁proqramı",-13.30376148223877],["▁ପ୍ରଧାନମନ୍ତ୍ରୀ",-13.30377197265625],["ဖြစ်သည်။",-13.303772926330566],["деля",-13.303780555725098],["いった",-13.303780555725098],["▁tzw",-13.30379867553711],["знаком",-13.303852081298828],["▁ଚି",-13.30385684967041],["▁betalen",-13.303874015808104],["▁Location",-13.303884506225586],["ਬਾਲ",-13.303921699523926],["▁oversikt",-13.303922653198242],["▁finding",-13.303926467895508],["▁cuarto",-13.303935050964355],["▁joista",-13.30397605895996],["▁suhu",-13.30400562286377],["▁alig",-13.304022789001465],["kkeen",-13.304024696350098],["▁Biel",-13.30402946472168],["евой",-13.30405330657959],["▁GL",-13.30406093597412],["▁lipca",-13.304061889648438],["5€",-13.304089546203612],["▁ರಿ",-13.304099082946776],["▁великих",-13.304101943969728],["▁केस",-13.304102897644045],["▁Mash",-13.304107666015623],["▁tendencia",-13.304118156433104],["aichean",-13.304157257080078],["▁ocorre",-13.304157257080078],["▁yanada",-13.304179191589355],["tania",-13.304191589355469],["лиц",-13.304191589355469],["ității",-13.304224967956545],["зик",-13.304234504699709],["▁saygı",-13.304248809814451],["▁нужна",-13.30425262451172],["▁relativi",-13.304272651672363],["únic",-13.304291725158691],["과의",-13.304296493530272],["▁dogaja",-13.304306983947754],["▁forget",-13.304332733154297],["طبق",-13.304339408874512],["ല്ലി",-13.304367065429688],["▁haaste",-13.304388999938965],["▁påverka",-13.304413795471191],["wise",-13.304415702819824],["▁føre",-13.30441665649414],["!!!!!!",-13.304417610168455],["▁පරිදි",-13.304465293884276],["▁вып",-13.304471015930176],["▁শা",-13.304478645324709],["▁sınıf",-13.304479598999023],["нцы",-13.304489135742188],["моло",-13.3045072555542],["▁orthu",-13.30451488494873],["▁teljesítmény",-13.304522514343262],["▁கண்ண",-13.304524421691896],["proba",-13.304543495178224],["ያዝ",-13.304559707641602],["論壇",-13.304571151733398],["▁случува",-13.304580688476562],["čím",-13.304638862609863],["▁Vega",-13.304651260375977],["▁ګوند",-13.304655075073242],["ndatud",-13.30466079711914],["льно",-13.304680824279783],["робіт",-13.304689407348633],["▁istiqamət",-13.30471134185791],["ראָ",-13.304713249206545],["യത്",-13.304728507995604],["ረጋ",-13.304733276367188],["▁සිර",-13.304743766784668],["▁לתת",-13.3047456741333],["▁consumidor",-13.304746627807615],["หมาย",-13.304758071899414],["длин",-13.30477809906006],["▁veikla",-13.304780960083008],["▁chambres",-13.304787635803224],["▁empreses",-13.30479621887207],["▁eet",-13.304819107055664],["ଗି",-13.304834365844728],
["▁ശ്രദ്ധ",-13.304849624633787],["বো",-13.304850578308104],["▁Mato",-13.30486011505127],["▁ಟೀ",-13.304869651794434],["hero",-13.30489444732666],["▁poveste",-13.30491542816162],["راي",-13.304924964904783],["rnir",-13.304930686950684],["过的",-13.304949760437012],["▁критери",-13.30495262145996],["▁prueba",-13.304983139038086],["▁заборави",-13.3049955368042],["问题的",-13.304996490478516],["币",-13.305007934570312],["kabel",-13.305020332336426],["優勢",-13.305028915405272],["▁βιβλίο",-13.305048942565918],["XE",-13.305051803588867],["७४",-13.305062294006348],["鏈",-13.305066108703612],["倉",-13.305076599121094],["▁Stok",-13.305078506469728],["▁ට",-13.305079460144045],["กระทรวง",-13.305093765258787],["ෞ",-13.305094718933104],["▁arbennig",-13.305094718933104],["▁myöhemmin",-13.305094718933104],["▁tấm",-13.305094718933104],["▁кількості",-13.305094718933104],["▁мероприятий",-13.305094718933104],["▁مۇھىم",-13.305094718933104],["▁কৰক",-13.305094718933104],["▁ተጨማሪ",-13.305094718933104],["plauk",-13.305099487304688],["▁mưa",-13.305099487304688],["▁מיליון",-13.30510139465332],["▁Україну",-13.305102348327637],["▁положения",-13.305102348327637],["ጀመረ",-13.305106163024902],["▁շենք",-13.305106163024902],["▁उत्तम",-13.305110931396484],["▁సంబంధించిన",-13.305110931396484],["igheten",-13.305112838745115],["▁Аднак",-13.305112838745115],["▁mağaza",-13.305113792419434],["▁कश्मीर",-13.305113792419434],["▁souvenir",-13.3051176071167],["▁worry",-13.305136680603027],["▁ക്കാര",-13.30513858795166],["▁Ќе",-13.305140495300291],["▁Right",-13.305146217346191],["▁својој",-13.305146217346191],["ξουν",-13.305148124694824],["rutan",-13.305152893066406],["▁Toronto",-13.305152893066406],["sammen",-13.30516529083252],["▁Перш",-13.305179595947266],["▁कहीं",-13.305200576782228],["▁mūs",-13.305201530456545],["▁သံ",-13.305206298828123],["▁모습",-13.305225372314451],["▁jonkin",-13.305243492126465],["▁sağlar",-13.305243492126465],["▁మార",-13.305259704589844],["ējais",-13.30526351928711],["▁levels",-13.305267333984377],["เนื้อหา",-13.305268287658691],["▁24/7",-13.305291175842283],["▁koning",-13.305317878723145],["ďa",-13.30531883239746],["▁ماشومانو",-13.305331230163574],["▁문제가",-13.305346488952637],["ketak",-13.305360794067385],["mít",-13.30539894104004],["▁pomočjo",-13.305411338806152],["racia",-13.305421829223633],["ാതിരി",-13.305425643920898],["▁сумму",-13.305426597595217],["▁edifici",-13.305431365966797],["▁komunikasi",-13.305448532104492],["▁tahansa",-13.305462837219238],["મિ",-13.305466651916504],["▁Deal",-13.305472373962402],["▁SB",-13.30548095703125],["▁நிலையில்",-13.305485725402832],["mbangan",-13.305486679077148],["лове",-13.305493354797363],["Van",-13.305554389953612],["▁vjeç",-13.305569648742676],["IDI",-13.30559253692627],["▁piacere",-13.305601119995115],["▁nuori",-13.30565357208252],["▁супруг",-13.305665969848633],["stato",-13.305688858032228],["ਡਾ",-13.30570125579834],["▁lemak",-13.305702209472656],["ຂ້າ",-13.30573272705078],["▁stranici",-13.305753707885742],["▁CNC",-13.305770874023438],["▁passiert",-13.305777549743652],["IZI",-13.305781364440918],["▁Siamo",-13.305781364440918],["▁Unik",-13.3057861328125],["▁unica",-13.30582332611084],["chang",-13.305827140808104],["▁заходи",-13.305829048156738],["-2009",-13.30587387084961],["▁Gard",-13.305878639221191],["▁משחק",-13.305886268615724],["као",-13.305948257446287],["▁4-6",-13.305954933166504],["ğr",-13.305959701538086],["▁rano",-13.305962562561035],["さを",-13.305974960327148],["száz",-13.305978775024414],["▁چهارشنبه",-13.305994033813477],["先進",-13.305994033813477],["▁shr",-13.3059968948364
26],["▁erfaringer",-13.306001663208008],["(10",-13.30600929260254],["▁Συ",-13.306012153625488],["▁Kyl",-13.30601406097412],["▁americano",-13.306035041809082],["▁втор",-13.30604076385498],["▁campos",-13.306047439575195],["▁sebep",-13.306065559387209],["▁klien",-13.306074142456056],["ഫീ",-13.306076049804688],["ତର",-13.306085586547852],["ించు",-13.306110382080078],["▁IK",-13.306138038635254],["творення",-13.306144714355469],["▁მსოფლიოს",-13.306198120117188],["▁MAD",-13.306215286254885],["τρι",-13.306219100952148],["▁Imran",-13.30622673034668],["หลายคน",-13.306230545043944],["▁bw",-13.306244850158691],["شاهد",-13.306246757507324],["▁រដ្ឋាភិបាល",-13.30628776550293],["▁staden",-13.306291580200195],["വയ",-13.306292533874512],["▁ფასი",-13.306314468383787],["▁तपाई",-13.306349754333496],["▁banko",-13.306358337402344],["▁Beau",-13.306363105773926],["了解到",-13.30636501312256],["▁тас",-13.306389808654783],["▁laulu",-13.306397438049316],["▁retard",-13.30639934539795],["▁gitti",-13.306406021118164],["▁Институт",-13.30641269683838],["klusi",-13.306442260742188],["▁daşı",-13.306456565856934],["tün",-13.30646800994873],["▁പ്രണയ",-13.30649471282959],["kereső",-13.306498527526855],["▁conocimiento",-13.306516647338867],["▁плаща",-13.30652904510498],["▁draga",-13.30654525756836],["25%",-13.306551933288574],["▁aime",-13.306553840637209],["φου",-13.30655574798584],["્યૂ",-13.306557655334473],["幣",-13.306561470031738],["ేశ్వర",-13.306577682495115],["▁neer",-13.306581497192385],["奴",-13.306604385375977],["審査",-13.30661392211914],["क्सी",-13.306614875793455],["อํานาจ",-13.30662441253662],["បុណ្យ",-13.306626319885254],["▁Prifysgol",-13.306626319885254],["▁управлява",-13.306626319885254],["▁ندارند",-13.306626319885254],["ପୂର୍ଣ୍ଣ",-13.30662727355957],["▁gemakkelijk",-13.306628227233888],["▁शुल्क",-13.306628227233888],["ႏွင့္",-13.306629180908203],["▁impon",-13.30663013458252],["▁আবেদন",-13.306632041931152],["按摩",-13.306632041931152],["▁თბილისში",-13.306632995605469],["▁байдлын",-13.306633949279783],["▁ધર્મ",-13.306634902954102],["▁ئېلان",-13.30664348602295],["▁reče",-13.306644439697266],["▁brought",-13.306652069091797],["▁አደጋ",-13.306658744812012],["▁녹",-13.306659698486328],["izează",-13.306666374206545],["setzung",-13.30667495727539],["▁respektive",-13.306681632995604],["▁لعبة",-13.306693077087402],["▁soluzioni",-13.306703567504885],["പരി",-13.30671501159668],["ວງ",-13.30672836303711],["▁Biixi",-13.30672836303711],["▁Господа",-13.306739807128906],["trzy",-13.3067626953125],["▁sisältää",-13.306769371032717],["▁informieren",-13.30677318572998],["해요",-13.306775093078612],["▁ఉన్నారు",-13.306790351867676],["▁asisten",-13.306803703308104],["hagen",-13.3068265914917],["▁reference",-13.306836128234863],["▁любовь",-13.30685806274414],["keä",-13.30686092376709],["▁Zarząd",-13.306889533996582],["▁കാരണ",-13.306904792785645],["▁comunicado",-13.30690574645996],["▁аткар",-13.30690574645996],["▁مصدر",-13.306912422180176],["▁збир",-13.306920051574709],["▁Hitta",-13.306925773620604],["▁laboratorio",-13.306949615478516],["hengi",-13.306954383850098],["▁czer",-13.30696964263916],["▁سیاه",-13.30698299407959],["▁አስተ",-13.306989669799805],["▁политике",-13.307000160217283],["▁kuendelea",-13.30703830718994],["▁meaning",-13.30704402923584],["▁lido",-13.307072639465332],["зму",-13.30708122253418],["▁sənəd",-13.307082176208496],["magi",-13.307121276855469],["▁jobbar",-13.307125091552734],["ančios",-13.307138442993164],["telep",-13.30713939666748],["680",-13.30714511871338],["▁annonce",-13.307161331176758],["čte",-13.307169914245604],["axi",-13.307
205200195312],["▁unutar",-13.30721378326416],["▁cold",-13.307215690612791],["tokana",-13.307220458984377],["▁ALI",-13.307226181030272],["▁բանակ",-13.307229042053224],["▁ካለ",-13.307244300842283],["▁Viņš",-13.3072509765625],["▁bhaint",-13.307254791259766],["ztat",-13.30726146697998],["▁шығару",-13.307263374328612],["▁singura",-13.307272911071776],["щина",-13.307307243347168],["قرأ",-13.30731201171875],["▁eina",-13.307327270507812],["民間",-13.307328224182127],["طم",-13.307355880737305],["സിന്",-13.307361602783203],["▁מדבר",-13.30736255645752],["şik",-13.307387351989746],["듯",-13.30738925933838],["▁Barcelo",-13.307394981384276],["पन्न",-13.30739974975586],["▁Natale",-13.307400703430176],["čkov",-13.30740451812744],["▁சென்று",-13.307417869567873],["▁нашето",-13.307453155517578],["єднан",-13.307456016540527],["▁mha",-13.30746078491211],["Ժ",-13.30746364593506],["▁créa",-13.307476043701172],["▁aty",-13.307506561279297],["Gre",-13.307509422302246],["આઇ",-13.307512283325195],["ন্ধ",-13.307515144348145],["▁horizont",-13.307528495788574],["อาคาร",-13.30752944946289],["▁tuon",-13.30754566192627],["panel",-13.307583808898926],["naga",-13.307588577270508],["ഉ",-13.307598114013672],["boni",-13.307599067687988],["▁विधि",-13.307601928710938],["ອາດ",-13.307615280151367],["▁plač",-13.30762004852295],["▁cibo",-13.307626724243164],["▁absoluta",-13.307640075683594],["ยะ",-13.307649612426758],["▁якую",-13.307658195495604],["▁gaano",-13.30766773223877],["▁Romani",-13.307676315307615],["▁đo",-13.307677268981934],["▁радова",-13.30768585205078],["▁fallen",-13.307690620422363],["opération",-13.307693481445312],["▁విమర్శ",-13.30771827697754],["▁stelle",-13.307724952697754],["▁hej",-13.30772876739502],["sexual",-13.307734489440918],["表情",-13.30774974822998],["▁অপ",-13.307753562927246],["pressão",-13.30776023864746],["bird",-13.307793617248535],["▁lahti",-13.307798385620115],["جسد",-13.307817459106444],["ходили",-13.30782699584961],["▁důvod",-13.307831764221191],["▁Daw",-13.30787467956543],["פיר",-13.307875633239746],["▁երկրների",-13.30788230895996],["▁правилно",-13.307902336120604],["委员",-13.30791473388672],["ეტა",-13.3079195022583],["▁بولدى",-13.307924270629885],["ίσουν",-13.307933807373049],["ارب",-13.307965278625488],["▁предмета",-13.307982444763184],["▁आकार",-13.308034896850586],["▁ದೇಶದ",-13.30804443359375],["▁කාර්ය",-13.308061599731444],["▁තිබ්බ",-13.308064460754396],["▁Ezen",-13.30806827545166],["▁intention",-13.308070182800291],["▁кен",-13.308073043823242],["स्तर",-13.308086395263672],["▁ninyo",-13.308097839355469],["ତାର",-13.308098793029783],["ですよ",-13.308099746704102],["მაც",-13.30810260772705],["▁değerli",-13.308107376098633],["大众",-13.308113098144531],["किंग",-13.308125495910645],["墓",-13.308127403259276],["ением",-13.308128356933594],["▁групата",-13.30813694000244],["▁cry",-13.308149337768556],["モデル",-13.308154106140137],["မဟုတ်",-13.308155059814451],["еду",-13.308159828186035],["ສະມາຊິກ",-13.308159828186035],["▁następnie",-13.308159828186035],["▁Цркве",-13.308159828186035],["▁అవసరం",-13.308159828186035],["▁కళ్యాణ్",-13.308159828186035],["▁కేవలం",-13.308159828186035],["▁Başbakan",-13.308160781860352],["ltu",-13.308170318603516],["▁அவரது",-13.30817699432373],["噴",-13.308186531066896],["▁агреси",-13.308192253112791],["▁стін",-13.308194160461426],["▁보여",-13.308199882507324],["▁إطار",-13.308210372924805],["▁standaard",-13.308219909667969],["▁oluline",-13.308226585388184],["▁Fakta",-13.308238983154297],["▁நூல்",-13.308245658874512],["出す",-13.308266639709473],["各级",-13.308268547058104],["▁berak",-13.30827808380127]
,["ുണ്ടായ",-13.3082914352417],["▁الخدمات",-13.308300971984863],["▁impresion",-13.308319091796877],["лама",-13.30832862854004],["ጠው",-13.308337211608888],["▁dabas",-13.308340072631836],["▁तल",-13.308351516723633],["▁้",-13.308364868164062],["ափոխ",-13.30837345123291],["▁muoto",-13.308385848999023],["교회",-13.308438301086426],["▁professionnel",-13.308451652526855],["WER",-13.308501243591309],["立場",-13.30850601196289],["▁feadh",-13.308511734008787],["960",-13.30852222442627],["▁jälle",-13.3085298538208],["▁olmasa",-13.308537483215332],["▁Naša",-13.308539390563965],["▁1907",-13.308584213256836],["▁chaidh",-13.308605194091797],["▁বই",-13.308615684509276],["▁Старо",-13.308616638183594],["▁ngan",-13.308626174926758],["കര്",-13.30864143371582],["מחקר",-13.308645248413086],["▁zeit",-13.308650016784668],["ZET",-13.3086519241333],["boro",-13.30865478515625],["▁постои",-13.308655738830566],["OME",-13.308661460876465],["▁붙",-13.308667182922363],["▁مجتمع",-13.308709144592283],["▁odmor",-13.308710098266602],["▁tinc",-13.308721542358398],["▁உணவு",-13.308725357055664],["登記",-13.308751106262209],["dės",-13.308761596679688],["▁sanhi",-13.308762550354004],["▁pakke",-13.308781623840332],["▁1901",-13.308782577514648],["itteen",-13.308799743652344],["▁wider",-13.30880641937256],["給予",-13.308826446533203],["▁Wander",-13.30882740020752],["▁позива",-13.308829307556152],["ئه",-13.308842658996582],["वाला",-13.30886173248291],["▁усили",-13.30886173248291],["▁과정",-13.30889129638672],["áns",-13.308923721313477],["ਸਿ",-13.308923721313477],["▁izveidot",-13.308929443359377],["▁neiš",-13.308931350708008],["▁pranë",-13.308967590332031],["▁pogosto",-13.308971405029297],["fax",-13.308981895446776],["▁šťast",-13.308987617492676],["▁келди",-13.308989524841309],["▁бивш",-13.309021949768066],["נב",-13.309035301208496],["▁bonita",-13.309052467346191],["واصل",-13.309069633483888],["▁snakker",-13.309076309204102],["夺",-13.309100151062012],["▁nest",-13.309102058410645],["▁Graf",-13.30912971496582],["▁toque",-13.30915069580078],["▁conhece",-13.309167861938477],["ጠብ",-13.309189796447754],["▁játékos",-13.309203147888184],["▁naują",-13.309226036071776],["نٽ",-13.30923080444336],["▁Birliği",-13.309244155883787],["▁vazhd",-13.309258460998535],["skifte",-13.309277534484863],["▁toekomst",-13.309280395507812],["▁Ove",-13.309286117553713],["AKI",-13.309290885925291],["ీర్",-13.309318542480469],["管道",-13.309318542480469],["▁կայք",-13.309326171875],["▁ไม่ว่าจะเป็น",-13.309334754943848],["▁जोड़",-13.309350967407228],["▁حرب",-13.30936050415039],["日的",-13.309362411499023],["ਐਸ",-13.309370994567873],["تنفيذ",-13.30937957763672],["▁konkrét",-13.30939483642578],["▁граѓани",-13.309404373168944],["此时",-13.30941677093506],["If",-13.309429168701172],["اسى",-13.309435844421388],["▁Ted",-13.309455871582031],["▁체험",-13.309460639953612],["कला",-13.309483528137209],["▁النق",-13.309496879577637],["tería",-13.309517860412598],["▁descoperit",-13.309518814086914],["දුම්",-13.30953311920166],["pori",-13.30953884124756],["zice",-13.309553146362305],["▁filmo",-13.309561729431152],["vë",-13.309571266174316],["pent",-13.309590339660645],["គឺ",-13.30960464477539],["ാർ",-13.30964469909668],["älä",-13.309678077697754],["슨",-13.309683799743652],["ທົ່ວ",-13.309690475463867],["▁प्राधिकरण",-13.30969524383545],["▁ଝିଅ",-13.30969524383545],["▁മുന്ന",-13.30969524383545],["▁Bất",-13.309696197509766],["▁ನಡುವೆ",-13.309696197509766],["▁Lisboa",-13.309697151184082],["▁повинна",-13.309697151184082],["▁כסף",-13.309697151184082],["▁ಡ",-13.309698104858398],["▁Welcome",-13.309707641601562],["
▁ಮಾಜಿ",-13.30971908569336],["▁ақпараттық",-13.309720039367676],["▁кантип",-13.309721946716309],["▁соңғы",-13.309734344482422],["基础设施",-13.309735298156738],["▁өзүнүн",-13.309738159179688],["▁kaldı",-13.309741020202637],["▁recoñecemento",-13.309744834899902],["авља",-13.30974578857422],["▁furt",-13.3097505569458],["▁Children",-13.309757232666016],["▁மேல்",-13.309758186340332],["साई",-13.309767723083496],["▁نثار",-13.309774398803713],["▁sokak",-13.309776306152344],["▁zināt",-13.309788703918455],["VAT",-13.309796333312988],["的前",-13.30979824066162],["▁ໃຈ",-13.30980110168457],["▁Flam",-13.309802055358888],["ขนาดใหญ่",-13.309803009033203],["▁nők",-13.309821128845217],["▁unig",-13.309849739074709],["▁javnost",-13.30985164642334],["▁angeboten",-13.309852600097656],["▁Ā",-13.309852600097656],["▁पट",-13.309889793395996],["▁celles",-13.309895515441896],["▁protocol",-13.309898376464844],["▁ቢሆን",-13.309901237487791],["▁Breaking",-13.309903144836426],["▁બાળક",-13.309913635253906],["▁пишу",-13.30992031097412],["▁퍼",-13.309922218322754],["▁сделан",-13.309937477111816],["▁netwerk",-13.30994701385498],["OOR",-13.309947967529297],["▁Loh",-13.309950828552246],["▁arány",-13.309956550598145],["illé",-13.309961318969728],["▁victor",-13.309964179992676],["▁published",-13.309968948364258],["▁Programme",-13.309975624084473],["得很",-13.309979438781738],["▁wünschen",-13.310029983520508],["▁ngừng",-13.310040473937988],["▁نقطه",-13.310052871704102],["▁Tamen",-13.310053825378418],["POS",-13.310068130493164],["Eng",-13.31007480621338],["ستا",-13.310086250305176],["فعل",-13.310097694396973],["▁håret",-13.310112953186035],["▁želimo",-13.31014633178711],["▁унаа",-13.310160636901855],["▁dirbti",-13.310176849365234],["تې",-13.310179710388184],["▁taux",-13.310181617736816],["▁posameznik",-13.310188293457031],["NEC",-13.310230255126951],["ိဳ",-13.310236930847168],["▁ucap",-13.310237884521484],["▁bilmə",-13.31026840209961],["▁qızı",-13.310290336608888],["ımızın",-13.31029224395752],["來源",-13.31029224395752],["ดูด",-13.31029987335205],["▁ligge",-13.310307502746582],["dimension",-13.310308456420898],["▁znanosti",-13.310325622558594],["▁carica",-13.31032657623291],["ाइन",-13.310336112976074],["mination",-13.310338020324709],["▁होम",-13.310338973999023],["nið",-13.310344696044922],["▁møter",-13.310359954833984],["▁gazetar",-13.310375213623049],["▁problemy",-13.310388565063477],["ធ្វើការ",-13.310389518737791],["дөг",-13.31041431427002],["мила",-13.310420036315918],["▁വിദ്യാഭ്യാസ",-13.310434341430664],["▁desteği",-13.31043529510498],["▁pangalan",-13.310461044311523],["▁publică",-13.3104887008667],["▁mujhe",-13.310491561889648],["▁Máte",-13.310502052307127],["ಾಗಿದೆ",-13.310519218444824],["dienā",-13.310522079467772],["▁γε",-13.31056022644043],["torului",-13.310564994812012],["ដ្ឋាន",-13.310564994812012],["▁Rural",-13.310580253601074],["irg",-13.31059455871582],["▁ስለሚ",-13.3106050491333],["anipun",-13.310606002807615],["▁бата",-13.310620307922363],["एका",-13.310628890991213],["▁Temos",-13.310633659362791],["▁ఛా",-13.31063461303711],["aber",-13.310650825500488],["▁fasi",-13.310664176940918],["วิชา",-13.310670852661133],["ສອງ",-13.310681343078612],["verdig",-13.31068515777588],["▁serius",-13.310705184936523],["▁Liberal",-13.310714721679688],["▁nieruchomości",-13.310718536376951],["▁episod",-13.310723304748535],["achat",-13.310760498046877],["్రి",-13.310769081115724],["скае",-13.31077766418457],["ாள",-13.310800552368164],["մին",-13.310802459716797],["▁postu",-13.31081771850586],["▁Apel",-13.310821533203123],["▁ёй",-13.310824394226074],["▁uc
apan",-13.310831069946287],["日子",-13.310831069946287],["▁gull",-13.310846328735352],["кад",-13.310853004455566],["▁ماشوم",-13.310872077941896],["盈",-13.310879707336426],["んだろう",-13.310904502868652],["siq",-13.310937881469728],["іку",-13.31094455718994],["RIE",-13.310946464538574],["क्शन",-13.310951232910156],["中有",-13.310967445373535],["banken",-13.310994148254396],["คู",-13.311029434204102],["ശാല",-13.311062812805176],["▁Gym",-13.311062812805176],["fac",-13.31106662750244],["תית",-13.31107234954834],["یٹر",-13.311077117919922],["▁disposi",-13.311089515686035],["▁مٿي",-13.3110933303833],["▁temelji",-13.311123847961426],["▁ຕາ",-13.311128616333008],["▁altor",-13.311144828796388],["▁couleur",-13.311153411865234],["▁состоит",-13.311155319213867],["歡",-13.311156272888184],["もちろん",-13.311161041259766],["Yes",-13.311163902282717],["黎",-13.311179161071776],["чување",-13.311185836791992],["انش",-13.31119155883789],["爐",-13.311202049255373],["▁Hamar",-13.3112154006958],["▁አጥ",-13.311216354370115],["▁geçti",-13.311219215393066],["▁τμήμα",-13.311219215393066],["▁సైట్",-13.311220169067385],["▁Lever",-13.311223030090332],["▁двата",-13.311223030090332],["欧盟",-13.311227798461914],["▁Anasayfa",-13.311234474182127],["▁espectáculo",-13.311234474182127],["▁fútbol",-13.311234474182127],["▁pomiędzy",-13.311234474182127],["▁érvényes",-13.311234474182127],["▁дүүрэг",-13.311234474182127],["▁учраас",-13.311234474182127],["▁جګړې",-13.311234474182127],["▁کنفرانس",-13.311234474182127],["▁కంపెనీ",-13.311234474182127],["▁تایید",-13.311235427856444],["▁தேர்தல்",-13.311235427856444],["▁պարտադիր",-13.311236381530762],["▁لذت",-13.311236381530762],["▁berminat",-13.311237335205078],["▁dokonca",-13.311241149902344],["▁sıkıntı",-13.311241149902344],["▁ujenzi",-13.311241149902344],["▁sotsiaal",-13.311245918273926],["▁oktobra",-13.31124782562256],["▁Хууль",-13.311249732971191],["▁προσφέρει",-13.311250686645508],["▁cilvēks",-13.311261177062988],["禮物",-13.31126880645752],["▁اڳتي",-13.311271667480469],["▁халқы",-13.311277389526367],["▁victim",-13.311279296875],["▁piscine",-13.311281204223633],["▁ਹੇਠ",-13.311283111572266],["gö",-13.311285972595217],["▁KÖZ",-13.311288833618164],["▁wybrać",-13.31130313873291],["▁oraș",-13.311304092407228],["quad",-13.311319351196287],["VÄ",-13.31133270263672],["▁принимать",-13.311338424682615],["▁uwanja",-13.311359405517578],["luvun",-13.311372756958008],["▁dü",-13.311386108398438],["▁6.1",-13.311387062072754],["सिक",-13.311392784118652],["▁2018-2019",-13.311402320861816],["▁vysoko",-13.311402320861816],["පය",-13.31140422821045],["افی",-13.311408042907717],["▁Hỗ",-13.311413764953612],["▁Golaha",-13.311433792114258],["ették",-13.31143569946289],["▁ភាគ",-13.311456680297852],["▁چلا",-13.311480522155762],["тън",-13.311494827270508],["نين",-13.31149959564209],["ல்லா",-13.31157398223877],["▁obliki",-13.311617851257324],["▁ötən",-13.311630249023438],["▁Петро",-13.311649322509766],["▁Sancti",-13.31168270111084],["kesk",-13.311683654785156],["NJ",-13.311685562133787],["vino",-13.31169319152832],["▁menda",-13.311694145202637],["▁parlamentari",-13.311710357666016],["▁віз",-13.31173610687256],["▁verlede",-13.311741828918455],["rido",-13.311748504638672],["▁fons",-13.311775207519531],["便是",-13.31178092956543],["▁ভাই",-13.311784744262695],["▁Paolo",-13.311790466308594],["▁Оба",-13.311830520629885],["▁experiència",-13.311872482299805],["먼",-13.31190299987793],["▁비용",-13.311914443969728],["▁breed",-13.311925888061523],["▁Konf",-13.311930656433104],["regning",-13.311932563781738],["tall",-13.311966896057127],["cimiento"
,-13.312005043029783],["▁Fundació",-13.312019348144531],["▁tenger",-13.312020301818848],["ssie",-13.312026023864746],["▁kategorija",-13.31205940246582],["▁potete",-13.312088012695312],["מכר",-13.31210708618164],["jing",-13.31212329864502],["20,000",-13.312127113342283],["▁ඉදිරි",-13.31214427947998],["diler",-13.312148094177246],["▁inimene",-13.312159538269045],["▁relations",-13.312179565429688],["▁ганц",-13.312180519104004],["diena",-13.312182426452637],["களாக",-13.312192916870115],["▁айып",-13.31219482421875],["▁انها",-13.312199592590332],["▁câte",-13.31221866607666],["▁Doğu",-13.312232971191406],["ജീ",-13.312235832214355],["▁anmäl",-13.312250137329102],["pama",-13.312265396118164],["േഷ്",-13.31227684020996],["▁క్యా",-13.312278747558594],["עכט",-13.31228256225586],["اذ",-13.312299728393556],["▁особист",-13.312344551086426],["▁insulin",-13.31239128112793],["PORT",-13.312414169311523],["trage",-13.31241512298584],["▁venis",-13.312418937683104],["▁ለተ",-13.312433242797852],["▁bạc",-13.312454223632812],["▁produkci",-13.312467575073242],["▁পড়ে",-13.312470436096191],["▁corsi",-13.312494277954102],["▁การรักษา",-13.31252670288086],["▁komunit",-13.31253147125244],["▁modne",-13.31256866455078],["बाद",-13.312596321105955],["plikt",-13.312597274780272],["有任何",-13.312616348266602],["▁sommeren",-13.312617301940918],["▁barba",-13.3126220703125],["频",-13.312636375427246],["Mod",-13.312661170959473],["▁Baik",-13.3126802444458],["改革开放",-13.3126802444458],["▁Пока",-13.312686920166016],["▁Cura",-13.312692642211914],["hockey",-13.312708854675291],["பிடி",-13.312708854675291],["▁Solidari",-13.31271743774414],["繳",-13.312744140625],["▁ahayn",-13.312752723693848],["▁tabul",-13.312756538391112],["妹妹",-13.312759399414062],["▁ಆರೋಪ",-13.31276798248291],["ລູກ",-13.312772750854492],["▁ustawi",-13.312772750854492],["▁lắp",-13.31277561187744],["▁obzirom",-13.31277561187744],["▁доколку",-13.31277561187744],["▁підготовки",-13.31277561187744],["▁трьох",-13.31277561187744],["▁धार्मिक",-13.31277561187744],["▁ಸಾವಿರ",-13.31277561187744],["▁විදුලි",-13.31277561187744],["▁najlepší",-13.312776565551758],["▁күрөш",-13.312776565551758],["▁గొప్ప",-13.312776565551758],["▁Iqbal",-13.312777519226074],["▁Consellería",-13.312782287597656],["▁çareser",-13.312783241271973],["kaze",-13.312790870666504],["കൃഷ്ണ",-13.31279182434082],["▁मामला",-13.31279182434082],["▁bieži",-13.31279468536377],["▁bűn",-13.312795639038086],["▁പിന്നീട്",-13.312799453735352],["▁ultimul",-13.312808990478516],["▁jautājumu",-13.312820434570312],["▁모집",-13.312821388244627],["▁rolig",-13.312825202941896],["▁обсяг",-13.31282901763916],["▁محض",-13.31283187866211],["▁Chand",-13.312833786010742],["Hor",-13.31283473968506],["▁quorum",-13.312848091125488],["▁приходится",-13.312849044799805],["▁একজন",-13.312849044799805],["▁რასაც",-13.312851905822754],["▁Κορ",-13.312853813171388],["▁zawo",-13.31286334991455],["▁مسکن",-13.312867164611816],["▁ආයතන",-13.31287956237793],["▁σώμα",-13.312884330749512],["ชวน",-13.312910079956056],["תייחס",-13.312920570373535],["xente",-13.31293773651123],["▁Valor",-13.312952995300291],["ництво",-13.312976837158203],["وې",-13.312994003295898],["ntį",-13.31300163269043],["▁nakita",-13.31302261352539],["▁paredzēt",-13.313036918640137],["ினார்",-13.313043594360352],["▁mhór",-13.313044548034668],["▁මතක්",-13.313048362731934],["▁gubi",-13.313058853149414],["▁숨",-13.313082695007324],["ىمەن",-13.313095092773438],["▁Bendr",-13.313115119934082],["▁дэд",-13.313117980957031],["-08",-13.313132286071776],["атып",-13.313138008117676],["हान",-13.313138008117676],["h
aber",-13.313157081604004],["▁szabály",-13.31316089630127],["▁Diet",-13.3131685256958],["▁comentariu",-13.313170433044434],["▁Basic",-13.313191413879396],["string",-13.313207626342772],["▁Dara",-13.313207626342772],["▁LGBT",-13.313220977783203],["哥哥",-13.313233375549316],["gib",-13.313237190246582],["पान",-13.313261032104492],["kille",-13.313291549682615],["plass",-13.3133544921875],["adam",-13.313356399536133],["систем",-13.313362121582031],["▁prad",-13.313392639160156],["抑",-13.313395500183104],["▁redd",-13.313409805297852],["▁erum",-13.313410758972168],["▁tuba",-13.313414573669434],["▁พอ",-13.31342601776123],["orin",-13.31342887878418],["ුණ",-13.313429832458496],["▁Moc",-13.313433647155762],["▁Літ",-13.313454627990724],["事項",-13.313478469848633],["▁champ",-13.313480377197266],["發布",-13.313488006591797],["▁ಹೆಚ್ಚ",-13.313509941101074],["▁صدور",-13.313516616821287],["▁spray",-13.313563346862791],["▁siyah",-13.313568115234377],["得以",-13.31358814239502],["▁mieltä",-13.313605308532717],["īgās",-13.31363296508789],["argi",-13.313644409179688],["사항",-13.31364631652832],["लेट",-13.313663482666016],["▁Του",-13.313666343688965],["ställningar",-13.313693046569824],["▁нез",-13.313694953918455],["▁удовлетворен",-13.313703536987305],["▁posvet",-13.31375217437744],["▁ที่มี",-13.313779830932615],["▁Client",-13.313785552978516],["▁tón",-13.313788414001465],["▁годов",-13.313790321350098],["▁працяг",-13.313801765441896],["▁rules",-13.313812255859377],["▁ئىل",-13.313823699951172],["▁lancer",-13.313835144042969],["▁6%",-13.313838958740234],["גמר",-13.31384563446045],["▁LLC",-13.313849449157717],["▁zanîn",-13.313852310180664],["▁коль",-13.313881874084473],["▁démon",-13.313916206359863],["▁почна",-13.313941955566406],["១៤",-13.313942909240724],["ଠା",-13.31396198272705],["ล้อ",-13.313970565795898],["ケア",-13.313992500305176],["▁soka",-13.3140230178833],["გზა",-13.314038276672363],["kjær",-13.314047813415527],["▁Оваа",-13.314068794250488],["yorlar",-13.31407356262207],["▁delibera",-13.314082145690918],["▁binh",-13.314096450805664],["ವಿದೆ",-13.314122200012209],["льныя",-13.314133644104004],["▁magni",-13.314141273498535],["▁Teema",-13.31422519683838],["näs",-13.314228057861328],["οδό",-13.31423282623291],["▁income",-13.314233779907228],["ித்து",-13.314239501953123],["NJE",-13.314244270324709],["▁туристи",-13.314252853393556],["承诺",-13.314269065856934],["▁intellectu",-13.31428050994873],["情緒",-13.314284324645996],["▁hẳn",-13.314301490783691],["労働",-13.314311027526855],["ພາສາອັງກິດ",-13.314315795898438],["สัมผัส",-13.314318656921388],["▁difficoltà",-13.314318656921388],["▁giọng",-13.314318656921388],["▁tĩnh",-13.314318656921388],["▁zdjęć",-13.314318656921388],["▁δεύτερο",-13.314318656921388],["▁येईल",-13.314318656921388],["▁ಬಹಳ",-13.314318656921388],["▁Aalborg",-13.314319610595703],["▁харчування",-13.314319610595703],["▁କାର୍ଯ୍ୟକ୍ରମ",-13.31432056427002],["▁hasznos",-13.314321517944336],["▁सुरुवात",-13.314321517944336],["▁sprzęt",-13.314323425292969],["▁आरक्षण",-13.314323425292969],["▁Bahagian",-13.314325332641602],["▁છતાં",-13.314326286315918],["▁δίνει",-13.314327239990234],["▁администрации",-13.314329147338867],["▁өңір",-13.314329147338867],["▁વ્યાખ્યાઓ",-13.314329147338867],["▁ലഭിക്ക",-13.3143310546875],["▁përfshirë",-13.314332008361816],["▁трансфер",-13.314332008361816],["▁milliárd",-13.314332962036133],["ნაწილ",-13.314336776733398],["▁יוסף",-13.314338684082031],["▁ಅರ್ಥ",-13.314342498779297],["▁Hafðu",-13.314345359802246],["▁బు",-13.314349174499512],["ერს",-13.314370155334473],["▁actuar",-13.314382553100586],["▁util
izada",-13.31438446044922],["▁ይላል",-13.314390182495115],["▁nástroj",-13.314391136169434],["▁требуется",-13.314393997192385],["▁195",-13.314404487609863],["▁hieronder",-13.31441879272461],["▁varma",-13.314424514770508],["תמונה",-13.31443214416504],["▁מוצרים",-13.31443977355957],["全文",-13.314444541931152],["▁sequi",-13.314456939697266],["▁नियमित",-13.31446647644043],["رعاية",-13.31450080871582],["▁günah",-13.314528465270996],["▁кызматы",-13.31453800201416],["kood",-13.314547538757324],["▁встреч",-13.31455421447754],["▁ចិត្ត",-13.314565658569336],["ისგან",-13.31457233428955],["ади",-13.314573287963867],["▁கலை",-13.314577102661133],["حيل",-13.31463623046875],["▁کمر",-13.314679145812988],["▁κάποιος",-13.314688682556152],["▁təşkilat",-13.314702033996582],["▁отлично",-13.31474494934082],["तंत्र",-13.314749717712402],["lovchi",-13.314754486083984],["պատ",-13.314754486083984],["▁најави",-13.314756393432615],["▁پیمان",-13.31475830078125],["▁afferma",-13.31478786468506],["▁prosti",-13.31478786468506],["ۈك",-13.314824104309082],["juta",-13.314830780029297],["कन",-13.314886093139648],["lood",-13.314891815185549],["▁olmadan",-13.314895629882812],["▁pendek",-13.314900398254396],["travel",-13.314903259277344],["ritza",-13.314908027648926],["รัง",-13.314921379089355],["למת",-13.314929008483888],["▁فإنه",-13.31493091583252],["▁essent",-13.314943313598633],["▁mnogi",-13.31497573852539],["ιον",-13.315000534057615],["▁Juda",-13.31500244140625],["▁division",-13.31504249572754],["ridge",-13.315046310424805],["这个问题",-13.315068244934082],["▁ایڈ",-13.315091133117676],["一看",-13.31511688232422],["つか",-13.31514835357666],["▁ablak",-13.315213203430176],["kształ",-13.315230369567873],["▁पहल",-13.31523323059082],["จนถึง",-13.31525707244873],["CIJA",-13.315272331237791],["▁Aufgaben",-13.31528663635254],["▁Clark",-13.315299987792969],["▁হাত",-13.315321922302246],["ສ່ວນ",-13.315330505371094],["▁Akkor",-13.315332412719728],["ană",-13.315337181091309],["indre",-13.315346717834473],["postadress",-13.315359115600586],["▁공식",-13.315359115600586],["्यां",-13.315360069274902],["ANGA",-13.315377235412598],["▁కేసు",-13.315381050109863],["▁กิจกรรม",-13.315381050109863],["▁KAT",-13.31538200378418],["ుకొని",-13.315393447875977],["tarra",-13.315400123596191],["ЖЕ",-13.315428733825684],["ಯಾಗಿದೆ",-13.315428733825684],["先后",-13.315447807312012],["ದ್ಧ",-13.315454483032228],["հաղորդ",-13.315465927124023],["štev",-13.315471649169922],["▁βίντεο",-13.315476417541504],["32)",-13.315482139587402],["▁OC",-13.315482139587402],["തെ",-13.31548309326172],["▁හැකිය",-13.315496444702148],["▁राई",-13.315507888793944],["▁تہ",-13.31552028656006],["▁भेज",-13.315521240234377],["ગ્રા",-13.315542221069336],["ඔ",-13.315542221069336],["achas",-13.3155517578125],["▁kufu",-13.315592765808104],["િયમ",-13.315632820129396],["▁អង់គ្លេស",-13.31564235687256],["antys",-13.315658569335938],["▁मस्त",-13.315673828125],["▁(31)",-13.315692901611328],["▁Adapt",-13.315701484680176],["▁okvir",-13.31572437286377],["▁витрат",-13.315732955932615],["▁Olá",-13.315739631652832],["重要な",-13.315756797790527],["▁puo",-13.315762519836426],["▁الحال",-13.315775871276855],["集体",-13.315790176391602],["ljanje",-13.315804481506348],["LAG",-13.315808296203612],["▁шаардлага",-13.315831184387209],["hehehe",-13.315834045410156],["មាស",-13.315837860107422],["▁UU",-13.315842628479004],["тельств",-13.315848350524902],["ものが",-13.3158540725708],["സഭ",-13.315862655639648],["▁ცხოვრების",-13.315863609313965],["낙",-13.315863609313965],["▁Gedanken",-13.31586456298828],["▁Također",-13.31586456298828],["▁ndihmë",
-13.31586456298828],["▁reprehenderit",-13.31586456298828],["▁αύξηση",-13.31586456298828],["▁приликом",-13.31586456298828],["▁جىنپىڭ",-13.31586456298828],["▁অংশ",-13.31586456298828],["▁ದಕ್ಷಿಣ",-13.31586456298828],["▁относительно",-13.315865516662598],["▁zobaczyć",-13.315868377685549],["▁මෙසේ",-13.31587028503418],["▁omtrent",-13.315871238708496],["▁организатор",-13.315871238708496],["▁ginagawa",-13.315874099731444],["▁ବିବାହ",-13.315876960754396],["▁lembut",-13.315877914428713],["▁ISSN",-13.315879821777344],["▁الوصول",-13.31588077545166],["▁காட்சி",-13.31588649749756],["▁դրանք",-13.315888404846191],["▁로그인",-13.315890312194824],["▁civiliza",-13.315895080566406],["हाल",-13.315896034240724],["▁dab",-13.315942764282228],["▁МИ",-13.315977096557615],["▁кнігі",-13.31597900390625],["▁hverju",-13.315991401672363],["เป็นผู้",-13.31601619720459],["▁pornofilm",-13.31602668762207],["udha",-13.316041946411133],["▁végzett",-13.316044807434082],["▁nalika",-13.316046714782717],["所谓",-13.316049575805664],["!!!!!!!",-13.316062927246094],["нным",-13.316065788269045],["▁Skip",-13.316067695617676],["▁காரணம்",-13.316080093383787],["▁lengo",-13.316085815429688],["حقوق",-13.316102981567385],["ພົບ",-13.3161039352417],["zaam",-13.316118240356444],["ıntı",-13.31612491607666],["▁alfabet",-13.316144943237305],["▁გუ",-13.316150665283203],["렸",-13.31616497039795],["▁Затова",-13.316168785095217],["▁Integra",-13.316191673278809],["出发",-13.316198348999023],["ίδη",-13.316204071044922],["▁precisamente",-13.316222190856934],["ခိုင်",-13.316224098205566],["很久",-13.316262245178224],["হার",-13.31626796722412],["ijden",-13.316272735595703],["1.4",-13.316274642944336],["▁хаяг",-13.316277503967283],["bagai",-13.316283226013184],["▁alusel",-13.316290855407717],["▁poraz",-13.31629467010498],["▁Anlage",-13.31630516052246],["日々",-13.31630516052246],["▁počasi",-13.316312789916992],["konduk",-13.316314697265623],["▁kurrë",-13.316324234008787],["▁ventila",-13.31633472442627],["▁тендер",-13.316339492797852],["▁Menn",-13.3163480758667],["▁رحمت",-13.316360473632812],["▁Год",-13.316366195678713],["چن",-13.316399574279783],["aaday",-13.316407203674316],["הפוך",-13.316426277160645],["▁replace",-13.316442489624023],["eranno",-13.316446304321287],["šy",-13.316447257995604],["تناول",-13.316457748413086],["ნოთ",-13.316471099853516],["▁מתחיל",-13.31649684906006],["▁რის",-13.31650447845459],["ውና",-13.316515922546388],["▁brød",-13.3165283203125],["▁התק",-13.316568374633787],["▁presne",-13.316569328308104],["▁emër",-13.316594123840332],["▁നന്നായി",-13.316610336303713],["ფორ",-13.316627502441406],["▁Wad",-13.316642761230469],["▁Aap",-13.316693305969238],["▁šalt",-13.316697120666504],["▁싸",-13.316706657409668],["hű",-13.316707611083984],["ファ",-13.316712379455566],["psu",-13.3167142868042],["▁Twin",-13.3167142868042],["▁pravic",-13.316722869873049],["шет",-13.316731452941896],["▁mulighet",-13.316758155822754],["▁ກ່ຽວ",-13.31676197052002],["▁torre",-13.3167724609375],["ულა",-13.316774368286133],["▁javni",-13.316781044006348],["▁pudi",-13.316808700561523],["▁хүргэ",-13.316838264465332],["▁यूपी",-13.316839218139648],["saba",-13.316843032836914],["ेपछि",-13.31687068939209],["▁ຜ",-13.316871643066406],["▁garderob",-13.316908836364746],["нята",-13.316929817199709],["ልክ",-13.316951751708984],["▁felhasználó",-13.31695556640625],["▁Maret",-13.316964149475098],["▁облада",-13.316965103149414],["▁פרס",-13.316967964172363],["രാജ",-13.316973686218262],["ጊዜ",-13.317001342773438],["جميع",-13.317031860351562],["штво",-13.317061424255373],["▁ylä",-13.317072868347168],["ائت",-13.
317090034484863],["態度",-13.317097663879396],["▁desarrolla",-13.31710147857666],["▁kulan",-13.317147254943848],["▁xD",-13.317147254943848],["ବୀ",-13.31717014312744],["▁veze",-13.317188262939451],["Win",-13.317205429077148],["చె",-13.31722927093506],["▁Rica",-13.317235946655272],["하는데",-13.317243576049805],["▁fampi",-13.317246437072754],["▁predict",-13.3172607421875],["kime",-13.317286491394045],["ทุกอย่าง",-13.317296981811523],["▁ආයතනය",-13.317307472229004],["▁productie",-13.317325592041016],["▁ziel",-13.317330360412598],["▁PAL",-13.317331314086914],["雕",-13.317362785339355],["värv",-13.317370414733888],["所以我",-13.317371368408203],["▁Harus",-13.317377090454102],["▁യൂ",-13.317388534545898],["配套",-13.317404747009276],["决赛",-13.317405700683594],["ສູ່",-13.317410469055176],["▁Todėl",-13.317412376403809],["▁bụng",-13.317412376403809],["▁vrátane",-13.317412376403809],["▁спечели",-13.317412376403809],["▁ճիշտ",-13.317412376403809],["▁ۋاقتى",-13.317413330078123],["▁düzgün",-13.317415237426758],["▁अलावा",-13.317415237426758],["▁समावेश",-13.317415237426758],["▁álbum",-13.31741714477539],["▁заштиту",-13.317418098449709],["▁belediye",-13.317421913146973],["▁කෙල්ල",-13.317424774169922],["▁इथे",-13.31743621826172],["óður",-13.317438125610352],["▁мел",-13.317438125610352],["▁vaikams",-13.317439079284668],["▁курорт",-13.317440032958984],["▁ବିଜେଡି",-13.317441940307615],["▁большое",-13.31747055053711],["kās",-13.317500114440918],["熱門",-13.317505836486816],["▁zoznam",-13.317511558532717],["病気",-13.317522048950195],["▁Utrecht",-13.317529678344728],["▁बनाया",-13.31753158569336],["▁ఫా",-13.31754207611084],["▁tenni",-13.317572593688965],["ríki",-13.317596435546877],["▁fält",-13.31761074066162],["ദിന",-13.31762409210205],["▁ಭಾವ",-13.317632675170898],["▁çözüm",-13.317652702331545],["zwol",-13.317659378051758],["суди",-13.31766414642334],["旅館",-13.31766414642334],["fusion",-13.317666053771973],["ુલ",-13.317667961120604],["έντ",-13.317684173583984],["даш",-13.317689895629885],["▁செய்யும்",-13.317691802978516],["▁செயல்",-13.317699432373049],["▁172",-13.317705154418944],["▁وایي",-13.317706108093262],["హో",-13.317731857299805],["更好地",-13.317740440368652],["hiko",-13.317745208740234],["▁Afya",-13.317745208740234],["공사",-13.317749977111816],["sensi",-13.31775188446045],["▁deegaanka",-13.317763328552246],["▁Milf",-13.317764282226562],["▁вашия",-13.317764282226562],["▁Kapital",-13.317782402038574],["▁ayak",-13.317818641662598],["▁Cidade",-13.317835807800291],["▁allanol",-13.317852973937988],["▁सूत्र",-13.317856788635254],["لوك",-13.31786060333252],["▁webových",-13.31789493560791],["▁מים",-13.31790542602539],["چھ",-13.31793212890625],["社員",-13.31795597076416],["pew",-13.31796646118164],["ほか",-13.31796932220459],["1.1",-13.31797695159912],["▁9.00",-13.31797695159912],["▁quiet",-13.317992210388184],["تري",-13.318007469177246],["άτη",-13.318032264709473],["規格",-13.318036079406738],["acce",-13.318042755126951],["Rus",-13.318047523498535],["▁margin",-13.318058967590332],["үрө",-13.31806755065918],["▁Stav",-13.318072319030762],["ÎN",-13.318077087402344],["我说",-13.318114280700684],["ຈັນ",-13.31814956665039],["▁Katolik",-13.318150520324709],["واج",-13.31816577911377],["▁Gim",-13.318180084228516],["lerim",-13.31822395324707],["▁රණ",-13.318229675292969],["kainen",-13.318236351013184],["▁realize",-13.318255424499512],["▁horo",-13.31826114654541],["zsa",-13.318263053894045],["▁Kamati",-13.318265914916992],["▁bumili",-13.318278312683104],["lnih",-13.318312644958496],["▁ನೀಡಿ",-13.318346977233888],["учили",-13.318364143371582],["μένοι",-13.3
18365097045898],["śle",-13.318382263183594],["▁ক্র",-13.31838321685791],["▁cəmiyyət",-13.318384170532228],["لوم",-13.318391799926758],["▁qaban",-13.31841278076172],["▁인천",-13.318416595458984],["dienste",-13.318449020385742],["▁pourrez",-13.318490982055664],["▁същото",-13.318499565124512],["▁aké",-13.318507194519045],["▁разреш",-13.318525314331056],["▁جنيه",-13.31855010986328],["▁augustus",-13.318553924560549],["skipta",-13.318574905395508],["▁կոր",-13.31857967376709],["טיס",-13.318603515625],["▁Hyper",-13.318618774414062],["▁وقالت",-13.31864070892334],["督",-13.31865406036377],["▁tantara",-13.318666458129885],["uuliyiin",-13.318682670593262],["σιμ",-13.31870460510254],["สว",-13.318717002868652],["ကယ်",-13.318718910217283],["▁İndi",-13.318727493286133],["▁گرا",-13.318742752075195],["▁경영",-13.318754196166992],["▁خبرنگار",-13.31875705718994],["▁Luca",-13.318775177001951],["սան",-13.318790435791016],["ъп",-13.318792343139648],["▁vindeiro",-13.318799018859863],["kuasa",-13.31879997253418],["μια",-13.318806648254396],["▁спрос",-13.31884765625],["スター",-13.31886863708496],["出售",-13.31887149810791],["▁المقبل",-13.318882942199709],["▁vstop",-13.318889617919922],["戈",-13.318906784057615],["ดื่ม",-13.318907737731934],["鸟",-13.318910598754885],["steigen",-13.318913459777832],["欣賞",-13.318921089172363],["▁संघीय",-13.318931579589844],["ფილ",-13.318944931030272],["ඬ",-13.318954467773438],["อุตสาหกรรม",-13.318961143493652],["큐",-13.318961143493652],["▁kababaihan",-13.318962097167969],["▁kërkuar",-13.318962097167969],["▁pengunjung",-13.318962097167969],["▁شہباز",-13.318962097167969],["▁საკმაოდ",-13.318962097167969],["▁pronuncia",-13.318963050842283],["▁προστασία",-13.318963050842283],["▁компютър",-13.318963050842283],["▁માણસ",-13.318964004516602],["▁ئەمەس",-13.318967819213867],["▁öelda",-13.318970680236816],["▁बिक्री",-13.318971633911133],["▁prostředí",-13.318974494934082],["▁талкуу",-13.318975448608398],["გუ",-13.318979263305664],["▁sobra",-13.318979263305664],["▁арест",-13.319000244140623],["▁Tall",-13.319010734558104],["▁doonaan",-13.319015502929688],["▁ਜਾਰੀ",-13.319024085998535],["▁รายละเอียด",-13.319028854370115],["▁dagelijks",-13.31903076171875],["▁있게",-13.319040298461914],["▁сімей",-13.31904411315918],["િકા",-13.319050788879396],["▁rapidamente",-13.319072723388672],["▁లక్షల",-13.319074630737305],["मुक्त",-13.319091796875],["表達",-13.319091796875],["▁жаман",-13.319097518920898],["davad",-13.319099426269531],["စင္",-13.31910800933838],["▁viaţă",-13.319108963012695],["អនុ",-13.319110870361328],["▁ਸਾਂਝਾ",-13.319121360778809],["▁nha",-13.319147109985352],["▁వెళ్లి",-13.319156646728516],["▁Sino",-13.319157600402832],["▁kuwo",-13.319168090820312],["▁Murat",-13.319169998168944],["▁presentation",-13.319186210632324],["턴",-13.319186210632324],["▁नभएको",-13.31919288635254],["dať",-13.319194793701172],["ສັນ",-13.319194793701172],["komand",-13.319231033325195],["▁djup",-13.319247245788574],["hida",-13.319265365600586],["▁Gib",-13.319266319274902],["иков",-13.319275856018066],["ዘር",-13.319283485412598],["▁Catalana",-13.319296836853027],["▁medieval",-13.319299697875977],["▁French",-13.319315910339355],["▁poslova",-13.319342613220217],["▁комисс",-13.319344520568848],["▁Usaha",-13.319361686706545],["รับรอง",-13.319391250610352],["▁ingatlan",-13.319411277770996],["ເລ",-13.31942653656006],["▁периоду",-13.319430351257324],["▁corta",-13.31943130493164],["кажи",-13.319432258605955],["▁sklen",-13.319452285766602],["▁PAK",-13.319461822509766],["▁Matu",-13.319465637207031],["せて",-13.31947898864746],["▁almış",-13.319488525390623
],["▁ayaan",-13.31948947906494],["BIL",-13.31950569152832],["ربة",-13.319506645202637],["▁сайце",-13.319507598876951],["ສຸ",-13.319514274597168],["▁Tool",-13.3195161819458],["▁cherche",-13.319522857666016],["▁unang",-13.319543838500977],["前景",-13.319550514221191],["▁karibuni",-13.319567680358888],["יגה",-13.319584846496582],["ರದ",-13.319616317749023],["ശ്രമ",-13.319624900817873],["▁християн",-13.31965160369873],["▁patir",-13.319653511047363],["很重要",-13.319683074951172],["▁இப்போது",-13.319707870483398],["▁ጭ",-13.319710731506348],["▁Mika",-13.319729804992676],["šajā",-13.319759368896484],["▁소리",-13.319766998291016],["▁joukkue",-13.319769859313965],["▁Samtidig",-13.319774627685549],["▁השל",-13.319780349731444],["▁δράση",-13.319791793823242],["▁الإسلامي",-13.319791793823242],["▁රෝග",-13.319796562194824],["ጠፋ",-13.319804191589355],["ilish",-13.319819450378418],["▁بدر",-13.319838523864746],["▁حکمران",-13.31984043121338],["▁등에",-13.319856643676758],["ణ్",-13.319868087768556],["മര്",-13.319891929626465],["Lie",-13.319924354553224],["GES",-13.31993007659912],["▁Mudane",-13.319961547851562],["▁разби",-13.319978713989258],["▁terk",-13.319987297058104],["▁lower",-13.32000732421875],["▁beslis",-13.32004451751709],["▁gedir",-13.32006549835205],["შვი",-13.320075035095217],["▁182",-13.320083618164062],["▁توضیح",-13.320120811462402],["마다",-13.32012939453125],["ಾಸ್",-13.320137977600098],["▁Мета",-13.320171356201172],["ніцы",-13.320197105407717],["▁Period",-13.320198059082031],["สวยงาม",-13.320201873779297],["ΑΠ",-13.32020664215088],["▁пренос",-13.320207595825195],["▁tervise",-13.320208549499512],["▁EH",-13.320230484008787],["unha",-13.320246696472168],["▁қажетті",-13.320282936096191],["strik",-13.320286750793455],["лява",-13.320290565490724],["▁hatibû",-13.320310592651367],["▁neist",-13.320316314697266],["קב",-13.320326805114746],["つつ",-13.32032871246338],["▁Brig",-13.320332527160645],["forsikring",-13.320343017578123],["▁Македонски",-13.320350646972656],["ิม",-13.320369720458984],["▁[6]",-13.320375442504885],["▁producere",-13.32038688659668],["▁Tipp",-13.320398330688477],["ախտ",-13.320425987243652],["▁gildi",-13.3204345703125],["▁Peda",-13.32044506072998],["იათ",-13.320460319519045],["▁важны",-13.3204984664917],["ぬ",-13.320502281188965],["髪",-13.32050609588623],["逸",-13.320509910583496],["▁события",-13.320514678955078],["▁gevonden",-13.320515632629396],["▁marrëdhënie",-13.320515632629396],["▁xénero",-13.320515632629396],["▁ευκαιρία",-13.320515632629396],["▁имущество",-13.320515632629396],["▁каждой",-13.320515632629396],["▁නෙමෙයි",-13.320515632629396],["▁წყარო",-13.320515632629396],["▁අමතක",-13.320516586303713],["▁mtandao",-13.32051944732666],["Mil",-13.320526123046877],["▁Pali",-13.320526123046877],["▁пропонує",-13.32052993774414],["▁спасибо",-13.320534706115724],["▁Piştî",-13.320538520812988],["的目的",-13.320538520812988],["▁tradisional",-13.320539474487305],["▁סקס",-13.320548057556152],["▁Boz",-13.320556640625],["▁räkna",-13.320561408996582],["▁قارا",-13.32056713104248],["Święt",-13.320568084716797],["काळ",-13.320569038391112],["endam",-13.320576667785645],["▁Тан",-13.320579528808594],["▁bröst",-13.320585250854492],["▁doonaa",-13.320600509643556],["iisul",-13.320606231689451],["▁aldiz",-13.320630073547363],["▁fotball",-13.320630073547363],["▁Бразил",-13.320642471313477],["edat",-13.320651054382324],["▁maggiori",-13.32065200805664],["മസ്",-13.32065486907959],["▁učini",-13.320691108703612],["компози",-13.320722579956056],["▁მალე",-13.3207426071167],["▁прозор",-13.32075309753418],["▁vyt",-13.320755958557127],[
"▁الاقتصادية",-13.320765495300291],["▁Հայաստանից",-13.32078456878662],["וות",-13.320788383483888],["▁војна",-13.320788383483888],["تراجع",-13.320806503295898],["▁według",-13.320808410644531],["▁قید",-13.32081413269043],["▁১৬",-13.320820808410645],["arten",-13.320840835571287],["▁marxa",-13.320844650268556],["▁Vagy",-13.32084846496582],["רוח",-13.320857048034668],["主流",-13.32086181640625],["oarele",-13.320878982543944],["▁lumii",-13.320927619934082],["штин",-13.32093334197998],["VL",-13.32093906402588],["▁کردہ",-13.32097339630127],["▁Government",-13.32097625732422],["ያቸው",-13.320980072021484],["▁aquele",-13.32099151611328],["9/",-13.32099437713623],["▁Câ",-13.321019172668455],["▁jeweiligen",-13.321044921875],["▁Somali",-13.321067810058594],["▁પહોંચ",-13.321070671081545],["ಚು",-13.32109260559082],["сова",-13.321094512939451],["▁പരീക്ഷ",-13.32110595703125],["▁neska",-13.321107864379885],["▁Yun",-13.321112632751465],["▁عالمي",-13.32113265991211],["▁მოვ",-13.321142196655272],["▁ប្រាក់",-13.32115650177002],["работать",-13.32119846343994],["▁మూ",-13.32120132446289],["▁Seni",-13.32120418548584],["NIE",-13.321206092834473],["лико",-13.32122039794922],["▁Abo",-13.3212308883667],["▁престо",-13.321249008178713],["▁ಹಾಗೆ",-13.321250915527344],["▁Ane",-13.321259498596191],["నాయక",-13.321273803710938],["sumi",-13.321277618408203],["早已",-13.321280479431152],["▁arom",-13.321282386779783],["ദീ",-13.321292877197266],["▁نادر",-13.321294784545898],["▁biur",-13.321306228637695],["ticas",-13.321310997009276],["▁آتا",-13.321334838867188],["stille",-13.321346282958984],["▁පැවති",-13.321351051330566],["लिक",-13.321356773376465],["▁תת",-13.321372032165527],["വൈ",-13.32139015197754],["造成的",-13.321392059326172],["▁куче",-13.321393013000488],["▁freue",-13.32142734527588],["▁істер",-13.32144832611084],["▁읽",-13.321492195129396],["▁goud",-13.32150650024414],["գետ",-13.321551322937012],["▁되었다",-13.321555137634276],["वें",-13.321560859680176],["▁prowadzi",-13.321587562561035],["항공",-13.321587562561035],["▁극",-13.3215970993042],["▁oprette",-13.321599960327148],["ลัด",-13.32164192199707],["▁apne",-13.321646690368652],["แน่",-13.321677207946776],["чут",-13.321700096130373],["▁їхні",-13.321703910827637],["ΙΟ",-13.321721076965332],["▁ಕಾರು",-13.321732521057127],["TTA",-13.321748733520508],["zbud",-13.321778297424316],["▁filii",-13.321781158447266],["▁mitta",-13.321783065795898],["ニュース",-13.321800231933594],["زام",-13.32181453704834],["精心",-13.321824073791504],["人數",-13.321829795837402],["▁чат",-13.32184600830078],["reiten",-13.321847915649414],["▁pensamento",-13.321855545043944],["nelli",-13.321887969970703],["▁szint",-13.32188892364502],["عتمد",-13.321894645690918],["িশ",-13.321945190429688],["▁할인",-13.321950912475586],["ULI",-13.321980476379396],["早就",-13.321991920471191],["ల్లి",-13.32199764251709],["账",-13.321998596191406],["▁العمر",-13.322019577026367],["▁Lak",-13.322031021118164],["▁구조",-13.322038650512695],["4,000",-13.322056770324709],["カメラ",-13.322060585021973],["און",-13.322064399719238],["▁Galatasaray",-13.322070121765137],["▁LinkedIn",-13.322070121765137],["▁gréasáin",-13.322070121765137],["▁llengua",-13.322070121765137],["▁ಮದುವೆ",-13.322070121765137],["▁ಸ್ಮಾರ್ಟ್",-13.322070121765137],["▁බලාපොරොත්තු",-13.322070121765137],["▁náttúru",-13.322071075439451],["▁utjecaj",-13.322071075439451],["▁ευχαριστ",-13.322071075439451],["▁בעקבות",-13.322071075439451],["▁Campionat",-13.32207489013672],["▁зупин",-13.32207489013672],["▁ਪਾਣੀ",-13.322077751159668],["▁kozmetik",-13.3220853805542],["▁ಸಿಎಂ",-13.3220853805542],["orkest",-13.322
088241577148],["▁ತುಂಬಾ",-13.322092056274414],["▁تمرین",-13.32209300994873],["實施",-13.322093963623049],["ffo",-13.322096824645996],["képpen",-13.322098731994627],["▁머",-13.322098731994627],["▁родителей",-13.322102546691896],["▁எடுக்க",-13.322102546691896],["▁reso",-13.322111129760742],["▁mekaar",-13.322113037109377],["ผสม",-13.322117805480955],["▁svojimi",-13.322118759155272],["▁дейінгі",-13.322128295898438],["huile",-13.322134971618652],["ஷி",-13.322144508361816],["үүр",-13.322148323059082],["ေမာင္",-13.322154998779297],["▁місцевого",-13.322195053100586],["▁салбарын",-13.3222017288208],["▁XML",-13.3222074508667],["▁मोड",-13.322208404541016],["▁paroli",-13.322225570678713],["سلسل",-13.322253227233888],["▁diras",-13.322270393371582],["▁organizacji",-13.322283744812012],["tação",-13.322299003601074],["▁Biznes",-13.322312355041504],["entur",-13.32231616973877],["▁questione",-13.322338104248049],["▁policía",-13.322346687316896],["▁туш",-13.322361946105955],["▁løbet",-13.322365760803224],["▁masáž",-13.322381019592283],["▁മാത്രമ",-13.322391510009766],["▁noć",-13.322405815124512],["▁بالای",-13.322405815124512],["▁alimente",-13.322406768798828],["▁കുഞ്ഞ",-13.32240867614746],["いますが",-13.322409629821776],["ствени",-13.322423934936523],["plate",-13.322427749633787],["HY",-13.322453498840332],["▁Kreis",-13.322463035583496],["▁simila",-13.322463035583496],["እናንተ",-13.322464942932127],["piti",-13.32247257232666],["وفا",-13.322484016418455],["ల్ని",-13.32249355316162],["pisati",-13.322510719299316],["▁کوچ",-13.322514533996582],["▁Гали",-13.322517395019531],["학원",-13.322545051574709],["สิ้น",-13.322564125061035],["▁표시",-13.322569847106934],["СТИ",-13.322574615478516],["ንዳ",-13.322592735290527],["ድሮ",-13.322602272033691],["▁LEO",-13.322607040405272],["▁Универзитет",-13.322613716125488],["▁اړ",-13.322622299194336],["▁programı",-13.322623252868652],["内の",-13.322643280029297],["▁faccia",-13.322656631469728],["▁eksik",-13.322657585144045],["เหมือนกัน",-13.322669982910156],["▁पाय",-13.322691917419434],["▁гэтых",-13.3226957321167],["birin",-13.322705268859863],["unternehmen",-13.322723388671877],["▁lumina",-13.322731971740724],["▁irəli",-13.322750091552734],["▁სას",-13.322769165039062],["ключение",-13.322779655456545],["▁никой",-13.322820663452148],["▁ಪತ್ರ",-13.32282543182373],["itetit",-13.32284450531006],["▁концентра",-13.322847366333008],["xl",-13.322866439819336],["▁κόσμου",-13.322871208190918],["▁首頁",-13.322887420654297],["ekről",-13.322920799255373],["▁uut",-13.322925567626951],["▁ຫຼາຍ",-13.32294464111328],["▁pastaj",-13.322959899902344],["▁Aile",-13.322964668273926],["▁атмосфера",-13.32297420501709],["▁մարզի",-13.323005676269531],["אַק",-13.323027610778809],["▁diferent",-13.323040008544922],["тикалық",-13.323054313659668],["▁Олар",-13.323060989379885],["▁nauding",-13.323062896728516],["▁suspect",-13.323092460632324],["vješt",-13.323100090026855],["▁میز",-13.323111534118652],["▁imens",-13.323132514953612],["SAM",-13.323135375976562],["ঙ্ক",-13.323145866394045],["▁ismi",-13.323184967041016],["лықты",-13.323186874389648],["ದಿನ",-13.323187828063965],["nood",-13.323193550109863],["▁kusema",-13.323212623596191],["stall",-13.323217391967772],["ميد",-13.323238372802734],["▁svakog",-13.323240280151367],["▁צייט",-13.323260307312012],["▁සේවා",-13.323264122009276],["▁Şer",-13.323270797729492],["لط",-13.323275566101074],["ակալ",-13.32328987121582],["mdan",-13.323308944702148],["▁beslutning",-13.323318481445312],["▁чанар",-13.32334041595459],["పల్లి",-13.32336711883545],["▁Handy",-13.323384284973145],["▁realitate",-13.32
339096069336],["▁lack",-13.323419570922852],["▁ustav",-13.32344913482666],["▁jellemző",-13.32346248626709],["▁administrativa",-13.323474884033203],["▁þessari",-13.323484420776367],["▁العب",-13.32351303100586],["▁کړو",-13.323529243469238],["▁fide",-13.323531150817873],["▁legno",-13.323549270629885],["▁เจ้า",-13.323561668395996],["伊朗",-13.323562622070312],["财富",-13.323562622070312],["▁Саме",-13.3236083984375],["ваща",-13.323614120483398],["動畫",-13.323626518249512],["ນາຍົກລັດຖະມົນຕີ",-13.323627471923828],["Trøndelag",-13.323628425598145],["▁genießen",-13.323628425598145],["▁ixtisas",-13.323628425598145],["▁umiejętności",-13.323628425598145],["▁διάστημα",-13.323628425598145],["▁छोटे",-13.323628425598145],["▁शुक्रबार",-13.323628425598145],["▁Bewegung",-13.32362937927246],["▁Wünsche",-13.32362937927246],["▁परेशान",-13.32362937927246],["▁ਸ਼ਬਦ",-13.32362937927246],["卵",-13.323631286621094],["▁Seguridad",-13.32363224029541],["ตาราง",-13.323634147644045],["▁ovvero",-13.323634147644045],["▁איזה",-13.323634147644045],["▁الطاقة",-13.323641777038574],["ፕሬ",-13.323649406433104],["▁Manifest",-13.32365894317627],["▁záujem",-13.323667526245115],["▁wypełni",-13.323671340942385],["ພຽງ",-13.32367706298828],["▁bianco",-13.32367706298828],["tages",-13.323701858520508],["ણો",-13.32370662689209],["▁облысының",-13.323715209960938],["▁vat",-13.323735237121582],["▁tepki",-13.32375431060791],["▁أشهر",-13.323756217956545],["увате",-13.32379150390625],["▁wygląd",-13.323817253112791],["11.2017",-13.323819160461426],["▁сэр",-13.32384204864502],["▁ہوگیا",-13.323843955993652],["▁hudud",-13.323850631713867],["▁тэкст",-13.323854446411133],["цивил",-13.323858261108398],["あれ",-13.323859214782717],["なお",-13.323862075805664],["▁ਮਹ",-13.323863983154297],["ደርጉ",-13.323881149291992],["овом",-13.323884963989258],["لوس",-13.323914527893066],["評論",-13.32392692565918],["rediger",-13.323951721191406],["▁kjærligheten",-13.323965072631836],["ጣሪ",-13.323966979980469],["▁juntos",-13.323984146118164],["nadi",-13.32398796081543],["▁վիճակ",-13.323991775512695],["▁samostatn",-13.32399559020996],["▁noto",-13.32401180267334],["▁เมือง",-13.324014663696287],["▁kuongeza",-13.324056625366213],["▁திரை",-13.324061393737791],["▁Belanda",-13.324078559875488],["▁Sudan",-13.32410717010498],["šne",-13.32412338256836],["TAK",-13.324136734008787],["ורת",-13.324200630187988],["ประกัน",-13.324222564697266],["beck",-13.32424259185791],["▁socialista",-13.32425594329834],["3,5",-13.324257850646973],["▁парти",-13.324281692504885],["есте",-13.32430648803711],["▁SHA",-13.324316024780272],["▁لنک",-13.324335098266602],["چىلىق",-13.32433795928955],["▁поперед",-13.324346542358398],["իական",-13.324358940124512],["▁število",-13.324359893798828],["続",-13.324359893798828],["▁vetem",-13.324377059936523],["▁यसले",-13.324377059936523],["▁datele",-13.32437801361084],["▁auka",-13.324381828308104],["▁كۇ",-13.324386596679688],["▁advert",-13.324397087097168],["▁ব্র",-13.324422836303713],["▁боли",-13.324440956115724],["▁pracovn",-13.324471473693848],["then",-13.324474334716797],["рыг",-13.324487686157228],["ismeret",-13.324532508850098],["Бу",-13.324594497680664],["▁Colle",-13.32459831237793],["▁проблему",-13.324651718139648],["▁Δημο",-13.324670791625977],["дки",-13.324673652648926],["soni",-13.324701309204102],["还没",-13.324711799621582],["хоў",-13.324719429016112],["▁Оно",-13.32473850250244],["▁minkä",-13.32474422454834],["сыр",-13.324769020080566],["ର୍ଥ",-13.324769020080566],["▁Response",-13.3247709274292],["▁suuda",-13.324782371520996],["▁febrero",-13.32479763031006],["▁daxwaz",-13.
324810028076172],["سلم",-13.324810981750488],["▁Живот",-13.324837684631348],["▁privati",-13.324849128723145],["▁ரெ",-13.324853897094728],["承认",-13.32485580444336],["▁funksjon",-13.324893951416016],["wnego",-13.324899673461914],["1⁄2",-13.324902534484863],["▁модул",-13.324914932250977],["▁шама",-13.32492446899414],["▁ģimenes",-13.324934005737305],["▁stjórnar",-13.324935913085938],["▁الحي",-13.324944496154783],["ናይ",-13.324945449829102],["ему",-13.324962615966797],["исто",-13.32496738433838],["clin",-13.324989318847656],["度假",-13.325007438659668],["▁Rafi",-13.325013160705566],["▁negozi",-13.325024604797363],["▁ลง",-13.325043678283691],["fél",-13.32504653930664],["bilita",-13.325054168701172],["yalar",-13.32505989074707],["▁concentr",-13.32506275177002],["ण्याचे",-13.325072288513184],["huri",-13.325077056884766],["▁fotografií",-13.325078010559082],["▁માર",-13.325101852416992],["▁időszak",-13.325102806091309],["ൂരി",-13.32510757446289],["上帝",-13.325117111206056],["▁assure",-13.325119972229004],["郁",-13.325135231018066],["乾燥",-13.325152397155762],["▁студенттер",-13.325155258178713],["elia",-13.325165748596191],["ジェ",-13.325180053710938],["▁tp",-13.325181007385254],["บรรยากาศ",-13.325186729431152],["วัฒนธรรม",-13.325186729431152],["▁užsienio",-13.325188636779783],["▁zariadenia",-13.325188636779783],["▁દરમિયાન",-13.325188636779783],["▁প্রতিবেদন",-13.325189590454102],["▁lampu",-13.325191497802734],["▁tocmai",-13.32519245147705],["▁prebival",-13.325194358825684],["▁pomoci",-13.32521152496338],["▁საიტი",-13.325220108032228],["▁להשתמש",-13.325222969055176],["▁तुलना",-13.325223922729492],["یح",-13.325227737426758],["ಗೋ",-13.325237274169922],["▁আবার",-13.325250625610352],["▁arrimaha",-13.325252532958984],["▁المناطق",-13.3252534866333],["▁Høj",-13.325255393981934],["▁الفيديو",-13.325255393981934],["悪い",-13.325260162353516],["tarian",-13.325261116027832],["▁baduzu",-13.325264930725098],["▁doubt",-13.325273513793944],["▁lahendus",-13.325273513793944],["▁Lietuvą",-13.325278282165527],["▁келіп",-13.32528018951416],["▁أخبار",-13.325303077697754],["▁ממנו",-13.325307846069336],["खु",-13.325310707092283],["▁đương",-13.3253173828125],["▁gəldi",-13.325319290161133],["RIO",-13.32533359527588],["奇怪",-13.325336456298828],["▁Праз",-13.325346946716309],["rieš",-13.325353622436523],["òng",-13.325364112854004],["▁novità",-13.3253755569458],["शास्त्र",-13.325387954711914],["UO",-13.32538890838623],["drow",-13.325406074523926],["▁destas",-13.325406074523926],["▁Gwe",-13.325410842895508],["▁partage",-13.325414657592772],["▁आपल्याला",-13.325428009033203],["▁spracovan",-13.325429916381836],["XD",-13.325432777404783],["क्य",-13.325432777404783],["eyso",-13.325445175170898],["▁lando",-13.32548999786377],["▁peligro",-13.325523376464844],["▁არს",-13.325546264648438],["ပွ",-13.32555103302002],["▁odam",-13.325554847717283],["▁تنگ",-13.325579643249512],["▁håber",-13.325599670410156],["▁yubor",-13.325599670410156],["tého",-13.325604438781738],["如同",-13.325604438781738],["ouvrir",-13.325623512268066],["▁الذهب",-13.325636863708496],["cyjna",-13.325650215148926],["▁utvikle",-13.325675964355469],["▁wajen",-13.325679779052734],["▁Крым",-13.325693130493164],["damos",-13.325698852539062],["ມີການ",-13.325714111328123],["▁Tomas",-13.325716018676758],["ให้คุณ",-13.325721740722656],["джан",-13.325737953186035],["buti",-13.325756072998049],["감을",-13.325764656066896],["▁குடி",-13.32577133178711],["ពល",-13.325782775878906],["▁players",-13.32578468322754],["וציא",-13.325819969177246],["בֿ",-13.325822830200195],["ਬੋ",-13.325839042663574],["展览",-13.3
25868606567385],["▁pasos",-13.325871467590332],["▁بغداد",-13.32590389251709],["▁يريد",-13.325926780700684],["▁Был",-13.325966835021973],["▁sinua",-13.325976371765137],["▁adatta",-13.325997352600098],["▁шү",-13.326008796691896],["racha",-13.326021194458008],["▁Barra",-13.326041221618652],["▁düşür",-13.326042175292969],["▁personaliza",-13.326042175292969],["▁сможет",-13.326055526733398],["▁ഥ",-13.326059341430664],["писи",-13.326064109802246],["หม",-13.32607078552246],["▁អង្គការ",-13.32607364654541],["่ว",-13.326085090637209],["▁روشنی",-13.326091766357422],["▁opciones",-13.326099395751951],["Стр",-13.326104164123535],["ваем",-13.326105117797852],["▁ئاد",-13.3261137008667],["▁usare",-13.326128005981444],["▁البو",-13.32613754272461],["によっては",-13.32614803314209],["▁fertil",-13.326159477233888],["▁යන්නේ",-13.326171875],["զմ",-13.326189041137695],["YL",-13.326199531555176],["pré",-13.326210021972656],["▁գնահատ",-13.326214790344238],["▁արդյունքում",-13.326220512390137],["▁διακ",-13.326223373413086],["ፈጠረ",-13.3262357711792],["סדר",-13.326255798339844],["格式",-13.326257705688477],["ዲያ",-13.326260566711426],["▁ਆਰ",-13.326276779174805],["āža",-13.326281547546388],["unica",-13.326300621032717],["▁glori",-13.326323509216309],["ដូចជា",-13.32632827758789],["▁ஒன்ற",-13.326348304748535],["▁అస",-13.326351165771484],["▁นิ้ว",-13.326359748840332],["▁बाबा",-13.32636260986328],["मार्ग",-13.326364517211914],["сија",-13.32636833190918],["▁ծառայություն",-13.32637882232666],["ћемо",-13.326398849487305],["▁агенција",-13.326449394226074],["▁dernières",-13.326471328735352],["▁wc",-13.326480865478516],["▁prevenci",-13.326493263244627],["▁کاربر",-13.32650089263916],["▁করছে",-13.32651710510254],["▁náhrad",-13.326530456542969],["ارم",-13.326541900634766],["▁argazki",-13.326543807983398],["なもの",-13.326549530029297],["▁बीमा",-13.32658863067627],["▁Pea",-13.326592445373535],["ვნებ",-13.326617240905762],["LOS",-13.326655387878418],["blement",-13.326661109924316],["вает",-13.326705932617188],["培育",-13.326730728149414],["▁Anni",-13.326737403869627],["▁ತುಂಬ",-13.326737403869627],["នៅលើ",-13.326743125915527],["▁advantage",-13.32675075531006],["▁menyelesaikan",-13.32675075531006],["▁príspevkov",-13.32675075531006],["▁reprezintă",-13.32675075531006],["▁внимания",-13.32675075531006],["▁conhecido",-13.326751708984377],["▁usluge",-13.326751708984377],["▁ամիս",-13.326751708984377],["▁experienta",-13.32675552368164],["▁weather",-13.32675552368164],["▁Egyetem",-13.326756477355955],["▁atëherë",-13.32675838470459],["▁diçka",-13.326759338378906],["hifadhi",-13.326765060424805],["トップ",-13.326774597167969],["▁месцы",-13.326775550842283],["▁Şurasının",-13.326794624328612],["▁ٻولي",-13.326794624328612],["achaidh",-13.326807022094728],["▁Avant",-13.326818466186523],["zustellen",-13.326828956604004],["▁kurulu",-13.326847076416016],["▁Fet",-13.32685661315918],["▁fırsat",-13.32686996459961],["▁하이",-13.32688331604004],["უა",-13.326911926269531],["▁Import",-13.326923370361328],["خې",-13.326946258544922],["▁Europese",-13.326953887939451],["оу",-13.326986312866213],["слів",-13.32699203491211],["▁prošle",-13.327000617980955],["zē",-13.327004432678224],["▁horror",-13.327011108398438],["▁գործող",-13.327011108398438],["合わせ",-13.327013969421388],["新竹",-13.327014923095703],["herë",-13.327020645141602],["▁abzu",-13.327038764953612],["gí",-13.327048301696776],["還要",-13.327048301696776],["▁Brno",-13.32705307006836],["▁μορφή",-13.327061653137209],["קשה",-13.327073097229004],["চে",-13.32707977294922],["сав",-13.327088356018066],["▁사고",-13.327115058898926],["гий",-13
.32711696624756],["▁Miks",-13.32713508605957],["പ്പൊ",-13.327143669128418],["▁السورية",-13.327152252197266],["▁نبي",-13.327157020568848],["▁melde",-13.327160835266112],["odott",-13.327178001403809],["▁günde",-13.327187538146973],["pisan",-13.327192306518556],["▁tiesa",-13.32719898223877],["тике",-13.327210426330566],["▁देखिए",-13.327211380004885],["▁liệt",-13.327220916748049],["ılarak",-13.32723617553711],["сив",-13.327241897583008],["▁knows",-13.327245712280272],["▁საათი",-13.32724666595459],["সু",-13.327266693115234],["Раз",-13.327269554138184],["▁kræver",-13.327272415161133],["▁Әр",-13.327289581298828],["▁arbejdet",-13.32730484008789],["registre",-13.32730770111084],["▁Elect",-13.327320098876951],["чево",-13.327367782592772],["▁оруулах",-13.327387809753418],["ටිය",-13.327393531799316],["▁жетиш",-13.327431678771973],["▁نوې",-13.327431678771973],["▁uger",-13.327434539794922],["worth",-13.327445030212402],["▁пав",-13.327462196350098],["▁पुनर्",-13.327465057373049],["وائی",-13.327476501464844],["▁دبي",-13.327476501464844],["▁bejegyzés",-13.327481269836426],["▁entidade",-13.327499389648438],["مؤشر",-13.327544212341309],["这也是",-13.3275728225708],["▁geus",-13.327574729919434],["nęła",-13.32760524749756],["▁ডে",-13.327608108520508],["▁seres",-13.327616691589355],["▁túi",-13.327621459960938],["وها",-13.327631950378418],["▁löp",-13.32763957977295],["▁построен",-13.327680587768556],["nieka",-13.327688217163086],["となった",-13.327696800231934],["Ð",-13.327698707580566],["ေဘာ",-13.327714920043944],["▁websted",-13.327715873718262],["elwe",-13.327716827392578],["▁délai",-13.327730178833008],["ନିଆ",-13.327750205993652],["หนี",-13.327762603759766],["▁lái",-13.32777500152588],["ሰላ",-13.327795028686523],["建造",-13.327795028686523],["▁वार्ता",-13.32779598236084],["אנר",-13.327808380126951],["▁गरज",-13.327808380126951],["▁ദേശ",-13.327839851379396],["▁બને",-13.327850341796877],["▁reproduk",-13.327877044677734],["bergs",-13.327884674072266],["▁улице",-13.327888488769531],["▁disciplin",-13.327929496765137],["▁handels",-13.327953338623049],["▁Thứ",-13.327954292297363],["ушев",-13.327964782714844],["▁zatim",-13.327974319458008],["динг",-13.327988624572754],["新生",-13.32800579071045],["▁रक्त",-13.328008651733398],["▁adora",-13.328012466430664],["▁krásne",-13.32801628112793],["▁partija",-13.32801628112793],["amista",-13.328051567077637],["ສໍາ",-13.328059196472168],["accès",-13.328063011169434],["▁кабел",-13.328083038330078],["gler",-13.328086853027344],["яж",-13.328092575073242],["▁iznos",-13.328103065490724],["▁soba",-13.328119277954102],["▁propriu",-13.328121185302734],["▁profet",-13.328130722045898],["работал",-13.328161239624023],["▁ባል",-13.328161239624023],["▁làn",-13.32816219329834],["न्ध",-13.32818603515625],["▁הדו",-13.328213691711426],["▁જુ",-13.32822036743164],["توق",-13.328224182128906],["tule",-13.328227996826172],["kjör",-13.328237533569336],["▁apotek",-13.328240394592283],["支撑",-13.328252792358398],["显然",-13.328256607055664],["▁направления",-13.328262329101562],["▁الإرهاب",-13.328265190124512],["▁අම්මා",-13.328266143798828],["ుగా",-13.328279495239258],["▁нормално",-13.32830810546875],["▁Potrivit",-13.32831573486328],["▁pakalpojumi",-13.32831573486328],["▁Ďakujem",-13.32831573486328],["▁φυσικά",-13.32831573486328],["▁вокруг",-13.32831573486328],["▁پرسپولیس",-13.32831573486328],["▁چارواکي",-13.32831573486328],["▁যৌন",-13.32831573486328],["▁નોંધ",-13.32831573486328],["▁ಜಗತ್ತಿನ",-13.32831573486328],["▁əməkdaşları",-13.328316688537598],["▁bisogna",-13.328317642211914],["▁أنواع",-13.328319549560549],["▁ପ୍ରେମ",-1
3.328319549560549],["▁जनवरी",-13.32832145690918],["▁უბრალოდ",-13.32832145690918],["▁Сте",-13.328323364257812],["▁מדיניות",-13.328323364257812],["▁Bilbao",-13.328324317932127],["▁सहकारी",-13.328325271606444],["▁آش",-13.328326225280762],["▁pagrindini",-13.32833194732666],["▁yerinde",-13.328332901000977],["▁duyệt",-13.328333854675291],["يمي",-13.32833480834961],["▁kötelező",-13.32833766937256],["▁уменьш",-13.32834243774414],["▁εμπειρία",-13.328349113464355],["▁قاسم",-13.328361511230469],["▁εάν",-13.328375816345217],["▁Göz",-13.32837963104248],["构成",-13.328388214111328],["▁(2009)",-13.328399658203123],["кту",-13.328410148620604],["▁wanaagsan",-13.328410148620604],["▁homines",-13.32841682434082],["▁Pip",-13.328426361083984],["ವ್ಯ",-13.328438758850098],["नायक",-13.328441619873049],["難以",-13.328441619873049],["▁komerc",-13.328444480895996],["UTI",-13.328446388244627],["▁vastuu",-13.32848834991455],["▁Elkartea",-13.328490257263184],["provi",-13.328506469726562],["▁पू",-13.32852268218994],["▁cận",-13.328523635864258],["違",-13.328560829162598],["▁Oromoo",-13.32856559753418],["ြတ္",-13.328568458557127],["▁администрация",-13.328606605529783],["จ้า",-13.32862663269043],["▁inguruko",-13.32863426208496],["▁migliorare",-13.328645706176758],["▁അര",-13.328649520874023],["Info",-13.328659057617188],["дије",-13.328661918640137],["▁رايون",-13.328669548034668],["▁చూప",-13.328670501708984],["▁fruits",-13.328680992126465],["▁조직",-13.328685760498049],["▁concede",-13.328697204589844],["▁товарів",-13.32871913909912],["▁مشاوره",-13.3287353515625],["▁Voll",-13.328753471374512],["ജാ",-13.328770637512209],["▁kjønn",-13.328771591186523],["▁independente",-13.32878875732422],["luokka",-13.328829765319824],["▁kampanje",-13.328856468200684],["ిని",-13.328907012939451],["гул",-13.328908920288086],["▁filosofia",-13.328912734985352],["▁munu",-13.328924179077148],["▁qərarı",-13.32893180847168],["ላለ",-13.32895851135254],["▁pensare",-13.328970909118652],["▁sija",-13.328973770141602],["▁sg",-13.32897663116455],["▁सामना",-13.328991889953612],["▁이는",-13.329011917114258],["ΝΟ",-13.329061508178713],["▁දීම",-13.329061508178713],["▁Punt",-13.329079627990724],["ว่าง",-13.329096794128418],["▁વિશ્વ",-13.329096794128418],["▁Saor",-13.329100608825684],["▁enged",-13.329102516174316],["▁визначення",-13.329102516174316],["▁escena",-13.329103469848633],["▁සිටිය",-13.329110145568848],["▁Agentliyi",-13.329121589660645],["▁عدل",-13.329121589660645],["▁запозна",-13.329131126403809],["▁Cic",-13.32913303375244],["▁ಗುರು",-13.32913589477539],["▁kullanma",-13.32913875579834],["kule",-13.329143524169922],["มั",-13.329157829284668],["▁vprašanje",-13.32917594909668],["▁conclusion",-13.329180717468262],["Ul",-13.329187393188477],["▁turnaj",-13.329214096069336],["▁studere",-13.329227447509766],["▁энергия",-13.329238891601562],["▁вигляд",-13.329262733459473],["stedet",-13.329264640808104],["/01",-13.3292818069458],["æða",-13.329291343688965],["▁jūr",-13.329291343688965],["تعرض",-13.329303741455078],["▁Dritte",-13.32930850982666],["食用",-13.329323768615724],["▁בסי",-13.32933521270752],["ρτ",-13.329353332519531],["offerta",-13.329360961914062],["▁виклад",-13.329378128051758],["pü",-13.329411506652832],["▁balse",-13.32941722869873],["głos",-13.329423904418944],["기로",-13.329428672790527],["▁찍",-13.329448699951172],["ようになりました",-13.329456329345703],["▁employees",-13.329460144042969],["▁skar",-13.329462051391602],["▁HY",-13.329479217529297],["▁Registroval",-13.329485893249512],["▁Heli",-13.32949161529541],["плаче",-13.329492568969728],["▁රජයේ",-13.329496383666992],["ബൈ"
,-13.32949924468994],["▁Kadın",-13.329505920410156],["▁Канад",-13.329505920410156],["케이",-13.32952880859375],["▁siasa",-13.3295316696167],["glasi",-13.329537391662598],["ֆիլ",-13.329570770263672],["▁felhasználás",-13.32960319519043],["хил",-13.329617500305176],["алка",-13.329619407653809],["Line",-13.329620361328123],["ЛІ",-13.329622268676758],["走出",-13.329634666442873],["ுள்ளனர்",-13.329645156860352],["τητα",-13.329666137695312],["ਜ਼ਰ",-13.329668998718262],["גד",-13.32968521118164],["▁vállalkozás",-13.329703330993652],["▁Straßen",-13.32970905303955],["dimo",-13.329710960388184],["▁impone",-13.329720497131348],["대학",-13.32974910736084],["▁sluta",-13.329750061035156],["▁לט",-13.329754829406738],["ትግራይ",-13.329789161682127],["▁برخوردار",-13.32979679107666],["▁talep",-13.329815864562988],["寂",-13.329830169677734],["ホーム",-13.32985496520996],["ાવવા",-13.329874038696287],["ရတဲ့",-13.329874992370604],["คว้า",-13.32988166809082],["▁Después",-13.329882621765137],["▁mouvement",-13.329882621765137],["▁zatiaľ",-13.329882621765137],["▁најбољи",-13.329882621765137],["▁тээврийн",-13.329882621765137],["▁التربية",-13.329882621765137],["▁বুধবার",-13.329882621765137],["▁ପ୍ରଶ୍ନ",-13.329882621765137],["▁инвестиции",-13.329883575439451],["▁객실",-13.32988452911377],["▁Frederik",-13.329885482788086],["▁улуттук",-13.329885482788086],["▁епископ",-13.329886436462402],["▁ਤੂੰ",-13.329886436462402],["▁तत्काल",-13.329888343811035],["▁информације",-13.329889297485352],["▁BhBC",-13.329890251159668],["▁ఉంటాయి",-13.329894065856934],["▁inneholder",-13.329915046691896],["▁rời",-13.329944610595703],["▁මහත්",-13.329967498779297],["▁ЗША",-13.32996940612793],["ບຸນ",-13.32997703552246],["ότι",-13.329977989196776],["▁삼성",-13.329980850219728],["▁szavaz",-13.330000877380373],["▁мянган",-13.330004692077637],["▁hukumat",-13.33000659942627],["▁vállalat",-13.330007553100586],["бө",-13.330018043518066],["▁انساني",-13.330034255981444],["▁לבין",-13.330039978027344],["wechsel",-13.330052375793455],["▁الشخصية",-13.33005428314209],["▁publiceras",-13.33006191253662],["▁sade",-13.33006763458252],["▁nettsted",-13.330073356628418],["▁numuru",-13.330074310302734],["大数据",-13.33009147644043],["▁проводить",-13.330095291137695],["laksana",-13.330107688903809],["ប្រភេទ",-13.330111503601074],["▁Napa",-13.330120086669922],["▁ओलीले",-13.330124855041504],["▁영국",-13.330134391784668],["▁Firmen",-13.330142974853516],["▁jutott",-13.330153465270996],["低于",-13.33016586303711],["دقيق",-13.330172538757324],["▁وڃن",-13.330180168151855],["▁Klasse",-13.330195426940918],["▁δικό",-13.330211639404297],["ίζουμε",-13.330215454101562],["ობენ",-13.330224990844728],["てきた",-13.33026123046875],["事实上",-13.33026123046875],["▁Balo",-13.330277442932127],["▁способност",-13.330277442932127],["一把",-13.33029556274414],["суз",-13.330314636230469],["▁Marte",-13.330317497253418],["▁مرګ",-13.330323219299316],["▁cinc",-13.330324172973633],["פית",-13.330334663391112],["▁ଦେବ",-13.330339431762695],["حكام",-13.330349922180176],["dheid",-13.330358505249023],["iskola",-13.330361366271973],["piri",-13.330395698547363],["เบา",-13.330395698547363],["▁officer",-13.33041763305664],["▁simile",-13.330452919006348],["ohje",-13.330476760864258],["▁filmov",-13.330477714538574],["▁فرانسه",-13.33049201965332],["▁Із",-13.330496788024902],["▁konstrukcij",-13.330506324768066],["UAR",-13.330517768859863],["方に",-13.33051872253418],["ANY",-13.330519676208496],["▁रेल",-13.330537796020508],["▁المجال",-13.330547332763672],["▁clube",-13.33055305480957],["ивают",-13.33055591583252],["jší",-13.330570220947266],["esztő",-1
3.330581665039062],["▁Ella",-13.33059787750244],["löt",-13.330615043640137],["အျဖစ္",-13.330617904663086],["limit",-13.330690383911133],["▁ରିପୋର୍ଟର",-13.33070182800293],["יתה",-13.330730438232422],["▁szeretet",-13.330784797668455],["mām",-13.330795288085938],["▁felület",-13.330801010131836],["▁trwał",-13.330803871154783],["צפון",-13.330805778503418],["تلك",-13.330811500549316],["ordine",-13.33081340789795],["veien",-13.330836296081545],["ίλ",-13.330838203430176],["▁відносин",-13.330864906311035],["▁gura",-13.330866813659668],["▁avete",-13.330890655517578],["tuksesta",-13.330906867980955],["ПЦ",-13.330913543701172],["▁Hod",-13.330937385559082],["ښو",-13.330973625183104],["▁Павел",-13.330974578857422],["ታል።",-13.330975532531738],["قامة",-13.330985069274902],["▁бөлімі",-13.331008911132812],["▁skrbi",-13.331010818481444],["▁ભૂલ",-13.331027030944824],["▁свеце",-13.3310546875],["İŞ",-13.33107566833496],["kritik",-13.331083297729492],["bilo",-13.331099510192873],["▁výlet",-13.331110954284668],["▁тартуу",-13.331122398376465],["▁ustvarja",-13.33114242553711],["▁برف",-13.331143379211426],["orto",-13.33115577697754],["учить",-13.331158638000488],["▁فردا",-13.331185340881348],["▁көрсет",-13.331198692321776],["ட்டும்",-13.331199645996094],["presión",-13.33120346069336],["▁всеми",-13.33120822906494],["posledn",-13.33121109008789],["▁ძვ",-13.331216812133787],["▁Traum",-13.331220626831056],["▁origen",-13.331223487854004],["▁aktie",-13.33124542236328],["▁роботі",-13.331254959106444],["ikä",-13.331255912780762],["مكن",-13.33126449584961],["▁baterij",-13.331270217895508],["▁repo",-13.331281661987305],["▁কারণে",-13.331295013427734],["ಿರುವುದು",-13.33133029937744],["▁ಬಹು",-13.331334114074709],["▁tens",-13.331345558166504],["把自己",-13.331354141235352],["рид",-13.331361770629885],["raca",-13.331372261047363],["បន្",-13.33138370513916],["柜",-13.331391334533691],["▁phase",-13.331393241882324],["▁spread",-13.331421852111816],["▁Sehr",-13.33142375946045],["lindje",-13.331428527832031],["üst",-13.331442832946776],["が出て",-13.331449508666992],["打击",-13.331449508666992],["bw",-13.33145236968994],["ኸ",-13.33145236968994],["▁dikeluarkan",-13.331453323364258],["▁véritable",-13.331453323364258],["▁zamówienia",-13.331453323364258],["▁байдлаар",-13.331453323364258],["กาแฟ",-13.33145523071289],["▁արտաքին",-13.33145523071289],["හැර",-13.331456184387209],["▁келісім",-13.331456184387209],["▁సెక్స్",-13.331459999084473],["▁துறை",-13.331464767456056],["▁भन्छन्",-13.331465721130373],["▁રહેશે",-13.331472396850586],["Hz",-13.331473350524902],["▁дојде",-13.331478118896484],["▁effects",-13.331480979919434],["▁بجلی",-13.331485748291016],["▁현실",-13.331487655639648],["▁luoghi",-13.331488609313965],["幾年",-13.33149528503418],["gát",-13.331506729125977],["ኙ",-13.331506729125977],["▁бері",-13.331510543823242],["▁Đến",-13.331515312194824],["läkare",-13.331531524658203],["▁kukk",-13.331531524658203],["▁கேட்க",-13.33154010772705],["яцца",-13.331551551818848],["▁ΕΕ",-13.331551551818848],["▁भन्न",-13.331564903259276],["拒絕",-13.331573486328123],["▁оюн",-13.331578254699709],["નિક",-13.331583976745604],["▁regiono",-13.331589698791504],["▁مهمان",-13.331592559814451],["▁തീര്",-13.33160400390625],["▁Sjá",-13.331612586975098],["▁metabol",-13.331612586975098],["▁hadapan",-13.331618309020996],["יקט",-13.331626892089844],["▁입력",-13.331629753112791],["▁unieke",-13.33163833618164],["▁маюць",-13.33163833618164],["ที่ต้องการ",-13.33164119720459],["スの",-13.331655502319336],["▁поједин",-13.331674575805664],["▁pirs",-13.331693649291992],["assemble",-13.331716537475586
],["▁asistencia",-13.33172607421875],["ääri",-13.331727981567385],["▁Río",-13.331753730773926],["▁Правда",-13.331754684448242],["▁பெரு",-13.331779479980469],["වුන",-13.331785202026367],["▁Greqi",-13.331794738769531],["▁كۈنى",-13.331805229187012],["bör",-13.331817626953123],["▁annonse",-13.331829071044922],["atsioon",-13.331838607788086],["▁Sarı",-13.331849098205566],["▁කිව්වා",-13.331856727600098],["توقع",-13.331864356994627],["▁Cyber",-13.331890106201172],["▁lade",-13.331897735595703],["▁tabell",-13.331950187683104],["不受",-13.331953048706056],["fuck",-13.331964492797852],["legur",-13.331972122192385],["OLU",-13.33198356628418],["्दैन",-13.332005500793455],["ამო",-13.332006454467772],["оса",-13.332022666931152],["လု",-13.332033157348633],["▁vij",-13.332035064697266],["▁beroep",-13.332064628601074],["▁carrega",-13.33209228515625],["नः",-13.332107543945312],["▁træning",-13.332120895385742],["▁កិច្ច",-13.332122802734377],["▁verán",-13.332147598266602],["▁హి",-13.332160949707031],["▁vortoj",-13.33218002319336],["▁උන",-13.332191467285156],["▁betalt",-13.332197189331056],["▁એવા",-13.332199096679688],["▁አሜሪካ",-13.33220100402832],["тыка",-13.3322114944458],["ערס",-13.332215309143066],["▁Ошол",-13.332222938537598],["▁2017/2018",-13.33222484588623],["▁ଯା",-13.332250595092772],["schilder",-13.332269668579102],["▁использование",-13.332282066345217],["하겠다",-13.33229160308838],["▁PDK",-13.33230686187744],["▁سلم",-13.332334518432615],["人を",-13.332345962524414],["فشل",-13.332348823547363],["▁könnt",-13.332358360290527],["רש",-13.332372665405272],["όμο",-13.332379341125488],["▁Numer",-13.33238410949707],["wezen",-13.332399368286133],["▁شل",-13.33241081237793],["高端",-13.33241367340088],["તન",-13.332416534423828],["▁ເຫັນ",-13.332419395446776],["▁skemmtileg",-13.332432746887209],["▁Vem",-13.332467079162598],["ည္",-13.33247184753418],["шыя",-13.332494735717772],["хання",-13.332508087158203],["gangan",-13.33251953125],["iseach",-13.33251953125],["ማት",-13.332523345947266],["▁پيل",-13.33253574371338],["▁witte",-13.33254623413086],["▁чувств",-13.332558631896973],["ഞ്ച്",-13.332581520080566],["▁አለመ",-13.332597732543944],["▁Keli",-13.332616806030272],["痛み",-13.33263111114502],["▁смысл",-13.332636833190918],["ਅਸ",-13.3326416015625],["▁kalde",-13.332647323608398],["▁опасност",-13.332664489746094],["▁systemet",-13.33267307281494],["▁қосымша",-13.332676887512209],["▁ritm",-13.332682609558104],["▁tundi",-13.332722663879396],["▁kiitos",-13.332731246948242],["▁riz",-13.33273696899414],["▁luu",-13.332768440246582],["▁haute",-13.332783699035645],["▁kept",-13.332806587219238],["▁Й",-13.332825660705566],["્રા",-13.332830429077148],["▁мнения",-13.332840919494627],["▁teologi",-13.332855224609377],["ஆர்",-13.332856178283691],["ումն",-13.33287525177002],["ნუ",-13.3328857421875],["▁umetni",-13.332903861999512],["части",-13.33290672302246],["▁əlaqələr",-13.332929611206056],["▁nebija",-13.332935333251951],["fuld",-13.332937240600586],["贯彻",-13.332938194274902],["屈",-13.332942008972168],["▁csomag",-13.332952499389648],["ورت",-13.332955360412598],["መሰ",-13.332964897155762],["▁технологи",-13.332969665527344],["▁damals",-13.33297634124756],["▁bölgə",-13.332980155944824],["▁перемог",-13.332980155944824],["ストレス",-13.333005905151367],["liini",-13.333013534545898],["▁እር",-13.33301830291748],["ប្រជាពលរដ្ឋ",-13.33302402496338],["▁köszönhetően",-13.333024978637695],["▁tecnoloxía",-13.333024978637695],["▁zumindest",-13.333024978637695],["▁паводле",-13.333024978637695],["▁ذرائع",-13.333024978637695],["▁პრობლემა",-13.333024978637695],["▁얼굴",-13.333
024978637695],["▁Beziehung",-13.333025932312012],["▁αποτελέσματα",-13.333026885986328],["▁diperoleh",-13.333030700683594],["▁тапшыр",-13.33303165435791],["▁téměř",-13.333032608032228],["▁menjalani",-13.333033561706545],["▁ईमेल",-13.333040237426758],["▁رسالة",-13.33304500579834],["▁kohustus",-13.333046913146973],["▁tunel",-13.333046913146973],["pušč",-13.333047866821287],["▁aconseguir",-13.333048820495604],["▁spalvos",-13.333052635192873],["▁شش",-13.333059310913086],["▁Вин",-13.33306121826172],["▁αυτών",-13.333062171936035],["▁vikend",-13.333064079284668],["▁חיפה",-13.333070755004885],["▁ತಲೆ",-13.333073616027832],["▁Anspruch",-13.333075523376465],["▁αρχές",-13.333085060119627],["▁zöld",-13.333097457885742],["шым",-13.33311367034912],["▁адным",-13.333114624023438],["▁вище",-13.333147048950195],["▁wizyt",-13.333157539367676],["▁udtryk",-13.333158493041992],["▁fremst",-13.333162307739258],["mètre",-13.333172798156738],["▁וועלכע",-13.333174705505373],["▁прас",-13.333179473876951],["yksen",-13.33318328857422],["▁आधारमा",-13.3331937789917],["▁glazbe",-13.333194732666016],["▁qaror",-13.333200454711914],["чних",-13.333213806152344],["vassa",-13.333216667175291],["▁મેળવવા",-13.333216667175291],["hát",-13.33322525024414],["▁Министарство",-13.333255767822266],["▁النفس",-13.33326244354248],["arren",-13.333279609680176],["▁regne",-13.333284378051758],["▁boobs",-13.33330249786377],["▁escuela",-13.333311080932615],["▁drugom",-13.333314895629885],["ពាក់",-13.333319664001465],["▁dejlig",-13.333340644836426],["▁регистрация",-13.33334255218506],["▁велике",-13.33335304260254],["షా",-13.333368301391602],["▁pulsa",-13.33338451385498],["▁ٹیکس",-13.33340072631836],["▁식사",-13.333401679992676],["▁бори",-13.333417892456056],["rækt",-13.333429336547852],["▁وأشار",-13.33351230621338],["ساهم",-13.33351993560791],["δώ",-13.333521842956545],["▁bekijken",-13.333538055419922],["▁المغرب",-13.333547592163086],["▁luze",-13.333552360534668],["▁sanoo",-13.333556175231934],["▁lettera",-13.33356475830078],["一個人",-13.333566665649414],["ไปด้วย",-13.33358669281006],["▁десяти",-13.333617210388184],["опер",-13.3336181640625],["0.5",-13.333622932434082],["dæk",-13.333625793457031],["ZAR",-13.333636283874512],["▁barnehage",-13.333641052246094],["▁කේ",-13.333642959594728],["▁soovita",-13.33364486694336],["բի",-13.333670616149902],["▁famo",-13.333680152893066],["ဖက္",-13.333683967590332],["usap",-13.33371353149414],["▁тогтоо",-13.33371639251709],["▁туған",-13.333717346191406],["168",-13.333734512329102],["▁λύση",-13.333758354187012],["▁јесте",-13.33376407623291],["efect",-13.33376693725586],["▁угроз",-13.33382511138916],["▁molekul",-13.333826065063477],["mlad",-13.33384895324707],["▁कथ",-13.333864212036133],["▁Всеки",-13.333871841430664],["▁Soomaalida",-13.333880424499512],["▁leverer",-13.333892822265623],["▁Raa",-13.33390998840332],["▁shit",-13.333924293518066],["▁හේතුව",-13.333931922912598],["▁សម្",-13.333938598632812],["▁앱",-13.33394718170166],["▁достига",-13.333961486816406],["▁ulos",-13.334031105041504],["一声",-13.33405590057373],["כבד",-13.334056854248049],["▁buget",-13.334063529968262],["łoś",-13.334075927734377],["▁Komplet",-13.334087371826172],["▁spaud",-13.334100723266602],["▁TRE",-13.334129333496094],["ുവാന്",-13.33413028717041],["▁తీ",-13.334134101867676],["▁ଯାଇ",-13.334138870239258],["spire",-13.334145545959473],["▁procesor",-13.334145545959473],["▁втора",-13.334152221679688],["所得",-13.3341703414917],["റിഞ്ഞ",-13.334189414978027],["ሃይማኖት",-13.334208488464355],["УД",-13.334211349487305],["▁पञ्च",-13.334230422973633],["தான",-13.33
423900604248],["▁ההת",-13.334240913391112],["▁помоћи",-13.33425235748291],["▁puhta",-13.334256172180176],["▁agresiv",-13.334257125854492],["hetünk",-13.334260940551758],["ЕГ",-13.33427906036377],["”),",-13.334285736083984],["▁ďalší",-13.334287643432615],["▁Domain",-13.334320068359377],["▁uxor",-13.334321022033691],["אַד",-13.33432388305664],["промышленн",-13.334327697753906],["▁jutu",-13.334341049194336],["érés",-13.334349632263184],["trä",-13.33437156677246],["raksta",-13.334373474121094],["шина",-13.33438777923584],["▁lieli",-13.334392547607422],["1,000",-13.334400177001951],["تشغيل",-13.334402084350586],["ステ",-13.334434509277344],["手を",-13.334452629089355],["ଭାବ",-13.334453582763672],["▁Bhe",-13.33446979522705],["▁mógł",-13.334513664245604],["ीच",-13.334531784057615],["北海道",-13.334598541259766],["컴",-13.334599494934082],["ဥပဒေ",-13.334600448608398],["▁անկախ",-13.334600448608398],["▁július",-13.334601402282717],["慈",-13.334602355957031],["▁Friedrich",-13.334603309631348],["宗旨",-13.334611892700195],["แดน",-13.334612846374512],["▁køkken",-13.334617614746094],["▁tíz",-13.334617614746094],["ሁለቱ",-13.334622383117676],["▁lugn",-13.334623336791992],["▁đệ",-13.334634780883787],["ခဲ့သည်။",-13.334637641906738],["ທະຫານ",-13.33464241027832],["خته",-13.334646224975586],["▁크게",-13.334646224975586],["ензи",-13.334647178649902],["▁захоп",-13.334650993347168],["puti",-13.334656715393066],["ელე",-13.334659576416016],["▁ayay",-13.33466339111328],["株式会社",-13.334667205810549],["▁არსებული",-13.33468532562256],["▁καμία",-13.334689140319824],["▁твое",-13.334705352783203],["▁relaciones",-13.334712982177734],["▁कहना",-13.334717750549316],["▁zgjidh",-13.334721565246582],["▁comprender",-13.334760665893556],["ajai",-13.334763526916504],["▁exprima",-13.334797859191896],["▁TB",-13.33481216430664],["9,5",-13.33481788635254],["▁Netzwerk",-13.334839820861816],["▁سائين",-13.334850311279297],["กล้า",-13.334869384765623],["▁अपि",-13.334882736206056],["▁الدو",-13.334901809692385],["▁vaši",-13.334918975830078],["▁tizimi",-13.334951400756836],["▁Pek",-13.334996223449709],["▁jarrai",-13.335022926330566],["ாட்சி",-13.335030555725098],["كريم",-13.335067749023438],["▁efficient",-13.33506965637207],["نشین",-13.335079193115234],["▁harika",-13.335095405578612],["▁brenn",-13.335098266601562],["▁Sklep",-13.335108757019045],["fair",-13.335118293762209],["ಲೈ",-13.335127830505373],["▁konzul",-13.335174560546877],["▁FS",-13.335183143615724],["▁bền",-13.335183143615724],["▁prega",-13.335201263427734],["▁дели",-13.335208892822266],["▁дизайнер",-13.335209846496582],["▁consumidores",-13.335229873657228],["▁güc",-13.335245132446287],["prise",-13.335247039794922],["cultural",-13.335261344909668],["人たち",-13.335281372070312],["ทําได้",-13.33528995513916],["▁चैत",-13.335299491882324],["▁қалған",-13.335304260253906],["▁2559",-13.335341453552246],["tiere",-13.335355758666992],["▁reddit",-13.335356712341309],["▁المستخدم",-13.335366249084473],["▁گیرند",-13.335372924804688],["▁амжилт",-13.335392951965332],["▁Нарын",-13.33539867401123],["▁княз",-13.33539867401123],["▁kaibigan",-13.335432052612305],["▁indirim",-13.335443496704102],["▁възможности",-13.335448265075684],["യുണ്ട്",-13.33544921875],["▁ойлго",-13.33544921875],["▁hört",-13.335456848144531],["iện",-13.335457801818848],["▁سالہ",-13.33545970916748],["sakan",-13.33546257019043],["шени",-13.33547592163086],["▁фірм",-13.335477828979492],["▁строго",-13.335478782653809],["▁BIL",-13.335484504699709],["▁intención",-13.335491180419922],["▁pami",-13.335524559020996],["ščina",-13.335527420043944],["▁yere",-13
.335531234741213],["▁МЕ",-13.33554458618164],["▁waan",-13.335551261901855],["であること",-13.335553169250488],["75)",-13.335588455200195],["hatunk",-13.335614204406738],["ជាច្រើន",-13.335660934448242],["SAS",-13.335680961608888],["▁sən",-13.335686683654783],["知らない",-13.335722923278809],["▁meele",-13.335736274719238],["▁begynte",-13.335755348205566],["sági",-13.335759162902832],["edici",-13.335793495178224],["▁თემ",-13.335808753967283],["Аб",-13.335829734802246],["▁dhigay",-13.335858345031738],["人心",-13.335858345031738],["▁природа",-13.33587646484375],["uuteen",-13.335931777954102],["▁დანა",-13.335939407348633],["▁заключен",-13.335944175720217],["بيه",-13.335949897766112],["▁Джа",-13.335949897766112],["▁رہیں",-13.335952758789062],["北京市",-13.33596897125244],["▁musíte",-13.3360013961792],["▁ਦੱਸਿਆ",-13.336002349853516],["会場",-13.336003303527832],[",«",-13.336005210876465],["▁ყველას",-13.33600616455078],["▁लु",-13.336012840270996],["▁створ",-13.336021423339844],["ಯಿ",-13.33602809906006],["▁২১",-13.336063385009766],["▁Vasco",-13.336092948913574],["▁Masih",-13.336102485656738],["▁çalıştı",-13.336113929748535],["▁منظم",-13.33613109588623],["▁සමාජය",-13.336135864257812],["mesine",-13.33615779876709],["ველო",-13.33616065979004],["▁публіка",-13.336173057556152],["▁ολοκληρ",-13.336174011230469],["▁Phạm",-13.336176872253418],["▁khắp",-13.336176872253418],["▁εμείς",-13.336176872253418],["▁ліпеня",-13.336176872253418],["▁cobertura",-13.336177825927734],["▁ஒவ்வொரு",-13.336177825927734],["▁alkalmas",-13.33617877960205],["▁skaičius",-13.33617877960205],["▁asaj",-13.336179733276367],["▁ሺህ",-13.336180686950684],["▁Atunci",-13.336182594299316],["▁Ủy",-13.336183547973633],["▁ሜ",-13.336190223693848],["▁फूल",-13.336194038391112],["▁hatalmas",-13.33619499206543],["▁понятно",-13.336204528808594],["▁drodze",-13.33620548248291],["▁Đặc",-13.336207389831545],["▁escal",-13.33620834350586],["▁пакуль",-13.336210250854492],["▁గారి",-13.336215019226074],["▁لگے",-13.33621597290039],["▁plupart",-13.336217880249023],["▁ವಾಹನ",-13.33621883392334],["Κα",-13.336222648620604],["▁azaz",-13.336223602294922],["▁baggrund",-13.336223602294922],["економ",-13.336224555969238],["متابعة",-13.336230278015137],["的通知",-13.336237907409668],["▁faritr",-13.336259841918944],["▁माझा",-13.33626651763916],["▁пройти",-13.33626937866211],["▁utóbbi",-13.336270332336426],["▁यांचे",-13.33627223968506],["▁районної",-13.336277961730955],["▁innenfor",-13.336280822753906],["lanti",-13.336285591125488],["niejszych",-13.33629322052002],["▁aanvaar",-13.336295127868652],["▁التعاون",-13.336316108703612],["ndolo",-13.33632755279541],["▁doilea",-13.336347579956056],["▁столько",-13.336349487304688],["▁heinäkuuta",-13.336358070373535],["▁jakby",-13.336363792419434],["mên",-13.336380004882812],["體系",-13.336383819580078],["理論",-13.33639144897461],["▁femeie",-13.33639430999756],["rious",-13.336406707763672],["ाच",-13.336406707763672],["▁ஜா",-13.336418151855469],["▁Rece",-13.336427688598633],["▁Його",-13.336432456970217],["▁полиция",-13.336474418640137],["möte",-13.336477279663086],["▁සෑම",-13.336478233337402],["▁ആള",-13.336481094360352],["iame",-13.336514472961426],["ভাগ",-13.336514472961426],["াত",-13.33651638031006],["79)",-13.336518287658691],["▁comemora",-13.33652687072754],["▁загина",-13.336532592773438],["▁വാങ്ങ",-13.336532592773438],["కోవ",-13.336560249328612],["▁stručn",-13.33656406402588],["▁گاهی",-13.336596488952637],["▁igal",-13.336607933044434],["▁Мамлекеттик",-13.3366117477417],["kás",-13.336649894714355],["ளு",-13.336654663085938],["▁вещества",-13.336661338806152
[model/sentence-transformer/unigram.json: fragment of the SentencePiece unigram tokenizer vocabulary — a single JSON array of [token, log-probability] pairs such as ["▁Performance", -13.3378] and ["▁deuxième", -13.3409]; the surrounding thousands of entries are omitted.]
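Each entry pairs a SentencePiece token with its unigram log-probability: scores are negative, values closer to zero mark more frequent pieces, and a leading "▁" marks a word-initial token. As a quick sanity check on the file, here is a minimal Python sketch — assuming unigram.json is either a flat JSON array of [token, score] pairs or an object carrying that array under a "vocab" key, both layouts occurring in SentencePiece unigram exports:

```python
import json

# Minimal sketch: inspect the unigram vocabulary shown (in fragment) above.
# Assumption: the file is a flat JSON array of [token, log_prob] pairs, or an
# object holding that array under a "vocab" key.
with open("model/sentence-transformer/unigram.json", encoding="utf-8") as f:
    data = json.load(f)

vocab = data["vocab"] if isinstance(data, dict) else data

print(f"{len(vocab)} tokens in the vocabulary")

# Scores are log-probabilities, so sorting descending surfaces the most
# frequent pieces; "▁"-prefixed tokens begin a new word.
for token, log_prob in sorted(vocab, key=lambda pair: pair[1], reverse=True)[:10]:
    print(f"{log_prob:9.4f}  {token}")
```

Because a unigram model stores each piece's log-probability directly (unlike BPE, which stores merge ranks), sorting by score gives a meaningful frequency ordering; the fragment above sits deep in the tail, with scores clustered around -13.34.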
3208770752],["ALAM",-13.358641624450684],["▁istorija",-13.358654975891112],["345",-13.35865879058838],["▁hilft",-13.358662605285645],["融資",-13.35866928100586],["▁শাহ",-13.358701705932615],["kesi",-13.358705520629885],["မီး",-13.35873317718506],["алды",-13.358741760253906],["带来了",-13.358769416809082],["学生的",-13.358770370483398],["更大的",-13.358800888061523],["▁Министр",-13.358803749084473],["▁ہائی",-13.358809471130373],["▁Masalah",-13.35881233215332],["▁дары",-13.35881233215332],["▁Дома",-13.358813285827637],["上有",-13.358820915222168],["▁замын",-13.358837127685549],["▁muller",-13.358842849731444],["▁معمول",-13.358847618103027],["мудр",-13.358858108520508],["▁ଦେଖା",-13.35887622833252],["▁chic",-13.358882904052734],["▁تۆ",-13.358896255493164],["▁offline",-13.358927726745604],["▁Pove",-13.35897159576416],["▁Cristiano",-13.358972549438477],["▁დარ",-13.358975410461426],["대회",-13.359017372131348],["▁Dva",-13.359028816223145],["earre",-13.35903263092041],["▁billion",-13.359044075012209],["laps",-13.359057426452637],["бух",-13.359067916870115],["▁ustvari",-13.3590726852417],["мыслов",-13.359074592590332],["рно",-13.35914134979248],["▁kotiin",-13.359149932861328],["מספר",-13.359180450439451],["▁sevgili",-13.35918426513672],["▁потребител",-13.359190940856934],["▁tarafta",-13.359196662902832],["▁Org",-13.35920238494873],["GUN",-13.359206199645996],["▁Vele",-13.359213829040527],["폴",-13.359219551086426],["egyen",-13.35923957824707],["▁gerir",-13.359243392944336],["▁پيار",-13.359244346618652],["▁contatta",-13.359296798706056],["▁lança",-13.359310150146484],["▁Zobacz",-13.359326362609863],["▁rakennus",-13.359326362609863],["▁angin",-13.359335899353027],["▁river",-13.359353065490724],["وفي",-13.359362602233888],["▁boyu",-13.359374046325684],["▁מצד",-13.359374046325684],["▁comma",-13.35939598083496],["▁bidin",-13.35940647125244],["優先",-13.35944652557373],["▁raditi",-13.359466552734377],["▁sezona",-13.35948657989502],["▁вперед",-13.359489440917969],["▁Ghar",-13.3594970703125],["प्रा",-13.359500885009766],["▁Lenin",-13.359500885009766],["вание",-13.359530448913574],["▁hêz",-13.359538078308104],["ąjį",-13.359561920166016],["▁Elev",-13.35958766937256],["LEI",-13.359617233276367],["▁ruhig",-13.359626770019531],["▁lozim",-13.359641075134276],["▁платформа",-13.35964584350586],["▁Neden",-13.359663963317873],["完成了",-13.359665870666504],["▁viaxe",-13.35967254638672],["▁quadri",-13.359678268432615],["шық",-13.3596830368042],["▁Hawk",-13.359696388244627],["ическую",-13.359718322753906],["増え",-13.359725952148438],["每天都",-13.35975456237793],["▁rouge",-13.359777450561523],["kezd",-13.35978889465332],["▁1990.",-13.359796524047852],["▁maximáln",-13.359820365905762],["▁ایده",-13.35983943939209],["▁украин",-13.359845161437988],["ография",-13.359846115112305],["сня",-13.35984706878662],["ullo",-13.359874725341797],["▁ustanovi",-13.359885215759276],["▁teknis",-13.359893798828123],["要去",-13.359904289245604],["▁Sunda",-13.359908103942873],["▁žive",-13.359936714172363],["▁kätte",-13.359942436218262],["▁GD",-13.359944343566896],["слуша",-13.35995388031006],["▁projekty",-13.359975814819336],["▁CAD",-13.359992980957031],["▁millors",-13.36001968383789],["mese",-13.360029220581056],["ጪ",-13.36003303527832],["entretien",-13.36005687713623],["▁ефективно",-13.360065460205078],["▁obszar",-13.36007308959961],["▁Мартин",-13.36008644104004],["慮",-13.360093116760254],["▁vacib",-13.360097885131836],["脆",-13.360100746154783],["宾",-13.360101699829102],["बह",-13.360105514526367],["შია",-13.360109329223633],["تیک",-13.360115051269531],["▁izvēlē",-13.
36012077331543],["▁ուղղ",-13.360121726989746],["▁مين",-13.360127449035645],["ၽ",-13.360129356384276],["▁Jennifer",-13.360139846801758],["▁Ricardo",-13.360139846801758],["▁ditetapkan",-13.360139846801758],["▁legfontosabb",-13.360139846801758],["▁przedsiębiorstw",-13.360139846801758],["▁засідання",-13.360139846801758],["▁тақырып",-13.360139846801758],["▁وزيراعظم",-13.360139846801758],["▁उदाहरण",-13.360139846801758],["▁ਰੁਪਏ",-13.360139846801758],["▁Row",-13.360140800476074],["▁keinginan",-13.36014175415039],["▁କେବଳ",-13.36014175415039],["▁jednostavno",-13.360142707824709],["的文化",-13.360142707824709],["ಹಿತ",-13.360143661499023],["▁ngại",-13.36014461517334],["économie",-13.360145568847656],["▁включително",-13.360145568847656],["▁اعتراف",-13.360146522521973],["▁kryer",-13.360147476196287],["мисл",-13.360152244567873],["▁veidot",-13.360167503356934],["▁ይልቅ",-13.360170364379885],["▁ochranu",-13.360173225402832],["بيل",-13.360175132751465],["▁uzsāk",-13.360201835632324],["▁המלון",-13.360219955444336],["▁тэдний",-13.360223770141602],["▁الجامعة",-13.360239028930664],["หน้าอก",-13.36023998260498],["▁sociální",-13.360261917114258],["▁tic",-13.360262870788574],["▁되지",-13.360269546508787],["▁талбай",-13.360271453857422],["ಬೋ",-13.360286712646484],["сай",-13.3602876663208],["深深",-13.360305786132812],["▁kwake",-13.360318183898926],["ෆ",-13.360319137573242],["▁Carolina",-13.360319137573242],["gár",-13.360332489013672],["illion",-13.360333442687988],["bisa",-13.360345840454102],["▁гру",-13.360349655151367],["ferien",-13.360359191894531],["▁connaître",-13.36037540435791],["▁پورته",-13.360390663146973],["▁shpirt",-13.36042022705078],["▁కొంత",-13.360437393188477],["पैकी",-13.360447883605955],["ಲ್ಲಾ",-13.360447883605955],["▁kuona",-13.360466003417969],["▁especie",-13.360498428344728],["▁ווער",-13.360498428344728],["▁geriau",-13.360518455505373],["▁գեր",-13.360518455505373],["▁በመሆኑ",-13.360544204711914],["▁싶",-13.360552787780762],["推進",-13.360562324523926],["čnik",-13.36058235168457],["דרך",-13.360589027404783],["▁দিকে",-13.360596656799316],["▁Hl",-13.36060905456543],["▁выгляд",-13.36060905456543],["puli",-13.36061191558838],["ल्ली",-13.360613822937012],["▁prace",-13.36063289642334],["рц",-13.360639572143556],["▁utilizare",-13.360642433166504],["▁asist",-13.360650062561035],["▁bati",-13.360651016235352],["дзіць",-13.360678672790527],["▁zazna",-13.360705375671388],["▁전략",-13.360705375671388],["دىر",-13.36070728302002],["טא",-13.360710144042969],["▁miele",-13.360715866088867],["eliai",-13.3607177734375],["give",-13.36074447631836],["க்கொண்ட",-13.360761642456056],["ώρη",-13.36077117919922],["▁عوامی",-13.360779762268066],["▁الهند",-13.360801696777344],["▁gedrag",-13.360811233520508],["kriti",-13.360857009887695],["▁letni",-13.360867500305176],["ადგენ",-13.360870361328123],["એલ",-13.360872268676758],["▁конкретни",-13.360873222351074],["▁Dome",-13.360895156860352],["nyomás",-13.36090850830078],["iniams",-13.36092472076416],["တံ",-13.36095905303955],["▁Gue",-13.360965728759766],["зве",-13.36099910736084],["▁happens",-13.36101531982422],["▁fugiat",-13.361026763916016],["جزاء",-13.361038208007812],["রাজ",-13.361069679260254],["▁Alto",-13.361074447631836],["љиво",-13.36108112335205],["দু",-13.361123085021973],["▁ekstrakt",-13.361135482788086],["ayya",-13.361136436462402],["▁పంచ",-13.361151695251465],["მული",-13.361160278320312],["▁ചെയ്ത്",-13.361169815063477],["▁Ono",-13.36117172241211],["▁vlády",-13.361303329467772],["ðri",-13.36130714416504],["▁levens",-13.36131477355957],["াব",-13.361320495605469],["▁hohen",-13.361349105
83496],["نص",-13.361391067504885],["ทุ",-13.361424446105955],["дэм",-13.361427307128906],["iausias",-13.361474990844728],["isla",-13.361483573913574],["baka",-13.361485481262209],["diamo",-13.361499786376951],["▁Məclisin",-13.361502647399902],["מענט",-13.3615083694458],["mons",-13.361510276794434],["плав",-13.36151123046875],["▁Kub",-13.36151885986328],["なくなる",-13.361528396606444],["▁Գրիգորյան",-13.361557006835938],["เพื่อให้",-13.361557960510254],["▁Zug",-13.361589431762695],["फ्ट",-13.361604690551758],["▁regionu",-13.36160659790039],["មានការ",-13.361610412597656],["сос",-13.361639976501465],["▁penser",-13.36166763305664],["కలు",-13.361685752868652],["proq",-13.361687660217283],["▁මෙන්",-13.361696243286133],["董",-13.36169719696045],["▁Kolle",-13.361703872680664],["▁explic",-13.361708641052246],["programmet",-13.36172103881836],["培養",-13.361735343933104],["▁Afri",-13.36174488067627],["ดาวน์โหลด",-13.361756324768066],["տեխնիկա",-13.361757278442385],["ทรัพย์",-13.361757278442385],["▁hầu",-13.3617582321167],["▁зовсім",-13.3617582321167],["▁илэрхийл",-13.3617582321167],["▁ਫਿਲਮ",-13.3617582321167],["ខ្លះ",-13.361760139465332],["▁pričom",-13.361760139465332],["▁үндсэн",-13.361760139465332],["▁kryeministri",-13.361761093139648],["▁studies",-13.361761093139648],["▁त्यही",-13.361761093139648],["▁Тиймээс",-13.36176300048828],["▁דולר",-13.361764907836914],["▁Senin",-13.361766815185549],["離婚",-13.361766815185549],["tembelea",-13.361769676208496],["▁јој",-13.361769676208496],["▁бензин",-13.361771583557127],["▁zatrudni",-13.361772537231444],["▁Szeged",-13.361776351928713],["▁Lauf",-13.361777305603027],["▁unseres",-13.36177921295166],["ใบหน้า",-13.36178207397461],["▁माझी",-13.361783981323242],["▁шаардлагатай",-13.36179256439209],["▁olleet",-13.361804962158203],["▁נושא",-13.361808776855469],["▁yum",-13.361809730529783],["▁brod",-13.361810684204102],["лазак",-13.361812591552734],["▁दिशा",-13.361815452575684],["нор",-13.36181640625],["▁хутка",-13.36181640625],["▁לילדים",-13.361820220947266],["ццё",-13.361825942993164],["▁дейт",-13.361836433410645],["▁previous",-13.36184787750244],["▁іншого",-13.361861228942873],["▁обратиться",-13.361863136291504],["▁urbana",-13.361873626708984],["手続き",-13.3618745803833],["▁compro",-13.361928939819336],["▁praca",-13.361934661865234],["▁Buhok",-13.361937522888184],["درجة",-13.36194133758545],["การทํางาน",-13.361949920654297],["прет",-13.361964225769045],["කී",-13.361974716186523],["▁produsele",-13.361974716186523],["တခု",-13.361988067626951],["▁dedicar",-13.362005233764648],["▁ezeket",-13.362009048461914],["不妨",-13.36201286315918],["▁ຜ່ານ",-13.36202907562256],["▁pasak",-13.362037658691406],["▁odloč",-13.362045288085938],["▁lapangan",-13.362049102783203],["ण्य",-13.362054824829102],["▁transmet",-13.362071990966797],["ોના",-13.362072944641112],["3]",-13.362090110778809],["▁напр",-13.362101554870604],["▁pláž",-13.362110137939451],["nella",-13.362125396728516],["▁sute",-13.362139701843262],["▁detect",-13.362144470214844],["▁ببین",-13.362186431884766],["▁Ô",-13.362189292907717],["▁rara",-13.362202644348145],["▁kommenterar",-13.362204551696776],["აკი",-13.36220932006836],["▁Mev",-13.362211227416992],["校园",-13.362217903137209],["ணா",-13.36223602294922],["▁Uganda",-13.362244606018066],["borgar",-13.362281799316406],["▁bia",-13.36228847503662],["vatten",-13.362293243408203],["▁paikallis",-13.362295150756836],["▁ringe",-13.362296104431152],["▁חוות",-13.362314224243164],["▁Seb",-13.362322807312012],["▁Financial",-13.362324714660645],["▁خرد",-13.36232852935791],["▁ගත්තේ",-13.3623294830322
28],["▁بیست",-13.36233615875244],["▁kërko",-13.362337112426758],["검",-13.362343788146973],["alnych",-13.362350463867188],["gamos",-13.36239242553711],["▁pagkaka",-13.362401008605955],["▁діа",-13.36245059967041],["טוס",-13.362476348876951],["▁જાણી",-13.362479209899902],["▁Pues",-13.36248016357422],["tengah",-13.362486839294434],["▁требованиям",-13.362492561340332],["伙",-13.36251163482666],["▁historik",-13.36251735687256],["してから",-13.36253833770752],["ീകരിക്ക",-13.362540245056152],["ন্য",-13.362570762634276],["सद",-13.362574577331545],["▁drugače",-13.362591743469238],["▁Криму",-13.362592697143556],["▁novac",-13.362600326538086],["HEN",-13.362604141235352],["する事",-13.362640380859377],["iliyor",-13.36265754699707],["naithe",-13.362680435180664],["▁avalia",-13.362680435180664],["▁Sekretari",-13.36268138885498],["▁effettua",-13.362698554992676],["▁poser",-13.362714767456056],["▁ljudima",-13.362717628479004],["tlač",-13.36272430419922],["ຫ້ອງ",-13.362738609313965],["▁urtean",-13.362748146057127],["όνα",-13.362762451171877],["ográfica",-13.362768173217772],["▁pierwszym",-13.362771034240724],["▁Цвет",-13.362787246704102],["nevad",-13.362793922424316],["čič",-13.362802505493164],["▁දැක්",-13.362831115722656],["රැ",-13.362835884094238],["fungua",-13.362871170043944],["▁예상",-13.36287784576416],["▁pandai",-13.36289882659912],["していて",-13.362902641296388],["▁activitatea",-13.362907409667969],["▁építés",-13.3629150390625],["ிகள்",-13.362919807434082],["يام",-13.362924575805664],["▁інша",-13.36298370361328],["keliling",-13.362991333007812],["うちに",-13.363001823425291],["mised",-13.363008499145508],["▁Norr",-13.363055229187012],["▁разни",-13.36306095123291],["▁ບ້ານ",-13.363067626953123],["րեց",-13.363079071044922],["▁içeri",-13.363082885742188],["ğan",-13.363085746765137],["▁presentera",-13.363091468811035],["▁vienā",-13.363091468811035],["Sub",-13.363094329833984],["සන",-13.363096237182615],["ക്കാൻ",-13.36310863494873],["১৯",-13.363112449645996],["▁gæti",-13.363114356994627],["stři",-13.363126754760742],["urken",-13.36313533782959],["IVE",-13.363158226013184],["▁Баш",-13.363162994384766],["TAT",-13.363178253173828],["juna",-13.36320972442627],["▁Ім",-13.363213539123535],["ytė",-13.363214492797852],["▁imprez",-13.363224029541016],["▁redzam",-13.363228797912598],["कः",-13.363258361816406],["▁possit",-13.363259315490724],["▁قبلی",-13.363266944885254],["▁अरब",-13.363280296325684],["▁dựa",-13.363303184509276],["▁əhali",-13.363303184509276],["昆",-13.363309860229492],["坡",-13.363324165344238],["계획",-13.363325119018556],["▁vuoro",-13.363341331481934],["arvio",-13.363344192504885],["ឡើយ",-13.363348007202148],["预测",-13.363351821899414],["ኽ",-13.363378524780272],["▁төхөөрөмж",-13.363378524780272],["▁Uluslararası",-13.36337947845459],["▁najviac",-13.36337947845459],["▁ανάμεσα",-13.36337947845459],["▁καλύτερη",-13.36337947845459],["▁Галоўная",-13.36337947845459],["▁набавке",-13.36337947845459],["▁оборудования",-13.36337947845459],["▁спасылкі",-13.36337947845459],["▁эсрэг",-13.36337947845459],["▁ਮੁਫ਼ਤ",-13.36337947845459],["▁Profesional",-13.363380432128906],["▁Wilhelm",-13.363380432128906],["geschreven",-13.36338233947754],["▁государственных",-13.363383293151855],["▁الخليج",-13.363388061523438],["▁الشرطة",-13.363388061523438],["ಂಥ",-13.36338996887207],["▁ଦେଇଛନ୍ତି",-13.363390922546388],["▁darum",-13.363393783569336],["▁caratterizza",-13.36340045928955],["▁քիչ",-13.363401412963867],["▁گویند",-13.363401412963867],["▁erwartet",-13.363404273986816],["▁жазып",-13.363414764404297],["▁دوشنبه",-13.363426208496094],["▁Éireann",-13
.36343002319336],["▁Waktu",-13.363441467285156],["▁потврди",-13.363447189331056],["▁polskich",-13.36345100402832],["▁კატეგორია",-13.363452911376951],["شک",-13.363459587097168],["▁արտա",-13.36346435546875],["▁денари",-13.363485336303713],["▁बढेको",-13.363485336303713],["▁ڪيترا",-13.363486289978027],["▁kənar",-13.36350154876709],["রূপ",-13.363519668579102],["▁entitats",-13.36353588104248],["▁ถูก",-13.36353588104248],["▁kilde",-13.363540649414062],["▁cim",-13.36354923248291],["▁מאפשר",-13.36355209350586],["ρους",-13.363554000854492],["▁neemt",-13.363555908203123],["▁wskaza",-13.363566398620604],["PRESS",-13.363576889038086],["▁κομ",-13.3635835647583],["သည့်",-13.363587379455566],["▁negativo",-13.36359405517578],["▁variety",-13.363595962524414],["▁istiyor",-13.363598823547363],["▁asoci",-13.363600730895996],["зам",-13.363612174987791],["▁ہوگی",-13.36361312866211],["▁credito",-13.363617897033691],["▁yayım",-13.363628387451172],["▁محسن",-13.36363410949707],["ናል።",-13.363645553588867],["▁Pensa",-13.363648414611816],["...............",-13.36368179321289],["▁класа",-13.36368465423584],["▁ئۆت",-13.36371612548828],["▁numărul",-13.363723754882812],["▁Европы",-13.363750457763672],["可以看到",-13.363763809204102],["ライン",-13.363774299621582],["▁୧୫",-13.363777160644531],["מוסד",-13.363855361938477],["▁fakti",-13.36386013031006],["ရြာ",-13.36386489868164],["เรียนรู้",-13.363865852355955],["▁visitas",-13.36388111114502],["▁portes",-13.36391544342041],["ქო",-13.363921165466309],["มากที่สุด",-13.363924026489258],["▁توانست",-13.363930702209473],["shit",-13.363954544067385],["▁dərs",-13.363958358764648],["▁اخذ",-13.363961219787598],["大使",-13.363972663879396],["▁organização",-13.36397647857666],["▁кеч",-13.363987922668455],["ුල්",-13.363990783691406],["▁libertate",-13.36399269104004],["▁sürücü",-13.364015579223633],["▁روغن",-13.36403751373291],["▁essen",-13.36406421661377],["დათ",-13.364070892333984],["иум",-13.364106178283691],["▁stir",-13.3641357421875],["▁hrad",-13.364142417907717],["▁берді",-13.364160537719728],["▁టె",-13.36417007446289],["▁zweite",-13.364212036132812],["▁persönlichen",-13.364212989807127],["መና",-13.36422634124756],["精品",-13.364243507385254],["▁Ibn",-13.36424732208252],["▁GSM",-13.364261627197266],["▁القرن",-13.364269256591797],["わり",-13.36427879333496],["hnen",-13.36428165435791],["సింగ్",-13.364283561706545],["工厂",-13.364301681518556],["▁Dach",-13.364302635192873],["▁huius",-13.3643159866333],["合う",-13.364331245422363],["utti",-13.364347457885742],["student",-13.36436653137207],["մո",-13.364368438720703],["▁Espanya",-13.364370346069336],["▁varēs",-13.364423751831056],["▁الخل",-13.36443328857422],["▁verge",-13.364439964294434],["matay",-13.364453315734863],["▁νο",-13.364456176757812],["▁policist",-13.36448860168457],["ўна",-13.36449909210205],["▁мақсатында",-13.364548683166504],["▁სხვ",-13.364548683166504],["▁podería",-13.364553451538086],["ಂಟಿ",-13.36455535888672],["▁odločil",-13.364559173583984],["вине",-13.364563941955566],["זור",-13.364566802978516],["lavo",-13.364571571350098],["▁quiera",-13.364585876464844],["جين",-13.36458683013916],["athrú",-13.364596366882324],["▁لاش",-13.364629745483398],["▁buenas",-13.36463737487793],["▁jawaban",-13.36464786529541],["▁svenske",-13.364665985107422],["文化的",-13.364706993103027],["რეკ",-13.364734649658203],["акты",-13.364755630493164],["stelsel",-13.36478328704834],["▁✓",-13.364813804626465],["▁vremea",-13.364815711975098],["▁konference",-13.364828109741213],["▁Lur",-13.364849090576172],["ดําเนิน",-13.364863395690918],["यी",-13.36486530303955],["untuk",-1
3.364875793457031],["潘",-13.364896774291992],["▁علمي",-13.36490249633789],["▁시즌",-13.364912986755373],["rawan",-13.364925384521484],["▁toerana",-13.364952087402344],["经典",-13.364961624145508],["▁kości",-13.364967346191406],["▁احمدی",-13.364983558654783],["ীন",-13.364988327026367],["เงื่อนไข",-13.36500072479248],["تېخىمۇ",-13.365002632141112],["ቶችን",-13.365002632141112],["▁Umgebung",-13.365002632141112],["▁adecuada",-13.365002632141112],["▁budynku",-13.365002632141112],["▁référence",-13.365002632141112],["▁βρίσκονται",-13.365002632141112],["▁πρώτα",-13.365002632141112],["▁наличии",-13.365002632141112],["▁орналасқан",-13.365002632141112],["▁المختلفة",-13.365002632141112],["▁харесва",-13.36500358581543],["▁അപകട",-13.36500358581543],["▁највећи",-13.365004539489746],["▁احتساب",-13.365005493164062],["▁लगभग",-13.365005493164062],["▁언어",-13.365005493164062],["▁Vælg",-13.365007400512695],["▁fortsätter",-13.365015029907228],["▁keretében",-13.365015983581545],["larımızı",-13.36501693725586],["▁coneixement",-13.36501693725586],["▁पोष्ट",-13.365019798278809],["▁requirements",-13.365020751953123],["خارج",-13.365023612976074],["▁സാധ്യത",-13.365031242370604],["▁sőt",-13.365032196044922],["ненне",-13.365045547485352],["▁tenker",-13.365071296691896],["▁기반",-13.365073204040527],["▁აღმო",-13.365086555480955],["ឈើ",-13.36509132385254],["도의",-13.36510944366455],["▁incremento",-13.3651123046875],["ifikation",-13.36511516571045],["▁gæster",-13.365121841430664],["▁الهيئة",-13.36513328552246],["▁druhej",-13.36513900756836],["▁rappel",-13.365140914916992],["▁semplicemente",-13.365147590637209],["▁küt",-13.365156173706056],["▁szans",-13.365161895751951],["▁chili",-13.36516284942627],["▁Utiliza",-13.365166664123535],["lną",-13.365169525146484],["िला",-13.365179061889648],["▁bisschen",-13.365182876586914],["▁ឆ",-13.365217208862305],["▁complicat",-13.36522102355957],["แอร์",-13.365222930908203],["ということです",-13.36523151397705],["▁Fuß",-13.365236282348633],["▁عظيم",-13.365242958068848],["నాలు",-13.365253448486328],["െന്നാണ്",-13.365289688110352],["TURA",-13.36531925201416],["▁የሚለው",-13.36532211303711],["▁mpango",-13.36534309387207],["ଫ୍",-13.365344047546388],["שיב",-13.36534595489502],["জী",-13.365349769592283],["▁nỗi",-13.365352630615234],["アイ",-13.365364074707031],["▁Death",-13.365374565124512],["▁shqiptarëve",-13.365374565124512],["ტოვ",-13.365389823913574],["▁ofertas",-13.36540412902832],["▁दृश्य",-13.365406036376951],["▁Жак",-13.365415573120115],["▁elevado",-13.365434646606444],["▁मंत्र",-13.365443229675291],["▁licenci",-13.365464210510254],["klāj",-13.365488052368164],["மதி",-13.36549472808838],["▁examina",-13.365500450134276],["▁deverá",-13.365503311157228],["чёт",-13.365514755249023],["იუ",-13.365524291992188],["▁오전",-13.36553192138672],["魔法",-13.365532875061035],["സ്വ",-13.365557670593262],["▁चले",-13.365570068359377],["▁guía",-13.365572929382324],["▁паста",-13.365572929382324],["1200",-13.365578651428224],["жэй",-13.36557960510254],["▁Bass",-13.365589141845703],["סא",-13.365596771240234],["حص",-13.365610122680664],["만큼",-13.365626335144045],["▁Fix",-13.365630149841309],["حتاج",-13.365633010864258],["xeber",-13.365662574768066],["▁estudantes",-13.36567211151123],["▁konts",-13.365690231323242],["▁Samuti",-13.365705490112305],["▁helyez",-13.36570644378662],["iniziativa",-13.365708351135254],["ଷ୍",-13.36571216583252],["▁সম",-13.365730285644531],["μαι",-13.36573314666748],["▁keren",-13.36573886871338],["▁beskriver",-13.365742683410645],["▁random",-13.36574363708496],["▁convert",-13.365751266479492],["▁АЛ",-13.365756988
52539],["▁planen",-13.365758895874023],["▁تصوير",-13.365760803222656],["виш",-13.365789413452148],["trou",-13.365818977355955],["író",-13.365821838378906],["▁тврди",-13.365852355957031],["▁ካል",-13.365862846374512],["▁faoliyat",-13.365864753723145],["▁своїми",-13.365878105163574],["▁مسلمانان",-13.365880012512209],["▁মহিলা",-13.365889549255373],["organisme",-13.365946769714355],["▁Gabi",-13.365962028503418],["nienie",-13.36596393585205],["ात्मक",-13.365975379943848],["▁materiales",-13.36598777770996],["повед",-13.365999221801758],["▁pagine",-13.366002082824709],["ičkih",-13.36600399017334],["▁наречен",-13.36600399017334],["▁သြား",-13.366040229797363],["тлумач",-13.366056442260742],["juće",-13.366058349609377],["historie",-13.366076469421388],["▁ბერ",-13.366093635559082],["LINK",-13.366104125976562],["lægning",-13.36611557006836],["ევი",-13.366117477416992],["වීමේ",-13.366144180297852],["മ്",-13.3661527633667],["各国",-13.366162300109863],["▁කරන්නට",-13.366209983825684],["díl",-13.366225242614746],["ท้าย",-13.366262435913086],["▁münasibətlər",-13.366270065307615],["tuot",-13.36628532409668],["년에",-13.366287231445312],["▁физи",-13.366289138793944],["▁önkormányzat",-13.366296768188477],["开了",-13.366326332092283],["▁skapar",-13.366327285766602],["वला",-13.36633014678955],["séggel",-13.366341590881348],["▁երեխա",-13.366362571716309],["לתי",-13.366381645202637],["お話",-13.366394996643066],["▁buite",-13.366395950317385],["rami",-13.36640167236328],["▁គ្រប់",-13.366403579711914],["▁ergonomi",-13.366408348083496],["líz",-13.366412162780762],["▁Nø",-13.366415023803713],["▁Stars",-13.366440773010254],["▁corto",-13.366458892822266],["▁ഫി",-13.366462707519531],["▁makina",-13.366473197937012],["▁ល្អ",-13.366474151611328],["pending",-13.366480827331545],["慧",-13.366494178771973],["▁skupi",-13.366498947143556],["組成",-13.366501808166504],["▁antall",-13.366507530212402],["▁защите",-13.366530418395996],["▁kategorie",-13.366541862487791],["▁etnik",-13.366552352905272],["摄",-13.366573333740234],["▁عوامي",-13.36657428741455],["mby",-13.366586685180664],["חומר",-13.366606712341309],["áky",-13.366617202758787],["ชัด",-13.366621971130373],["สรุป",-13.366626739501951],["ጩ",-13.366628646850586],["▁diálogo",-13.366628646850586],["▁Ιουνίου",-13.366628646850586],["▁συζητ",-13.366628646850586],["▁පොලිසිය",-13.366628646850586],["▁həftə",-13.366629600524902],["▁જાહેર",-13.366629600524902],["▁ਤਿਆਰ",-13.366631507873535],["▁којих",-13.366632461547852],["▁gwybodaeth",-13.366634368896484],["▁Konzept",-13.366642951965332],["▁Nghệ",-13.366642951965332],["cção",-13.366646766662598],["▁ئېچى",-13.366660118103027],["▁მეტად",-13.36667251586914],["▁وحتى",-13.366679191589355],["ปวด",-13.366681098937988],["화를",-13.36668300628662],["▁maxsus",-13.366695404052734],["▁includes",-13.366710662841797],["▁butuh",-13.366717338562012],["kwi",-13.366718292236328],["▁dixe",-13.366724014282228],["▁ေသ",-13.366724014282228],["дігі",-13.366734504699709],["▁sikeres",-13.366735458374023],["▁شهریور",-13.36673641204834],["ាត់",-13.366753578186035],["activitat",-13.366756439208984],["рање",-13.366756439208984],["▁Моля",-13.366761207580566],["▁burn",-13.366783142089844],["▁енергија",-13.36678695678711],["▁salaam",-13.366796493530272],["ohej",-13.366798400878906],["▁Dublin",-13.366803169250488],["▁Stadion",-13.366806030273438],["一个月",-13.36681079864502],["▁jovem",-13.36681842803955],["▁Demokratik",-13.366828918457031],["▁бре",-13.366828918457031],["▁genitori",-13.366844177246094],["▁koks",-13.366847038269045],["▁rabi",-13.366854667663574],["▁mulți",-13.366862297058
104],["▁Gero",-13.366872787475586],["asjoner",-13.366876602172852],["nyom",-13.366884231567385],["שפה",-13.36690902709961],["▁sinteti",-13.36692237854004],["▁ಮಳೆ",-13.366929054260254],["▁କେ",-13.366937637329102],["▁એની",-13.366945266723633],["▁pobytu",-13.366949081420898],["lých",-13.366957664489746],["▁domino",-13.366957664489746],["▁consciente",-13.366975784301758],["多人",-13.366992950439451],["▁száz",-13.367037773132324],["▁وٺ",-13.36704158782959],["ضرب",-13.367045402526855],["▁πόλης",-13.367048263549805],["▁denar",-13.367058753967283],["▁아마",-13.367064476013184],["▁интереси",-13.367074966430664],["paran",-13.367085456848145],["▁ujian",-13.367103576660156],["▁келишим",-13.367103576660156],["▁musím",-13.36711597442627],["▁1906",-13.367137908935549],["▁значно",-13.367148399353027],["著名的",-13.367157936096191],["▁Потому",-13.36717128753662],["▁cure",-13.367213249206545],["▁адно",-13.36721420288086],["▁велику",-13.367216110229492],["כנו",-13.367244720458984],["နယ်",-13.367254257202148],["▁මණ්ඩලය",-13.367273330688477],["▁Wirtschaft",-13.367290496826172],["▁kuş",-13.367311477661133],["▁אח",-13.36731243133545],["▁περ",-13.367323875427246],["ដង",-13.3673734664917],["▁изјавио",-13.36737823486328],["▁αιτ",-13.36739730834961],["aĵojn",-13.36740016937256],["。《",-13.367416381835938],["ELSE",-13.367423057556152],["▁айы",-13.367429733276367],["▁привести",-13.367430686950684],["▁anderer",-13.367436408996582],["▁Xəbər",-13.367444038391112],["▁кант",-13.367453575134276],["▁deveni",-13.367464065551758],["▁1904",-13.367465019226074],["▁อิ",-13.367469787597656],["▁سٹی",-13.367484092712402],["กี",-13.36751937866211],["▁hakkı",-13.367526054382324],["سليم",-13.36753273010254],["dori",-13.367541313171388],["▁acepta",-13.367561340332031],["▁taso",-13.367571830749512],["олот",-13.36758041381836],["ုိက္",-13.367605209350586],["▁looma",-13.367616653442385],["tesa",-13.3676176071167],["▁حرام",-13.367619514465332],["เมื่อวันที่",-13.367645263671877],["kowej",-13.367664337158203],["▁Massa",-13.367677688598633],["bomb",-13.367685317993164],["▁Siti",-13.36768627166748],["▁blonde",-13.367714881896973],["▁गाय",-13.367721557617188],["▁christ",-13.367731094360352],["ניו",-13.367734909057615],["▁Galli",-13.367752075195312],["süz",-13.36776638031006],["▁seua",-13.367788314819336],["nkt",-13.367794036865234],["جاد",-13.367813110351562],["BRI",-13.367817878723145],["kausi",-13.367849349975586],["bone",-13.36788272857666],["▁أمير",-13.36788845062256],["▁Одно",-13.367902755737305],["▁ಗಮನ",-13.367918014526367],["这么多",-13.367928504943848],["म्बर",-13.36793613433838],["▁Artes",-13.367982864379885],["迎来",-13.367985725402832],["▁cheese",-13.367987632751465],["▁ይህም",-13.368000984191896],["効",-13.368034362792969],["▁Conte",-13.368046760559082],["▁BAS",-13.368080139160156],["▁lietas",-13.368103981018066],["▁तहत",-13.368107795715332],["▁silang",-13.368108749389648],["ኛል",-13.368125915527344],["ማቸው",-13.368133544921877],["bilitate",-13.368141174316406],["ෂණ",-13.368146896362305],["▁წევრი",-13.368146896362305],["▁Kadang",-13.368148803710938],["▁יתר",-13.368148803710938],["口味",-13.368148803710938],["өрү",-13.36816120147705],["丘",-13.368165969848633],["▁Zahlung",-13.368178367614746],["טען",-13.36818027496338],["▁требует",-13.36818027496338],["Este",-13.368193626403809],["▁rhain",-13.368206024169922],["▁яг",-13.368209838867188],["erima",-13.368219375610352],["वेत",-13.368224143981934],["▁demək",-13.36822509765625],["자에게",-13.368236541748049],["ッシュ",-13.36823844909668],["صعب",-13.368244171142578],["▁ayının",-13.36825180053711],["▁विराट",-13.368252
754211426],["▁faoliyati",-13.368253707885742],["▁Kiswahili",-13.368257522583008],["▁Tuesday",-13.368257522583008],["▁behövs",-13.368257522583008],["▁wyposażon",-13.368257522583008],["▁وغیرہ",-13.368257522583008],["▁അവളുടെ",-13.368257522583008],["▁받을",-13.368257522583008],["▁Валентин",-13.368258476257324],["▁ਸਾਨੂੰ",-13.368260383605955],["▁Äripäev",-13.368261337280272],["▁योगदान",-13.368263244628906],["▁praktijk",-13.36826992034912],["▁બનાવવા",-13.368277549743652],["owało",-13.368280410766602],["tojë",-13.3682861328125],["▁فیصلے",-13.36829948425293],["▁działań",-13.368301391601562],["▁نظری",-13.368325233459473],["▁غذائي",-13.368337631225586],["▁Κλ",-13.368338584899902],["okból",-13.368340492248535],["▁Syarat",-13.368342399597168],["tworzy",-13.368345260620115],["▁tehdään",-13.368355751037598],["111",-13.368358612060549],["▁devis",-13.368358612060549],["శు",-13.368361473083496],["ЭХ",-13.368362426757812],["▁ātri",-13.368382453918455],["ЛАН",-13.36839199066162],["▁ΤΑ",-13.368407249450684],["▁Juk",-13.36841106414795],["▁prosi",-13.36841106414795],["▁Carta",-13.368416786193848],["▁otroci",-13.368417739868164],["verkehr",-13.368423461914062],["▁lançamento",-13.368432998657228],["uud",-13.368436813354492],["▁раней",-13.36844539642334],["▁padaryti",-13.368451118469238],["▁Qab",-13.368452072143556],["olojik",-13.368453979492188],["ੱਜ",-13.368484497070312],["▁Bem",-13.368489265441896],["húz",-13.368494033813477],["▁diskrimin",-13.36849594116211],["मिति",-13.368505477905272],["充電",-13.368510246276855],["САН",-13.368518829345703],["▁sume",-13.368528366088867],["▁నాగ",-13.368544578552246],["▁కం",-13.368550300598145],["▁trabajadores",-13.368556022644045],["▁정신",-13.36856460571289],["Play",-13.368570327758787],["алд",-13.368573188781738],["主角",-13.368587493896484],["1980",-13.3685884475708],["▁สถานที่",-13.3685884475708],["▁safle",-13.368593215942385],["▁видове",-13.368595123291016],["婆",-13.368609428405762],["▁считают",-13.368637084960938],["▁Kuba",-13.368640899658203],["▁Virus",-13.368642807006836],["▁տարածք",-13.368654251098633],["рога",-13.36866569519043],["cabang",-13.368674278259276],["tiszt",-13.36867618560791],["▁voltar",-13.36868953704834],["빈",-13.368701934814451],["ยุค",-13.36870574951172],["ära",-13.368719100952148],["▁galerij",-13.368741035461426],["ទទួលបាន",-13.368743896484377],["▁otsust",-13.36874771118164],["бөй",-13.368748664855955],["લું",-13.368778228759766],["▁Razak",-13.368779182434082],["混合",-13.368794441223145],["▁എഴുത",-13.368800163269045],["▁አንተ",-13.36880111694336],["▁самому",-13.36881160736084],["▁акын",-13.368844032287598],["▁Վի",-13.368865966796877],["pływ",-13.36886978149414],["деги",-13.368874549865724],["▁старе",-13.368876457214355],["▁arany",-13.368882179260254],["▁헤",-13.36888599395752],["miya",-13.368915557861328],["▁ఇంటి",-13.36893367767334],["你在",-13.36894416809082],["▁темп",-13.36895751953125],["economia",-13.368959426879885],["▁recoll",-13.368964195251465],["šių",-13.368972778320312],["▁glaube",-13.368972778320312],["▁könnyen",-13.36898422241211],["већ",-13.369003295898438],["▁ტი",-13.369029998779297],["pligt",-13.369035720825195],["cser",-13.369036674499512],["▁ଗୁ",-13.369072914123535],["▁lielāka",-13.369073867797852],["ņemt",-13.369075775146484],["层次",-13.3690767288208],["▁سہ",-13.36910915374756],["ాస్",-13.36911964416504],["3.1",-13.369131088256836],["▁comenzar",-13.369132041931152],["▁hjelper",-13.369148254394531],["▁కృష్ణ",-13.36915683746338],["▁వేస",-13.369175910949709],["ಂಧ",-13.369181632995604],["ाधिकार",-13.369217872619627],["▁ရဲ႕",-13.369242668151855],["वरून
",-13.36926555633545],["▁мәселе",-13.369282722473145],["▁चीज",-13.36933422088623],["▁көтөр",-13.369342803955078],["▁קרוב",-13.369351387023926],["▁correr",-13.369359970092772],["jē",-13.369364738464355],["▁სწავლ",-13.369399070739746],["▁notkun",-13.36943244934082],["▁необхідн",-13.369454383850098],["▁Ћ",-13.36947536468506],["▁allvar",-13.369478225708008],["රක්",-13.36948013305664],["▁działanie",-13.369492530822754],["▁изв",-13.369519233703612],["▁husus",-13.369529724121094],["▁pertence",-13.369534492492676],["▁Jual",-13.369536399841309],["▁Agra",-13.369540214538574],["saran",-13.36955451965332],["ไปแล้ว",-13.36956024169922],["ziwa",-13.369562149047852],["会在",-13.369574546813965],["▁badu",-13.369596481323242],["Day",-13.369598388671877],["vráti",-13.369601249694824],["▁ennyi",-13.369668006896973],["▁čtvrt",-13.369684219360352],["ੱਦ",-13.369694709777832],["ಿದ್ದೇನೆ",-13.36970329284668],["▁yin",-13.369711875915527],["▁SOM",-13.369738578796388],["φυ",-13.36974048614502],["ह्य",-13.369755744934082],["дій",-13.369768142700195],["両",-13.369793891906738],["▁iris",-13.369807243347168],["salar",-13.369818687438965],["继",-13.369818687438965],["▁ရက်",-13.369827270507812],["ologisk",-13.369833946228027],["▁втори",-13.369836807250977],["軌",-13.369839668273926],["▁prono",-13.369847297668455],["▁nät",-13.369861602783203],["▁goza",-13.369866371154783],["▁Ammo",-13.369867324829102],["ဖိုး",-13.369874954223633],["ໄມ້",-13.369879722595217],["▁loh",-13.36988639831543],["һ",-13.369887351989746],["▁تصریح",-13.369888305664062],["▁બ્લોગ",-13.369888305664062],["▁Zoom",-13.36988925933838],["▁pasākumu",-13.36988925933838],["▁rửa",-13.36988925933838],["▁потенцијал",-13.36988925933838],["▁премијер",-13.36988925933838],["▁افتخار",-13.36988925933838],["렉",-13.36988925933838],["▁tegemist",-13.369891166687012],["▁վարչական",-13.369891166687012],["δόσεις",-13.36989688873291],["▁තබා",-13.369898796081545],["▁хангах",-13.369903564453123],["▁mācību",-13.36990451812744],["▁olunacaq",-13.369915008544922],["▁Нүүр",-13.369915962219238],["▁가능합니다",-13.369915962219238],["▁वापस",-13.369916915893556],["▁missatge",-13.369921684265137],["▁mechi",-13.369922637939451],["תיים",-13.36992359161377],["▁bekliyor",-13.369930267333984],["ppt",-13.36993408203125],["▁तरुण",-13.369939804077148],["ējumu",-13.36994171142578],["szen",-13.369955062866213],["Արցախ",-13.369967460632324],["ਸਕ",-13.369969367980955],["▁фла",-13.369972229003906],["▁bakarrik",-13.36997413635254],["▁ریز",-13.369975090026855],["▁дослід",-13.369985580444336],["▁Liam",-13.369990348815918],["▁zorganizowa",-13.369991302490234],["άντα",-13.370002746582031],["▁monitoring",-13.370002746582031],["▁अभाव",-13.370003700256348],["▁रखने",-13.370003700256348],["Wal",-13.37001132965088],["tyyppi",-13.370015144348145],["▁hediye",-13.370039939880373],["▁امارات",-13.370041847229004],["▁বাজার",-13.370052337646484],["ovacie",-13.370059967041016],["▁studente",-13.370059967041016],["रियल",-13.37006950378418],["70%",-13.370073318481444],["▁население",-13.37007999420166],["▁plod",-13.370101928710938],["▁forrás",-13.370124816894531],["משפחת",-13.370158195495604],["rinkti",-13.370163917541504],["▁alkaen",-13.370193481445312],["▁Loe",-13.37021255493164],["▁Moll",-13.37021255493164],["▁dimensi",-13.370220184326172],["▁používá",-13.370244026184082],["▁გვა",-13.370250701904297],["▁грех",-13.37025547027588],["▁inventar",-13.370264053344728],["▁스타",-13.370279312133787],["▁maailmas",-13.37028694152832],["▁нашого",-13.370295524597168],["liana",-13.370298385620115],["▁աշխատանքի",-13.370304107666016],["وصل",-13.3703193
66455078],["▁kles",-13.370331764221191],["▁nagpapa",-13.37034034729004],["てしまった",-13.370344161987305],["▁saxla",-13.37037181854248],["▁Föld",-13.37037467956543],["▁fecit",-13.370380401611328],["جرى",-13.370404243469238],["▁ನಮಗೆ",-13.370409965515137],["ықты",-13.370423316955566],["▁realización",-13.37043571472168],["ລະຫວ່າງ",-13.370448112487791],["能否",-13.37044906616211],["pies",-13.370466232299805],["คุ้ม",-13.370477676391602],["Fun",-13.370492935180664],["▁Lider",-13.370494842529297],["ksid",-13.370502471923828],["▁kosmetik",-13.370502471923828],["▁Mex",-13.370521545410156],["▁اسد",-13.370532989501951],["▁वक्त",-13.370532989501951],["ијске",-13.37053680419922],["▁Ама",-13.37054443359375],["▁Bike",-13.370567321777344],["002",-13.370579719543455],["وبة",-13.370582580566406],["りの",-13.370587348937988],["▁substanc",-13.37058925628662],["▁lulus",-13.37059211730957],["▁sources",-13.370623588562012],["жите",-13.370677947998049],["▁berma",-13.370702743530272],["avęs",-13.37070655822754],["▁ਮਾਨ",-13.370707511901855],["▁risi",-13.370726585388184],["▁tothom",-13.370732307434082],["▁часам",-13.370742797851562],["▁правя",-13.370747566223145],["▁proyek",-13.370760917663574],["▁halus",-13.370792388916016],["▁ກ່ຽວກັບ",-13.370817184448242],["▁podeu",-13.370866775512695],["samo",-13.370867729187012],["▁cilës",-13.370868682861328],["▁Pă",-13.37088394165039],["▁скорост",-13.370893478393556],["solid",-13.370926856994627],["▁၂၀",-13.370927810668944],["▁гір",-13.370929718017578],["ονταν",-13.370939254760742],["պեր",-13.370954513549805],["/25",-13.37096118927002],["യ്ക്കും",-13.370965003967283],["▁أج",-13.37096881866455],["ഗാ",-13.37098503112793],["▁korke",-13.370997428894045],["▁Archive",-13.37099838256836],["▁Days",-13.370999336242676],["州市",-13.371006965637209],["▁pga",-13.371018409729004],["▁açıl",-13.371045112609863],["框",-13.371047973632812],["▁serii",-13.371052742004396],["льскай",-13.371055603027344],["▁Tradition",-13.371060371398926],["verde",-13.371074676513672],["平方米",-13.37109661102295],["▁desco",-13.37110710144043],["طول",-13.371131896972656],["▁Insel",-13.371196746826172],["▁adica",-13.371220588684082],["ізація",-13.371224403381348],["▁ფე",-13.371224403381348],["▁plural",-13.371252059936523],["ियत",-13.371257781982422],["▁postavil",-13.371257781982422],["▁estavam",-13.371270179748535],["түн",-13.371271133422852],["▁KT",-13.371346473693848],["ЕП",-13.371359825134276],["Stra",-13.371397972106934],["θεωρ",-13.371417045593262],["▁treballar",-13.371430397033691],["殘",-13.371439933776855],["steb",-13.371454238891602],["द्धा",-13.371454238891602],["▁Ниш",-13.371481895446776],["寵物",-13.371499061584473],["▁Mengapa",-13.371500015258787],["▁envia",-13.371515274047852],["▁Hristos",-13.371521949768066],["▁նախատեսված",-13.371521949768066],["▁בהתאם",-13.371521949768066],["▁صحيفة",-13.371521949768066],["▁თქმით",-13.371521949768066],["ປະທານາທິບໍດີ",-13.371522903442385],["▁подготовки",-13.371522903442385],["▁неделю",-13.3715238571167],["▁mokyklos",-13.371527671813965],["▁Bashkuara",-13.37152862548828],["▁représente",-13.371535301208496],["帶著",-13.371546745300291],["▁əvəz",-13.371556282043455],["▁wybór",-13.37156105041504],["んですけど",-13.371563911437988],["▁Kapit",-13.371578216552734],["▁paglago",-13.371578216552734],["เล่ม",-13.371586799621582],["▁odamlar",-13.371586799621582],["▁מאמרים",-13.37160587310791],["▁മൈ",-13.371618270874023],["▁മുമ്പ്",-13.371620178222656],["▁العاصمة",-13.37162971496582],["▁ضعف",-13.371631622314451],["▁populares",-13.371646881103516],["▁xera",-13.371651649475098],["trie",-13.371676445007324],
["▁Петра",-13.371707916259766],["▁sætter",-13.371753692626951],["ىسىنى",-13.37176513671875],["▁vypadá",-13.371774673461914],["▁ساختار",-13.371785163879396],["▁relativt",-13.37184238433838],["ခဲ့တာ",-13.37185001373291],["მალ",-13.37185287475586],["▁Rhod",-13.37185764312744],["▁수집",-13.371872901916504],["▁noticia",-13.371903419494627],["▁نشانی",-13.371903419494627],["▁કપ",-13.37191104888916],["관계",-13.371968269348145],["▁വായിച്ച",-13.371970176696776],["▁oorlog",-13.37200164794922],["divi",-13.372002601623535],["▁seguido",-13.372004508972168],["▁possibili",-13.3720064163208],["käi",-13.372030258178713],["▁البي",-13.372032165527344],["язки",-13.37204647064209],["▁Universitas",-13.372053146362305],["▁predik",-13.372065544128418],["▁domači",-13.372066497802734],["▁calci",-13.372074127197266],["▁kalp",-13.372097969055176],["▁aute",-13.372102737426758],["▁pengen",-13.372113227844238],["Lab",-13.372121810913086],["▁시간을",-13.372121810913086],["▁toen",-13.372142791748049],["▁learned",-13.37217140197754],["yın",-13.37218189239502],["拍照",-13.3721923828125],["ruti",-13.372224807739258],["▁apta",-13.372249603271484],["▁jiný",-13.372275352478027],["▁всему",-13.37227725982666],["▁könnten",-13.372282028198242],["▁Societat",-13.372319221496582],["▁পরে",-13.372349739074709],["мды",-13.372352600097656],["ອຸ",-13.372374534606934],["пира",-13.372387886047363],["併",-13.372387886047363],["glabā",-13.37238883972168],["▁Teile",-13.37238883972168],["▁222",-13.372391700744627],["▁Алла",-13.372396469116213],["्री",-13.372404098510742],["▁pije",-13.372427940368652],["ვებს",-13.372476577758787],["▁okuma",-13.372480392456056],["▁ell",-13.3725004196167],["▁телевизор",-13.372511863708496],["▁Tê",-13.372532844543455],["▁Hic",-13.372566223144531],["▁Maq",-13.372570991516112],["loģis",-13.37257194519043],["▁опитва",-13.372605323791504],["▁Potom",-13.37261962890625],["▁armë",-13.372641563415527],["▁جمل",-13.372674942016602],["produktion",-13.372687339782717],["▁mejora",-13.372694969177246],["гія",-13.372740745544434],["▁шп",-13.372759819030762],["haltung",-13.372763633728027],["▁pekerja",-13.372767448425291],["▁հատ",-13.372788429260254],["ลิน",-13.37283706665039],["▁glasbe",-13.372838020324709],["▁reciente",-13.372855186462402],["▁participer",-13.372891426086426],["▁tõttu",-13.372912406921388],["іру",-13.372915267944336],["▁архі",-13.372920989990234],["стях",-13.372932434082031],["ราว",-13.37294101715088],["▁barco",-13.372943878173828],["▁personaje",-13.37294578552246],["МД",-13.372954368591309],["▁Նորություններ",-13.372957229614258],["▁obavlja",-13.372970581054688],["ภาวะ",-13.373011589050291],["ียน",-13.373011589050291],["ایک",-13.373018264770508],["чков",-13.37304401397705],["▁avtale",-13.37304401397705],["შვ",-13.37305736541748],["▁drømme",-13.373069763183594],["有一個",-13.373085975646973],["ပိုင္",-13.373091697692873],["▁vaša",-13.373136520385742],["児",-13.373141288757324],["▁Qafqaz",-13.373159408569336],["▁búsqueda",-13.373159408569336],["▁pengobatan",-13.373159408569336],["▁γυναίκες",-13.373159408569336],["▁Всичко",-13.373159408569336],["▁Међународн",-13.373159408569336],["▁арганізацыі",-13.373159408569336],["▁Փաշինյանի",-13.373159408569336],["▁پوائنٹ",-13.373159408569336],["▁শনিবার",-13.373159408569336],["▁ਅਕਤੂਬਰ",-13.373159408569336],["▁දෙමළ",-13.373159408569336],["▁luwih",-13.373160362243652],["▁rege",-13.373160362243652],["▁முடியாது",-13.373162269592283],["部落格",-13.373162269592283],["▁مشغول",-13.373164176940918],["diğim",-13.373165130615234],["▁ಇಡೀ",-13.373165130615234],["▁քեզ",-13.373167037963867],["▁मानसिक",-13.373167
991638184],["▁ਲੱਖ",-13.373169898986816],["▁ਉਹਨਾਂ",-13.37317180633545],["▁талдау",-13.373175621032717],["▁дүние",-13.37317943572998],["▁úrovni",-13.373180389404297],["▁nêzî",-13.373181343078612],["قسام",-13.373188018798828],["▁저희",-13.373188972473145],["hira",-13.373199462890623],["▁Sovet",-13.37320327758789],["ेला",-13.373208045959473],["ιβ",-13.373209953308104],["▁Maailma",-13.373213768005373],["ခြဲ",-13.37321662902832],["▁טא",-13.373217582702637],["▁löytää",-13.373218536376951],["plätze",-13.373221397399902],["▁wiedzy",-13.373221397399902],["▁emrin",-13.373224258422852],["▁građani",-13.373235702514648],["▁пожеж",-13.37324047088623],["Shabaab",-13.373249053955078],["▁चालू",-13.373252868652344],["▁Etiketter",-13.373258590698242],["αίνουν",-13.373265266418455],["όλα",-13.37331771850586],["▁گذاشت",-13.373337745666504],["었습니다",-13.373350143432615],["▁tavo",-13.373363494873049],["likka",-13.373367309570312],["▁ulice",-13.373373985290527],["التزام",-13.373394012451172],["▁grupas",-13.373394966125488],["ندس",-13.373398780822754],["४२",-13.373403549194336],["ቃት",-13.373409271240234],["▁◄",-13.37341594696045],["▁klage",-13.373424530029297],["▁औं",-13.373424530029297],["▁achos",-13.373428344726562],["▁осигурява",-13.373432159423828],["前提",-13.373435974121094],["centes",-13.373443603515623],["ስራ",-13.373443603515623],["▁процентов",-13.373475074768066],["▁sobretot",-13.373485565185549],["▁छौं",-13.373493194580078],["やすく",-13.373493194580078],["orvos",-13.373523712158203],["▁ဘုရား",-13.373527526855469],["▁ngực",-13.373528480529783],["▁андан",-13.373540878295898],["tischen",-13.373544692993164],["▁азамат",-13.373565673828123],["转型",-13.373579978942873],["ભાઇ",-13.373580932617188],["adwy",-13.3735933303833],["▁seizoen",-13.373616218566896],["▁ønsket",-13.373618125915527],["çû",-13.37362003326416],["▁järjestä",-13.37362003326416],["▁Gob",-13.373655319213867],["▁lijep",-13.373662948608398],["ライブ",-13.373666763305664],["୦୦",-13.373669624328612],["▁сестра",-13.37367057800293],["▁терең",-13.37367820739746],["▁आयोजन",-13.37368869781494],["▁hemmelig",-13.37370491027832],["▁tenere",-13.373716354370115],["ியம்",-13.373741149902344],["-03",-13.373781204223633],["rzał",-13.373785018920898],["▁Гэты",-13.373787879943848],["▁ընկերության",-13.37379550933838],["260",-13.37381076812744],["▁graves",-13.373819351196287],["olóxica",-13.373824119567873],["▁ସେମାନଙ୍କ",-13.373828887939451],["ඨ",-13.373839378356934],["▁mohlo",-13.373845100402832],["βί",-13.373855590820312],["▁boost",-13.373891830444336],["گير",-13.373908042907717],["出發",-13.37391185760498],["ləşdirilməsi",-13.37391471862793],["▁akti",-13.37391757965088],["尼亚",-13.373939514160156],["စပ္",-13.373941421508787],["▁economie",-13.373946189880373],["करी",-13.37396240234375],["▁przyda",-13.373993873596191],["▁ମିଳିବ",-13.37400722503662],["▁olmaqla",-13.374014854431152],["bál",-13.374030113220217],["/02",-13.374046325683594],["▁प्ल",-13.374061584472656],["igna",-13.374064445495604],["对话",-13.374068260192873],["▁fondu",-13.37408447265625],["διπλ",-13.374091148376465],["வல",-13.374112129211426],["▁Koš",-13.374125480651855],["▁čtyři",-13.37414836883545],["的生命",-13.374164581298828],["▁ચિ",-13.374178886413574],["▁banque",-13.374215126037598],["▁haluat",-13.374218940734863],["▁hrvatskog",-13.37424373626709],["▁kast",-13.37424373626709],["▁sommaren",-13.37425708770752],["▁креатив",-13.37427043914795],["ավորում",-13.374276161193848],[".''",-13.37427806854248],["▁Ким",-13.374307632446287],["ושים",-13.37432098388672],["▁најде",-13.374333381652832],["▁nochmal",-13.374348640441896],[
"したこと",-13.37435245513916],["▁dawlad",-13.37437343597412],["ظاهر",-13.374377250671388],["▁තරග",-13.374384880065918],["▁retorn",-13.374396324157717],["▁हित",-13.37441635131836],["▁Kategorio",-13.374420166015623],["גלי",-13.37442111968994],["▁Hut",-13.374423027038574],["▁171",-13.374427795410156],["DIA",-13.3744478225708],["hatha",-13.374455451965332],["ข้าม",-13.374455451965332],["出演",-13.374456405639648],["▁Nime",-13.37448501586914],["▁Junta",-13.37449073791504],["▁goreng",-13.374491691589355],["ONAL",-13.374521255493164],["vārd",-13.374526023864746],["seja",-13.374528884887695],["ķis",-13.374557495117188],["▁5.2",-13.374557495117188],["াৰ",-13.374567031860352],["ssero",-13.374578475952148],["тығы",-13.374584197998049],["▁blas",-13.374595642089844],["▁fritt",-13.374615669250488],["▁נפש",-13.374624252319336],["јање",-13.374627113342283],["tiladi",-13.37464714050293],["▁દુ",-13.37465000152588],["▁вистав",-13.374651908874512],["▁لطف",-13.374656677246094],["▁เลย",-13.374663352966309],["▁tunc",-13.374669075012209],["▁désir",-13.374674797058104],["sätze",-13.374706268310549],["алните",-13.374714851379396],["▁bëri",-13.374720573425291],["騎",-13.374728202819824],["▁мекемелер",-13.374741554260254],["▁کشف",-13.37474536895752],["▁exclusive",-13.374762535095217],["廚",-13.374765396118164],["因而",-13.374771118164062],["▁ló",-13.374773979187012],["介護",-13.37477684020996],["טאָ",-13.374777793884276],["▁үргэлж",-13.374777793884276],["…",-13.374794960021973],["ເຫນືອ",-13.374797821044922],["▁fönster",-13.374797821044922],["▁хүүхдүүд",-13.374797821044922],["▁bezpieczeństwa",-13.374798774719238],["▁melahirkan",-13.374798774719238],["▁mitjançant",-13.374798774719238],["▁perjuangan",-13.374798774719238],["▁μεγαλύτερη",-13.374798774719238],["▁Израел",-13.374798774719238],["▁परंतु",-13.374798774719238],["▁भ्रष्टाचार",-13.374798774719238],["▁தற்போது",-13.374798774719238],["▁Yahudi",-13.374799728393556],["▁belőle",-13.374801635742188],["▁једну",-13.374802589416504],["▁Mahallesi",-13.374810218811035],["սպ",-13.374812126159668],["▁analýz",-13.3748140335083],["▁албум",-13.374815940856934],["▁ekitaldi",-13.374821662902832],["▁исторически",-13.374825477600098],["▁землю",-13.374833106994627],["ウン",-13.374833106994627],["▁onderwijs",-13.374835968017578],["ചി",-13.37484073638916],["▁Prezent",-13.374845504760742],["▁Σπ",-13.374845504760742],["▁aurkitu",-13.374855995178224],["หู",-13.374858856201172],["▁(2007)",-13.37486457824707],["মের",-13.374871253967283],["enllaç",-13.374879837036133],["▁érdek",-13.37488079071045],["▁3-0",-13.374899864196776],["ຄວນ",-13.374910354614258],["▁bydrae",-13.37491226196289],["engage",-13.374924659729004],["వై",-13.374926567077637],["▁ചെയ്യും",-13.374927520751951],["立て",-13.37497329711914],["▁Nauja",-13.374984741210938],["ીન",-13.37498664855957],["▁kalendar",-13.37498664855957],["▁Kanske",-13.375001907348633],["▁Կարեն",-13.375015258789062],["ecko",-13.37502670288086],["эш",-13.375044822692873],["讀者",-13.375059127807615],["要在",-13.375073432922363],["piše",-13.375091552734377],["čiek",-13.375091552734377],["ਤਾਰ",-13.375110626220703],["▁framtiden",-13.375123977661133],["kasih",-13.375140190124512],["▁учител",-13.375151634216309],["SCH",-13.375199317932127],["ināšanas",-13.375211715698242],["▁kiujn",-13.375211715698242],["▁كأس",-13.375214576721191],["▁ថៃ",-13.37522029876709],["ጥን",-13.375222206115724],["▁hrá",-13.375231742858888],["▁Tø",-13.37526512145996],["▁Zijn",-13.37526798248291],["▁ግንኙነት",-13.375268936157228],["▁Luke",-13.375319480895996],["বান",-13.375332832336426],["ystės",-13.375336647033691],
["matto",-13.375340461730955],["▁Informoj",-13.375350952148438],["ក្រោយ",-13.375358581542969],["▁recomanda",-13.375375747680664],["▁Cré",-13.37538242340088],["허",-13.375388145446776],["ДІ",-13.375443458557127],["ána",-13.375455856323242],["୍ରା",-13.37546157836914],["▁كنند",-13.375484466552734],["applicazione",-13.375502586364746],["▁phái",-13.375508308410645],["▁sibuk",-13.37550926208496],["ीचे",-13.375527381896973],["îyên",-13.375532150268556],["▁poshtë",-13.375544548034668],["▁Present",-13.3755521774292],["▁पाने",-13.375557899475098],["▁gruppen",-13.375565528869627],["مەك",-13.375577926635742],["関連",-13.375581741333008],["▁şərh",-13.37559986114502],["▁Гос",-13.375608444213867],["כמות",-13.37562370300293],["▁книгата",-13.37564468383789],["▁ಹೋ",-13.375664710998535],["▁सदा",-13.37567138671875],["Black",-13.375679969787598],["▁exe",-13.375690460205078],["գործ",-13.37569522857666],["джу",-13.37569808959961],["▁vėliau",-13.375699996948242],["одно",-13.37571144104004],["▁douce",-13.375715255737305],["▁представител",-13.375720024108888],["▁Mladi",-13.375723838806152],["機關",-13.375743865966797],["لیک",-13.375744819641112],["жж",-13.375762939453123],["▁heq",-13.375783920288086],["uwi",-13.375792503356934],["mmande",-13.37582015991211],["▁पुण्य",-13.375824928283691],["▁миру",-13.375843048095703],["தற்",-13.37585163116455],["чання",-13.375872611999512],["▁imita",-13.375898361206056],["▁svake",-13.375922203063965],["▁хөг",-13.37594223022461],["ร่า",-13.375984191894531],["▁କରାଯିବ",-13.375990867614746],["▁motta",-13.376023292541504],["ሰቡ",-13.376066207885742],["λης",-13.376076698303224],["järjestö",-13.376086235046388],["კურს",-13.376092910766602],["▁estate",-13.3760986328125],["kvar",-13.376099586486816],["▁lokacin",-13.376108169555664],["▁Gründe",-13.37611198425293],["▁Jaume",-13.376112937927246],["▁13.00",-13.376152992248535],["▁Көп",-13.376160621643066],["iloj",-13.376168251037598],["ніж",-13.376173973083496],["járat",-13.376187324523926],["▁업체",-13.37619400024414],["▁Ŝi",-13.37619686126709],["копа",-13.376228332519531],["发生了",-13.376239776611328],["▁chances",-13.376294136047363],["tamisesta",-13.376309394836426],["估",-13.376350402832031],["▁वाह",-13.37635612487793],["奋",-13.376361846923828],["▁селищ",-13.37636375427246],["शत",-13.376368522644045],["ාවක්",-13.376368522644045],["арка",-13.37637710571289],["süsteem",-13.376380920410156],["leverandør",-13.376387596130373],["翼",-13.37641716003418],["検",-13.376418113708496],["剤",-13.376425743103027],["すごく",-13.37643051147461],["▁さて",-13.376431465148926],["▁Sitten",-13.376434326171877],["роден",-13.376439094543455],["แชมป์",-13.376440048217772],["កំណត់",-13.376440048217772],["▁hrvatske",-13.376440048217772],["▁ننګرهار",-13.376440048217772],["▁کارکنان",-13.376440048217772],["▁ኮሚቴ",-13.376440048217772],["▁vjerojatno",-13.37644100189209],["장애인",-13.37644100189209],["▁təbii",-13.376441955566406],["▁خورشید",-13.376441955566406],["▁പ്രതീക്ഷ",-13.376441955566406],["▁kuanza",-13.376447677612305],["▁najnowsz",-13.376449584960938],["▁рынок",-13.376456260681152],["▁Երբ",-13.376459121704102],["▁עריכה",-13.376459121704102],["▁tvivl",-13.376460075378418],["▁میاں",-13.376462936401367],["▁hugmynd",-13.376471519470217],["ประชา",-13.376473426818848],["ကျောင်း",-13.37647533416748],["▁megismer",-13.376476287841797],["▁шап",-13.376476287841797],["▁שעה",-13.376479148864746],["▁namizəd",-13.376483917236328],["▁עושים",-13.376486778259276],["▁enega",-13.376490592956545],["▁სკ",-13.376492500305176],["▁ساحل",-13.376493453979492],["▁Naud",-13.376502990722656],["▁sprijin",-13.3765039
44396973],["▁desafio",-13.376511573791504],["情况下",-13.376514434814451],["েল",-13.376522064208984],["如果是",-13.376529693603516],["▁materiałów",-13.376537322998049],["▁yaşayış",-13.376543998718262],["▁આધાર",-13.376544952392578],["▁тракт",-13.376546859741213],["▁galt",-13.376551628112791],["भाष",-13.37655258178711],["▁pytania",-13.376554489135742],["▁ontstaan",-13.37655544281006],["▁Новости",-13.376564025878906],["▁jumpa",-13.376594543457031],[".10.2017",-13.376626014709473],["▁قيمت",-13.376632690429688],["▁selvom",-13.376662254333496],["▁accesso",-13.376664161682127],["▁používania",-13.376669883728027],["▁정의",-13.376670837402344],["natt",-13.376672744750977],["▁хамгаал",-13.37668800354004],["▁הטוב",-13.376688957214355],["oittaja",-13.376696586608888],["▁nokkur",-13.376699447631836],["ычны",-13.37670612335205],["១០",-13.37672519683838],["ไหล",-13.376729011535645],["▁jelenleg",-13.37672996520996],["▁hua",-13.376730918884276],["▁사람은",-13.376733779907228],["likus",-13.376745223999023],["相機",-13.376748085021973],["torium",-13.376776695251465],["▁šā",-13.376786231994627],["▁sekunder",-13.376798629760742],["伏",-13.376805305480955],["கும்",-13.37681484222412],["pean",-13.376834869384766],["▁virksomhet",-13.37684440612793],["▁večji",-13.37686252593994],["फो",-13.376897811889648],["Jun",-13.376904487609863],["▁ailleurs",-13.376911163330078],["stati",-13.376919746398926],["▁rente",-13.376922607421877],["ලයි",-13.376941680908203],["▁izpild",-13.376943588256836],["▁કેટલાક",-13.376948356628418],["▁gekommen",-13.37695026397705],["▁приходи",-13.376965522766112],["▁спаси",-13.37696933746338],["ರ್ಥ",-13.376978874206545],["▁мәселелері",-13.376978874206545],["▁Miért",-13.376989364624023],["▁commerce",-13.37699031829834],["▁quería",-13.37700080871582],["▁օրենքի",-13.377017974853516],["假日",-13.377029418945312],["स्ते",-13.37704086303711],["ناظر",-13.377073287963867],["्रि",-13.37707805633545],["▁pojawił",-13.377082824707031],["▁სახლი",-13.377083778381348],["▁විසි",-13.37710666656494],["rimit",-13.377115249633787],["න්ති",-13.377119064331056],["▁Knull",-13.377137184143066],["▁Pene",-13.377138137817385],["▁sabia",-13.377138137817385],["▁salita",-13.377140998840332],["▁gelmiş",-13.377145767211914],["غرض",-13.377150535583496],["ຮ່ວມ",-13.377154350280762],["ਜੇ",-13.377203941345217],["كيل",-13.377211570739746],["lıdır",-13.37721824645996],["▁κατηγορία",-13.377230644226074],["ഫോ",-13.37723445892334],["▁চু",-13.377262115478516],["论文",-13.377263069152832],["▁захар",-13.377264022827148],["却是",-13.37728500366211],["isté",-13.377291679382324],["▁beginnt",-13.377291679382324],["▁señala",-13.377300262451172],["▁bail",-13.377323150634766],["ПЛ",-13.377325057983398],["brig",-13.377334594726562],["▁తెర",-13.377336502075195],["▁ಬಂದು",-13.37734317779541],["▁szokás",-13.377347946166992],["▁объекта",-13.377363204956056],["чака",-13.377370834350586],["▁mundi",-13.377386093139648],["Ķ",-13.377398490905762],["▁bags",-13.377413749694824],["▁전시",-13.377461433410645],["önü",-13.377477645874023],["▁sàng",-13.377477645874023],["▁دیدن",-13.377483367919922],["קרן",-13.377532958984377],["partiet",-13.377544403076172],["باك",-13.377545356750488],["łek",-13.377548217773438],["电商",-13.377551078796388],["▁Dasar",-13.37762451171875],["▁ευχ",-13.377628326416016],["ፎች",-13.377636909484863],["מינים",-13.377638816833496],["▁خدای",-13.37764835357666],["▁فمن",-13.377650260925291],["▁Beşiktaş",-13.377655982971191],["▁Əliyeva",-13.377663612365724],["▁مشاهدة",-13.37766456604004],["胜利",-13.377704620361328],["▁qartë",-13.377705574035645],["▁hinda",-13.3777246475
21973],["oloģijas",-13.377744674682615],["▁citron",-13.377747535705566],["床上",-13.377752304077148],["▁четыре",-13.377761840820312],["▁шука",-13.37778091430664],["▁düşünce",-13.377781867980955],["illaan",-13.37785530090332],["▁одан",-13.37786865234375],["ඥා",-13.377869606018066],["▁incontro",-13.377873420715332],["شہ",-13.377911567687988],["▁deberán",-13.377917289733888],["竞",-13.377918243408203],["▁Voz",-13.377923011779783],["▁айткан",-13.377923965454102],["▁agents",-13.37792682647705],["▁tercera",-13.37793254852295],["▁тұрған",-13.377936363220217],["▁derbyn",-13.37794017791748],["miliki",-13.377942085266112],["▁përpara",-13.377951622009276],["นึก",-13.377959251403809],["▁радіо",-13.37796688079834],["κεντρ",-13.377972602844238],["▁ჯგუფ",-13.37798309326172],["RIM",-13.377988815307615],["▁gey",-13.377989768981934],["▁Mě",-13.3779935836792],["選択",-13.378000259399414],["▁länk",-13.378002166748049],["▁سىز",-13.378003120422363],["转变",-13.378003120422363],["▁Түр",-13.378005981445312],["რც",-13.378007888793944],["нш",-13.378010749816896],["▁3500",-13.378011703491213],["▁trak",-13.378019332885742],["▁2:1",-13.37803077697754],["läget",-13.3780517578125],["プログラム",-13.378069877624512],["šou",-13.37807846069336],["▁Sampai",-13.378080368041992],["▁sierpnia",-13.378085136413574],["▁літаратуры",-13.378085136413574],["▁миллиард",-13.378085136413574],["▁ღირს",-13.378085136413574],["▁Κέντρο",-13.37808609008789],["▁Такође",-13.378094673156738],["▁ඇතැයි",-13.378097534179688],["▁해당되는",-13.378100395202637],["aldea",-13.378110885620115],["▁વખતે",-13.378119468688965],["▁blijkt",-13.37812042236328],["▁נר",-13.378131866455078],["▁목표",-13.37814235687256],["iecību",-13.378144264221191],["▁چاہئے",-13.378153800964355],["▁बेटी",-13.37816333770752],["▁vidět",-13.378168106079102],["▁tudjuk",-13.378169059753418],["ראי",-13.378174781799316],["สีขาว",-13.378174781799316],["▁İste",-13.37817668914795],["tinga",-13.37818431854248],["▁රෑ",-13.378185272216797],["▁उनलाई",-13.378191947937012],["▁Wisata",-13.378206253051758],["▁pixel",-13.378212928771973],["▁[-]",-13.37822723388672],["hagi",-13.37826442718506],["▁grin",-13.378268241882324],["▁پرا",-13.378277778625488],["▁تركيا",-13.3783540725708],["▁ljubavi",-13.378376007080078],["zazi",-13.378409385681152],["ాన్",-13.378430366516112],["▁específica",-13.378430366516112],["сің",-13.378437995910645],["▁منعقد",-13.378437995910645],["▁ماي",-13.378445625305176],["▁concellos",-13.378448486328123],["▁адміністрації",-13.378457069396973],["的合作",-13.378483772277832],["▁selfie",-13.378524780273438],["ėjusi",-13.378530502319336],["▁Concert",-13.378530502319336],["Од",-13.378532409667969],["simme",-13.378534317016602],["▁JK",-13.37856388092041],["AUS",-13.378564834594728],["▁Rast",-13.37860107421875],["slava",-13.37860870361328],["▁važno",-13.378612518310549],["▁يار",-13.378629684448242],["▁וס",-13.378636360168455],["إعلام",-13.378668785095217],["▁cuti",-13.37873077392578],["ွန်း",-13.378742218017578],["▁አመት",-13.37874698638916],["▁Arkiv",-13.378765106201172],["උ",-13.37884521484375],["leistung",-13.378847122192385],["ຫມາຍ",-13.378859519958496],["darbībā",-13.378861427307127],["دلی",-13.378861427307127],["ಳಿಯ",-13.378872871398926],["▁Српска",-13.378872871398926],["អី",-13.37887954711914],["▁ukupno",-13.378888130187988],["▁Welle",-13.37889289855957],["▁tokom",-13.37890338897705],["ésével",-13.37893009185791],["ичко",-13.378931999206545],["קוק",-13.37893772125244],["▁수준",-13.37899398803711],["ódik",-13.37899684906006],["سقط",-13.379003524780272],["▁Penye",-13.379003524780272],["哲",-13.37901496887207
],["▁знаменит",-13.379019737243652],["CAT",-13.37903118133545],["▁рам",-13.379043579101562],["NGU",-13.379045486450195],["▁Infos",-13.379046440124512],["▁formie",-13.379053115844728],["▁lisans",-13.379054069519045],["▁ផ្សាយ",-13.379070281982422],["▁päällä",-13.379080772399902],["▁энд",-13.379122734069824],["יִ",-13.379132270812988],["▁அக",-13.37915325164795],["ਪਲ",-13.379234313964844],["▁samego",-13.379244804382324],["▁বেশ",-13.379254341125488],["▁juz",-13.379267692565918],["▁ທຸລະກິດ",-13.379267692565918],["पार",-13.37929344177246],["▁акција",-13.379318237304688],["▁sudar",-13.379355430603027],["podrobn",-13.379362106323242],["▁ماء",-13.37936782836914],["くなった",-13.379372596740724],["▁рассказ",-13.379380226135254],["ତୀ",-13.37938117980957],["▁dirigi",-13.379413604736328],["ару",-13.37941551208496],["▁Poste",-13.379426956176758],["▁patut",-13.379432678222656],["▁определить",-13.379480361938477],["ீங்க",-13.379483222961426],["ਪੇ",-13.379484176635742],["▁litteratur",-13.379496574401855],["▁lapas",-13.379509925842283],["▁örnek",-13.37951374053955],["авање",-13.379562377929688],["▁Maks",-13.379596710205078],["gemaakt",-13.379603385925291],["▁knap",-13.37960433959961],["▁0,7",-13.37962245941162],["▁открыт",-13.379639625549316],["conta",-13.379668235778809],["లక్ష",-13.379677772521973],["▁გვე",-13.379683494567873],["▁jakt",-13.37969207763672],["估计",-13.379698753356934],["▁etadi",-13.37969970703125],["忍不住",-13.379703521728516],["鳴",-13.379705429077148],["▁баян",-13.379709243774414],["バランス",-13.379716873168944],["lbstverständlich",-13.379732131958008],["ปฏิ",-13.379732131958008],["▁Daxili",-13.379732131958008],["▁насколько",-13.379732131958008],["▁өгсөн",-13.379732131958008],["▁ማዕከል",-13.379732131958008],["▁naujienos",-13.37973403930664],["▁تباہ",-13.379737854003906],["▁ଥାନା",-13.379738807678224],["有时候",-13.379740715026855],["▁രണ്ടാം",-13.379746437072754],["იმე",-13.37975025177002],["▁ireki",-13.379755020141602],["▁природно",-13.379755020141602],["▁האחרון",-13.3797607421875],["▁семінар",-13.379768371582031],["ဒ္",-13.379770278930664],["▁صلاحیت",-13.379770278930664],["بناء",-13.379775047302246],["гавор",-13.37978458404541],["▁બીજી",-13.379786491394045],["ÄN",-13.379791259765623],["▁Bora",-13.37979221343994],["▁Kasi",-13.379801750183104],["više",-13.379813194274902],["gatás",-13.379828453063965],["isseur",-13.379836082458496],["▁տվել",-13.379837989807127],["ীদের",-13.379838943481444],["▁VÀ",-13.37984848022461],["▁Občin",-13.379857063293455],["▁orsak",-13.379870414733888],["▁کلي",-13.37988567352295],["▁outdoor",-13.379887580871582],["▁comunicação",-13.379904747009276],["▁1995.",-13.379905700683594],["▁Religion",-13.379927635192873],["väri",-13.379939079284668],["ăţi",-13.379952430725098],["▁АҚ",-13.379953384399414],["▁kaun",-13.379965782165527],["▁1.500",-13.379990577697754],["▁equipment",-13.379992485046388],["▁simti",-13.379992485046388],["োর",-13.37999439239502],["▁méter",-13.380000114440918],["აყ",-13.380002975463867],["원의",-13.380008697509766],["▁investir",-13.380019187927246],["▁смисъл",-13.380030632019045],["▁відповідни",-13.38003158569336],["▁համակարգի",-13.380032539367676],["▁माया",-13.38003635406494],["သီ",-13.380048751831056],["▁REC",-13.380059242248535],["▁Sava",-13.380059242248535],["命令",-13.380062103271484],["▁package",-13.380066871643066],["▁pièce",-13.38008975982666],["▁лидери",-13.380107879638672],["БГ",-13.380120277404783],["▁sitesi",-13.380120277404783],["▁cultiva",-13.38012409210205],["Ғ",-13.380151748657228],["▁opravlja",-13.380162239074709],["▁وفات",-13.380162239074709],["▁ontdek",
-13.3801908493042],["ところで",-13.380203247070312],["ေရာင္",-13.380209922790527],["▁Fondu",-13.380209922790527],["४५",-13.380220413208008],["▁त्यसको",-13.380227088928224],["cení",-13.380240440368652],["PAS",-13.38025188446045],["▁towards",-13.380264282226562],["ശം",-13.380290985107422],["▁аби",-13.380306243896484],["▁စိတ်",-13.380321502685549],["иците",-13.380326271057127],["moq",-13.380327224731444],["▁кошти",-13.380332946777344],["▁atskir",-13.380334854125977],["としての",-13.380346298217772],["▁gunung",-13.38034725189209],["紅色",-13.380351066589355],["gola",-13.380365371704102],["▁modd",-13.38038158416748],["▁aard",-13.380385398864746],["ағы",-13.380396842956545],["▁fes",-13.380404472351074],["▁gamme",-13.380410194396973],["▁290",-13.38041877746582],["・・・。",-13.380437850952148],["การรักษา",-13.380460739135742],["ღვი",-13.38051414489746],["▁душу",-13.380533218383787],["▁sindrom",-13.380569458007812],["▁nautti",-13.380606651306152],["▁vermi",-13.380621910095217],["ชิง",-13.380623817443848],["masının",-13.380635261535645],["gerecht",-13.380640983581545],["カル",-13.380645751953123],["▁Општи",-13.380672454833984],["▁yhteistyö",-13.380699157714844],["▁endi",-13.380702018737791],["▁Kaffee",-13.380706787109377],["ეფ",-13.3807373046875],["▁alga",-13.380741119384766],["▁савремен",-13.380743026733398],["▁kollega",-13.380751609802246],["ရို",-13.380762100219728],["scape",-13.38077163696289],["▁boje",-13.380789756774902],["▁kurac",-13.380793571472168],["▁jovens",-13.380799293518066],["▁riscos",-13.380806922912598],["אמ",-13.38080883026123],["▁rondom",-13.380812644958496],["▁relativo",-13.380816459655762],["▁Нур",-13.380824089050291],["цький",-13.38082504272461],["▁Cumar",-13.380829811096191],["▁potensial",-13.38083839416504],["rovná",-13.380846977233888],["自治",-13.380849838256836],["ÇA",-13.38086223602295],["▁собственост",-13.38089084625244],["▁знає",-13.38089656829834],["europa",-13.380901336669922],["▁პარ",-13.380910873413086],["▁አደ",-13.380916595458984],["НОВ",-13.380924224853516],["▁zale",-13.380924224853516],["քեր",-13.380936622619627],["▁singkat",-13.380938529968262],["▁sobě",-13.380939483642578],["▁alimentar",-13.380967140197754],["▁sveikata",-13.380975723266602],["ww",-13.380986213684082],["▁צוריק",-13.381011009216309],["ΣΗ",-13.381016731262209],["▁մշակ",-13.381021499633787],["▁ياز",-13.38103199005127],["▁матэрыял",-13.3810396194458],["▁መሰ",-13.381048202514648],["▁বার",-13.381065368652344],["▁aldatu",-13.38106918334961],["▁подробно",-13.381105422973633],["▁1970-",-13.381111145019531],["ابي",-13.381113052368164],["第六",-13.381120681762695],["▁Jel",-13.381163597106934],["オン",-13.38117218017578],["健康的",-13.381187438964844],["ανε",-13.381216049194336],["▁නැතිව",-13.381232261657717],["▁ropa",-13.381240844726562],["▁zbirk",-13.381243705749512],["▁mõned",-13.38124656677246],["सुर",-13.38125991821289],["展现",-13.381261825561523],["ांग",-13.381270408630373],["▁Estatu",-13.381298065185549],["▁заявления",-13.381330490112305],["▁terorist",-13.381336212158203],["▁Кре",-13.381340026855469],["における",-13.381353378295898],["実施",-13.381354331970217],["రాలు",-13.381380081176758],["▁ব্যক্তি",-13.381380081176758],["▁particolari",-13.381382942199709],["▁онлајн",-13.381382942199709],["▁موبائل",-13.381382942199709],["▁ਕੇਂਦਰ",-13.381382942199709],["▁අවුරුද්ද",-13.381382942199709],["▁დაბრუნება",-13.381382942199709],["▁መሠረት",-13.381382942199709],["弊社",-13.381382942199709],["▁уживо",-13.381383895874023],["▁ළඟ",-13.381383895874023],["▁பாடல்",-13.381385803222656],["▁ក្រោម",-13.381386756896973],["▁fällt",-13.381387710571287],["▁আজকের"
,-13.381389617919922],["▁በሌላ",-13.381391525268556],["▁πρόσ",-13.381392478942873],["▁σταθερ",-13.381392478942873],["▁TN",-13.38139533996582],["▁Υπο",-13.381402015686035],["▁başlayıb",-13.3814058303833],["▁ಕ್ಷೇತ್ರ",-13.381406784057615],["▁മുഴുവന്",-13.381406784057615],["▁לאורך",-13.381407737731934],["▁begonnen",-13.381412506103516],["ทีมชาติ",-13.381420135498049],["▁marami",-13.381421089172363],["▁αρκετά",-13.381428718566896],["▁가져",-13.381442070007324],["▁Kolo",-13.381454467773438],["ksyon",-13.381468772888184],["bantur",-13.38149356842041],["格局",-13.381498336791992],["pierw",-13.381505012512209],["▁המשחק",-13.381524085998535],["ხარჯ",-13.381556510925291],["▁гарсан",-13.381556510925291],["▁เล่น",-13.38156795501709],["▁false",-13.381572723388672],["▁Bisnis",-13.381573677062988],["▁جوش",-13.38157844543457],["▁memory",-13.381579399108888],["▁Restaurants",-13.381599426269531],["▁spremeni",-13.38160228729248],["представ",-13.381609916687012],["▁однесува",-13.381614685058594],["血管",-13.38161563873291],["▁интереса",-13.38164520263672],["▁Кен",-13.381668090820312],["▁puikiai",-13.38167667388916],["конодательством",-13.381694793701172],["hinder",-13.38169765472412],["temperatur",-13.3817138671875],["BOL",-13.38174057006836],["▁เพลง",-13.381744384765623],["▁negócio",-13.38175106048584],["▁kulinar",-13.38176727294922],["▁سنج",-13.38176727294922],["▁Nordic",-13.38177490234375],["▁തിര",-13.381802558898926],["▁salu",-13.381808280944824],["blis",-13.381827354431152],["YON",-13.38183307647705],["▁Pagal",-13.381834030151367],["şen",-13.381836891174316],["▁Sacra",-13.38183879852295],["▁Законот",-13.381874084472656],["ບໍ່ມີ",-13.381885528564451],["▁langile",-13.381905555725098],["▁चाहे",-13.381915092468262],["▁compila",-13.38192081451416],["▁واقعات",-13.381924629211426],["▁участі",-13.381925582885742],["stap",-13.381927490234377],["▁බවද",-13.38193416595459],["fem",-13.381942749023438],["▁പറഞ്ഞത്",-13.381950378417969],["▁Kova",-13.381954193115234],["sonder",-13.381957054138184],["чества",-13.381958961486816],["▁besig",-13.381978034973145],["▁describe",-13.381986618041992],["▁මස",-13.381986618041992],["▁motivation",-13.38199234008789],["▁డే",-13.382006645202637],["▁gelang",-13.38201141357422],["生産",-13.382015228271484],["▁రావు",-13.382034301757812],["▁варта",-13.382037162780762],["امين",-13.382049560546877],["▁විශ්ව",-13.382052421569824],["创作",-13.382061004638672],["ाळ",-13.38206672668457],["▁занимава",-13.382074356079102],["້ງ",-13.382100105285645],["克服",-13.38210105895996],["▁салық",-13.382105827331545],["▁pep",-13.382109642028809],["tellä",-13.382142066955566],["▁весел",-13.382153511047363],["벌",-13.382182121276855],["▁bildu",-13.38218879699707],["▁gledal",-13.382198333740234],["John",-13.382216453552246],["Пар",-13.382227897644045],["▁viver",-13.38223361968994],["▁hoopis",-13.382247924804688],["▁travnja",-13.382257461547852],["▁如果你",-13.382261276245115],["利息",-13.382299423217772],["▁товара",-13.38230800628662],["▁devez",-13.382330894470217],["েস",-13.382344245910645],["TELE",-13.382347106933594],["▁daryti",-13.38234806060791],["▁zhvillim",-13.382354736328123],["बन",-13.38235855102539],["jevi",-13.382369041442873],["ाक",-13.382375717163086],["郑",-13.382401466369627],["LÓ",-13.382411003112791],["▁érintett",-13.382433891296388],["▁šāda",-13.382436752319336],["ಣಾ",-13.382445335388184],["▁diminui",-13.382448196411133],["精力",-13.382453918457031],["▁financijsk",-13.382488250732422],["havi",-13.382491111755373],["▁ھال",-13.38249969482422],["▁DF",-13.382549285888672],["prins",-13.38257884979248],["ିବା",-13.3826150894
16504],["▁fotoana",-13.382628440856934],["известни",-13.382645606994627],["▁Viking",-13.382655143737791],["grün",-13.382670402526855],["▁garbi",-13.382683753967283],["▁πόσο",-13.38268756866455],["址",-13.38268756866455],["▁اخلاقی",-13.382710456848145],["ruc",-13.382723808288574],["▁Each",-13.382728576660156],["ογ",-13.382729530334473],["ējis",-13.382752418518066],["غضب",-13.382755279541016],["▁статистика",-13.382760047912598],["कॉम",-13.382767677307127],["▁ධර්ම",-13.38277816772461],["▁легенд",-13.38279151916504],["▁avanti",-13.38279628753662],["▁næsta",-13.38280200958252],["િયન",-13.382805824279783],["▁vlade",-13.382810592651367],["▁استعداد",-13.382837295532228],["нси",-13.382842063903809],["اعتبار",-13.382850646972656],["▁compete",-13.382862091064451],["▁stykke",-13.382875442504885],["▁poñer",-13.38288688659668],["▁steps",-13.382889747619627],["▁nəqliyyat",-13.382894515991213],["南部",-13.382894515991213],["▁kobiety",-13.38290023803711],["ായ്",-13.382929801940918],["geven",-13.382949829101562],["弃",-13.382953643798828],["▁ಹಿಂ",-13.382954597473145],["日本語",-13.382957458496094],["依靠",-13.382960319519045],["stimmt",-13.38298797607422],["▁upor",-13.382993698120115],["ņē",-13.383001327514648],["维持",-13.38300323486328],["▁المسيح",-13.38300895690918],["▁पुत्र",-13.383009910583496],["籌",-13.383009910583496],["▁pagas",-13.383011817932127],["メーカー",-13.383015632629396],["ภาษี",-13.383034706115724],["▁Кытай",-13.383034706115724],["▁giovane",-13.38303565979004],["▁rendkívül",-13.38303565979004],["▁sesungguhnya",-13.38303565979004],["▁рабочих",-13.38303565979004],["▁لوازم",-13.38303565979004],["▁۱۳۹۵",-13.38303565979004],["▁संकेत",-13.38303565979004],["▁شہید",-13.383036613464355],["▁উচ্চ",-13.383037567138672],["▁wixii",-13.383038520812988],["网址",-13.383044242858888],["▁ትግል",-13.383049964904783],["▁situada",-13.383052825927734],["▁도착",-13.383054733276367],["▁այնպես",-13.383060455322266],["▁zenbait",-13.383065223693848],["▁לעבן",-13.383065223693848],["σκέ",-13.38306713104248],["▁башкаруу",-13.38306999206543],["suun",-13.383089065551758],["▁Μετα",-13.383097648620604],["▁kitchen",-13.383118629455566],["ທັນ",-13.383135795593262],["▁Tampere",-13.383138656616213],["▁sotme",-13.383150100708008],["പര",-13.383153915405272],["▁publikum",-13.38316249847412],["▁Luís",-13.383164405822754],["▁Chy",-13.383176803588867],["▁gäster",-13.383187294006348],["▁Sür",-13.383190155029297],["▁නීතිය",-13.383195877075195],["▁πάει",-13.383201599121094],["▁प्रे",-13.383255004882812],["▁кафедр",-13.383267402648926],["▁বছরের",-13.38327693939209],["▁فرع",-13.383281707763672],["▁relatif",-13.383283615112305],["ாம",-13.383293151855469],["▁Declara",-13.38330078125],["有一天",-13.383301734924316],["diyini",-13.383302688598633],["▁manger",-13.383309364318848],["ženja",-13.383341789245604],["▁Горе",-13.38335418701172],["▁sopi",-13.383358001708984],["▁پان",-13.383379936218262],["▁изглед",-13.383382797241213],["▁bytte",-13.383386611938477],["കൂട",-13.38338851928711],["▁სან",-13.383390426635742],["▁bebé",-13.383402824401855],["ულის",-13.383405685424805],["▁지난해",-13.38340663909912],["anada",-13.383427619934082],["▁Свято",-13.383450508117676],["▁Omdat",-13.383471488952637],["ΙΝ",-13.383480072021484],["▁ប៉",-13.383492469787598],["▁tender",-13.383501052856444],["spæn",-13.383511543273926],["lumina",-13.383567810058594],["▁офіційн",-13.383577346801758],["、『",-13.38358211517334],["કુલ",-13.383584022521973],["ದರ್",-13.383623123168944],["付款",-13.383639335632324],["SSI",-13.38364028930664],["▁Peki",-13.38364028930664],["▁7%",-13.38364601135254],["▁၏",-13.383647
918701172],["FORM",-13.383659362792969],["mahan",-13.38366413116455],["ہو",-13.383679389953612],["thanda",-13.38368034362793],["▁നടക്ക",-13.383687019348145],["▁الرياض",-13.383706092834473],["▁nozīmē",-13.383708953857422],["▁Савета",-13.383711814880373],["▁planira",-13.383722305297852],["нском",-13.383726119995115],["▁1996.",-13.38373851776123],["▁Consulte",-13.383740425109863],["▁Zul",-13.383753776550291],["▁nomes",-13.383756637573242],["ड्ड",-13.383763313293455],["▁dake",-13.383764266967772],["▁centar",-13.38377571105957],["▁Paese",-13.38377857208252],["▁रक्षा",-13.38377857208252],["Έ",-13.383785247802734],["FD",-13.38379192352295],["ikot",-13.383818626403809],["▁danach",-13.383818626403809],["▁pintar",-13.383818626403809],["szczenie",-13.383835792541504],["▁сакта",-13.38383960723877],["▁Які",-13.383840560913086],["▁چکی",-13.383851051330566],["prosjekt",-13.383855819702148],["ққа",-13.383886337280272],["▁находи",-13.38389015197754],["▁mét",-13.383893013000488],["acijo",-13.38389778137207],["ইল",-13.383901596069336],["▁moving",-13.383920669555664],["纪念",-13.383923530578612],["အစ",-13.383949279785156],["▁correspondente",-13.383956909179688],["mida",-13.383974075317385],["▁ГА",-13.383978843688965],["▁оператив",-13.383990287780762],["▁منفی",-13.384008407592772],["▁केल्या",-13.38400936126709],["中华",-13.384021759033203],["২৭",-13.384038925170898],["などに",-13.384065628051758],["ચર",-13.384114265441896],["▁garam",-13.38412857055664],["▁인사",-13.38413143157959],["gänge",-13.384140014648438],["▁Ministro",-13.38414192199707],["▁ጥሪ",-13.384147644042969],["▁ವ್ಯ",-13.384173393249512],["▁бомба",-13.384181022644045],["သိမ္း",-13.384185791015623],["▁بالح",-13.38422393798828],["▁dobila",-13.384238243103027],["нях",-13.384279251098633],["icae",-13.384285926818848],["boleh",-13.38429069519043],["忘了",-13.384331703186035],["צרי",-13.384356498718262],["▁টা",-13.38436794281006],["යන්ගේ",-13.384370803833008],["▁تولد",-13.384382247924805],["▁myötä",-13.38438320159912],["▁Almaniya",-13.384384155273438],["▁1997.",-13.384397506713867],["▁Ceza",-13.384401321411133],["▁историја",-13.38440227508545],["▁εταιρεία",-13.384408950805664],["▁دورہ",-13.384408950805664],["skurs",-13.384410858154297],["▁allá",-13.38444709777832],["▁berda",-13.384449005126951],["ርድ",-13.384462356567385],["▁metrov",-13.384482383728027],["▁MOD",-13.384483337402344],["▁naredil",-13.384483337402344],["ाध्यक्ष",-13.384495735168455],["▁projeler",-13.384514808654783],["ستخدم",-13.38451862335205],["چاق",-13.384533882141112],["▁духа",-13.384535789489746],["▁Jeshi",-13.384546279907228],["ڪڙ",-13.384552001953123],["▁Visu",-13.384559631347656],["▁дому",-13.384573936462402],["▁alcanzar",-13.384577751159668],["▁hlut",-13.38458251953125],["ące",-13.384592056274414],["ביע",-13.384613990783691],["ινο",-13.384617805480955],["▁түсін",-13.38461971282959],["▁දුක",-13.384626388549805],["▁pengene",-13.384637832641602],["▁കുറച്ചു",-13.38464641571045],["有利于",-13.384647369384766],["賴",-13.384651184082031],["彻底",-13.38466453552246],["▁siedzi",-13.384671211242676],["siyasının",-13.384678840637209],["möglichkeiten",-13.38469123840332],["▁pridržana",-13.38469123840332],["▁Ιουλίου",-13.38469123840332],["▁συνθήκες",-13.38469123840332],["▁навистина",-13.38469123840332],["▁Փաշինյանը",-13.38469123840332],["▁اوباما",-13.38469123840332],["▁मोठ्या",-13.38469123840332],["▁এপ্রিল",-13.38469123840332],["▁প্রতিনিধি",-13.38469123840332],["កីឡាករ",-13.384692192077637],["▁Sándor",-13.384693145751951],["▁δύναμη",-13.384693145751951],["雜誌",-13.384693145751951],["▁inversión",-13.384703636169434],
["▁აქვთ",-13.384708404541016],["▁Pérez",-13.384716033935549],["▁Aceh",-13.384716987609863],["演员",-13.384716987609863],["▁Grün",-13.38471794128418],["▁vysoké",-13.384718894958496],["▁माझे",-13.384719848632812],["▁արագ",-13.384720802307127],["展覽",-13.384720802307127],["entrepreneur",-13.384727478027344],["EVER",-13.38473129272461],["▁gwar",-13.38473415374756],["▁ارشاد",-13.38475227355957],["▁tekijä",-13.38477897644043],["▁פלא",-13.38477897644043],["▁ستاسو",-13.38477897644043],["ଯାଏ",-13.384785652160645],["▁Semana",-13.384787559509276],["▁hangulat",-13.384800910949709],["DRE",-13.384801864624023],["ittain",-13.384803771972656],["ţiune",-13.384808540344238],["▁datelor",-13.384814262390137],["▁ബ്ലോഗ്",-13.384824752807615],["onnement",-13.384831428527832],["▁üyeleri",-13.384832382202148],["▁SON",-13.384840965270996],["▁עשוי",-13.384844779968262],["്യൂട്ട",-13.384861946105955],["кірава",-13.384888648986816],["▁načel",-13.384888648986816],["▁рухани",-13.38489055633545],["▁sessió",-13.384896278381348],["מעלה",-13.38489818572998],["▁Visit",-13.384902000427246],["solut",-13.38491153717041],["stens",-13.38491153717041],["▁tract",-13.384912490844728],["pály",-13.38491439819336],["jeno",-13.384920120239258],["ണ്ടു",-13.384923934936523],["effetto",-13.384940147399902],["▁muscular",-13.385000228881836],["▁Saksa",-13.385001182556152],["यते",-13.38500690460205],["▁показ",-13.385016441345217],["زە",-13.38503074645996],["▁кең",-13.385034561157228],["定義",-13.385035514831545],["▁მნიშვნელოვანი",-13.385043144226074],["▁viven",-13.385046005249023],["PAC",-13.385076522827148],["▁가정",-13.385076522827148],["▁суди",-13.385086059570312],["▁ilikuwa",-13.385095596313477],["anlæg",-13.385109901428224],["▁편집",-13.38511562347412],["▁људима",-13.385137557983398],["▁јуни",-13.38514518737793],["۴۰",-13.38515281677246],["▁arbeidet",-13.38515567779541],["▁добија",-13.385177612304688],["кага",-13.38517951965332],["cego",-13.385193824768066],["대한",-13.38520336151123],["পন",-13.385207176208496],["▁هيون",-13.385207176208496],["ቀን",-13.385215759277344],["φυλακ",-13.385221481323242],["▁Napi",-13.385226249694824],["▁맥",-13.385244369506836],["▁Κατ",-13.385275840759276],["Tor",-13.385300636291504],["ఛ",-13.385334968566896],["၁၄",-13.385372161865234],["TIC",-13.385422706604004],["▁sellist",-13.3854341506958],["▁bangun",-13.385443687438965],["▁kullanımı",-13.38547706604004],["MBA",-13.38548755645752],["▁ương",-13.385494232177734],["maton",-13.38552188873291],["▁странах",-13.385583877563477],["júcich",-13.385589599609377],["▁prave",-13.385591506958008],["▁hảo",-13.385601997375488],["▁selve",-13.385618209838867],["ຄະນະ",-13.385622024536133],["বহ",-13.38562297821045],["teken",-13.385652542114258],["▁wykonane",-13.385666847229004],["日から",-13.385713577270508],["rāda",-13.385738372802734],["▁berikutnya",-13.385743141174316],["arázs",-13.385744094848633],["▁එළ",-13.385744094848633],["ԱՎ",-13.385765075683594],["▁رفض",-13.38576889038086],["▁european",-13.385781288146973],["▁maradt",-13.385786056518556],["▁telt",-13.385787963867188],["▁هستم",-13.385808944702148],["▁pisar",-13.385811805725098],["match",-13.38585376739502],["▁noter",-13.385859489440918],["▁الحالي",-13.38586139678955],["תפו",-13.385863304138184],["▁Passe",-13.38586711883545],["▁ເວລາ",-13.385891914367676],["สําเร็จ",-13.385894775390623],["▁sektori",-13.385912895202637],["ೀರಿ",-13.385940551757812],["▁analize",-13.385951042175291],["▁prachtig",-13.385972023010254],["▁මුල",-13.385980606079102],["▁Toni",-13.38599681854248],["dame",-13.386014938354492],["rating",-13.386022567749023],["▁colegi
",-13.386032104492188],["リア",-13.38603401184082],["ซี่",-13.386041641235352],["真实",-13.386046409606934],["▁povis",-13.386052131652832],["אוו",-13.386061668395996],["ېتى",-13.386063575744627],["▁стандард",-13.386089324951172],["偶",-13.386090278625488],["Фор",-13.38609790802002],["▁osoa",-13.3861083984375],["▁germana",-13.38613224029541],["larım",-13.386137962341309],["다니",-13.386143684387209],["▁corporal",-13.386152267456056],["▁celou",-13.38616943359375],["ものは",-13.386191368103027],["▁rodi",-13.386195182800291],["▁Šal",-13.386197090148926],["ఫో",-13.386198043823242],["▁редакция",-13.386211395263672],["▁สร้าง",-13.38623332977295],["lapan",-13.386247634887695],["▁мэдэх",-13.386259078979492],["يشن",-13.386270523071287],["▁프리",-13.386273384094238],["现金",-13.386306762695312],["▁ന്",-13.386314392089844],["แลนด์",-13.386335372924805],["以外の",-13.38633632659912],["プレゼント",-13.38633918762207],["ସମ୍ପାଦନା",-13.38634967803955],["▁Conselho",-13.38634967803955],["▁území",-13.38634967803955],["▁будівель",-13.38634967803955],["▁تڏهن",-13.38634967803955],["▁କଟକ",-13.38634967803955],["▁ସ୍ୱାସ୍ଥ୍ୟ",-13.38634967803955],["▁రిలీజ్",-13.38634967803955],["▁የውጭ",-13.38634967803955],["▁ពលរដ្ឋ",-13.38634967803955],["▁Football",-13.38635540008545],["▁итоге",-13.386361122131348],["รีวิว",-13.386372566223145],["▁תוכן",-13.386374473571776],["▁شركات",-13.386374473571776],["▁сподели",-13.38637638092041],["▁나의",-13.386381149291992],["▁changed",-13.386382102966309],["▁사례",-13.386394500732422],["▁jízd",-13.38640022277832],["▁måndag",-13.386406898498535],["▁гори",-13.386406898498535],["▁impreuna",-13.386407852172852],["▁kerjasama",-13.386420249938965],["▁specjali",-13.386429786682127],["▁житті",-13.386435508728027],["rera",-13.38645362854004],["▁recicla",-13.386454582214355],["▁Arusha",-13.386455535888672],["▁polícia",-13.38646125793457],["mpuan",-13.38647747039795],["▁Jubil",-13.386483192443848],["▁technologii",-13.38648509979248],["贷",-13.38650131225586],["גט",-13.386510848999023],["▁revolution",-13.386514663696287],["ინგ",-13.386524200439451],["▁Úgy",-13.386527061462402],["▁मान्यता",-13.38653564453125],["▁chorob",-13.386557579040527],["财产",-13.386574745178224],["▁പ്രിയ",-13.386578559875488],["reif",-13.386589050292969],["▁நினைவு",-13.386592864990234],["aadka",-13.386602401733398],["▁rito",-13.386632919311523],["▁grind",-13.386636734008787],["▁messo",-13.386646270751951],["▁ഹെ",-13.3866548538208],["▁Italy",-13.386670112609863],["истер",-13.386677742004396],["▁ravni",-13.386679649353027],["ապատ",-13.386683464050291],["▁ക്ല",-13.386700630187988],["▁Ivo",-13.386720657348633],["جام",-13.38672161102295],["▁Ändra",-13.386731147766112],["的努力",-13.386778831481934],["▁नो",-13.38682746887207],["lanıyor",-13.386828422546388],["外面",-13.386837005615234],["ほしい",-13.386856079101562],["▁алтын",-13.386874198913574],["မလဲ",-13.386876106262209],["▁klabu",-13.386880874633787],["19)",-13.386893272399902],["▁reviews",-13.386913299560549],["▁atawa",-13.386916160583496],["തുട",-13.386947631835938],["ლში",-13.386960983276367],["proof",-13.387002944946287],["▁rich",-13.387003898620604],["▁Tänk",-13.387009620666504],["▁Volume",-13.387009620666504],["▁Explo",-13.387022018432615],["ლენ",-13.387029647827148],["▁Lego",-13.387057304382324],["▁eiendom",-13.387089729309082],["▁pepe",-13.387091636657717],["मिन",-13.387097358703612],["▁निर्देशक",-13.387102127075195],["ΕΥ",-13.387139320373535],["▁scandal",-13.387150764465332],["▁remarc",-13.387195587158203],["जल",-13.387206077575684],["വ്വ",-13.387224197387695],["▁ລາ",-13.38723087310791],["▁הנו",-13.38723278045654
5],["อําเภอ",-13.387240409851074],["වම",-13.387293815612791],["▁kontrola",-13.387309074401855],["ቀጥ",-13.387310981750488],["əncə",-13.387311935424805],["▁konkursu",-13.387313842773438],["▁helpful",-13.387338638305664],["▁sadržaja",-13.387346267700195],["ელა",-13.387347221374512],["زہ",-13.387351989746094],["ätä",-13.387359619140623],["ګل",-13.387372016906738],["ляв",-13.387392044067385],["จัง",-13.387408256530762],["▁ආවේ",-13.387449264526367],["ความปลอดภัย",-13.387468338012695],["▁வழக்கு",-13.387481689453123],["▁मिति",-13.38748836517334],["เวิร์ม",-13.3875150680542],["▁Arif",-13.387532234191896],["arbete",-13.387534141540527],["local",-13.38753890991211],["džia",-13.38754177093506],["▁gazeta",-13.387547492980955],["▁remind",-13.387585639953612],["馬上",-13.387591361999512],["rocz",-13.387617111206056],["Che",-13.38762092590332],["と考え",-13.387638092041016],["ИКА",-13.387645721435549],["णारे",-13.387650489807127],["▁işte",-13.387653350830078],["ලින්",-13.38766098022461],["▁ضبط",-13.387701988220217],["▁riik",-13.387702941894531],["शेष",-13.387703895568848],["▁incepe",-13.387770652770996],["▁болго",-13.387775421142578],["ակցություն",-13.387800216674805],["άρια",-13.38780689239502],["▁executa",-13.387818336486816],["外出",-13.387819290161133],["▁บทความ",-13.387822151184082],["edz",-13.38783073425293],["▁saabsan",-13.387843132019045],["的确",-13.387847900390623],["▁۲۵",-13.387860298156738],["आप",-13.38786506652832],["▁бетон",-13.38786792755127],["▁մահ",-13.387876510620115],["แปล",-13.387877464294434],["▁카드",-13.38788604736328],["▁ಹರಿ",-13.387892723083496],["▁примеру",-13.387896537780762],["▁ຕົວ",-13.38790225982666],["▁moyens",-13.387910842895508],["මෙන්",-13.387914657592772],["▁nili",-13.387935638427734],["▁pikir",-13.387937545776367],["▁2012)",-13.387948036193848],["▁враг",-13.387951850891112],["▁podrán",-13.387977600097656],["dução",-13.387994766235352],["▁formy",-13.3880033493042],["▁новия",-13.388007164001465],["▁Fenerbahçe",-13.38801097869873],["▁kịp",-13.38801097869873],["▁müsahibə",-13.38801097869873],["▁байдлыг",-13.38801097869873],["▁жергиликтүү",-13.38801097869873],["▁несмотря",-13.38801097869873],["▁විවාහ",-13.38801097869873],["▁በማድረግ",-13.38801097869873],["흡",-13.38801097869873],["▁análisis",-13.388011932373049],["▁вновь",-13.388011932373049],["▁crescimento",-13.388017654418944],["[1].",-13.388019561767578],["▁Согласно",-13.388023376464844],["▁آفتاب",-13.38802719116211],["ခဲ႔",-13.388028144836426],["▁нэгдсэн",-13.388028144836426],["▁Świat",-13.388032913208008],["▁ئۇيغۇرچە",-13.388032913208008],["▁αναφέρει",-13.38804054260254],["▁дотоод",-13.38804054260254],["▁άλλους",-13.388047218322754],["▁арасындағы",-13.388050079345703],["▁Statistik",-13.388052940368652],["▁ಹೋದ",-13.388055801391602],["▁permettant",-13.388057708740234],["▁garantire",-13.3880615234375],["▁utazás",-13.388076782226562],["พระองค์",-13.388092041015623],["ရှင်း",-13.388096809387209],["▁ifanc",-13.388103485107422],["ноос",-13.388118743896484],["▁povsem",-13.388120651245115],["▁omul",-13.38813304901123],["▁Palau",-13.388138771057127],["ഗോ",-13.388148307800291],["▁pornstar",-13.388160705566406],["232",-13.38816261291504],["▁indenfor",-13.388163566589355],["ပိ",-13.388176918029783],["▁الحزب",-13.38818073272705],["ījās",-13.388197898864746],["▁esperti",-13.388206481933594],["▁trebalo",-13.38821029663086],["手中",-13.388214111328123],["וועל",-13.388216972351074],["▁القلب",-13.388218879699709],["▁లైన్",-13.388233184814451],["▁Camiño",-13.3882417678833],["▁Alltag",-13.388243675231934],["rigo",-13.388245582580566],["▁смотреть",-13.3882493
97277832],["▁шан",-13.388251304626465],["▁محافظة",-13.38825225830078],["▁wysokiej",-13.388267517089844],["▁нешта",-13.388275146484377],["▁കണ്ടെത്ത",-13.38828182220459],["▁psy",-13.388298034667969],["▁Мария",-13.3883056640625],["▁ngược",-13.388306617736816],["csé",-13.388315200805664],["▁hyvän",-13.38831901550293],["сман",-13.388324737548828],["▁dessert",-13.38832664489746],["قصر",-13.38833236694336],["visible",-13.388334274291992],["بخت",-13.388339042663574],["ಹ್",-13.388372421264648],["ņemot",-13.388373374938965],["GK",-13.38839054107666],["▁опо",-13.388394355773926],["▁wysoki",-13.388397216796877],["almi",-13.388398170471191],["▁بهذه",-13.388399124145508],["领导人",-13.388400077819824],["▁Рай",-13.388401985168455],["▁kanye",-13.388415336608888],["▁vaati",-13.388416290283203],["त्रि",-13.38841724395752],["▁баня",-13.388421058654783],["▁взе",-13.388426780700684],["的路",-13.388429641723633],["▁lauda",-13.388453483581545],["▁rastî",-13.38848114013672],["▁Singapura",-13.3884859085083],["ялі",-13.3885498046875],["的發展",-13.388565063476562],["▁tänu",-13.388568878173828],["▁သီ",-13.388572692871094],["▁Alban",-13.388599395751951],["အစား",-13.388601303100586],["ðist",-13.388611793518066],["▁корм",-13.388614654541016],["नामा",-13.38862133026123],["ंडर",-13.38864040374756],["▁Gad",-13.388646125793455],["ගොඩ",-13.388657569885254],["▁обмін",-13.38867473602295],["▁niyo",-13.388678550720217],["▁Johnny",-13.38869285583496],["▁peine",-13.388694763183594],["▁оларды",-13.388697624206545],["▁processus",-13.388713836669922],["saam",-13.388723373413086],["▁једне",-13.388724327087402],["要注意",-13.388733863830566],["industri",-13.388771057128906],["▁войск",-13.388803482055664],["▁ଅର୍ଥ",-13.38881492614746],["దని",-13.388836860656738],["▁futura",-13.388854026794434],["▁μουσική",-13.388861656188965],["txu",-13.388874053955078],["いただきました",-13.388888359069824],["▁Geri",-13.38889217376709],["▁Facebooku",-13.388903617858888],["▁Existe",-13.388904571533203],["▁terbang",-13.388922691345217],["▁cock",-13.38899040222168],["▁ضروري",-13.389007568359377],["zán",-13.38901138305664],["▁інтерес",-13.38901424407959],["▁lønn",-13.389032363891602],["นว",-13.389058113098145],["insu",-13.389060974121094],["▁Sogn",-13.389062881469728],["▁Lex",-13.38909149169922],["ROD",-13.389092445373535],["పతి",-13.389127731323242],["撮",-13.389140129089355],["▁initiative",-13.389142990112305],["▁Raf",-13.38914394378662],["▁trzyma",-13.389152526855469],["▁INI",-13.389171600341797],["poka",-13.38917350769043],["работа",-13.389190673828123],["▁müəssisə",-13.389220237731934],["▁ചരിത്ര",-13.389222145080566],["гчид",-13.38925552368164],["bound",-13.389281272888184],["kļa",-13.389288902282717],["▁γενικ",-13.38930320739746],["սը",-13.389318466186523],["ર્વ",-13.389328002929688],["mışdı",-13.389361381530762],["▁ഹൃദയ",-13.389361381530762],["▁क्ष",-13.389362335205078],["▁गुरुङ",-13.389363288879396],["ረን",-13.389376640319824],["חנה",-13.38938331604004],["κας",-13.389387130737305],["▁dotyk",-13.389427185058594],["太大",-13.389437675476074],["▁geneem",-13.389470100402832],["മൂല",-13.389472007751465],["ண்டா",-13.38949203491211],["слава",-13.389496803283691],["▁medico",-13.389501571655272],["కాలు",-13.389519691467283],["▁Hopp",-13.389554977416992],["▁સેવા",-13.38956069946289],["▁samun",-13.389570236206056],["▁sóng",-13.389571189880373],["▁complexe",-13.389575958251951],["的城市",-13.389583587646484],["ansvar",-13.3895845413208],["రూప",-13.3895845413208],["▁эмч",-13.389613151550291],["积累",-13.389616012573242],["吓",-13.38962745666504],["頼",-13.389639854431152],["бло",-13.3896427
15454102],["▁సూ",-13.389642715454102],["ສຸກ",-13.389650344848633],["▁ticket",-13.389650344848633],["芸",-13.389654159545898],["កូន",-13.389657974243164],["参观",-13.389660835266112],["สมุนไพร",-13.389674186706545],["ሃይ",-13.389674186706545],["▁Araştırma",-13.389674186706545],["▁ہدایت",-13.389674186706545],["▁सम्भावना",-13.389674186706545],["Ụ",-13.38967514038086],["▁indústria",-13.38967514038086],["▁memberitahu",-13.38967514038086],["▁nepieciešama",-13.38967514038086],["▁nümayiş",-13.38967514038086],["▁asos",-13.389676094055176],["▁ځواب",-13.389676094055176],["▁дополнительно",-13.389678001403809],["หลวงพ่อ",-13.38967990875244],["▁නිකුත්",-13.389692306518556],["▁riso",-13.389695167541504],["▁ашиглах",-13.38969612121582],["▁الإمام",-13.38970184326172],["യെന്ന",-13.389705657958984],["▁تکرار",-13.389705657958984],["▁പുതു",-13.38970947265625],["▁ඡන්ද",-13.38970947265625],["▁10:30",-13.389725685119627],["▁uomo",-13.389727592468262],["▁दोस्तों",-13.389734268188477],["▁aprende",-13.389758110046388],["▁easier",-13.38977336883545],["▁Kemi",-13.389777183532717],["▁മുൻ",-13.389792442321776],["となっています",-13.389796257019045],["刑事",-13.389800071716309],["教えて",-13.38980197906494],["▁heavy",-13.389812469482422],["▁plošč",-13.38981819152832],["▁korupci",-13.389820098876951],["treffen",-13.389836311340332],["uscita",-13.389845848083496],["▁rămâne",-13.38985824584961],["▁تحر",-13.38987922668457],["нета",-13.389880180358888],["ρική",-13.389891624450684],["ొక్క",-13.389892578125],["moč",-13.38989543914795],["ക്കല്",-13.389898300170898],["▁maine",-13.389912605285645],["▁ያላቸው",-13.389927864074709],["טרא",-13.389969825744627],["▁ዲ",-13.389995574951172],["ଜଣ",-13.39000129699707],["▁техните",-13.390008926391602],["▁Russland",-13.390013694763184],["▁перес",-13.39002513885498],["这两",-13.39002513885498],["▁següent",-13.390096664428713],["▁тод",-13.390103340148926],["NICA",-13.39010524749756],["سرا",-13.390110969543455],["▁основи",-13.39011573791504],["▁избора",-13.39013671875],["▁којем",-13.39015769958496],["▁priateľ",-13.390165328979492],["▁Bahar",-13.39016819000244],["▁junho",-13.390192031860352],["▁Burger",-13.390195846557615],["▁กระ",-13.39019775390625],["▁පොදු",-13.390198707580566],["หล่อ",-13.390213966369627],["系统的",-13.39022159576416],["露出",-13.390233993530272],["▁Ара",-13.390240669250488],["रमा",-13.390246391296388],["▁included",-13.390254974365234],["ဖက်",-13.39025592803955],["▁ወጥ",-13.390268325805664],["▁նախաձեռն",-13.390270233154297],["ктуу",-13.390295028686523],["▁təlim",-13.390302658081056],["ပို့",-13.390324592590332],["▁ככל",-13.390341758728027],["▁өндіріс",-13.39034938812256],["ಪರ್",-13.390360832214355],["▁Aufgabe",-13.390374183654783],["新年",-13.390387535095217],["▁серия",-13.39041519165039],["▁frum",-13.390419960021973],["▁طلاق",-13.390437126159668],["▁klikk",-13.390445709228516],["(4)",-13.39044952392578],["▁분들",-13.390498161315918],["▁diagnose",-13.390520095825195],["▁Gök",-13.39055061340332],["kilpailu",-13.39055347442627],["▁strategia",-13.39055347442627],["yczna",-13.390563011169434],["▁cijena",-13.390584945678713],["shad",-13.390591621398926],["vaart",-13.390602111816406],["▁univerz",-13.390619277954102],["ելը",-13.390642166137695],["职责",-13.390642166137695],["▁بله",-13.39068603515625],["越来越多",-13.390690803527832],["▁OCH",-13.390692710876465],["▁kuzi",-13.390701293945312],["▁MF",-13.39072608947754],["▁জুন",-13.39073657989502],["аючы",-13.390745162963867],["ZIONE",-13.390771865844728],["▁చివరి",-13.390795707702637],["▁हल्",-13.390802383422852],["▁fjell",-13.390825271606444],["▁หนังโป๊",-13.390829086303
713],["▁ඉගෙන",-13.39084243774414],["▁Samen",-13.390851974487305],["▁diep",-13.390860557556152],["▁omega",-13.39089012145996],["пити",-13.390899658203123],["ql",-13.390912055969238],["യിലാണ്",-13.3909273147583],["▁consuma",-13.390936851501465],["▁ελληνικό",-13.390939712524414],["▁Кир",-13.390939712524414],["ブラ",-13.390969276428224],["शिवाय",-13.390993118286133],["▁Verbind",-13.390995025634766],["▁ahead",-13.391002655029297],["bieg",-13.39100456237793],["▁strop",-13.391018867492676],["▁kuruluş",-13.391019821166992],["▁grupės",-13.391033172607422],["ものの",-13.391051292419434],["▁Bhai",-13.391079902648926],["▁Tuli",-13.39112949371338],["780",-13.391133308410645],["▁LOS",-13.391146659851074],["ಟಿಂಗ್",-13.391151428222656],["Sie",-13.391172409057615],["unkat",-13.391179084777832],["ማዕ",-13.391190528869627],["▁řízení",-13.391191482543944],["1945",-13.39119815826416],["▁noapte",-13.391221046447754],["analisi",-13.391228675842283],["心得",-13.391247749328612],["▁దీని",-13.39125156402588],["инка",-13.391257286071776],["二是",-13.391268730163574],["▁strie",-13.391271591186523],["分布",-13.391300201416016],["攻擊",-13.391303062438965],["評估",-13.39130687713623],["▁dedicada",-13.391307830810549],["法规",-13.39130973815918],["μαθ",-13.391317367553713],["仇",-13.391318321228027],["忌",-13.391318321228027],["▁pastel",-13.391331672668455],["課題",-13.39133358001709],["▁떨어",-13.391337394714355],["ท้องถิ่น",-13.39134120941162],["▁Diabetes",-13.39134120941162],["▁Truyền",-13.39134120941162],["▁felnőtt",-13.39134120941162],["▁pieniądze",-13.39134120941162],["▁Ысык",-13.39134120941162],["▁יצחק",-13.39134120941162],["▁आइतबार",-13.39134120941162],["▁اطراف",-13.391342163085938],["▁verwenden",-13.391343116760254],["▁කිසිදු",-13.391347885131836],["ภู",-13.391351699829102],["▁detalhes",-13.391352653503418],["▁어디",-13.391358375549316],["utha",-13.391363143920898],["瑞士",-13.391364097595217],["ገኘው",-13.391366004943848],["▁除了",-13.391369819641112],["kabidhi",-13.391392707824709],["▁ష",-13.391395568847656],["tunni",-13.391396522521973],["▁pariatur",-13.391400337219238],["▁цікава",-13.391400337219238],["▁sedmi",-13.391403198242188],["ītu",-13.39141845703125],["odaw",-13.391443252563477],["▁sira",-13.391453742980955],["▁betegség",-13.391465187072754],["▁रद्द",-13.391483306884766],["▁ښک",-13.391494750976562],["▁dày",-13.39149570465088],["▁piloto",-13.391505241394045],["▁Tham",-13.391523361206056],["様に",-13.39155387878418],["▁transmite",-13.39159870147705],["▁বিভাগের",-13.391606330871582],["▁المادة",-13.39161777496338],["chine",-13.39163303375244],["▁wensen",-13.39166259765625],["太阳",-13.391674995422363],["හ්",-13.391683578491213],["给大家",-13.391733169555664],["▁Nelson",-13.39173984527588],["▁datumoj",-13.391743659973145],["መል",-13.391753196716309],["лалын",-13.391761779785156],["▁kodus",-13.391790390014648],["▁yrkes",-13.391793251037598],["日常生活",-13.39179801940918],["▁підпис",-13.391798973083496],["▁мозг",-13.39181423187256],["▁پیامبر",-13.391823768615724],["نهج",-13.391833305358888],["▁Very",-13.391833305358888],["▁هیات",-13.391841888427734],["▁الزوج",-13.391849517822266],["▁विधान",-13.391849517822266],["guan",-13.391860961914062],["hukum",-13.391892433166504],["通道",-13.39189910888672],["▁riittä",-13.391913414001465],["hā",-13.39191436767578],["▁indre",-13.391929626464844],["▁Центральн",-13.391942024230955],["▁artística",-13.391944885253906],["▁slett",-13.391947746276855],["ustatud",-13.39195442199707],["▁хранене",-13.391966819763184],["ીની",-13.39197063446045],["▁terv",-13.391975402832031],["បង្ក",-13.391982078552246],["▁naturales",-13.39198
4939575195],["▁सुना",-13.391997337341309],["被人",-13.39199924468994],["▁bintang",-13.392001152038574],["▁passant",-13.39201831817627],["▁ئۇلار",-13.3920259475708],["ങ്ങളാണ്",-13.39206600189209],["▁시간이",-13.39206600189209],["▁משפחה",-13.39207649230957],["▁selja",-13.392078399658203],["▁قرن",-13.392111778259276],["▁თვის",-13.392120361328123],["скор",-13.39212417602539],["▁чад",-13.39213752746582],["við",-13.392145156860352],["▁doku",-13.39215850830078],["ohja",-13.392159461975098],["čené",-13.39216136932373],["▁разходи",-13.39216136932373],["▁կատար",-13.39216136932373],["▁viską",-13.392166137695312],["▁Battle",-13.392170906066896],["▁campi",-13.39217472076416],["գան",-13.392189979553224],["▁מהמ",-13.392226219177246],["▁12%",-13.39222812652588],["▁Shqiptar",-13.392237663269045],["▁esat",-13.39224338531494],["▁Erde",-13.392276763916016],["ប្រាក់",-13.392289161682127],["▁Beng",-13.392305374145508],["▁قائمة",-13.392306327819824],["ନାହିଁ",-13.392330169677734],["▁АК",-13.392333984375],["▁pakket",-13.392339706420898],["▁lauku",-13.392345428466797],["▁layar",-13.392358779907228],["ards",-13.392376899719238],["▁Imper",-13.392376899719238],["kalt",-13.392412185668944],["▁හැක",-13.392436981201172],["▁أرض",-13.392437934875488],["גם",-13.39243984222412],["המ",-13.3924560546875],["▁سروس",-13.392463684082031],["をかけ",-13.392468452453612],["▁monar",-13.39248752593994],["race",-13.392513275146484],["laust",-13.392518997192385],["ៀប",-13.392520904541016],["ŽE",-13.392536163330078],["▁prvih",-13.392547607421877],["▁rezo",-13.392550468444824],["▁빈",-13.392555236816406],["▁계약",-13.392559051513672],["ที่สามารถ",-13.39256191253662],["ණ්",-13.39256763458252],["▁시험",-13.392601013183594],["έζ",-13.39260959625244],["จง",-13.392611503601074],["▁1.9",-13.392620086669922],["layotgan",-13.39262866973877],["▁ålder",-13.392632484436035],["▁pièces",-13.392648696899414],["uthu",-13.392657279968262],["▁과학",-13.39268398284912],["▁מיוחד",-13.392720222473145],["гуля",-13.392733573913574],["▁Position",-13.392741203308104],["జన్",-13.392742156982422],["▁trase",-13.392762184143066],["下一",-13.3927640914917],["▁ქვეყანა",-13.392769813537598],["green",-13.392780303955078],["▁конкур",-13.39278793334961],["로는",-13.392789840698242],["จอ",-13.392807960510254],["жэ",-13.392814636230469],["קח",-13.39283847808838],["ിട്ടുള്ള",-13.392847061157228],["▁organisasjon",-13.392857551574709],["▁ஜெ",-13.392867088317873],["▁Clara",-13.392871856689451],["▁Кры",-13.3928804397583],["க்குள்",-13.392884254455566],["▁Мит",-13.392884254455566],["धर्म",-13.392898559570312],["▁хяналт",-13.392914772033691],["▁ഭയ",-13.392914772033691],["▁ಅವನ",-13.392927169799805],["られています",-13.392940521240234],["▁Väli",-13.392955780029297],["▁жүйесін",-13.392956733703612],["▁omista",-13.39296054840088],["▁Okt",-13.39296531677246],["▁Andy",-13.392970085144045],["▁ئاپ",-13.392991065979004],["FAN",-13.39299488067627],["範囲",-13.39299774169922],["墊",-13.392998695373535],["▁shka",-13.392999649047852],["腺",-13.393001556396484],["详情",-13.393006324768066],["▁बेहतर",-13.393007278442385],["▁ಉಚಿತ",-13.393010139465332],["▁piękne",-13.393011093139648],["▁саобраћај",-13.393011093139648],["▁Նախագահ",-13.393011093139648],["▁صحافی",-13.393011093139648],["▁અનુભવ",-13.393011093139648],["лям",-13.393012046813965],["▁melainkan",-13.393012046813965],["▁Mūsu",-13.39301300048828],["▁Груевски",-13.393014907836914],["▁uppdatera",-13.393017768859863],["▁höchste",-13.393020629882812],["▁सम्पूर्ण",-13.393020629882812],["▁mengine",-13.393025398254396],["▁Prihlásiť",-13.393033027648926],["▁दिनुहोस्",-13.39303493499
756],["▁özgü",-13.39304256439209],["▁केंद्रीय",-13.39304256439209],["▁الفت",-13.39304542541504],["▁उद्देश्य",-13.39305591583252],["▁Enkel",-13.393061637878418],["▁وکیل",-13.393064498901367],["▁genommen",-13.393073081970217],["▁होटल",-13.393080711364746],["▁есте",-13.393083572387695],["▁ਪਹੁੰਚ",-13.393083572387695],["▁bikar",-13.39311981201172],["▁показник",-13.393126487731934],["▁osobowych",-13.393131256103516],["▁mitten",-13.393144607543944],["сите",-13.393157958984377],["▁Koncert",-13.393162727355955],["▁Auftrag",-13.393178939819336],["▁НК",-13.3931884765625],["သတ်",-13.393190383911133],["iero",-13.393235206604004],["bate",-13.393244743347168],["▁Ndi",-13.393260955810549],["▁укреп",-13.393270492553713],["ன்ஸ்",-13.393280029296877],["▁hewan",-13.39329433441162],["▁שמח",-13.393298149108888],["▁действа",-13.393335342407228],["salon",-13.39335823059082],["чния",-13.393399238586426],["▁פול",-13.39340877532959],["▁Baden",-13.393418312072754],["▁dick",-13.393424034118652],["▁изврши",-13.393446922302246],["chtung",-13.393485069274902],["▁הערב",-13.393489837646484],["代码",-13.393498420715332],["stî",-13.39350128173828],["▁unije",-13.393502235412598],["▁รักษา",-13.393534660339355],["▁арыз",-13.393592834472656],["を作って",-13.393596649169922],["точна",-13.393600463867188],["▁bulat",-13.393604278564451],["લખ",-13.393614768981934],["სვ",-13.39362907409668],["づ",-13.393635749816896],["▁მოწ",-13.393648147583008],["多年的",-13.393661499023438],["▁minua",-13.393662452697754],["▁प्रकाशन",-13.393667221069336],["▁tarixli",-13.393670082092283],["▁טבע",-13.393671989440918],["▁politieke",-13.39367389678955],["▁имала",-13.393683433532717],["szék",-13.393685340881348],["▁גבי",-13.393694877624512],["▁flora",-13.393726348876951],["ՅԱՆ",-13.393729209899902],["۹۶",-13.393760681152344],["▁fry",-13.393796920776367],["▁Betriebs",-13.393817901611328],["ляць",-13.393835067749023],["▁odigra",-13.393840789794922],["9,00",-13.39384937286377],["▁اسیا",-13.393858909606934],["інді",-13.3938627243042],["▁croc",-13.393863677978516],["parent",-13.39387321472168],["ndab",-13.393877983093262],["▁કલાક",-13.393891334533691],["కొండ",-13.393895149230955],["ابر",-13.393906593322754],["▁nood",-13.39390754699707],["▁tavaly",-13.393918991088867],["ημένο",-13.393927574157717],["Българ",-13.393945693969728],["▁choroby",-13.393950462341309],["ම්ම",-13.393959999084473],["▁өзін",-13.393994331359863],["但也",-13.394007682800291],["೭",-13.394009590148926],["درج",-13.394023895263672],["▁трап",-13.394028663635254],["にした",-13.394048690795898],["ىدى",-13.394052505493164],["ഗണ",-13.394052505493164],["▁moyenne",-13.39405632019043],["ٽو",-13.39406394958496],["ကနေ",-13.394076347351074],["▁డ్ర",-13.394097328186035],["្វ",-13.394134521484377],["ស្រុក",-13.39413833618164],["▁стоки",-13.394142150878906],["▁portalu",-13.394165992736816],["ଯିବ",-13.394193649291992],["▁போல்",-13.394201278686523],["প্ত",-13.394210815429688],["▁UTC",-13.394225120544434],["veilig",-13.39423656463623],["▁taler",-13.39423942565918],["acheter",-13.394272804260254],["▁הלב",-13.394278526306152],["ывая",-13.394298553466797],["▁lequel",-13.3943452835083],["▁ਬੋਲ",-13.39437484741211],["ตั",-13.39438533782959],["▁Siku",-13.394386291503906],["ることが",-13.39438819885254],["▁Noch",-13.39440631866455],["▁Lumi",-13.394410133361816],["کین",-13.394417762756348],["ที่มีความ",-13.39441967010498],["χου",-13.394426345825195],["▁leiden",-13.394428253173828],["▁Чех",-13.39443588256836],["تەك",-13.394465446472168],["▁رق",-13.394488334655762],["loch",-13.39449405670166],["▁эффективно",-13.39449977874756],["▁Zy",-13.39
4511222839355],["тивен",-13.394529342651367],["დგომ",-13.39453125],["▁dokumenter",-13.39453411102295],["ကဗ်ာ",-13.394542694091797],["abri",-13.394546508789062],["▁exclusiva",-13.39454746246338],["▁പിടിച്ച",-13.394550323486328],["ानी",-13.394551277160645],["▁værelser",-13.394553184509276],["urb",-13.39456844329834],["ядна",-13.394570350646973],["ניס",-13.39457893371582],["▁хооронд",-13.394588470458984],["ayu",-13.394594192504885],["▁ଜନ",-13.394620895385742],["▁alti",-13.39464282989502],["▁cansa",-13.394676208496094],["แดด",-13.394681930541992],["ແຈ້ງ",-13.394681930541992],["អគ្គ",-13.394681930541992],["▁Herausforderung",-13.394682884216309],["▁попа",-13.394682884216309],["▁टिप्पणी",-13.394682884216309],["▁Aujourd",-13.394683837890623],["▁hneď",-13.394683837890623],["▁ओळख",-13.394683837890623],["▁शुरुआत",-13.394683837890623],["▁kuanzia",-13.394685745239258],["▁եկել",-13.39468765258789],["▁rahoitus",-13.394692420959473],["▁خطرناک",-13.394694328308104],["▁увидеть",-13.394695281982422],["▁విధంగా",-13.39470100402832],["ΘΕ",-13.394707679748535],["▁санитар",-13.3947114944458],["materjal",-13.39472484588623],["▁سیاسي",-13.394725799560549],["ировка",-13.394753456115724],["▁representativ",-13.39475917816162],["▁landi",-13.394760131835938],["考試",-13.394770622253418],["▁Кыз",-13.394771575927734],["ดีๆ",-13.394787788391112],["調べ",-13.394790649414062],["▁philosophi",-13.394807815551758],["ສຸດ",-13.39481258392334],["▁anbefaler",-13.394820213317873],["▁مجاز",-13.394826889038086],["▁不過",-13.394843101501465],["िक्स",-13.394850730895996],["телство",-13.394878387451172],["ologische",-13.3948974609375],["▁математика",-13.394899368286133],["▁podcast",-13.39490032196045],["දේ",-13.394914627075195],["vára",-13.39492416381836],["▁olymp",-13.394943237304688],["▁шт",-13.394943237304688],["▁imaxes",-13.394946098327637],["cél",-13.39494800567627],["▁հոդվածի",-13.394948959350586],["рате",-13.394951820373535],["▁obsta",-13.394967079162598],["▁lagen",-13.394980430603027],["▁ծառ",-13.39498519897461],["▁metalli",-13.394989013671877],["▁pasada",-13.394990921020508],["ાંત",-13.395015716552734],["▁scoprire",-13.395015716552734],["▁кімнат",-13.395018577575684],["iņi",-13.395020484924316],["▁yalan",-13.395021438598633],["되며",-13.395025253295898],["beszéd",-13.39503002166748],["▁magia",-13.39503574371338],["▁satellit",-13.395045280456545],["▁دهیم",-13.395049095153809],["▁Gipuzkoako",-13.395059585571287],["▁Klu",-13.395059585571287],["▁رساند",-13.395082473754885],["kelia",-13.39509391784668],["hivatal",-13.395094871520996],["▁बैठ",-13.395100593566896],["ייס",-13.395101547241213],["కుల",-13.395122528076172],["cionit",-13.395130157470703],["▁Богдан",-13.395130157470703],["▁взяли",-13.395130157470703],["▁meraih",-13.395150184631348],["▁souci",-13.395150184631348],["▁критик",-13.395167350769045],["▁romance",-13.395176887512209],["▁nepro",-13.395193099975586],["▁القا",-13.395215034484863],["ANE",-13.395235061645508],["▁approfondi",-13.395255088806152],["gestuur",-13.39526081085205],["▁đê",-13.39528751373291],["▁स्वत",-13.395293235778809],["ไพ",-13.395310401916504],["▁legume",-13.39536190032959],["▁Pár",-13.395366668701172],["▁magkano",-13.39537239074707],["▁तुला",-13.395387649536133],["▁जिल्लामा",-13.39538860321045],["▁Yamaha",-13.395393371582031],["今天的",-13.395403861999512],["▁existent",-13.395407676696776],["▁alim",-13.39541244506836],["கின்ற",-13.39541721343994],["▁differenti",-13.395421028137209],["などが",-13.39543914794922],["▁Estudi",-13.395451545715332],["▁destinado",-13.395483016967772],["▁rejt",-13.395484924316406],["ឥ",-13.39549
0646362305],["▁strategic",-13.39550495147705],["▁nuen",-13.395557403564451],["▁elf",-13.39556884765625],["ાઓ",-13.395570755004885],["rund",-13.395586013793944],["hila",-13.39559268951416],["▁отдыха",-13.39559268951416],["pale",-13.395604133605955],["iewe",-13.395607948303224],["▁svare",-13.395623207092283],["ples",-13.395657539367676],["acağınız",-13.395660400390623],["▁devine",-13.39569091796875],["stavite",-13.395713806152344],["ರಾಮ",-13.395721435546877],["▁låta",-13.395727157592772],["▁தொழில்",-13.395730018615724],["Europe",-13.395742416381836],["▁enfermedad",-13.395755767822266],["hydrat",-13.395757675170898],["tempor",-13.395774841308594],["▁Alexandr",-13.395777702331545],["▁besede",-13.39577865600586],["štva",-13.395781517028809],["▁목적",-13.395788192749023],["▁meiri",-13.395827293395996],["▁природе",-13.395835876464844],["唯",-13.395845413208008],["▁słucha",-13.395861625671388],["ერო",-13.395865440368652],["▁captura",-13.395909309387209],["λογο",-13.395913124084473],["▁experiencias",-13.395930290222168],["éirí",-13.3959379196167],["▁tuyên",-13.395950317382812],["▁Dolor",-13.395962715148926],["ejä",-13.395974159240724],["▁၂၀၁၈",-13.395986557006836],["▁цэнтр",-13.396036148071287],["ларда",-13.39605712890625],["▁Irland",-13.396063804626465],["terem",-13.396071434020996],["XII",-13.396074295043944],["tuksessa",-13.396078109741213],["▁алыс",-13.396079063415527],["stander",-13.396116256713867],["▁barva",-13.396116256713867],["▁البن",-13.396125793457031],["▁shared",-13.39613437652588],["▁oblik",-13.39613914489746],["אָרט",-13.39614200592041],["▁имаш",-13.396143913269045],["▁நகர",-13.396150588989258],["▁قامت",-13.396153450012209],["▁프",-13.396154403686523],["esnis",-13.396183013916016],["▁Хятад",-13.396188735961914],["fogás",-13.396202087402344],["stoffe",-13.396203994750977],["▁небо",-13.39620876312256],["ttinen",-13.396217346191406],["samfunn",-13.396225929260254],["▁yılın",-13.396251678466797],["▁कन्",-13.396288871765137],["▁લઈ",-13.396324157714844],["▁технологий",-13.39632511138916],["▁želja",-13.396326065063477],["diť",-13.396332740783691],["▁drago",-13.396339416503906],["▁новина",-13.396339416503906],["▁հարցերի",-13.396342277526855],["舞蹈",-13.396342277526855],["kål",-13.396350860595703],["▁MALAYSIA",-13.396358489990234],["▁egyedül",-13.396358489990234],["▁kimataifa",-13.396358489990234],["▁zmanjša",-13.396358489990234],["▁գտնվում",-13.396358489990234],["▁խաղաղ",-13.396358489990234],["▁پشتیبانی",-13.396358489990234],["▁हमेशा",-13.396358489990234],["▁업데이트",-13.396358489990234],["ប៉",-13.39635944366455],["▁keamanan",-13.396361351013184],["▁sərgi",-13.396361351013184],["▁Косову",-13.396364212036133],["周辺",-13.396364212036133],["▁атайын",-13.39636516571045],["▁priprave",-13.396366119384766],["▁zajęcia",-13.396367073059082],["ntul",-13.396368980407717],["▁tráfico",-13.396371841430664],["▁інтэр",-13.39637279510498],["▁hauska",-13.396374702453612],["▁يوسف",-13.39637565612793],["▁મોટી",-13.396376609802246],["▁जिससे",-13.396382331848145],["ช้าง",-13.39638614654541],["▁الشمس",-13.396387100219728],["▁conocido",-13.396389961242676],["▁ئوي",-13.396391868591309],["▁numquam",-13.39639663696289],["▁அதற்கு",-13.39639663696289],["र्प",-13.396401405334473],["INTA",-13.39640998840332],["▁волосся",-13.396413803100586],["yczny",-13.396416664123535],["事务",-13.396422386169434],["▁hygien",-13.396443367004396],["▁ill",-13.396443367004396],["▁Код",-13.396448135375977],["▁Թե",-13.396454811096191],["▁wymiar",-13.39645767211914],["▁المقدس",-13.396466255187988],["នូវ",-13.39647102355957],["▁ماں",-13.396476745605469],["
▁महान",-13.39649772644043],["▁putu",-13.396519660949709],["▁ostrov",-13.3965425491333],["▁Maksu",-13.396551132202148],["▁robí",-13.39655590057373],["▁Soul",-13.396556854248049],["▁הדברים",-13.39657211303711],["▁Lyon",-13.396573066711426],["▁نوجوانوں",-13.396596908569336],["ören",-13.396598815917969],["дження",-13.396598815917969],["አምስት",-13.396601676940918],["▁Νε",-13.396608352661133],["▁nemôže",-13.396611213684082],["▁буга",-13.396615028381348],["မယ္။",-13.396618843078612],["րը",-13.396632194519045],["CAN",-13.396638870239258],["▁restaurantes",-13.396666526794434],["▁natal",-13.396678924560549],["ТЬ",-13.396697998046877],["Ț",-13.39670181274414],["▁જાત",-13.396743774414062],["▁Sezon",-13.39674472808838],["▁아시아",-13.396747589111328],["证据",-13.396758079528809],["▁Boer",-13.396769523620604],["vjeçare",-13.39680290222168],["lecció",-13.396821022033691],["▁zapraszamy",-13.396836280822754],["▁회장",-13.396842956542969],["▁amani",-13.39686107635498],["▁funny",-13.39686393737793],["▁Låt",-13.396870613098145],["▁socialist",-13.396875381469728],["▁activo",-13.396881103515623],["lógica",-13.396883010864258],["▁කියනවා",-13.396925926208496],["ਭਾਵ",-13.396939277648926],["▁жылдары",-13.396942138671877],["periode",-13.396944999694824],["ليف",-13.396985054016112],["▁свето",-13.39699363708496],["ျဖစ္ပါတယ္။",-13.397025108337402],["▁dejtingsida",-13.397027015686035],["▁آموزان",-13.3970308303833],["batu",-13.397031784057615],["▁ઘટના",-13.397034645080566],["▁vecchi",-13.39704704284668],["▁историята",-13.397047996520996],["▁Evi",-13.397085189819336],["▁ժողով",-13.397103309631348],["▁fejlődés",-13.397109031677246],["▁SAR",-13.397116661071776],["业绩",-13.39712619781494],["řez",-13.39714241027832],["▁napraviti",-13.39714527130127],["すべき",-13.39716339111328],["▁Przez",-13.397165298461914],["▁ddolen",-13.397170066833496],["▁osam",-13.39717960357666],["▁numarul",-13.397202491760254],["rašyti",-13.39720344543457],["מקור",-13.397215843200684],["эма",-13.397216796875],["mbé",-13.397249221801758],["озу",-13.39725399017334],["▁جانا",-13.397269248962402],["等地",-13.397333145141602],["qaz",-13.3973388671875],["はある",-13.397343635559082],["▁tėv",-13.397347450256348],["▁אויפ",-13.397351264953612],["▁éléments",-13.397361755371094],["▁עקב",-13.397367477416992],["原理",-13.397381782531738],["▁Same",-13.397388458251951],["▁ቤቶች",-13.3973970413208],["▁kezdet",-13.3974027633667],["▁arazo",-13.39741325378418],["▁poslovni",-13.397435188293455],["▁общ",-13.397459030151367],["ሜን",-13.39747714996338],["קורא",-13.39749526977539],["▁എവിടെ",-13.397496223449709],["irajte",-13.397512435913086],["▁kris",-13.397534370422363],["кап",-13.39755153656006],["▁премьер",-13.397553443908691],["линг",-13.397561073303224],["▁ôm",-13.397571563720703],["▁২৫",-13.397571563720703],["لاحظ",-13.39757251739502],["ринг",-13.397576332092283],["▁Hug",-13.397582054138184],["는다",-13.397582054138184],["โฮ",-13.397588729858398],["0.0",-13.397595405578612],["▁kemas",-13.397598266601562],["ਵਾਦ",-13.39762020111084],["merci",-13.397625923156738],["お金を",-13.39763069152832],["▁жарыя",-13.397644996643066],["▁thác",-13.397652626037598],["が高い",-13.397669792175291],["аре",-13.39768886566162],["▁போட்டு",-13.397712707519531],["▁ostalim",-13.397732734680176],["▁мощност",-13.39773941040039],["려고",-13.39776611328125],["▁cute",-13.397793769836426],["▁들어가",-13.39781093597412],["kræft",-13.397828102111816],["▁fabul",-13.39783763885498],["▁provoz",-13.397839546203612],["lule",-13.397846221923828],["▁ఆట",-13.397846221923828],["▁primeiras",-13.397856712341309],["▁patur",-13.39788818359375],["rum
que",-13.397891998291016],["▁gezin",-13.397894859313965],["нці",-13.397899627685549],["▁LL",-13.397919654846191],["diniz",-13.397921562194824],["isestä",-13.397927284240724],["▁gwasanaeth",-13.397942543029783],["abilmek",-13.397960662841797],["▁Сіз",-13.39796257019043],["▁Көр",-13.397963523864746],["▁GMT",-13.397964477539062],["▁Mwy",-13.397979736328123],["▁ವಿದ್ಯಾರ್ಥಿ",-13.397991180419922],["ekhez",-13.39799976348877],["故障",-13.398001670837402],["გვერდ",-13.398003578186035],["▁rauha",-13.398003578186035],["▁ස්ථානය",-13.3980073928833],["ዘው",-13.398014068603516],["▁အတွက်",-13.398014068603516],["หลุด",-13.398032188415527],["WORLD",-13.398035049438477],["Ổ",-13.398036003112791],["▁Vďaka",-13.398036003112791],["▁mwingine",-13.398036003112791],["▁někdy",-13.398036003112791],["▁scuole",-13.398036003112791],["▁zdrowia",-13.398036003112791],["▁εκδήλωση",-13.398036003112791],["▁белсенді",-13.398036003112791],["▁الأسبوع",-13.398036003112791],["▁সবচেয়ে",-13.398036003112791],["▁เกี่ยวกับเรา",-13.39803695678711],["▁fepetra",-13.398037910461426],["▁peniaze",-13.398037910461426],["▁Tayyip",-13.398038864135742],["▁أعضاء",-13.39803981781006],["▁коомдук",-13.39804458618164],["மண",-13.398045539855955],["傾向",-13.398051261901855],["štěstí",-13.398056983947754],["▁исте",-13.398056983947754],["▁ٹیسٹ",-13.398069381713867],["▁идти",-13.398070335388184],["▁očekuje",-13.3980712890625],["▁residu",-13.3980712890625],["▁käytetään",-13.398078918457031],["▁ΝΔ",-13.39808177947998],["sorge",-13.398120880126951],["агляд",-13.398125648498535],["▁හොර",-13.398128509521484],["▁ekspres",-13.3981294631958],["▁Kamis",-13.398138046264648],["▁النس",-13.398143768310549],["▁почетокот",-13.398147583007812],["▁adegua",-13.398153305053713],["ítja",-13.398162841796877],["▁വേണം",-13.398165702819824],["▁söt",-13.398168563842772],["▁ਨਾਂ",-13.398195266723633],["गाई",-13.398221015930176],["▁buton",-13.39823055267334],["这是一个",-13.398231506347656],["▁يۈر",-13.398239135742188],["▁γλυκ",-13.398241996765137],["▁οποίοι",-13.398242950439451],["▁Bakan",-13.39824676513672],["ացող",-13.398253440856934],["▁ALT",-13.398255348205566],["▁arkadaş",-13.398276329040527],["▁прослав",-13.398276329040527],["▁Punjabi",-13.39827823638916],["enseignement",-13.39828395843506],["▁găsit",-13.39828395843506],["▁букв",-13.398290634155272],["▁članka",-13.39829921722412],["▁zájem",-13.3983154296875],["▁skirting",-13.398317337036133],["▁الصحة",-13.398323059082031],["حسب",-13.398329734802246],["ິກ",-13.398355484008787],["▁అమెరికా",-13.398357391357422],["sense",-13.398369789123535],["ective",-13.398374557495115],["▁varianta",-13.398406982421877],["▁interessiert",-13.39841651916504],["▁qila",-13.398420333862305],["held",-13.398439407348633],["▁كثيرة",-13.398446083068848],["▁دراسة",-13.398448944091797],["ਪੋ",-13.398449897766112],["▁Sistemi",-13.398452758789062],["▁dragi",-13.398466110229492],["९८",-13.398476600646973],["TD",-13.398477554321287],["▁គ្មាន",-13.398482322692873],["▁жылдар",-13.398502349853516],["næring",-13.398504257202148],["akty",-13.39850902557373],["ጥቅ",-13.398517608642578],["▁mB",-13.39852237701416],[".<",-13.398544311523438],["▁hakka",-13.398545265197754],["▁основните",-13.398569107055664],["ปลอดภัย",-13.398581504821776],["няли",-13.398591995239258],["חפש",-13.39861011505127],["▁працягу",-13.39861297607422],["▁топло",-13.3986234664917],["▁នូវ",-13.398637771606444],["▁Muchas",-13.398641586303713],["velo",-13.398646354675291],["وره",-13.398655891418455],["▁ikusten",-13.39865779876709],["▁pih",-13.398658752441406],["▁puisse",-13.398670196533203],["▁manana",-13.39
8688316345217],["▁günler",-13.398743629455566],["▁pojawi",-13.398743629455566],["▁gifte",-13.398758888244627],["▁képesség",-13.398759841918944],["▁스타일",-13.398761749267578],["▁లాంటి",-13.39878749847412],["stück",-13.398789405822754],["мел",-13.398794174194336],["sätta",-13.398839950561523],["いても",-13.398852348327637],["▁որոշում",-13.398858070373535],["▁courant",-13.3988618850708],["▁terram",-13.398874282836914],["▁telugu",-13.398882865905762],["▁toprak",-13.398898124694824],["عني",-13.398905754089355],["▁Ware",-13.398964881896973],["開啟",-13.398969650268556],["ଯିବା",-13.3989839553833],["badan",-13.398984909057615],["▁Profi",-13.399009704589844],["▁země",-13.399014472961426],["cok",-13.399027824401855],["▁приложения",-13.399040222167969],["▁Ulla",-13.399066925048828],["▁tranz",-13.399097442626951],["▁understanding",-13.399123191833496],["▁እንደሚያ",-13.399126052856444],["▁ਦਿਲ",-13.39914321899414],["زيت",-13.39917278289795],["одні",-13.399188995361328],["▁viikkoa",-13.399188995361328],["▁ទីក្រុង",-13.399199485778809],["ڻن",-13.399258613586426],["▁Herzen",-13.399269104003906],["▁хөгжүүлэх",-13.399279594421388],["▁cresce",-13.39928913116455],["▁зохиол",-13.399327278137209],["▁বলা",-13.399332046508787],["ાયો",-13.399336814880373],["▁శాఖ",-13.399344444274902],["ಿರುತ್ತದೆ",-13.39935302734375],["kauf",-13.39939785003662],["liście",-13.399418830871582],["▁شیراز",-13.399420738220217],["▁внешне",-13.399446487426758],["саж",-13.399460792541504],["▁ລົດ",-13.39946460723877],["INEN",-13.3994722366333],["яда",-13.399476051330566],["▁sezione",-13.39948844909668],["država",-13.399492263793944],["kładzie",-13.399499893188477],["▁സ്വപ്",-13.399524688720703],["trú",-13.39952564239502],["ўскага",-13.399539947509766],["▁booking",-13.399541854858398],["▁zame",-13.399542808532717],["▁objavi",-13.399571418762209],["▁Dona",-13.399579048156738],["▁இய",-13.399584770202637],["▁Xildhibaan",-13.399636268615724],["秩序",-13.399641036987305],["pál",-13.39966869354248],["ราบ",-13.39966869354248],["▁hakkas",-13.399673461914062],["увајќи",-13.399686813354492],["遮",-13.399709701538086],["शाला",-13.39971160888672],["ລະດັບ",-13.399715423583984],["xigeenka",-13.3997163772583],["បរទេស",-13.3997163772583],["▁Lahatsoratra",-13.3997163772583],["▁flwyddyn",-13.3997163772583],["▁všetkým",-13.3997163772583],["▁εκλογές",-13.3997163772583],["▁менавіта",-13.3997163772583],["▁آذربایجان",-13.3997163772583],["▁સાહિત્ય",-13.3997163772583],["▁అభివృద్ధి",-13.3997163772583],["▁వివిధ",-13.3997163772583],["▁එතකොට",-13.3997163772583],["섭",-13.3997163772583],["▁powinny",-13.399717330932615],["▁kebaikan",-13.399718284606934],["▁proizvaja",-13.399718284606934],["మన",-13.3997220993042],["▁ūkio",-13.399723052978516],["徒歩",-13.399724006652832],["ഭാഗ",-13.399725914001465],["動き",-13.399725914001465],["▁esquerda",-13.39972686767578],["หัวข้อ",-13.399731636047363],["▁кітап",-13.39973258972168],["▁tragen",-13.399733543395996],["▁മാത്രമേ",-13.399733543395996],["▁بجائے",-13.399734497070312],["▁ຄື",-13.399735450744627],["▁suteikia",-13.399737358093262],["▁Eso",-13.399738311767578],["्रे",-13.399740219116213],["ทุกวัน",-13.39974308013916],["▁להבין",-13.399747848510742],["ვინ",-13.399758338928224],["▁Manfaat",-13.39975929260254],["ganti",-13.399765014648438],["▁обрат",-13.399765968322754],["わない",-13.399772644042969],["▁tegutse",-13.399785995483398],["▁क्षति",-13.39980411529541],["▁общин",-13.399810791015623],["jaks",-13.399819374084473],["私たち",-13.399819374084473],["▁isimler",-13.399834632873535],["▁користат",-13.399843215942385],["▁Тај",-13.39985466003418],["elect",-13.39
986801147461],["▁सम्बन्धी",-13.399877548217772],["▁laittaa",-13.399879455566406],["सरी",-13.399883270263672],["▁auzit",-13.399908065795898],["راجع",-13.399917602539062],["▁davomida",-13.399930953979492],["LEM",-13.399940490722656],["▁196",-13.399981498718262],["▁naye",-13.399988174438477],["▁группа",-13.399989128112791],["▁Néz",-13.399998664855955],["▁Gunung",-13.400009155273438],["民生",-13.400023460388184],["álna",-13.400028228759766],["દ્ધ",-13.400032997131348],["▁Sharif",-13.400046348571776],["jour",-13.40005588531494],["वारी",-13.400057792663574],["▁alături",-13.400063514709473],["▁gruppi",-13.400069236755373],["▁Üle",-13.400084495544434],["▁wielo",-13.400091171264648],["▁xwedî",-13.400092124938965],["Photo",-13.400099754333496],["▁planu",-13.40012264251709],["▁lõike",-13.400132179260254],["▁کوٽ",-13.40013313293457],["▁útok",-13.400136947631836],["▁filia",-13.400148391723633],["▁davranış",-13.40015697479248],["이스",-13.40016746520996],["▁ልማት",-13.400191307067873],["▁πραγματικότητα",-13.40019989013672],["▁берег",-13.400203704833984],["人体",-13.400227546691896],["urun",-13.400259971618652],["▁පක්ෂය",-13.400269508361816],["Organ",-13.40027141571045],["ĉo",-13.400278091430664],["▁dobrej",-13.400286674499512],["▁Tommy",-13.400304794311523],["▁များ",-13.400320053100586],["▁blago",-13.400330543518066],["คําถาม",-13.400335311889648],["▁Konser",-13.400341987609863],["▁słab",-13.40035629272461],["this",-13.400395393371582],["对手",-13.400464057922363],["▁sonrasında",-13.400474548339844],["▁남성",-13.40047836303711],["▁lesbisk",-13.400484085083008],["▁saylı",-13.400485038757324],["Баяр",-13.400487899780272],["▁دنوں",-13.400506019592283],["▁Enligt",-13.400511741638184],["ன்னா",-13.4005126953125],["▁ಅದೇ",-13.4005126953125],["▁አካል",-13.40051555633545],["思路",-13.400565147399902],["▁watching",-13.400593757629396],["▁Νομ",-13.400594711303713],["▁biyu",-13.400636672973633],["▁kaveri",-13.40065097808838],["▁أعلن",-13.400674819946287],["न्ना",-13.400691986083984],["▁RF",-13.400714874267578],["▁Igen",-13.400717735290527],["▁голод",-13.40072536468506],["▁جانور",-13.400728225708008],["Hay",-13.400734901428224],["▁тілінде",-13.40074634552002],["▁cumplir",-13.40075397491455],["▁звезд",-13.400757789611816],["▁നിങ്ങൾ",-13.400799751281738],["نق",-13.40082550048828],["अघि",-13.400849342346191],["▁kunsti",-13.400856018066406],["▁έφ",-13.400870323181152],["បន្ថែម",-13.400887489318848],["▁ახალგაზრდა",-13.40090560913086],["▁pryd",-13.400945663452148],["នាំ",-13.40095329284668],["tamiseen",-13.400964736938477],["ਕੂ",-13.400964736938477],["brid",-13.40099048614502],["▁intuitiv",-13.401004791259766],["keel",-13.401010513305664],["потреб",-13.401031494140623],["تصرف",-13.401044845581056],["▁posiciona",-13.40104866027832],["▁mbola",-13.401055335998535],["▁त्याचा",-13.401056289672852],["dengar",-13.401058197021484],["قيل",-13.4010648727417],["ЦЕ",-13.401077270507812],["▁taką",-13.40108871459961],["本科",-13.401104927062988],["▁trendy",-13.40110683441162],["بري",-13.40110969543457],["രാജ്",-13.401137351989746],["ingia",-13.40115451812744],["▁hetkel",-13.401156425476074],["sebenzisa",-13.401167869567873],["▁принес",-13.401175498962402],["▁ຕີ",-13.401177406311035],["čnem",-13.40119743347168],["nikiem",-13.401199340820312],["▁lanse",-13.40120792388916],["怎么办",-13.401244163513184],["杂",-13.401259422302246],["▁jaj",-13.40126132965088],["较高",-13.40127658843994],["▁దిగ",-13.401277542114258],["▁Bakar",-13.401285171508787],["ेय",-13.401289939880373],["liches",-13.40130615234375],["ఫ",-13.401314735412598],["ಹಾರ",-13.40131664276123],["▁Қыз",-13.
40131950378418],["▁tumani",-13.401324272155762],["▁psik",-13.401330947875977],["▁lover",-13.401345252990724],["▁elite",-13.401351928710938],["▁PAT",-13.401355743408203],["افية",-13.401359558105469],["ардын",-13.401363372802734],["内に",-13.40137004852295],["siwn",-13.40138339996338],["周围",-13.40138339996338],["▁δρ",-13.401390075683594],["柄",-13.401390075683594],["ခွင့်",-13.401399612426758],["▁LUMPUR",-13.401400566101074],["▁Mapinduzi",-13.401400566101074],["▁Pejabat",-13.401400566101074],["▁ausschließlich",-13.401400566101074],["▁costruzione",-13.401400566101074],["▁murojaat",-13.401400566101074],["▁nonostante",-13.401400566101074],["▁täiesti",-13.401400566101074],["▁милдет",-13.401400566101074],["▁сыяктуу",-13.401400566101074],["▁تړون",-13.401400566101074],["▁ఇటీవల",-13.401400566101074],["▁ಕುಮಾರಸ್ವಾಮಿ",-13.401400566101074],["▁받는",-13.401400566101074],["afhankelijk",-13.40140151977539],["jylland",-13.40140151977539],["▁τομέα",-13.401402473449709],["лькі",-13.401403427124023],["เดี่ยว",-13.401403427124023],["▁મળશે",-13.401403427124023],["▁krijuar",-13.401405334472656],["فش",-13.401406288146973],["▁kærlighed",-13.401406288146973],["▁niedo",-13.401410102844238],["▁Uhuru",-13.401412010192873],["▁آشنایی",-13.401412963867188],["▁바람",-13.401413917541504],["▁ਆਪਣਾ",-13.40141487121582],["iệu",-13.40141773223877],["▁Đình",-13.40141773223877],["▁праздник",-13.40141773223877],["▁cabinet",-13.40142059326172],["เดอร์",-13.401432037353516],["▁poklic",-13.401432037353516],["▁Өөр",-13.401433944702148],["kuwepo",-13.40143871307373],["▁значения",-13.401442527770996],["▁ಕಂಡು",-13.401444435119627],["▁finanční",-13.401453971862791],["▁українців",-13.40146541595459],["▁здійснення",-13.401487350463867],["доброто",-13.4014892578125],["▁általában",-13.401491165161133],["▁የዓለም",-13.401494026184082],["▁Gesicht",-13.40149974822998],["▁МВР",-13.401500701904297],["▁Obi",-13.401515007019045],["▁acasă",-13.401515007019045],["▁blød",-13.40151596069336],["ஆம்",-13.40152359008789],["▁esencial",-13.401531219482422],["回來",-13.401545524597168],["▁redaktə",-13.40155792236328],["▁العلاج",-13.401569366455078],["מנהל",-13.401578903198242],["▁Balozi",-13.40159797668457],["▁فردی",-13.401616096496582],["ИО",-13.40164566040039],["▁واجب",-13.401653289794922],["▁marcat",-13.401684761047363],["▁dị",-13.401693344116213],["מלא",-13.401719093322754],["▁ٺه",-13.401719093322754],["▁ತೊ",-13.401721000671388],["▁බෙ",-13.40172290802002],["▁ואם",-13.401729583740234],["▁тав",-13.401748657226562],["▁métier",-13.401758193969728],["▁زاده",-13.401763916015623],["יתי",-13.401779174804688],["тарда",-13.401787757873535],["หล",-13.40179443359375],["▁faktur",-13.401803016662598],["๊า",-13.401803970336914],["▁юли",-13.401813507080078],["ったら",-13.401813507080078],["▁одмах",-13.401840209960938],["▁Lahko",-13.401848793029783],["ošu",-13.40186595916748],["▁livelli",-13.401877403259276],["信息化",-13.401884078979492],["▁modellen",-13.40190601348877],["▁doğa",-13.4019193649292],["▁تطوير",-13.40192413330078],["▁ھڪ",-13.401948928833008],["平常",-13.401948928833008],["▁Mug",-13.401969909667969],["▁Agung",-13.401978492736816],["▁зээлийн",-13.401981353759766],["▁filtra",-13.40199375152588],["▁ძველი",-13.402020454406738],["▁Yana",-13.402027130126951],["▁जगत",-13.402044296264648],["masis",-13.402080535888672],["ճանաչ",-13.402091026306152],["▁직원",-13.40211582183838],["▁taman",-13.402119636535645],["▁отзыв",-13.402128219604492],["▁ຄະນະ",-13.402129173278809],["▁całym",-13.402142524719238],["▁axudas",-13.402182579040527],["cări",-13.402192115783691],["भ्या",-13.40219497680664],["▁ngà
n",-13.402198791503906],["Њ",-13.402210235595703],["ിപ്പിക്കുന്ന",-13.4022216796875],["▁neniu",-13.402235984802246],["▁граду",-13.402236938476562],["▁suyu",-13.402257919311523],["▁hijo",-13.402265548706056],["owano",-13.402267456054688],["▁ඕනේ",-13.402283668518066],["▁Biologi",-13.402295112609863],["ռն",-13.402303695678713],["gelig",-13.402338981628418],["90%",-13.40234375],["optim",-13.402355194091797],["▁pokazuje",-13.40239429473877],["▁stranka",-13.4024019241333],["יקו",-13.402410507202148],["▁створи",-13.402425765991213],["▁എന്ത",-13.40242862701416],["▁1890",-13.402453422546388],["▁blisko",-13.402522087097168],["تفا",-13.402524948120115],["▁receta",-13.40253734588623],["▁taşıma",-13.402548789978027],["▁శా",-13.40255641937256],["▁cục",-13.40256118774414],["▁licet",-13.402579307556152],["▁yekun",-13.402591705322266],["បង្ហាញ",-13.402597427368164],["ുണ്ടായിരുന്നു",-13.40260124206543],["▁Pombe",-13.40260124206543],["▁საკ",-13.402624130249023],["▁nikmat",-13.402630805969238],["▁Fau",-13.402669906616213],["▁ខ្",-13.40267848968506],["▁edellis",-13.402714729309082],["২৬",-13.402722358703612],["▁Pelo",-13.402746200561523],["▁Будь",-13.402750968933104],["▁snygg",-13.402756690979004],["คุ",-13.402770042419434],["ানো",-13.402774810791016],["jag",-13.402782440185549],["▁julho",-13.402787208557127],["▁නාට්",-13.40280055999756],["знаў",-13.40280532836914],["▁ملاحظ",-13.402816772460938],["yut",-13.402819633483888],["▁dhinac",-13.402822494506836],["▁gune",-13.40282917022705],["▁atkal",-13.402841567993164],["▁Vom",-13.402844429016112],["tuloy",-13.402871131896973],["bios",-13.402873992919922],["View",-13.402877807617188],["bygger",-13.402877807617188],["ovine",-13.4028902053833],["▁대부분",-13.402892112731934],["드는",-13.402897834777832],["▁destaque",-13.40290641784668],["▁yandan",-13.402912139892578],["pełni",-13.402921676635742],["▁Gür",-13.40293025970459],["▁gly",-13.40293025970459],["▁shin",-13.402945518493652],["合理的",-13.402949333190918],["szerelés",-13.402962684631348],["▁dneh",-13.402962684631348],["tejä",-13.402965545654297],["▁विरुद्ध",-13.402968406677246],["▁termino",-13.40297794342041],["▁juris",-13.402982711791992],["дељ",-13.40301513671875],["胶",-13.403026580810549],["брана",-13.403034210205078],["ואי",-13.403034210205078],["蕾",-13.403057098388672],["mbal",-13.403058052062988],["ສາກົນ",-13.403085708618164],["គ្រួសារ",-13.40308666229248],["▁alligevel",-13.40308666229248],["▁melanjutkan",-13.40308666229248],["▁samozrejme",-13.40308666229248],["▁сүлжээ",-13.40308666229248],["▁Благодаря",-13.40308952331543],["▁bərpa",-13.403090476989746],["▁کورنیو",-13.403090476989746],["权利",-13.403090476989746],["▁الطلاب",-13.403091430664062],["▁우리나라",-13.40309238433838],["▁society",-13.403103828430176],["▁حرارت",-13.403103828430176],["▁became",-13.403105735778809],["▁Jumala",-13.403106689453123],["▁PLAN",-13.403112411499023],["▁uzņēmuma",-13.403121948242188],["▁верува",-13.403121948242188],["▁పోలీసులు",-13.403122901916504],["▁дуг",-13.403124809265137],["なります",-13.40312957763672],["Islam",-13.40315055847168],["еміз",-13.403155326843262],["▁semata",-13.403159141540527],["▁dlaczego",-13.40316390991211],["▁diverso",-13.403164863586426],["عرب",-13.403168678283691],["tyczny",-13.403178215026855],["▁américain",-13.403188705444336],["▁nikt",-13.403203010559082],["▁bununla",-13.403218269348145],["▁ქვეყნ",-13.40321922302246],["▁실제",-13.403226852416992],["১৫",-13.403261184692385],["▁obligat",-13.403295516967772],["ejše",-13.403305053710938],["▁doona",-13.403314590454102],["▁Cando",-13.40333366394043],["フォ",-13.40333366394043
],["ជនជាតិ",-13.403335571289062],["▁atklāt",-13.403340339660645],["▁nekom",-13.40334701538086],["▁Студент",-13.403353691101074],["▁étape",-13.40335750579834],["我们要",-13.403363227844238],["▁അത്ര",-13.403404235839844],["▁Донецьк",-13.403423309326172],["енню",-13.40342903137207],["▁Byl",-13.40343952178955],["▁uzat",-13.403440475463867],["▁Dagaal",-13.403451919555664],["▁посмотреть",-13.40345573425293],["เราได้",-13.40346336364746],["ก้าว",-13.403474807739258],["▁masculin",-13.403508186340332],["まとめ",-13.403517723083496],["▁Terror",-13.403549194335938],["▁ရဲ့",-13.403554916381836],["▁فيديو",-13.403563499450684],["▁csap",-13.403569221496582],["саб",-13.403592109680176],["▁maddi",-13.403602600097656],["▁Bour",-13.403610229492188],["▁competencia",-13.403610229492188],["▁diensten",-13.403610229492188],["বাহ",-13.403611183166504],["দ্ধ",-13.403618812561035],["▁जवान",-13.403627395629885],["▁insult",-13.403636932373049],["▁WP",-13.403660774230955],["ფს",-13.403671264648438],["демо",-13.40370750427246],["abilă",-13.403709411621094],["იჩ",-13.403718948364258],["▁galegas",-13.4037446975708],["▁plaani",-13.40374755859375],["פרסם",-13.40376091003418],["تەن",-13.403768539428713],["▁copilul",-13.403770446777344],["▁यसमा",-13.403791427612305],["ውስ",-13.403797149658203],["▁иргэдийн",-13.403799057006836],["fanta",-13.403806686401367],["Actual",-13.40381145477295],["▁министри",-13.403833389282228],["Абад",-13.403837203979492],["▁trofe",-13.403837203979492],["▁Balaton",-13.403841018676758],["poser",-13.4038724899292],["ντι",-13.403879165649414],["▁एकदा",-13.403883934020996],["▁dýr",-13.403887748718262],["▁domnul",-13.40389633178711],["▁ukrain",-13.403915405273438],["Kam",-13.403923034667969],["▁აბა",-13.403924942016602],["770",-13.40392780303955],["先に",-13.403944969177246],["▁argumento",-13.403955459594728],["▁ваших",-13.40395736694336],["cours",-13.403998374938965],["▁характера",-13.404003143310549],["かけて",-13.404014587402344],["▁ຂໍ້",-13.404027938842772],["enquête",-13.404029846191406],["Los",-13.40403938293457],["τικός",-13.404040336608888],["▁гүйцэтгэ",-13.404040336608888],["све",-13.404044151306152],["▁бесед",-13.40404987335205],["被害",-13.40406608581543],["οξ",-13.404074668884276],["πρό",-13.40408420562744],["▁Dân",-13.404091835021973],["▁Wys",-13.40410327911377],["▁آبادی",-13.404115676879885],["hér",-13.404123306274414],["專輯",-13.404123306274414],["▁saate",-13.40412425994873],["▁kondom",-13.404129028320312],["пало",-13.404131889343262],["▁Prezidentinin",-13.404138565063477],["▁Fier",-13.404152870178224],["▁الدور",-13.40416431427002],["atuta",-13.404172897338867],["摘",-13.404184341430664],["▁выплат",-13.40419578552246],["▁hoeft",-13.404197692871094],["ப்பில்",-13.404208183288574],["algo",-13.404236793518066],["▁комисия",-13.40424633026123],["▁свеж",-13.404272079467772],["▁قيام",-13.404274940490724],["▁CAT",-13.40428352355957],["▁Bunlar",-13.404285430908203],["ચી",-13.404290199279783],["▁sval",-13.404290199279783],["▁레이",-13.404292106628418],["ट्र",-13.404304504394531],["ԱՍ",-13.404364585876465],["▁أش",-13.40438461303711],["kondi",-13.404386520385742],["▁шкір",-13.404412269592283],["▁сауда",-13.404419898986816],["uppgifter",-13.404436111450195],["RAV",-13.40444278717041],["▁dvd",-13.404444694519045],["រថយន្ត",-13.404447555541992],["▁Studium",-13.404458045959473],["▁acontecer",-13.404467582702637],["▁Pyar",-13.404529571533203],["▁popust",-13.40456199645996],["一部分",-13.40457534790039],["وزی",-13.404581069946287],["▁syksy",-13.404592514038086],["үүнү",-13.404630661010742],["ખે",-13.40463924407959],["kozott",-13.
404667854309082],["▁బల",-13.404671669006348],["뿐만",-13.404674530029297],["▁катар",-13.40468406677246],["▁создание",-13.40469455718994],["▁jarð",-13.404705047607422],["暢",-13.404712677001951],["攝",-13.404728889465332],["▁медиуми",-13.40475082397461],["▁Toh",-13.404760360717772],["▁realitet",-13.404762268066406],["メッセージ",-13.40476417541504],["ທ່ອງທ່ຽວ",-13.40477466583252],["▁Perkhidmatan",-13.404775619506836],["▁xarakter",-13.404775619506836],["▁вартість",-13.404775619506836],["▁नुकसान",-13.404775619506836],["▁νέες",-13.404776573181152],["▁ፍቅር",-13.404779434204102],["Béarla",-13.404787063598633],["▁동영상",-13.40478801727295],["▁supply",-13.404791831970217],["▁फायदे",-13.404792785644531],["DJ",-13.404793739318848],["абыз",-13.404793739318848],["試験",-13.404794692993164],["▁jauniešu",-13.404796600341797],["ங்களும்",-13.404799461364746],["▁꽃",-13.404804229736328],["▁الصلاة",-13.404809951782228],["▁duidelik",-13.404812812805176],["tividade",-13.404817581176758],["▁mirada",-13.404829978942873],["▁צורך",-13.404839515686035],["▁kilometri",-13.404865264892578],["作业",-13.404871940612791],["遊客",-13.404871940612791],["▁světě",-13.40487289428711],["▁мү",-13.404921531677246],["Educació",-13.404930114746094],["▁المكتب",-13.404942512512209],["▁탄",-13.404943466186523],["മാക്ക",-13.404952049255373],["▁Став",-13.404959678649902],["▁Claus",-13.404967308044434],["▁จุด",-13.404967308044434],["loon",-13.404979705810549],["▁लिग",-13.404983520507812],["▁المحلية",-13.405030250549316],["▁өнөө",-13.405068397521973],["▁Sector",-13.405075073242188],["აუ",-13.405089378356934],["▁Wilson",-13.405089378356934],["▁Caso",-13.40509796142578],["மூ",-13.40510368347168],["pisco",-13.405112266540527],["klep",-13.40511703491211],["▁كيفية",-13.405122756958008],["forbundet",-13.405123710632324],["Қа",-13.4051513671875],["عمر",-13.405169486999512],["▁міністр",-13.40517234802246],["▁ಲಾ",-13.405179977416992],["▁دوري",-13.405183792114258],["▁Holand",-13.405192375183104],["▁Kön",-13.405232429504396],["▁uko",-13.405242919921877],["▁чаго",-13.40524673461914],["▁परी",-13.405255317687988],["去做",-13.405261993408203],["้อย",-13.40527057647705],["▁třetí",-13.405271530151367],["▁මගෙ",-13.405291557312012],["▁svima",-13.405303001403809],["▁ന്യൂസ്",-13.405303955078123],["▁oceni",-13.405305862426758],["▁chảy",-13.405345916748049],["قض",-13.405352592468262],["▁Reviewed",-13.405363082885742],["▁montant",-13.405372619628906],["entie",-13.405400276184082],["▁münasibət",-13.405414581298828],["غت",-13.405426025390623],["▁कुन",-13.405427932739258],["ుకోవాలి",-13.405428886413574],["▁videl",-13.405433654785156],["▁reunir",-13.40544605255127],["integra",-13.405449867248535],["▁preferat",-13.405449867248535],["ទុក",-13.405461311340332],["േക്ക്",-13.405489921569824],["ராஜா",-13.40552043914795],["tiae",-13.405525207519531],["是他",-13.40554428100586],["安全性",-13.405548095703123],["▁penale",-13.405550956726074],["▁今日は",-13.405631065368652],["осо",-13.405633926391602],["▁جهة",-13.405654907226562],["ପାରେ",-13.40567111968994],["やる",-13.405673027038574],["▁etgan",-13.40570068359375],["sites",-13.40571117401123],["▁Mooi",-13.405725479125977],["▁requiere",-13.40572738647461],["Continu",-13.405734062194824],["▁numera",-13.405735969543455],["▁giun",-13.405742645263672],["▁giza",-13.40575885772705],["▁וואו",-13.405781745910645],["qur",-13.405792236328123],["▁partnere",-13.40579605102539],["▁sæt",-13.40581512451172],["पूर्वी",-13.405817985534668],["▁afro",-13.405818939208984],["რუს",-13.405835151672363],["แจ",-13.405838966369627],["វិញ",-13.40586280822754],["▁그리",-13.4058656692504
88],["十六",-13.405875205993652],["▁3-1",-13.405912399291992],["▁reais",-13.405922889709473],["няла",-13.405927658081056],["▁yerler",-13.405932426452637],["чење",-13.4059419631958],["vag",-13.405961990356444],["事実",-13.405977249145508],["▁garš",-13.405981063842772],["BAS",-13.406003952026367],["팔",-13.406044006347656],["▁zdravi",-13.40605926513672],["▁Прво",-13.406083106994627],["▁quiser",-13.406086921691896],["▁சிறந்த",-13.406098365783691],["▁Thay",-13.406113624572754],["Ду",-13.406137466430664],["▁ଗା",-13.406145095825195],["ljivost",-13.406164169311523],["▁kristal",-13.40616512298584],["▁avgust",-13.406174659729004],["▁ಯುವ",-13.406189918518066],["▁риб",-13.406205177307127],["▁piersi",-13.406207084655762],["▁KN",-13.406214714050291],["ામ",-13.40621566772461],["▁Kế",-13.406225204467772],["▁elé",-13.406227111816406],["▁кете",-13.40622901916504],["だと思う",-13.406246185302734],["▁hissə",-13.406251907348633],["15%",-13.406271934509276],["주세요",-13.406293869018556],["кті",-13.406306266784668],["▁სასამართლო",-13.406314849853516],["ינס",-13.40632152557373],["▁pequeños",-13.406330108642578],["マー",-13.406353950500488],["▁söka",-13.406370162963867],["▁vlieg",-13.406377792358398],["▁Products",-13.406379699707031],["rosi",-13.406381607055664],["нети",-13.40640640258789],["▁ร้านอาหาร",-13.406428337097168],["奢",-13.406435012817385],["fone",-13.406436920166016],["▁ಬಳಕೆ",-13.406436920166016],["▁suuna",-13.406438827514648],["註冊",-13.40644359588623],["დუ",-13.406454086303713],["ស្ថាន",-13.406458854675291],["こういう",-13.406463623046877],["পূর্ণ",-13.406464576721191],["museet",-13.406465530395508],["ปรับปรุง",-13.40646743774414],["▁Abbiamo",-13.40646743774414],["▁Mädchen",-13.40646743774414],["▁bertindak",-13.40646743774414],["▁famiglie",-13.40646743774414],["▁futebol",-13.40646743774414],["▁jurídica",-13.40646743774414],["▁najczęściej",-13.40646743774414],["▁nóvember",-13.40646743774414],["▁odbędzie",-13.40646743774414],["▁powiedzieć",-13.40646743774414],["▁مقایسه",-13.40646743774414],["▁जग्गा",-13.40646743774414],["تكنولوجيا",-13.406468391418455],["▁Гадаад",-13.406469345092772],["▁हिंसा",-13.40647029876709],["▁تحريڪ",-13.406471252441406],["▁списък",-13.40647315979004],["▁adelante",-13.406474113464355],["▁ପ୍ରଧାନ",-13.406475067138672],["▁Kilometer",-13.406476020812988],["▁стагоддзя",-13.406476020812988],["▁తల్లి",-13.406476020812988],["▁ስለሆነ",-13.40647792816162],["диги",-13.406481742858888],["▁coaching",-13.406482696533203],["▁상담",-13.406486511230469],["▁السا",-13.406488418579102],["▁највише",-13.40649700164795],["▁ihmisiä",-13.406503677368164],["▁সরকারি",-13.406514167785645],["ຈິງ",-13.406516075134276],["▁шаарында",-13.406517028808594],["▁Cameron",-13.40652084350586],["נל",-13.406537055969238],["▁(2006)",-13.406542778015137],["▁Tirkiye",-13.406542778015137],["ຕີ",-13.406553268432615],["▁сав",-13.406561851501465],["▁metade",-13.406563758850098],["▁konserv",-13.406570434570312],["იმი",-13.406572341918944],["▁مرتبہ",-13.406578063964844],["未能",-13.406579971313477],["▁چەك",-13.406583786010742],["▁изкуство",-13.406591415405272],["▁အထူး",-13.406596183776855],["ប្រកួត",-13.40660285949707],["▁говоре",-13.406604766845703],["是因為",-13.406606674194336],["erend",-13.406608581542969],["▁tahmin",-13.40663719177246],["јак",-13.406641006469728],["▁Гур",-13.406648635864258],["▁ನೀಡುವ",-13.406658172607422],["زین",-13.406661033630373],["oszczę",-13.406661987304688],["چان",-13.40666675567627],["▁প্রায়",-13.406685829162598],["▁Oor",-13.406688690185549],["▁ພາບ",-13.406689643859863],["çeyê",-13.406697273254396],["▁kokia",-13.406697273254396
],["ដំ",-13.406699180603027],["▁ветеран",-13.40670680999756],["▁అమ",-13.406720161437988],["ировали",-13.406721115112305],["▁défi",-13.406728744506836],["ināšanu",-13.406732559204102],["▁disturb",-13.406753540039062],["pasok",-13.40676498413086],["▁gcuid",-13.40676498413086],["▁MON",-13.40678596496582],["▁Српски",-13.406786918640137],["▁Türkiyənin",-13.406789779663086],["▁देना",-13.406811714172363],["▁аясында",-13.40681266784668],["▁sella",-13.406818389892578],["▁voldoende",-13.406848907470703],["中午",-13.40684986114502],["ۋاتقان",-13.406853675842283],["▁опера",-13.4068603515625],["мовы",-13.406862258911133],["kuse",-13.406875610351562],["▁байв",-13.406905174255373],["▁языка",-13.4069185256958],["▁nợ",-13.406937599182127],["彼ら",-13.406943321228027],["jetno",-13.406946182250977],["▁horrela",-13.406947135925291],["旨",-13.406957626342772],["▁1.7",-13.406970024108888],["▁obmedz",-13.40698528289795],["ITT",-13.40699863433838],["misli",-13.407063484191896],["бий",-13.407111167907717],["લ્ડ",-13.40711498260498],["▁minunat",-13.407123565673828],["အဖြဲ႕",-13.407145500183104],["▁440",-13.40717315673828],["▁gói",-13.40717315673828],["ảng",-13.407186508178713],["രിക",-13.40720272064209],["▁Host",-13.407204627990724],["▁Академи",-13.407217025756836],["թվ",-13.40722942352295],["▁Politica",-13.407230377197266],["▁slutet",-13.407241821289062],["▁අයත්",-13.407246589660645],["▁hein",-13.407249450683594],["▁ଗତ",-13.40725040435791],["▁DH",-13.40725326538086],["▁baho",-13.40726089477539],["▁habrá",-13.407285690307615],["▁iyun",-13.407289505004885],["▁Basi",-13.407312393188477],["续",-13.40731430053711],["▁measc",-13.407318115234377],["▁filmy",-13.407333374023438],["äpp",-13.407340049743652],["▁kültür",-13.407340049743652],["شرع",-13.407342910766602],["▁règles",-13.407343864440918],["▁саяси",-13.40735149383545],["улуп",-13.40736198425293],["دست",-13.40738010406494],["▁ភ",-13.407388687133787],["▁aitab",-13.407389640808104],["▁rádi",-13.407393455505373],["ங்களின்",-13.407397270202637],["▁сем",-13.40739917755127],["阳光",-13.407402992248535],["外界",-13.407415390014648],["ӨН",-13.407422065734863],["cés",-13.407425880432127],["▁utilizarea",-13.407431602478027],["ਟੋ",-13.407463073730469],["▁rull",-13.40748119354248],["НК",-13.407483100891112],["▁finnst",-13.40748405456543],["▁høyt",-13.407520294189451],["▁llum",-13.407536506652832],["дикт",-13.407577514648438],["jungti",-13.407590866088867],["▁Sylv",-13.40759563446045],["▁Dalma",-13.40760612487793],["▁Ē",-13.407608032226562],["▁DNS",-13.407615661621094],["▁kepe",-13.40764617919922],["▁Pä",-13.407716751098633],["▁soalan",-13.407766342163086],["Afrikaanse",-13.407796859741213],["▁معتقد",-13.407797813415527],["џа",-13.407798767089844],["chair",-13.407811164855955],["▁littera",-13.40781593322754],["▁İngilizce",-13.407841682434082],["1981",-13.407846450805664],["DRA",-13.407846450805664],["គី",-13.407855033874512],["දය",-13.407872200012209],["▁жария",-13.407877922058104],["01)",-13.407893180847168],["ачки",-13.407896041870115],["▁telefonisch",-13.407896041870115],["▁vyber",-13.40792751312256],["Ган",-13.407946586608888],["▁181",-13.407963752746582],["arrivo",-13.407968521118164],["обходим",-13.408005714416504],["▁midis",-13.408013343811035],["ින්ම",-13.408018112182615],["レス",-13.408019065856934],["▁tulajdon",-13.408056259155272],["కృష్ణ",-13.408061027526855],["лност",-13.408079147338867],["▁Peta",-13.40810203552246],["鬥",-13.40810489654541],["捐",-13.408124923706056],["継続",-13.408147811889648],["▁મુક",-13.408153533935549],["няць",-13.408156394958496],["オープン",-13.408158302307127],
["멘",-13.408162117004396],["▁Mahkamah",-13.408163070678713],["▁dilluns",-13.408163070678713],["▁zariadení",-13.408163070678713],["▁кастрычніка",-13.408163070678713],["▁ខណៈ",-13.408163070678713],["▁incontri",-13.408164024353027],["▁peristiwa",-13.408164024353027],["▁سرپرست",-13.408164024353027],["▁வாழ்த்துக்கள்",-13.408164024353027],["▁పరిస్థితి",-13.408164024353027],["▁ರಾಷ್ಟ್ರೀಯ",-13.408164978027344],["▁ନ୍ୟୁଜ୍",-13.408166885375977],["▁економіки",-13.408167839050291],["▁εαυτό",-13.408169746398926],["▁వెంటనే",-13.408169746398926],["▁შესაბამისად",-13.408169746398926],["▁байланыштуу",-13.408174514770508],["▁Європи",-13.40817642211914],["▁moeite",-13.408177375793455],["▁வருகிறது",-13.408177375793455],["▁Jersey",-13.408178329467772],["▁عيد",-13.40817928314209],["▁Laptop",-13.408181190490724],["שית",-13.408185005187988],["▁ndërtim",-13.408185005187988],["▁automatic",-13.408190727233888],["▁Consejo",-13.408193588256836],["▁հատված",-13.408198356628418],["有趣",-13.408204078674316],["若干",-13.408212661743164],["▁دیکھنے",-13.408238410949709],["▁Technic",-13.408242225646973],["▁pogleda",-13.408246994018556],["tänk",-13.408248901367188],["云南",-13.408248901367188],["配件",-13.40825080871582],["▁Pemba",-13.408265113830566],["▁ചെയ്യാന്",-13.408270835876465],["▁livres",-13.40827178955078],["केश",-13.408294677734377],["gandi",-13.408297538757324],["▁дойде",-13.408312797546388],["▁අම",-13.408326148986816],["ाण",-13.408339500427246],["▁Mount",-13.408348083496094],["▁община",-13.408376693725586],["▁Steam",-13.408397674560549],["看起來",-13.408404350280762],["▁αυτής",-13.408409118652344],["强化",-13.408418655395508],["ბაზ",-13.408424377441406],["▁cestovn",-13.408448219299316],["fanywa",-13.408449172973633],["▁bendrovė",-13.40846824645996],["▁sisa",-13.408469200134276],["શિ",-13.408504486083984],["▁mùi",-13.4085054397583],["▁адамдын",-13.408525466918944],["ပါဝင်",-13.408533096313477],["▁افسر",-13.408535957336426],["ショ",-13.40854549407959],["▁небес",-13.408556938171388],["కాల",-13.408562660217283],["▁opinia",-13.408577919006348],["▁rík",-13.408581733703612],["agost",-13.408586502075195],["▁سگهو",-13.408626556396484],["שמו",-13.408653259277344],["ยอมรับ",-13.40865993499756],["▁Hop",-13.408689498901367],["▁รวมทั้ง",-13.408689498901367],["▁soup",-13.408693313598633],["▁prison",-13.408705711364746],["▁bath",-13.40870761871338],["▁sugar",-13.40871238708496],["▁offert",-13.408719062805176],["▁switch",-13.408719062805176],["▁Добав",-13.408732414245604],["▁elegir",-13.408743858337402],["▁итгэ",-13.408784866333008],["▁жете",-13.40880012512207],["ੱਪ",-13.408807754516602],["kval",-13.408824920654297],["▁ubo",-13.40883731842041],["▁escolher",-13.408860206604004],["aring",-13.40888214111328],["▁تجد",-13.40888214111328],["▁dừng",-13.408892631530762],["ার্স",-13.40890121459961],["▁elkezd",-13.408913612365724],["▁വിട",-13.408913612365724],["▁оқушылар",-13.408921241760254],["▁ringan",-13.408923149108888],["▁suomi",-13.408930778503418],["▁vieni",-13.408931732177734],["▁Län",-13.408933639526367],["യിട",-13.408946990966797],["把它",-13.408967971801758],["▁ಸಲ್ಲಿಸ",-13.408970832824709],["▁публика",-13.409049034118652],["▁घाल",-13.409049987792969],["▁Diesel",-13.4090576171875],["▁одяг",-13.409058570861816],["▁talvi",-13.409059524536133],["▁abuz",-13.409063339233398],["▁ekskurs",-13.409076690673828],["zdu",-13.40908908843994],["▁mountain",-13.40909194946289],["▁mieleen",-13.409107208251951],["աձ",-13.409113883972168],["QS",-13.4091157913208],["zić",-13.4091215133667],["▁prieteni",-13.409139633178713],["ünket",-13.409180641174316],["ত্য",-13.40918254
852295],["▁vähenda",-13.40918254852295],["▁หนอน",-13.409201622009276],["するのが",-13.409210205078123],["▁menší",-13.409255027770996],["学历",-13.409255981445312],["trus",-13.409262657165527],["死了",-13.40926456451416],["▁Kombin",-13.4093017578125],["▁versija",-13.409332275390623],["ਉਂ",-13.409334182739258],["▁promenad",-13.409334182739258],["▁бабу",-13.409339904785156],["ckiej",-13.409341812133787],["▁olulise",-13.409346580505373],["подозр",-13.409348487854004],["lipat",-13.4093656539917],["ବ୍ର",-13.409375190734863],["▁волі",-13.409383773803713],["קרב",-13.409399032592772],["▁Cana",-13.409403800964355],["▁çal",-13.409446716308594],["▁పాల",-13.409446716308594],["hasil",-13.409485816955566],["▁објекти",-13.409496307373049],["blar",-13.409500122070312],["▁Ofte",-13.409517288208008],["癌症",-13.409539222717283],["▁ձայն",-13.40955924987793],["trina",-13.409567832946776],["▁mej",-13.409567832946776],["ਕਲ",-13.409584045410156],["LJE",-13.409603118896484],["▁Estou",-13.409626960754396],["▁dök",-13.40966510772705],["▁Roja",-13.409672737121582],["▁चन्द्र",-13.409672737121582],["sunod",-13.409676551818848],["▁poids",-13.40968418121338],["▁надзор",-13.40968418121338],["ատու",-13.409722328186035],["▁రావ",-13.409733772277832],["บํารุง",-13.409747123718262],["▁vagas",-13.409748077392578],["▁បញ្ហា",-13.409749031066896],["▁táv",-13.409759521484377],["▁පිරි",-13.40977668762207],["▁liya",-13.40977954864502],["ለፍ",-13.409781455993652],["▁SN",-13.409795761108398],["▁පිරිස",-13.409796714782717],["үлүп",-13.409797668457031],["▁저작권",-13.40980625152588],["графічн",-13.409807205200195],["ुस",-13.40981101989746],["劣",-13.40981388092041],["博物馆",-13.409825325012209],["妖",-13.40984344482422],["▁соз",-13.40985107421875],["롤",-13.409852027893066],["▁жаны",-13.409857749938965],["↓",-13.40985870361328],["ประชาสัมพันธ์",-13.409859657287598],["ยืนยัน",-13.409859657287598],["благоприят",-13.409860610961914],["▁behoefte",-13.409860610961914],["▁злоупотреб",-13.409860610961914],["▁ریکارڈ",-13.409860610961914],["▁हिंदू",-13.409860610961914],["▁కొనసాగ",-13.409860610961914],["▁తిరిగి",-13.409860610961914],["탕",-13.409860610961914],["▁Ngài",-13.40986156463623],["▁mfumo",-13.409862518310549],["▁тави",-13.40986442565918],["▁0,8",-13.409865379333496],["▁жаштагы",-13.409868240356444],["▁Këto",-13.409869194030762],["▁संघर्ष",-13.409872055053713],["▁δυναμ",-13.409873008728027],["▁birthday",-13.40987491607666],["▁حقيقي",-13.409886360168455],["▁differenza",-13.409902572631836],["reglu",-13.409937858581545],["ရွှေ",-13.409954071044922],["▁Dowlad",-13.409969329833984],["មិត្ត",-13.409971237182615],["zentrum",-13.40997314453125],["▁שאתה",-13.4099760055542],["▁3、",-13.40998077392578],["athair",-13.409982681274414],["ворот",-13.410019874572754],["▁пісьменнік",-13.410024642944336],["▁görmek",-13.410028457641602],["▁کشتی",-13.410037994384766],["▁зменш",-13.410048484802246],["▁басып",-13.41005039215088],["оха",-13.410054206848145],["裡的",-13.41005802154541],["forvaltning",-13.410063743591309],["▁käytä",-13.410067558288574],["▁новой",-13.410073280334473],["▁önceki",-13.410075187683104],["▁հաջող",-13.410076141357422],["▁geword",-13.410093307495115],["▁školu",-13.410097122192385],["άνι",-13.410117149353027],["καρ",-13.410123825073242],["▁Био",-13.410154342651367],["തുപോലെ",-13.410155296325684],["saimniec",-13.410173416137695],["▁اداری",-13.41018772125244],["▁რამე",-13.410194396972656],["μπά",-13.41020393371582],["▁suurt",-13.410205841064451],["▁kwaye",-13.410221099853516],["▁gmin",-13.410222053527832],["地说",-13.410229682922363],["▁skro",-13.410247802734377],["
▁մարմին",-13.41025447845459],["▁подобри",-13.410261154174805],["▁proprietari",-13.410272598266602],["▁služi",-13.410274505615234],["▁jätka",-13.4102783203125],["▁אומ",-13.410289764404297],["▁stala",-13.410290718078612],["щее",-13.41030216217041],["▁njerëzve",-13.410326957702637],["▁Meni",-13.41033935546875],["▁잔",-13.410340309143066],["上一",-13.410355567932127],["gehalten",-13.410357475280762],["▁наші",-13.410358428955078],["▁kevés",-13.410371780395508],["ुः",-13.410396575927734],["▁kartic",-13.410425186157228],["២៥",-13.410440444946287],["▁ನೀಡಿದ",-13.410445213317873],["▁суму",-13.410457611083984],["切实",-13.41047191619873],["мышлен",-13.410479545593262],["ტონ",-13.410481452941896],["▁akurat",-13.410486221313477],["▁នាង",-13.410493850708008],["▁ressurs",-13.41051197052002],["bulan",-13.410547256469728],["ക്സ്",-13.410582542419434],["▁ज्यान",-13.41058349609375],["▁guitar",-13.410587310791016],["▁Råd",-13.410612106323242],["árd",-13.410626411437988],["▁유럽",-13.410635948181152],["▁қатыс",-13.410645484924316],["▁Dirección",-13.410654067993164],["▁اراد",-13.410669326782228],["▁serê",-13.41067123413086],["▁povesti",-13.410672187805176],["uvchilar",-13.410691261291504],["何を",-13.410717964172363],["евіч",-13.410720825195312],["inscription",-13.410721778869627],["▁Sangat",-13.410724639892578],["ក្នុងការ",-13.41072940826416],["ርት",-13.41073513031006],["طرح",-13.410746574401855],["▁kdaj",-13.410746574401855],["▁berhubung",-13.410774230957031],["▁військово",-13.410778999328612],["▁পূর্ব",-13.41078758239746],["מאַ",-13.41081714630127],["▁slaap",-13.41086483001709],["▁komentara",-13.410865783691406],["▁روزی",-13.41086769104004],["▁Dul",-13.41088581085205],["തര",-13.41090202331543],["▁prawda",-13.410903930664062],["▁selten",-13.410918235778809],["ॉय",-13.410923957824709],["▁његове",-13.410924911499023],["NAP",-13.41092586517334],["İS",-13.410950660705566],["ົມ",-13.410964012145996],["▁ශා",-13.410970687866213],["▁تصل",-13.410978317260742],["▁রাজ",-13.41098690032959],["▁læser",-13.411004066467283],["▁прэ",-13.411006927490234],["▁Gry",-13.411015510559082],["ヘ",-13.411033630371094],["▁rase",-13.411053657531738],["▁Դե",-13.411073684692385],["▁skär",-13.411080360412598],["▁vapor",-13.411083221435549],["ebiliyor",-13.411091804504396],["ləyir",-13.41111183166504],["pron",-13.41111946105957],["▁rendszeres",-13.41111946105957],["▁internett",-13.411160469055176],["▁малка",-13.411218643188477],["▁aşk",-13.411230087280272],["проект",-13.411232948303224],["יאָ",-13.41123867034912],["▁уход",-13.41123867034912],["aaminen",-13.411247253417969],["▁обект",-13.41127872467041],["▁అందులో",-13.41127872467041],["▁hodín",-13.411284446716309],["▁African",-13.411285400390623],["▁tələbələr",-13.411299705505373],["▁պատասխանատվությ",-13.411330223083496],["ův",-13.41137981414795],["exposition",-13.411380767822266],["▁teszt",-13.41140079498291],["同步",-13.41140842437744],["▁troll",-13.41141128540039],["▁निरा",-13.411420822143556],["▁හදා",-13.41142463684082],["▁Tree",-13.41143035888672],["duci",-13.411434173583984],["抬",-13.4114351272583],["остей",-13.41147518157959],["spiegel",-13.411477088928224],["რეთ",-13.4114990234375],["mpung",-13.411500930786133],["闹",-13.411504745483398],["逾",-13.411505699157717],["人力资源",-13.411511421203612],["čkih",-13.41152286529541],["▁farby",-13.41152572631836],["اعة",-13.411526679992676],["▁عہد",-13.41153335571289],["▁odpoved",-13.41153621673584],["▁언제",-13.411547660827637],["運転",-13.411551475524902],["เชิญ",-13.411554336547852],["▁մրց",-13.411555290222168],["ດໍາເນີນ",-13.411556243896484],["▁কেন্দ্র",-13.411560
05859375],["▁gjithmonë",-13.411561012268066],["▁karácsony",-13.411561012268066],["▁പ്രതിഷേധ",-13.411561012268066],["▁අත්අඩංගුවට",-13.411561012268066],["ტალი",-13.411563873291016],["▁ಗಾತ್ರ",-13.411563873291016],["▁miejscowości",-13.411564826965332],["▁холбогдох",-13.411564826965332],["▁ארויס",-13.411564826965332],["▁michezo",-13.411566734313965],["تأكد",-13.411569595336914],["▁formes",-13.411569595336914],["▁літератури",-13.411569595336914],["▁ይገባል",-13.41157054901123],["▁drużyn",-13.411571502685549],["▁fotboll",-13.411571502685549],["ນະຄອນ",-13.411578178405762],["一眼",-13.411579132080078],["▁العلاقات",-13.411582946777344],["▁алга",-13.411587715148926],["ଗଡ଼",-13.411593437194824],["▁ಯಾರು",-13.41161823272705],["▁عليكم",-13.411629676818848],["▁wręcz",-13.41163158416748],["▁inspirere",-13.41165542602539],["▁Quân",-13.41165828704834],["govori",-13.411660194396973],["▁hing",-13.411663055419922],["▁masy",-13.411665916442873],["møde",-13.411675453186035],["▁ଲି",-13.411693572998049],["▁başta",-13.411694526672363],["▁Absolut",-13.411697387695312],["Mor",-13.411698341369627],["▁ಸ್ಪ",-13.411698341369627],["的国家",-13.411703109741213],["▁소재",-13.41170597076416],["▁ඔබගේ",-13.41171169281006],["▁Palestina",-13.41171646118164],["▁espa",-13.411718368530272],["▁Благо",-13.41172981262207],["▁беларуская",-13.411731719970703],["юм",-13.41175937652588],["▁Многие",-13.411767959594728],["Team",-13.411768913269045],["willig",-13.411771774291992],["▁transporto",-13.411779403686523],["▁genetic",-13.411788940429688],["HAS",-13.411789894104004],["▁ඈ",-13.411802291870115],["▁Потом",-13.411808967590332],["▁vrede",-13.41184139251709],["▁сухо",-13.411864280700684],["တန်",-13.411870002746582],["▁слава",-13.411882400512695],["▁средина",-13.411884307861328],["▁probable",-13.41189670562744],["HV",-13.411904335021973],["لحق",-13.411914825439451],["▁dosis",-13.411968231201172],["出て",-13.411975860595703],["iate",-13.411994934082031],["▁ستاره",-13.412010192871094],["▁Хал",-13.412020683288574],["faka",-13.41202449798584],["proti",-13.412032127380373],["▁жада",-13.412042617797852],["02)",-13.412056922912598],["▁akció",-13.412070274353027],["iyyar",-13.41207504272461],["▁izmantošana",-13.412089347839355],["▁SG",-13.412118911743164],["czkę",-13.41214084625244],["▁خواند",-13.412142753601074],["▁daarop",-13.412145614624023],["Jag",-13.412147521972656],["▁havne",-13.412153244018556],["▁suffit",-13.412161827087402],["브라",-13.4121675491333],["▁Hade",-13.412237167358398],["rath",-13.412240982055664],["ড়ে",-13.412240982055664],["▁nyumbani",-13.412242889404297],["beat",-13.412273406982422],["رحم",-13.412277221679688],["▁brasil",-13.412281036376951],["▁علينا",-13.41229248046875],["▁brend",-13.412302017211914],["▁Sole",-13.412304878234863],["▁ලියන",-13.412313461303713],["▁blanco",-13.412320137023926],["▁Nhi",-13.412321090698242],["ნჯ",-13.412330627441406],["履",-13.412331581115724],["عنا",-13.412333488464355],["▁Debe",-13.412334442138672],["рви",-13.412399291992188],["нара",-13.412405014038086],["▁குழு",-13.412409782409668],["▁ekspo",-13.412410736083984],["▁पुग",-13.412415504455566],["▁видеть",-13.412432670593262],["▁inimesi",-13.412434577941896],["▁annoncer",-13.41247844696045],["▁folks",-13.412493705749512],["נעמען",-13.412505149841309],["jci",-13.412554740905762],["何度も",-13.412557601928713],["▁Zavod",-13.41257381439209],["▁numerosi",-13.412590980529783],["ಡಾ",-13.412603378295898],["ākās",-13.412630081176758],["னோ",-13.412637710571287],["总部",-13.412643432617188],["վիր",-13.412644386291504],["▁leírás",-13.412647247314451],["▁kampanya",-13.41265296
9360352],["kracht",-13.412664413452148],["▁Tutto",-13.412665367126465],["两种",-13.41269302368164],["▁putus",-13.412708282470703],["నల్",-13.412731170654297],["peda",-13.412737846374512],["▁цените",-13.412766456604004],["ికి",-13.412771224975586],["▁трох",-13.412801742553713],["단체",-13.412806510925291],["कले",-13.412816047668455],["▁qəzeti",-13.412849426269531],["▁telle",-13.412878036499023],["sögn",-13.412884712219238],["▁Bratislave",-13.41290283203125],["▁חודש",-13.412911415100098],["▁kaise",-13.412917137145996],["gezond",-13.412933349609377],["lander",-13.412938117980955],["▁puas",-13.412955284118652],["Không",-13.412961959838867],["ארט",-13.412961959838867],["▁ذو",-13.412973403930664],["▁वाहन",-13.412981033325195],["▁dubi",-13.41299533843994],["စက်",-13.413002967834473],["공원",-13.413008689880373],["ว์",-13.413026809692385],["नां",-13.413034439086914],["▁สมัคร",-13.413053512573242],["▁detí",-13.41306495666504],["▁qiymətləndir",-13.413080215454102],["▁எங்க",-13.413081169128418],["regne",-13.413103103637695],["更为",-13.413122177124023],["▁அதில்",-13.413137435913086],["edifici",-13.413165092468262],["▁Пак",-13.413175582885742],["灾",-13.413183212280272],["јски",-13.41318416595459],["▁Vårt",-13.41319751739502],["削",-13.413200378417969],["▁istedi",-13.413202285766602],["▁ይችላል",-13.413204193115234],["▁გარემო",-13.413213729858398],["▁ғасыр",-13.413249969482422],["蝦",-13.413249969482422],["▁Konkur",-13.413252830505373],["▁Während",-13.413264274597168],["▁mafunzo",-13.413264274597168],["▁відбудеться",-13.413264274597168],["▁друштва",-13.413264274597168],["▁самоврядування",-13.413264274597168],["▁өнүктүрүү",-13.413264274597168],["▁الاثنين",-13.413264274597168],["▁پشاور",-13.413264274597168],["ទោស",-13.413265228271484],["‮",-13.413265228271484],["▁оружје",-13.413265228271484],["▁بويىچە",-13.413265228271484],["▁څرګند",-13.413265228271484],["▁परिचय",-13.413265228271484],["▁വിനോദ",-13.413265228271484],["▁wesentlich",-13.4132661819458],["▁خمینی",-13.413267135620115],["គ្រាប់",-13.413268089294434],["▁Ticaret",-13.413268089294434],["▁ymlaen",-13.413269996643066],["▁Treba",-13.413272857666016],["▁ចែក",-13.413272857666016],["▁đảng",-13.413273811340332],["▁మోడీ",-13.413273811340332],["▁પાંચ",-13.413274765014648],["เด่น",-13.413277626037598],["▁үеэр",-13.41328239440918],["▁mitjans",-13.413285255432127],["▁cynnig",-13.413290023803713],["▁للغاية",-13.413290977478027],["وقف",-13.413302421569824],["▁thẻ",-13.41330337524414],["▁pokuša",-13.413304328918455],["หัน",-13.413307189941406],["▁አምላክ",-13.41331958770752],["特征",-13.413323402404783],["▁idées",-13.41332721710205],["▁petrece",-13.413328170776367],["▁នយោបាយ",-13.413329124450684],["▁פרטים",-13.41334629058838],["ჩხ",-13.413352966308594],["▁confronti",-13.41335678100586],["▁šeimos",-13.413363456726074],["▁dělá",-13.413385391235352],["▁Ólaf",-13.413387298583984],["▁Opći",-13.41339111328125],["kostnað",-13.413393020629885],["▁відкрити",-13.41339874267578],["▁viață",-13.413405418395996],["▁хорооны",-13.413406372070312],["emaks",-13.413410186767578],["▁անում",-13.413413047790527],["▁rakenne",-13.41341495513916],["▁අක්ක",-13.41343116760254],["▁bumaba",-13.413433074951172],["▁иши",-13.413433074951172],["▁ediyoruz",-13.413434982299805],["จัดส่ง",-13.413440704345703],["inizde",-13.413447380065918],["你会",-13.413453102111816],["▁ajutor",-13.413455963134766],["▁peça",-13.413457870483398],["▁eodem",-13.413469314575195],["лить",-13.413479804992676],["▁Stunde",-13.413479804992676],["▁ລາຍ",-13.413481712341309],["▁derde",-13.41348934173584],["▁retail",-13.413509368896484],["▁نشانه",
-13.413515090942385],["▁Манас",-13.41352081298828],["Jes",-13.41352367401123],["▁Тарас",-13.413524627685549],["▁turėjo",-13.413525581359863],["leidžia",-13.413529396057127],["عجب",-13.413540840148926],["turik",-13.413548469543455],["▁Vielen",-13.41355323791504],["▁yaparak",-13.413558959960938],["סער",-13.41356372833252],["▁Cú",-13.413581848144531],["▁vull",-13.41358757019043],["▁eladó",-13.41364288330078],["▁naturaleza",-13.413657188415527],["αστεί",-13.413660049438477],["▁pietei",-13.413660049438477],["▁falt",-13.413663864135742],["▁ወያኔ",-13.413683891296388],["ланган",-13.413691520690918],["orri",-13.413702964782717],["▁uçak",-13.41370677947998],["vîn",-13.41373062133789],["▁روزهای",-13.413735389709473],["세트",-13.413738250732422],["▁ເມ",-13.413769721984863],["ເຕ",-13.413771629333496],["▁مناسبت",-13.413772583007812],["▁სტუდენტ",-13.413774490356444],["ଦେଶ",-13.413782119750977],["▁районі",-13.413789749145508],["▁işlemleri",-13.413796424865724],["▁kick",-13.413813591003418],["▁სერ",-13.413837432861328],["اتور",-13.413840293884276],["舒服",-13.413873672485352],["▁2014)",-13.413890838623049],["ාගේ",-13.413898468017578],["ğü",-13.413907051086426],["▁divide",-13.413928031921388],["▁arriver",-13.413935661315918],["▁келтир",-13.413994789123535],["ilayotgan",-13.414061546325684],["padlo",-13.414076805114746],["▁Georgi",-13.414082527160645],["▁قدیمی",-13.414082527160645],["ombra",-13.414100646972656],["▁бүтээ",-13.414105415344238],["แซ",-13.414108276367188],["نن",-13.414125442504885],["▁resz",-13.414134979248049],["▁پوځي",-13.414149284362791],["設計師",-13.41415309906006],["اعي",-13.41418170928955],["Ро",-13.414185523986816],["statt",-13.414188385009766],["▁инди",-13.414230346679688],["99)",-13.414238929748535],["длын",-13.414251327514648],["▁hält",-13.414267539978027],["▁sənədlər",-13.414319038391112],["riyê",-13.414323806762695],["ुम",-13.414332389831545],["▁mãn",-13.414363861083984],["美国的",-13.414365768432615],["▁Bier",-13.414373397827148],["▁lalo",-13.414392471313477],["▁իրավունքների",-13.414393424987791],["SSE",-13.414443016052246],["ຈັກ",-13.414450645446776],["▁magnitud",-13.414474487304688],["▁صفر",-13.414508819580078],["ಬದಲಾಯಿಸಿ",-13.414532661437988],["ellt",-13.414533615112305],["▁הקרוב",-13.414535522460938],["▁Nano",-13.414543151855469],["ėjimą",-13.414552688598633],["MIA",-13.414572715759276],["例子",-13.414578437805176],["SAL",-13.414579391479492],["▁өмч",-13.41460418701172],["oille",-13.414613723754885],["ျမန္မာ",-13.414644241333008],["▁Jeff",-13.41465187072754],["eien",-13.414701461791992],["しまった",-13.414703369140623],["отека",-13.41470432281494],["▁روپ",-13.41470432281494],["병원",-13.41470432281494],["application",-13.414729118347168],["▁приведен",-13.414737701416016],["▁salonu",-13.414742469787598],["скоре",-13.414756774902344],["▁guztia",-13.414787292480469],["АЈ",-13.414801597595217],["▁előny",-13.414806365966797],["ፋት",-13.414810180664062],["▁истински",-13.41481113433838],["คนไทย",-13.414831161499023],["▁ופ",-13.414843559265137],["▁avtomat",-13.414861679077148],["興趣",-13.414873123168944],["▁ኪ",-13.414875984191896],["裕",-13.414883613586426],["▁ببر",-13.41489028930664],["▁proposer",-13.414894104003906],["위원장",-13.414898872375488],["▁kodi",-13.414901733398438],["勃",-13.4149169921875],["▁ኃላፊ",-13.414923667907717],["的管理",-13.414928436279297],["▁favoritt",-13.414934158325195],["ാളി",-13.414935111999512],["▁Yorum",-13.414937019348145],["▁rozš",-13.414939880371094],["〔",-13.414939880371094],["供应商",-13.41494083404541],["▁LR",-13.414949417114258],["ダウンロード",-13.414959907531738],["▁signifi",-13.4149684
90600586],["အောက်",-13.41497039794922],["▁Hyderabad",-13.414971351623535],["▁Sánchez",-13.414971351623535],["▁diharapkan",-13.414971351623535],["▁kiváló",-13.414971351623535],["▁müşahidə",-13.414971351623535],["▁връща",-13.414971351623535],["▁դժվար",-13.414971351623535],["맹",-13.414971351623535],["▁взаимо",-13.414972305297852],["▁శరీర",-13.414972305297852],["ଯୁକ୍ତ",-13.414973258972168],["▁extrém",-13.414973258972168],["▁terceira",-13.414973258972168],["▁cocktail",-13.4149751663208],["▁тұрақты",-13.414976119995115],["▁поможет",-13.414977073669434],["▁сутрин",-13.414978981018066],["▁одновременно",-13.414979934692385],["▁polyester",-13.414981842041016],["▁의견",-13.414984703063965],["▁probar",-13.414986610412598],["▁football",-13.414987564086914],["▁navegador",-13.41498851776123],["▁ହୋଇଥାଏ",-13.41498851776123],["▁ਚੱਲ",-13.414989471435549],["▁此",-13.414992332458496],["▁udalerri",-13.414993286132812],["ซ่า",-13.414995193481444],["▁potvrdil",-13.414995193481444],["ሁሉም",-13.414996147155762],["▁제대로",-13.41500473022461],["ቷል።",-13.415005683898926],["▁աջակց",-13.415019989013672],["▁ପିଲା",-13.41502285003662],["▁melayu",-13.41502857208252],["▁Dêr",-13.415044784545898],["曝光",-13.41506004333496],["▁жители",-13.415064811706545],["▁janeiro",-13.415068626403809],["▁interface",-13.415069580078123],["▁עולה",-13.415069580078123],["▁Ibnu",-13.415072441101074],["িট",-13.41508674621582],["▁εξής",-13.415088653564451],["त्वं",-13.415096282958984],["▁zostać",-13.415111541748049],["әрі",-13.415128707885742],["▁passiv",-13.41512966156006],["▁undersøgelse",-13.415133476257324],["有効",-13.415151596069336],["లర్",-13.415154457092283],["▁mòr",-13.41517448425293],["ฝ",-13.415183067321776],["▁вовсе",-13.415213584899902],["不必",-13.41523265838623],["▁Gwen",-13.415254592895508],["▁osobne",-13.415265083312988],["▁Turist",-13.415294647216797],["▁kumi",-13.415306091308594],["▁barabara",-13.41531467437744],["▁అల",-13.41531753540039],["▁exclusiv",-13.41533374786377],["▁లేక",-13.415335655212402],["▁Información",-13.415342330932615],["склад",-13.415352821350098],["מרת",-13.415352821350098],["▁owner",-13.4154052734375],["▁friendly",-13.415419578552246],["▁trgu",-13.415443420410156],["tzaileak",-13.41545295715332],["▁رجال",-13.415456771850586],["▁Does",-13.41545867919922],["▁Віль",-13.41546630859375],["▁पार्क",-13.415478706359863],["ījis",-13.415501594543455],["▁самостійно",-13.415509223937988],["skimi",-13.415522575378418],["▁razstav",-13.415522575378418],["▁견",-13.41552448272705],["namn",-13.415547370910645],["▁Meillä",-13.415597915649414],["285",-13.415641784667969],["▁Medan",-13.415651321411133],["▁hüququ",-13.41568088531494],["了一個",-13.415690422058104],["▁lijst",-13.415694236755373],["▁convention",-13.4157075881958],["▁กําลัง",-13.41575050354004],["36)",-13.415769577026367],["gane",-13.415771484375],["▁आउन",-13.415777206420898],["▁පරීක්ෂණ",-13.415777206420898],["üß",-13.415783882141112],["▁éagsúla",-13.41579246520996],["▁Baar",-13.415793418884276],["chová",-13.415796279907228],["jili",-13.415809631347656],["▁ພາກ",-13.415817260742188],["▁أسر",-13.415875434875488],["jibu",-13.415879249572754],["izmit",-13.415892601013184],["▁उम",-13.415899276733398],["సూ",-13.415912628173828],["行李",-13.415928840637209],["聽到",-13.415939331054688],["▁تكن",-13.415945053100586],["▁Kek",-13.415947914123535],["▁stanice",-13.4159517288208],["niški",-13.415961265563965],["▁Mole",-13.415970802307127],["▁Vilka",-13.415973663330078],["มุ",-13.415977478027344],["รับสมัคร",-13.415990829467772],["▁Valo",-13.416004180908203],["למה",-13.416028022766112],["▁keto",-13.4
16050910949709],["▁prijzen",-13.416054725646973],["▁novembri",-13.416091918945312],["▁hankki",-13.416098594665527],["▁ഡാ",-13.416099548339844],["78)",-13.41615390777588],["▁처",-13.416154861450195],["姐姐",-13.416156768798828],["▁відправ",-13.416166305541992],["▁sär",-13.416172981262209],["▁lavorare",-13.416190147399902],["пълни",-13.416192054748535],["▁நடந்த",-13.416197776794434],["▁знати",-13.416199684143066],["▁አድ",-13.41622829437256],["▁certificado",-13.416251182556152],["platsen",-13.416266441345217],["ibles",-13.416268348693848],["▁tush",-13.416297912597656],["▁wnętrz",-13.416299819946287],["ರಂ",-13.416305541992188],["▁Šie",-13.416325569152832],["▁fornøyd",-13.416335105895996],["krig",-13.41635513305664],["გრამ",-13.416363716125488],["▁luonnon",-13.416369438171388],["▁Mamma",-13.41640853881836],["पाट",-13.41643238067627],["▁PhD",-13.41644287109375],["强大的",-13.416444778442385],["sborg",-13.416446685791016],["FB",-13.416461944580078],["çısı",-13.416461944580078],["▁skön",-13.416471481323242],["IFI",-13.416473388671877],["▁sio",-13.416495323181152],["soek",-13.41650390625],["чыя",-13.416513442993164],["▁jomā",-13.416519165039062],["วาด",-13.416525840759276],["zê",-13.416582107543944],["▁passare",-13.416586875915527],["▁zapach",-13.416614532470703],["▁маё",-13.416614532470703],["Tôi",-13.416630744934082],["苗",-13.416631698608398],["▁ສາມາດ",-13.416650772094728],["▁தடை",-13.416651725769045],["▁महत्त्व",-13.416666984558104],["pler",-13.416668891906738],["liik",-13.416669845581056],["▁ievēro",-13.416677474975586],["▁Maxamuud",-13.416680335998535],["▁δημόσιο",-13.416680335998535],["▁дигитал",-13.416680335998535],["▁тәртібі",-13.416680335998535],["▁अभिनेत्री",-13.416680335998535],["วงศ์",-13.416681289672852],["ស្អាត",-13.416681289672852],["▁unterwegs",-13.416681289672852],["▁তালিকা",-13.416681289672852],["빙",-13.416681289672852],["▁tecnico",-13.416682243347168],["スタート",-13.416682243347168],["▁गर्दछ",-13.416685104370115],["ໄຂ",-13.4166898727417],["▁נתניה",-13.4166898727417],["▁problemów",-13.416693687438965],["▁ਨਗਰ",-13.416698455810549],["▁Pí",-13.416702270507812],["▁providing",-13.416709899902344],["िन्छ",-13.41671085357666],["▁lifestyle",-13.41671371459961],["▁행동",-13.41671371459961],["▁düşük",-13.416720390319824],["▁يبدو",-13.416730880737305],["▁Adolf",-13.41673755645752],["ใช้เวลา",-13.416740417480469],["▁206",-13.416746139526367],["ināšana",-13.416755676269531],["sökning",-13.416768074035645],["▁escolas",-13.416768074035645],["▁वहां",-13.416768074035645],["ERC",-13.416769981384276],["▁vacibdir",-13.416769981384276],["၀န္",-13.416770935058594],["SPA",-13.41677188873291],["▁chaleur",-13.41677188873291],["▁volo",-13.416775703430176],["▁cursa",-13.41677951812744],["▁તૈયાર",-13.416788101196287],["生涯",-13.416790962219238],["▁apkārt",-13.41679859161377],["чего",-13.416803359985352],["▁vārds",-13.4168119430542],["▁해서",-13.416830062866213],["▁øje",-13.41683578491211],["▁sebaiknya",-13.416848182678224],["▁അന്ന",-13.416851043701172],["▁thema",-13.416852951049805],["▁interroga",-13.41685676574707],["▁numbers",-13.416858673095703],["▁Brief",-13.41685962677002],["▁rămas",-13.416862487792969],["▁магазина",-13.416876792907717],["ијум",-13.416879653930664],["▁písm",-13.416887283325195],["▁Intr",-13.416893005371094],["ሶች",-13.416898727416992],["▁Καν",-13.416903495788574],["▁laaste",-13.416958808898926],["▁generelt",-13.416974067687988],["現実",-13.417028427124023],["▁noon",-13.417040824890137],["▁Nazionale",-13.417041778564451],["سية",-13.41704273223877],["▁mradi",-13.417052268981934],["باح",-13.417071342468262],
["▁مجمع",-13.417083740234377],["wienie",-13.417089462280272],["ከል",-13.417122840881348],["ດາ",-13.417128562927246],["Claude",-13.41714572906494],["▁maioría",-13.417147636413574],["▁තුන",-13.417163848876951],["会話",-13.417174339294434],["▁수행",-13.41718864440918],["▁दोस्त",-13.417227745056152],["ପାଳ",-13.417228698730469],["っちゃ",-13.41726016998291],["▁žais",-13.417292594909668],["▁חזק",-13.417301177978516],["chron",-13.417303085327148],["▁ქა",-13.417314529418944],["▁tegas",-13.417325019836426],["/04",-13.417360305786133],["ંટ",-13.417369842529297],["▁anderes",-13.417380332946776],["▁ਲਿਖ",-13.417384147644045],["ుతు",-13.417387962341309],["▁hakem",-13.417396545410156],["เนีย",-13.417403221130373],["ີດ",-13.417413711547852],["kW",-13.417428970336914],["తూ",-13.417434692382812],["мър",-13.417439460754396],["▁слышал",-13.417444229125977],["和我",-13.41744613647461],["ාට",-13.417447090148926],["iniame",-13.417454719543455],["▁යව",-13.417462348937988],["▁Ernest",-13.417482376098633],["▁VIS",-13.417490005493164],["▁திட்ட",-13.417506217956545],["όρων",-13.417508125305176],["ሰን",-13.417513847351074],["▁справу",-13.41751766204834],["pór",-13.417536735534668],["▁планета",-13.417539596557615],["▁založen",-13.4175443649292],["的专业",-13.417560577392578],["▁воспита",-13.417564392089844],["▁സഭ",-13.41758155822754],["▁želim",-13.41761302947998],["ريس",-13.417625427246094],["լան",-13.417640686035156],["▁studii",-13.417669296264648],["▁Buda",-13.417672157287598],["جائزة",-13.417688369750977],["▁Eden",-13.417688369750977],["基本的に",-13.417694091796877],["▁ಮ್ಯಾ",-13.417736053466797],["▁کمال",-13.417743682861328],["▁intrig",-13.417752265930176],["epoca",-13.417757034301758],["gach",-13.417778968811035],["▁Tên",-13.417784690856934],["▁Alami",-13.4177885055542],["▁salsa",-13.4177885055542],["▁nettsiden",-13.417816162109377],["0.1",-13.41782283782959],["رقة",-13.417824745178224],["▁festas",-13.417831420898438],["suar",-13.417840957641602],["идент",-13.417848587036133],["▁الخير",-13.417854309082031],["▁overfor",-13.417856216430664],["▁Факт",-13.417860984802246],["פנה",-13.417903900146484],["▁գործում",-13.417905807495115],["▁унш",-13.41790771484375],["▁चालक",-13.41791820526123],["▁iniciativas",-13.417924880981444],["▁أصبح",-13.417938232421877],["▁dekabr",-13.417957305908203],["▁жылға",-13.417987823486328],["년간",-13.417990684509276],["▁Черно",-13.417998313903809],["պետ",-13.418004035949709],["▁arvuti",-13.418013572692873],["▁putere",-13.418023109436035],["▁Добре",-13.418033599853516],["ग्न",-13.41804313659668],["▁Okul",-13.418049812316896],["2-3",-13.41805362701416],["φύγ",-13.41807460784912],["▁saran",-13.41808795928955],["søgning",-13.41812515258789],["▁wann",-13.41812515258789],["▁corti",-13.418174743652344],["▁rain",-13.418183326721191],["gitur",-13.418209075927734],["დებიან",-13.418270111083984],["BH",-13.41827392578125],["ાને",-13.418275833129885],["▁lieber",-13.418282508850098],["۲۷",-13.41829776763916],["nyol",-13.418322563171388],["▁തീര",-13.418331146240234],["szeg",-13.418338775634766],["ക്കെ",-13.41834831237793],["▁szerep",-13.418350219726562],["યલ",-13.418356895446776],["▁Trek",-13.418363571166992],["审查",-13.418365478515623],["诉讼",-13.418371200561523],["مساعدة",-13.418375968933104],["サロン",-13.418377876281738],["▁диви",-13.41838264465332],["▁cynllun",-13.418387413024902],["▁බදු",-13.41838836669922],["▁gdyby",-13.418389320373535],["▁коом",-13.418390274047852],["▁նկար",-13.418390274047852],["ခေါ်",-13.418392181396484],["▁ограничи",-13.418392181396484],["Ơ",-13.4183931350708],["▁Afganistan",-13.4183931350708],["▁Maßnahm
en",-13.4183931350708],["▁Missatges",-13.4183931350708],["▁альтернатив",-13.4183931350708],["▁галузі",-13.4183931350708],["▁խորհուրդ",-13.4183931350708],["▁घंटे",-13.4183931350708],["tehnoloogia",-13.418394088745115],["▁άνθρωπος",-13.418396949768066],["▁மூன்று",-13.4183988571167],["▁orixe",-13.418399810791016],["▁težko",-13.418400764465332],["▁نمایندگان",-13.418405532836914],["▁мазнини",-13.41840934753418],["▁rezervuara",-13.418415069580078],["▁18:30",-13.418418884277344],["▁ነኝ",-13.418428421020508],["▁vásárol",-13.418432235717772],["ුනු",-13.41843318939209],["esnė",-13.41844081878662],["▁Anmeldung",-13.41844367980957],["ڳي",-13.418445587158203],["▁хүүхдийн",-13.418449401855469],["ေသာက္",-13.41847324371338],["▁ناول",-13.418477058410645],["▁gustaría",-13.41848373413086],["▁verra",-13.418486595153809],["ສະແດງ",-13.41849136352539],["ພິທີ",-13.418495178222656],["▁обладнання",-13.418496131896973],["▁yaşanan",-13.41850471496582],["사장",-13.418522834777832],["▁créé",-13.418526649475098],["နွ",-13.418538093566896],["▁ఒకటి",-13.41854476928711],["▁ਸਮਾਜ",-13.41855525970459],["1.0",-13.418563842773438],["▁Verander",-13.418585777282717],["▁odsto",-13.418588638305664],["▁फ्र",-13.418590545654297],["יכון",-13.418596267700195],["▁imenovan",-13.418596267700195],["▁przesz",-13.418604850769045],["▁Genç",-13.41861057281494],["▁Sparta",-13.41861057281494],["▁жазыл",-13.418618202209473],["ිලි",-13.418619155883787],["AQ",-13.418623924255373],["ഭവ",-13.418623924255373],["▁വല്ല",-13.41863250732422],["▁النص",-13.418652534484863],["▁пружа",-13.418654441833496],["▁malpli",-13.41868495941162],["▁ostane",-13.418703079223633],["▁ಜಯ",-13.418710708618164],["▁Germany",-13.418713569641112],["▁žali",-13.41873836517334],["▁eeuw",-13.418750762939451],["padá",-13.418758392333984],["▁објект",-13.4187650680542],["国内外",-13.41878890991211],["sebenzi",-13.41879177093506],["րին",-13.41879177093506],["▁quanti",-13.41880702972412],["者は",-13.418816566467283],["▁ہوا۔",-13.418837547302246],["▁arkadaşları",-13.418848991394045],["▁např",-13.418861389160156],["ક્ષણ",-13.418867111206056],["ຮັກສາ",-13.418879508972168],["▁রো",-13.418890953063965],["▁Zach",-13.418893814086914],["▁مقررات",-13.418912887573242],["ovec",-13.418932914733888],["▁Racing",-13.418941497802734],["▁කථා",-13.4189453125],["▁Usono",-13.418981552124023],["▁απέ",-13.418989181518556],["▁넘어",-13.419027328491213],["▁gjet",-13.419044494628906],["▁چرخ",-13.41905403137207],["штен",-13.41908836364746],["▁razvoju",-13.419127464294434],["▁décadas",-13.419148445129396],["解除",-13.419151306152344],["্টার",-13.419153213500977],["▁грижа",-13.419172286987305],["▁files",-13.419177055358888],["▁있음",-13.41918659210205],["▁linja",-13.419198989868164],["legra",-13.41920566558838],["โก้",-13.41921329498291],["водить",-13.41921615600586],["ាប",-13.419246673583984],["▁рођен",-13.419255256652832],["▁იმას",-13.41928005218506],["▁そう",-13.41928768157959],["▁dadurch",-13.41929817199707],["▁խորհրդ",-13.419310569763184],["▁одне",-13.41932201385498],["არქი",-13.419326782226562],["▁wp",-13.419336318969728],["▁नाग",-13.419336318969728],["ன்றி",-13.419363021850586],["गौ",-13.419370651245115],["▁ବୃଦ୍ଧି",-13.419381141662598],["पूर्वक",-13.41938304901123],["លើក",-13.419397354125977],["▁pime",-13.419416427612305],["▁يىلى",-13.419426918029783],["свят",-13.419461250305176],["ාය",-13.419461250305176],["▁izjavio",-13.419466972351074],["▁آلات",-13.419570922851562],["▁qon",-13.41957664489746],["پە",-13.41957950592041],["తలు",-13.419596672058104],["▁ร่วม",-13.419596672058104],["と思っている",-13.419598579406738],["▁συνεργ",-13.41
9620513916016],["海岸",-13.419632911682127],["qı",-13.419648170471191],["▁Bestellung",-13.419654846191406],["▁חייב",-13.419668197631836],["▁вашу",-13.419675827026367],["▁všeč",-13.41969394683838],["▁Luar",-13.419708251953123],["ICS",-13.419718742370604],["▁ការងារ",-13.419761657714844],["▁турски",-13.41976547241211],["▁Hlav",-13.419788360595703],["जान",-13.419797897338867],["▁Operation",-13.419817924499512],["ഓ",-13.419822692871094],["▁Θεσσαλονίκη",-13.419825553894045],["▁kokoa",-13.419836044311523],["▁petang",-13.41986083984375],["▁uvidí",-13.419865608215332],["飛行",-13.419877052307127],["▁השו",-13.419878959655762],["▁oldi",-13.419892311096191],["競爭",-13.419899940490724],["maksa",-13.419912338256836],["шісі",-13.419920921325684],["▁døren",-13.419925689697266],["HAY",-13.41993808746338],["ที่ดีที่สุดใน",-13.419949531555176],["▁habent",-13.419981002807615],["▁пром",-13.41999340057373],["ometer",-13.42002010345459],["慰",-13.42003345489502],["▁inima",-13.420036315917969],["▁tenang",-13.420038223266602],["ിയെ",-13.420044898986816],["ർക്ക",-13.42004680633545],["▁myynti",-13.420065879821776],["▁koper",-13.42007541656494],["▁Turbo",-13.420083045959473],["▁shqip",-13.420089721679688],["อารมณ์",-13.42010498046875],["▁көл",-13.420105934143066],["ፈት",-13.420106887817385],["ວຽງຈັນ",-13.4201078414917],["▁gyakran",-13.4201078414917],["▁sănătate",-13.4201078414917],["▁Порошенко",-13.4201078414917],["▁изцяло",-13.4201078414917],["▁Սուրբ",-13.4201078414917],["▁اچھی",-13.4201078414917],["▁पहुँच",-13.4201078414917],["cair",-13.420108795166016],["▁bữa",-13.420108795166016],["▁आरोग्य",-13.420108795166016],["▁മനസ്സിലാക്ക",-13.420108795166016],["አገሪቱ",-13.420109748840332],["ပြည်နယ်",-13.420111656188965],["▁Enterprise",-13.420113563537598],["▁постепенно",-13.420116424560549],["花蓮",-13.420116424560549],["oeuvre",-13.420119285583496],["▁старонку",-13.420119285583496],["▁drža",-13.420122146606444],["▁ئىسلام",-13.420127868652344],["▁irányít",-13.420129776000977],["▁Policía",-13.420132637023926],["▁Middle",-13.420136451721191],["▁Hrvatsku",-13.420148849487305],["▁sicuro",-13.420151710510254],["▁xidməti",-13.420151710510254],["▁مشتمل",-13.42015552520752],["▁अपेक्षा",-13.420170783996582],["ជ័យ",-13.420174598693848],["▁Nincs",-13.420174598693848],["ರಿಯಾ",-13.420178413391112],["▁Spletna",-13.420184135437012],["ಚರ",-13.420190811157228],["▁хүчний",-13.420190811157228],["▁అండ్",-13.420190811157228],["▁jendela",-13.420193672180176],["nawi",-13.42020320892334],["▁rừng",-13.420207023620604],["▁555",-13.420217514038086],["▁السلطة",-13.420236587524414],["▁ragione",-13.420239448547363],["▁കൊടുത്ത",-13.420244216918944],["▁apró",-13.420259475708008],["▁ठाउँमा",-13.420263290405272],["Film",-13.420266151428224],["kovej",-13.42027473449707],["▁søt",-13.420280456542969],["▁ਡਿ",-13.420297622680664],["▁saskaņā",-13.42030906677246],["▁يقدم",-13.420310974121094],["▁വരും",-13.420310974121094],["▁отец",-13.420321464538574],["ลุก",-13.420323371887209],["▁нисам",-13.42032527923584],["tycznie",-13.420331954956056],["▁paskir",-13.420334815979004],["▁соёл",-13.420342445373535],["▁minni",-13.420347213745115],["▁eşya",-13.42035675048828],["▁tolong",-13.42037296295166],["777",-13.420379638671877],["▁vết",-13.420403480529783],["oterapia",-13.420406341552734],["▁وارا",-13.420437812805176],["NOM",-13.420465469360352],["ცხო",-13.420493125915527],["▁ಪೋ",-13.420520782470703],["도가",-13.420551300048828],["▁qoraal",-13.420561790466309],["▁рэчы",-13.42056655883789],["▁нали",-13.420586585998535],["▁denbora",-13.420588493347168],["▁കേരളം",-13.420595169067385],["▁वाद"
,-13.420597076416016],["▁сипатта",-13.42060089111328],["ነጋ",-13.420612335205078],["▁blow",-13.420615196228027],["▁جاده",-13.420621871948242],["▁kterým",-13.42063808441162],["据了解",-13.42068576812744],["▁Mislim",-13.420693397521973],["▁гадаад",-13.420700073242188],["▁результаті",-13.420721054077148],["▁fx",-13.42074489593506],["▁consecuencia",-13.420745849609377],["▁nikdo",-13.420746803283691],["▁پچ",-13.420753479003906],["▁ට්",-13.420753479003906],["єкта",-13.420759201049805],["▁שד",-13.42076301574707],["шите",-13.420782089233398],["▁utro",-13.42078685760498],["▁ebenso",-13.42080020904541],["▁Republica",-13.420802116394045],["ಕಾಂ",-13.420817375183104],["▁delegat",-13.420819282531738],["▁Sept",-13.420845985412598],["▁বার্তা",-13.420846939086914],["▁portas",-13.420859336853027],["▁हेर्न",-13.42086124420166],["▁Koreya",-13.420890808105469],["高达",-13.420899391174316],["机器人",-13.42090129852295],["▁rese",-13.420906066894531],["សម្",-13.420923233032228],["ம்பி",-13.42094612121582],["LUS",-13.420966148376465],["▁хөл",-13.420966148376465],["પૂ",-13.420984268188477],["▁supone",-13.420987129211426],["▁stip",-13.420989990234377],["ҮР",-13.421011924743652],["ଳୀ",-13.42104148864746],["▁leveren",-13.421045303344728],["▁الدر",-13.421046257019045],["▁просвет",-13.421053886413574],["listan",-13.42105484008789],["▁dominio",-13.421080589294434],["▁samning",-13.421096801757812],["▁noorte",-13.421160697937012],["▁ауто",-13.421171188354492],["▁የዘ",-13.421183586120604],["▁надзора",-13.42120361328125],["▁Haq",-13.4212646484375],["မြင်",-13.421271324157717],["▁viagra",-13.42127513885498],["▁stanu",-13.421284675598145],["QU",-13.421305656433104],["▁ಬದುಕ",-13.42131233215332],["▁цель",-13.421313285827637],["▁قوة",-13.421317100524902],["adreça",-13.42133617401123],["▁Слова",-13.421345710754396],["▁politiku",-13.421359062194824],["▁tehnologij",-13.42136001586914],["▁стигна",-13.42140007019043],["▁codice",-13.421403884887695],["▁מגוון",-13.421417236328123],["▁сап",-13.421420097351074],["▁scio",-13.42143440246582],["▁Russian",-13.42146110534668],["▁пяти",-13.421481132507324],["▁érzés",-13.421516418457031],["▁karu",-13.421521186828612],["ваюцца",-13.42152214050293],["▁KAP",-13.42153263092041],["akit",-13.421534538269045],["หอ",-13.421544075012209],["patan",-13.421573638916016],["▁තියන",-13.421611785888672],["카드",-13.421626091003418],["renta",-13.421632766723633],["▁Hände",-13.42164134979248],["tades",-13.421655654907228],["▁মন",-13.42165756225586],["komunika",-13.421666145324709],["▁گیر",-13.421667098999023],["غی",-13.421670913696287],["плю",-13.42167854309082],["▁יח",-13.421690940856934],["▁njëri",-13.42169189453125],["▁نفسها",-13.421692848205566],["ሓ",-13.42169952392578],["▁vispār",-13.421710014343262],["▁ਪਿ",-13.421711921691896],["▁käe",-13.421712875366213],["жир",-13.42172145843506],["ျပည့္",-13.421737670898438],["IPO",-13.421740531921388],["▁merce",-13.421767234802246],["有機",-13.421768188476562],["棋",-13.421770095825195],["▁Cristian",-13.42177677154541],["遥",-13.42177963256836],["▁Kaup",-13.421796798706056],["▁Lise",-13.421796798706056],["штај",-13.42180061340332],["〕",-13.42180061340332],["ਾਤ",-13.421801567077637],["廚房",-13.42181396484375],["คล้าย",-13.421825408935549],["ခွဲ",-13.421825408935549],["▁Permainan",-13.421826362609863],["▁excited",-13.421826362609863],["▁gyümölcs",-13.421826362609863],["▁seharusnya",-13.421826362609863],["▁λόγια",-13.421826362609863],["▁внутрішньо",-13.421826362609863],["▁заболявания",-13.421826362609863],["▁ಗಾಂಧಿ",-13.421826362609863],["▁ಬೆಂಬಲ",-13.421826362609863],["▁верасня",-13.42182826
9958496],["▁က်ေနာ္",-13.421829223632812],["▁شکایت",-13.421832084655762],["▁ആഘോഷ",-13.421833992004396],["หนอง",-13.421836853027344],["▁eficaz",-13.42184066772461],["مراكز",-13.421845436096191],["▁rezolva",-13.421846389770508],["▁கொஞ்சம்",-13.421853065490724],["▁айрым",-13.42185401916504],["▁įvairių",-13.421855926513672],["▁аталган",-13.421862602233888],["▁चुके",-13.421865463256836],["nggah",-13.421866416931152],["▁kurze",-13.421870231628418],["▁سفیر",-13.42188549041748],["гара",-13.421889305114746],["▁ସମ୍ପର୍କରେ",-13.421889305114746],["▁zavatra",-13.421902656555176],["piro",-13.421917915344238],["▁сотрудников",-13.421930313110352],["produc",-13.421931266784668],["▁നിറഞ്ഞ",-13.421935081481934],["▁کاغذ",-13.421951293945312],["▁pleine",-13.421953201293944],["熱情",-13.421956062316896],["▁Yunan",-13.42196273803711],["▁niveles",-13.421985626220703],["เผ",-13.42203140258789],["▁이루어",-13.422048568725586],["▁ਪੇਸ਼",-13.422049522399902],["▁जरूर",-13.422069549560549],["▁مدى",-13.422099113464355],["علي",-13.422110557556152],["bber",-13.422146797180176],["▁cúi",-13.422148704528809],["具備",-13.42215061187744],["▁digitaal",-13.422154426574709],["ดารา",-13.422188758850098],["▁နား",-13.422192573547363],["ðinn",-13.422196388244627],["▁wobec",-13.422231674194336],["▁آئ",-13.42224407196045],["▁بور",-13.422252655029297],["▁synty",-13.422255516052246],["▁veli",-13.422276496887209],["▁nera",-13.422281265258787],["이기",-13.422282218933104],["▁Suh",-13.422285079956056],["▁odnosu",-13.422286987304688],["▁Naast",-13.422306060791016],["POR",-13.422307968139648],["▁gevra",-13.422310829162598],["ခါ",-13.422327041625977],["見た",-13.422335624694824],["سيطر",-13.422337532043455],["▁ընտանիք",-13.422346115112305],["кува",-13.422358512878418],["▁incat",-13.422385215759276],["▁وهذه",-13.422396659851074],["▁Üks",-13.422418594360352],["گرام",-13.422436714172363],["важно",-13.422441482543944],["▁түрлері",-13.422456741333008],["▁Focus",-13.422467231750488],["▁българските",-13.422480583190918],["▁regulament",-13.42249584197998],["obs",-13.42250633239746],["ටුව",-13.422526359558104],["▁frí",-13.422531127929688],["▁SEB",-13.422545433044434],["vík",-13.422553062438965],["დარ",-13.422558784484863],["имов",-13.422561645507812],["пка",-13.422579765319824],["නව",-13.422593116760254],["іки",-13.422603607177734],["sicherung",-13.422619819641112],["▁čeká",-13.422657012939451],["kortet",-13.422661781311035],["委会",-13.422673225402832],["▁בקר",-13.422686576843262],["wg",-13.422690391540527],["▁Fernseh",-13.422696113586426],["▁isten",-13.42269802093506],["tých",-13.422701835632324],["▁Wilayah",-13.422701835632324],["▁одбор",-13.42270278930664],["تعرف",-13.422706604003906],["▁romano",-13.422711372375488],["▁mechanizm",-13.422715187072754],["▁moden",-13.422723770141602],["▁Friends",-13.42273235321045],["▁lekin",-13.422735214233398],["▁қатысу",-13.422743797302246],["▁процедур",-13.422747611999512],["声音",-13.422768592834473],["Aus",-13.422786712646484],["▁pesos",-13.4227876663208],["▁Mater",-13.42280387878418],["6,000",-13.422816276550291],["▁gratuite",-13.422823905944824],["بها",-13.422842025756836],["的学习",-13.422859191894531],["kontoret",-13.422889709472656],["jevanje",-13.422893524169922],["▁Jenny",-13.422900199890137],["ელთა",-13.422910690307615],["這款",-13.422938346862791],["ခ်ိဳ",-13.422943115234377],["ゼ",-13.422947883605955],["▁Vlad",-13.422951698303224],["随时",-13.422956466674805],["тику",-13.422981262207031],["▁gleichen",-13.422982215881348],["ที่สําคัญ",-13.422990798950195],["▁Katar",-13.423003196716309],["学员",-13.42302131652832],["▁Innen",-13.4230
35621643066],["oleh",-13.423041343688965],["▁renom",-13.423047065734863],["提出的",-13.423063278198242],["kák",-13.423065185546877],["▁contin",-13.423066139221191],["▁astept",-13.423067092895508],["GW",-13.42307186126709],["▁zvezd",-13.423073768615724],["▁høst",-13.423084259033203],["лэлт",-13.423088073730469],["▁терп",-13.423105239868164],["▁demê",-13.423108100891112],["ようになった",-13.423125267028809],["▁dolce",-13.423128128051758],["แน",-13.423139572143556],["▁жалоб",-13.423144340515137],["▁دکھا",-13.423148155212402],["▁විතරක්",-13.423154830932615],["ערך",-13.423155784606934],["▁lietuvi",-13.42318058013916],["時刻",-13.423210144042969],["▁πρωί",-13.423213005065918],["cső",-13.42322063446045],["▁toimintaa",-13.423250198364258],["LAI",-13.42327117919922],["skild",-13.42327117919922],["retten",-13.423282623291016],["▁கலந்து",-13.42329216003418],["▁Liz",-13.423295974731444],["▁gegaan",-13.42330265045166],["▁funkció",-13.423321723937988],["▁mecanism",-13.423321723937988],["▁Проте",-13.423325538635254],["tiger",-13.42333698272705],["▁helburu",-13.423347473144531],["лөг",-13.42335033416748],["სავ",-13.42336368560791],["▁중요",-13.423375129699709],["▁Коз",-13.423377990722656],["▁ያሳ",-13.423395156860352],["金额",-13.423402786254885],["▁තෙ",-13.423431396484377],["▁တွေ",-13.423432350158691],["▁1°",-13.423434257507324],["Sol",-13.423437118530272],["óban",-13.423444747924805],["▁ciast",-13.423454284667969],["破坏",-13.423460960388184],["▁zë",-13.42346477508545],["▁zdraví",-13.42347526550293],["涵",-13.423480033874512],["挖",-13.423489570617676],["▁millest",-13.42349624633789],["扇",-13.423511505126951],["ងាយ",-13.423521041870115],["皮肤",-13.423521041870115],["▁雖然",-13.423526763916016],["殼",-13.42353057861328],["döntő",-13.423534393310549],["ເພີ່ມ",-13.423542022705078],["通讯",-13.423543930053713],["กางเกง",-13.423545837402344],["팩",-13.42354679107666],["▁Gábor",-13.423547744750977],["▁perioadă",-13.423547744750977],["▁інфармацыі",-13.423547744750977],["▁جګړه",-13.423547744750977],["▁हार्दिक",-13.423547744750977],["▁প্রার্থী",-13.423547744750977],["▁આનંદ",-13.423547744750977],["▁ସୁରକ୍ଷା",-13.423547744750977],["▁sëmundje",-13.423548698425291],["▁adlandır",-13.423551559448242],["▁Miller",-13.423562049865724],["▁കാഴ്ച",-13.423564910888672],["▁בטוח",-13.423565864562988],["kombin",-13.423572540283203],["▁Suzuki",-13.423572540283203],["▁atlikti",-13.423579216003418],["▁применения",-13.42358112335205],["▁эз",-13.423590660095217],["शंकर",-13.423592567443848],["▁Sohn",-13.423592567443848],["▁Inde",-13.423599243164062],["เจอร์",-13.423603057861328],["▁dedik",-13.42360496520996],["▁hạng",-13.423624992370604],["▁pradėjo",-13.423629760742188],["▁сучасна",-13.423641204833984],["一條",-13.423645973205566],["世界各地",-13.4236478805542],["▁percuma",-13.423651695251465],["▁Сав",-13.423654556274414],["▁ekspon",-13.423656463623049],["▁Laga",-13.423657417297363],["▁proceder",-13.42365837097168],["▁zmniejsz",-13.423672676086426],["რელი",-13.42367458343506],["▁afrika",-13.423677444458008],["▁fondos",-13.423677444458008],["▁ఉత్తర",-13.423683166503906],["cijų",-13.423688888549805],["ilmiştir",-13.423688888549805],["▁plezier",-13.423693656921388],["▁бірақ",-13.4237060546875],["లోనూ",-13.423747062683104],["liny",-13.423755645751951],["роо",-13.42380428314209],["▁snabb",-13.423835754394531],["▁tragi",-13.423848152160645],["aysa",-13.423852920532228],["Associazione",-13.423864364624023],["hild",-13.423869132995604],["▁hesabat",-13.423877716064451],["うまく",-13.4238862991333],["통신",-13.423888206481934],["▁गराउन",-13.423890113830566],["▁pašu",-13.423913002014
16],["▁veld",-13.423922538757324],["nomo",-13.423948287963867],["▁เป็นต้น",-13.423952102661133],["▁robić",-13.423961639404297],["▁приви",-13.423982620239258],["▁მია",-13.42398738861084],["▁Confe",-13.423992156982422],["▁Srbi",-13.423993110656738],["▁kape",-13.423993110656738],["▁לחו",-13.42399787902832],["рыз",-13.424007415771484],["ђује",-13.42401885986328],["▁begynder",-13.424020767211914],["код",-13.424046516418455],["▁Ponti",-13.424059867858888],["▁Perché",-13.42407512664795],["▁sounds",-13.424079895019531],["▁відділу",-13.424079895019531],["džius",-13.424081802368164],["▁жибер",-13.424098014831545],["▁dopad",-13.424099922180176],["▁püsi",-13.424102783203123],["φή",-13.424121856689451],["我很",-13.424123764038086],["▁INTE",-13.424127578735352],["▁биолог",-13.424128532409668],["▁bucura",-13.424159049987791],["▁відділ",-13.424160957336426],["▁Letras",-13.42418098449707],["מדינות",-13.4241943359375],["▁4.4",-13.4241943359375],["▁začel",-13.424200057983398],["▁dahi",-13.424232482910156],["▁yksilö",-13.424243927001951],["маль",-13.424260139465332],["▁faydalı",-13.424290657043455],["いろいろ",-13.424298286437988],["頑張って",-13.424306869506836],["▁šventė",-13.424307823181152],["▁Реч",-13.42431354522705],["对外",-13.424342155456545],["kosa",-13.42434310913086],["▁അങ്ങ",-13.42434310913086],["▁వచ్చా",-13.42435073852539],["▁Moro",-13.424354553222656],["्दी",-13.4243745803833],["▁بورڈ",-13.4243745803833],["▁laboro",-13.424386024475098],["▁negu",-13.424400329589844],["▁մասնակցել",-13.424418449401855],["த்துடன்",-13.424429893493652],["▁Euroopan",-13.4244384765625],["▁postęp",-13.42445182800293],["રત",-13.424504280090332],["▁Актив",-13.424505233764648],["▁۲۴",-13.424531936645508],["رئيس",-13.424551010131836],["жыў",-13.424560546875],["ležit",-13.424590110778809],["svēt",-13.42459774017334],["scéal",-13.424605369567873],["KAI",-13.424612045288086],["상의",-13.424615859985352],["гүл",-13.424616813659668],["が発生",-13.424617767333984],["ranse",-13.424620628356934],["▁idol",-13.424640655517578],["▁Virtu",-13.424656867980955],["شب",-13.424662590026855],["lending",-13.424681663513184],["ûrê",-13.424707412719728],["plads",-13.424721717834473],["▁מקצוע",-13.424736976623535],["▁korekt",-13.424745559692385],["▁Avenue",-13.424768447875977],["▁hutan",-13.424771308898926],["3.6",-13.424776077270508],["παρα",-13.424793243408203],["steder",-13.42482852935791],["seits",-13.424849510192873],["ОГО",-13.424863815307615],["յանն",-13.424870491027832],["▁bëni",-13.424870491027832],["擴",-13.424885749816896],["ШЕ",-13.424907684326172],["▁meilleurs",-13.424917221069336],["▁novega",-13.424922943115234],["вл",-13.42494010925293],["▁točno",-13.42499542236328],["▁privāt",-13.425021171569824],["steach",-13.42503261566162],["分为",-13.425043106079102],["رفت",-13.425045013427734],["成交",-13.425078392028809],["מוס",-13.42508029937744],["▁komissiya",-13.425094604492188],["ຽງ",-13.425108909606934],["▁traballos",-13.425153732299805],["▁වචන",-13.425177574157717],["▁클",-13.425182342529297],["ជួយ",-13.425200462341309],["▁դարձ",-13.425210952758787],["▁Watu",-13.425233840942385],["ЈУ",-13.425244331359863],["曜",-13.42525577545166],["▁göstərən",-13.425257682800291],["怡",-13.425260543823242],["▁муниципал",-13.425268173217772],["▁төлөөлөгч",-13.425271034240724],["▁Факултет",-13.42527198791504],["▁підтримки",-13.42527198791504],["▁януари",-13.42527198791504],["▁lucrări",-13.425272941589355],["ខ្សែ",-13.425273895263672],["▁учествува",-13.425273895263672],["▁вулиці",-13.42527675628662],["▁responsible",-13.42527961730957],["▁Қазіргі",-13.425283432006836],["▁जाएगी",-
13.425283432006836],["લ્લ",-13.425286293029783],["信頼",-13.425286293029783],["▁scène",-13.425291061401367],["▁غوښتنه",-13.425291061401367],["▁απαντ",-13.42529296875],["▁hawwe",-13.425294876098633],["执法",-13.42529582977295],["อัพ",-13.42530345916748],["▁ňom",-13.425304412841797],["▁giận",-13.425315856933594],["▁Біздің",-13.425331115722656],["▁לפחות",-13.425339698791504],["▁потужн",-13.425345420837402],["▁desenvolupament",-13.425349235534668],["▁məsələləri",-13.425363540649414],["▁стига",-13.425372123718262],["▁ٹیلی",-13.425372123718262],["▁tähendab",-13.425373077392578],["▁മുമ്പ",-13.42537784576416],["▁terem",-13.42540168762207],["▁អស់",-13.42540168762207],["inky",-13.425419807434082],["▁tuilleadh",-13.425427436828612],["tautu",-13.42543888092041],["▁שוק",-13.425439834594728],["▁ספורט",-13.42544651031494],["▁کلونو",-13.425447463989258],["غرق",-13.425456047058104],["▁Grace",-13.425467491149902],["▁Поради",-13.425503730773926],["նո",-13.42551040649414],["▁שבוע",-13.425515174865724],["▁авна",-13.425524711608888],["▁Spraw",-13.425530433654783],["WY",-13.42553424835205],["▁arvu",-13.425537109375],["+)",-13.425543785095217],["▁Абай",-13.425552368164062],["रोध",-13.425572395324709],["▁artikkelen",-13.425580024719238],["អាស៊ី",-13.42558479309082],["ሉን",-13.42560863494873],["▁принято",-13.425618171691896],["▁காலை",-13.42562198638916],["▁kiradi",-13.425626754760742],["ARTE",-13.425639152526855],["gitara",-13.425640106201172],["▁joga",-13.425652503967283],["affär",-13.425653457641602],["▁וכי",-13.425654411315918],["selskab",-13.425660133361816],["ബന്ധ",-13.42567253112793],["▁ishlash",-13.425673484802246],["▁Marcus",-13.425687789916992],["eremo",-13.42569351196289],["▁शव",-13.425700187683104],["▁Жал",-13.42575740814209],["하라",-13.425758361816406],["balo",-13.425761222839355],["رأ",-13.42576503753662],["▁התו",-13.425771713256836],["▁državi",-13.425782203674316],["▁klāt",-13.425786018371582],["ცეს",-13.42580795288086],["▁تحول",-13.425819396972656],["რილი",-13.42583179473877],["▁pojed",-13.425847053527832],["▁좌",-13.425885200500488],["wsze",-13.425890922546388],["▁κυβέρνησης",-13.42591667175293],["drome",-13.425918579101562],["五十",-13.425957679748535],["сцей",-13.4259672164917],["▁platit",-13.425981521606444],["ებიან",-13.42600154876709],["lainn",-13.42600440979004],["▁APK",-13.426005363464355],["▁pridruži",-13.426021575927734],["registru",-13.426061630249023],["ටී",-13.426063537597656],["rîn",-13.426070213317873],["રામ",-13.426071166992188],["frage",-13.426079750061035],["▁корисник",-13.42609977722168],["▁paesi",-13.426109313964844],["▁lajme",-13.426115036010742],["▁обекти",-13.426140785217283],["नू",-13.426167488098145],["▁actes",-13.42617130279541],["回应",-13.426177978515623],["ดุ",-13.42620086669922],["▁паля",-13.426201820373535],["▁انس",-13.426215171813965],["яй",-13.426217079162598],["▁grija",-13.426217079162598],["▁അറിയിച്ചു",-13.42625331878662],["▁oftast",-13.426265716552734],["عاش",-13.426329612731934],["▁एच",-13.426353454589844],["▁policies",-13.42636013031006],["もらえ",-13.42637825012207],["क्का",-13.426387786865234],["▁srečanj",-13.426392555236816],["伙伴",-13.426416397094728],["▁ປະເທດ",-13.426438331604004],["μπα",-13.426446914672852],["▁deserunt",-13.426447868347168],["નર",-13.426453590393066],["民众",-13.426457405090332],["▁Wada",-13.426469802856444],["सित",-13.42647933959961],["▁treeni",-13.426480293273926],["▁posebne",-13.426491737365724],["казы",-13.426512718200684],["▁Gute",-13.426518440246582],["▁presentació",-13.42653465270996],["▁bicara",-13.426544189453123],["▁علما",-13.426559448242188],["▁So
fi",-13.42658233642578],["▁مشتریان",-13.426586151123049],["▁Türkiyədə",-13.426590919494627],["ସୁ",-13.426593780517578],["unnar",-13.426641464233398],["▁mohol",-13.426671028137209],["കാരം",-13.42668628692627],["▁Методи",-13.42668914794922],["▁органдар",-13.426698684692385],["▁motif",-13.426738739013672],["ాల్సి",-13.426750183105469],["ෂිත",-13.4267578125],["ΔΙ",-13.426785469055176],["ζουν",-13.426801681518556],["在大",-13.426810264587402],["บอร์ด",-13.4268159866333],["▁लक्ष",-13.42682647705078],["▁기회",-13.426836967468262],["▁primari",-13.426838874816896],["▁konkurranse",-13.42684841156006],["▁fascina",-13.426891326904297],["▁жау",-13.42690658569336],["ቶቹ",-13.426956176757812],["浅",-13.426957130432127],["▁yerdə",-13.426962852478027],["꽃",-13.42696475982666],["和谐",-13.42697811126709],["徹底",-13.426980018615724],["わたし",-13.42698860168457],["▁təmsil",-13.426996231079102],["▁පෞද්ගලික",-13.426998138427734],["☺",-13.426998138427734],["▁Wszystko",-13.42699909210205],["▁mbrojtje",-13.42699909210205],["▁mengembangkan",-13.42699909210205],["▁հետաքրքր",-13.42699909210205],["▁الانتخابات",-13.42699909210205],["▁बधाई",-13.42699909210205],["▁আয়োজন",-13.42699909210205],["▁pieprasī",-13.427000045776367],["▁yoyote",-13.427000045776367],["สาเหตุ",-13.427003860473633],["▁νίκη",-13.427007675170898],["▁cosmetic",-13.427009582519531],["▁পেয়ে",-13.427014350891112],["▁судебны",-13.42701530456543],["▁உண்டு",-13.427023887634276],["的部分",-13.427024841308594],["képzés",-13.42702579498291],["▁അഭിപ്രായങ്ങള്",-13.42702579498291],["▁۲۲",-13.42703342437744],["▁فرمانده",-13.427041053771973],["▁tekmovanj",-13.427042961120604],["▁задава",-13.42705249786377],["සැ",-13.427053451538086],["▁смерть",-13.427057266235352],["▁ಎಂದರು",-13.4270601272583],["▁üsna",-13.42706298828125],["▁საქმეთა",-13.427068710327148],["▁ստացել",-13.427071571350098],["▁illius",-13.427077293395996],["▁Palla",-13.42708969116211],["▁Gymru",-13.42710018157959],["шната",-13.427112579345703],["▁Mostra",-13.4271240234375],["▁Едно",-13.427138328552246],["nytt",-13.427139282226562],["ways",-13.427141189575195],["▁моменти",-13.427154541015623],["▁palestra",-13.42715549468994],["▁gyárt",-13.42715835571289],["▁xương",-13.427170753479004],["▁although",-13.427188873291016],["▁Определение",-13.427218437194824],["Form",-13.42722511291504],["▁povedať",-13.427239418029783],["▁نکات",-13.427242279052734],["▁měst",-13.427284240722656],["▁falan",-13.427289009094238],["험",-13.427292823791504],["健全",-13.427301406860352],["stod",-13.42732048034668],["ทั่ว",-13.427337646484377],["▁devenir",-13.427339553833008],["分かり",-13.427350997924805],["▁festivali",-13.427360534667969],["ြန္း",-13.4273681640625],["jës",-13.427373886108398],["éticos",-13.42738437652588],["的一部分",-13.427388191223145],["▁queria",-13.42739200592041],["▁Unite",-13.427401542663574],["▁vejen",-13.42742919921875],["stað",-13.427433967590332],["▁toalet",-13.427435874938965],["▁slabi",-13.42743682861328],["▁champion",-13.42745304107666],["▁kasvi",-13.427468299865724],["ээн",-13.42748737335205],["čnou",-13.427491188049316],["▁ಪರೀಕ್ಷೆ",-13.427495002746582],["▁ganhar",-13.42750644683838],["sagen",-13.427518844604492],["خە",-13.42752456665039],["▁هوش",-13.427528381347656],["▁comentario",-13.427532196044922],["раження",-13.42754364013672],["▁visel",-13.42757797241211],["▁састав",-13.427581787109377],["▁aminte",-13.427593231201172],["▁mase",-13.427597999572754],["▁père",-13.42760181427002],["pojen",-13.427606582641602],["▁Lauri",-13.427606582641602],["▁Анан",-13.427610397338867],["っち",-13.427616119384766],["協力",-13.427616119384766],[
"▁racional",-13.427619934082031],["جون",-13.427621841430664],["▁පාර්ලිමේන්තු",-13.427637100219728],["中国共产党",-13.427645683288574],["▁ക്കുന്ന",-13.427651405334473],["▁مذهب",-13.427677154541016],["▁රදේශයේ",-13.427724838256836],["דיק",-13.427736282348633],["▁орай",-13.427752494812012],["ښي",-13.427770614624023],["▁የግ",-13.427773475646973],["gueira",-13.427783966064451],["かと思います",-13.427789688110352],["▁knyttet",-13.427790641784668],["छि",-13.427807807922363],["を作る",-13.427826881408691],["▁erityis",-13.42783260345459],["▁Maps",-13.427838325500488],["ативни",-13.427839279174805],["чів",-13.427870750427246],["▁promise",-13.427870750427246],["បាត់",-13.427889823913574],["ānu",-13.427892684936523],["JM",-13.427915573120115],["司机",-13.427933692932127],["▁razm",-13.427935600280762],["અલ",-13.42794418334961],["▁Tiene",-13.427998542785645],["▁odds",-13.428020477294922],["▁Llo",-13.428033828735352],["▁egészen",-13.428038597106934],["ደል",-13.428045272827148],["▁తెలుసు",-13.428059577941896],["▁hurt",-13.428062438964844],["tape",-13.42806339263916],["ુર",-13.428072929382324],["▁финансира",-13.428075790405272],["aardig",-13.428078651428224],["zije",-13.42808723449707],["消除",-13.428089141845703],["▁දෙනෙකු",-13.428101539611816],["▁karti",-13.428106307983398],["▁Svart",-13.428133964538574],["▁בריאות",-13.428168296813965],["▁pipi",-13.428199768066406],["▁bonnes",-13.428208351135254],["▁polisie",-13.428217887878418],["▁бүрийн",-13.428218841552734],["288",-13.428227424621582],["ٿي",-13.428253173828123],["ପକ୍ଷ",-13.42825698852539],["чот",-13.428263664245604],["▁заповед",-13.428281784057615],["規則",-13.42829704284668],["▁öffentlichen",-13.428315162658691],["▁смени",-13.428338050842283],["▁derniers",-13.428343772888184],["oslav",-13.428363800048828],["▁එකෙන්",-13.428389549255373],["▁선물",-13.428390502929688],["▁məqalələr",-13.42839241027832],["ālajā",-13.428397178649902],["▁bende",-13.4284086227417],["▁ბანკ",-13.428414344787598],["zaki",-13.428418159484863],["▁shkon",-13.428437232971191],["▁افتاد",-13.428454399108888],["роби",-13.42848014831543],["PLE",-13.42850399017334],["▁ایپ",-13.428511619567873],["▁affair",-13.42851448059082],["▁ေတြ",-13.42851448059082],["ပါေစ",-13.428515434265137],["▁членов",-13.428576469421388],["▁douche",-13.428581237792969],["▁құры",-13.428584098815918],["haar",-13.428590774536133],["购物",-13.428619384765623],["ziņu",-13.428624153137209],["mahal",-13.428644180297852],["ंवर",-13.42865753173828],["ဆွေးနွေး",-13.428674697875977],["▁ກອງ",-13.428677558898926],["нями",-13.428680419921877],["▁khoá",-13.42868709564209],["▁инвестиция",-13.428704261779783],["▁مادی",-13.428709983825684],["▁maklum",-13.428718566894531],["裙",-13.428719520568848],["ИГ",-13.428722381591797],["ኡ",-13.428729057312012],["▁kekurangan",-13.428729057312012],["▁municipais",-13.428729057312012],["▁sunnuntai",-13.428729057312012],["▁terbukti",-13.428729057312012],["▁γλώσσα",-13.428729057312012],["▁τέτοια",-13.428729057312012],["▁قیادت",-13.428729057312012],["▁ፕሮግራም",-13.428729057312012],["▁matokeo",-13.428730010986328],["▁उत्साह",-13.428730010986328],["▁апостол",-13.428730964660645],["▁GRATIS",-13.42873191833496],["▁సూపర్",-13.42873191833496],["▁형태",-13.428739547729492],["▁uspešno",-13.428743362426758],["▁born",-13.428744316101074],["▁болохоор",-13.42874813079834],["▁законодавства",-13.428752899169922],["मधून",-13.428753852844238],["▁ገጽ",-13.428759574890137],["ີ່",-13.428763389587402],["▁belangrijke",-13.42877960205078],["▁अम",-13.428800582885742],["하나",-13.428810119628906],["▁приче",-13.428813934326172],["▁Très",-13.428815841674805],
["ility",-13.428820610046388],["▁shkrim",-13.428827285766602],["▁એવો",-13.428828239440918],["▁bottom",-13.428844451904297],["▁Malin",-13.428852081298828],["▁команди",-13.42886447906494],["▁പാട",-13.428869247436523],["▁출발",-13.428869247436523],["istele",-13.428890228271484],["ปีที่",-13.428900718688965],["▁بازیگر",-13.428903579711914],["▁llegir",-13.428912162780762],["▁Lorenzo",-13.428929328918455],["▁Rusiyanın",-13.428954124450684],["נהל",-13.428955078125],["▁oamenilor",-13.428956031799316],["machine",-13.428961753845217],["▁стоял",-13.428967475891112],["▁Kanna",-13.428970336914062],["чылі",-13.42897129058838],["▁ميدان",-13.42897605895996],["▁എന്ത്",-13.42897891998291],["▁eleven",-13.428984642028809],["▁свр",-13.428984642028809],["prezent",-13.428993225097656],["▁расходов",-13.428994178771973],["▁tablic",-13.429039001464844],["▁สาย",-13.429047584533691],["▁Brut",-13.429048538208008],["વાસ",-13.429049491882324],["▁සැම",-13.429052352905272],["▁میاشت",-13.42906665802002],["ခန်း",-13.429086685180664],["▁Interessen",-13.429115295410156],["▁srca",-13.429141998291016],["επι",-13.429183959960938],["▁بیوی",-13.429189682006836],["▁Pinang",-13.429194450378418],["の話",-13.429198265075684],["▁определи",-13.42919921875],["▁orbit",-13.429204940795898],["szerkesztés",-13.429211616516112],["▁Kund",-13.42922306060791],["▁الجزائر",-13.429235458374023],["▁Unii",-13.42923641204834],["OMO",-13.429245948791504],["▁مطالبہ",-13.42927074432373],["ướng",-13.429275512695312],["▁Fue",-13.429282188415527],["ורג",-13.42928409576416],["▁ਪਤਾ",-13.42928981781006],["Tavs",-13.429313659667969],["▁asesor",-13.429319381713867],["interesse",-13.429320335388184],["၁၉",-13.429327011108398],["లేక",-13.429327964782717],["òl",-13.429329872131348],["▁జయ",-13.429332733154297],["markað",-13.42934513092041],["▁రీ",-13.429357528686523],["▁incluí",-13.429359436035156],["▁função",-13.429372787475586],["▁muutos",-13.429375648498535],["▁ομο",-13.429381370544434],["▁Spy",-13.429388046264648],["lunga",-13.429471015930176],["▁فقر",-13.429505348205566],["▁PAG",-13.429520606994627],["казаў",-13.429545402526855],["astro",-13.429561614990234],["▁филма",-13.429566383361816],["▁מסלול",-13.429604530334473],["般的",-13.429608345031738],["▁الحسن",-13.429614067077637],["▁redakti",-13.42961597442627],["пта",-13.4296236038208],["▁pagtata",-13.42966079711914],["▁highly",-13.429664611816406],["лардан",-13.4296875],["▁vəziyyəti",-13.429691314697266],["▁منصور",-13.429692268371582],["▁instalación",-13.429710388183594],["▁Avis",-13.429720878601074],["▁Sli",-13.429726600646973],["બાર",-13.429728507995604],["▁أساس",-13.429730415344238],["▁2015)",-13.429732322692873],["americana",-13.429763793945312],["正确的",-13.429766654968262],["▁своему",-13.429771423339844],["itelji",-13.429773330688477],["కార్",-13.429780006408691],["▁작가",-13.429791450500488],["▁prosjekt",-13.429815292358398],["▁nyilatkozat",-13.429841995239258],["وجود",-13.429864883422852],["učí",-13.42987060546875],["▁Rabu",-13.429879188537598],["▁видели",-13.42989730834961],["ávání",-13.429905891418455],["დევ",-13.429908752441406],["▁получили",-13.42991065979004],["▁razu",-13.42992877960205],["▁möjligheter",-13.42993450164795],["電池",-13.429939270019531],["úirt",-13.429941177368164],["▁явж",-13.429950714111328],["のもの",-13.429965019226074],["找不到",-13.429973602294922],["ក្បាល",-13.429978370666504],["认证",-13.429984092712402],["▁engin",-13.429988861083984],["ವಾಹ",-13.43002223968506],["▁SPD",-13.430030822753906],["istin",-13.430033683776855],["▁ជួប",-13.430058479309082],["▁ஆரம்ப",-13.430084228515623],["▁puš",-13.430109
024047852],["reisen",-13.430135726928713],["ಲಿನ",-13.430136680603027],["▁piesa",-13.430145263671877],["цент",-13.43014907836914],["▁كوم",-13.430152893066406],["▁هنری",-13.430163383483888],["▁képest",-13.430164337158203],["▁waxaad",-13.430176734924316],["認真",-13.430200576782228],["▁fifa",-13.430217742919922],["神奇",-13.430227279663086],["schnitt",-13.430331230163574],["▁новиот",-13.430334091186523],["Open",-13.430355072021484],["▁ব্যাংক",-13.430376052856444],["▁Лин",-13.43038845062256],["砂",-13.430411338806152],["administra",-13.430418968200684],["getragen",-13.430420875549316],["वाई",-13.430429458618164],["▁házi",-13.430437088012695],["邁",-13.430438041687012],["▁नोटिफिकेशन",-13.43044662475586],["▁kupiti",-13.430453300476074],["▁نگه",-13.430456161499023],["続きを読む",-13.430460929870604],["ေထာက္",-13.430461883544922],["▁changamoto",-13.430461883544922],["▁umsagnir",-13.430461883544922],["▁αστυνομικ",-13.430461883544922],["▁Міністэрства",-13.430461883544922],["▁گسترده",-13.430461883544922],["▁ଆଲୋଚନା",-13.430461883544922],["▁розташован",-13.430462837219238],["▁държавни",-13.430463790893556],["▁smislu",-13.430464744567873],["▁perjantai",-13.430465698242188],["mašīna",-13.43046760559082],["▁intenziv",-13.43046760559082],["สัญญาณ",-13.430469512939451],["▁زيارة",-13.430469512939451],["▁ڪيئن",-13.430477142333984],["โรงงาน",-13.430481910705566],["▁গিয়ে",-13.430481910705566],["▁بگذارید",-13.430482864379885],["יכול",-13.430484771728516],["▁vtedy",-13.430487632751465],["▁관광",-13.43049144744873],["▁зави",-13.430492401123049],["раган",-13.430496215820312],["▁inaczej",-13.430502891540527],["▁Vatican",-13.430517196655272],["▁لماذا",-13.430549621582031],["▁vazhdon",-13.430556297302246],["▁מראש",-13.430556297302246],["▁institucija",-13.430559158325195],["▁bege",-13.430566787719728],["απο",-13.430571556091309],["▁رنز",-13.43057346343994],["▁Agency",-13.430580139160156],["▁wynosi",-13.430580139160156],["யால்",-13.430584907531738],["▁aradan",-13.430591583251951],["▁ndege",-13.430635452270508],["ຂະ",-13.43064785003662],["خلي",-13.430648803710938],["ขอให้",-13.430649757385254],["▁અં",-13.430651664733888],["▁മാസം",-13.430658340454102],["ଳ୍ପ",-13.430665969848633],["▁نمائند",-13.430672645568848],["мся",-13.43067455291748],["▁Mic",-13.430679321289062],["altı",-13.430682182312012],["ሰሩ",-13.430692672729492],["mettere",-13.430709838867188],["いくつか",-13.43071746826172],["▁التج",-13.430747985839844],["зап",-13.430756568908691],["▁Freund",-13.430768966674805],["▁tekur",-13.430779457092283],["▁Number",-13.430781364440918],["▁இல்ல",-13.430788040161133],["ումները",-13.43079662322998],["▁JI",-13.43079662322998],["▁Spider",-13.43079662322998],["IŠ",-13.430808067321776],["poliitika",-13.430824279785156],["▁tænke",-13.430832862854004],["▁ರೈತರ",-13.430832862854004],["ИСТ",-13.43085765838623],["енная",-13.430872917175291],["▁나오",-13.430875778198242],["▁Oyunları",-13.43087673187256],["▁investitor",-13.430880546569824],["▁módosítás",-13.430886268615724],["ریز",-13.43088722229004],["тюр",-13.430898666381836],["▁dø",-13.430907249450684],["続けて",-13.43091106414795],["▁frequent",-13.430913925170898],["▁అడుగు",-13.430919647216797],["▁слух",-13.430927276611328],["wasa",-13.43092918395996],["ьян",-13.430947303771973],["▁IPL",-13.430949211120604],["OJA",-13.430951118469238],["航空公司",-13.430975914001465],["ೀಕರಣ",-13.43097686767578],["▁тэрмін",-13.431001663208008],["▁služba",-13.43100357055664],["▁plačil",-13.431008338928224],["▁toekoms",-13.43101692199707],["▁érdekel",-13.431045532226562],["corso",-13.431050300598145],["▁порядка",-13.43107318878173
8],["▁Rasmi",-13.431075096130373],["කාරී",-13.431102752685549],["▁pourtant",-13.431105613708496],["ିଂ",-13.431118965148926],["lesen",-13.431124687194824],["зре",-13.43113136291504],["æmi",-13.43114948272705],["▁молитв",-13.431151390075684],["НР",-13.431159973144531],["▁kenne",-13.43116283416748],["▁säker",-13.43116283416748],["მერ",-13.431182861328123],["▁प्रिय",-13.431184768676758],["▁округа",-13.431200981140137],["▁arkadaşı",-13.431228637695312],["▁наслов",-13.431229591369627],["▁Dave",-13.431243896484377],["혁신",-13.431264877319336],["HIT",-13.431266784667969],["שכר",-13.431280136108398],["▁tệ",-13.431283950805664],["▁oluşu",-13.431292533874512],["▁երեխաների",-13.431302070617676],["▁Taba",-13.43130874633789],["▁programs",-13.43132781982422],["чылар",-13.43134880065918],["แอบ",-13.431402206420898],["tanggap",-13.431432723999023],["▁UEA",-13.431443214416504],["owicz",-13.431466102600098],["▁καιρό",-13.431477546691896],["▁ВИ",-13.431517601013184],["▁хув",-13.431524276733398],["▁ବ୍ୟାଙ୍କ",-13.431533813476562],["▁sting",-13.431570053100586],["ўскай",-13.431575775146484],["▁типу",-13.431584358215332],["人事",-13.43158721923828],["trí",-13.431595802307127],["▁staty",-13.43160343170166],["▁айтыл",-13.43161392211914],["▁skvel",-13.43162441253662],["▁történ",-13.431632041931152],["ၿ",-13.431662559509276],["▁hayata",-13.431681632995604],["讲话",-13.431682586669922],["สภา",-13.431684494018556],["▁kommunik",-13.43169116973877],["▁užtikrin",-13.431707382202148],["▁papu",-13.431740760803224],["▁чији",-13.43174934387207],["▁නැත්ත",-13.431753158569336],["නයක්",-13.431775093078612],["미술",-13.431777954101562],["▁Gesundheit",-13.431781768798828],["ministro",-13.431782722473145],["▁โดยมี",-13.431793212890623],["avisen",-13.431812286376951],["▁vaiheessa",-13.431815147399902],["▁Planung",-13.431817054748535],["ỳ",-13.431821823120115],["----------------",-13.431851387023926],["▁phu",-13.431854248046877],["▁dispozi",-13.431872367858888],["bunden",-13.43190097808838],["▁Publicat",-13.431910514831545],["זים",-13.431915283203123],["▁cơm",-13.43191623687744],["▁працу",-13.431921005249023],["▁الإسرائيلي",-13.431930541992188],["▁menyebut",-13.431949615478516],["▁sabar",-13.431960105895996],["▁oyunları",-13.431970596313477],["شناس",-13.43198013305664],["中山",-13.431987762451172],["▁өзгерістер",-13.432014465332031],["Ки",-13.432051658630373],["▁внос",-13.432069778442385],["那天",-13.432090759277344],["▁inveni",-13.43209171295166],["▁կանոն",-13.432108879089355],["gâ",-13.43212604522705],["ようで",-13.432127952575684],["Монгол",-13.432161331176758],["および",-13.432178497314451],["螢幕",-13.432183265686035],["スタイル",-13.432186126708984],["▁நம்ம",-13.432188987731934],["កណ្តាល",-13.43219757080078],["▁අපරාධ",-13.43219757080078],["רפואה",-13.432198524475098],["▁Mangeshkar",-13.432198524475098],["▁jarduera",-13.432198524475098],["▁numérique",-13.432198524475098],["▁urządzeń",-13.432198524475098],["▁вышэй",-13.432198524475098],["▁టాప్",-13.432198524475098],["▁እርምጃ",-13.432198524475098],["▁ዜጎች",-13.432198524475098],["▁Diğer",-13.432199478149414],["▁اسڪول",-13.432199478149414],["▁ogystal",-13.432202339172363],["▁وأكد",-13.43220329284668],["gán",-13.432207107543944],["▁ବିକାଶ",-13.432207107543944],["▁הזאת",-13.432208061218262],["▁അവസ്ഥ",-13.432208061218262],["▁Berdasarkan",-13.432212829589844],["▁Информация",-13.432218551635742],["▁үйлдвэрлэл",-13.432221412658691],["wikiloc",-13.432225227355955],["▁ಎಷ್ಟು",-13.432233810424805],["▁ايضا",-13.432235717773438],["▁χέρια",-13.43223762512207],["観光",-13.432241439819336],["▁hətta",-13.432245254516602],["▁Shei
kh",-13.4322509765625],["▁долг",-13.4322509765625],["▁አለበት",-13.432252883911133],["违反",-13.432257652282717],["▁bladsy",-13.43226146697998],["▁activity",-13.432266235351562],["列表",-13.43227481842041],["▁одсто",-13.43228816986084],["▁ಮೊದಲು",-13.432308197021484],["▁eska",-13.4323091506958],["▁nagka",-13.4323148727417],["▁dead",-13.432315826416016],["▁stii",-13.432317733764648],["までは",-13.432329177856444],["▁recuperación",-13.432333946228027],["▁অভিযান",-13.43234157562256],["▁мисс",-13.432348251342772],["▁lumière",-13.432350158691406],["▁küzd",-13.432353973388672],["男生",-13.432357788085938],["▁වන්න",-13.432381629943848],["谈判",-13.432384490966797],["vaks",-13.432429313659668],["▁Европейския",-13.432433128356934],["▁minangka",-13.432456016540527],["▁Φιλ",-13.432467460632324],["▁tehlike",-13.432488441467283],["▁pattern",-13.432501792907717],["▁метода",-13.432509422302246],["აშვილის",-13.432515144348145],["▁პასუხი",-13.432525634765623],["▁ساي",-13.432533264160156],["▁ਚੁ",-13.432536125183104],["▁ចំ",-13.43254280090332],["лука",-13.432608604431152],["▁gcu",-13.43262004852295],["▁kendine",-13.432621955871582],["▁ମୋଦି",-13.4326753616333],["推出的",-13.432682037353516],["▁고려",-13.432689666748049],["▁ຈັດ",-13.432692527770996],["▁piazza",-13.432705879211426],["mático",-13.43271827697754],["ਂਦਾ",-13.43272876739502],["▁SSL",-13.432738304138184],["▁զոհ",-13.432748794555664],["▁అప్",-13.432767868041992],["维修",-13.432769775390623],["▁хама",-13.43277645111084],["▁portato",-13.432784080505373],["▁Hesab",-13.432793617248535],["σίας",-13.432794570922852],["▁берем",-13.432794570922852],["▁symud",-13.4327974319458],["▁detalla",-13.432806015014648],["▁Alloh",-13.432830810546877],["▁ääne",-13.43284034729004],["▁벌",-13.43284511566162],["▁хү",-13.432847023010254],["ဗ်ာ",-13.432861328125],["▁banku",-13.432868003845217],["▁Suc",-13.43287467956543],["▁meslek",-13.432910919189451],["▁senzor",-13.432923316955566],["كرا",-13.432928085327148],["▁pašvaldības",-13.432944297790527],["šnja",-13.432948112487791],["จํานวนมาก",-13.432952880859377],["vrij",-13.432960510253906],["▁stijl",-13.432991027832031],["▁odloči",-13.433051109313965],["ségét",-13.433127403259276],["kampanj",-13.433135986328123],["分の",-13.433177947998049],["पत्",-13.433192253112791],["▁дає",-13.433226585388184],["▁uutis",-13.433229446411133],["▁takımı",-13.433247566223145],["▁거리",-13.433263778686523],["▁кайсы",-13.43326473236084],["巴士",-13.433283805847168],["▁максат",-13.433298110961914],["രേ",-13.433304786682127],["▁இற",-13.433321952819824],["▁impun",-13.433323860168455],["▁hämta",-13.433326721191406],["▁richtigen",-13.43333339691162],["руга",-13.433340072631836],["广场",-13.433344841003418],["િસ",-13.433353424072266],["実行",-13.433368682861328],["▁kotona",-13.433378219604492],["▁fizic",-13.4334077835083],["▁yame",-13.4334077835083],["考核",-13.43341064453125],["פתי",-13.433452606201172],["IoT",-13.433453559875488],["Ազատություն",-13.433459281921388],["екции",-13.433472633361816],["▁cyklu",-13.43347454071045],["ဟို",-13.433477401733398],["▁jezelf",-13.43348503112793],["▁Základ",-13.43349266052246],["mówić",-13.433541297912598],["▁sykdom",-13.433574676513672],["कल्प",-13.43358325958252],["iyaa",-13.433629035949709],["▁پیک",-13.433632850646973],["zain",-13.433639526367188],["rjocht",-13.43366813659668],["ưởng",-13.433669090270996],["▁beneficio",-13.43368148803711],["▁Magazin",-13.433710098266602],["صوب",-13.433724403381348],["ဟုတ္",-13.433728218078612],["▁Pomp",-13.43374729156494],["▁dienen",-13.433761596679688],["▁daļas",-13.433780670166016],["▁khóc",-13.433789253234863],
["ပ်က္",-13.433796882629396],["▁देउवा",-13.433796882629396],["ช่วยเหลือ",-13.433825492858888],["άδες",-13.43382740020752],["fokus",-13.433830261230469],["->",-13.433838844299316],["▁resolve",-13.433855056762695],["Жи",-13.433867454528809],["аба",-13.433886528015137],["▁កា",-13.433894157409668],["慕",-13.433895111083984],["વાળા",-13.433908462524414],["▁χαρακτηρ",-13.43393325805664],["มงคล",-13.433937072753906],["▁Thursday",-13.433937072753906],["▁Thần",-13.433937072753906],["▁olahraga",-13.433937072753906],["▁ábyrgð",-13.433937072753906],["▁Γερμανία",-13.433937072753906],["▁բողոք",-13.433937072753906],["▁վնաս",-13.433937072753906],["▁समीक्षा",-13.433937072753906],["▁ফেব্রুয়ারি",-13.433937072753906],["▁సంచలన",-13.433937072753906],["▁ಸಿದ್ದರಾಮಯ್ಯ",-13.433937072753906],["▁Això",-13.433938026428224],["▁birîndar",-13.433938026428224],["▁memasuki",-13.433938026428224],["▁Халықаралық",-13.433938026428224],["▁विधायक",-13.433938026428224],["▁gimnazij",-13.433940887451172],["▁órgano",-13.433941841125488],["▁leeftijd",-13.43394374847412],["▁महासंघ",-13.433945655822754],["▁županije",-13.43394660949707],["▁ජනාධිපතිවරයා",-13.43394660949707],["▁భర్త",-13.433953285217283],["▁cioè",-13.433955192565918],["▁پاڼې",-13.433956146240234],["პრეს",-13.433960914611816],["▁vasitəsilə",-13.433964729309082],["▁deportiva",-13.433966636657717],["▁sengaja",-13.433971405029297],["▁logiciel",-13.433981895446776],["▁ఒకే",-13.433988571166992],["tajat",-13.43400764465332],["▁עליה",-13.434015274047852],["bayan",-13.43402099609375],["ဘု",-13.434027671813965],["הרשמה",-13.43402862548828],["▁зөвхөн",-13.434045791625977],["▁arbeitet",-13.434048652648926],["▁ทัวร์",-13.434052467346191],["▁Czech",-13.434057235717772],["Ձ",-13.43405818939209],["▁найз",-13.434100151062012],["กํา",-13.434103965759276],["▁Praktik",-13.434103965759276],["▁veebi",-13.434111595153809],["ësuar",-13.434122085571287],["marketing",-13.43412971496582],["geht",-13.434134483337402],["▁Cem",-13.434154510498049],["▁insect",-13.434159278869627],["▁enako",-13.434165000915527],["▁जिसे",-13.434167861938477],["நாள்",-13.434173583984377],["delingen",-13.434176445007324],["▁Etelä",-13.434178352355955],["▁ماند",-13.434178352355955],["ભાવ",-13.434181213378906],["▁собственно",-13.434182167053224],["าร",-13.434185981750488],["▁културни",-13.434200286865234],["إصدار",-13.434221267700195],["▁szar",-13.434228897094728],["▁записа",-13.43423557281494],["logie",-13.434247016906738],["▁ਸਿਰ",-13.434250831604004],["▁analisa",-13.434266090393066],["▁परम",-13.434269905090332],["▁batterie",-13.43429183959961],["▁mfano",-13.434306144714355],["▁Puede",-13.434317588806152],["▁आरोपी",-13.434329986572266],["▁Produc",-13.434330940246582],["▁Ró",-13.434337615966797],["▁निगम",-13.434345245361328],["▁megfelelően",-13.434364318847656],["▁BİR",-13.434374809265137],["▁mattina",-13.434391021728516],["ျမတ္",-13.434401512145996],["▁actua",-13.434410095214844],["LAY",-13.434436798095703],["boje",-13.434453010559082],["ലോകത്തെ",-13.434462547302246],["▁년",-13.43446445465088],["ົ້",-13.434470176696776],["▁грунт",-13.434473991394045],["▁ಪ್ರಯತ್ನ",-13.434476852416992],["▁육",-13.434480667114258],["поз",-13.434493064880373],["▁pedagogi",-13.434507369995115],["321",-13.434532165527344],["▁Duniya",-13.434532165527344],["▁vegg",-13.434544563293455],["▁становить",-13.434568405151367],["▁1994.",-13.434569358825684],["▁radhë",-13.434603691101074],["▁пункту",-13.434626579284668],["ānā",-13.434633255004885],["▁umjetni",-13.434637069702148],["ijų",-13.434647560119627],["голямата",-13.434653282165527],["▁बिल",-13.4346590042
11426],["▁résultat",-13.43466567993164],["▁sàn",-13.43466567993164],["ரங்க",-13.434680938720703],["▁Ular",-13.4346923828125],["راش",-13.43470287322998],["全新的",-13.43470287322998],["▁standa",-13.434706687927246],["▁district",-13.434707641601562],["▁ട്ട",-13.43471622467041],["▁Stephan",-13.434720993041992],["etate",-13.43472671508789],["ອະ",-13.434755325317385],["ніць",-13.434757232666016],["تحف",-13.434762001037598],["umbi",-13.434769630432127],["скре",-13.434774398803713],["いましたが",-13.434785842895508],["loqui",-13.43482494354248],["RAI",-13.434850692749023],["斯坦",-13.434865951538086],["срочно",-13.43486785888672],["יזה",-13.43488883972168],["▁Monet",-13.434892654418944],["ampu",-13.434903144836426],["▁overal",-13.434903144836426],["▁Основни",-13.43491268157959],["▁Duna",-13.434924125671388],["▁madeira",-13.434931755065918],["▁Elektr",-13.434947967529297],["ով՝",-13.434951782226562],["▁nenhuma",-13.43495750427246],["▁közötti",-13.434959411621094],["ියාව",-13.434982299804688],["▁مختصر",-13.434983253479004],["មេ",-13.434995651245115],["▁ဖြစ်ပါတယ်။",-13.435012817382812],["▁نیب",-13.435013771057127],["▁frågan",-13.435029983520508],["רחב",-13.43506908416748],["ničar",-13.435080528259276],["ළුව",-13.435098648071287],["лікті",-13.435104370117188],["Ин",-13.435121536254885],["▁peor",-13.43513298034668],["▁conseguiu",-13.435141563415527],["أي",-13.435178756713867],["▁ენ",-13.435179710388184],["▁commis",-13.435187339782717],["類似",-13.43520164489746],["▁खि",-13.435227394104004],["ljon",-13.4352388381958],["▁ääni",-13.435250282287598],["RAJ",-13.435251235961914],["ливост",-13.435272216796877],["তুল",-13.435279846191406],["▁Сада",-13.43528938293457],["▁întâlni",-13.43529987335205],["▁špi",-13.435307502746582],["龄",-13.435314178466797],["▁gắn",-13.435325622558594],["▁erhvervs",-13.43533706665039],["▁ansikt",-13.435338020324709],["▁atleti",-13.435364723205566],["yerek",-13.435365676879885],["ക്കൾ",-13.4353666305542],["▁захтева",-13.435372352600098],["▁zákaz",-13.435389518737791],["żeń",-13.435422897338867],["ຍະ",-13.43543815612793],["КП",-13.435441970825195],["ిస్తున్నారు",-13.435450553894045],["わず",-13.435455322265623],["gelt",-13.435465812683104],["прэ",-13.435465812683104],["Mü",-13.4354829788208],["▁tienda",-13.43548583984375],["▁vapa",-13.43548583984375],["ходзіць",-13.4354887008667],["▁forno",-13.435492515563965],["▁supporto",-13.43549346923828],["▁ఉద్యోగ",-13.435527801513672],["ästi",-13.435541152954102],["▁prosp",-13.435547828674316],["▁адной",-13.435551643371582],["的市场",-13.43558120727539],["ολογίας",-13.435585975646973],["築",-13.435599327087402],["▁пережива",-13.43560028076172],["▁अंदाज",-13.4356050491333],["Unis",-13.435626983642578],["▁propag",-13.435629844665527],["凭",-13.435630798339844],["优惠",-13.43563175201416],["▁keturi",-13.435639381408691],["逝",-13.435641288757324],["霧",-13.435656547546388],["▁lembrar",-13.435667037963867],["やっぱり",-13.435670852661133],["мац",-13.435675621032717],["▁ტერიტორი",-13.435676574707031],["ફેરફાર",-13.43567943572998],["▁brīdī",-13.43567943572998],["▁cuidados",-13.43567943572998],["▁müsabiqə",-13.43567943572998],["▁svibnja",-13.43567943572998],["▁uluslararası",-13.43567943572998],["▁взагалі",-13.43567943572998],["▁खूबसूरत",-13.43567943572998],["▁സാമ്പത്തിക",-13.43567943572998],["▁dewletê",-13.435680389404297],["▁pinjaman",-13.435680389404297],["נצח",-13.435681343078612],["▁nzuri",-13.435688018798828],["▁Более",-13.435688972473145],["▁seleziona",-13.435690879821776],["▁Tomáš",-13.43570041656494],["▁ochrony",-13.43570327758789],["▁जिसके",-13.435712814331056],["▁Libr
ary",-13.435715675354004],["▁bakgrunn",-13.435717582702637],["▁magán",-13.43571949005127],["▁liturgi",-13.43574047088623],["▁لڳو",-13.43574047088623],["▁Amerikaanse",-13.435745239257812],["▁wisi",-13.435750007629396],["Shirt",-13.43577003479004],["방송",-13.43577766418457],["▁2001,",-13.435781478881836],["▁проверя",-13.435790061950684],["▁କଲା",-13.43581485748291],["▁жодн",-13.43581771850586],["▁habt",-13.435823440551758],["▁Елбасы",-13.435826301574709],["▁nemůže",-13.435831069946287],["łóż",-13.435832023620604],["ირებული",-13.435832023620604],["ΛΙ",-13.435848236083984],["דורך",-13.435880661010742],["쓰",-13.43588161468506],["▁Fredrik",-13.435883522033691],["▁Koch",-13.435890197753906],["▁Аллах",-13.435890197753906],["قائد",-13.435928344726562],["▁predsjed",-13.435928344726562],["ພາກ",-13.435943603515623],["▁केवळ",-13.435954093933104],["▁jug",-13.435973167419434],["一体",-13.435978889465332],["敌",-13.435978889465332],["▁živali",-13.43598747253418],["▁arki",-13.435996055603027],["▁farver",-13.435996055603027],["ningarna",-13.435997009277344],["▁pravda",-13.436007499694824],["▁vaikutta",-13.436025619506836],["স্তা",-13.436042785644531],["▁울",-13.436044692993164],["▁Partiya",-13.43604564666748],["tegevus",-13.436065673828123],["▁conclude",-13.436067581176758],["▁Ivana",-13.43606948852539],["▁Sura",-13.436070442199709],["iyorsunuz",-13.436074256896973],["▁भूमि",-13.436075210571287],["يارات",-13.436102867126465],["▁discover",-13.436108589172363],["▁Šport",-13.43612289428711],["▁ihmisten",-13.436124801635742],["ovce",-13.436127662658691],["нулся",-13.436139106750488],["▁causar",-13.436145782470703],["▁kader",-13.4361572265625],["კომპ",-13.436164855957031],["צרים",-13.43617057800293],["▁bidrar",-13.436176300048828],["ồ",-13.436182975769045],["jskega",-13.43618392944336],["▁dipende",-13.436184883117676],["▁ନିର୍",-13.436186790466309],["loca",-13.43621826171875],["основан",-13.436223030090332],["▁denkt",-13.436237335205078],["▁bej",-13.436240196228027],["▁rewşa",-13.436269760131836],["▁Mostar",-13.436271667480469],["▁الموسم",-13.43628978729248],["အမ်ား",-13.436308860778809],["真相",-13.436327934265137],["▁Alternative",-13.436335563659668],["也都",-13.436336517333984],["يدا",-13.436365127563477],["▁據",-13.43636703491211],["▁Guds",-13.436395645141602],["▁podobné",-13.436400413513184],["LX",-13.43641185760498],["Yeni",-13.43641757965088],["ເຕີ້",-13.436430931091309],["ларында",-13.43643856048584],["plak",-13.436442375183104],["高考",-13.436468124389648],["овое",-13.436481475830078],["ologji",-13.436508178710938],["▁الغرب",-13.436517715454102],["ڪري",-13.436532020568848],["▁prodaje",-13.43653678894043],["σό",-13.436548233032228],["صلح",-13.436548233032228],["GAS",-13.436553001403809],["▁frustr",-13.436567306518556],["▁బయట",-13.43660831451416],["▁оқушы",-13.43661117553711],["115",-13.436616897583008],["▁στρ",-13.436625480651855],["ерите",-13.436636924743652],["▁marcado",-13.4366455078125],["▁5.0",-13.43664836883545],["▁ล้าน",-13.436663627624512],["ल्क",-13.436668395996094],["▁Держ",-13.436688423156738],["▁sənət",-13.436752319335938],["tjenester",-13.43675708770752],["cidos",-13.436777114868164],["▁Willem",-13.436790466308594],["점이",-13.436807632446287],["ulio",-13.436811447143556],["旨在",-13.436813354492188],["に対応",-13.436826705932615],["▁avtal",-13.436854362487791],["▁enkle",-13.43685531616211],["研究院",-13.436856269836426],["ില്ലാത്ത",-13.436860084533691],["mandla",-13.436866760253906],["▁fijn",-13.436874389648438],["рске",-13.436891555786133],["ుతున్నారు",-13.436909675598145],["▁બ્ર",-13.436911582946776],["បូ",-13.43
6932563781738],["▁Marius",-13.436945915222168],["טבע",-13.436946868896484],["▁kaikkia",-13.43697738647461],["确",-13.437000274658203],["ಾರ್ಥ",-13.437044143676758],["පාද",-13.437045097351074],["▁පහත",-13.437052726745604],["▁hors",-13.437091827392578],["yanı",-13.437101364135742],["მუშავე",-13.437113761901855],["▁మహ",-13.437119483947754],["自助",-13.437125205993652],["▁පී",-13.437134742736816],["▁silent",-13.437138557434082],["θος",-13.437143325805664],["▁jelenti",-13.437207221984863],["▁मिळत",-13.437228202819824],["▁esemény",-13.43723964691162],["TZA",-13.437249183654783],["▁Leta",-13.437253952026367],["▁타이",-13.437255859375],["▁имати",-13.437284469604492],["ався",-13.437299728393556],["▁thé",-13.437314987182615],["▁Cass",-13.437317848205566],["▁användas",-13.437322616577148],["茨",-13.437358856201172],["栏",-13.437359809875488],["▁pamięta",-13.437360763549805],["阔",-13.437362670898438],["aura",-13.437369346618652],["ကံ",-13.437370300292969],["ວຽກ",-13.43739128112793],["পূ",-13.437397956848145],["塵",-13.437399864196776],["trž",-13.437400817871094],["ෛ",-13.437422752380373],["ម្តង",-13.437423706054688],["▁Anforderungen",-13.437424659729004],["▁Norwegian",-13.437424659729004],["▁fasilitas",-13.437424659729004],["▁müsəlman",-13.437424659729004],["▁obsługi",-13.437424659729004],["▁příspěvek",-13.437424659729004],["▁παρακάτω",-13.437424659729004],["▁Нагадаємо",-13.437424659729004],["▁поддержки",-13.437424659729004],["▁суспільства",-13.437424659729004],["▁دوبارہ",-13.437424659729004],["▁પત્ની",-13.437424659729004],["▁ଭାଷା",-13.437424659729004],["▁పరీక్ష",-13.437424659729004],["▁ಪೊಲೀಸ್",-13.437424659729004],["▁Εγγραφή",-13.437426567077637],["▁پیچھے",-13.437426567077637],["▁परीक्षण",-13.437426567077637],["▁विमानस्थल",-13.437426567077637],["▁berbeza",-13.437427520751951],["▁DENGAN",-13.43742847442627],["▁ресми",-13.43743133544922],["▁Mahathir",-13.437433242797852],["▁ခုႏွစ္",-13.437433242797852],["▁важливо",-13.437435150146484],["▁مبنی",-13.437450408935549],["▁hörmət",-13.437451362609863],["▁چاہتا",-13.437458038330078],["▁Einrichtung",-13.437459945678713],["▁дазвол",-13.437472343444824],["▁Lát",-13.437478065490724],["▁Европската",-13.43747901916504],["cide",-13.437482833862305],["▁૧૯",-13.437506675720217],["▁empleo",-13.437511444091797],["ffordd",-13.43752670288086],["വിഷ",-13.437535285949709],["▁Гүл",-13.437546730041504],["했지만",-13.437559127807615],["ляться",-13.437565803527832],["▁Сондай",-13.437565803527832],["ปะ",-13.437575340270996],["▁Taigi",-13.437575340270996],["thana",-13.437582969665527],["▁जानते",-13.43758773803711],["▁banjur",-13.43759059906006],["מעשה",-13.437597274780272],["▁التعامل",-13.437618255615234],["▁തീരുമാന",-13.43762493133545],["▁کارگر",-13.437628746032717],["▁العسكرية",-13.43764305114746],["355",-13.437644004821776],["▁skyn",-13.43765640258789],["▁észre",-13.437661170959473],["-06",-13.437662124633787],["лювати",-13.437664985656738],["▁prevod",-13.437679290771484],["▁سوخت",-13.4376802444458],["▁ఏమి",-13.437686920166016],["▁оплати",-13.43769073486328],["▁menteri",-13.437695503234863],["▁saiba",-13.437701225280762],["ročil",-13.43772315979004],["▁karma",-13.437759399414062],["▁tủ",-13.43776512145996],["▁paši",-13.437769889831545],["我有",-13.437776565551758],["▁ձեռնարկ",-13.437780380249023],["▁نفوذ",-13.437784194946287],["ନ୍ସ",-13.43779468536377],["▁istorie",-13.437796592712402],["ίνετε",-13.43780517578125],["▁holding",-13.437807083129885],["中で",-13.437810897827148],["▁Фин",-13.43781280517578],["▁concours",-13.437819480895996],["▁BUR",-13.437820434570312],["சல்",-13.437848091125488],["
▁ღმერთ",-13.437857627868652],["▁costante",-13.437858581542969],["ိုက်",-13.437912940979004],["▁oslav",-13.437945365905762],["ριά",-13.43796443939209],["▁vuur",-13.437973022460938],["▁такія",-13.43800163269043],["▁MENG",-13.438017845153809],["▁cukor",-13.438023567199709],["йтеся",-13.438024520874023],["還可以",-13.438037872314451],["जनक",-13.4380464553833],["teret",-13.43804931640625],["صاف",-13.438057899475098],["具體",-13.438064575195312],["▁موافق",-13.438065528869627],["ruimte",-13.43807601928711],["▁љубав",-13.438087463378906],["▁högre",-13.438095092773438],["စာအုပ္",-13.43809986114502],["▁അക",-13.438104629516602],["zato",-13.43812656402588],["▁ಎನ್ನ",-13.438133239746094],["ttaminen",-13.438143730163574],["ფართო",-13.438165664672852],["सले",-13.4381685256958],["▁szervez",-13.438170433044434],["▁dúvida",-13.438185691833496],["▁instalar",-13.438188552856444],["▁Javier",-13.438223838806152],["▁راحتی",-13.438264846801758],["▁Полу",-13.438268661499023],["hirdetés",-13.438278198242188],["▁ísť",-13.438279151916504],["▁жене",-13.438283920288086],["ERN",-13.438284873962402],["▁ጋ",-13.438299179077148],["▁pils",-13.438305854797363],["▁തുടര",-13.438324928283691],["▁szyb",-13.438335418701172],["คุย",-13.43837833404541],["▁Северна",-13.438401222229004],["▁тун",-13.43842315673828],["рест",-13.438435554504396],["▁улога",-13.438436508178713],["wezesha",-13.438461303710938],["(5",-13.438471794128418],["▁uzvar",-13.43849277496338],["стои",-13.438507080078123],["foon",-13.438521385192873],["▁jazd",-13.43852424621582],["▁виник",-13.43852710723877],["Чер",-13.438538551330566],["darê",-13.438553810119627],["▁hamma",-13.438557624816896],["▁Davis",-13.438559532165527],["MED",-13.438563346862791],["тую",-13.438563346862791],["▁klink",-13.438563346862791],["vlje",-13.43857479095459],["▁wow",-13.438599586486816],["flutning",-13.438608169555664],["โพ",-13.438624382019045],["▁olundu",-13.438629150390623],["▁වත්",-13.438642501831056],["▁እምነት",-13.438674926757812],["▁ابتدای",-13.438680648803713],["मध",-13.438684463500977],["▁Pest",-13.438687324523926],["▁воно",-13.43869972229004],["Tro",-13.43871784210205],["riba",-13.438720703125],["환경",-13.438725471496582],["▁مفاد",-13.438732147216797],["ALLA",-13.438735008239746],["▁talve",-13.438739776611328],["ତ୍ୱ",-13.438767433166504],["▁baad",-13.438769340515137],["ด้วยกัน",-13.438777923583984],["זען",-13.4387845993042],["▁saapu",-13.43879222869873],["גדל",-13.43879508972168],["UTE",-13.43880558013916],["бити",-13.438809394836426],["海南",-13.438830375671388],["gnu",-13.438834190368652],["šava",-13.438837051391602],["▁দেখে",-13.4388427734375],["raith",-13.43885898590088],["ório",-13.438862800598145],["▁ಸೇ",-13.438873291015623],["▁javnih",-13.438876152038574],["မွ်",-13.43890380859375],["▁laukia",-13.43891429901123],["▁Bilet",-13.43894863128662],["▁EXP",-13.438953399658203],["alp",-13.438960075378418],["▁aurrean",-13.43896484375],["的意思",-13.43897819519043],["ුන්",-13.438987731933594],["ద్య",-13.43898868560791],["▁مقابله",-13.43903923034668],["▁ունենալ",-13.439057350158691],["▁таблет",-13.439068794250488],["▁asiaa",-13.439093589782717],["▁شاہد",-13.43910312652588],["寸",-13.439133644104004],["hơ",-13.43914031982422],["▁shoqëri",-13.439149856567385],["▁வாச",-13.43915557861328],["벤",-13.43916130065918],["પૂર્ણ",-13.439167022705078],["กระทู้",-13.43917179107666],["טלוויזיה",-13.439172744750977],["ពាណិជ្ជកម្ម",-13.439172744750977],["▁entwickeln",-13.439172744750977],["▁яғни",-13.439172744750977],["▁הממשלה",-13.439172744750977],["▁সাম্প্রতিক",-13.439172744750977],["▁મુજબ",-13.439172744750977],[
"▁క్లిక్",-13.439172744750977],["▁opowiada",-13.439173698425291],["▁puzzle",-13.439173698425291],["▁ఆంధ్రప్రదేశ్",-13.439173698425291],["▁سلسلہ",-13.43917465209961],["▁wallpaper",-13.439176559448242],["ขวา",-13.43917751312256],["▁मिनट",-13.439178466796877],["▁disponível",-13.439179420471191],["▁nəşr",-13.439181327819824],["▁spectacol",-13.43918228149414],["▁auzo",-13.439200401306152],["▁comeza",-13.439208030700684],["▁ଆଣି",-13.439212799072266],["mayacak",-13.439228057861328],["▁ਯਾਦ",-13.439234733581545],["▁dikarin",-13.439244270324709],["Bri",-13.439247131347656],["▁vuelta",-13.439249038696287],["▁sejuk",-13.439250946044922],["суна",-13.439253807067873],["▁Señor",-13.43926239013672],["íð",-13.439274787902832],["▁shkollë",-13.439289093017578],["▁يحتاج",-13.43929386138916],["▁Shirika",-13.43929958343506],["▁আম",-13.439311981201172],["ുണ്ടോ",-13.439318656921388],["▁spår",-13.439318656921388],["▁Kerala",-13.439327239990234],["成立于",-13.439334869384766],["ដែរ",-13.439349174499512],["например",-13.439372062683104],["▁للب",-13.439373016357422],["▁märg",-13.439375877380373],["ことがある",-13.43938159942627],["▁Muze",-13.439395904541016],["▁sessions",-13.439396858215332],["ాని",-13.43940544128418],["▁വെള്ളം",-13.439414024353027],["▁מתח",-13.43941593170166],["▁جائز",-13.439416885375977],["ନ୍ୟ",-13.43942642211914],["▁нашої",-13.439437866210938],["svæði",-13.439448356628418],["▁Sug",-13.439448356628418],["▁suve",-13.439453125],["dón",-13.439460754394531],["כער",-13.439462661743164],["ILLA",-13.43946647644043],["▁tamanho",-13.439481735229492],["theater",-13.439494132995604],["▁يخ",-13.439501762390137],["工艺",-13.43950366973877],["▁korting",-13.439505577087402],["▁потеря",-13.439534187316896],["▁Lop",-13.439537048339844],["▁policial",-13.439537048339844],["arrivée",-13.43953800201416],["▁مجوز",-13.43955135345459],["▁throw",-13.439559936523438],["市场上",-13.43956184387207],["cijom",-13.43956470489502],["▁אחרות",-13.439568519592283],["▁اوقات",-13.439568519592283],["▁Raad",-13.439574241638184],["▁ularni",-13.4395751953125],["▁भक्त",-13.43961238861084],["▁vemos",-13.439652442932127],["ycznej",-13.439661026000977],["▁Liefer",-13.43968391418457],["建物",-13.43972873687744],["اسات",-13.439759254455566],["▁senator",-13.439759254455566],["▁usku",-13.439764022827148],["▁Samu",-13.439766883850098],["天使",-13.439775466918944],["▁235",-13.43978500366211],["▁حوال",-13.43979549407959],["▁בחר",-13.43980598449707],["▁අධි",-13.439806938171388],["▁3.4",-13.439807891845703],["fortuna",-13.4398193359375],["▁ሆነው",-13.439826965332031],["אנד",-13.439847946166992],["▁الی",-13.43985652923584],["iliú",-13.439857482910156],["▁kiriku",-13.439859390258787],["▁لوح",-13.439863204956056],["–1",-13.439865112304688],["кажите",-13.43988037109375],["▁관심",-13.439889907836914],["▁løse",-13.43989372253418],["270",-13.439905166625977],["▁Hostel",-13.43990707397461],["▁anume",-13.439908981323242],["▁akcie",-13.439955711364746],["▁sınır",-13.439996719360352],["▁zudem",-13.440003395080566],["▁поздрав",-13.440021514892578],["ਬੇ",-13.440023422241213],["▁efectiva",-13.440044403076172],["▁zilele",-13.44006633758545],["▁स्वर",-13.440077781677246],["▁inici",-13.440080642700195],["▁басым",-13.44009494781494],["▁شڪار",-13.440096855163574],["vore",-13.440118789672852],["▁soare",-13.440118789672852],["▁veramente",-13.440141677856444],["▁kirine",-13.44015121459961],["▁პირველად",-13.440155982971191],["▁членови",-13.440167427062988],["▁vægt",-13.440168380737305],["▁həlli",-13.440170288085938],["న్నాయి",-13.440176010131836],["▁семе",-13.440194129943848],["회사",-13.440198898
31543],["িয়ার",-13.440215110778809],["▁metoder",-13.440232276916504],["▁strid",-13.4402494430542],["▁rinne",-13.440312385559082],["▁dedicado",-13.440313339233398],["▁ноч",-13.440317153930664],["▁svá",-13.440340042114258],["▁Bulgaria",-13.440346717834473],["▁تونس",-13.440349578857422],["▁පොත්",-13.440373420715332],["▁palo",-13.440384864807127],["mânt",-13.44042682647705],["▁Шаб",-13.4404296875],["ረጥ",-13.440444946289062],["▁tőle",-13.440444946289062],["無料で",-13.440462112426758],["▁Teresa",-13.44049835205078],["▁Рис",-13.440522193908691],["ക്കള",-13.44052505493164],["▁Vän",-13.440544128417969],["OTT",-13.440545082092283],["▁humain",-13.440563201904297],["▁ٿيا",-13.440570831298828],["▁Eis",-13.440589904785156],["प्ट",-13.440596580505373],["▁ställe",-13.440621376037598],["▁Guarda",-13.440649032592772],["ющими",-13.44066333770752],["若是",-13.44066333770752],["▁társaság",-13.440680503845217],["的结果",-13.440712928771973],["▁яриа",-13.440715789794922],["▁awood",-13.440723419189451],["▁прашање",-13.440736770629885],["385",-13.440776824951172],["▁Joka",-13.440811157226562],["文學",-13.440814971923828],["▁publication",-13.440825462341309],["▁Veik",-13.440841674804688],["ನನ್ನು",-13.440855026245115],["кем",-13.440858840942385],["ല്ലാ",-13.440887451171877],["îka",-13.440890312194824],["兽",-13.440905570983888],["新华社",-13.44090747833252],["γεται",-13.440918922424316],["ฝั่ง",-13.440922737121582],["▁düzenleme",-13.440922737121582],["▁συχνά",-13.440923690795898],["▁Diskussion",-13.440924644470217],["▁Städte",-13.440924644470217],["▁алынган",-13.440924644470217],["▁વાર્તા",-13.440924644470217],["▁២០១៧",-13.440925598144531],["▁ٻارڙن",-13.440927505493164],["▁দূর",-13.440929412841797],["▁белгилүү",-13.440930366516112],["▁قناة",-13.440930366516112],["▁vdekur",-13.440943717956545],["религиозн",-13.440945625305176],["▁جایگاه",-13.44094944000244],["▁krāsa",-13.440950393676758],["ٹنگ",-13.440967559814451],["förmåga",-13.440969467163086],["▁ክርስቶስ",-13.440998077392578],["▁જેવી",-13.441000938415527],["機制",-13.441003799438477],["▁comuna",-13.441007614135742],["ດໍາ",-13.441010475158691],["ودة",-13.441011428833008],["קומען",-13.44101333618164],["▁يستطيع",-13.44101333618164],["▁алкогол",-13.44101905822754],["манов",-13.441021919250488],["▁intorno",-13.441025733947754],["כלל",-13.44102954864502],["▁Theme",-13.441036224365234],["ப்படி",-13.441038131713867],["▁permita",-13.441051483154297],["▁Krievijas",-13.441059112548828],["▁religiosa",-13.441059112548828],["▁indicar",-13.44107151031494],["▁correcta",-13.441073417663574],["▁Oost",-13.441081047058104],["▁الحد",-13.441089630126951],["▁parempi",-13.441097259521484],["burgo",-13.4410982131958],["▁יחיד",-13.44110107421875],["▁الجانب",-13.441102981567385],["東京都",-13.441105842590332],["▁κοινό",-13.441123962402344],["▁माता",-13.44112777709961],["通訊",-13.441128730773926],["▁යාම",-13.441133499145508],["▁syl",-13.441137313842772],["▁reusit",-13.441142082214355],["▁magad",-13.441163063049316],["▁دخول",-13.441170692443848],["▁inaad",-13.44118309020996],["▁széles",-13.44119358062744],["Црвен",-13.441197395324709],["قضاء",-13.44119930267334],["studie",-13.44121551513672],["▁कल्याण",-13.441216468811035],["▁носе",-13.441224098205566],["▁lorsqu",-13.441228866577148],["ndhu",-13.44123077392578],["▁مميز",-13.441241264343262],["▁अवि",-13.441245079040527],["会社の",-13.441245079040527],["▁opnieuw",-13.441262245178224],["kerta",-13.441277503967283],["▁гос",-13.441282272338867],["uesi",-13.4412841796875],["▁ajaran",-13.441286087036133],["encontre",-13.441299438476562],["▁presentazione",-13.44130706787109
4],["lərimiz",-13.441314697265623],["▁olemme",-13.44132137298584],["▁желан",-13.44133758544922],["▁hangat",-13.441338539123535],["▁जुट",-13.441340446472168],["▁ПРЕД",-13.441350936889648],["▁Kill",-13.44136905670166],["▁내용을",-13.44137477874756],["▁mijenja",-13.44139289855957],["▁thang",-13.441394805908203],["állított",-13.441413879394531],["ेऽ",-13.44141960144043],["▁medicamentos",-13.441442489624023],["打開",-13.441452026367188],["▁objetos",-13.44145965576172],["▁strax",-13.441469192504885],["的教育",-13.44147491455078],["▁думал",-13.441475868225098],["▁sinulla",-13.441482543945312],["▁သင္",-13.441494941711426],["▁Gift",-13.441503524780272],["▁Waxay",-13.441506385803224],["шысы",-13.441527366638184],["▁ಮುನ್ನ",-13.441539764404297],["▁webgune",-13.441540718078612],["žių",-13.441542625427246],["▁Yüz",-13.441550254821776],["▁saytı",-13.441567420959473],["తుల",-13.441605567932127],["овки",-13.441611289978027],["▁АҚШ",-13.441617965698242],["しまい",-13.44162654876709],["▁kvalita",-13.441632270812988],["▁අග",-13.44163703918457],["▁стратеги",-13.441655158996582],["കന്",-13.441685676574709],["▁kvality",-13.441688537597656],["ბარ",-13.441689491271973],["▁secretar",-13.441709518432615],["▁libere",-13.441737174987791],["▁Spal",-13.441778182983398],["▁මතු",-13.441784858703612],["gyr",-13.441810607910156],["▁трэ",-13.441815376281738],["▁agri",-13.441819190979004],["▁pequenas",-13.441827774047852],["▁اولاد",-13.441829681396484],["▁Kupa",-13.441831588745115],["▁taş",-13.441838264465332],["▁dikenali",-13.441849708557127],["Sel",-13.441864967346191],["等到",-13.44187355041504],["ნდო",-13.44187831878662],["▁Kuru",-13.441882133483888],["▁lue",-13.441895484924316],["hula",-13.441896438598633],["īšanu",-13.44190788269043],["೮",-13.441932678222656],["pī",-13.44194221496582],["▁Postare",-13.441954612731934],["▁وینا",-13.4419584274292],["lapis",-13.441962242126465],["Сп",-13.441990852355955],["▁aufzu",-13.442022323608398],["▁tutur",-13.442024230957031],["第二天",-13.442024230957031],["▁интересов",-13.442030906677246],["▁అభి",-13.442030906677246],["tamina",-13.442044258117676],["▁РУ",-13.442051887512209],["▁Sicherheits",-13.442059516906738],["▁visiems",-13.442063331604004],["▁시민",-13.442063331604004],["عجز",-13.44211483001709],["▁poslu",-13.442174911499023],["▁حماس",-13.442178726196287],["lift",-13.442194938659668],["▁ລັດຖະບານ",-13.442201614379885],["영상",-13.442230224609377],["▁ısı",-13.442238807678224],["▁семье",-13.442253112792969],["▁voici",-13.442255973815918],["treba",-13.4422607421875],["▁ubica",-13.442262649536133],["ທາ",-13.44229507446289],["▁पाइ",-13.442298889160156],["ടുക്ക",-13.442315101623535],["▁потым",-13.442325592041016],["▁cơn",-13.442336082458496],["Nem",-13.442339897155762],["▁növel",-13.442340850830078],["gî",-13.442358016967772],["▁kereszt",-13.442378044128418],["▁სიყვარული",-13.4423828125],["480",-13.442391395568848],["▁dokumente",-13.442404747009276],["▁drejtim",-13.442404747009276],["▁налага",-13.442412376403809],["▁isole",-13.442434310913086],["▁հարցում",-13.442435264587402],["▁Won",-13.442441940307615],["سيل",-13.44244384765625],["▁среды",-13.44245147705078],["స్తోంది",-13.442465782165527],["▁registrar",-13.44247341156006],["арен",-13.442503929138184],["▁acquista",-13.442523002624512],["linder",-13.442530632019045],["▁financira",-13.442541122436523],["▁मर्",-13.442554473876951],["▁Ferrol",-13.442557334899902],["▁herre",-13.442584037780762],["▁kurse",-13.442587852478027],["一台",-13.44259548187256],["日报道",-13.442601203918455],["▁meðan",-13.442615509033203],["灌",-13.442625999450684],["▁தெரிய",-13.44263267517
0898],["泣",-13.442633628845217],["慌",-13.44265079498291],["nggih",-13.442654609680176],["▁organisé",-13.442654609680176],["瀏覽",-13.442655563354492],["▁kuwaas",-13.442675590515137],["ยุโรป",-13.442676544189451],["ปกครอง",-13.44267749786377],["▁Өзбекстан",-13.44267749786377],["▁Pourquoi",-13.442678451538086],["▁Prenumerera",-13.442678451538086],["▁inkludert",-13.442678451538086],["▁natječaj",-13.442678451538086],["▁sogenannte",-13.442678451538086],["▁większość",-13.442678451538086],["▁сообраќај",-13.442678451538086],["▁כגון",-13.442678451538086],["▁تەرەپ",-13.442678451538086],["▁ਚੰਡੀਗੜ੍ਹ",-13.442678451538086],["▁మళ్లీ",-13.442678451538086],["▁ეკლესია",-13.442678451538086],["훈련",-13.442682266235352],["▁Pravilnik",-13.44268798828125],["▁സ്വയം",-13.44268798828125],["▁jueves",-13.442692756652832],["▁ಸಂದರ್ಭದಲ್ಲಿ",-13.442692756652832],["▁inzerát",-13.442694664001465],["▁අංක",-13.442694664001465],["мыг",-13.442699432373049],["tellen",-13.442706108093262],["▁доза",-13.442708969116213],["īks",-13.442710876464844],["ÍT",-13.442723274230955],["▁hjärta",-13.44273281097412],["▁comfortable",-13.44273853302002],["▁Liter",-13.442747116088867],["▁그대로",-13.4427490234375],["▁trông",-13.442754745483398],["▁болоод",-13.442764282226562],["▁રામ",-13.442777633666992],["▁viteve",-13.44278049468994],["▁छैनन्",-13.442788124084473],["▁үлгі",-13.442789077758787],["▁Yapı",-13.442797660827637],["▁panorami",-13.442798614501951],["chester",-13.442806243896484],["▁گهرجي",-13.4428071975708],["▁qoftë",-13.4428129196167],["майда",-13.442814826965332],["▁puik",-13.442824363708496],["▁تحتاج",-13.442849159240724],["▁nagusia",-13.442851066589355],["▁mocno",-13.442859649658203],["▁hominum",-13.442865371704102],["මෙ",-13.442867279052734],["versicherung",-13.442877769470217],["危機",-13.442883491516112],["ZK",-13.44289207458496],["changia",-13.442898750305176],["▁институција",-13.442904472351074],["halle",-13.44290542602539],["▁השירות",-13.442912101745604],["▁ලංකාවට",-13.442917823791504],["ነገ",-13.442922592163086],["▁غرفة",-13.442927360534668],["оўкі",-13.4429349899292],["などは",-13.442941665649414],["▁Mayor",-13.44294261932373],["▁ಹೆಸರ",-13.442951202392578],["kehr",-13.442953109741213],["خاص",-13.442960739135742],["▁agencija",-13.442964553833008],["▁̧",-13.442968368530272],["▁genomför",-13.443001747131348],["глу",-13.443044662475586],["▁sitte",-13.443049430847168],["▁بهدف",-13.443053245544434],["たくない",-13.443056106567385],["▁DDR",-13.443077087402344],["▁کھا",-13.443082809448242],["▁defekt",-13.443092346191406],["▁държавата",-13.443093299865724],["▁Bayi",-13.443116188049316],["▁dokonal",-13.443120002746582],["はずです",-13.44312858581543],["▁Centrul",-13.443143844604492],["5,6",-13.44314670562744],["▁minimo",-13.443163871765137],["ចូលរួម",-13.443172454833984],["▁syk",-13.4431791305542],["▁публикации",-13.443188667297363],["学位",-13.443199157714844],["zetten",-13.44320011138916],["▁shpër",-13.443215370178224],["වැසි",-13.443222999572754],["▁пришел",-13.443224906921388],["▁ΤΗΣ",-13.44324016571045],["▁högt",-13.443245887756348],["▁turístico",-13.443245887756348],["▁пројекта",-13.443252563476562],["very",-13.44326400756836],["让他们",-13.443283081054688],["skrivning",-13.443303108215332],["зба",-13.44330883026123],["óid",-13.443318367004396],["することができる",-13.44332218170166],["middelen",-13.443326950073242],["▁распространен",-13.443334579467772],["▁complessi",-13.443337440490724],["ローン",-13.443345069885254],["ਫੀ",-13.443352699279783],["jelentés",-13.443358421325684],["▁solide",-13.443367958068848],["779",-13.443399429321287],["ščine",-13.443403244018556
],["rechte",-13.44341278076172],["▁patrimoni",-13.443432807922363],["კრა",-13.44343376159668],["▁միջ",-13.443448066711426],["▁பதவி",-13.443455696105955],["▁تیز",-13.443467140197754],["............",-13.443485260009766],["▁Titul",-13.443492889404297],["▁gjald",-13.443500518798828],["RAF",-13.44351577758789],["ल्लो",-13.44351577758789],["▁олж",-13.443531036376951],["▁მეფე",-13.443559646606444],["ಗೊಳ್ಳ",-13.443567276000977],["▁keladi",-13.443581581115724],["▁namba",-13.443594932556152],["ဘို",-13.443603515625],["すべての",-13.44364070892334],["የሁ",-13.443653106689451],["▁звезда",-13.443660736083984],["そこで",-13.443700790405272],["ljev",-13.443703651428224],["▁કરાવ",-13.44370460510254],["しようと",-13.443716049194336],["▁tuis",-13.4437255859375],["▁bikini",-13.443750381469728],["▁pojazd",-13.443764686584473],["▁hållbar",-13.443767547607422],["▁Kutil",-13.443777084350586],["▁urge",-13.443788528442385],["▁dorm",-13.443790435791016],["▁neskôr",-13.443792343139648],["▁سوى",-13.443809509277344],["▁ડી",-13.443815231323242],["▁mjuk",-13.443828582763672],["▁수정",-13.443859100341797],["৩১",-13.443867683410645],["▁सञ्चार",-13.443877220153809],["kevin",-13.443891525268556],["▁hospitali",-13.443891525268556],["ստր",-13.443927764892578],["▁Evropian",-13.443947792053224],["▁ആർ",-13.443974494934082],["▁виконан",-13.443981170654297],["▁Lup",-13.44398593902588],["חוף",-13.444031715393066],["ramento",-13.444046974182127],["▁публічн",-13.444056510925291],["는지",-13.444058418273926],["▁말을",-13.444062232971191],["▁França",-13.444064140319824],["Pay",-13.444068908691406],["▁verkeerd",-13.444071769714355],["Love",-13.444084167480469],["▁şəxslərin",-13.444097518920898],["▁trio",-13.44411277770996],["▁oito",-13.444124221801758],["▁utsatt",-13.44412612915039],["▁produkte",-13.44414234161377],["▁თანამშრომლ",-13.444175720214844],["▁näher",-13.444183349609377],["▁Trí",-13.444198608398438],["২৯",-13.444202423095703],["▁소비자",-13.444205284118652],["拒",-13.444205284118652],["▁գործընկեր",-13.444207191467283],["овка",-13.444208145141602],["▁написан",-13.44421672821045],["▁воле",-13.444228172302246],["ителен",-13.444231986999512],["hiş",-13.444236755371094],["▁നടത്തുന്ന",-13.44423770904541],["ေန႕",-13.444250106811523],["bax",-13.444259643554688],["▁Szo",-13.44426441192627],["しておく",-13.444294929504396],["▁Augustin",-13.444306373596191],["ītes",-13.44431972503662],["▁189",-13.444327354431152],["vill",-13.44435214996338],["▁Ihres",-13.444361686706545],["EDE",-13.444364547729492],["▁laenu",-13.44437313079834],["賢",-13.444382667541504],["壯",-13.44438934326172],["▁turu",-13.444395065307615],["卸",-13.444400787353516],["ており",-13.444421768188477],["▁સંબંધ",-13.444433212280272],["ឈ្នះ",-13.444436073303224],["តេជោ",-13.444436073303224],["▁alacsony",-13.444436073303224],["▁слободно",-13.444436073303224],["▁गठबन्धन",-13.444436073303224],["▁তরুণ",-13.444436073303224],["▁বঙ্গবন্ধু",-13.444436073303224],["▁građana",-13.44443702697754],["▁xarxa",-13.444438934326172],["認知",-13.444439888000488],["▁yavaş",-13.444446563720703],["▁Exchange",-13.44444751739502],["▁दाखल",-13.444448471069336],["ชี้",-13.444450378417969],["▁thoải",-13.444451332092283],["▁тусгай",-13.444456100463867],["▁Zub",-13.444459915161133],["ුනා",-13.444498062133787],["▁wijn",-13.444502830505373],["حديث",-13.444504737854004],["หน่วยงาน",-13.444509506225586],["▁mbetet",-13.44451141357422],["▁kamarád",-13.444515228271484],["▁berbanding",-13.444527626037598],["▁ಹೋಗಿ",-13.444528579711914],["▁júní",-13.444533348083496],["▁මාර්ග",-13.444541931152344],["게시판",-13.44454288482666],["▁berkuasa",-13.444550
514221191],["▁karriere",-13.44456386566162],["▁apni",-13.44457721710205],["▁Berri",-13.44459342956543],["ახლო",-13.444599151611328],["აშ",-13.444602966308594],["▁kultury",-13.444605827331545],["▁proizvodi",-13.444612503051758],["▁Tume",-13.44468593597412],["ძღ",-13.444689750671388],["θυ",-13.444706916809082],["▁popoln",-13.444706916809082],["▁létre",-13.444709777832031],["▁tradizione",-13.444711685180664],["▁клиентов",-13.444716453552246],["▁бүртгэл",-13.444753646850586],["眾多",-13.44475555419922],["өөд",-13.444766998291016],["áronse",-13.444786071777344],["校长",-13.444795608520508],["shib",-13.44480800628662],["▁Fish",-13.444809913635254],["ুম",-13.44482135772705],["▁társa",-13.444867134094238],["▁uşaqlar",-13.44489574432373],["ნაკ",-13.444905281066896],["▁කෙටි",-13.44491481781006],["poteza",-13.444954872131348],["θεν",-13.444968223571776],["▁reka",-13.444972038269045],["▁నేత",-13.444987297058104],["ničke",-13.444988250732422],["▁ඉතිහාසය",-13.444998741149902],["▁ടോ",-13.445012092590332],["вей",-13.44501495361328],["▁querido",-13.445039749145508],["▁qorsh",-13.445045471191406],["baga",-13.445046424865724],["▁kendisine",-13.445046424865724],["▁एत",-13.445066452026367],["لقي",-13.445079803466797],["▁phó",-13.445086479187012],["hátíð",-13.445097923278809],["▁абдан",-13.445098876953123],["ችንን",-13.44509983062744],["Lib",-13.44510269165039],["▁представник",-13.445106506347656],["▁Հետ",-13.445138931274414],["▁betur",-13.445160865783691],["▁passada",-13.445162773132324],["ytų",-13.445175170898438],["▁другі",-13.445176124572754],["▁Munka",-13.445197105407717],["؟؟؟",-13.445199012756348],["ವ್ಯಾ",-13.44520664215088],["ვრის",-13.445220947265623],["▁befinden",-13.445233345031738],["conto",-13.445235252380373],["ыч",-13.445267677307127],["▁himmel",-13.445281982421877],["▁teklif",-13.445337295532228],["▁ถ",-13.445340156555176],["摄影",-13.445358276367188],["海上",-13.445368766784668],["ിക്കുകയും",-13.445388793945312],["▁ligon",-13.445390701293944],["だけど",-13.445419311523438],["▁Regina",-13.44542407989502],["▁vegetal",-13.445430755615234],["ировки",-13.445446014404297],["ाका",-13.445456504821776],["▁Sitz",-13.445459365844728],["革",-13.445465087890623],["▁ప్రారంభ",-13.445474624633787],["▁constata",-13.445480346679688],["ačné",-13.445490837097168],["▁desire",-13.445490837097168],["વલ",-13.445500373840332],["pravljen",-13.445504188537598],["रक",-13.445527076721191],["טוב",-13.445547103881836],["▁ők",-13.44555377960205],["▁краіне",-13.445569038391112],["▁случайно",-13.44558048248291],["tatzen",-13.44558334350586],["เครื่องมือ",-13.445611000061035],["ाज",-13.445623397827148],["▁คณะ",-13.44564437866211],["▁6-8",-13.445653915405272],["▁blizu",-13.445673942565918],["▁կետ",-13.445682525634766],["▁siit",-13.445697784423828],["4.5",-13.44570255279541],["ልም",-13.445708274841309],["սա",-13.445731163024902],["▁Ian",-13.445765495300291],["▁Zapo",-13.445777893066406],["▁krag",-13.445781707763672],["صن",-13.445789337158203],["ጠረ",-13.445789337158203],["▁készülék",-13.44579029083252],["▁ddod",-13.44580364227295],["ಮೋ",-13.445823669433594],["▁bilmir",-13.445837020874023],["▁бисквитки",-13.445867538452148],["▁humo",-13.44586944580078],["paš",-13.445893287658691],["ភា",-13.445926666259766],["شە",-13.445929527282717],["ಷಿ",-13.445945739746094],["лышы",-13.44598388671875],["İT",-13.446009635925291],["主演",-13.446015357971191],["ργα",-13.446036338806152],["nnettu",-13.44604206085205],["fungu",-13.446043014526367],["▁շարժ",-13.446049690246582],["TIV",-13.446050643920898],["▁keduanya",-13.446050643920898],["ණා",-13.446067810058594],["תלמ
יד",-13.446085929870604],["чныя",-13.446100234985352],["nutím",-13.446102142333984],["的最佳",-13.446139335632324],["▁преду",-13.446145057678224],["▁автоматы",-13.44614601135254],["▁ಫಿ",-13.44614601135254],["▁helped",-13.446151733398438],["折扣",-13.446167945861816],["孟",-13.446173667907717],["惨",-13.446179389953612],["仪式",-13.446186065673828],["▁пря",-13.44619369506836],["펜",-13.446195602416992],["▁Privacy",-13.446196556091309],["▁коришћења",-13.446196556091309],["▁הקר",-13.446196556091309],["▁تعطیل",-13.446196556091309],["▁2018/2019",-13.446197509765623],["▁λένε",-13.446197509765623],["▁Иначе",-13.446197509765623],["▁ହୁଏ",-13.446200370788574],["コード",-13.446200370788574],["▁alcance",-13.446202278137209],["▁Building",-13.44620418548584],["▁ਆਉਣ",-13.44620418548584],["▁Application",-13.446205139160156],["▁прибыль",-13.446209907531738],["▁जाएंगे",-13.446216583251951],["▁Meskipun",-13.44621753692627],["▁कस्तो",-13.446219444274902],["▁praktyk",-13.446221351623535],["▁берілген",-13.446221351623535],["เปอร์",-13.446223258972168],["▁ہوگا۔",-13.4462251663208],["▁కాకుండా",-13.446231842041016],["▁долари",-13.446237564086914],["▁జరగ",-13.446240425109863],["▁europene",-13.446259498596191],["▁fisse",-13.446269035339355],["▁vineri",-13.446276664733888],["▁besøkende",-13.44627857208252],["▁Verfahren",-13.446296691894531],["▁premios",-13.446306228637695],["▁целях",-13.446322441101074],["▁mahusay",-13.446328163146973],["aegse",-13.446331977844238],["▁отколкото",-13.446335792541504],["ិក",-13.446343421936035],["▁Comhrá",-13.4463472366333],["▁tạp",-13.446375846862791],["мейді",-13.446382522583008],["▁París",-13.446386337280272],["▁שאלה",-13.44639778137207],["を与え",-13.44639778137207],["▁ligesom",-13.446409225463867],["▁Reading",-13.446422576904297],["ที่ถูก",-13.44645881652832],["láda",-13.446460723876951],["▁(31",-13.446462631225586],["少なく",-13.446497917175291],["үң",-13.446515083312988],["່ນ",-13.446532249450684],["▁тэг",-13.44653606414795],["ਪਤ",-13.446551322937012],["eceğim",-13.446552276611328],["huni",-13.446563720703123],["▁querer",-13.446566581726074],["znani",-13.446575164794922],["▁Skar",-13.44658660888672],["▁azaldu",-13.446621894836426],["▁encima",-13.446624755859377],["▁lesbian",-13.446645736694336],["▁mendon",-13.446660041809082],["▁خوردن",-13.446664810180664],["市の",-13.446669578552246],["זרים",-13.44667911529541],["トラ",-13.446691513061523],["ГРА",-13.446693420410156],["▁يصل",-13.446703910827637],["екция",-13.446749687194824],["▁گذشت",-13.44675064086914],["▁ေဆး",-13.44676685333252],["▁rrjet",-13.446767807006836],["라도",-13.446779251098633],["▁Høy",-13.446785926818848],["▁አፍሪካ",-13.44681167602539],["անակ",-13.44681453704834],["▁sinä",-13.446817398071287],["āles",-13.446833610534668],["נור",-13.446857452392578],["ابه",-13.44686794281006],["ੇਸ਼",-13.44690990447998],["▁colors",-13.446918487548828],["▁množství",-13.44692325592041],["▁עלי",-13.446937561035156],["ေလာ",-13.446953773498535],["នគរបាល",-13.4469633102417],["接着",-13.446979522705078],["▁ομάδας",-13.446986198425291],["▁spese",-13.44699478149414],["čeva",-13.44700527191162],["▁najmanj",-13.44700527191162],["▁Weil",-13.447009086608888],["家居",-13.44701099395752],["meel",-13.447019577026367],["تقييم",-13.447019577026367],["▁alcool",-13.447021484375],["▁ہاں",-13.447025299072266],["▁Кудай",-13.447032928466797],["▁กัน",-13.447044372558594],["上去",-13.447067260742188],["▁რუ",-13.447068214416504],["拉斯",-13.447086334228516],["▁grupė",-13.44709014892578],["ضمن",-13.44709300994873],["-2008",-13.447105407714844],["▁Prä",-13.447108268737791],["▁вивч",-13.44712734222
412],["▁ISIS",-13.447155952453612],["енной",-13.447166442871094],["здар",-13.447169303894045],["▁acasa",-13.447176933288574],["▁pirmą",-13.447197914123535],["▁овим",-13.447200775146484],["▁kajian",-13.447202682495115],["່ມ",-13.44720458984375],["Power",-13.44721221923828],["▁belirtti",-13.447264671325684],["▁profesionál",-13.447275161743164],["▁fandt",-13.44727611541748],["一周",-13.44727897644043],["resh",-13.447283744812012],["980",-13.447298049926758],["▁PRIM",-13.447305679321287],["▁MEL",-13.447312355041504],["▁Rapid",-13.44731330871582],["▁gospod",-13.447318077087402],["▁જઈ",-13.44731903076172],["adres",-13.447335243225098],["שטיין",-13.447361946105955],["▁quotidien",-13.447365760803224],["▁Andrei",-13.447416305541992],["sprache",-13.447417259216309],["網上",-13.447421073913574],["스코",-13.447421073913574],["▁θέσεις",-13.447437286376951],["▁پوره",-13.447442054748535],["elwa",-13.447443962097168],["mees",-13.447464942932127],["ែម",-13.44748306274414],["после",-13.447486877441406],["▁nekog",-13.447502136230469],["vägen",-13.447511672973633],["prijs",-13.447517395019531],["▁diventare",-13.447526931762695],["▁씨",-13.447532653808594],["▁ενεργ",-13.447539329528809],["▁भइ",-13.44757843017578],["вался",-13.447587966918944],["董事",-13.447587966918944],["▁dimensioni",-13.447601318359377],["थर",-13.447604179382324],["kaista",-13.447629928588867],["▁distribución",-13.447640419006348],["крыва",-13.44765853881836],["配信",-13.44766616821289],["▁которому",-13.447673797607422],["ण्",-13.447677612304688],["ίστα",-13.447710990905762],["▁Cruise",-13.447725296020508],["өрө",-13.44772720336914],["▁второ",-13.44774055480957],["ټي",-13.447744369506836],["არტ",-13.447771072387695],["▁dezvoltarea",-13.44779109954834],["▁Лев",-13.447792053222656],["▁barem",-13.447802543640137],["пач",-13.447803497314451],["▁dobit",-13.447811126708984],["тация",-13.447813987731934],["▁viikko",-13.44782257080078],["▁ရာ",-13.447840690612791],["▁ცენტრი",-13.44785499572754],["laban",-13.44786262512207],["▁BEL",-13.447871208190918],["apuram",-13.4478759765625],["▁SAS",-13.447912216186523],["▁skarp",-13.447921752929688],["▁öğretmen",-13.44793701171875],["层面",-13.447942733764648],["进程",-13.447946548461914],["законодательства",-13.447959899902344],["▁BIO",-13.447959899902344],["▁Watanzania",-13.447959899902344],["▁seguretat",-13.447959899902344],["▁yderligere",-13.447959899902344],["▁друштво",-13.447959899902344],["▁континент",-13.447959899902344],["▁بهرنیو",-13.447959899902344],["▁সংরক্ষিত",-13.447959899902344],["▁ਪਾਕਿਸਤਾਨ",-13.447959899902344],["▁നിരവധി",-13.447959899902344],["▁မူဝါဒ",-13.447959899902344],["▁Karnataka",-13.44796085357666],["▁יהודה",-13.44796085357666],["▁શિક્ષણ",-13.447961807250977],["▁Ձեզ",-13.44796371459961],["▁رتبه",-13.447964668273926],["ανθρωπ",-13.44796657562256],["▁kialakul",-13.44796657562256],["▁bertambah",-13.447967529296877],["▁muidugi",-13.447967529296877],["▁зошто",-13.447973251342772],["▁Cambridge",-13.44797420501709],["▁նվիրված",-13.447979927062988],["▁કરવું",-13.447980880737305],["▁تماشا",-13.447982788085938],["▁کلمات",-13.447983741760254],["▁ሊሆን",-13.447989463806152],["▁Calendar",-13.447990417480469],["▁ऋण",-13.447999000549316],["▁Hefyd",-13.448001861572266],["▁ogóle",-13.448001861572266],["ించింది",-13.448005676269531],["ΣΥ",-13.448006629943848],["▁traum",-13.448006629943848],["▁EÚ",-13.448012351989746],["▁చేస్తున్నారు",-13.448015213012695],["מזרח",-13.448026657104492],["ปุ่ม",-13.448026657104492],["▁basert",-13.448027610778809],["ਪ੍ਰੀਤ",-13.448038101196287],["▁нову",-13.448041915893556],["▁prosenttia",-13.4
48047637939451],["▁الأمور",-13.44804859161377],["ԵԿ",-13.44805145263672],["▁аграр",-13.448083877563477],["ダイ",-13.448091506958008],["แมน",-13.448092460632324],["▁postitus",-13.448092460632324],["باش",-13.44809913635254],["▁Albi",-13.448100090026855],["νση",-13.448112487792969],["▁iestā",-13.448116302490234],["ہن",-13.448125839233398],["▁Culture",-13.448125839233398],["▁tečaj",-13.448128700256348],["▁omogući",-13.448139190673828],["▁Sonne",-13.448156356811523],["▁मंडळ",-13.448160171508787],["▁صغيرة",-13.448162078857422],["▁Luiz",-13.44817066192627],["▁profilak",-13.4481782913208],["hodne",-13.448179244995115],["▁216",-13.448179244995115],["▁namuose",-13.44820499420166],["คาร์",-13.44821071624756],["▁പിന്തുണ",-13.44821834564209],["▁postala",-13.448219299316406],["yapti",-13.448235511779783],["સં",-13.448241233825684],["フラ",-13.4482421875],["өз",-13.448257446289062],["▁yıldır",-13.448265075683594],["brev",-13.448270797729492],["▁kući",-13.448302268981934],["▁giochi",-13.448311805725098],["▁अन्तर्गत",-13.448324203491213],["▁vērtē",-13.448331832885742],["▁anlamın",-13.448339462280272],["▁cotxe",-13.448351860046388],["▁Mặc",-13.448357582092283],["▁продуктов",-13.4483642578125],["▁وقوع",-13.448382377624512],["▁®",-13.44838809967041],["កាល",-13.448389053344728],["▁концепт",-13.448415756225586],["sitz",-13.448418617248535],["▁ziehen",-13.448420524597168],["▁किलो",-13.44843292236328],["möglichst",-13.448433876037598],["▁kuweka",-13.448436737060549],["garanti",-13.448453903198242],["ಡಿಯ",-13.448464393615724],["▁אנשי",-13.448468208312988],["▁Mār",-13.448487281799316],["▁kostar",-13.448493003845217],["evski",-13.448497772216797],["ჰა",-13.448506355285645],["льнік",-13.448513984680176],["▁felül",-13.448527336120604],["▁vaate",-13.448533058166504],["▁גב",-13.448534965515137],["▁налога",-13.44853973388672],["▁доказательств",-13.44855499267578],["▁stílus",-13.44855785369873],["рели",-13.448566436767578],["▁родителите",-13.448567390441896],["▁twarzy",-13.448604583740234],["მშ",-13.448609352111816],["▁Որ",-13.448613166809082],["ขึ้นไป",-13.448655128479004],["▁жеті",-13.448678970336914],["register",-13.448687553405762],["▁ұлт",-13.448689460754396],["▁sarı",-13.448712348937988],["▁बुद्धि",-13.44872760772705],["라이브",-13.44872760772705],["▁Hoxe",-13.448735237121582],["▁úz",-13.448750495910645],["▁ଘରେ",-13.448752403259276],["▁leading",-13.448759078979492],["▁şef",-13.448769569396973],["▁kabin",-13.4487886428833],["▁Dabar",-13.448795318603516],["▁खट",-13.44880199432373],["မယ့္",-13.448805809020996],["▁принц",-13.448813438415527],["Jul",-13.44881820678711],["▁Сот",-13.448822021484377],["▁evening",-13.44882583618164],["moodi",-13.448829650878906],["▁intenso",-13.448834419250488],["▁Ask",-13.448844909667969],["garren",-13.448858261108398],["▁Kawa",-13.448864936828612],["▁algus",-13.448932647705078],["▁xebat",-13.448949813842772],["▁Вось",-13.448967933654783],["多個",-13.448975563049316],["▁participation",-13.448982238769531],["▁ideaal",-13.44898796081543],["▁Cambodia",-13.448993682861328],["▁žym",-13.449000358581545],["рэл",-13.449003219604492],["▁hukuk",-13.449015617370604],["▁змя",-13.449018478393556],["▁creating",-13.44903564453125],["tsaka",-13.449047088623049],["שטע",-13.44906520843506],["burn",-13.449101448059082],["▁directly",-13.449101448059082],["有時",-13.449103355407717],["ՆԵՐ",-13.449108123779297],["karî",-13.449137687683104],["▁चुन",-13.449138641357422],["▁øke",-13.449145317077637],["مدني",-13.4491605758667],["▁permitirá",-13.449164390563965],["发布的",-13.449167251586914],["▁இங்கு",-13.44916820526123],["θύ",-13
[Machine-generated tokenizer data, likely model/sentence-transformer/unigram.json: a SentencePiece-style unigram vocabulary laid out as a JSON array of ["piece", log_probability] pairs spanning many scripts (Latin, Cyrillic, CJK, Indic, Arabic, Thai, ...). Scores in this span cluster around -13.45 to -13.47, i.e. low-frequency pieces. The full listing, which continues beyond this section, is elided here.]
11303710938],["▁bunları",-13.469714164733888],["▁hótelum",-13.469717979431152],["▁सार",-13.469717979431152],["▁Adria",-13.469735145568848],["IQ",-13.469746589660645],["▁Kijk",-13.46975040435791],["만의",-13.469767570495604],["▁kuza",-13.469768524169922],["▁Наступ",-13.469770431518556],["манда",-13.469788551330566],["▁virgin",-13.469813346862791],["▁şəxsi",-13.469815254211426],["▁نوعی",-13.46982192993164],["Пу",-13.469831466674805],["1989",-13.469840049743652],["komisjoni",-13.469844818115234],["▁prestigio",-13.469854354858398],["▁дзён",-13.469877243041992],["leşti",-13.469892501831056],["▁peratus",-13.469907760620115],["ırım",-13.469959259033203],["▁לאומי",-13.469969749450684],["▁त्याची",-13.469972610473633],["تحد",-13.469976425170898],["▁nyer",-13.470029830932615],["ოდი",-13.47003173828125],["▁pyetje",-13.47003173828125],["▁առկա",-13.470044136047363],["▁тілін",-13.470047950744627],["▁láta",-13.470078468322754],["▁ማሳ",-13.470084190368652],["ûs",-13.470090866088867],["▁mollit",-13.470091819763184],["rumah",-13.470099449157717],["▁تعامل",-13.470105171203612],["اوية",-13.470109939575195],["▁Elektrik",-13.470133781433104],["▁құқықтық",-13.470159530639648],["▁genus",-13.470170974731444],["▁Күн",-13.470200538635254],["▁Drago",-13.470203399658203],["▁Saman",-13.470203399658203],["sportiv",-13.470264434814451],["dagar",-13.47027587890625],["ပါက",-13.470276832580566],["กี้",-13.470280647277832],["παιδ",-13.470292091369627],["нез",-13.47031021118164],["нивната",-13.470338821411133],["իայում",-13.470343589782717],["▁tamu",-13.470343589782717],["given",-13.470344543457031],["ತೆಯ",-13.47034740447998],["▁polovin",-13.47036838531494],["▁虽然",-13.47036838531494],["höz",-13.470388412475586],["กรอบ",-13.470393180847168],["gramm",-13.4703950881958],["▁игрок",-13.4704008102417],["ټه",-13.47042179107666],["สุดยอด",-13.470449447631836],["▁aware",-13.470458030700684],["▁Jeder",-13.470458984375],["dress",-13.470465660095217],["▁رج",-13.470468521118164],["▁ഫല",-13.470484733581545],["▁Перша",-13.47049617767334],["ŽI",-13.470503807067873],["▁10-12",-13.47051239013672],["▁versla",-13.470518112182615],["▁ਕੰ",-13.470519065856934],["▁kompres",-13.470523834228516],["fulde",-13.47053050994873],["▁дебат",-13.470542907714844],["▁цела",-13.470544815063477],["увань",-13.47056484222412],["xal",-13.470565795898438],["▁primaria",-13.47056770324707],["▁tankar",-13.47057056427002],["Fan",-13.470582008361816],["നങ്ങള്",-13.47058391571045],["laatu",-13.470584869384766],["ەتتە",-13.470629692077637],["▁വച്ച",-13.470630645751951],["▁बचा",-13.470654487609863],["▁вере",-13.470667839050291],["▁garantier",-13.470677375793455],["▁erről",-13.470688819885254],["ağ",-13.47069263458252],["ώματα",-13.470693588256836],["▁húz",-13.470697402954102],["▁ટી",-13.470711708068848],["▁ciet",-13.470718383789062],["▁vroue",-13.47071933746338],["▁suerte",-13.47072696685791],["Ли",-13.470733642578123],["ροφ",-13.4707612991333],["▁Марко",-13.470766067504885],["▁takana",-13.470785140991213],["▁ይመ",-13.470789909362791],["▁מתי",-13.470791816711426],["весці",-13.470792770385742],["▁Oliy",-13.470792770385742],["金属",-13.470807075500488],["▁Народно",-13.47080898284912],["حط",-13.470817565917969],["▁wyniki",-13.47082233428955],["▁useita",-13.47082805633545],["▁lemon",-13.470829963684082],["▁noticias",-13.470842361450195],["▁ells",-13.470854759216309],["▁نمایند",-13.470855712890623],["ためには",-13.470860481262209],["رسید",-13.470861434936523],["σίου",-13.470863342285156],["▁dj",-13.470864295959473],["刚才",-13.470873832702637],["▁судом",-13.470888137817385],["πεί",-13.47090053
5583496],["тически",-13.47095012664795],["வள",-13.470955848693848],["ővel",-13.470964431762695],["內部",-13.470969200134276],["▁Neuro",-13.470976829528809],["जात",-13.470983505249023],["teatr",-13.470992088317873],["▁দায়",-13.471002578735352],["▁Эс",-13.471031188964844],["vjeçar",-13.471039772033691],["ハイ",-13.471050262451172],["▁دستی",-13.471055030822754],["ਡਰ",-13.47105598449707],["▁kategóriá",-13.47107219696045],["жет",-13.471081733703612],["训",-13.471081733703612],["趕",-13.471099853515623],["▁bussi",-13.471104621887209],["▁ùr",-13.471141815185549],["▁dankie",-13.471160888671877],["スポーツ",-13.47116470336914],["eceğiz",-13.47116756439209],["ยืน",-13.471173286437988],["ເທັກໂນໂລຈີ",-13.47117519378662],["ផ្សេងៗ",-13.47117519378662],["▁približno",-13.47117519378662],["▁Ønsker",-13.47117519378662],["▁βέβαια",-13.47117519378662],["▁مشارکت",-13.47117519378662],["▁supplement",-13.471176147460938],["▁вакцин",-13.471176147460938],["▁արտահայտ",-13.471176147460938],["▁हृदय",-13.471176147460938],["Sesungguhnya",-13.471177101135254],["▁priljubljen",-13.471177101135254],["▁갖고",-13.471177101135254],["σθητ",-13.471179008483888],["젤",-13.471179008483888],["▁దర్శకుడు",-13.471179962158203],["▁rezultātu",-13.47118091583252],["▁fiducia",-13.471181869506836],["▁ցանկանում",-13.471181869506836],["▁האחרונות",-13.471182823181152],["▁ውጭ",-13.471182823181152],["▁keterangan",-13.471193313598633],["Cho",-13.47119426727295],["▁dimulai",-13.471197128295898],["▁დაცვის",-13.471198081970217],["▁egyébként",-13.471202850341797],["▁Årets",-13.471202850341797],["▁سمندر",-13.471202850341797],["学習",-13.471216201782228],["▁Pavi",-13.47121810913086],["▁الهي",-13.471223831176758],["▁питању",-13.471235275268556],["▁බැලුව",-13.47123908996582],["▁לשנות",-13.47126007080078],["όδου",-13.471266746520996],["▁expresión",-13.471269607543944],["dauer",-13.471277236938477],["annut",-13.471278190612791],["確かに",-13.471278190612791],["▁aurinko",-13.471282958984377],["ができ",-13.471282958984377],["айын",-13.471288681030272],["▁août",-13.471302032470703],["▁לבצע",-13.47131633758545],["▁различные",-13.471325874328612],["▁birlikdə",-13.471328735351562],["kinga",-13.471358299255373],["แขน",-13.471358299255373],["▁بیٹی",-13.47137451171875],["▁brú",-13.471375465393066],["というのが",-13.4713773727417],["рика",-13.471383094787598],["▁останалите",-13.471400260925291],["▁ändern",-13.471424102783203],["▁organic",-13.47142505645752],["▁Naši",-13.471427917480469],["▁animi",-13.47143840789795],["▁есім",-13.471443176269531],["▁pesu",-13.471454620361328],["▁Sammen",-13.471457481384276],["layıcı",-13.471480369567873],["▁stendur",-13.471484184265137],["▁ពាក្យ",-13.471490859985352],["età",-13.471495628356934],["cult",-13.471534729003906],["▁एफ",-13.471549987792969],["▁čaro",-13.471555709838867],["▁simpan",-13.471571922302246],["దల",-13.471572875976562],["▁izahay",-13.47158432006836],["▁Kaixo",-13.471587181091309],["▁Twee",-13.471590995788574],["▁mihin",-13.471593856811523],["▁insanları",-13.471604347229004],["▁podria",-13.471627235412598],["▁móc",-13.471630096435549],["Game",-13.471652030944824],["▁rahaa",-13.471656799316406],["▁Sevilla",-13.471659660339355],["DET",-13.471665382385254],["▁registrado",-13.47168254852295],["כע",-13.471710205078123],["▁Gmail",-13.471717834472656],["▁రంగ",-13.47172737121582],["▁члана",-13.471731185913086],["▁साय",-13.471735954284668],["ykseen",-13.471736907958984],["▁віку",-13.471745491027832],["наха",-13.471752166748049],["▁선거",-13.471758842468262],["▁kisa",-13.47176742553711],["▁новим",-13.471771240234377],["▁logra",-13.471776008605955],["žnj
",-13.471792221069336],["նդի",-13.471800804138184],["▁حادثه",-13.471800804138184],["дсон",-13.47181224822998],["vlák",-13.47182273864746],["▁organizatori",-13.471846580505373],["۰۰۰",-13.471854209899902],["▁rabatt",-13.471860885620115],["文物",-13.471894264221191],["▁Ihan",-13.471898078918455],["▁kérdések",-13.471912384033203],["▁dokumenti",-13.471921920776367],["akhir",-13.471929550170898],["အစိုးရ",-13.471929550170898],["▁shoh",-13.471948623657228],["سجل",-13.471965789794922],["▁කාලයේ",-13.471970558166504],["▁fotografija",-13.471985816955566],["krb",-13.472001075744627],["以便",-13.472002029418944],["വല്",-13.47202491760254],["▁ಸರ್",-13.472027778625488],["LIC",-13.472041130065918],["ുള്ളത്",-13.472043991088867],["trap",-13.472064971923828],["工事",-13.47206687927246],["▁pubblicato",-13.472067832946776],["▁Влади",-13.472084045410156],["dunud",-13.472091674804688],["ที่ไหน",-13.472131729125977],["پەر",-13.472146034240724],["▁poeng",-13.47215175628662],["▁Дел",-13.472155570983888],["▁Daher",-13.472160339355469],["ተት",-13.472167015075684],["▁vag",-13.47217082977295],["▁rong",-13.472179412841797],["రామ",-13.472185134887695],["▁hanyalah",-13.47220516204834],["ంపు",-13.47222137451172],["▁máquina",-13.47223663330078],["▁أهمية",-13.472271919250488],["▁सक्रिय",-13.472284317016602],["ലോക",-13.472296714782717],["бақ",-13.472309112548828],["月の",-13.472309112548828],["سود",-13.472325325012209],["▁σχολείο",-13.47232723236084],["▁nepře",-13.472373962402344],["▁ನಿಂತ",-13.47237777709961],["▁सर्वात",-13.47238063812256],["▁Mida",-13.472393989562988],["යො",-13.47240924835205],["開幕",-13.47244644165039],["さえ",-13.472454071044922],["ίνω",-13.472464561462402],["▁bazë",-13.472493171691896],["ជី",-13.47250270843506],["ಲರ್",-13.472506523132324],["▁Бұ",-13.472526550292969],["▁architect",-13.472527503967283],["▁maksaa",-13.472546577453612],["中方",-13.47255516052246],["▁folgt",-13.472611427307127],["▁بیمار",-13.472618103027344],["iausių",-13.472620010375977],["전문",-13.472625732421877],["▁Picha",-13.472676277160645],["▁Bilbo",-13.472697257995604],["▁Posting",-13.472705841064451],["▁କର",-13.472710609436035],["▁çiz",-13.472711563110352],["▁pateikia",-13.472715377807615],["▁räägi",-13.472734451293944],["▁അവരെ",-13.472769737243652],["▁چيف",-13.472803115844728],["ÊN",-13.47281265258789],["อย่า",-13.47281265258789],["▁preced",-13.472827911376951],["กฎ",-13.472830772399902],["▁Қазақстанда",-13.47284698486328],["ОДА",-13.472871780395508],["對象",-13.472871780395508],["▁gewisse",-13.47287654876709],["▁runde",-13.472885131835938],["▁کرا",-13.472891807556152],["позор",-13.47289752960205],["යකි",-13.472914695739746],["লীগ",-13.47292423248291],["以色列",-13.472939491271973],["uksella",-13.472943305969238],["បញ្ជាក់",-13.472975730895996],["障礙",-13.472979545593262],["ፁ",-13.472980499267578],["▁안녕하세요",-13.472983360290527],["Ì",-13.472984313964844],["▁Bjørn",-13.472984313964844],["▁Masyarakat",-13.472984313964844],["▁Mittwoch",-13.472984313964844],["▁bậc",-13.472984313964844],["▁stratégie",-13.472984313964844],["▁ułatwi",-13.472984313964844],["▁zejména",-13.472984313964844],["▁ئۈرۈمچى",-13.472984313964844],["▁میکنم",-13.472984313964844],["▁همچون",-13.472984313964844],["▁iekļaut",-13.47298526763916],["▁tyckte",-13.47298526763916],["▁dipilih",-13.472986221313477],["▁rozhovor",-13.472987174987791],["▁സാധിക്ക",-13.472990036010742],["▁تبریز",-13.47299098968506],["▁संयोजक",-13.47299098968506],["▁alldeles",-13.472992897033691],["리가",-13.472993850708008],["▁دلایل",-13.47299861907959],["▁зареєстрован",-13.473003387451172],["▁tömeg",-13.473004341125488],["▁μ
ικρή",-13.473008155822754],["zależnie",-13.473012924194336],["▁dalawa",-13.473015785217283],["▁fjór",-13.473017692565918],["▁затрат",-13.473023414611816],["▁tán",-13.473031997680664],["▁soltanto",-13.47303295135498],["▁загвар",-13.473034858703612],["▁ماجرا",-13.473037719726562],["▁آرزو",-13.473039627075195],["▁Finns",-13.473042488098145],["▁Стран",-13.473052024841309],["▁forbehold",-13.473058700561523],["▁ഉയര്",-13.473061561584473],["▁импорт",-13.473067283630373],["▁Pemuda",-13.47307586669922],["▁ଅଛି",-13.473077774047852],["▁მით",-13.473078727722168],["ичним",-13.473084449768066],["▁input",-13.4730863571167],["▁postavlja",-13.47309112548828],["▁Nuestro",-13.473099708557127],["▁menghilangkan",-13.473102569580078],["▁stalno",-13.473124504089355],["▁пусть",-13.473148345947266],["▁effektivt",-13.473153114318848],["вторых",-13.473158836364746],["ั่น",-13.473169326782228],["到达",-13.473170280456545],["▁میوه",-13.47317886352539],["▁পুলিশের",-13.47317886352539],["VOR",-13.473184585571287],["▁portala",-13.473190307617188],["▁oikeasti",-13.47319221496582],["▁ústav",-13.473204612731934],["一定会",-13.4732084274292],["▁crai",-13.473211288452148],["▁računalnik",-13.47321319580078],["▁اختصاص",-13.473217010498049],["سىيە",-13.47323226928711],["zteko",-13.473237037658691],["▁ویرایش",-13.473251342773438],["▁nyheder",-13.47325325012207],["▁0.1",-13.473262786865234],["▁ryk",-13.473268508911133],["пинг",-13.473275184631348],["▁Հան",-13.473296165466309],["▁getirdi",-13.473305702209473],["▁ಬಿಡ",-13.473320960998535],["▁koris",-13.473326683044434],["स्मिन्",-13.473334312438965],["ALO",-13.473341941833496],["မျှ",-13.473349571228027],["ونکی",-13.473363876342772],["ायचा",-13.473377227783203],["▁Nafarroako",-13.473387718200684],["▁corner",-13.473390579223633],["▁précis",-13.473394393920898],["▁ድርጅቶች",-13.473410606384276],["▁sağlığı",-13.473440170288086],["▁folyamatosan",-13.473470687866213],["▁думите",-13.473474502563477],["▁Resmi",-13.473482131958008],["Wat",-13.47351360321045],["▁puntual",-13.473528861999512],["▁קומט",-13.473548889160156],["lance",-13.473549842834473],["ტიკა",-13.473583221435549],["▁Otel",-13.473590850830078],["خصص",-13.473615646362305],["▁ઘટ",-13.4736328125],["クラ",-13.473637580871582],["▁satunya",-13.47364616394043],["▁seguridade",-13.473652839660645],["園區",-13.47365379333496],["▁Ciao",-13.473670959472656],["תקיים",-13.473672866821287],["▁sledi",-13.473708152770996],["▁potser",-13.473710060119627],["▁Cyr",-13.47373104095459],["690",-13.473734855651855],["▁культура",-13.473740577697754],["▁organizacijo",-13.473742485046388],["hotell",-13.473743438720703],["▁кадров",-13.473743438720703],["өзі",-13.473749160766602],["▁PRODU",-13.473759651184082],["buku",-13.473764419555664],["▁bibir",-13.473773002624512],["desi",-13.473790168762209],["▁õpetaja",-13.4738130569458],["purna",-13.4738187789917],["▁tartalma",-13.473828315734863],["▁vážn",-13.473873138427734],["▁Leistung",-13.473892211914062],["hält",-13.473906517028809],["▁አንድነት",-13.473918914794922],["▁قلع",-13.473919868469238],["Escola",-13.473920822143556],["▁LAS",-13.473943710327148],["ודו",-13.47394561767578],["ສຽງ",-13.473952293395996],["بیا",-13.473957061767578],["▁zrobił",-13.473957061767578],["院校",-13.474018096923828],["офи",-13.47402000427246],["▁Bharat",-13.474031448364258],["berger",-13.47404670715332],["መላ",-13.47404956817627],["การเดินทาง",-13.474056243896484],["პულ",-13.474068641662598],["和社会",-13.47407341003418],["▁xoog",-13.47410774230957],["▁ואו",-13.47412109375],["▁capitol",-13.474125862121582],["生徒",-13.474133491516112],["▁þy",-13.47413635
2539062],["perin",-13.474138259887695],["נדי",-13.47415256500244],["▁markkinoi",-13.474154472351074],["入手",-13.474170684814451],["▁refere",-13.474224090576172],["▁ਸੁਣ",-13.474246978759766],["▁membran",-13.47425365447998],["kjørt",-13.47426700592041],["担",-13.474302291870115],["ABI",-13.474308967590332],["/0",-13.474313735961914],["▁ដើម",-13.474318504333496],["▁beskrivelse",-13.474329948425291],["▁Lög",-13.474348068237305],["85)",-13.474397659301758],["▁siksi",-13.474414825439451],["▁presentado",-13.47443389892578],["gjøre",-13.47447395324707],["dění",-13.474488258361816],["▁poäng",-13.474488258361816],["▁પોતા",-13.474495887756348],["txar",-13.474523544311523],["▁solusi",-13.474547386169434],["▁studenter",-13.47457504272461],["▁조선",-13.474581718444824],["ကိုး",-13.47459602355957],["▁benytter",-13.474605560302734],["帽",-13.474628448486328],["婚禮",-13.474651336669922],["клю",-13.47465705871582],["ពន្ធ",-13.47467041015625],["▁Pó",-13.474678993225098],["ებების",-13.474679946899414],["tamme",-13.474688529968262],["▁zion",-13.474698066711426],["▁болгож",-13.474703788757324],["▁höra",-13.474719047546388],["▁deltage",-13.474722862243652],["АЛА",-13.474723815917969],["忧",-13.47473430633545],["▁niye",-13.47475528717041],["▁Dress",-13.474757194519045],["签署",-13.474757194519045],["▁голямо",-13.474759101867676],["▁πια",-13.474771499633787],["オススメ",-13.474782943725586],["▁silahlı",-13.4747896194458],["巻",-13.4747896194458],["▁মনোনয়ন",-13.474793434143066],["픽",-13.474794387817385],["תרגום",-13.4747953414917],["▁Demikian",-13.4747953414917],["▁membuktikan",-13.4747953414917],["▁убакыт",-13.4747953414917],["▁उद्घाटन",-13.4747953414917],["▁நிகழ்ச்சி",-13.4747953414917],["▁පුදුම",-13.4747953414917],["▁විනාශ",-13.4747953414917],["▁လမ်းညွှန်",-13.4747953414917],["▁Şəhər",-13.474796295166016],["▁निरन्तर",-13.474796295166016],["▁sekiranya",-13.474797248840332],["▁օրենք",-13.474797248840332],["▁ఇండియా",-13.474797248840332],["▁alohida",-13.474798202514648],["▁opportunities",-13.474799156188965],["▁janúar",-13.47480010986328],["▁дозволу",-13.47480297088623],["ģija",-13.474803924560549],["▁campanya",-13.474803924560549],["▁कार्यकर्ता",-13.474803924560549],["▁odgovornost",-13.474804878234863],["▁만드는",-13.47480583190918],["▁përsëri",-13.474808692932127],["▁пожалуйста",-13.474810600280762],["▁العزيز",-13.474811553955078],["▁Evan",-13.474814414978027],["▁malgranda",-13.474815368652344],["▁நாளை",-13.474815368652344],["دوا",-13.474817276000977],["▁التن",-13.474820137023926],["▁scopul",-13.474822998046877],["ķ",-13.474824905395508],["▁ਜੀਵਨ",-13.474824905395508],["▁ງ",-13.474833488464355],["▁ওপর",-13.474835395812988],["ില്ലെന്ന്",-13.474851608276367],["▁spir",-13.474853515625],["▁agentes",-13.474857330322266],["▁potenciál",-13.474875450134276],["▁jedino",-13.474878311157228],["▁vælger",-13.474881172180176],["▁Кстати",-13.474881172180176],["ਧੀ",-13.474882125854492],["BUS",-13.474893569946287],["▁ibadah",-13.474895477294922],["▁francese",-13.47490406036377],["同学们",-13.47490406036377],["이자",-13.47490406036377],["▁Kisah",-13.47490692138672],["▁moro",-13.474912643432615],["▁lõpuks",-13.474915504455566],["сыг",-13.474919319152832],["▁હોવા",-13.474924087524414],["▁клетки",-13.474937438964844],["ක්ස්",-13.47493839263916],["ಜ್ಞಾನ",-13.474947929382324],["▁вземе",-13.47496223449707],["ЦИЈА",-13.474985122680664],["ด้านล่าง",-13.474987030029297],["שטח",-13.47498893737793],["▁osvet",-13.474994659423828],["▁rejestr",-13.474995613098145],["設有",-13.474995613098145],["մարտ",-13.47499656677246],["▁стоя",-13.474997520446776],["8.5",-13.474998474
121094],["▁dotyczy",-13.47499942779541],["▁забот",-13.47500991821289],["=4",-13.475021362304688],["laud",-13.475028038024902],["盈利",-13.47503662109375],["▁Flora",-13.475049018859863],["▁Install",-13.475056648254396],["▁welcher",-13.47509479522705],["▁kappale",-13.475115776062012],["▁ይሰ",-13.475123405456545],["▁historio",-13.475128173828123],["▁qaytar",-13.475133895874023],["▁условима",-13.475153923034668],["▁природата",-13.4751558303833],["▁zamani",-13.475157737731934],["rzew",-13.475160598754885],["意志",-13.475178718566896],["▁változás",-13.475184440612791],["▁expertise",-13.475191116333008],["▁беретін",-13.475196838378906],["աշար",-13.47519874572754],["▁Kontroll",-13.475201606750488],["तुम्",-13.475207328796388],["▁ធំ",-13.475214958190918],["使って",-13.475242614746094],["村民",-13.475279808044434],["מיים",-13.475282669067385],["даль",-13.475287437438965],["▁ჩი",-13.475313186645508],["diplom",-13.47531509399414],["▁envolve",-13.475326538085938],["▁разлику",-13.47532844543457],["брать",-13.475342750549316],["lığının",-13.47534465789795],["辆",-13.475371360778809],["ložiť",-13.475374221801758],["день",-13.475406646728516],["被告",-13.475406646728516],["▁φοβ",-13.475408554077148],["▁Kær",-13.47541332244873],["емого",-13.475415229797363],["▁pieņemt",-13.47542953491211],["▁esposa",-13.47548770904541],["▁venni",-13.475510597229004],["müz",-13.47551727294922],["▁schrijven",-13.4755220413208],["маты",-13.47552490234375],["▁alış",-13.475528717041016],["▁внесення",-13.47553253173828],["ਮਿ",-13.47553539276123],["▁deloc",-13.475553512573242],["▁finnas",-13.475574493408203],["▁85%",-13.475582122802734],["▁måltid",-13.475601196289062],["▁तप",-13.475618362426758],["comunica",-13.475628852844238],["ಗ್ರಹ",-13.475645065307615],["▁អ្វី",-13.475653648376465],["▁Kier",-13.475658416748049],["▁bapak",-13.475663185119627],["▁automaat",-13.475666999816896],["លុយ",-13.475678443908691],["μαρ",-13.475683212280272],["▁látni",-13.475701332092283],["▁информационно",-13.475708961486816],["رەت",-13.475714683532717],["ући",-13.475715637207031],["▁ដុល្លារ",-13.475716590881348],["▁nyug",-13.47575855255127],["▁گاه",-13.475775718688965],["▁gă",-13.475829124450684],["▁నమ్మ",-13.475838661193848],["рди",-13.47584629058838],["▁bér",-13.47585105895996],["жала",-13.475852012634276],["מבער",-13.475872039794922],["▁kompon",-13.47588062286377],["▁trabajos",-13.475885391235352],["▁Pê",-13.475890159606934],["ദീപ",-13.475893020629885],["でお",-13.475893020629885],["poste",-13.475905418395996],["кое",-13.475909233093262],["▁থাকবে",-13.47591495513916],["▁Kadar",-13.475918769836426],["جري",-13.47592067718506],["출장안마",-13.475929260253906],["▁comentar",-13.475933074951172],["dešimt",-13.475937843322754],["шақ",-13.475947380065918],["95)",-13.475956916809082],["็ง",-13.475992202758787],["▁kalte",-13.475997924804688],["▁መጠን",-13.476000785827637],["▁हक",-13.476028442382812],["ลงไป",-13.476036071777344],["利亚",-13.476036071777344],["▁207",-13.47603702545166],["▁Envia",-13.47608470916748],["▁вихід",-13.476096153259276],["արանի",-13.47612190246582],["▁ölkələrin",-13.476128578186035],["square",-13.476133346557615],["▁செய்வ",-13.47613525390625],["WN",-13.476155281066896],["▁sonu",-13.476165771484377],["▁entregar",-13.476203918457031],["江西",-13.476225852966309],["▁pyörä",-13.476231575012209],["▁Coba",-13.476245880126951],["▁tissu",-13.476248741149902],["اسية",-13.476259231567385],["PRES",-13.476277351379396],["ύρα",-13.476300239562988],["▁Herren",-13.476303100585938],["၁၁",-13.476319313049316],["▁listy",-13.476320266723633],["▁ئەمەلىي",-13.47635269165039],["ľov",
-13.476365089416504],["▁uitat",-13.476373672485352],["▁אלקטרוני",-13.476374626159668],["▁Hanna",-13.47640323638916],["▁وتن",-13.47641658782959],["惜",-13.476452827453612],["▁wine",-13.476469039916992],["▁речовин",-13.476469039916992],["それに",-13.476473808288574],["ંઘ",-13.47647476196289],["degi",-13.476500511169434],["▁ഗ്രാമ",-13.47650909423828],["owska",-13.476513862609863],["ڊي",-13.47652530670166],["▁работник",-13.476529121398926],["涌",-13.476537704467772],["凉",-13.476539611816406],["▁نگرانی",-13.476545333862305],["玫瑰",-13.476558685302734],["▁కింద",-13.476568222045898],["släpp",-13.476572036743164],["▁Mrs",-13.476576805114746],["▁Nästa",-13.476607322692873],["▁ettir",-13.476608276367188],["ຢ້ຽມຢາມ",-13.476609230041504],["ເທື່ອ",-13.476609230041504],["▁Leipzig",-13.47661018371582],["▁catégorie",-13.47661018371582],["▁ietvaros",-13.47661018371582],["▁khủng",-13.47661018371582],["▁makubwa",-13.47661018371582],["▁mėnesį",-13.47661018371582],["▁məbləğ",-13.47661018371582],["▁бүгд",-13.47661018371582],["▁ունեցած",-13.47661018371582],["▁अनुमति",-13.47661018371582],["▁ব্লগ",-13.47661018371582],["▁məhz",-13.476612091064451],["▁постигне",-13.476612091064451],["▁нічога",-13.47661304473877],["▁මෙරට",-13.47661590576172],["▁confiança",-13.476629257202148],["▁الصد",-13.476632118225098],["▁verband",-13.476635932922363],["▁ሥ",-13.476639747619627],["знай",-13.476645469665527],["▁રંગ",-13.476651191711426],["▁ebook",-13.476655960083008],["▁užívateľ",-13.476656913757324],["▁fees",-13.476662635803224],["▁giường",-13.476665496826172],["▁Съвет",-13.476670265197754],["▁Вопрос",-13.47667407989502],["▁категорії",-13.476675033569336],["▁ಬಸ್",-13.476686477661133],["回應",-13.476688385009766],["▁daarvoor",-13.476693153381348],["▁Prova",-13.476696014404297],["ннем",-13.476701736450195],["▁ڪردار",-13.476709365844728],["▁shemale",-13.476710319519045],["▁μετ",-13.47671127319336],["បាញ់",-13.47672176361084],["েন্স",-13.476724624633787],["▁panik",-13.476725578308104],["▁அதிகம்",-13.476752281188965],["▁otroligt",-13.476758003234863],["▁září",-13.476762771606444],["▁Homepage",-13.476776123046877],["▁байхад",-13.476777076721191],["みなさん",-13.476789474487305],["deal",-13.476802825927734],["▁mbaya",-13.476806640625],["▁celkem",-13.47681999206543],["▁сказ",-13.476863861083984],["สี่",-13.476869583129885],["ວີ",-13.476869583129885],["бод",-13.47687816619873],["ちゃんと",-13.476890563964844],["alnego",-13.476898193359377],["部位",-13.476900100708008],["▁Ziua",-13.476909637451172],["teisiin",-13.476926803588867],["▁브",-13.476938247680664],["▁برجام",-13.47694492340088],["成效",-13.476945877075195],["포트",-13.476953506469728],["▁প্রা",-13.47696304321289],["建立了",-13.476968765258787],["▁atenta",-13.47698211669922],["dilo",-13.477008819580078],["▁alterna",-13.477021217346191],["▁Selama",-13.477032661437988],["▁поход",-13.477038383483888],["▁ሕግ",-13.47704029083252],["फॉर्म",-13.47704792022705],["জু",-13.477048873901367],["▁հասցե",-13.477048873901367],["Comp",-13.47706413269043],["▁vehículos",-13.477075576782228],["▁informativ",-13.477078437805176],["未来的",-13.477078437805176],["▁kirim",-13.477086067199709],["▁pritet",-13.47710132598877],["kammer",-13.4771089553833],["▁košík",-13.477123260498049],["▁existente",-13.477131843566896],["▁යාපන",-13.477131843566896],["▁vocal",-13.47714614868164],["ვებ",-13.477154731750488],["зема",-13.47718334197998],["▁السابقة",-13.47718906402588],["▁рату",-13.477192878723145],["accesso",-13.477202415466309],["▁‚",-13.477231979370115],["▁mērķis",-13.477238655090332],["iskas",-13.477242469787598],["篮",-13.477250099182127],["▁
Καθ",-13.477272987365724],["▁تام",-13.477283477783203],["▁اچ",-13.477293968200684],["▁negocios",-13.47733211517334],["北部",-13.477337837219238],["▁listrik",-13.477341651916504],["▁onartu",-13.477343559265137],["▁оних",-13.477343559265137],["使われ",-13.477368354797363],["▁Kone",-13.477370262145996],["ημένη",-13.477387428283691],["▁เธอ",-13.477394104003906],["▁Chang",-13.477399826049805],["ਚੀ",-13.47741985321045],["▁kosong",-13.47741985321045],["▁Sło",-13.477425575256348],["heiten",-13.477439880371094],["ογραφία",-13.47744369506836],["εργο",-13.477461814880373],["▁kapi",-13.47746467590332],["▁государство",-13.4774751663208],["▁китеп",-13.477487564086914],["▁snow",-13.477495193481444],["▁őt",-13.477502822875977],["ਪ੍ਰ",-13.477505683898926],["的食物",-13.477505683898926],["owskiego",-13.477511405944824],["ຂ່າວ",-13.477530479431152],["קיר",-13.47756576538086],["Count",-13.477567672729492],["täminen",-13.477581024169922],["▁Trag",-13.4775972366333],["▁դիմում",-13.477608680725098],["▁Hús",-13.477612495422363],["ლამ",-13.477619171142578],["▁ajtó",-13.477638244628906],["төрү",-13.477648735046388],["រប",-13.477665901184082],["▁agita",-13.477669715881348],["▁déanta",-13.47768211364746],["▁yhtiö",-13.477752685546877],["▁női",-13.477758407592772],["▁tłuszcz",-13.477771759033203],["▁археолог",-13.47780704498291],["ریٹ",-13.47781467437744],["▁зелени",-13.47781467437744],["uros",-13.47782039642334],["ત્રી",-13.47783374786377],["▁никакви",-13.477835655212402],["brak",-13.477864265441896],["的相关",-13.477869987487791],["▁வருகை",-13.477875709533691],["▁Elimu",-13.477913856506348],["▁næstu",-13.47792625427246],["▁MAJ",-13.47792911529541],["▁кадастр",-13.47794246673584],["rzut",-13.477945327758787],["▁Perso",-13.477947235107422],["▁ovi",-13.477981567382812],["▁昨日",-13.477982521057127],["ನ್ನೇ",-13.477986335754396],["GEL",-13.477989196777344],["▁erleben",-13.477999687194824],["▁posibles",-13.478005409240724],["éséhez",-13.478012084960938],["자로",-13.478056907653809],["▁programos",-13.478062629699709],["▁Lietuvai",-13.478065490722656],["ənlər",-13.478069305419922],["▁мебели",-13.478082656860352],["▁답",-13.4780912399292],["ىلى",-13.478097915649414],["▁టై",-13.478099822998049],["ાના",-13.478102684020996],["നിൽ",-13.478105545043944],["ြန္",-13.478107452392578],["▁loĝ",-13.478109359741213],["តួ",-13.478116035461426],["Work",-13.478129386901855],["रत्न",-13.47814655303955],["▁попада",-13.478150367736816],["▁продажу",-13.47816276550293],["▁اپل",-13.478169441223145],["דוק",-13.478185653686523],["372",-13.478212356567385],["еста",-13.47822380065918],["▁地址",-13.47825527191162],["auksen",-13.478269577026367],["▁Rê",-13.478270530700684],["这也",-13.478278160095217],["▁podporo",-13.478314399719238],["▁ጥቅም",-13.478316307067873],["▁මී",-13.478321075439451],["▁saglabā",-13.478333473205566],["▁dəniz",-13.4783353805542],["▁יוני",-13.478344917297363],["▁دھو",-13.478351593017578],["Ros",-13.478355407714844],["名称",-13.478357315063477],["▁ດ້ານ",-13.478387832641602],["鼎",-13.478405952453612],["hef",-13.478410720825195],["▁پى",-13.478416442871094],["เตือน",-13.478426933288574],["転職",-13.478426933288574],["▁भट्टराई",-13.47842788696289],["▁Abertawe",-13.478428840637209],["▁kifayət",-13.478428840637209],["▁príspevok",-13.478428840637209],["▁spécifique",-13.478428840637209],["▁województw",-13.478428840637209],["▁жогорку",-13.478428840637209],["▁завдяки",-13.478428840637209],["▁تۆۋەن",-13.478428840637209],["▁معماری",-13.478428840637209],["▁शायद",-13.478428840637209],["▁ਸਫ਼ੇ",-13.478428840637209],["▁რაოდენობა",-13.478428840637209],["▁conciencia",-13.478
429794311523],["▁ਵਿਆਹ",-13.478429794311523],["▁उघड",-13.47843074798584],["▁Breakfast",-13.478431701660156],["▁ٺاهي",-13.478431701660156],["▁ກ່ຽວກັບພວກເຮົາ",-13.478431701660156],["▁заседание",-13.478432655334473],["▁υπόθεση",-13.478433609008787],["補助",-13.478437423706056],["▁أسعار",-13.478438377380373],["കൂടി",-13.478440284729004],["▁ՌԴ",-13.47844123840332],["▁နာ",-13.478443145751951],["▁veiklą",-13.478446006774902],["▁17:30",-13.47844696044922],["▁шалгалт",-13.47844696044922],["▁σημασία",-13.478447914123535],["▁այնքան",-13.4784517288208],["▁обліку",-13.47845458984375],["▁නිල",-13.47846221923828],["▁إطلاق",-13.478470802307127],["▁Geschenk",-13.47848129272461],["▁condicions",-13.478482246398926],["▁집중",-13.47848415374756],["పోతున్న",-13.478485107421877],["▁sanatate",-13.478487014770508],["▁hledá",-13.47849464416504],["zdr",-13.478525161743164],["▁sứ",-13.478532791137695],["ของปรสิต",-13.47856330871582],["▁berbuat",-13.478577613830566],["écrire",-13.478580474853516],["شاعر",-13.478602409362791],["▁Хүү",-13.478628158569336],["పద",-13.478629112243652],["▁Dewi",-13.478636741638184],["▁oila",-13.47867202758789],["▁9000",-13.478707313537598],["▁විශ්වාස",-13.478715896606444],["ကန္",-13.47872257232666],["▁sjaj",-13.47872829437256],["Tom",-13.478747367858888],["▁Musical",-13.478765487670898],["▁функционира",-13.47879695892334],["educa",-13.478812217712402],["komme",-13.47881317138672],["▁ауыр",-13.4788236618042],["▁කෑම",-13.478827476501465],["ellut",-13.478841781616213],["ೇಂದ್ರ",-13.47885036468506],["▁играе",-13.478890419006348],["سۇ",-13.47889518737793],["▁العراقي",-13.47890567779541],["igual",-13.478907585144045],["ukus",-13.47890853881836],["YY",-13.47891616821289],["/30",-13.47891902923584],["자들이",-13.478931427001951],["▁vinne",-13.47893524169922],["▁Član",-13.478954315185549],["व्ही",-13.47900104522705],["▁facilement",-13.479015350341797],["▁informaţii",-13.47902011871338],["▁Shell",-13.479023933410645],["▁shteti",-13.479026794433594],["▁ומי",-13.47903060913086],["▁Zeitung",-13.47905445098877],["▁миг",-13.479059219360352],["法官",-13.479085922241213],["4,7",-13.479089736938477],["CONT",-13.479106903076172],["▁AMB",-13.479135513305664],["SSR",-13.479138374328612],["▁feria",-13.4791898727417],["大會",-13.479209899902344],["▁ţări",-13.47921371459961],["▁සුදු",-13.479232788085938],["ιδε",-13.47926425933838],["▁πιθαν",-13.479329109191896],["▁រួច",-13.479355812072754],["вешт",-13.479361534118652],["部落",-13.479368209838867],["▁noyabr",-13.47938632965088],["ligheter",-13.479406356811523],["▁കുന്ന",-13.47942352294922],["▁Fina",-13.479425430297852],["ตัดสินใจ",-13.479443550109863],["Fra",-13.479466438293455],["್ತ",-13.479474067687988],["▁anuncio",-13.479474067687988],["ับ",-13.479479789733888],["statu",-13.479490280151367],["▁Фре",-13.479491233825684],["▁agenti",-13.4794921875],["▁учет",-13.4794921875],["▁środowisk",-13.479497909545898],["▁Тек",-13.479546546936035],["krise",-13.479562759399414],["▁поставлен",-13.479576110839844],["▁тайлан",-13.479595184326172],["▁ispod",-13.47961711883545],["一块",-13.479632377624512],["મુ",-13.47964572906494],["taas",-13.479660034179688],["ତୋ",-13.479662895202637],["▁всім",-13.47967529296875],["▁Princess",-13.479676246643066],["▁demandé",-13.479693412780762],["範",-13.479694366455078],["رائي",-13.47970962524414],["льность",-13.47972297668457],["▁страж",-13.479745864868164],["ഗ്രഹ",-13.47975730895996],["▁والخ",-13.47976303100586],["Shqip",-13.479776382446287],["ั้น",-13.479776382446287],["掃",-13.479820251464844],["▁INC",-13.479830741882324],["▁Świe",-13.47983169555664],["जार",-13
.479884147644045],["▁Alterna",-13.479887008666992],["▁новости",-13.479896545410156],["▁Path",-13.479910850524902],["ഠ",-13.479915618896484],["为您",-13.479923248291016],["▁foretage",-13.479955673217772],["९०",-13.479963302612305],["ητών",-13.479982376098633],["▁Sumqayıt",-13.479990005493164],["▁Республика",-13.47999095916748],["▁зони",-13.480003356933594],["▁سرچ",-13.480019569396973],["▁neku",-13.480053901672363],["▁Ngu",-13.480058670043944],["անս",-13.480072975158691],["▁Pariz",-13.480072975158691],["가고",-13.480076789855955],["kapi",-13.48007869720459],["平日",-13.48008155822754],["ंदी",-13.480100631713867],["avdeling",-13.480116844177246],["はどう",-13.480119705200195],["▁jaką",-13.480124473571776],["ଧାନ",-13.480138778686523],["จันทร์",-13.480143547058104],["ครั้งนี้",-13.480152130126951],["▁proprii",-13.480154037475586],["േണ്ടത്",-13.48017406463623],["أبناء",-13.480196952819824],["紐約",-13.480228424072266],["▁ਚੁੱਕ",-13.480229377746582],["さまざまな",-13.480230331420898],["▁الدي",-13.480231285095217],["▁мислите",-13.480232238769531],["碧",-13.48023796081543],["อันตราย",-13.48024845123291],["เครือข่าย",-13.48024845123291],["Ể",-13.48024845123291],["dikləri",-13.480249404907228],["إنجليزي",-13.480250358581545],["▁Charakter",-13.480250358581545],["▁désormais",-13.480250358581545],["▁główna",-13.480250358581545],["▁शनिबार",-13.480250358581545],["▁ಸ್ವಲ್ಪ",-13.480250358581545],["▁đòi",-13.48025131225586],["▁првенство",-13.48025131225586],["▁меңгер",-13.480252265930176],["▁Ternyata",-13.480253219604492],["▁chiffre",-13.480253219604492],["▁толгой",-13.480253219604492],["▁ਘੱਟ",-13.480255126953123],["பர",-13.48025608062744],["▁жергілікті",-13.48025894165039],["原始",-13.480264663696287],["▁Timbalan",-13.480265617370604],["centa",-13.480268478393556],["▁fructe",-13.480269432067873],["▁títol",-13.4802827835083],["ebileceği",-13.480284690856934],["เหตุการณ์",-13.48029327392578],["▁ակումբ",-13.480306625366213],["▁deseo",-13.480308532714844],["▁κλπ",-13.480313301086426],["▁повідомив",-13.480313301086426],["▁fiskal",-13.480319023132324],["diklerini",-13.480320930480955],["▁prosedur",-13.480331420898438],["▁دستیاب",-13.480331420898438],["▁Tiada",-13.48033618927002],["▁stał",-13.480342864990234],["১০",-13.48034381866455],["贏",-13.480352401733398],["▁Оқу",-13.480359077453612],["▁Bashkimi",-13.480361938476562],["▁individuel",-13.480364799499512],["▁lyn",-13.480376243591309],["▁במרכז",-13.48038387298584],["க்கே",-13.480388641357422],["▁isim",-13.480390548706056],["يک",-13.48039436340332],["ဆန္း",-13.480401039123535],["líž",-13.480401992797852],["のではなく",-13.480409622192385],["▁bölmə",-13.480411529541016],["90)",-13.480432510375977],["▁Рак",-13.48043441772461],["້ອນ",-13.480440139770508],["▁pekne",-13.480443954467772],["▁Więcej",-13.48044490814209],["▁Juma",-13.480451583862305],["▁persoanelor",-13.480483055114746],["▁ပြင်ပ",-13.480494499206545],["ริม",-13.480499267578123],["▁Dieu",-13.480502128601074],["puisto",-13.480506896972656],["▁socia",-13.480520248413086],["▁edilmişdir",-13.480525016784668],["試合",-13.48052978515625],["▁грижи",-13.480536460876465],["▁الحم",-13.480546951293944],["▁Francesco",-13.480565071105955],["服装",-13.48057460784912],["▁siekia",-13.480578422546388],["खन",-13.48058032989502],["អេ",-13.480588912963867],["▁kene",-13.480598449707031],["GIN",-13.480606079101562],["GIF",-13.480615615844728],["salu",-13.48061752319336],["▁Serviço",-13.480621337890623],["ទិ",-13.48062229156494],["▁Ramadhan",-13.48062229156494],["▁sinistra",-13.48064136505127],["▁ומש",-13.48064136505127],["▁홍보",-13.480645179748535],["ໍາ",-13.4
806547164917],["▁ոստիկանության",-13.480663299560549],["▁pogovor",-13.480669021606444],["▁білу",-13.480672836303713],["لىشىش",-13.480674743652344],["▁Tomēr",-13.48067855834961],["▁manna",-13.480687141418455],["การสร้าง",-13.480710983276367],["ોલ",-13.480718612670898],["ÖK",-13.480722427368164],["пын",-13.48073959350586],["조사",-13.480820655822754],["הגנה",-13.480850219726562],["haki",-13.480854034423828],["▁Meski",-13.480868339538574],["▁dược",-13.48087215423584],["▁Тип",-13.48088836669922],["▁रोड",-13.48090362548828],["▁walikuwa",-13.48090648651123],["før",-13.480914115905762],["▁Adana",-13.480934143066406],["▁Første",-13.48093605041504],["ическо",-13.480952262878418],["▁producer",-13.480953216552734],["▁kasutamise",-13.480955123901367],["laşır",-13.48095703125],["▁දෙස",-13.48096752166748],["تبادل",-13.480984687805176],["தற்கான",-13.480992317199709],["▁hole",-13.481000900268556],["▁całą",-13.481013298034668],["mentum",-13.481032371520996],["▁كف",-13.481048583984377],["WIN",-13.481073379516602],["čkim",-13.481087684631348],["▁vitamín",-13.48109531402588],["▁Hua",-13.481101989746094],["fler",-13.481108665466309],["ownik",-13.481111526489258],["▁Seine",-13.481118202209473],["រួម",-13.481120109558104],["▁RTV",-13.481141090393066],["▁comunist",-13.48115062713623],["▁kauli",-13.48115348815918],["baixo",-13.481155395507812],["▁aprila",-13.481161117553713],["▁tilbyde",-13.481175422668455],["EMBER",-13.481191635131836],["▁Hahaha",-13.481192588806152],["▁Chá",-13.48121738433838],["яви",-13.481239318847656],["▁cesti",-13.481243133544922],["sområde",-13.481244087219238],["កិច្ច",-13.4812593460083],["အေ",-13.481280326843262],["แป",-13.481300354003906],["03)",-13.48130226135254],["▁резултатите",-13.481316566467283],["АЦ",-13.481341361999512],["ുന്നതിന്",-13.481363296508787],["문을",-13.481380462646484],["▁lady",-13.48139476776123],["ल्यास",-13.481410026550291],["▁tukio",-13.48143196105957],["▁שקל",-13.481436729431152],["spraak",-13.481452941894531],["▁vége",-13.481452941894531],["▁вэб",-13.48146629333496],["▁Gipuzkoa",-13.481473922729492],["▁აღნიშნული",-13.481473922729492],["▁syv",-13.481477737426758],["fertig",-13.481493949890137],["майды",-13.481493949890137],["▁finala",-13.481496810913086],["▁løfte",-13.481526374816896],["なこと",-13.481541633605955],["▁ант",-13.481559753417969],["▁Garda",-13.481568336486816],["യാക",-13.481574058532717],["▁kolega",-13.48158073425293],["leef",-13.481595039367676],["▁маселелер",-13.481647491455078],["▁Britaniya",-13.481649398803713],["▁øl",-13.481675148010254],["κόπ",-13.481681823730469],["670",-13.481690406799316],["▁Bekas",-13.481711387634276],["▁посо",-13.481728553771973],["▁starfa",-13.481765747070312],["fê",-13.481772422790527],["▁visibili",-13.48177433013916],["узи",-13.481791496276855],["imizde",-13.481793403625488],["гуу",-13.481807708740234],["▁ныне",-13.481840133666992],["よかった",-13.481856346130373],["غسل",-13.48185920715332],["▁ženske",-13.481884002685549],["сана",-13.48189926147461],["▁Barre",-13.481905937194824],["schaffen",-13.481926918029783],["▁concur",-13.481929779052734],["ńska",-13.481945037841797],["▁зрител",-13.481950759887695],["annonce",-13.481974601745604],["闲",-13.48199462890625],["▁vrednost",-13.481998443603516],["▁fb",-13.482014656066896],["蜂",-13.48201847076416],["▁негізінде",-13.482032775878906],["েও",-13.482033729553224],["politika",-13.482043266296388],["槍",-13.482043266296388],["锦",-13.48204517364502],["▁қатынастар",-13.482046127319336],["▁имел",-13.482050895690918],["栄養",-13.482051849365234],["暇",-13.48205280303955],["ನ್ಯಾ",-13.482053756713867]
,["▁charm",-13.482061386108398],["応援",-13.48206615447998],["▁hunden",-13.482068061828612],["▁жақ",-13.48206901550293],["เอ็ม",-13.482073783874512],["认定",-13.482073783874512],["▁voluptas",-13.482074737548828],["▁ιστοσελίδα",-13.482074737548828],["▁מומלץ",-13.482074737548828],["▁शेतकरी",-13.482074737548828],["▁গ্রুপ",-13.482074737548828],["▁ਦੁਆਰਾ",-13.482074737548828],["▁සම්මාන",-13.482074737548828],["▁صحنه",-13.482075691223145],["▁सगळ्या",-13.48207664489746],["▁հայտնում",-13.48208236694336],["値段",-13.482086181640623],["ISU",-13.48208713531494],["▁засоби",-13.482089042663574],["▁الاستثمار",-13.482091903686523],["▁לקחת",-13.482099533081056],["▁добавя",-13.482100486755373],["▁rhaglen",-13.482108116149902],["ଠୁ",-13.482111930847168],["▁прибор",-13.4821138381958],["પોર્ટ",-13.482122421264648],["ස්ට්",-13.482122421264648],["mıza",-13.482132911682127],["▁zuzendari",-13.482134819030762],["ລັກ",-13.482136726379396],["▁معاملے",-13.48214340209961],["▁መልስ",-13.482149124145508],["▁katra",-13.482152938842772],["▁mortis",-13.482155799865724],["▁konsolid",-13.48216152191162],["▁rasva",-13.48216152191162],["▁Оросын",-13.48216438293457],["ttavissa",-13.482196807861328],["▁ദേവ",-13.482217788696287],["▁ປະຊາຊົນ",-13.482217788696287],["▁thonë",-13.4822359085083],["▁calda",-13.48225212097168],["▁olhar",-13.482254028320312],["▁strel",-13.48226547241211],["تین",-13.48227596282959],["下次",-13.482277870178224],["▁formou",-13.482279777526855],["▁ملك",-13.482279777526855],["лучення",-13.48229694366455],["▁kantoor",-13.482307434082031],["▁dijelu",-13.48231315612793],["άστε",-13.482314109802246],["▁שמי",-13.48233699798584],["▁ប្រកាស",-13.482358932495115],["▁Jiří",-13.482376098632812],["▁اکس",-13.482391357421877],["ूँ",-13.482396125793455],["ഹാര",-13.482416152954102],["▁sorunu",-13.482419967651367],["אלה",-13.482427597045898],["rådgiver",-13.482434272766112],["ళి",-13.482439041137695],["▁сопствени",-13.482440948486328],["▁partecipare",-13.482451438903809],["▁проходит",-13.482464790344238],["▁орох",-13.482471466064451],["iyak",-13.482476234436035],["あなたが",-13.482477188110352],["▁леп",-13.482481002807615],["нчо",-13.48248291015625],["▁Gratul",-13.482510566711426],["▁Adidas",-13.482518196105955],["▁μισ",-13.482518196105955],["▁тренд",-13.482526779174805],["czyni",-13.482538223266602],["▁მსგავსი",-13.482573509216309],["▁pelaku",-13.48257827758789],["▁수업",-13.482583045959473],["▁өткізу",-13.48259735107422],["▁شیرین",-13.482620239257812],["यंत्र",-13.482623100280762],["いたら",-13.482623100280762],["tronic",-13.482626914978027],["fear",-13.482633590698242],["▁adicional",-13.482637405395508],["▁उनीहरू",-13.482650756835938],["▁орус",-13.482662200927734],["タイプ",-13.482665061950684],["ittelu",-13.482674598693848],["ībai",-13.482677459716797],["▁გამოვ",-13.482694625854492],["▁gole",-13.482711791992188],["▁gesien",-13.482728958129885],["▁Tura",-13.482742309570312],["šķi",-13.482757568359377],["▁Valentine",-13.482802391052246],["▁dependent",-13.482842445373535],["ենս",-13.48285961151123],["东方",-13.48285961151123],["луб",-13.482861518859863],["▁lood",-13.482861518859863],["ధన",-13.482869148254396],["▁dessin",-13.48287868499756],["▁љуб",-13.482882499694824],["▁22:00",-13.482892036437988],["▁ιδι",-13.482917785644531],["-32",-13.482922554016112],["▁उत्पाद",-13.482928276062012],["ോളം",-13.48294162750244],["NJA",-13.482952117919922],["ሰት",-13.4829740524292],["քա",-13.482976913452148],["▁показал",-13.482977867126465],["▁정상",-13.482988357543944],["▁ndryshim",-13.482989311218262],["主办",-13.483025550842283],["▁ఎక్కువగా",-13.483027458190918],["▁Boli"
,-13.483033180236816],["▁είχα",-13.483046531677246],["लय",-13.48304843902588],["قطاع",-13.483057022094728],["▁pembuatan",-13.483060836791992],["되기",-13.483067512512209],["▁gitme",-13.4830904006958],["▁ndjek",-13.4830961227417],["▁каму",-13.483153343200684],["頂き",-13.483160972595217],["▁hisz",-13.483171463012695],["▁masala",-13.483194351196287],["▁TK",-13.483198165893556],["▁contabil",-13.4832124710083],["▁ਅੰ",-13.48322582244873],["▁курсу",-13.483230590820312],["▁apple",-13.483232498168944],["sunt",-13.483240127563477],["▁aliran",-13.483258247375488],["rzyć",-13.483304023742676],["वीर",-13.48331069946289],["▁diagnostik",-13.483317375183104],["日間",-13.483325004577637],["▁Krás",-13.483376502990724],["▁عما",-13.48338794708252],["▁liels",-13.483393669128418],["servi",-13.4833984375],["্যান",-13.48342227935791],["▁Dhan",-13.48343563079834],["▁какую",-13.483437538146973],["▁musicale",-13.483444213867188],["▁layout",-13.483450889587402],["▁tutuk",-13.483453750610352],["LEK",-13.483480453491213],["koolitus",-13.483485221862791],["▁levou",-13.483490943908691],["εύουν",-13.483514785766602],["कुट",-13.483514785766602],["▁adabiyoti",-13.483521461486816],["▁conscient",-13.483526229858398],["▁гарах",-13.483528137207031],["▁asiatisk",-13.483535766601562],["ຂາ",-13.483563423156738],["อู",-13.483583450317385],["brat",-13.483610153198242],["▁breyting",-13.483612060546877],["wyf",-13.483613967895508],["▁ĉio",-13.48362159729004],["lenmiş",-13.483631134033203],["▁грав",-13.483685493469238],["▁lutte",-13.48369312286377],["تفاصيل",-13.483704566955566],["▁spreek",-13.483708381652832],["ရှာ",-13.483710289001465],["ાદ",-13.483713150024414],["があるので",-13.48371410369873],["porto",-13.48372745513916],["▁projesi",-13.483738899230955],["eriet",-13.483749389648438],["▁lekar",-13.483794212341309],["▁Små",-13.483804702758787],["▁persoalan",-13.483821868896484],["▁본인",-13.483826637268066],["乏",-13.483826637268066],["▁finaliza",-13.483850479125977],["ुर्",-13.483858108520508],["122",-13.48386287689209],["▁darbai",-13.48386287689209],["恰",-13.48387050628662],["勘",-13.483877182006836],["▁然而",-13.483888626098633],["騰",-13.483888626098633],["біл",-13.48389720916748],["▁зөвшөөр",-13.48389720916748],["٬",-13.48390293121338],["▁Peugeot",-13.48390293121338],["▁kaq",-13.48390293121338],["▁rešitev",-13.48390293121338],["▁Вікіпедыя",-13.48390293121338],["▁веднаш",-13.48390293121338],["▁какъв",-13.48390293121338],["▁հասարակական",-13.48390293121338],["▁ciascun",-13.483903884887695],["▁церков",-13.483903884887695],["▁Bewertung",-13.483904838562012],["▁ସେବା",-13.483904838562012],["▁ଜନ୍ମ",-13.483905792236328],["▁academic",-13.483906745910645],["▁చేస్తూ",-13.483908653259276],["▁хаагч",-13.483909606933594],["▁দেখুন",-13.483909606933594],["▁kommentteja",-13.483911514282228],["▁makhluk",-13.483911514282228],["kondiĉe",-13.48391342163086],["▁təbrik",-13.48391342163086],["▁wieczor",-13.483916282653809],["▁سرزمین",-13.48391819000244],["▁커피",-13.48392105102539],["▁agradable",-13.48392391204834],["▁conference",-13.483926773071287],["▁12:30",-13.483936309814451],["▁hrana",-13.48393726348877],["▁उमेर",-13.48394012451172],["želi",-13.483957290649414],["▁денот",-13.483967781066896],["感恩",-13.483973503112791],["IUS",-13.483975410461426],["fähr",-13.484018325805664],["▁versuchen",-13.484027862548828],["▁businesses",-13.484033584594728],["ръс",-13.484036445617676],["▁Τον",-13.484042167663574],["Ար",-13.484055519104004],["ūz",-13.484057426452637],["huan",-13.484092712402344],["▁بینک",-13.48410701751709],["▁giornali",-13.484127044677734],["60%",-13.484132766723
633],["▁bytu",-13.484149932861328],["▁Ernst",-13.484160423278809],["▁dobio",-13.484173774719238],["ဆိုရင္",-13.48418426513672],["▁gasto",-13.4841890335083],["▁שליט",-13.48419189453125],["tiewe",-13.484203338623049],["▁hovor",-13.484203338623049],["▁vlastně",-13.484203338623049],["пиша",-13.484204292297363],["కల",-13.484212875366213],["▁پۇل",-13.484224319458008],["▁figlio",-13.484230041503906],["▁третман",-13.484230995178224],["шњи",-13.484233856201172],["▁kirol",-13.484241485595703],["▁Kola",-13.484259605407717],["ჯობ",-13.484260559082031],["▁ხართ",-13.48426914215088],["▁kannata",-13.484275817871094],["▁аналізу",-13.484282493591309],["▁aurretik",-13.484294891357422],["овими",-13.48432731628418],["скому",-13.484334945678713],["上に",-13.484346389770508],["ificado",-13.484366416931152],["▁ekde",-13.484387397766112],["วย",-13.48439884185791],["MH",-13.48440170288086],["▁รอบ",-13.484404563903809],["▁težave",-13.48440933227539],["▁правата",-13.484414100646973],["存在的",-13.484424591064451],["ผู้ที่",-13.484430313110352],["qdan",-13.484466552734377],["内的",-13.484472274780272],["২৮",-13.484477043151855],["▁Кина",-13.484482765197754],["ავდა",-13.48448371887207],["іліся",-13.48448657989502],["▁Valeri",-13.484488487243652],["▁gadi",-13.484496116638184],["తె",-13.484506607055664],["▁fomba",-13.484508514404297],["▁1,9",-13.48451328277588],["的关键",-13.48451805114746],["▁zlato",-13.48452377319336],["▁allò",-13.484541893005373],["▁بالاتر",-13.48454475402832],["▁intense",-13.48455047607422],["▁турш",-13.484551429748535],["▁produksjon",-13.48455810546875],["gumi",-13.484597206115724],["ৰী",-13.484606742858888],["ாங்க",-13.484609603881836],["сени",-13.484611511230469],["▁Constit",-13.48461627960205],["▁ditën",-13.484619140625],["▁musu",-13.484623908996582],["▁환자",-13.484624862670898],["▁медиа",-13.484639167785645],["yasının",-13.484659194946287],["алдык",-13.484668731689451],["▁เลือก",-13.484682083129885],["▁homofil",-13.484708786010742],["▁kjente",-13.484710693359377],["▁לזה",-13.484728813171388],["▁एकदम",-13.484728813171388],["uttu",-13.484731674194336],["▁ఆంధ్ర",-13.484758377075195],["▁ווע",-13.484766960144045],["▁heran",-13.484769821166992],["▁mø",-13.48477554321289],["夠",-13.484776496887209],["▁ct",-13.484782218933104],["ることができる",-13.484784126281738],["قرر",-13.484789848327637],["▁Estos",-13.484808921813965],["修理",-13.484808921813965],["לף",-13.484814643859863],["▁osebe",-13.48481559753418],["▁видання",-13.484833717346191],["MAIL",-13.484838485717772],["ουσία",-13.48484230041504],["ראות",-13.484850883483888],["▁pression",-13.48485279083252],["▁appear",-13.484865188598633],["ลี่",-13.48487949371338],["▁କରିବ",-13.484892845153809],["लगायत",-13.484893798828123],["▁ophold",-13.484893798828123],["▁gigante",-13.48489761352539],["▁හමුදාව",-13.484907150268556],["երկր",-13.48491668701172],["ሳቸው",-13.484918594360352],["уче",-13.484922409057615],["▁ведае",-13.484926223754885],["▁ሥር",-13.484930992126465],["▁graba",-13.484941482543944],["▁باست",-13.484963417053224],["beba",-13.484966278076172],["कर्ण",-13.48499584197998],["▁lukea",-13.485010147094728],["έψει",-13.485031127929688],["نوں",-13.485037803649902],["▁Neil",-13.48505401611328],["▁συγκεκριμένο",-13.485057830810549],["ினால்",-13.48505973815918],["ماء",-13.48508358001709],["રિયા",-13.48508644104004],["save",-13.485093116760254],["անը",-13.48512077331543],["▁restauranter",-13.485125541687012],["▁subtil",-13.48513889312744],["CER",-13.485140800476074],["nedd",-13.485151290893556],["memor",-13.485156059265137],["▁tervez",-13.485161781311035],["BAH",-13.485162734985352]
,["▁آورده",-13.485180854797363],["▁vian",-13.485188484191896],["任意",-13.48520565032959],["▁түш",-13.485246658325195],["▁председател",-13.485251426696776],["▁خەۋەر",-13.485257148742676],["▁buď",-13.48526096343994],["▁ਤਰ",-13.485289573669434],["▁трет",-13.485305786132812],["▁οικ",-13.485309600830078],["ของผู้",-13.485313415527344],["▁fyrstu",-13.48531723022461],["NUM",-13.485321998596191],["▁spät",-13.485340118408203],["▁CONS",-13.485345840454102],["Studi",-13.485350608825684],["▁המלא",-13.48536777496338],["▁robež",-13.485389709472656],["▁અર્થ",-13.485393524169922],["▁collectie",-13.485406875610352],["▁Steel",-13.485410690307615],["▁vrsti",-13.48541259765625],["ቀበ",-13.48543643951416],["▁danmark",-13.485441207885742],["άστηκε",-13.485445976257324],["▁montaż",-13.485453605651855],["עבור",-13.485480308532717],["▁dáva",-13.485491752624512],["▁সালে",-13.485516548156738],["▁разположен",-13.485529899597168],["ひとつ",-13.485529899597168],["ుకుంటూ",-13.48554801940918],["▁setuju",-13.48556137084961],["ākā",-13.485576629638672],["Рус",-13.485577583312988],["יופי",-13.485593795776367],["▁पौडेल",-13.485595703125],["גובה",-13.485600471496582],["▁skatīt",-13.485604286193848],["▁Manila",-13.485610961914062],["▁slovensko",-13.485612869262695],["▁steroid",-13.485635757446287],["それぞれの",-13.485645294189451],["增速",-13.485651969909668],["覽",-13.485654830932615],["▁katerim",-13.4856595993042],["▁informazioa",-13.485663414001465],["輩",-13.485674858093262],["穆",-13.485681533813477],["▁kritisk",-13.485713005065918],["▁skok",-13.48571491241455],["惹",-13.485715866088867],["▁sertifikat",-13.485724449157717],["แข่ง",-13.48573112487793],["ภายใต้",-13.485733032226562],["ស្គាល់",-13.48573398590088],["overeenkomst",-13.485734939575195],["▁Këshilli",-13.485734939575195],["▁dažniausiai",-13.485734939575195],["▁démarche",-13.485734939575195],["▁estratégia",-13.485734939575195],["▁pēdējā",-13.485734939575195],["▁ਸਮੇਤ",-13.485734939575195],["▁ຕິດຕາມພວກເຮົາ",-13.485734939575195],["▁თანამედროვე",-13.485734939575195],["▁մայր",-13.485735893249512],["▁betreft",-13.485736846923828],["▁истакао",-13.485736846923828],["បណ្តាញ",-13.485737800598145],["▁istnieje",-13.48573875427246],["▁Dragnea",-13.485740661621094],["തിയ",-13.48574161529541],["▁କରୁଛନ୍ତି",-13.48574161529541],["▁çawa",-13.485743522644045],["もらう",-13.485746383666992],["▁anlaşma",-13.48574924468994],["▁במו",-13.48574924468994],["▁почивка",-13.485750198364258],["زۇ",-13.485758781433104],["▁Sadece",-13.485760688781738],["▁นาง",-13.485764503479004],["▁zdecydowanie",-13.485774993896484],["▁quyidagi",-13.485776901245115],["▁dissi",-13.48578929901123],["▁قبض",-13.485795021057127],["▁dowlad",-13.485795974731444],["收费",-13.485806465148926],["▁നേടിയ",-13.485809326171877],["▁Tāpat",-13.485814094543455],["▁kombëtare",-13.485814094543455],["▁తెలుసా",-13.485814094543455],["χέ",-13.48581600189209],["dzy",-13.48582935333252],["krep",-13.485833168029783],["▁الاتفاق",-13.48583984375],["▁ಕಂಡ",-13.485846519470217],["差异",-13.485847473144531],["▁пах",-13.48585033416748],["續",-13.485857963562012],["▁europeisk",-13.485859870910645],["▁היתר",-13.48586082458496],["antic",-13.485884666442873],["▁ڪپ",-13.48588752746582],["પુ",-13.485891342163086],["▁Julius",-13.485892295837402],["ACI",-13.485895156860352],["▁Martina",-13.485896110534668],["▁tunnus",-13.485896110534668],["▁जोशी",-13.485896110534668],["▁aşağıdaki",-13.485897064208984],["▁агульна",-13.485902786254885],["▁Vertrags",-13.485905647277832],["▁trib",-13.485918045043944],["▁aysan",-13.48592472076416],["▁тяло",-13.485968589782717],["▁yapacak",-13.4859
71450805664],["aikoina",-13.485986709594728],["▁ragihand",-13.486007690429688],["▁dəqiqə",-13.486011505126951],["▁betrokken",-13.486032485961914],["കൃത",-13.486034393310549],["mässig",-13.48604965209961],["▁treh",-13.486061096191406],["▁bô",-13.486068725585938],["ေျပာင္း",-13.486071586608888],["▁moved",-13.486088752746582],["▁Keskus",-13.486095428466797],["ڀا",-13.486104011535645],["лықтар",-13.486108779907228],["▁וְ",-13.486112594604492],["▁konferens",-13.48612117767334],["▁вечно",-13.486127853393556],["▁అందించ",-13.486156463623049],["▁GEN",-13.486166000366213],["▁metodi",-13.486176490783691],["掉了",-13.48617935180664],["eacha",-13.486184120178224],["▁ábhar",-13.48618507385254],["▁mantiene",-13.486194610595703],["▁viitoare",-13.486202239990234],["▁xizmati",-13.486211776733398],["▁góc",-13.48623752593994],["▁לדבר",-13.48623752593994],["▁त्याने",-13.486242294311523],["▁మనసు",-13.486249923706056],["Club",-13.486254692077637],["▁duración",-13.486263275146484],["rzeń",-13.486281394958496],["▁metas",-13.48629093170166],["▁క్ల",-13.486295700073242],["മിനി",-13.486297607421877],["жали",-13.486303329467772],["ncé",-13.486321449279783],["▁graži",-13.48633098602295],["elį",-13.486331939697266],["▁거래",-13.486357688903809],["espace",-13.4863920211792],["▁üveg",-13.486395835876465],["կե",-13.486405372619627],["▁നാട",-13.486452102661133],["08)",-13.486454010009766],["▁السب",-13.48646640777588],["média",-13.486490249633787],["▁חוזר",-13.486501693725586],["▁prípad",-13.48653507232666],["▁dutela",-13.486541748046877],["फी",-13.48654842376709],["▁Вис",-13.486557960510254],["▁vokal",-13.486572265625],["فية",-13.486576080322266],["ወር",-13.486583709716797],["▁formado",-13.486583709716797],["ให้เป็น",-13.486595153808594],["▁Masuk",-13.486613273620604],["▁Kaba",-13.486623764038086],["зици",-13.48662567138672],["בירה",-13.486647605895996],["ีน",-13.486660957336426],["▁이상의",-13.48666286468506],["▁уряд",-13.48667049407959],["▁teenuse",-13.48669719696045],["850",-13.486717224121094],["▁Prvo",-13.486729621887209],["mesa",-13.486735343933104],["utveckling",-13.486739158630373],["▁voe",-13.486739158630373],["പ്പാട",-13.486754417419434],["▁indgå",-13.48677921295166],["sýn",-13.486781120300291],["▁fiber",-13.486781120300291],["▁חומ",-13.48678207397461],["ísk",-13.486788749694824],["KW",-13.486794471740724],["ագրություն",-13.486797332763672],["כוכב",-13.486804962158203],["▁болохгүй",-13.486808776855469],["חזיק",-13.486811637878418],["▁tenu",-13.48681640625],["▁ಮಟ್ಟ",-13.486823081970217],["्ता",-13.486845016479492],["▁faith",-13.486846923828123],["▁කවි",-13.486856460571287],["▁وڃڻ",-13.486865043640137],["HIR",-13.48686981201172],["▁واقعیت",-13.486897468566896],["شعب",-13.486920356750488],["▁handig",-13.48693561553955],["▁מקו",-13.486940383911133],["▁Quote",-13.486963272094728],["angle",-13.486985206604004],["കേ",-13.486987113952637],["UGU",-13.486992835998535],["ങ്ങളിലും",-13.48703956604004],["▁Otra",-13.487051010131836],["▁позив",-13.487051010131836],["▁Дело",-13.487052917480469],["anlage",-13.487070083618164],["▁которыми",-13.487082481384276],["ונגען",-13.487090110778809],["的概念",-13.487092971801758],["▁статья",-13.487096786499023],["▁уулз",-13.487101554870604],["olwg",-13.487104415893556],["құл",-13.487105369567873],["▁tubig",-13.48711395263672],["▁kulke",-13.487122535705566],["▁крайне",-13.487122535705566],["elda",-13.487126350402832],["▁Aval",-13.48712921142578],["▁RFE",-13.48712921142578],["вс",-13.487165451049805],["▁Eldre",-13.48718547821045],["▁intresserad",-13.48719596862793],["96)",-13.487204551696776],["▁පෙන්න",-1
3.487204551696776],["去看",-13.487220764160156],["▁valik",-13.487235069274902],["ଚେ",-13.487245559692385],["▁គាត់",-13.487253189086914],["momo",-13.487265586853027],["mung",-13.487277030944824],["በራ",-13.487299919128418],["▁Maaf",-13.487317085266112],["▁клик",-13.487324714660645],["▁revoluci",-13.48732566833496],["▁teatri",-13.487333297729492],["njaka",-13.487347602844238],["近い",-13.48738956451416],["վեցին",-13.487425804138184],["▁فجر",-13.487425804138184],["▁vederea",-13.487427711486816],["るのが",-13.487436294555664],["ኦሮሞ",-13.487448692321776],["habita",-13.48745822906494],["õhk",-13.487481117248535],["गुण",-13.487483978271484],["پان",-13.4874906539917],["▁Ká",-13.487508773803713],["陵",-13.487516403198242],["▁동의",-13.48752784729004],["EIN",-13.48753261566162],["▁willing",-13.487547874450684],["▁earth",-13.487548828125],["知识产权",-13.48755168914795],["▁დაგ",-13.487554550170898],["▁skan",-13.487555503845217],["ពណ៌",-13.487567901611328],["▁Němec",-13.487568855285645],["▁jobbe",-13.487568855285645],["▁ಬದಲಾವಣೆ",-13.487568855285645],["▁berbicara",-13.48756980895996],["▁έλεγχο",-13.48756980895996],["▁απέναντι",-13.48756980895996],["▁δεύτερη",-13.48756980895996],["▁хязгаар",-13.48756980895996],["▁לקראת",-13.48756980895996],["▁ښځو",-13.48756980895996],["▁گسترش",-13.48756980895996],["▁ውይይት",-13.48756980895996],["▁Hyundai",-13.487570762634276],["▁მესამე",-13.487570762634276],["▁biedrība",-13.48757266998291],["▁קישורים",-13.48757266998291],["▁کاروبار",-13.48757266998291],["▁pasākumi",-13.487573623657228],["▁índice",-13.487574577331545],["▁cotidian",-13.48757553100586],["▁위험",-13.487578392028809],["▁vplyv",-13.48758029937744],["▁целосно",-13.487582206726074],["▁חד",-13.48758602142334],["▁persoanele",-13.487586975097656],["шылығы",-13.487587928771973],["▁మృతి",-13.487589836120604],["▁permesilo",-13.487591743469238],["▁തമിഴ്",-13.48759937286377],["▁قىلىدۇ",-13.48760223388672],["▁κάνω",-13.487610816955566],["portu",-13.487618446350098],["ሁን",-13.487631797790527],["emplo",-13.487641334533691],["▁энергии",-13.487645149230955],["▁Πολιτική",-13.48764705657959],["▁præcis",-13.487650871276855],["▁immediately",-13.487652778625488],["обща",-13.487664222717283],["▁gördüğü",-13.487669944763184],["▁defesa",-13.487674713134766],["עלע",-13.487678527832031],["▁5-7",-13.487682342529297],["▁corporis",-13.487738609313965],["實用",-13.487740516662598],["ىسىنىڭ",-13.487741470336914],["пале",-13.487746238708496],["空中",-13.487747192382812],["接著",-13.487748146057127],["▁الحركة",-13.487751007080078],["▁pyöri",-13.487756729125977],["분이",-13.487770080566406],["▁aktivity",-13.487775802612305],["▁ಬುಕ್",-13.487785339355469],["相应的",-13.487811088562012],["▁estus",-13.487817764282228],["stræ",-13.487825393676758],["1988",-13.487826347351074],["▁شبه",-13.487828254699709],["▁нагр",-13.487834930419922],["ცენტრ",-13.487835884094238],["ให้เช่า",-13.487866401672363],["▁ultimo",-13.487872123718262],["لزم",-13.487881660461426],["▁Solutions",-13.48788833618164],["▁харагд",-13.487922668457031],["skych",-13.487929344177246],["▁totoo",-13.487969398498535],["▁imperial",-13.48798370361328],["ますので",-13.487988471984863],["UMI",-13.487991333007812],["▁méid",-13.487993240356444],["πω",-13.487995147705078],["amara",-13.488049507141112],["запад",-13.48807430267334],["▁dengar",-13.488096237182615],["▁ollaan",-13.48809814453125],["▁Reality",-13.488100051879885],["▁квалификации",-13.488103866577148],["▁realizzato",-13.48811149597168],["보다는",-13.488117218017578],["340",-13.488126754760742],["මී",-13.488131523132324],["Korea",-13.488143920898438],["▁разве",-13.488143
920898438],["▁eləcə",-13.488144874572754],["grøn",-13.488154411315918],["▁internetowej",-13.488174438476562],["▁представен",-13.488174438476562],["▁Confer",-13.488186836242676],["▁промене",-13.488186836242676],["▁viņas",-13.488211631774902],["مهرجان",-13.488213539123535],["▁dîtin",-13.4882230758667],["▁सुपर",-13.488239288330078],["തിൽ",-13.488266944885254],["მეს",-13.488283157348633],["▁చంద్ర",-13.488286018371582],["▁లీ",-13.488306999206545],["▁versie",-13.488327026367188],["жений",-13.48834228515625],["我國",-13.488368034362791],["▁populær",-13.48837947845459],["тє",-13.488383293151855],["▁fabric",-13.488394737243652],["▁Ditt",-13.48849391937256],["ของคน",-13.488513946533203],["▁догађај",-13.488542556762695],["▁Holl",-13.488545417785645],["దేశ",-13.48854637145996],["deck",-13.488564491271973],["▁കഴിക്ക",-13.488590240478516],["▁Unga",-13.488612174987791],["▁లె",-13.488622665405272],["▁fils",-13.48863697052002],["ტია",-13.488655090332031],["ighean",-13.48865795135498],["待遇",-13.48866081237793],["АЛЬ",-13.488670349121094],["▁господарства",-13.48867416381836],["▁8-10",-13.48867893218994],["▁ferramenta",-13.488682746887209],["▁Fällen",-13.488699913024902],["▁дыя",-13.488706588745115],["特徴",-13.488722801208496],["▁татко",-13.488750457763672],["▁Economi",-13.488773345947266],["▁Yuk",-13.488779067993164],["▁offrir",-13.488781929016112],["▁Iki",-13.48878574371338],["の高い",-13.488788604736328],["▁risti",-13.488789558410645],["ίμ",-13.48879337310791],["▁заведения",-13.48882007598877],["▁handla",-13.488829612731934],["ukko",-13.488837242126465],["▁Principal",-13.488842010498049],["▁wood",-13.488861083984377],["osztás",-13.48886489868164],["вањем",-13.488866806030272],["ζι",-13.488876342773438],["nacional",-13.48888111114502],["▁Maison",-13.488896369934082],["▁matematika",-13.488920211791992],["дама",-13.48894214630127],["四年",-13.488948822021484],["קטור",-13.488969802856444],["▁شغل",-13.48897647857666],["ىۋاتقان",-13.488995552062988],["▁població",-13.48900318145752],["terrein",-13.489005088806152],["▁والاست",-13.489008903503418],["ることができます",-13.489009857177734],["▁เร",-13.489013671875],["▁цэн",-13.489018440246582],["▁író",-13.489019393920898],["بود",-13.489022254943848],["▁kuvan",-13.48902416229248],["▁전문가",-13.489026069641112],["▁თამაშ",-13.48902988433838],["пси",-13.489042282104492],["▁지식",-13.489044189453123],["▁yolunda",-13.48904800415039],["kå",-13.489059448242188],["liť",-13.489059448242188],["▁उम्मीद",-13.48906135559082],["▁ସହର",-13.4890718460083],["ক্ষা",-13.48907470703125],["menų",-13.489117622375488],["▁darbas",-13.48912239074707],["▁typen",-13.48912525177002],["ಿದ್ದರೂ",-13.489137649536133],["VOV",-13.489141464233398],["плив",-13.489160537719728],["ېدل",-13.489160537719728],["ຸນ",-13.489168167114258],["▁Toy",-13.48918342590332],["▁موجودہ",-13.489188194274902],["▁Fala",-13.489190101623535],["zał",-13.489194869995115],["▁Materiál",-13.489198684692385],["▁zadania",-13.489212036132812],["▁روزہ",-13.489214897155762],["▁goals",-13.489218711853027],["finanz",-13.48922348022461],["的基础上",-13.489229202270508],["▁tehnologije",-13.489235877990724],["一道",-13.489294052124023],["ච්චි",-13.4893159866333],["пін",-13.489320755004885],["▁conecta",-13.489333152770996],["冲突",-13.489338874816896],["▁atento",-13.48934268951416],["視覺",-13.489384651184082],["▁পদ",-13.48940658569336],["ក្រហម",-13.489407539367676],["វេលា",-13.489407539367676],["▁profesyonel",-13.489407539367676],["▁réalisation",-13.489407539367676],["▁zmluvy",-13.489407539367676],["▁οικογένεια",-13.489407539367676],["▁ভর্তি",-13.489407539367676],["▁মিডিয়
া",-13.489407539367676],["▁મિત્રો",-13.489407539367676],["▁ธันวาคม",-13.489407539367676],["▁განცხადება",-13.489407539367676],["▁Vodafone",-13.489408493041992],["▁dhaawac",-13.489408493041992],["▁շրջանակներում",-13.489409446716309],["พื้นฐาน",-13.48941135406494],["▁البرلمان",-13.489412307739258],["▁санхүүгийн",-13.48941421508789],["▁перыяд",-13.489416122436523],["▁milions",-13.489418983459473],["▁شبكة",-13.489426612854004],["▁негизги",-13.48942756652832],["galan",-13.489429473876951],["▁Därför",-13.489434242248535],["▁طبيعي",-13.489435195922852],["▁মোঃ",-13.489441871643066],["▁20:30",-13.489444732666016],["chukua",-13.48944854736328],["▁көрүп",-13.489449501037598],["burger",-13.489450454711914],["▁Daripada",-13.48945140838623],["โฟ",-13.489453315734863],["保育",-13.489453315734863],["▁Nicolae",-13.489457130432127],["ongea",-13.489463806152344],["uerunt",-13.489463806152344],["▁oposición",-13.489463806152344],["▁mengerti",-13.48946762084961],["▁savaitę",-13.489476203918455],["▁Ministarstvo",-13.489481925964355],["▁здравје",-13.48948860168457],["▁Jumlah",-13.489489555358888],["מטר",-13.48950481414795],["▁Реал",-13.489509582519531],["making",-13.48951816558838],["▁سامنا",-13.489524841308594],["▁spēlēt",-13.489550590515137],["▁trykke",-13.489557266235352],["సార్లు",-13.489559173583984],["▁profesori",-13.489567756652832],["▁గేమ్",-13.489582061767578],["पालिका",-13.489608764648438],["▁Dala",-13.489620208740234],["▁стопа",-13.489625930786133],["▁traslada",-13.48963451385498],["▁cabal",-13.489638328552246],["aisiais",-13.489641189575195],["▁asawa",-13.489641189575195],["接待",-13.48966121673584],["лесно",-13.489665031433104],["തന",-13.489665031433104],["▁кредитор",-13.489672660827637],["、4",-13.489683151245115],["注文",-13.489684104919434],["▁ছ",-13.489689826965332],["कार्य",-13.489709854125977],["▁vārdu",-13.489718437194824],["ادى",-13.48971939086914],["шот",-13.48972988128662],["▁netko",-13.489741325378418],["яка",-13.489757537841797],["מבט",-13.48976230621338],["laması",-13.48977756500244],["▁rivolt",-13.489802360534668],["要做",-13.489810943603516],["▁DELFI",-13.489815711975098],["який",-13.489822387695312],["を見つけ",-13.489822387695312],["ındır",-13.489828109741213],["শী",-13.489830017089844],["▁Søker",-13.489839553833008],["▁пригод",-13.489853858947754],["▁Foru",-13.489862442016602],["▁invitere",-13.489864349365234],["posita",-13.4898681640625],["ကဲ",-13.489885330200195],["гдах",-13.489893913269045],["▁vitamine",-13.48989486694336],["▁ресурсов",-13.489898681640623],["▁բնակարան",-13.489914894104004],["▁детско",-13.489931106567385],["▁армия",-13.489959716796877],["turing",-13.489982604980469],["чност",-13.48998737335205],["▁miste",-13.489990234375],["▁एक्",-13.489994049072266],["▁fiyatları",-13.490001678466797],["▁jasné",-13.49001121520996],["აი",-13.49005889892578],["▁Кір",-13.490071296691896],["imizda",-13.490094184875488],["fürdő",-13.490097045898438],["▁Iya",-13.490111351013184],["▁tjerët",-13.490118026733398],["▁خال",-13.490121841430664],["сля",-13.490141868591309],["▁Capit",-13.490180015563965],["▁κόσμος",-13.490180015563965],["монстр",-13.490187644958496],["▁sabes",-13.49021053314209],["▁ciekawe",-13.490217208862305],["ლიან",-13.490241050720217],["عمق",-13.49025821685791],["▁एंड",-13.490278244018556],["أم",-13.490296363830566],["äistä",-13.490307807922363],["option",-13.490328788757324],["sindən",-13.490362167358398],["ríamos",-13.490365028381348],["▁አማራ",-13.490381240844728],["124",-13.490395545959473],["▁Dz",-13.490403175354004],["▁այլն",-13.490424156188965],["▁PEM",-13.490436553955078],["ФО",-13
.490447998046877],["▁Aceste",-13.490460395812988],["▁Ürün",-13.490487098693848],["▁проектов",-13.490500450134276],["▁yah",-13.490504264831545],["がい",-13.490506172180176],["▁pantalon",-13.490521430969238],["ביר",-13.490527153015137],["有些人",-13.490568161010742],["▁desberdin",-13.490598678588867],["▁egyedi",-13.490608215332031],["▁ලෝකයේ",-13.490618705749512],["▁gede",-13.490646362304688],["▁ឧ",-13.490656852722168],["zeiten",-13.4906587600708],["▁spēj",-13.490693092346191],["▁clé",-13.49070644378662],["▁Piotr",-13.490713119506836],["▁зачем",-13.490735054016112],["空调",-13.490764617919922],["▁Practic",-13.490778923034668],["延伸",-13.49079418182373],["▁tokiu",-13.490796089172363],["ራሱ",-13.49082374572754],["▁എടുത്ത",-13.49082851409912],["▁знання",-13.490866661071776],["▁Милош",-13.490872383117676],["▁веро",-13.49087619781494],["▁Nauk",-13.490894317626951],["ological",-13.490911483764648],["ఫె",-13.490923881530762],["▁atij",-13.49093246459961],["▁yakho",-13.49093532562256],["▁rakont",-13.490949630737305],["▁ಅವಳ",-13.490967750549316],["ነታቸው",-13.490978240966797],["▁صدی",-13.490978240966797],["▁especies",-13.490995407104492],["7,5",-13.491013526916504],["▁ذیل",-13.491019248962402],["▁Tahu",-13.491034507751465],["ières",-13.491048812866213],["роид",-13.49106788635254],["פּע",-13.491096496582031],["pair",-13.491100311279297],["▁खुला",-13.491107940673828],["▁рамки",-13.491119384765623],["զա",-13.491128921508787],["▁dolorem",-13.491132736206056],["▁אירוע",-13.491132736206056],["ແດງ",-13.49113941192627],["▁ਸ਼ਹੀਦ",-13.491143226623535],["rota",-13.491154670715332],["▁Verkehrs",-13.491155624389648],["ສັງ",-13.49118995666504],["曉",-13.491198539733888],["放棄",-13.491223335266112],["▁nominal",-13.491238594055176],["▁cuir",-13.491240501403809],["▁dokumentar",-13.49124813079834],["▁cənab",-13.491249084472656],["▁максимум",-13.491249084472656],["▁следующий",-13.491249084472656],["▁століття",-13.491249084472656],["▁ಷೇರು",-13.491249084472656],["▁Lähettänyt",-13.491250038146973],["▁تۈركىيە",-13.491253852844238],["▁memulai",-13.491257667541504],["▁دمشق",-13.491257667541504],["▁오픈",-13.491257667541504],["jandi",-13.491260528564451],["▁актыўна",-13.491260528564451],["▁столь",-13.491263389587402],["▁żadnych",-13.491265296936035],["▁эрчим",-13.491266250610352],["▁speciāli",-13.491268157958984],["▁ಮುಂದ",-13.49129581451416],["tystä",-13.491297721862791],["它們",-13.491302490234377],["▁agente",-13.491307258605955],["▁своєму",-13.491314888000488],["▁ricetta",-13.49131965637207],["▁päivittä",-13.491327285766602],["▁Internacia",-13.491331100463867],["▁выполнения",-13.491332054138184],["政府的",-13.491336822509766],["▁positivt",-13.491339683532717],["kyk",-13.49135971069336],["많",-13.491360664367676],["▁ESTES",-13.491363525390623],["▁casamento",-13.491365432739258],["▁Başkanlığı",-13.491368293762209],["СМ",-13.491379737854004],["▁همایش",-13.491379737854004],["▁Laŭ",-13.4913969039917],["▁यांचा",-13.49140167236328],["▁daquela",-13.49140453338623],["하도록",-13.491409301757812],["▁نقطة",-13.491412162780762],["▁hagi",-13.491415023803713],["بصر",-13.49143886566162],["massage",-13.491447448730469],["▁kendisini",-13.491453170776367],["▁якасці",-13.491459846496582],["▁ٽو",-13.491474151611328],["▁uttryck",-13.49148178100586],["▁Mặt",-13.491487503051758],["δος",-13.491495132446287],["рде",-13.491496086120604],["▁ठिकाणी",-13.49150562286377],["ûk",-13.491528511047363],["χρονος",-13.49152946472168],["▁фондов",-13.491594314575195],["fahamu",-13.491597175598145],["rsatish",-13.491597175598145],["▁beachten",-13.491602897644045],["▁రు",-13.49160385131836]
,["▁muligheten",-13.491622924804688],["▁amelynek",-13.491626739501951],["meyecek",-13.491636276245115],["øl",-13.491636276245115],["▁سوء",-13.491650581359863],["▁公司",-13.491681098937988],["ଛୁ",-13.491682052612305],["cık",-13.491694450378418],["▁නාම",-13.491703987121582],["▁Hev",-13.491704940795898],["▁ინტერ",-13.49170970916748],["▁kombination",-13.491741180419922],["▁देशको",-13.491744041442873],["▁voči",-13.491768836975098],["ୟନ",-13.491806030273438],["▁אתכם",-13.491826057434082],["йшоў",-13.491830825805664],["▁Pacific",-13.491849899291992],["▁corona",-13.491870880126951],["रिक",-13.491872787475586],["▁Standort",-13.491875648498535],["▁למש",-13.4918794631958],["▁نهایی",-13.491886138916016],["▁Yara",-13.49190902709961],["▁alin",-13.491918563842772],["▁horen",-13.491918563842772],["▁prospect",-13.49192237854004],["cessit",-13.491924285888672],["teste",-13.491963386535645],["эрч",-13.491965293884276],["tört",-13.49196720123291],["аси",-13.49197769165039],["ۋال",-13.491982460021973],["▁vink",-13.491988182067873],["改进",-13.491989135742188],["▁spate",-13.49199104309082],["主動",-13.491994857788086],["▁rău",-13.491995811462402],["▁Bratislav",-13.492003440856934],["▁intru",-13.492009162902832],["▁ශිෂ්",-13.492018699645996],["▁herritar",-13.492026329040527],["▁gegee",-13.49204158782959],["▁lijepo",-13.492047309875488],["▁bliana",-13.492050170898438],["▁Kvalit",-13.492053985595703],["▁épül",-13.492069244384766],["tinho",-13.492094039916992],["▁ipsam",-13.492094993591309],["▁подобни",-13.492106437683104],["▁tarikh",-13.492111206054688],["द्रा",-13.49211883544922],["▁valda",-13.492135047912598],["षण",-13.49215030670166],["ಲಾಯಿತು",-13.492179870605469],["生理",-13.492219924926758],["▁سنتی",-13.49222183227539],["ավա",-13.492226600646973],["▁llista",-13.492236137390137],["▁islami",-13.492255210876465],["頑張",-13.49225902557373],["▁успеха",-13.492260932922363],["فان",-13.49226188659668],["▁대구",-13.492262840270996],["▁ráda",-13.492302894592283],["▁جيد",-13.492307662963867],["tavasti",-13.49233627319336],["별로",-13.492350578308104],["▁እርሱ",-13.492366790771484],["▁надежд",-13.492387771606444],["▁sopii",-13.492401123046877],["▁nejde",-13.492430686950684],["▁Блог",-13.492444038391112],["▁akva",-13.492448806762695],["▁toimin",-13.492462158203123],["telji",-13.492474555969238],["žek",-13.492474555969238],["▁tavara",-13.49248504638672],["խան",-13.4924955368042],["利用者",-13.492502212524414],["intérieur",-13.492508888244627],["ตี้",-13.492525100708008],["▁Europei",-13.492527961730955],["ກໍ່ສ້າງ",-13.492534637451172],["żona",-13.492548942565918],["நூ",-13.492551803588867],["▁koskeva",-13.492559432983398],["스토리",-13.492568969726562],["▁Laden",-13.492581367492676],["▁աշխատանք",-13.492591857910156],["許可",-13.492599487304688],["▁ผ้า",-13.49260711669922],["▁aprendi",-13.492640495300291],["承受",-13.492640495300291],["▁近日",-13.492650985717772],["▁человеком",-13.492668151855469],["אבק",-13.492679595947266],["ымда",-13.492680549621582],["▁Harald",-13.492687225341797],["▁cvič",-13.492701530456545],["▁принял",-13.492701530456545],["ạnh",-13.49271297454834],["▁işletme",-13.492720603942873],["▁macho",-13.492722511291504],["váz",-13.492751121520996],["▁ξέ",-13.492762565612791],["▁Progres",-13.49278450012207],["▁חסר",-13.492785453796388],["▁kasdien",-13.492791175842283],["他說",-13.4927978515625],["▁οικονομική",-13.49281120300293],["▁blau",-13.492817878723145],["▁وغ",-13.492820739746094],["שהו",-13.492825508117676],["▁faculta",-13.492830276489258],["чае",-13.492838859558104],["▁centrul",-13.492841720581056],["▁Эм",-13.492854118347168],["ಬರ್",
-13.492857933044434],["▁suave",-13.492889404296877],["▁تاش",-13.492910385131836],["▁уметности",-13.492915153503418],["▁MAH",-13.492916107177734],["ühl",-13.492929458618164],["▁होऊ",-13.492938995361328],["حضور",-13.492959976196287],["▁havo",-13.492966651916504],["త్తి",-13.492977142333984],["滚",-13.493009567260742],["痕",-13.493049621582031],["搖",-13.49305534362793],["走进",-13.493060111999512],["誌",-13.493066787719728],["のことを",-13.49306869506836],["撥",-13.493071556091309],["ที่เคย",-13.493091583251951],["ซ้าย",-13.493093490600586],["▁Algo",-13.493093490600586],["▁freundlich",-13.493093490600586],["▁गौतम",-13.493093490600586],["ጥቂት",-13.493094444274902],["▁Krankheit",-13.493094444274902],["▁Používa",-13.493094444274902],["▁ellenőrzés",-13.493094444274902],["▁katutubong",-13.493094444274902],["▁odpoveď",-13.493094444274902],["▁perawatan",-13.493094444274902],["▁strutture",-13.493094444274902],["▁utakmice",-13.493094444274902],["▁зямлі",-13.493094444274902],["▁ಶಕ್ತಿ",-13.493094444274902],["▁පුහුණු",-13.493094444274902],["▁쇼핑",-13.493094444274902],["सम्पादन",-13.49309539794922],["საქართველოს",-13.493096351623535],["▁Dlaczego",-13.493101119995115],["▁Wikiloc",-13.493101119995115],["▁Unterschied",-13.493102073669434],["vyys",-13.49311065673828],["▁поръчка",-13.49311351776123],["▁السيارات",-13.493114471435549],["▁тэмдэглэ",-13.493117332458496],["▁방향",-13.493121147155762],["ជនសង្ស័យ",-13.493136405944824],["▁채용",-13.493156433105469],["▁Mozilla",-13.493158340454102],["▁buang",-13.493162155151367],["Ру",-13.49316692352295],["▁২২",-13.493169784545898],["ାରେ",-13.493170738220217],["▁poslovanja",-13.49317455291748],["forskning",-13.49317741394043],["▁אמיתי",-13.493183135986328],["▁góða",-13.493185997009276],["МП",-13.493189811706545],["湖南",-13.49319553375244],["となり",-13.493197441101074],["prema",-13.493224143981934],["▁التدريب",-13.493226051330566],["▁раније",-13.493234634399414],["▁malgré",-13.49323844909668],["قارب",-13.493246078491213],["▁угод",-13.493247032165527],["ENTI",-13.493263244628906],["▁armat",-13.493263244628906],["▁inceleme",-13.493270874023438],["各項",-13.493276596069336],["зві",-13.49328327178955],["▁يحدث",-13.493285179138184],["▁contrari",-13.493291854858398],["▁permettra",-13.49332332611084],["▁hodi",-13.493325233459473],["عام",-13.493326187133787],["▁jenom",-13.493330001831056],["▁Oltre",-13.49333381652832],["▁scherm",-13.49334716796875],["▁zarok",-13.493358612060549],["▁mezun",-13.493391036987305],["其他人",-13.493395805358888],["▁հող",-13.493413925170898],["ղա",-13.49341869354248],["▁meal",-13.49343490600586],["▁सीएम",-13.493443489074709],["▁由",-13.493457794189451],["▁alí",-13.493468284606934],["შვილს",-13.493484497070312],["ממש",-13.493487358093262],["โคร",-13.493526458740234],["▁maqsadida",-13.49354648590088],["▁logik",-13.493568420410156],["▁sviluppa",-13.493568420410156],["▁tests",-13.493575096130373],["ძა",-13.493578910827637],["▁ship",-13.49358367919922],["▁منجر",-13.493593215942385],["▁שעל",-13.493609428405762],["த்தால்",-13.49361515045166],["▁корисници",-13.493633270263672],["entium",-13.493640899658203],["▁መለስ",-13.493647575378418],["▁गायक",-13.49366283416748],["ကျင်းပ",-13.493678092956545],["▁مهربان",-13.493680953979492],["зах",-13.49368953704834],["▁mq",-13.493714332580566],["ছিলেন",-13.49378490447998],["▁किं",-13.493792533874512],["▁0,4",-13.493803977966309],["▁գործի",-13.493804931640623],["▁تقول",-13.49384880065918],["زاده",-13.493887901306152],["しません",-13.493897438049316],["▁Syr",-13.493901252746582],["▁වාර්තාව",-13.493919372558594],["▁występ",-13.49392318725586],["▁지적",-13.4
93932723999023],["▁lënë",-13.493939399719238],["▁dangos",-13.493942260742188],["反正",-13.493975639343262],["▁MEDI",-13.493986129760742],["ফার",-13.493998527526855],["证书",-13.494000434875488],["▁venne",-13.494003295898438],["▁ചെയ്യുന്നു",-13.49400806427002],["▁anyway",-13.494020462036133],["înd",-13.494033813476562],["kukho",-13.49405288696289],["▁animales",-13.49406623840332],["▁Manga",-13.49410343170166],["▁umeme",-13.494114875793455],["ГР",-13.49411678314209],["रोह",-13.49412441253662],["▁Zure",-13.494125366210938],["сили",-13.494138717651367],["錯過",-13.494154930114746],["stoel",-13.494158744812012],["▁Cái",-13.49416732788086],["▁προσωπικό",-13.494186401367188],["WAT",-13.494226455688477],["▁1880",-13.494240760803224],["ZAK",-13.494242668151855],["▁dilu",-13.494253158569336],["attuale",-13.494267463684082],["▁மர",-13.49431037902832],["▁시행",-13.494312286376951],["▁premiér",-13.494329452514648],["▁öllu",-13.494339942932127],["▁ملزم",-13.494370460510254],["▁olmasını",-13.494378089904783],["▁култур",-13.494386672973633],["▁과거",-13.49441909790039],["3.7",-13.494446754455566],["드립니다",-13.494454383850098],["▁tomat",-13.49445915222168],["▁prezentat",-13.494471549987791],["נציג",-13.494487762451172],["▁ирж",-13.49449062347412],["▁sortida",-13.494499206542969],["▁rogha",-13.494511604309082],["▁digni",-13.494516372680664],["▁ætti",-13.49451732635498],["Škol",-13.494526863098145],["ωσαν",-13.49453067779541],["●",-13.49453353881836],["yör",-13.494547843933104],["זין",-13.494547843933104],["பால்",-13.494547843933104],["▁Raw",-13.494549751281738],["▁previa",-13.494553565979004],["ટ્સ",-13.49456024169922],["▁расход",-13.494563102722168],["стки",-13.494571685791016],["ണ്ഡ",-13.494586944580078],["ланған",-13.49460792541504],["最新的",-13.494620323181152],["▁قوی",-13.494621276855469],["▁contratto",-13.49462604522705],["▁novog",-13.49462890625],["saks",-13.49464988708496],["▁ստեղծել",-13.494681358337402],["ՄԱՆ",-13.4946928024292],["owca",-13.494720458984377],["▁sniegt",-13.494726181030272],["मह",-13.494731903076172],["маркет",-13.49473476409912],["▁fissa",-13.494735717773438],["ಬರ",-13.49475383758545],["▁ಅಪ",-13.494787216186523],["ウェ",-13.49478816986084],["prä",-13.494826316833496],["ክት",-13.494847297668455],["ിലുള്ള",-13.49485683441162],["▁živote",-13.49486255645752],["thin",-13.494872093200684],["یټ",-13.494877815246582],["ոզ",-13.494884490966797],["▁Europene",-13.494884490966797],["卓",-13.494888305664062],["荡",-13.494893074035645],["溜",-13.49489402770996],["▁tår",-13.49490451812744],["ظن",-13.494905471801758],["商务",-13.49490737915039],["芽",-13.494917869567873],["釣",-13.494937896728516],["រាជធានីភ្នំពេញ",-13.494942665100098],["▁Aufenthalt",-13.494942665100098],["▁obxecto",-13.494942665100098],["▁Απριλίου",-13.494942665100098],["▁Паводле",-13.494942665100098],["▁жінок",-13.494942665100098],["▁կարծում",-13.494942665100098],["▁همواره",-13.494942665100098],["▁ຕື່ມອີກ",-13.494942665100098],["▁ທຣໍາ",-13.494942665100098],["▁Ảnh",-13.494942665100098],["낼",-13.494942665100098],["▁Coffee",-13.494943618774414],["▁Kështu",-13.494943618774414],["▁przetwarza",-13.494943618774414],["▁података",-13.494943618774414],["▁сярэд",-13.494943618774414],["▁төлөвлөгөө",-13.494943618774414],["ไลค์",-13.494945526123049],["▁χρηματ",-13.494946479797363],["▁објавен",-13.494946479797363],["▁빠른",-13.494946479797363],["▁firenena",-13.49494743347168],["▁καρδιά",-13.49494743347168],["▁ಜಿಲ್ಲೆ",-13.494948387145996],["▁Dortmund",-13.494949340820312],["▁Indica",-13.494950294494627],["▁imposible",-13.494950294494627],["▁эрдэм",-13.494950294494627],[
"▁sacrifici",-13.494952201843262],["▁اچھا",-13.494953155517578],["▁मुली",-13.494953155517578],["டும்",-13.494954109191896],["▁ուղղությամբ",-13.494954109191896],["όλο",-13.494958877563477],["▁תפקיד",-13.49496364593506],["▁ਜਾਂਦੇ",-13.49496364593506],["▁menyimpan",-13.494976997375488],["失望",-13.49498176574707],["▁ਚੋਣਾਂ",-13.494990348815918],["▁ervaren",-13.494996070861816],["▁shkurt",-13.49500846862793],["fullt",-13.495010375976562],["▁مصالح",-13.495012283325195],["▁Republikës",-13.495015144348145],["▁biyo",-13.49502182006836],["račun",-13.495022773742676],["▁Tantra",-13.495024681091309],["▁volen",-13.495036125183104],["▁Bueno",-13.49504280090332],["▁전달",-13.49504852294922],["▁хоць",-13.495060920715332],["dawać",-13.495061874389648],["僕は",-13.49506664276123],["▁պայման",-13.495067596435549],["▁محافظ",-13.495068550109863],["ለጠ",-13.495084762573242],["photography",-13.495087623596191],["▁természetesen",-13.495088577270508],["▁Број",-13.495092391967772],["▁00:00",-13.49510097503662],["▁శక్తి",-13.495126724243164],["ANDO",-13.495131492614746],["▁kiinnosta",-13.495150566101074],["▁trabaho",-13.495162010192873],["тест",-13.495162963867188],["▁എന്നീ",-13.495165824890137],["డూ",-13.495174407958984],["▁műsor",-13.495197296142578],["▁ആയിരുന്നു",-13.495199203491213],["▁hinek",-13.49521541595459],["▁trait",-13.49522304534912],["▁Kera",-13.495245933532717],["▁горад",-13.495245933532717],["▁armoni",-13.49525547027588],["▁faţa",-13.495256423950195],["▁nášho",-13.495257377624512],["▁уй",-13.495257377624512],["ազդ",-13.495258331298828],["▁alkot",-13.49526309967041],["▁Elisabeth",-13.495268821716309],["医药",-13.49527359008789],["ional",-13.495288848876951],["▁Όσο",-13.495299339294434],["▁belasting",-13.495305061340332],["하겠습니다",-13.495311737060549],["حار",-13.495312690734863],["금액",-13.495322227478027],["การจัดการ",-13.495326042175291],["การเงิน",-13.495330810546877],["▁gələcək",-13.49533748626709],["▁mẽ",-13.495347023010254],["廣場",-13.49537181854248],["الح",-13.495380401611328],["טרו",-13.495399475097656],["▁szálláshely",-13.495399475097656],["aitė",-13.495403289794922],["olimp",-13.495403289794922],["▁saldo",-13.49541187286377],["৫",-13.495421409606934],["รู้ว่า",-13.495423316955566],["قلب",-13.495429039001465],["▁tendance",-13.495431900024414],["ттық",-13.495450019836426],["ђење",-13.495452880859377],["▁objedná",-13.49545955657959],["▁alimentari",-13.495490074157717],["ysta",-13.495529174804688],["▁kalite",-13.49553108215332],["366",-13.495545387268066],["५०",-13.495545387268066],["▁человеку",-13.495551109313965],["▁latino",-13.495564460754396],["▁জিয়া",-13.495585441589355],["▁Provinsi",-13.495594024658203],["स्टर",-13.49562644958496],["θει",-13.49562931060791],["jék",-13.495634078979492],["valikoima",-13.495654106140137],["▁ሠ",-13.49565601348877],["▁માનવ",-13.495665550231934],["06)",-13.49566650390625],["▁xinh",-13.49566650390625],["ඩී",-13.495668411254885],["mutató",-13.495688438415527],["▁proiektua",-13.495705604553224],["▁повышен",-13.495708465576172],["shika",-13.495721817016602],["▁jiraan",-13.4957275390625],["▁rritur",-13.4957275390625],["▁ફોન",-13.495728492736816],["▁деня",-13.49573040008545],["▁דור",-13.49573040008545],["▁නිවස",-13.495755195617676],["▁heldig",-13.495770454406738],["小小的",-13.495783805847168],["スタン",-13.495784759521484],["▁programación",-13.495790481567385],["▁værelse",-13.495792388916016],["деј",-13.49579906463623],["そこに",-13.495806694030762],["▁उड",-13.495820045471191],["▁독일",-13.495842933654783],["▁environnement",-13.49586296081543],["▁eldon",-13.495863914489746],["學術",-13.495863914
489746],["▁massima",-13.495869636535645],["▁ekimen",-13.495882034301758],["joties",-13.495912551879885],["▁Eles",-13.495931625366213],["പ്പം",-13.495945930480955],["▁posnet",-13.495954513549805],["ေတ",-13.49595546722412],["▁parlamentu",-13.495960235595703],["▁раді",-13.495965003967283],["▁Друга",-13.496010780334473],["▁сакам",-13.496026992797852],["▁cię",-13.496045112609863],["గిరి",-13.496065139770508],["▁ଲୋକଙ୍କ",-13.496074676513672],["ાયા",-13.496135711669922],["▁Again",-13.49614143371582],["口感",-13.496149063110352],["warna",-13.49616241455078],["بارك",-13.496170043945312],["▁ئاق",-13.496174812316896],["ативна",-13.496175765991213],["поред",-13.496179580688477],["▁Андан",-13.496217727661133],["めた",-13.49624252319336],["expérience",-13.496272087097168],["ქართული",-13.496275901794434],["없는",-13.496322631835938],["▁ತಾಯಿ",-13.496323585510254],["ziti",-13.496328353881836],["▁ประเทศ",-13.496356010437012],["urada",-13.496371269226074],["▁enciklopedio",-13.496371269226074],["▁mieście",-13.496410369873049],["▁efeito",-13.49644660949707],["▁охин",-13.496451377868652],["760",-13.496454238891602],["دس",-13.496499061584473],["-1)",-13.496501922607422],["▁Služb",-13.4965181350708],["ੱਗ",-13.496536254882812],["ื้อ",-13.49655055999756],["▁ಅನಿ",-13.496572494506836],["项目的",-13.496586799621582],["▁Sergio",-13.496604919433594],["排除",-13.49660587310791],["▁Gill",-13.496611595153809],["kode",-13.496621131896973],["▁miast",-13.496637344360352],["▁estrela",-13.496641159057615],["▁Reve",-13.49665641784668],["▁Oferta",-13.496676445007324],["TICA",-13.496689796447754],["專業的",-13.49669075012207],["▁Monica",-13.496692657470703],["支払",-13.496694564819336],["لاند",-13.496715545654297],["陪伴",-13.496771812438965],["▁funcionario",-13.49677276611328],["罩",-13.49677562713623],["▁jogi",-13.496783256530762],["လျှင်",-13.496792793273926],["ՈՒԹՅԱՆ",-13.49679470062256],["ແອັຟ",-13.49679470062256],["ဖုန်း",-13.49679470062256],["▁democracia",-13.49679470062256],["▁thỏa",-13.49679470062256],["▁utgangspunkt",-13.49679470062256],["▁Наприклад",-13.49679470062256],["▁дзейнасці",-13.49679470062256],["▁обслуговування",-13.49679470062256],["▁пришлось",-13.49679470062256],["▁वैशाख",-13.49679470062256],["▁শুক্রবার",-13.49679470062256],["앨",-13.49679470062256],["▁krävs",-13.496795654296877],["짝",-13.496795654296877],["▁edildikdə",-13.496796607971191],["▁ತಲುಪ",-13.496796607971191],["▁цахим",-13.496797561645508],["▁Xitoy",-13.49679946899414],["▁procedimiento",-13.496801376342772],["好处",-13.49680233001709],["▁συμφων",-13.496804237365724],["▁printer",-13.496810913085938],["▁বিচার",-13.496817588806152],["▁ပု",-13.496817588806152],["▁fichier",-13.496818542480469],["▁Zanim",-13.496819496154783],["ፕሮ",-13.496831893920898],["▁부족",-13.496832847595217],["▁արձանագր",-13.496833801269531],["▁Dieva",-13.496834754943848],["▁modlit",-13.496838569641112],["▁durchgeführt",-13.496848106384276],["ଯୋ",-13.496849060058594],["▁Jestem",-13.49685001373291],["▁روزه",-13.49685287475586],["▁Фу",-13.496855735778809],["▁معاشرے",-13.496855735778809],["▁obstante",-13.496861457824709],["▁attract",-13.49686336517334],["▁majoritatea",-13.49686336517334],["▁сәт",-13.49687957763672],["▁суммы",-13.496893882751465],["▁توقف",-13.496899604797363],["ಗ್ರಾ",-13.496905326843262],["▁मंगलबार",-13.49692153930664],["▁widok",-13.496926307678224],["▁raho",-13.49693489074707],["▁کپی",-13.496936798095703],["黃金",-13.49697971343994],["geschichte",-13.496980667114258],["ложить",-13.496987342834473],["▁Лук",-13.496994972229004],["▁recerca",-13.497007369995115],["▁អង្គ",-13.497032165527344],["▁surve
y",-13.497034072875977],["▁શક્ય",-13.497051239013672],["▁אישית",-13.497060775756836],["▁neile",-13.497072219848633],["▁kiçik",-13.49707317352295],["מזון",-13.497096061706545],["▁besim",-13.497099876403809],["▁גוף",-13.497100830078123],["ራሳቸው",-13.497109413146973],["▁miliarde",-13.497114181518556],["πεδ",-13.4971342086792],["▁තිබුණා",-13.497140884399414],["▁cook",-13.49714183807373],["▁genannt",-13.49714469909668],["▁dúinn",-13.497174263000488],["āva",-13.497177124023438],["dary",-13.49718952178955],["နက်",-13.497209548950195],["üyü",-13.497222900390623],["께서",-13.497252464294434],["ניג",-13.49725341796875],["ડર",-13.497278213500977],["ίνουμε",-13.497285842895508],["▁нашым",-13.497291564941406],["▁kým",-13.497312545776367],["állomás",-13.497323036193848],["రుగు",-13.497323036193848],["▁trial",-13.497325897216797],["RCA",-13.497329711914062],["kasika",-13.497331619262695],["ЕНИЕ",-13.497339248657228],["▁đáo",-13.497340202331545],["▁dessus",-13.497355461120604],["्वे",-13.497357368469238],["▁verdadeira",-13.497359275817873],["▁valóban",-13.497366905212402],["还不",-13.497393608093262],["▁konata",-13.49740219116211],["▁Nuair",-13.497406959533691],["▁időben",-13.497406959533691],["дний",-13.497417449951172],["▁броя",-13.497421264648438],["▁përball",-13.497429847717283],["యాం",-13.49744701385498],["▁રૂપ",-13.49745273590088],["▁alert",-13.49748706817627],["gence",-13.49751091003418],["ંસ",-13.497516632080078],["▁△",-13.497517585754396],["mium",-13.497525215148926],["parlament",-13.497529983520508],["▁движе",-13.49754238128662],["▁тэле",-13.497546195983888],["웨이",-13.49756908416748],["حذر",-13.497615814208984],["了不少",-13.497647285461426],["▁mobilní",-13.497654914855955],["ပျ",-13.497660636901855],["Well",-13.49766445159912],["पूर्व",-13.497681617736816],["▁архи",-13.497693061828612],["▁նախա",-13.49770736694336],["▁krize",-13.497754096984863],["▁poliisi",-13.497775077819824],["Note",-13.497782707214355],["▁দাবি",-13.49778652191162],["कता",-13.497814178466797],["/2003",-13.497836112976074],["▁Giao",-13.497851371765137],["▁enkä",-13.49785327911377],["▁հարցը",-13.497881889343262],["boka",-13.497883796691896],["▁laatu",-13.49789047241211],["▁смотрел",-13.497917175292969],["ಎನ್",-13.497940063476562],["▁آگاه",-13.497949600219728],["▁Chart",-13.497955322265623],["ระบุ",-13.497958183288574],["补充",-13.497974395751951],["LEA",-13.497982025146484],["ÓW",-13.497989654541016],["▁насто",-13.49800968170166],["每个人",-13.49801540374756],["▁محتوای",-13.498046875],["prywat",-13.498050689697266],["ۇز",-13.498055458068848],["อาทิตย์",-13.498055458068848],["texniki",-13.49807071685791],["ထားတာ",-13.498072624206545],["ร่วมกัน",-13.49809455871582],["▁Bibel",-13.49809741973877],["стояние",-13.498098373413086],["▁LTE",-13.498102188110352],["▁развоја",-13.4981050491333],["▁байгуулж",-13.498122215270996],["▁гледам",-13.498151779174805],["ຜ່ານ",-13.49815559387207],["▁matcher",-13.49815845489502],["꾸",-13.49815845489502],["кія",-13.498167037963867],["▁klok",-13.498172760009766],["いただける",-13.498191833496094],["▁տվյալ",-13.498214721679688],["▁полет",-13.49821662902832],["іста",-13.498217582702637],["▁skit",-13.49821949005127],["▁професионални",-13.498229026794434],["വള",-13.498247146606444],["------",-13.498260498046877],["ジャ",-13.498272895812988],["ନର",-13.498296737670898],["▁င",-13.498302459716797],["▁bölümü",-13.498319625854492],["eficient",-13.49832248687744],["▁cipta",-13.498336791992188],["마사지",-13.49833869934082],["միջոց",-13.49834442138672],["▁бива",-13.49835205078125],["▁anser",-13.498353958129885],["ဖန္",-13.498358726501465
],["тски",-13.498370170593262],["▁laborum",-13.498373985290527],["වාස",-13.498385429382324],["▁gutter",-13.498387336730955],["eanu",-13.498394966125488],["▁نکرد",-13.498394966125488],["▁තාම",-13.498411178588867],["నింగ్",-13.498428344726562],["няга",-13.498459815979004],["▁nebol",-13.49846076965332],["▁Grant",-13.498462677001951],["isert",-13.49846649169922],["ຊົນ",-13.498470306396484],["▁Blok",-13.498485565185549],["法治",-13.49848747253418],["เข้าสู่",-13.498496055603027],["κέ",-13.498498916625977],["▁Bä",-13.498499870300291],["▁drumul",-13.498513221740724],["▁quantum",-13.498529434204102],["ligheten",-13.498530387878418],["کتور",-13.498531341552734],["Tá",-13.498534202575684],["pato",-13.498570442199709],["▁худалдаа",-13.49857234954834],["ிலிருந்து",-13.49858570098877],["bilde",-13.498597145080566],["▁нашага",-13.498597145080566],["▁Вод",-13.49860954284668],["ぞ",-13.498612403869627],["メリット",-13.498618125915527],["soy",-13.498624801635742],["▁neder",-13.498632431030272],["ソフト",-13.49863338470459],["▁especiais",-13.498649597167969],["▁nájsť",-13.498649597167969],["▁scilicet",-13.498649597167969],["▁Інформація",-13.498649597167969],["▁желтоқсан",-13.498649597167969],["▁अभिनेता",-13.498649597167969],["▁mahdollisimman",-13.498650550842283],["▁viikonloppu",-13.498650550842283],["▁அனுமதி",-13.498650550842283],["berechtigt",-13.498651504516602],["▁dijalankan",-13.498651504516602],["▁הפועל",-13.498651504516602],["▁Published",-13.498653411865234],["▁سماعت",-13.498656272888184],["▁девушка",-13.4986572265625],["แหละ",-13.498658180236816],["▁niciodata",-13.49866008758545],["▁находятся",-13.49866008758545],["▁musulman",-13.498661041259766],["பொழுது",-13.498661994934082],["▁Bloomberg",-13.498661994934082],["▁Gerçek",-13.498661994934082],["▁وأوضح",-13.498665809631348],["รายชื่อ",-13.49866771697998],["▁మార్చ",-13.49868392944336],["認め",-13.498686790466309],["ختار",-13.498689651489258],["▁බොරු",-13.498693466186523],["▁нужен",-13.498695373535156],["▁የአማራ",-13.498700141906738],["ಿರಲಿಲ್ಲ",-13.498702049255373],["シン",-13.498711585998535],["▁uchumi",-13.498714447021484],["▁అనేది",-13.498714447021484],["▁የአዲስ",-13.49872589111328],["▁үйлчилгээний",-13.49874210357666],["▁gwasanaethau",-13.49875259399414],["▁Krem",-13.49875545501709],["▁dimentica",-13.498756408691406],["▁possibilitat",-13.49876308441162],["▁tantos",-13.498766899108888],["▁فيصلو",-13.498774528503418],["плик",-13.49877643585205],["▁mò",-13.498779296875],["▁صدارت",-13.498785972595217],["කව",-13.49879264831543],["▁вентил",-13.498797416687012],["▁അഭിപ്രായം",-13.498821258544922],["▁hidrata",-13.498830795288086],["▁TIME",-13.498851776123049],["importanza",-13.498860359191896],["▁žaidė",-13.498875617980955],["▁बस्ने",-13.498875617980955],["▁dobe",-13.498884201049805],["добри",-13.49888801574707],["ъезд",-13.498902320861816],["▁ਵਧੀਆ",-13.498902320861816],["ցված",-13.498903274536133],["▁ўсім",-13.49893283843994],["▁Форма",-13.498940467834473],["▁ارتباطات",-13.498953819274902],["▁nagpa",-13.49895477294922],["▁professionisti",-13.498958587646484],["ነትን",-13.4989652633667],["▁súčasťou",-13.49897003173828],["ლურ",-13.498979568481444],["ileyo",-13.498992919921877],["▁nearly",-13.498992919921877],["▁անձի",-13.498998641967772],["itatii",-13.499001502990724],["和他",-13.49901008605957],["ONO",-13.499018669128418],["▁ברשת",-13.499029159545898],["▁fyn",-13.499031066894531],["utbildning",-13.499032020568848],["▁punte",-13.499041557312012],["▁instance",-13.499061584472656],["اٹ",-13.499072074890137],["▁კორ",-13.499083518981934],["ხეთ",-13.499095916748049],["اتتى",-13.4991102
21862791],["▁Izan",-13.499114990234377],["പാട",-13.499144554138184],["▁mv",-13.499146461486816],["▁Blå",-13.49915599822998],["▁brevi",-13.499192237854004],["▁chips",-13.499202728271484],["ገበያ",-13.499234199523926],["▁stanna",-13.49924087524414],["tumika",-13.499256134033203],["▁richiede",-13.49927043914795],["surf",-13.499275207519531],["ичан",-13.499302864074709],["▁rigtigt",-13.499305725097656],["▁dostali",-13.499306678771973],["έας",-13.499307632446287],["▁крал",-13.499316215515137],["ողի",-13.499344825744627],["vaihe",-13.499361991882324],["▁construción",-13.499363899230955],["▁คลิก",-13.499366760253906],["▁६०",-13.499384880065918],["小心",-13.499394416809082],["▁lana",-13.49941062927246],["▁Valst",-13.499428749084473],["▁ileri",-13.499429702758787],["▁medarbejdere",-13.49944305419922],["▁[[",-13.499462127685549],["▁topatu",-13.499463081359863],["▁অপর",-13.499475479125977],["▁Mandal",-13.49949073791504],["ເກັບ",-13.49949550628662],["▁орыс",-13.499500274658203],["▁sath",-13.499513626098633],["▁जाय",-13.49951457977295],["ĐT",-13.499519348144531],["▁મહિલા",-13.499524116516112],["tuki",-13.499530792236328],["▁մատ",-13.499547004699709],["ičan",-13.499551773071287],["luka",-13.49955940246582],["ијал",-13.49956226348877],["▁כּ",-13.499567031860352],["▁satur",-13.499571800231934],["wym",-13.499601364135742],["▁производител",-13.499601364135742],["に戻",-13.49960231781006],["ئٹ",-13.499616622924805],["▁Adult",-13.49963092803955],["▁heldu",-13.499649047851562],["ovník",-13.499653816223145],["▁równo",-13.499666213989258],["१५",-13.499667167663574],["ፍራ",-13.499672889709473],["એમ",-13.499689102172852],["▁palla",-13.499691009521484],["▁Рада",-13.499695777893066],["▁Atlas",-13.49970817565918],["▁умира",-13.499727249145508],["▁температури",-13.499751091003418],["▁провери",-13.49977207183838],["▁მერ",-13.499777793884276],["tyti",-13.49979019165039],["▁dallo",-13.499791145324709],["▁oggetti",-13.49979305267334],["పడ్డ",-13.499831199645996],["گردان",-13.499839782714844],["▁مانده",-13.499845504760742],["▁අල්ල",-13.499845504760742],["▁dniach",-13.499890327453612],["Best",-13.49990177154541],["tekin",-13.499906539916992],["▁Presta",-13.499929428100586],["ुत",-13.4999361038208],["▁həyatı",-13.499937057495115],["▁netti",-13.499958992004396],["▁klä",-13.499978065490724],["gly",-13.49998378753662],["ганы",-13.500000953674316],["▁пазара",-13.50001335144043],["නික",-13.500041007995604],["▁PLA",-13.500056266784668],["▁foreign",-13.500080108642578],["违",-13.500085830688477],["▁Vše",-13.50008773803711],["▁izziv",-13.500110626220703],["เซีย",-13.500134468078612],["ニー",-13.50013828277588],["ollywood",-13.500177383422852],["▁спр",-13.500184059143066],["▁arkitekt",-13.5001859664917],["03.20",-13.500197410583496],["ნული",-13.500197410583496],["лде",-13.500199317932127],["▁Gusti",-13.500204086303713],["▁Leser",-13.500204086303713],["三天",-13.500205993652344],["▁искрен",-13.500211715698242],["guem",-13.500221252441406],["▁menyu",-13.500235557556152],["▁ຈໍາ",-13.500239372253418],["思います",-13.500247955322266],["▁complement",-13.50026035308838],["在今年",-13.50027847290039],["▁Βι",-13.500303268432615],["Гра",-13.500306129455566],["▁תיק",-13.5003080368042],["▁изпълнение",-13.500308990478516],["គ្រូ",-13.500310897827148],["▁dicere",-13.50031280517578],["will",-13.500317573547363],["▁районе",-13.500321388244627],["▁palvel",-13.500341415405272],["▁İsti",-13.50036334991455],["ろう",-13.500375747680664],["నెట్",-13.50037670135498],["▁yılda",-13.500393867492676],["▁knyga",-13.500399589538574],["แว",-13.500425338745115],["▁doktora",-13.50042819
9768066],["จะได้รับ",-13.500438690185549],["בוי",-13.500445365905762],["▁ทั้งนี้",-13.50045394897461],["險",-13.50045394897461],["▁Rechte",-13.500454902648926],["cso",-13.50046157836914],["▁lihtsa",-13.500468254089355],["▁voleb",-13.50047206878662],["区块链",-13.500476837158203],["▁^_^",-13.500483512878418],["▁Гран",-13.500484466552734],["สงสัย",-13.500503540039062],["▁Dubrovnik",-13.500508308410645],["▁temple",-13.500508308410645],["▁órgão",-13.500508308410645],["▁İnformasiya",-13.500508308410645],["▁İngiltere",-13.500508308410645],["▁ऐतिहासिक",-13.500508308410645],["▁चुनौती",-13.500508308410645],["▁दूसरी",-13.500508308410645],["▁मनुष्य",-13.500508308410645],["▁मन्दिर",-13.500508308410645],["▁हालांकि",-13.500508308410645],["▁ອາຣ໌",-13.500508308410645],["▁ಮಾರ್ಗ",-13.50050926208496],["▁ಸಮಿತಿ",-13.50050926208496],["▁flexible",-13.500510215759276],["πια",-13.500511169433594],["▁Mister",-13.500515937805176],["▁earlier",-13.500515937805176],["▁egyéni",-13.50051975250244],["stimmung",-13.500521659851074],["▁európsk",-13.500528335571287],["▁Miljø",-13.500534057617188],["kúpiť",-13.500535011291504],["▁απειλ",-13.500539779663086],["▁ngài",-13.500547409057615],["haver",-13.500551223754885],["ชาวบ้าน",-13.500551223754885],["▁ondernemer",-13.500553131103516],["វ័យ",-13.500566482543944],["▁oranı",-13.500569343566896],["וידאו",-13.50057888031006],["▁tržištu",-13.500584602355955],["▁Milliy",-13.50058650970459],["▁klad",-13.500587463378906],["পর",-13.500601768493652],["պորտ",-13.500619888305664],["▁סיי",-13.500624656677246],["▁сүт",-13.500638961791992],["▁pitkään",-13.500655174255373],["dumu",-13.50066089630127],["ראש",-13.500673294067385],["라면",-13.500690460205078],["சர",-13.500691413879396],["▁витрати",-13.500691413879396],["▁selgita",-13.50069808959961],["▁אזור",-13.500726699829102],["▁Vaši",-13.500736236572266],["▁മരണം",-13.500737190246582],["lnych",-13.500770568847656],["ejší",-13.500774383544922],["▁қыр",-13.500781059265137],["▁نویس",-13.50078582763672],["▁protezione",-13.500786781311035],["▁ئېيت",-13.500792503356934],["▁բառ",-13.500814437866213],["的环境",-13.500822067260742],["▁horário",-13.500845909118652],["מרחק",-13.500853538513184],["▁elaborar",-13.500862121582031],["▁lepi",-13.500879287719728],["▁spesial",-13.500884056091309],["анг",-13.500894546508787],["▁သက္",-13.500897407531738],["▁दशक",-13.500920295715332],["チャー",-13.500921249389648],["၁၀",-13.500929832458496],["▁האלה",-13.500935554504396],["▁dövlətin",-13.50093936920166],["▁aplicat",-13.500941276550291],["▁українські",-13.500950813293455],["▁ihmisen",-13.500958442687988],["▁horre",-13.500967025756836],["▁król",-13.500970840454102],["▁സ്കൂള",-13.500991821289062],["ობები",-13.501005172729492],["▁öðru",-13.501005172729492],["ສຶກສາ",-13.501028060913086],["▁zdá",-13.501038551330566],["από",-13.501042366027832],["▁କେତେ",-13.50104522705078],["abilidad",-13.501055717468262],["▁Indra",-13.50106143951416],["تذكر",-13.50107192993164],["▁perdido",-13.50108528137207],["ராஜ்",-13.501094818115234],["▁првата",-13.501112937927246],["▁означен",-13.501124382019045],["ယံ",-13.50113582611084],["kröfu",-13.501138687133787],["▁کاربرد",-13.501140594482422],["NOW",-13.501141548156738],["▁algunhas",-13.501151084899902],["▁hewl",-13.50115203857422],["норм",-13.501152992248535],["調べて",-13.501168251037598],["کلی",-13.501172065734863],["▁podobno",-13.501173973083496],["tænk",-13.501182556152344],["▁Worte",-13.501187324523926],["无人",-13.501191139221191],["248",-13.501194953918455],["▁அழக",-13.501198768615724],["▁Кул",-13.501211166381836],["▁ferramentas",-13.50124168395996],["
▁teinud",-13.50125217437744],["▁Tsu",-13.50125789642334],["भिः",-13.501287460327148],["▁ملل",-13.501291275024414],["ời",-13.501296997070312],["▁និ",-13.501300811767578],["▁wonten",-13.501304626464844],["ม้า",-13.501325607299805],["डियो",-13.501330375671388],["CHO",-13.501364707946776],["aaaa",-13.50137996673584],["▁Čak",-13.501387596130373],["▁СМИ",-13.501387596130373],["▁povijesti",-13.50139045715332],["▁Haec",-13.50141429901123],["ledningen",-13.501429557800291],["▁šole",-13.50144386291504],["留意",-13.501453399658203],["▁pewne",-13.50145435333252],["▁größten",-13.501470565795898],["▁cuyo",-13.501473426818848],["šev",-13.501474380493164],["個人情報",-13.50147533416748],["όλι",-13.501493453979492],["▁domande",-13.501494407653809],["月初",-13.501494407653809],["praktik",-13.501505851745604],["aireacht",-13.501514434814451],["▁věd",-13.50152587890625],["▁rar",-13.50155544281006],["▁korri",-13.501561164855955],["▁Wand",-13.501585960388184],["gún",-13.50158977508545],["▁ಅಧಿಕಾರ",-13.50158977508545],["क्ट",-13.501598358154297],["ıt",-13.50160312652588],["实现了",-13.50160789489746],["RIC",-13.501684188842772],["▁förhållande",-13.501693725585938],["▁Cà",-13.501701354980469],["▁Lasīt",-13.501721382141112],["ბოლ",-13.50172996520996],["▁lemmik",-13.50173282623291],["▁第三",-13.501741409301758],["▁nejaké",-13.501744270324709],["درك",-13.501768112182615],["فين",-13.501768112182615],["▁عامر",-13.501778602600098],["▁palco",-13.501786231994627],["dām",-13.501791954040527],["કાળ",-13.501795768737791],["дарының",-13.50179672241211],["ничко",-13.50180435180664],["▁przebieg",-13.50180721282959],["▁әйел",-13.50181007385254],["▁hukuman",-13.501869201660156],["▁Exc",-13.50188446044922],["▁чергу",-13.501893997192385],["cionista",-13.501896858215332],["madığı",-13.501928329467772],["▁ఉంచ",-13.50193214416504],["filtr",-13.501938819885254],["▁palkka",-13.50194263458252],["▁Rusija",-13.501957893371582],["▁lært",-13.501961708068848],["▁руко",-13.501967430114746],["▁stupid",-13.501981735229492],["ロン",-13.501992225646973],["rinda",-13.501995086669922],["▁sekretari",-13.501998901367188],["▁existem",-13.502001762390137],["vzd",-13.5020112991333],["ೇಜ್",-13.50204372406006],["▁oly",-13.502092361450195],["есет",-13.502124786376951],["▁habitación",-13.502171516418455],["▁sahə",-13.502172470092772],["▁214",-13.502198219299316],["▁вака",-13.502219200134276],["▁այցել",-13.502256393432615],["୧୭",-13.502290725708008],["ാനി",-13.50229549407959],["不利",-13.502296447753906],["շնորհ",-13.502318382263184],["太平洋",-13.50233268737793],["▁espiritual",-13.50234031677246],["zungumza",-13.502344131469728],["温暖",-13.502344131469728],["kkejä",-13.502348899841309],["אפי",-13.502359390258787],["▁aproveita",-13.502359390258787],["▁rhyw",-13.502359390258787],["ชัดเจน",-13.50236988067627],["สุนัข",-13.50236988067627],["ေဇာ္",-13.502370834350586],["▁Espainiako",-13.502370834350586],["▁Perusahaan",-13.502370834350586],["▁amžiaus",-13.502370834350586],["▁gandrīz",-13.502370834350586],["▁liknande",-13.502370834350586],["▁mabadiliko",-13.502370834350586],["▁Гърция",-13.502370834350586],["▁Постановление",-13.502370834350586],["▁мэдээллийг",-13.502370834350586],["▁ਤੁਹਾਡੇ",-13.502370834350586],["▁საჯარო",-13.502370834350586],["▁έκδοση",-13.502371788024902],["▁одбране",-13.502371788024902],["▁ஏற்பட்ட",-13.502371788024902],["企画",-13.502371788024902],["▁наявності",-13.502375602722168],["valmis",-13.502376556396484],["Кор",-13.502376556396484],["弁護士",-13.502376556396484],["▁ਬਦਲ",-13.50238037109375],["▁פעמים",-13.502384185791016],["▁ಇಂತಹ",-13.502384185791016],["▁ہوگئی",-13.5
02385139465332],["▁příliš",-13.502386093139648],["▁బాగు",-13.502389907836914],["▁Osijek",-13.50239086151123],["▁സംഗീത",-13.50239086151123],["▁lahja",-13.502394676208496],["▁profesjonal",-13.502397537231444],["▁kokybės",-13.502402305603027],["ຫນ່ວຍ",-13.502403259277344],["▁각종",-13.502403259277344],["ممارسة",-13.502408981323242],["▁Расіі",-13.502410888671877],["asset",-13.502415657043455],["▁деяких",-13.502416610717772],["ыканкам",-13.502419471740724],["▁قيمة",-13.50242042541504],["ବନ୍ଧ",-13.502422332763672],["媽",-13.502435684204102],["▁संदेश",-13.502439498901367],["▁오래",-13.502445220947266],["▁naprosto",-13.502446174621582],["▁belav",-13.502447128295898],["妇女",-13.502447128295898],["▁місяців",-13.502449035644531],["▁സംവിധാനം",-13.502449989318848],["▁सन",-13.50245189666748],["▁dalë",-13.502452850341797],["▁ఎన్ని",-13.502456665039062],["▁patch",-13.502458572387695],["▁pemuda",-13.502469062805176],["▁inquit",-13.502470970153809],["▁ואף",-13.502471923828123],["▁froid",-13.50247287750244],["▁PËR",-13.50247859954834],["▁настаўнік",-13.502504348754885],["▁végre",-13.502506256103516],["ຕົກ",-13.502507209777832],["▁pesta",-13.502518653869627],["だけの",-13.502519607543944],["▁අරන්",-13.502522468566896],["▁בנו",-13.502535820007324],["▁sensación",-13.502540588378906],["овыя",-13.502554893493652],["▁منافع",-13.502574920654297],["▁സംഭവം",-13.50260066986084],["▁Padre",-13.502606391906738],["▁Trafik",-13.502613067626951],["不倫",-13.502626419067385],["▁lapte",-13.502633094787598],["▁англи",-13.502652168273926],["▁template",-13.502653121948242],["ERIA",-13.50265407562256],["associazione",-13.502663612365724],["▁galleri",-13.502676963806152],["▁assassin",-13.502715110778809],["▁یار",-13.50271987915039],["▁Veranstaltungen",-13.502732276916504],["▁zitzai",-13.50275993347168],["▁lied",-13.502762794494627],["มิน",-13.50277328491211],["liyini",-13.50278377532959],["研修",-13.50278663635254],["▁prakse",-13.502788543701172],["bierz",-13.502799987792969],["▁وإذا",-13.502802848815918],["▁بني",-13.502809524536133],["ўленне",-13.502811431884766],["▁moram",-13.50282096862793],["▁2,0",-13.502826690673828],["felé",-13.502836227416992],["▁amacı",-13.502866744995115],["سىر",-13.50287628173828],["▁Kreml",-13.502877235412598],["กล่าวว่า",-13.502899169921877],["klubben",-13.50290870666504],["▁možností",-13.502937316894531],["▁privalo",-13.502942085266112],["▁kadro",-13.50295066833496],["▁ఫై",-13.50295352935791],["▁Έχ",-13.502960205078123],["▁activité",-13.50299072265625],["serta",-13.503032684326172],["人力",-13.50305461883545],["▁moms",-13.50306224822998],["פנו",-13.503082275390623],["逆に",-13.503089904785156],["נש",-13.503092765808104],["▁ianya",-13.503097534179688],["▁കുറിച്ച",-13.50310516357422],["giai",-13.503108978271484],["▁käte",-13.503111839294434],["▁verific",-13.503128051757812],["▁pembelian",-13.503178596496582],["ļus",-13.503190994262695],["▁развитието",-13.503209114074709],["tavaa",-13.503213882446287],["▁tokony",-13.503214836120604],["μασ",-13.503223419189451],["emîn",-13.503241539001465],["▁bestelling",-13.503247261047363],["поста",-13.50325870513916],["▁Život",-13.50326156616211],["▁tinta",-13.503273010253906],["mely",-13.503275871276855],["ก็ยัง",-13.503291130065918],["بيض",-13.50329303741455],["▁favorable",-13.503301620483398],["▁feitas",-13.503305435180664],["raigh",-13.503308296203612],["мра",-13.503311157226562],["的政治",-13.503314971923828],["常に",-13.50331687927246],["하던",-13.503325462341309],["HIN",-13.50333023071289],["▁देत",-13.503351211547852],["▁viure",-13.503357887268066],["էն",-13.503374099731444],["hant",-13
.50340747833252],["▁знают",-13.503416061401367],["▁خب",-13.503422737121582],["▁وتق",-13.503425598144531],["▁കീഴ",-13.50343894958496],["▁الفنان",-13.503447532653809],["וגר",-13.503448486328123],["ေဒသ",-13.503456115722656],["▁எழுதிய",-13.503461837768556],["▁ознак",-13.50347900390625],["▁representación",-13.503479957580566],["▁sustine",-13.503483772277832],["▁Wege",-13.503485679626465],["टेल",-13.503487586975098],["▁ziar",-13.503488540649414],["เรือน",-13.50349235534668],["▁minste",-13.503497123718262],["ક્ક",-13.503499984741213],["การเมือง",-13.503521919250488],["ڙهي",-13.503534317016602],["▁Manual",-13.503544807434082],["डू",-13.503579139709473],["▁Alamat",-13.503605842590332],["▁dinh",-13.50360870361328],["▁מור",-13.503615379333496],["яет",-13.503623962402344],["නැ",-13.503623962402344],["▁wielkie",-13.50362491607666],["▁bany",-13.50363826751709],["ويب",-13.503656387329102],["▁OFF",-13.503661155700684],["▁దేశంలో",-13.503671646118164],["▁अधि",-13.503681182861328],["▁бомб",-13.503690719604492],["селе",-13.503711700439451],["▁ინტერნეტ",-13.50371265411377],["▁SNS",-13.503741264343262],["▁zait",-13.503750801086426],["▁бактери",-13.50376033782959],["▁belirten",-13.503774642944336],["品牌的",-13.503822326660156],["тото",-13.503826141357422],["γνω",-13.503830909729004],["▁Aplikasi",-13.503830909729004],["▁ອີກ",-13.5038480758667],["ម៉ោង",-13.503853797912598],["▁indult",-13.50385856628418],["▁হাসিনা",-13.50390625],["▁sors",-13.503924369812012],["培",-13.503947257995604],["raju",-13.503952026367188],["▁ប្រឆាំង",-13.503952026367188],["yoyi",-13.5039644241333],["āze",-13.50396728515625],["▁للعمل",-13.50396728515625],["880",-13.503997802734377],["▁usi",-13.504008293151855],["▁ЖК",-13.504009246826172],["▁skuffe",-13.504014015197754],["קבוצה",-13.504036903381348],["ÍA",-13.504037857055664],["▁Џе",-13.504037857055664],["วิเคราะห์",-13.504088401794434],["ुहुन्छ",-13.5040922164917],["लक",-13.504111289978027],["▁objave",-13.50411891937256],["いま",-13.504137992858888],["▁intet",-13.504142761230469],["▁yoksa",-13.50416088104248],["कृति",-13.504169464111328],["朴",-13.504175186157228],["帯",-13.504191398620604],["杂志",-13.504191398620604],["▁эв",-13.504195213317873],["玄",-13.504202842712402],["爺",-13.50420379638672],["وقوف",-13.504209518432615],["葛",-13.504213333129885],["解釋",-13.50422191619873],["▁примени",-13.504223823547363],["出现在",-13.50422477722168],["ийгмийн",-13.504225730895996],["▁titt",-13.504227638244627],["▁котор",-13.504227638244627],["▁Публ",-13.504233360290527],["ရှေ့",-13.50423526763916],["វគ្គ",-13.504236221313477],["▁Süleyman",-13.504236221313477],["▁Zamenhof",-13.504236221313477],["▁Αυγούστου",-13.504236221313477],["▁μπορούμε",-13.504236221313477],["▁Результат",-13.504236221313477],["▁нэвтрүүл",-13.504236221313477],["▁чэрвеня",-13.504236221313477],["▁צריכים",-13.504236221313477],["▁عرصه",-13.504236221313477],["▁ټرمپ",-13.504236221313477],["▁ښاغلي",-13.504236221313477],["▁ଏମିତି",-13.504236221313477],["▁വിശദ",-13.504236221313477],["▁ລິ້ງ",-13.504236221313477],["▁ድጋፍ",-13.504236221313477],["▁الإنترنت",-13.50423812866211],["▁έκθεση",-13.504239082336426],["▁აღნიშნა",-13.504243850708008],["▁konzerv",-13.50425624847412],["▁Олимп",-13.504273414611816],["▁qeverisë",-13.504287719726562],["▁tarjeta",-13.504291534423828],["készítés",-13.504292488098145],["▁diskussion",-13.504294395446776],["▁quotidian",-13.504300117492676],["▁Výrob",-13.504316329956056],["hár",-13.504318237304688],["సర్",-13.504319190979004],["▁Tisch",-13.50432014465332],["ଠାରେ",-13.50432300567627],["explo",-13.504327774047852],["▁Derecho",-13.5043
27774047852],["▁фабрика",-13.504327774047852],["дау",-13.504329681396484],["▁Geräte",-13.504351615905762],["▁habitants",-13.504355430603027],["▁گشت",-13.50435733795166],["ttaessa",-13.504359245300291],["▁encarga",-13.504364967346191],["面试",-13.504374504089355],["▁разгледа",-13.504383087158203],["ève",-13.504388809204102],["▁عالي",-13.50439167022705],["▁članica",-13.504420280456545],["わかる",-13.504443168640137],["▁ඕක",-13.5044584274292],["▁باستخدام",-13.504465103149414],["されること",-13.504472732543944],["szolgáltató",-13.504474639892578],["פגע",-13.504476547241213],["▁lluita",-13.50447940826416],["▁brani",-13.504484176635742],["▁залі",-13.504488945007324],["▁никогаш",-13.504490852355955],["▁જય",-13.504499435424805],["▁آتے",-13.504504203796388],["▁ഗുണ",-13.504510879516602],["τερες",-13.504534721374512],["▁foko",-13.504547119140623],["▁λ",-13.50455093383789],["▁خلاصه",-13.50455093383789],["▁цялото",-13.504555702209473],["▁хэд",-13.504556655883787],["▁الفصل",-13.504558563232422],["tenta",-13.504561424255373],["dzę",-13.504586219787598],["▁letras",-13.504621505737305],["▁Sahi",-13.504630088806152],["duire",-13.504637718200684],["▁specijal",-13.504651069641112],["▁Hazırda",-13.504657745361328],["▁Pixel",-13.504676818847656],["▁cerut",-13.50468635559082],["▁момче",-13.50471019744873],["ორის",-13.504714965820312],["▁representante",-13.504755973815918],["▁besef",-13.504765510559082],["사이즈",-13.504765510559082],["▁SUR",-13.504766464233398],["▁נכנס",-13.504773139953612],["oskop",-13.504786491394045],["▁Mire",-13.504807472229004],["pustili",-13.504822731018066],["▁dərc",-13.504839897155762],["▁Igaz",-13.50484848022461],["▁явах",-13.504853248596191],["▁анықтау",-13.504855155944824],["▁пие",-13.504862785339355],["▁използване",-13.504887580871582],["▁típus",-13.504888534545898],["7000",-13.504898071289062],["יטי",-13.504910469055176],["▁intende",-13.504913330078123],["üzə",-13.504921913146973],["로운",-13.504966735839844],["▁срам",-13.505001068115234],["▁آموز",-13.505012512207031],["σεως",-13.505013465881348],["▁premiers",-13.50502109527588],["▁בפר",-13.50503635406494],["▁싶다",-13.505038261413574],["▁taula",-13.505043983459473],["▁makita",-13.505050659179688],["▁Nueva",-13.50505828857422],["▁kindla",-13.505067825317385],["▁مسعود",-13.50507640838623],["▁అంతా",-13.50512409210205],["рэс",-13.505131721496582],["▁thả",-13.505141258239746],["koro",-13.50516128540039],["▁діл",-13.505171775817873],["gadā",-13.50517463684082],["മര",-13.505181312561035],["▁transportu",-13.505197525024414],["▁osserva",-13.505206108093262],["▁sluš",-13.505215644836426],["很棒",-13.505220413208008],["wszy",-13.50526237487793],["▁კანონი",-13.505276679992676],["▁ща",-13.505288124084473],["▁Ehe",-13.505289077758787],["எம்",-13.505290985107422],["▁leggi",-13.505298614501951],["▁Срп",-13.505324363708496],["对自己",-13.505326271057127],["PEL",-13.505335807800291],["▁lọc",-13.505361557006836],["עולם",-13.505364418029783],["▁ابر",-13.505369186401367],["ндагы",-13.505374908447266],["▁ekteskap",-13.505390167236328],["▁borgar",-13.50539493560791],["bale",-13.505399703979492],["ваўся",-13.505409240722656],["▁ռ",-13.505412101745604],["▁буз",-13.50542163848877],["னிய",-13.505422592163086],["▁Успе",-13.50542449951172],["▁Tiên",-13.505431175231934],["▁idegen",-13.5054349899292],["หลายๆ",-13.505436897277832],["ให้ได้",-13.505446434020996],["וואָ",-13.505449295043944],["▁SUB",-13.505464553833008],["▁Sejak",-13.505473136901855],["▁BAT",-13.505475997924805],["▁upravljanje",-13.50547695159912],["▁ដោយសារ",-13.505498886108398],["ištu",-13.505505561828612],["▁Nós",-13.5
05510330200195],["ลงทุน",-13.505513191223145],["▁знала",-13.505550384521484],["▁manage",-13.505553245544434],["▁stede",-13.5055570602417],["Pie",-13.505590438842772],["▁जरूरत",-13.505590438842772],["▁cumpli",-13.505613327026367],["▁zoom",-13.505617141723633],["▁Markus",-13.505655288696287],["発生",-13.505657196044922],["▁pangan",-13.50566864013672],["▁xứ",-13.50566864013672],["▁elektrisk",-13.505671501159668],["▁algunha",-13.505672454833984],["▁නිර්මාණ",-13.505708694458008],["▁ಅನುಭವ",-13.505714416503906],["čice",-13.50572395324707],["▁բացառ",-13.505732536315918],["80)",-13.505733489990234],["▁содержание",-13.505742073059082],["内で",-13.505754470825195],["▁Думаю",-13.505759239196776],["امه",-13.505762100219728],["▁mesele",-13.50576877593994],["演員",-13.505770683288574],["dhani",-13.505789756774902],["▁reportage",-13.505791664123535],["▁nappal",-13.505823135375977],["▁jedine",-13.50583267211914],["hív",-13.505847930908203],["▁Nossa",-13.505863189697266],["धू",-13.505864143371582],["▁հիշ",-13.505868911743164],["都知道",-13.505873680114746],["וכים",-13.505891799926758],["mendua",-13.505905151367188],["▁yıllarda",-13.505905151367188],["▁ಸಭೆ",-13.5059175491333],["عودة",-13.505929946899414],["▁obtine",-13.505952835083008],["ministri",-13.50595474243164],["▁первых",-13.505956649780272],["▁cafea",-13.505971908569336],["▁патолог",-13.505975723266602],["引进",-13.506003379821776],["▁कोष",-13.506032943725586],["▁Hiru",-13.506035804748535],["▁Finalmente",-13.50605010986328],["▁সহজ",-13.506054878234863],["▁kerta",-13.50605583190918],["▁tulemuse",-13.50606632232666],["▁سوئ",-13.506084442138672],["哀",-13.506085395812988],["▁öne",-13.506093978881836],["ڱ",-13.506103515625],["രാഷ്ട്ര",-13.506105422973633],["ህወሓት",-13.506105422973633],["▁bientôt",-13.506105422973633],["▁melindungi",-13.506105422973633],["▁sığorta",-13.506105422973633],["▁thầu",-13.506105422973633],["▁tisuća",-13.506105422973633],["▁tắt",-13.506105422973633],["▁звичайно",-13.506105422973633],["▁компанія",-13.506105422973633],["▁կունենա",-13.506105422973633],["▁تصویب",-13.506105422973633],["▁समन्वय",-13.506105422973633],["▁ਵੱਡੇ",-13.506105422973633],["దర్శ",-13.506107330322266],["▁Рейтинг",-13.506107330322266],["▁תהליך",-13.506109237670898],["▁Σήμερα",-13.506110191345217],["▁människa",-13.506111145019531],["▁खरिद",-13.506112098693848],["効率",-13.506112098693848],["借錢",-13.506113052368164],["▁Sachen",-13.50611400604248],["▁තේරුම්",-13.50611400604248],["▁vergessen",-13.506123542785645],["▁최초",-13.506123542785645],["▁твоя",-13.506125450134276],["▁તેણે",-13.506126403808594],["▁Comic",-13.506132125854492],["ахад",-13.50614070892334],["▁Virk",-13.50614070892334],["نموذج",-13.506152153015137],["▁mukaisesti",-13.50615406036377],["▁hjerne",-13.5061616897583],["▁Universiteit",-13.506163597106934],["▁любви",-13.506206512451172],["▁wichtigsten",-13.50620937347412],["如果不",-13.50621509552002],["▁याच",-13.506223678588867],["▁menyerang",-13.506226539611816],["▁naujas",-13.506230354309082],["▁למעשה",-13.506230354309082],["▁berbentuk",-13.506239891052246],["▁YPG",-13.506250381469728],["Test",-13.506258964538574],["▁naudu",-13.50629425048828],["119",-13.506295204162598],["▁maximal",-13.506299018859863],["▁dizaine",-13.506307601928713],["appelle",-13.506317138671877],["رول",-13.506336212158203],["foga",-13.506361961364746],["Brit",-13.506375312805176],["бю",-13.506378173828123],["▁Ekstra",-13.50638198852539],["▁ఏమ",-13.50638198852539],["▁അതിന്",-13.50641918182373],["віча",-13.506434440612791],["EW",-13.50643539428711],["▁začetku",-13.506440162658691],["ئاينىڭ",-13.506441116
333008],["▁відмінн",-13.506463050842283],["▁gräns",-13.506465911865234],["▁mohon",-13.50647258758545],["ವಲ್ಲ",-13.506477355957031],["aurais",-13.50648021697998],["▁تجمع",-13.506503105163574],["短短",-13.506508827209473],["▁קבלת",-13.506514549255373],["▁variedade",-13.50652313232422],["lietu",-13.506524085998535],["vám",-13.506531715393066],["才會",-13.506553649902344],["СЫ",-13.50655746459961],["brin",-13.506558418273926],["▁чадахгүй",-13.506559371948242],["तरी",-13.506573677062988],["▁גדולים",-13.506577491760254],["▁gjithe",-13.506585121154783],["▁அரச",-13.506589889526367],["meria",-13.506596565246582],["▁spins",-13.50662612915039],["▁tè",-13.506634712219238],["▁breyta",-13.506650924682615],["▁зноў",-13.506654739379885],["▁#3",-13.50666618347168],["▁скажу",-13.50671100616455],["сир",-13.50672721862793],["liklə",-13.50675106048584],["პარა",-13.506756782531738],["▁Accept",-13.506757736206056],["▁şeker",-13.506796836853027],["īru",-13.50681209564209],["▁isip",-13.506813049316406],["سټ",-13.506839752197266],["sambung",-13.506845474243164],["▁электронны",-13.506850242614746],["әсіп",-13.506876945495604],["την",-13.50688648223877],["JUM",-13.506891250610352],["wege",-13.506976127624512],["ώρα",-13.50699234008789],["ილის",-13.507010459899902],["▁Bota",-13.50701141357422],["▁ಮತ",-13.507020950317385],["חס",-13.507044792175291],["▁ستاد",-13.50704860687256],["▁plecat",-13.50705909729004],["▁informē",-13.50709342956543],["▁feitos",-13.507098197937012],["▁garantía",-13.507104873657228],["တီး",-13.507121086120604],["▁LTD",-13.507121086120604],["▁ими",-13.507137298583984],["▁الأمير",-13.507137298583984],["▁สาร",-13.507147789001465],["ართ",-13.507155418395996],["▁elemen",-13.507155418395996],["овиќ",-13.507165908813477],["tsua",-13.507177352905272],["▁Norman",-13.507193565368652],["當天",-13.507206916809082],["σος",-13.507209777832031],["▁가는",-13.507234573364258],["▁المص",-13.50723648071289],["▁साता",-13.50723648071289],["ÓN",-13.507237434387209],["iranih",-13.507258415222168],["▁ادبیات",-13.50727081298828],["AREN",-13.507271766662598],["▁pemb",-13.50727653503418],["માલ",-13.507302284240724],["▁safar",-13.507328033447266],["zene",-13.507332801818848],["llisuutta",-13.50733470916748],["▁sufletul",-13.50734519958496],["在了",-13.507378578186035],["▁člana",-13.507384300231934],["▁Kanad",-13.5073881149292],["▁områden",-13.507403373718262],["▁culturais",-13.507421493530272],["ताको",-13.507428169250488],["ිකා",-13.507442474365234],["▁постійн",-13.507471084594728],["▁руке",-13.507472038269045],["activa",-13.507495880126951],["▁drar",-13.507495880126951],["pokea",-13.5075101852417],["▁այց",-13.50751495361328],["▁skyldes",-13.507538795471191],["mouth",-13.507539749145508],["▁komentira",-13.507551193237305],["ும்போது",-13.507575035095217],["▁gospodin",-13.507600784301758],["▁Tapa",-13.507630348205566],["ಯವರ",-13.507631301879885],["見える",-13.50764274597168],["▁sayo",-13.507649421691896],["ège",-13.507650375366213],["▁знали",-13.507654190063477],["▁ئېلىش",-13.50766944885254],["ຮຽນ",-13.507681846618652],["▁Sociale",-13.5076904296875],["径",-13.507701873779297],["▁potentiel",-13.507705688476562],["Алтын",-13.507718086242676],["иці",-13.507719993591309],["▁valita",-13.50772190093994],["▁pajak",-13.50772762298584],["▁directeur",-13.507740020751951],["ТБ",-13.50774097442627],["▁розміщен",-13.507742881774902],["进去",-13.507756233215332],["จอด",-13.507760047912598],["▁bp",-13.507761001586914],["erien",-13.507766723632812],["▁ගම්",-13.507779121398926],["▁dalamnya",-13.50778865814209],["▁Nasa",-13.507789611816406],["سول",-13.50779914855
957],["ήθ",-13.507802963256836],["異常",-13.507805824279783],["▁melyet",-13.507834434509276],["▁නං",-13.50786018371582],["▁Varia",-13.507883071899414],["▁equipamentos",-13.50789737701416],["▁segmento",-13.507901191711426],["▁парад",-13.507911682128906],["▁UI",-13.507933616638184],["屏幕",-13.507948875427246],["▁সাংবাদিক",-13.507960319519045],["▁पाया",-13.507972717285156],["ဉ",-13.507977485656738],["▁Initiative",-13.507978439331056],["▁huolimatta",-13.507978439331056],["▁kepercayaan",-13.507978439331056],["▁непријатељ",-13.507978439331056],["▁تأثیر",-13.507978439331056],["▁لومړي",-13.507978439331056],["▁ପରୀକ୍ଷା",-13.507978439331056],["▁ఉపయోగించ",-13.507978439331056],["▁Allgemein",-13.507979393005373],["▁afhangend",-13.507980346679688],["▁ଭାବରେ",-13.507980346679688],["しかも",-13.507981300354004],["▁všeobecn",-13.507983207702637],["ထံ",-13.50798511505127],["บริ",-13.507986068725586],["▁ఎంపీ",-13.507987022399902],["▁rychlost",-13.50798797607422],["▁economy",-13.507988929748535],["▁бүлэг",-13.507990837097168],["tinimo",-13.507999420166016],["לאנד",-13.508002281188965],["▁finanšu",-13.50800609588623],["▁мальчик",-13.508009910583496],["▁മക്കള",-13.508009910583496],["▁политичка",-13.508010864257812],["▁тұрады",-13.508010864257812],["▁Verwendung",-13.508018493652344],["思い出",-13.508033752441406],["▁ОХУ",-13.508049964904783],["▁단계",-13.508052825927734],["▁जमीन",-13.508054733276367],["▁aide",-13.508055686950684],["ஹ்",-13.508063316345217],["▁sabiedrības",-13.508064270019531],["▁المنتخب",-13.508066177368164],["ご覧",-13.50806999206543],["可以用",-13.508084297180176],["കൊണ്ട്",-13.508091926574709],["▁ناشی",-13.508092880249023],["Bakı",-13.508094787597656],["лаў",-13.508099555969238],["gårds",-13.508102416992188],["▁Jura",-13.50810718536377],["▁отметил",-13.508126258850098],["▁temen",-13.508129119873049],["▁extremely",-13.508132934570312],["身材",-13.508135795593262],["quadra",-13.508155822753906],["▁peta",-13.508167266845703],["▁Запо",-13.508174896240234],["▁ਭੇਜ",-13.508176803588867],["DOR",-13.508184432983398],["försvar",-13.508209228515623],["ارن",-13.50821018218994],["▁zapyta",-13.50821018218994],["КР",-13.508213996887209],["▁হি",-13.508218765258787],["▁기획",-13.508220672607422],["▁ፓ",-13.508221626281738],["▁inovativ",-13.50822639465332],["ਆਪ",-13.508237838745115],["ramos",-13.50823974609375],["▁terzo",-13.508264541625977],["▁もし",-13.50828456878662],["▁pored",-13.508292198181152],["▁CZ",-13.508295059204102],["强大",-13.50829792022705],["▁nhàng",-13.508308410644531],["▁በነ",-13.508322715759276],["▁دبیر",-13.50838565826416],["▁burua",-13.508399963378906],["यान",-13.50840950012207],["▁Adams",-13.508423805236816],["▁yanzu",-13.508458137512209],["▁zpět",-13.508467674255373],["▁yaşındaki",-13.508474349975586],["▁risques",-13.508484840393066],["EKA",-13.50849151611328],["▁NHL",-13.50849151611328],["สเตอร์",-13.508503913879396],["▁HCM",-13.508506774902344],["▁rifle",-13.508506774902344],["välja",-13.50851821899414],["▁hätten",-13.508522033691406],["제품",-13.508522033691406],["וסף",-13.508529663085938],["▁соц",-13.508530616760254],["▁mengatur",-13.508556365966797],["▁osas",-13.508559226989746],["▁کلام",-13.50856876373291],["▁Estra",-13.508569717407228],["જ્ઞાન",-13.508573532104492],["▁üli",-13.50857639312744],["กั",-13.508602142333984],["▁õpi",-13.508604049682615],["▁experimenta",-13.508617401123049],["▁misteri",-13.508621215820312],["sistem",-13.508633613586426],["кави",-13.508645057678224],["▁knulla",-13.508647918701172],["ਰਵ",-13.508655548095703],["▁Украине",-13.508657455444336],["නුත්",-13.508686065673828],["toista",-13.508
709907531738],["▁imperator",-13.508712768554688],["▁достиг",-13.508712768554688],["▁enkla",-13.5087251663208],["▁statului",-13.50874137878418],["▁พี่",-13.50874137878418],["друг",-13.508769989013672],["pub",-13.508782386779783],["进展",-13.508788108825684],["тички",-13.50879192352295],["evit",-13.508798599243164],["你知道",-13.50881004333496],["▁حقوقی",-13.508819580078123],["▁Madax",-13.50886058807373],["▁postar",-13.508898735046388],["▁მიიღე",-13.508930206298828],["stien",-13.50893497467041],["▁ప్రజలు",-13.508940696716309],["find",-13.508941650390623],["ର୍କ",-13.50896453857422],["▁mengo",-13.508966445922852],["klama",-13.508981704711914],["ก็เป็น",-13.508995056152344],["έι",-13.508999824523926],["▁бирге",-13.50900936126709],["чкі",-13.50901699066162],["▁DHE",-13.509021759033203],["▁העבר",-13.509026527404783],["▁Dhaka",-13.50905704498291],["▁Stick",-13.509066581726074],["▁długi",-13.50907039642334],["арски",-13.509072303771973],["▁不过",-13.5090970993042],["ragan",-13.509099006652832],["iyalı",-13.509100914001465],["▁osobno",-13.509111404418944],["▁самих",-13.509113311767578],["ечен",-13.509119987487791],["pakan",-13.509130477905272],["▁loan",-13.509135246276855],["▁alian",-13.509142875671388],["मान्",-13.509147644042969],["دری",-13.50916576385498],["▁grov",-13.50918960571289],["▁одржан",-13.509196281433104],["ढे",-13.509198188781738],["ليك",-13.509209632873535],["▁intervento",-13.5092134475708],["只好",-13.509222030639648],["svjež",-13.50922393798828],["laştı",-13.509224891662598],["▁سيتم",-13.509276390075684],["▁Emilia",-13.50927734375],["▁мав",-13.509282112121582],["▁tarihleri",-13.509284019470217],["▁реалност",-13.509291648864746],["▁kører",-13.509296417236328],["▁Druga",-13.509303092956545],["ჟა",-13.50931453704834],["ଡିଆ",-13.509315490722656],["▁horse",-13.509322166442873],["▁вест",-13.509322166442873],["▁Monde",-13.509328842163086],["лоб",-13.50933837890625],["就好",-13.509350776672363],["▁sõbra",-13.509371757507324],["▁tërë",-13.509376525878906],["維修",-13.509377479553224],["▁അവര",-13.509380340576172],["难道",-13.509385108947754],["xie",-13.509387969970703],["強く",-13.509413719177246],["teadus",-13.509417533874512],["ਨਰ",-13.509431838989258],["osoite",-13.509444236755373],["िष",-13.509456634521484],["вина",-13.509476661682127],["▁revision",-13.50949001312256],["▁Іс",-13.509503364562988],["ଯାଇ",-13.509563446044922],["▁κοινωνία",-13.50957202911377],["▁secure",-13.509587287902832],["ಒ",-13.50959014892578],["▁Leggi",-13.509592056274414],["ифицира",-13.509597778320312],["▁Çek",-13.509617805480955],["▁Зелен",-13.509617805480955],["овати",-13.509624481201172],["▁adaptar",-13.509632110595703],["Din",-13.509657859802246],["önd",-13.509687423706056],["▁indicati",-13.509690284729004],["▁läser",-13.509703636169434],["▁odbora",-13.509713172912598],["▁посета",-13.509719848632812],["פיה",-13.509737968444824],["▁remata",-13.509750366210938],["fî",-13.50975227355957],["▁namang",-13.509754180908203],["铺",-13.50976848602295],["społeczn",-13.509777069091797],["▁venga",-13.509794235229492],["▁நண்பர்",-13.509804725646973],["anović",-13.509820938110352],["▁voler",-13.5098237991333],["▁ekipe",-13.50982666015625],["恢復",-13.509833335876465],["帆",-13.509842872619627],["▁melden",-13.509843826293944],["ចុច",-13.509854316711426],["▁některé",-13.509854316711426],["▁occaecat",-13.509854316711426],["▁succès",-13.509854316711426],["▁uglavnom",-13.509854316711426],["▁всъщност",-13.509854316711426],["▁अच्छे",-13.509854316711426],["▁পরিবর্তন",-13.509854316711426],["សម្លាប់",-13.509855270385742],["▁Naujienos",-13.509855270385742],["▁m
aršrut",-13.509855270385742],["▁इंटरनेट",-13.509855270385742],["▁АКШ",-13.509857177734377],["▁തോമസ്",-13.509858131408691],["▁диалог",-13.509860038757324],["▁금지",-13.509860038757324],["▁Zobraziť",-13.50986099243164],["▁Польшчы",-13.509862899780272],["▁έργα",-13.509865760803224],["ಯುಕ್ತ",-13.50986671447754],["▁amíg",-13.50986671447754],["▁plain",-13.50987148284912],["▁Министерства",-13.509881019592283],["▁थिएन",-13.509882926940918],["医師",-13.509883880615234],["▁Русији",-13.50988483428955],["[15]",-13.509888648986816],["▁Zindagi",-13.509896278381348],["▁የተሰ",-13.50990104675293],["▁szeroki",-13.509906768798828],["▁zgłosz",-13.509906768798828],["▁Martínez",-13.509907722473145],["εσαι",-13.50991439819336],["iteti",-13.509926795959473],["▁музеј",-13.509932518005373],["▁mensagem",-13.509933471679688],["ուդ",-13.50994873046875],["▁bavln",-13.50994873046875],["▁perdita",-13.509950637817385],["▁kuukausi",-13.50996208190918],["▁म्हणाले",-13.509973526000977],["▁основные",-13.50997829437256],["inami",-13.509979248046877],["▁៩",-13.509980201721191],["prozess",-13.509987831115724],["čního",-13.509997367858888],["▁Bū",-13.510005950927734],["ດັ່ງ",-13.510010719299316],["▁CRM",-13.510019302368164],["dering",-13.510026931762695],["▁mät",-13.510042190551758],["ຣີ",-13.51004409790039],["▁праграм",-13.510047912597656],["ницю",-13.510056495666504],["▁даје",-13.510071754455566],["Bab",-13.510086059570312],["vuotias",-13.510090827941896],["الق",-13.510128021240234],["ક્રમ",-13.510143280029297],["▁ፖለቲካ",-13.510162353515623],["▁carb",-13.510164260864258],["▁durchaus",-13.510170936584473],["õr",-13.510186195373535],["▁paprika",-13.510189056396484],["▁الأع",-13.510198593139648],["ที่บ้าน",-13.51021957397461],["▁ponsel",-13.510228157043455],["▁begint",-13.510255813598633],["▁tân",-13.510265350341797],["▁``",-13.510269165039062],["▁તેનો",-13.510269165039062],["多久",-13.510281562805176],["гиб",-13.510305404663086],["님의",-13.510305404663086],["基金會",-13.510309219360352],["有不少",-13.510313987731934],["▁klus",-13.51031494140625],["landes",-13.510318756103516],["▁Άν",-13.510330200195312],["▁gundê",-13.51036548614502],["▁nikad",-13.510369300842283],["JAR",-13.510372161865234],["▁suprem",-13.510388374328612],["▁faqen",-13.51039695739746],["▁ем",-13.510403633117676],["▁overveje",-13.510414123535156],["▁dilyn",-13.510416984558104],["▁желания",-13.510416984558104],["ibû",-13.510417938232422],["ເມ",-13.510424613952637],["▁tumi",-13.510428428649902],["▁macro",-13.5104398727417],["▁kézi",-13.510443687438965],["▁دعوی",-13.510453224182127],["โส",-13.510456085205078],["่อน",-13.510456085205078],["▁Banca",-13.510456085205078],["храни",-13.51048469543457],["▁жү",-13.51048755645752],["鏡頭",-13.510503768920898],["▁Skrif",-13.510580062866213],["▁ଜାରି",-13.510589599609377],["presidente",-13.510591506958008],["תיקון",-13.510605812072754],["▁sebut",-13.510610580444336],["aktig",-13.510628700256348],["▁шеш",-13.510629653930664],["fü",-13.510631561279297],["usko",-13.510640144348145],["нными",-13.510645866394045],["▁chyn",-13.51065158843994],["▁Contr",-13.510656356811523],["▁большим",-13.510663032531738],["▁ίδιος",-13.510684967041016],["▁здраво",-13.510687828063965],["टेक",-13.51069164276123],["▁dużą",-13.510693550109863],["▁nowej",-13.510702133178713],["λον",-13.510734558105469],["▁Tím",-13.510734558105469],["▁einu",-13.510759353637695],["▁megye",-13.510759353637695],["▁Mú",-13.51077365875244],["▁ਪ੍ਰੋ",-13.510777473449709],["चित्",-13.510788917541504],["▁fərq",-13.51078987121582],["人和",-13.51079273223877],["תור",-13.510793685913086],["gey",-13.510795
59326172],["▁ստորագր",-13.510815620422363],["▁dərin",-13.510826110839844],["▁រង",-13.510831832885742],["uerit",-13.510839462280272],["മല",-13.51084804534912],["▁قديم",-13.510856628417969],["▁Oskar",-13.5108642578125],["غار",-13.510869979858398],["▁visok",-13.51088523864746],["itty",-13.510897636413574],["ādē",-13.510904312133787],["తర",-13.510943412780762],["▁૫",-13.510955810546877],["фол",-13.510958671569824],["▁skle",-13.510958671569824],["▁valin",-13.510967254638672],["itetään",-13.510973930358888],["šila",-13.511049270629885],["▁202",-13.511061668395996],["džer",-13.511063575744627],["\\\\\\\\",-13.511065483093262],["SING",-13.511070251464844],["ການຄ້າ",-13.511077880859377],["ларынын",-13.511098861694336],["▁átti",-13.51111125946045],["ложение",-13.511130332946776],["▁Кла",-13.511154174804688],["ۇلۇ",-13.511165618896484],["▁hobi",-13.5111722946167],["▁kolegi",-13.511180877685549],["baixa",-13.511199951171877],["▁Stimme",-13.511207580566406],["▁kullandı",-13.511207580566406],["▁mengingat",-13.511228561401367],["神的",-13.511239051818848],["▁Kemp",-13.511242866516112],["▁Obec",-13.51125717163086],["▁serb",-13.511293411254885],["▁सजा",-13.511322975158691],["▁Ferien",-13.51133632659912],["▁findest",-13.51133918762207],["wong",-13.511380195617676],["▁ofrecen",-13.511414527893066],["дає",-13.511422157287598],["أداء",-13.511446952819824],["▁φυτ",-13.511463165283203],["sprogram",-13.511480331420898],["▁Desta",-13.51148796081543],["zola",-13.51149082183838],["መጀመሪያ",-13.511505126953123],["▁2,7",-13.511529922485352],["Փ",-13.511533737182615],["etc",-13.511542320251465],["▁italian",-13.511566162109377],["▁umani",-13.51159954071045],["▁millega",-13.511606216430664],["▁lân",-13.511610984802246],["▁سرما",-13.511618614196776],["91)",-13.511641502380373],["▁orasida",-13.51164436340332],["▁farger",-13.511646270751951],["▁sjukdom",-13.511651992797852],["ošās",-13.511658668518066],["▁veç",-13.5116605758667],["誓",-13.511662483215332],["炼",-13.511670112609863],["▁үндэс",-13.511673927307127],["堵",-13.511674880981444],["▁vanno",-13.511685371398926],["▁ಸೈ",-13.511696815490724],["▁intento",-13.51170539855957],["ландыру",-13.511710166931152],["পুরে",-13.511712074279783],["▁Орусия",-13.511716842651367],["財務",-13.511722564697266],["ማንኛውም",-13.511734008789062],["▁lẫn",-13.511734008789062],["▁opiskelija",-13.511734008789062],["▁ГЕРБ",-13.511734008789062],["▁পত্রিকা",-13.511734008789062],["▁සාර්ථක",-13.511734008789062],["둔",-13.511734008789062],["▁atsiliep",-13.51173496246338],["▁průměr",-13.51173496246338],["▁выклік",-13.51173496246338],["▁कोहली",-13.51173496246338],["▁aldonaj",-13.511735916137695],["▁павінна",-13.511735916137695],["▁բացահայտ",-13.511735916137695],["▁objednávky",-13.511736869812012],["▁счастлив",-13.511736869812012],["▁телеканал",-13.511736869812012],["▁disponeblas",-13.511737823486328],["▁potencijal",-13.511738777160645],["▁ارزیابی",-13.511738777160645],["▁Gjatë",-13.51173973083496],["▁огромна",-13.511741638183594],["▁yetkili",-13.511744499206545],["▁Evrope",-13.511746406555176],["▁μνημ",-13.511746406555176],["▁জন্ম",-13.511746406555176],["ುತ್ತೇವೆ",-13.511751174926758],["▁ಸಂಜೆ",-13.511752128601074],["▁Bologna",-13.511754035949709],["▁ներառ",-13.511754989624023],["▁Århus",-13.511758804321287],["▁πραγματικά",-13.511763572692873],["ລາຄາ",-13.511764526367188],["可怕",-13.511764526367188],["богат",-13.51176643371582],["▁genuin",-13.511771202087402],["▁extension",-13.511795043945312],["▁астам",-13.511798858642578],["▁өзара",-13.511798858642578],["▁rujna",-13.511812210083008],["▁grupā",-13.51181411743164],["▁
ସୋ",-13.51181411743164],["віць",-13.511837005615234],["発見",-13.511848449707031],["▁အသက္",-13.51185131072998],["▁құрылыс",-13.511853218078612],["fræ",-13.511855125427246],["οθετ",-13.511860847473145],["▁geheim",-13.51187515258789],["ဆိုရင်",-13.511884689331056],["▁전통",-13.511886596679688],["▁Pha",-13.511898040771484],["▁Edgar",-13.5119047164917],["▁Начин",-13.511908531188965],["▁Ezért",-13.511933326721191],["غم",-13.511938095092772],["▁jamoa",-13.511940956115724],["▁አልተ",-13.511948585510254],["高峰",-13.51194953918457],["▁skuad",-13.511950492858888],["▁priznanj",-13.511957168579102],["▁təcrübə",-13.511962890625],["▁verandering",-13.511969566345217],["പ്പിച്ച",-13.51198673248291],["▁DALAM",-13.511987686157228],["▁وعدم",-13.511999130249023],["▁نڪ",-13.51200008392334],["▁straat",-13.512001991271973],["▁afspraak",-13.512002944946287],["ಫಾ",-13.512003898620604],["չական",-13.512008666992188],["▁mocht",-13.51201057434082],["▁установа",-13.512020111083984],["▁Sayang",-13.51206398010254],["▁zvezi",-13.512064933776855],["▁bedien",-13.512083053588867],["ေစာ",-13.512085914611816],["▁pinakama",-13.512092590332031],["▁certamente",-13.512107849121094],["▁కన్",-13.51211166381836],["▁önkormányzati",-13.512118339538574],["పోయి",-13.51213550567627],["▁päätty",-13.512151718139648],["▁allez",-13.512152671813965],["tyczna",-13.512154579162598],["▁zdravje",-13.512208938598633],["▁මිය",-13.512208938598633],["▁वेब",-13.512214660644531],["▁predlaga",-13.512218475341797],["Україна",-13.512236595153809],["▁Rash",-13.512240409851074],["кір",-13.512259483337402],["▁1870",-13.5122709274292],["▁ስል",-13.512287139892578],["רבי",-13.512290000915527],["▁degmada",-13.51229190826416],["▁изпълнител",-13.512322425842283],["▁същия",-13.512328147888184],["▁въпроса",-13.51233196258545],["▁aanay",-13.512353897094728],["▁kuigi",-13.512357711791992],["▁grups",-13.51237678527832],["▁išmok",-13.51238250732422],["য়াল",-13.512383460998535],["▁Staff",-13.512383460998535],["▁šū",-13.512423515319824],["▁โปรโมชั่น",-13.512435913085938],["▁telesa",-13.512453079223633],["▁wenige",-13.51245403289795],["▁presión",-13.512457847595217],["เหมาะสม",-13.512483596801758],["▁rammer",-13.512490272521973],["▁dvora",-13.512507438659668],["▁კვირა",-13.512547492980955],["ന്നി",-13.51254940032959],["▁confess",-13.512596130371094],["▁hatékony",-13.512608528137209],["▁emotion",-13.512650489807127],["▁рожден",-13.512673377990724],["▁ویکی",-13.512715339660645],["▁делают",-13.512720108032228],["▁zaslon",-13.512724876403809],["годишњи",-13.51272964477539],["▁embora",-13.512733459472656],["ακτ",-13.512737274169922],["▁помер",-13.512740135192873],["▁технологиялар",-13.512789726257324],["▁Väl",-13.51279640197754],["▁ишин",-13.512797355651855],["▁fitte",-13.512815475463867],["solo",-13.512816429138184],["▁reta",-13.512822151184082],["▁prices",-13.512825965881348],["dell",-13.512836456298828],["▁مذ",-13.512845993041992],["得知",-13.51285171508789],["īcija",-13.512863159179688],["▁følelser",-13.512872695922852],["ۇت",-13.51291561126709],["▁sahəsi",-13.51293659210205],["▁korisnika",-13.512950897216797],["▁кофе",-13.512957572937012],["▁содржина",-13.512959480285645],["URS",-13.512964248657228],["▁yolunu",-13.512994766235352],["▁전기",-13.513005256652832],["申し",-13.513006210327148],["▁альбо",-13.513009071350098],["▁Корея",-13.513038635253906],["▁riziko",-13.51304817199707],["▁קמ",-13.51304817199707],["gab",-13.513063430786133],["poçt",-13.513073921203612],["▁попут",-13.513076782226562],["なんか",-13.513076782226562],["στρο",-13.513097763061523],["▁épp",-13.51311492919922],["够",-13.51
312255859375],["▁biryar",-13.513123512268066],["duit",-13.51315975189209],["▁wenza",-13.513171195983888],["▁hvaða",-13.513187408447266],["εγ",-13.51320743560791],["▁používání",-13.513214111328123],["▁vahva",-13.513223648071287],["▁رم",-13.513223648071287],["രൂപ",-13.513239860534668],["陽光",-13.513239860534668],["▁проведение",-13.513243675231934],["▁ക്ര",-13.51325511932373],["04)",-13.513260841369627],["▁frek",-13.513277053833008],["▁Thom",-13.513282775878906],["▁колдо",-13.51328945159912],["▁dikare",-13.513303756713867],["ològic",-13.513306617736816],["▁शांत",-13.513319969177246],["Prin",-13.513328552246094],["▁fordít",-13.51333999633789],["▁کاپی",-13.513352394104004],["ixin",-13.51337718963623],["▁zusätzlich",-13.513386726379396],["▁مباشر",-13.513404846191406],["ખી",-13.513408660888672],["▁neuvo",-13.513412475585938],["▁paže",-13.513412475585938],["՞ն",-13.513423919677734],["tragen",-13.513429641723633],["▁утас",-13.513439178466797],["▁Հար",-13.513442039489746],["▁Dende",-13.513456344604492],["ഒരു",-13.513466835021973],["яга",-13.513469696044922],["TIE",-13.513471603393556],["kreft",-13.513472557067873],["ინტერ",-13.513483047485352],["liit",-13.513484001159668],["▁zgjidhje",-13.513486862182615],["▁ස්ථාන",-13.513498306274414],["분석",-13.513501167297363],["మంది",-13.513505935668944],["něte",-13.513516426086426],["κος",-13.51352882385254],["ooda",-13.513541221618652],["▁שיער",-13.513545036315918],["shirts",-13.513572692871094],["▁ważny",-13.513572692871094],["▁zuvor",-13.513572692871094],["罐",-13.51358413696289],["▁----",-13.513587951660156],["্যু",-13.513596534729004],["cité",-13.513599395751951],["lipo",-13.513602256774902],["▁منصوب",-13.51360321044922],["혹",-13.513606071472168],["ၤ",-13.513607025146484],["楽しんで",-13.513607025146484],["กรุงเทพ",-13.513612747192385],["ແຕກ",-13.513615608215332],["สะอาด",-13.513616561889648],["▁Komunaĵo",-13.513617515563965],["▁canlynol",-13.513617515563965],["▁gấp",-13.513617515563965],["▁mwanamke",-13.513617515563965],["▁nắng",-13.513617515563965],["▁spolupráci",-13.513617515563965],["▁насельніцтва",-13.513617515563965],["▁شەھىرى",-13.513617515563965],["▁لومړی",-13.513617515563965],["▁ગઝલ",-13.51361846923828],["▁akhbar",-13.51362133026123],["▁تأثير",-13.51362133026123],["▁ਵਿਸ਼ੇਸ਼",-13.513622283935549],["▁самоуправления",-13.513623237609863],["顧問",-13.513623237609863],["▁První",-13.513631820678713],["▁levegő",-13.513632774353027],["売却",-13.513632774353027],["▁Argument",-13.513633728027344],["▁swo",-13.513635635375977],["▁경찰",-13.513635635375977],["▁livsstil",-13.513639450073242],["▁movemento",-13.513641357421877],["▁куќа",-13.51364803314209],["▁badala",-13.51365089416504],["▁hiển",-13.513652801513672],["▁دانشجو",-13.513653755187988],["氧化",-13.513671875],["▁ألا",-13.513672828674316],["▁följer",-13.513677597045898],["▁eğitimi",-13.513680458068848],["▁reglement",-13.513689994812012],["▁майстер",-13.513691902160645],["lisema",-13.51370620727539],["เร่ง",-13.513710975646973],["▁ਦਸ",-13.513731956481934],["▁profesionais",-13.513751029968262],["▁بالغ",-13.513753890991213],["GIS",-13.513758659362791],["▁wyjazd",-13.513764381408691],["▁pelikula",-13.513800621032717],["âne",-13.513802528381348],["▁своё",-13.513802528381348],["વાન",-13.513811111450195],["▁darlle",-13.513866424560549],["いや",-13.513874053955078],["րտ",-13.513879776000977],["▁чаму",-13.51388168334961],["▁טיי",-13.513893127441406],["▁loạt",-13.513904571533203],["▁especialistas",-13.513931274414062],["初步",-13.513937950134276],["▁ಪ್ರಕಟ",-13.513940811157228],["▁ribe",-13.51394748687744],["ක්ක",-13.513948440551758]
,["▁đâ",-13.513982772827148],["▁onih",-13.513991355895996],["▁සිටියේ",-13.513992309570312],["▁कवि",-13.513998031616213],["▁responsabilité",-13.51401424407959],["▁gatavo",-13.51402187347412],["struktuur",-13.51404857635498],["を紹介",-13.514062881469728],["▁ipsius",-13.51406478881836],["▁Hatta",-13.514071464538574],["▁тихо",-13.514093399047852],["წევრ",-13.514095306396484],["áiste",-13.514108657836914],["▁sözləri",-13.514123916625977],["ctva",-13.514134407043455],["gögn",-13.514156341552734],["რას",-13.514184951782228],["▁косата",-13.514189720153809],["▁kojom",-13.514193534851074],["▁sahədə",-13.514201164245604],["summa",-13.514208793640137],["▁tiče",-13.514227867126465],["absence",-13.514241218566896],["тражи",-13.5142822265625],["地铁",-13.514284133911133],["▁Kontrolle",-13.514293670654297],["▁обеспечение",-13.514297485351562],["▁Chun",-13.514307975769045],["▁усп",-13.51434326171875],["▁ٽيم",-13.514352798461914],["තෙක්",-13.51435375213623],["-34",-13.514357566833496],["gyan",-13.514361381530762],["▁remis",-13.514362335205078],["ywał",-13.514376640319824],["kleid",-13.514391899108888],["tığını",-13.514392852783203],["▁neler",-13.514402389526367],["教堂",-13.51441478729248],["▁פּאַ",-13.514415740966797],["▁හර",-13.514415740966797],["γω",-13.514423370361328],["likleri",-13.514427185058594],["▁назар",-13.514437675476074],["▁રાખવા",-13.514451026916504],["▁domy",-13.51447296142578],["ңиз",-13.514473915100098],["웨",-13.514481544494627],["▁sonde",-13.51449966430664],["▁വിവരങ്ങൾ",-13.514501571655272],["39)",-13.514503479003906],["▁српских",-13.514537811279297],["▁basah",-13.51455307006836],["▁tampoco",-13.514573097229004],["forbund",-13.514585494995115],["▁አንዳንድ",-13.514592170715332],["▁Газ",-13.514610290527344],["▁avevo",-13.514613151550291],["чину",-13.51461410522461],["乗り",-13.514618873596191],["一股",-13.51462459564209],["ψαν",-13.514650344848633],["▁vase",-13.514671325683594],["δων",-13.514678001403809],["рыш",-13.514698028564451],["전자",-13.514698028564451],["▁sanit",-13.51470184326172],["▁(36)",-13.5147123336792],["»).",-13.514718055725098],["▁പ്രോ",-13.514718055725098],["zər",-13.514723777770996],["▁paviment",-13.514732360839844],["▁Krye",-13.514735221862791],["itvijo",-13.514771461486816],["▁régime",-13.514776229858398],["▁Halle",-13.514778137207031],["íny",-13.51478672027588],["▁គឺជា",-13.514790534973145],["▁geliş",-13.514792442321776],["▁wynaj",-13.51479434967041],["ബോ",-13.514801025390623],["▁làng",-13.514822006225586],["▁Програма",-13.514822006225586],["▁часы",-13.51485538482666],["یکی",-13.514864921569824],["izacije",-13.514885902404783],["▁美國",-13.514896392822266],["▁herramienta",-13.514925956726074],["KSI",-13.514936447143556],["▁дараах",-13.514941215515137],["▁салбарт",-13.51494312286377],["פוט",-13.514944076538086],["▁Ausland",-13.514965057373049],["▁огромно",-13.514982223510742],["▁môj",-13.514986991882324],["ירו",-13.515006065368652],["▁ڌر",-13.51501750946045],["▁жүргүз",-13.515019416809082],["が出来",-13.515021324157717],["我自己",-13.515022277832031],["▁көлік",-13.515043258666992],["▁tokë",-13.51505184173584],["▁závis",-13.515080451965332],["Ша",-13.515098571777344],["อยู่แล้ว",-13.515103340148926],["构",-13.515106201171877],["▁системата",-13.515121459960938],["cepat",-13.515131950378418],["▁Okay",-13.515149116516112],["▁ας",-13.51516056060791],["zunehmen",-13.515182495117188],["вини",-13.515188217163086],["▁dituzu",-13.515193939208984],["▁Petter",-13.515220642089844],["▁medicament",-13.515222549438477],["▁Система",-13.515231132507324],["крил",-13.5152587890625],["גורם",-13.515270233154297],["
▁سماج",-13.515271186828612],["▁debet",-13.515277862548828],["▁Bruce",-13.515304565429688],["вард",-13.515326499938965],["ხარი",-13.51532745361328],["▁ແບບ",-13.515332221984863],["▁måten",-13.515336990356444],["actual",-13.515345573425291],["ották",-13.515345573425291],["מז",-13.515351295471191],["▁apartado",-13.515365600585938],["ئۇ",-13.51536750793457],["▁Jus",-13.51539421081543],["▁cek",-13.515399932861328],["▁клієнт",-13.515405654907228],["▁desto",-13.515406608581545],["▁640",-13.51543140411377],["不便",-13.515433311462402],["▁நட",-13.515436172485352],["▁paus",-13.515442848205566],["党员",-13.51547145843506],["čih",-13.515485763549805],["ാണെന്ന്",-13.51548671722412],["義務",-13.51548957824707],["▁Ciri",-13.51549243927002],["Qəbələ",-13.515503883361816],["รีสอร์ท",-13.515503883361816],["▁ləğv",-13.515503883361816],["▁mittlerweile",-13.515503883361816],["▁اصطلاح",-13.515503883361816],["▁المباراة",-13.515503883361816],["▁गम्भीर",-13.515503883361816],["▁ரஜினி",-13.515503883361816],["텍",-13.515503883361816],["▁Comhairle",-13.515504837036133],["▁Tätigkeit",-13.515504837036133],["▁käesoleva",-13.515504837036133],["▁स्मार्टफोन",-13.515504837036133],["▁શરીર",-13.515504837036133],["▁Bởi",-13.51550579071045],["▁zalogowanych",-13.51550579071045],["▁выигрыш",-13.51550579071045],["▁ապրիլի",-13.51550579071045],["▁kesinlikle",-13.515506744384766],["▁mrefu",-13.515507698059082],["▁vollständig",-13.515509605407717],["▁copyright",-13.515510559082031],["▁послове",-13.51551342010498],["解説",-13.51551342010498],["▁szombat",-13.515518188476562],["▁usluga",-13.515522003173828],["ከራ",-13.515532493591309],["▁şiddet",-13.515532493591309],["prod",-13.515542984008787],["▁ufficiale",-13.515542984008787],["würdig",-13.515546798706056],["▁मागणी",-13.515546798706056],["▁얘기",-13.515548706054688],["מלחמה",-13.515551567077637],["JAM",-13.51555633544922],["▁дамуы",-13.5155611038208],["▁공급",-13.515567779541016],["▁Треба",-13.51557731628418],["▁ವರ್ಗ",-13.515581130981444],["▁відгук",-13.515588760375977],["▁გადაი",-13.515588760375977],["▁liberi",-13.515596389770508],["▁бағдарламалар",-13.515610694885254],["▁Lodge",-13.51561164855957],["▁partita",-13.515617370605469],["غني",-13.51562213897705],["▁cheart",-13.515625],["▁Mivel",-13.515631675720217],["ायचं",-13.515636444091797],["рыл",-13.51563835144043],["CIE",-13.515654563903809],["▁پرویز",-13.515658378601074],["▁പുറത്ത്",-13.51566219329834],["▁hetzelfde",-13.515666007995604],["▁تهدید",-13.515666007995604],["▁वय",-13.515669822692873],["▁ئايال",-13.515671730041504],["▁FK",-13.51567268371582],["▁закр",-13.515679359436035],["ເຕີ",-13.515684127807615],["▁jäänud",-13.515700340270996],["▁तिर",-13.515700340270996],["▁державних",-13.515701293945312],["▁Форм",-13.515703201293944],["▁Құр",-13.515703201293944],["▁lezzet",-13.515711784362791],["▁bahis",-13.515721321105955],["▁koles",-13.515743255615234],["מערב",-13.515746116638184],["▁נאכ",-13.515754699707031],["▁Basha",-13.51575756072998],["▁tusen",-13.51576042175293],["venire",-13.515765190124512],["şları",-13.515785217285156],["NBA",-13.515814781188965],["опт",-13.51584529876709],["▁чыкты",-13.515851020812988],["▁helps",-13.515870094299316],["▁isteme",-13.515870094299316],["▁solito",-13.515870094299316],["▁skur",-13.51588249206543],["▁alakul",-13.515888214111328],["▁chw",-13.51590347290039],["▁tělo",-13.515910148620604],["▁innlegget",-13.515913963317873],["▁втората",-13.5159330368042],["▁അന്ന്",-13.515941619873049],["▁sento",-13.515944480895996],["▁우수",-13.51595687866211],["បុ",-13.51597023010254],["▁haurà",-13.515971183776855],["▁eventuali",-13.
515978813171388],["πως",-13.515987396240234],["克斯",-13.51599407196045],["▁όρο",-13.515996932983398],["▁သူ့",-13.515996932983398],["▁2016-2017",-13.516002655029297],["ცო",-13.516003608703612],["続ける",-13.516006469726562],["▁skill",-13.516020774841309],["▁blaen",-13.516021728515623],["▁kresk",-13.516042709350586],["の方に",-13.516048431396484],["ရိုက်",-13.51605987548828],["▁Kreativ",-13.516067504882812],["▁средне",-13.516082763671877],["▁helye",-13.516095161437988],["▁panāk",-13.516095161437988],["▁droite",-13.516108512878418],["▁sklepie",-13.51611328125],["▁вядомы",-13.516132354736328],["▁hunc",-13.516141891479492],["▁තරගය",-13.51614475250244],["▁смысле",-13.516161918640137],["▁staveb",-13.516173362731934],["omia",-13.516178131103516],["▁sprawdza",-13.51618766784668],["▁Фар",-13.516201972961426],["ैक",-13.516220092773438],["▁қаз",-13.516234397888184],["▁Moses",-13.5162353515625],["liyinin",-13.516237258911133],["▁nodi",-13.516275405883787],["шпа",-13.516298294067385],["▁სია",-13.516307830810549],["έξ",-13.516338348388672],["▁구글",-13.516347885131836],["sztott",-13.516357421875],["▁përmend",-13.516361236572266],["▁lirë",-13.516369819641112],["ਬੂ",-13.516374588012695],["стэр",-13.51637840270996],["קין",-13.51638126373291],["▁пытанне",-13.516403198242188],["етка",-13.516412734985352],["▁Тест",-13.516432762145996],["ეტის",-13.516448974609377],["▁distrug",-13.51645851135254],["▁физика",-13.516459465026855],["વુ",-13.51646327972412],["wula",-13.516478538513184],["▁ويت",-13.516480445861816],["▁favori",-13.51650619506836],["Kun",-13.516520500183104],["▁dijous",-13.516542434692385],["▁Hungari",-13.516551971435549],["これで",-13.516556739807127],["▁325",-13.516561508178713],["τερη",-13.516566276550291],["ляци",-13.51658058166504],["ganggu",-13.516583442687988],["▁Қара",-13.516590118408203],["าะ",-13.516605377197266],["▁talvez",-13.516605377197266],["▁dates",-13.516618728637695],["▁жұмыстары",-13.516632080078123],["रात",-13.516636848449709],["▁Новом",-13.516640663146973],["rozi",-13.516643524169922],["▁รูป",-13.516656875610352],["▁Alde",-13.516678810119627],["▁Astra",-13.516685485839844],["外国人",-13.516688346862791],["rání",-13.516694068908691],["▁കൂട്ടി",-13.516695976257324],["▁다르",-13.51669979095459],["▁213",-13.516704559326172],["abileceğiniz",-13.516731262207031],["▁гостите",-13.516740798950195],["▁edessä",-13.516741752624512],["jata",-13.516751289367676],["▁Unge",-13.516751289367676],["▁ಗೊತ್ತ",-13.516752243041992],["▁Omar",-13.516762733459473],["▁identidade",-13.516766548156738],["▁tiyang",-13.516777992248535],["구요",-13.516803741455078],["▁schwierig",-13.516805648803713],["ગો",-13.516827583312988],["▁moški",-13.516828536987305],["нков",-13.516834259033203],["▁mjere",-13.51685619354248],["▁euren",-13.516863822937012],["▁միջոցներ",-13.516879081726074],["▁iritsi",-13.51688003540039],["▁Çalışma",-13.516880989074709],["▁pum",-13.516898155212402],["काे",-13.516904830932615],["صوص",-13.516907691955566],["ākais",-13.516921997070312],["▁Txi",-13.516932487487791],["188",-13.51694107055664],["kasvatus",-13.516977310180664],["▁consell",-13.517013549804688],["باه",-13.51702117919922],["▁КМ",-13.517035484313965],["▁வட",-13.517040252685549],["layıb",-13.517043113708496],["▁paint",-13.517057418823242],["▁خم",-13.51707363128662],["▁अस्ति",-13.517077445983888],["യൊക്കെ",-13.517084121704102],["▁Neve",-13.517084121704102],["▁ярилц",-13.517091751098633],["▁ਹਿ",-13.517111778259276],["järvi",-13.51717472076416],["▁filio",-13.51717472076416],["ಯರ್",-13.517206192016602],["▁ಜೀವ",-13.517216682434082],["▁жайы",-13.517218589782717],[
"▁aturan",-13.517227172851562],["szczenia",-13.517236709594728],["praz",-13.517263412475586],["ทัวร์",-13.517264366149902],["▁ölü",-13.517271041870115],["元的",-13.517312049865724],["饮",-13.51731300354004],["logen",-13.517358779907228],["兹",-13.517358779907228],["▁wiadomo",-13.517374038696287],["▁යුද",-13.517374038696287],["lääk",-13.517390251159668],["রাষ্ট্র",-13.517393112182615],["నారాయణ",-13.517394065856934],["▁Atribuite",-13.517394065856934],["▁bărbat",-13.517394065856934],["▁Éirinn",-13.517394065856934],["▁παράδειγμα",-13.517394065856934],["▁اطمینان",-13.517394065856934],["ზაფხულ",-13.51739501953125],["▁dalībnieki",-13.51739501953125],["▁மகிழ்ச்சி",-13.51739501953125],["▁ڪامياب",-13.517396926879885],["▁savršen",-13.5173978805542],["▁udostępni",-13.517399787902832],["▁сначала",-13.517399787902832],["▁Põhja",-13.517400741577148],["▁गरिरहेको",-13.517400741577148],["ศิลปะ",-13.517401695251465],["หลักสูตร",-13.517401695251465],["▁الحقيقي",-13.517401695251465],["▁कल्पना",-13.517401695251465],["▁завершен",-13.517407417297363],["ျပဳလုပ္",-13.51740837097168],["▁גוגל",-13.517410278320312],["▁വിശ്വാസ",-13.517412185668944],["বন্ধ",-13.517415046691896],["sääntö",-13.517431259155272],["▁एयर",-13.51743221282959],["ptic",-13.517437934875488],["▁Boeing",-13.517438888549805],["▁spesiell",-13.517438888549805],["▁ủy",-13.517440795898438],["যোগ্য",-13.51744556427002],["▁غږ",-13.517452239990234],["▁Ehkä",-13.51745319366455],["▁አይደለም።",-13.517454147338867],["▁referência",-13.517459869384766],["▁படத்தில்",-13.517464637756348],["кало",-13.517467498779297],["▁వ్యాఖ్యలు",-13.517470359802246],["▁מחדש",-13.517478942871094],["▁Qadın",-13.51747989654541],["PRI",-13.517483711242676],["תמיכה",-13.517485618591309],["▁ceļu",-13.51748752593994],["▁viajar",-13.517491340637209],["▁layiq",-13.517508506774902],["moqchi",-13.517511367797852],["ေတာင္း",-13.517518043518066],["oikeus",-13.517523765563965],["▁نومبر",-13.51752471923828],["▁করলে",-13.517548561096191],["ברו",-13.517562866210938],["▁tratado",-13.51756763458252],["מפ",-13.517572402954102],["完美的",-13.51758098602295],["▁नंतर",-13.517581939697266],["▁නොද",-13.51758861541748],["7.000",-13.517592430114746],["▁طو",-13.51759433746338],["કેટ",-13.517611503601074],["▁Popis",-13.517614364624023],["ckého",-13.517616271972656],["▁motyw",-13.517621994018556],["▁həkim",-13.517624855041504],["JT",-13.517653465270996],["▁विजयी",-13.517662048339844],["新人",-13.517662048339844],["foi",-13.517675399780272],["▁muszą",-13.51770305633545],["▁начини",-13.517708778381348],["ধি",-13.51771068572998],["▁Мама",-13.51771354675293],["▁جراحی",-13.517715454101562],["十九",-13.51772117614746],["▁bolest",-13.517725944519045],["▁yapmış",-13.51773166656494],["▁ikä",-13.51775074005127],["▁lähti",-13.5177640914917],["▁pilna",-13.517767906188965],["વળ",-13.51776885986328],["▁permettent",-13.517779350280762],["pleje",-13.51778507232666],["▁Bibi",-13.51779556274414],["raud",-13.517817497253418],["权力",-13.517817497253418],["fón",-13.517820358276367],["ερα",-13.51782512664795],["▁මැතිවරණ",-13.517831802368164],["tették",-13.51783561706543],["만한",-13.517839431762695],["▁egyház",-13.517844200134276],["ადმი",-13.517849922180176],["positie",-13.517901420593262],["▁Parce",-13.517914772033691],["▁కేంద్రం",-13.517937660217283],["▁материала",-13.517959594726562],["ündig",-13.517966270446776],["▁Horizon",-13.517973899841309],["▁ਆਖ",-13.517973899841309],["▁tutup",-13.517988204956056],["Forum",-13.517993927001951],["▁차량",-13.517998695373535],["৪০",-13.518001556396484],["ثم",-13.518021583557127],["▁грант",-13.518025398254396],
["▁выраб",-13.51803493499756],["▁matrimoni",-13.518054962158203],["ධාර",-13.51806640625],["Sky",-13.51807689666748],["▁ಬಳಸಿ",-13.518094062805176],["▁Eger",-13.518107414245604],["▁අන්",-13.5181245803833],["ด้วยความ",-13.51812744140625],["▁ხალ",-13.518132209777832],["ХҮ",-13.518141746520996],["▁tulle",-13.518168449401855],["体の",-13.518183708190918],["▁redaktita",-13.518192291259766],["tywny",-13.518206596374512],["▁kallas",-13.518220901489258],["robot",-13.51822280883789],["▁oddział",-13.518223762512209],["▁movement",-13.51824188232422],["хоо",-13.518258094787598],["▁dienų",-13.518291473388672],["▁배송",-13.518316268920898],["▁totally",-13.51832103729248],["▁وست",-13.518360137939451],["▁كلام",-13.5183687210083],["同行",-13.518373489379885],["▁velikost",-13.518387794494627],["tingimuste",-13.518404006958008],["prowadził",-13.518433570861816],["овала",-13.518445014953612],["▁savait",-13.518479347229004],["برز",-13.518484115600586],["▁supr",-13.518492698669434],["▁Башка",-13.518497467041016],["对中国",-13.518497467041016],["eftirlit",-13.51850128173828],["▁localizado",-13.518524169921877],["▁վաճառ",-13.518540382385254],["mpet",-13.518550872802734],["のための",-13.518560409545898],["▁заряд",-13.518566131591797],["▁impresi",-13.518588066101074],["лище",-13.518594741821287],["▁Цаг",-13.51860523223877],["נצי",-13.51860809326172],["▁Soal",-13.518610000610352],["ARU",-13.518616676330566],["teessa",-13.518625259399414],["8,000",-13.518656730651855],["这项",-13.518668174743652],["توان",-13.518692016601562],["▁טבעי",-13.518692016601562],["▁takkan",-13.51870346069336],["ണാ",-13.518715858459473],["ैर",-13.518721580505373],["ynaya",-13.518731117248535],["▁ипак",-13.518739700317385],["▁అర్థం",-13.518743515014648],["▁põhja",-13.51874542236328],["θαν",-13.518753051757812],["udvalget",-13.518760681152344],["▁ביו",-13.518760681152344],["▁Insa",-13.518790245056152],["▁educativo",-13.518797874450684],["▁dotaz",-13.518800735473633],["אנ",-13.518875122070312],["ionem",-13.518884658813477],["▁211",-13.518884658813477],["▁especializado",-13.518890380859377],["▁կիս",-13.518891334533691],["▁delito",-13.518893241882324],["▁ماد",-13.518903732299805],["ුවක්",-13.518906593322754],["ں۔",-13.518932342529297],["▁XS",-13.51893711090088],["▁mynda",-13.518940925598145],["जाल",-13.518942832946776],["▁নামে",-13.518991470336914],["▁детска",-13.519027709960938],["godi",-13.519046783447266],["ਬਲ",-13.51905918121338],["这家",-13.51906967163086],["のではないか",-13.51907730102539],["▁prospekt",-13.519100189208984],["▁played",-13.519124984741213],["▁traumat",-13.519185066223145],["kupi",-13.519186973571776],["▁512",-13.519206047058104],["▁Ποι",-13.519232749938965],["ارک",-13.51923656463623],["▁järgmise",-13.51924991607666],["吞",-13.51925563812256],["甚麼",-13.519259452819824],["參觀",-13.519265174865724],["▁термо",-13.519269943237305],["▁тұлғалар",-13.519269943237305],["▁ಹಬ್ಬ",-13.51927661895752],["螺",-13.51927661895752],["威而鋼",-13.519277572631836],["肌肉",-13.519280433654783],["ປະກອບ",-13.51928424835205],["▁Tiranës",-13.519286155700684],["¦",-13.519288063049316],["▁Kesihatan",-13.519288063049316],["▁کړکۍ",-13.519288063049316],["▁ওয়েবসাইট",-13.519288063049316],["▁ఏడాది",-13.519288063049316],["▁പ്രസിഡന്റ്",-13.519288063049316],["탑",-13.519288063049316],["קבוצות",-13.519289016723633],["กระจก",-13.519289016723633],["▁առաջնորդ",-13.519289016723633],["▁වගකීම",-13.519289016723633],["▁دہلی",-13.51928997039795],["รัตน์",-13.519290924072266],["▁допомогу",-13.519290924072266],["▁sytuacja",-13.519291877746582],["MAZ",-13.519294738769531],["▁szerencs",-13.519294738769531],["
▁اغلب",-13.519294738769531],["▁સુરત",-13.519296646118164],["▁මානව",-13.51930332183838],["▁настолько",-13.519304275512695],["▁મોકલ",-13.519307136535645],["ຈັດຕັ້ງ",-13.519309043884276],["▁kādā",-13.519311904907228],["▁posledic",-13.519315719604492],["▁قاي",-13.519315719604492],["▁규정",-13.51931858062744],["▁responsabilità",-13.519319534301758],["▁mozgás",-13.519322395324709],["▁Außen",-13.519326210021973],["гэж",-13.519329071044922],["extra",-13.519330978393556],["ರೆಯ",-13.519351959228516],["▁thổ",-13.519356727600098],["的文章",-13.51935863494873],["okok",-13.519360542297363],["▁hija",-13.519362449645996],["▁Nüüd",-13.519381523132324],["▁bingung",-13.519391059875488],["▁etmişdir",-13.51939296722412],["ຕອບ",-13.519402503967283],["▁širi",-13.519402503967283],["▁الخارج",-13.519408226013184],["▁tykkää",-13.519418716430664],["▁sugu",-13.519423484802246],["▁compatibil",-13.519427299499512],["▁slegs",-13.519428253173828],["▁pievieno",-13.51943588256836],["▁대하여",-13.519448280334473],["കുമാര",-13.519451141357422],["▁структури",-13.519455909729004],["▁оставя",-13.519458770751951],["▁Xây",-13.519466400146484],["▁hiervoor",-13.519466400146484],["▁ליצור",-13.519475936889648],["ਜਿ",-13.519478797912598],["▁Захар",-13.519478797912598],["▁Munduko",-13.519489288330078],["▁presun",-13.519493103027344],["▁tjedan",-13.51950740814209],["▁Stel",-13.519509315490724],["▁tudás",-13.519546508789062],["▁costas",-13.519551277160645],["▁jurist",-13.519551277160645],["ською",-13.519556045532228],["▁روزانه",-13.51956272125244],["▁promociona",-13.51956844329834],["▁personliga",-13.519569396972656],["ルー",-13.519571304321287],["▁Viel",-13.519596099853516],["▁ketidak",-13.519596099853516],["ilmektedir",-13.519598960876465],["▁entri",-13.51962947845459],["▁автори",-13.519655227661133],["▁qind",-13.519683837890623],["▁obiect",-13.519705772399902],["▁musuh",-13.51973819732666],["▁yhä",-13.519753456115724],["▁аскер",-13.519757270812988],["▁юрист",-13.519757270812988],["เรามี",-13.519771575927734],["рих",-13.519798278808594],["yahay",-13.51980209350586],["การค้า",-13.519805908203123],["▁berguna",-13.51980972290039],["▁gawa",-13.519814491271973],["07)",-13.519821166992188],["▁Колко",-13.5198392868042],["աճ",-13.51984977722168],["준다",-13.519855499267578],["▁hoef",-13.519857406616213],["事は",-13.519862174987791],["들도",-13.51988410949707],["▁pust",-13.51988697052002],["▁1.2.",-13.519908905029297],["相似",-13.51992416381836],["▁požar",-13.519943237304688],["▁berde",-13.5199556350708],["കാരി",-13.519960403442385],["▁الجد",-13.519974708557127],["▁arbetet",-13.519976615905762],["▁Comunidade",-13.52003288269043],["▁хэвлэл",-13.520065307617188],["▁jedné",-13.52007007598877],["▁අනිත්",-13.520086288452148],["▁평화",-13.520094871520996],["▁direktori",-13.520112037658691],["ярод",-13.52012538909912],["▁leger",-13.52014446258545],["▁Woman",-13.52016258239746],["лёт",-13.520167350769045],["tagande",-13.52017307281494],["▁Boys",-13.52017307281494],["元気",-13.520179748535156],["▁kapasite",-13.520186424255373],["▁සර්",-13.520203590393066],["▁ค่า",-13.520212173461914],["▁cierta",-13.520215034484863],["▁გავი",-13.52022933959961],["▁తప్పు",-13.52026081085205],["dulla",-13.520281791687012],["▁Religi",-13.52028751373291],["îtî",-13.520291328430176],["וור",-13.52030086517334],["саг",-13.520310401916504],["ాక",-13.520318984985352],["▁kontant",-13.520320892333984],["▁rooms",-13.52035427093506],["▁Jule",-13.520363807678224],["▁پزشک",-13.52037239074707],["ზმ",-13.520401000976562],["▁выполня",-13.520407676696776],["▁объектив",-13.520410537719728],["地理",-13.52041625976562
3],["▁gösteren",-13.520431518554688],["▁olvasó",-13.52043914794922],["▁tendenci",-13.520442008972168],["ملك",-13.52046012878418],["▁прычын",-13.520464897155762],["иро",-13.52049446105957],["▁grož",-13.520508766174316],["өлө",-13.520520210266112],["▁mengisi",-13.52052116394043],["▁jelentés",-13.520525932312012],["▁simpli",-13.520563125610352],["njih",-13.520575523376465],["พบว่า",-13.520577430725098],["美丽",-13.52059555053711],["एसपी",-13.520618438720703],["▁soddisfa",-13.520621299743652],["▁բնակ",-13.520622253417969],["▁darbību",-13.520631790161133],["▁Llei",-13.520641326904297],["▁militares",-13.520647048950195],["utente",-13.520648956298828],["▁જોઈ",-13.52065658569336],["▁სამინისტროს",-13.52065658569336],["avaju",-13.520665168762209],["kysely",-13.520672798156738],["מם",-13.520672798156738],["▁Уже",-13.520674705505373],["▁моля",-13.5206880569458],["▁prévu",-13.520713806152344],["▁sisältä",-13.520756721496582],["юз",-13.52076244354248],["ласка",-13.52077579498291],["dóttir",-13.520794868469238],["▁pots",-13.520800590515137],["86)",-13.520819664001465],["▁металл",-13.520832061767578],["waga",-13.52084255218506],["▁बहन",-13.520869255065918],["տոն",-13.520898818969728],["▁uzrok",-13.520938873291016],["ということは",-13.520941734313965],["▁Giro",-13.520973205566406],["▁Crime",-13.520980834960938],["kės",-13.520990371704102],["▁titulu",-13.52099323272705],["barat",-13.521010398864746],["ോട്ട്",-13.52101707458496],["▁પરિ",-13.521018028259276],["-55",-13.52103042602539],["▁hrein",-13.521042823791504],["프라",-13.52106761932373],["วิว",-13.521081924438477],["▁BK",-13.521085739135742],["▁bhai",-13.52108669281006],["ísima",-13.521092414855955],["▁Julio",-13.52110767364502],["伍",-13.521110534667969],["پاکستان",-13.521111488342283],["拟",-13.521132469177246],["▁Tết",-13.521162986755373],["洽",-13.52116584777832],["廟",-13.52117156982422],["▁አፍ",-13.521174430847168],["▁Vikidatumoj",-13.5211820602417],["pĺňa",-13.521185874938965],["▁gyventojų",-13.521185874938965],["▁menyertai",-13.521185874938965],["▁δεδομένα",-13.521185874938965],["▁увеличава",-13.521185874938965],["▁шоколад",-13.521185874938965],["▁مشكلة",-13.521185874938965],["▁हिन्दू",-13.521185874938965],["▁তারিখ",-13.521185874938965],["꺼",-13.521185874938965],["רשטאג",-13.52118682861328],["▁વરસાદ",-13.52118682861328],["▁අභි",-13.52118682861328],["▁bdsm",-13.521187782287598],["▁tədris",-13.521187782287598],["▁தெரியுமா",-13.521187782287598],["teľov",-13.52118968963623],["▁তাঁর",-13.52119255065918],["▁Uygulama",-13.521194458007812],["▁950",-13.521195411682127],["▁यसअघि",-13.521195411682127],["ပြုလုပ်",-13.521196365356444],["▁gamutin",-13.521201133728027],["▁लोकसभा",-13.521201133728027],["▁ditambah",-13.521203994750977],["▁Gerät",-13.52121353149414],["▁орнууд",-13.521221160888672],["تجاوز",-13.521225929260254],["▁ന്യൂ",-13.521225929260254],["▁Témata",-13.521227836608888],["▁محنت",-13.521227836608888],["▁vervolgens",-13.521228790283203],["▁përdorur",-13.52123737335205],["▁Менску",-13.52127170562744],["▁badań",-13.52127456665039],["വിൽ",-13.521281242370604],["▁ভারত",-13.521297454833984],["▁Güney",-13.521302223205566],["溫度",-13.521306037902832],["▁корот",-13.521309852600098],["▁304",-13.521324157714844],["▁klop",-13.521326065063477],["▁Hitz",-13.52134609222412],["hlad",-13.521349906921388],["pusti",-13.52135181427002],["▁Caixa",-13.521354675292969],["▁fiel",-13.521360397338867],["▁pozná",-13.521374702453612],["▁sahiptir",-13.521376609802246],["▁قانوني",-13.52137851715088],["uše",-13.521380424499512],["▁Table",-13.521392822265623],["▁Beim",-13.521402359008787],["滿意"
,-13.521403312683104],["▁седмици",-13.52141571044922],["▁descoperi",-13.52143669128418],["▁esperanto",-13.521439552307127],["▁lewens",-13.52144718170166],["▁реклами",-13.52147388458252],["▁tagged",-13.521478652954102],["▁расходы",-13.52148151397705],["▁Vatan",-13.521489143371582],["▁mouth",-13.52150058746338],["обо",-13.52151107788086],["▁länkar",-13.521520614624023],["▁rasmiy",-13.521520614624023],["受付",-13.521529197692873],["▁maoni",-13.521531105041504],["▁flight",-13.521533012390137],["▁okrog",-13.521536827087402],["▁приватни",-13.521539688110352],["▁Surf",-13.521565437316896],["غیر",-13.52157211303711],["▁(38)",-13.521591186523438],["Sil",-13.521598815917969],["นวด",-13.521612167358398],["▁ufor",-13.521641731262209],["ຄັ້ງທີ",-13.521647453308104],["ട്ടോ",-13.521656036376951],["▁väčš",-13.521660804748535],["рыў",-13.521665573120115],["▁보험",-13.521684646606444],["ફા",-13.521709442138672],["▁სამყარო",-13.521726608276367],["▁ноги",-13.521728515625],["້ມ",-13.521737098693848],["puma",-13.52173900604248],["▁نشانہ",-13.52174186706543],["हिं",-13.521743774414062],["▁vrut",-13.521748542785645],["▁Kimi",-13.521750450134276],["▁valóság",-13.521760940551758],["▁శాతం",-13.521763801574709],["فاصل",-13.521772384643556],["▁asju",-13.521787643432615],["access",-13.521827697753906],["▁мужчын",-13.521860122680664],["▁клип",-13.521867752075195],["▁רוח",-13.521873474121094],["▁Предлага",-13.52190399169922],["ർന്ന",-13.5219087600708],["коло",-13.521918296813965],["▁urat",-13.521921157836914],["▁Žu",-13.521944999694824],["▁sombra",-13.521961212158203],["▁prišli",-13.521990776062012],["aysaa",-13.521992683410645],["▁Join",-13.522005081176758],["▁obtain",-13.522005081176758],["מפגש",-13.522006034851074],["▁notað",-13.522010803222656],["▁நிலைய",-13.522012710571287],["▁දක්",-13.522015571594238],["▁nød",-13.522021293640137],["▁Європейськ",-13.522027015686035],["的神",-13.5220308303833],["▁кампания",-13.52204418182373],["▁síce",-13.522049903869627],["观看",-13.522049903869627],["▁ಹುಟ್ಟ",-13.522068977355955],["▁Кел",-13.522083282470703],["▁հաշվի",-13.522085189819336],["▁distribution",-13.522087097167969],["▁amico",-13.522089958190918],["時点で",-13.522089958190918],["▁Gaan",-13.522095680236816],["지가",-13.522117614746094],["西藏",-13.522127151489258],["▁Goran",-13.52214241027832],["▁кажем",-13.522144317626951],["ंडे",-13.522164344787598],["VES",-13.522173881530762],["ணு",-13.522174835205078],["▁locum",-13.522174835205078],["မတ္",-13.522178649902344],["▁madje",-13.522178649902344],["ქვეშ",-13.52219295501709],["▁Справ",-13.52221965789795],["▁látky",-13.522241592407228],["▁געווי",-13.522266387939451],["▁۲۱",-13.5222749710083],["▁Хү",-13.522279739379885],["▁Таны",-13.522286415100098],["▁БАЙ",-13.522294998168944],["ଦେବ",-13.522323608398438],["▁spoke",-13.522333145141602],["▁commencer",-13.522340774536133],["▁masturba",-13.522346496582031],["ถา",-13.522361755371094],["▁dron",-13.52236557006836],["▁facit",-13.522383689880373],["▁ئورنى",-13.522396087646484],["▁taron",-13.522401809692385],["eczki",-13.522427558898926],["▁prolaz",-13.522464752197266],["glad",-13.522468566894531],["▁операцій",-13.522505760192873],["▁обществе",-13.522513389587402],["poñen",-13.52251434326172],["▁biliyor",-13.522520065307615],["ĖS",-13.522522926330566],["▁katıldı",-13.522537231445312],["1950",-13.522541046142578],["▁Organizator",-13.522541999816896],["▁მედია",-13.522552490234377],["ෙනවා",-13.522563934326172],["▁ჩაი",-13.522563934326172],["▁xaq",-13.522586822509766],["VENT",-13.522592544555664],["▁веществ",-13.522592544555664],["хин",-13.522600173950195],[
"▁gündem",-13.522604942321776],["ളു",-13.522605895996094],["▁palavra",-13.522618293762209],["▁bildene",-13.522656440734863],["▁საერთოდ",-13.522668838500977],["ஸி",-13.522676467895508],["▁وفد",-13.522680282592772],["▁Ամեն",-13.522700309753418],["▁متو",-13.522724151611328],["▁kelių",-13.522738456726074],["▁원하는",-13.522747039794922],["▁toţi",-13.522770881652832],["▁ენა",-13.522783279418944],["անգլ",-13.522820472717283],["ಲಿದ್ದಾರೆ",-13.522829055786133],["▁Bear",-13.52284812927246],["活动的",-13.522852897644045],["ግጥ",-13.522891998291016],["▁güneş",-13.522900581359863],["下列",-13.522904396057127],["▁کاش",-13.522905349731444],["▁powerful",-13.522933959960938],["好看",-13.522954940795898],["▁Abel",-13.523004531860352],["സ്ഥിതി",-13.523005485534668],["舟",-13.523009300231934],["▁εκδ",-13.523012161254885],["辉",-13.523021697998049],["回忆",-13.523030281066896],["会出现",-13.523042678833008],["糊",-13.523058891296388],["ுதல்",-13.523061752319336],["俄羅斯",-13.523064613342283],["bedrijf",-13.523070335388184],["▁ವಿಜಯ",-13.52307415008545],["izācija",-13.523076057434082],["83)",-13.52308750152588],["▁Shakespeare",-13.52308750152588],["▁համայնքի",-13.52308750152588],["▁پهريون",-13.52308750152588],["▁Napoleon",-13.523088455200195],["▁पराजित",-13.523088455200195],["▁ചോദിക്ക",-13.523088455200195],["▁xưa",-13.523089408874512],["参照",-13.523089408874512],["▁Perempuan",-13.523091316223145],["▁цэвэр",-13.523091316223145],["▁ఇప్పటికే",-13.523093223571776],["▁επίθεση",-13.52309513092041],["▁vajzë",-13.523098945617676],["▁उपाध्यक्ष",-13.523098945617676],["Қазақ",-13.523100852966309],["▁ਬਾਬਾ",-13.52310562133789],["▁ruž",-13.523107528686523],["とともに",-13.523107528686523],["DUR",-13.52310848236084],["▁노래",-13.523115158081056],["▁섹스",-13.523119926452637],["▁फी",-13.523120880126951],["▁നേട",-13.523123741149902],["▁шаарынын",-13.523131370544434],["▁വരുത്ത",-13.5231351852417],["▁Somos",-13.523138046264648],["moral",-13.523141860961914],["يئر",-13.52314281463623],["▁Gebruik",-13.523143768310549],["収入",-13.523149490356444],["ғым",-13.523153305053713],["▁előző",-13.523155212402344],["▁උනත්",-13.52315616607666],["ทะ",-13.523174285888672],["▁Naţional",-13.523178100585938],["▁tilby",-13.523187637329102],["▁Monument",-13.52319049835205],["▁인생",-13.52319049835205],["▁ګډ",-13.523191452026367],["▁යළි",-13.52319622039795],["levering",-13.523197174072266],["▁виховання",-13.523200035095217],["▁המשך",-13.523228645324709],["▁Complex",-13.523241996765137],["▁مرجع",-13.523249626159668],["waj",-13.5232515335083],["▁prol",-13.52325439453125],["▁הברית",-13.523262977600098],["▁termic",-13.52326488494873],["គ្រឿង",-13.52326774597168],["▁tænker",-13.523310661315918],["▁angajat",-13.523314476013184],["주소",-13.523317337036133],["prenant",-13.523319244384766],["▁гарадск",-13.52332878112793],["▁desses",-13.523340225219728],["▁Neidio",-13.523343086242676],["▁Vertrag",-13.52334976196289],["pice",-13.523350715637209],["äkin",-13.523371696472168],["▁የኢ",-13.52337646484375],["▁promotor",-13.523383140563965],["▁😞",-13.523385047912598],["мәдени",-13.523406982421877],["oleku",-13.523423194885254],["▁hlavy",-13.523428916931152],["ਖਿਆ",-13.52344036102295],["ליל",-13.52345371246338],["▁Şey",-13.523455619812012],["局面",-13.52347183227539],["りする",-13.523472785949709],["YAR",-13.523478507995604],["▁savdo",-13.523481369018556],["▁khoán",-13.52349090576172],["▁etmesi",-13.523496627807615],["नका",-13.523500442504885],["▁vhod",-13.52350616455078],["▁Bạch",-13.52350902557373],["előtt",-13.523519515991213],["ක්ම",-13.523541450500488],["▁nötig",-13.52355670928955],["adapt",-13.5235662460
32717],["▁dále",-13.52358341217041],["▁spes",-13.523588180541992],["的样子",-13.52359104156494],["նայ",-13.523612022399902],["grim",-13.523615837097168],["▁Soru",-13.523615837097168],["▁generos",-13.52362060546875],["▁એસ",-13.523635864257812],["▁Regularo",-13.52365016937256],["▁Dzīv",-13.523652076721191],["▁হক",-13.523662567138672],["▁mî",-13.523689270019531],["▁NEM",-13.52369499206543],["ameti",-13.523703575134276],["▁Tex",-13.523709297180176],["Hol",-13.523723602294922],["387",-13.523763656616213],["▁špič",-13.523776054382324],["据悉",-13.523777961730955],["▁laku",-13.523834228515623],["▁treffe",-13.52383804321289],["▁ರಾಷ್ಟ್ರ",-13.52383804321289],["▁Սիր",-13.523842811584473],["▁filosof",-13.5238676071167],["▁желае",-13.523887634277344],["חייב",-13.523892402648926],["▁Diva",-13.523907661437988],["збе",-13.52392864227295],["පල",-13.523953437805176],["▁hîn",-13.523953437805176],["020",-13.523959159851074],["▁ගල්",-13.523963928222656],["▁CMS",-13.523974418640137],["▁හැද",-13.524001121520996],["ISHA",-13.524008750915527],["बेला",-13.524024963378906],["▁ໂອ",-13.524048805236816],["▁representant",-13.524053573608398],["מנכ",-13.524066925048828],["▁guruh",-13.524072647094728],["▁bizonyít",-13.524077415466309],["halo",-13.524107933044434],["जना",-13.524118423461914],["▁ມື",-13.524123191833496],["▁vyhra",-13.524147033691406],["▁대상으로",-13.524158477783203],["ájában",-13.524163246154783],["止め",-13.524163246154783],["ባዊ",-13.52416706085205],["folie",-13.524175643920898],["しまいます",-13.52419662475586],["лз",-13.524205207824709],["AÇÃO",-13.524211883544922],["信号",-13.524214744567873],["anayo",-13.524242401123049],["essentiel",-13.52425479888916],["▁asking",-13.524263381958008],["▁نیٹ",-13.524264335632324],["▁shtyp",-13.524295806884766],["▁গ্রহণ",-13.524330139160156],["▁ଘଣ୍ଟା",-13.524348258972168],["▁kabeh",-13.524351119995115],["▁tingut",-13.524368286132812],["jevo",-13.524371147155762],["فروش",-13.524384498596191],["ពិ",-13.524388313293455],["▁adolescentes",-13.524391174316406],["▁tätig",-13.524431228637695],["▁түүнийг",-13.52443504333496],["pande",-13.524436950683594],["ರ್ಯ",-13.524443626403809],["▁Compar",-13.524455070495604],["▁olmaya",-13.524455070495604],["▁ideea",-13.524459838867188],["मिट",-13.524487495422363],["ାପ",-13.524555206298828],["വഴി",-13.524558067321776],["በታ",-13.52457332611084],["jine",-13.524575233459473],["яли",-13.524591445922852],["nijih",-13.5245943069458],["▁rastin",-13.524638175964355],["▁כוח",-13.524652481079102],["▁בתו",-13.524664878845217],["ភាគ",-13.524677276611328],["plassen",-13.52468204498291],["▁Imi",-13.524688720703123],["్రా",-13.524696350097656],["▁eure",-13.524713516235352],["的三",-13.524715423583984],["▁абарон",-13.524730682373049],["শ্ব",-13.52473258972168],["▁другому",-13.524737358093262],["▁Kommer",-13.52475357055664],["ಕಿಂಗ್",-13.524754524230955],["▁ቢሆንም",-13.52475643157959],["▁vitet",-13.52476692199707],["▁domhan",-13.524774551391602],["▁nefnd",-13.524781227111816],["ovci",-13.524806022644045],["▁Spletn",-13.524815559387209],["▁métodos",-13.524829864501951],["▁økonomiske",-13.5248384475708],["▁табе",-13.524840354919434],["яни",-13.52485466003418],["ীত",-13.524868965148926],["▁Tarihi",-13.524869918823242],["▁lepsze",-13.524871826171877],["▁داغ",-13.524874687194824],["১৬",-13.52487850189209],["▁بگیر",-13.524883270263672],["ર્ન",-13.52488899230957],["▁მზად",-13.524901390075684],["▁musikal",-13.524911880493164],["▁Ajuntament",-13.524914741516112],["捧",-13.524935722351074],["▁boldog",-13.524941444396973],["▁मात्रा",-13.524941444396973],["▁Κύπρο",-13.524946212768556],["▁ලක්ෂ
",-13.5249605178833],["▁pozostał",-13.52497386932373],["ほぼ",-13.524982452392578],["স্প",-13.524984359741213],["bøl",-13.52498722076416],["▁власть",-13.52498722076416],["ráðuneyti",-13.524991035461426],["▁Llobregat",-13.524991989135742],["▁Taarifa",-13.524991989135742],["▁leedahay",-13.524991989135742],["▁mempertahankan",-13.524991989135742],["▁nhấn",-13.524991989135742],["▁sredstev",-13.524991989135742],["▁Πέμπτη",-13.524991989135742],["▁ветеринар",-13.524991989135742],["▁ініціатив",-13.524991989135742],["▁կուսակցության",-13.524991989135742],["▁مسابقه",-13.524991989135742],["▁ସାଧାରଣ",-13.524991989135742],["▁മുതൽ",-13.524991989135742],["▁maelezo",-13.52499294281006],["▁szenved",-13.52499294281006],["▁красавіка",-13.524993896484377],["▁Diputació",-13.524994850158691],["▁האירוע",-13.524995803833008],["▁pământ",-13.524996757507324],["▁Forskning",-13.52499771118164],["▁žádný",-13.52499771118164],["▁Apesar",-13.524999618530272],["▁શાળા",-13.524999618530272],["▁వ్యాపార",-13.525001525878906],["▁२०१६",-13.525002479553224],["Microsoft",-13.525004386901855],["▁Yardım",-13.525005340576172],["▁заключается",-13.525012016296388],["▁pronaći",-13.525014877319336],["▁traballadores",-13.525014877319336],["▁വിദേശ",-13.525017738342283],["▁اکرم",-13.525018692016602],["▁Министарства",-13.525020599365234],["ຟຣີ",-13.5250244140625],["čítať",-13.52504539489746],["▁аудан",-13.525050163269045],["messu",-13.525053977966309],["methode",-13.52505588531494],["▁وانگر",-13.525056838989258],["மது",-13.525057792663574],["▁Daawo",-13.525065422058104],["▁otomobil",-13.525076866149902],["фабрик",-13.525096893310549],["▁першого",-13.525104522705078],["sikia",-13.525106430053713],["lektion",-13.525118827819824],["▁recibe",-13.525124549865724],["▁بحسب",-13.525136947631836],["▁gobol",-13.525139808654783],["▁پہلا",-13.525150299072266],["▁uiteindelijk",-13.525153160095217],["▁tayyorlash",-13.525154113769531],["▁platser",-13.525155067443848],["▁медициналық",-13.525158882141112],["進口",-13.525158882141112],["▁Jimmy",-13.525166511535645],["▁මුළු",-13.525168418884276],["▁Investi",-13.525175094604492],["▁kumma",-13.525178909301758],["▁беларускі",-13.525200843811035],["▁uzaq",-13.52521514892578],["აჩ",-13.525221824645996],["▁võta",-13.525224685668944],["ահայ",-13.525240898132324],["▁매일",-13.525243759155272],["▁Statut",-13.525291442871094],["▁geluk",-13.525291442871094],["▁Teu",-13.52529239654541],["▁fauna",-13.525299072265623],["ಖಾ",-13.525311470031738],["▁segundos",-13.525325775146484],["▁HOT",-13.525338172912598],["▁Paşa",-13.52534008026123],["▁disposto",-13.525343894958496],["успешни",-13.525345802307127],["kutan",-13.525354385375977],["TG",-13.52536678314209],["Mai",-13.525367736816406],["▁کوریا",-13.525383949279783],["▁Psy",-13.52538776397705],["▁rekre",-13.52540397644043],["▁ግንቦት",-13.525404930114746],["▁bâ",-13.525416374206545],["▁សាលា",-13.52542495727539],["▁tydens",-13.525425910949709],["енную",-13.52544116973877],["ouche",-13.525447845458984],["▁intervention",-13.525483131408691],["ZIA",-13.525507926940918],["היה",-13.525519371032717],["ተረ",-13.52556037902832],["၁၆",-13.525569915771484],["▁porro",-13.525590896606444],["▁와",-13.525627136230469],["▁kartais",-13.525629043579102],["ებსა",-13.525632858276367],["wide",-13.525633811950684],["▁행정",-13.525640487670898],["▁رە",-13.52566909790039],["▁үзэх",-13.525675773620604],["▁الفن",-13.525683403015137],["▁마케팅",-13.525683403015137],["▁Фін",-13.525714874267578],["▁kohës",-13.525715827941896],["ていました",-13.525717735290527],["▁గంటల",-13.525745391845703],["▁servidor",-13.525752067565918],["Bor"
,-13.525760650634766],["▁emosi",-13.525760650634766],["▁problēmas",-13.525797843933104],["▁Kav",-13.52583122253418],["▁глаз",-13.525837898254396],["موقع",-13.525839805603027],["▁mux",-13.525845527648926],["▁родитељ",-13.525845527648926],["▁Mercado",-13.525848388671877],["▁ehtiyac",-13.525863647460938],["▁deinen",-13.525867462158203],["▁chào",-13.525890350341797],["博客",-13.525906562805176],["▁likt",-13.525908470153809],["▁γενν",-13.525908470153809],["▁mikil",-13.525914192199709],["माल",-13.525922775268556],["āns",-13.525923728942873],["ଗଲେ",-13.525928497314451],["▁Farg",-13.525930404663086],["▁müharibə",-13.52593231201172],["▁hybrid",-13.52597427368164],["алося",-13.52597713470459],["▁fawr",-13.525978088378906],["▁jutro",-13.525997161865234],["▁favorece",-13.52599811553955],["▁cantidade",-13.526007652282717],["▁rasite",-13.52601718902588],["technisch",-13.526020050048828],["hiq",-13.52602481842041],["иків",-13.526029586791992],["հատ",-13.526037216186523],["▁Cannes",-13.526040077209473],["▁uskon",-13.526043891906738],["stehende",-13.526058197021484],["▁բարե",-13.526068687438965],["▁tugev",-13.526079177856444],["▁éviter",-13.526089668273926],["▁vredno",-13.52609157562256],["▁ברק",-13.52609157562256],["كوم",-13.526093482971191],["ମର",-13.52609634399414],["▁AIDS",-13.52612018585205],["ացավ",-13.52614688873291],["ลาก",-13.526168823242188],["issimum",-13.526169776916504],["▁وہی",-13.526177406311035],["ಸನ್",-13.526199340820312],["まれ",-13.526208877563477],["▁procure",-13.526239395141602],["jille",-13.526240348815918],["非常好",-13.526269912719728],["化學",-13.526270866394045],["▁Коп",-13.526284217834473],["tettem",-13.526296615600586],["▁גן",-13.526301383972168],["▁ഡെ",-13.526312828063965],["▁ESO",-13.526320457458496],["нуло",-13.52632999420166],["▁potrebujete",-13.526331901550291],["▁багаж",-13.526331901550291],["Cola",-13.526334762573242],["▁Иса",-13.526341438293455],["१८",-13.52635669708252],["ल्प",-13.526369094848633],["▁בספר",-13.526371955871582],["▁старш",-13.526373863220217],["▁evo",-13.526381492614746],["▁qofka",-13.52639389038086],["▁Apartment",-13.526394844055176],["▁privateco",-13.526397705078123],["lén",-13.526402473449709],["στες",-13.52641773223877],["▁brasileiros",-13.526430130004885],["▁193",-13.526453018188477],["▁பாதுகாப்பு",-13.526460647583008],["▁predstavil",-13.526474952697754],["退出",-13.52647590637207],["▁কত",-13.526480674743652],["ştî",-13.52649211883545],["▁kerap",-13.526521682739258],["র্ন",-13.526528358459473],["▁aldeko",-13.52653980255127],["▁Biên",-13.52654266357422],["▁accoglie",-13.526543617248535],["▁בז",-13.526565551757812],["▁tvil",-13.526570320129396],["▁अगस्त",-13.526594161987305],["orio",-13.526611328125],["▁conflit",-13.52663516998291],["▁аутора",-13.526644706726074],["▁byste",-13.52664566040039],["▁asmens",-13.526653289794922],["水分",-13.526655197143556],["rūs",-13.52670669555664],["▁зміст",-13.52672004699707],["ХУ",-13.5267333984375],["ಾಂಗ",-13.5267333984375],["mudi",-13.526735305786133],["▁întreg",-13.526738166809082],["▁паліт",-13.52674388885498],["▁ulic",-13.526753425598145],["즐",-13.526774406433104],["գին",-13.52678108215332],["نار",-13.526782989501951],["▁వీర",-13.526790618896484],["▁entendu",-13.526795387268066],["债",-13.526803016662598],["▁кызык",-13.526820182800291],["▁Bolig",-13.526823043823242],["▁ହିଁ",-13.526829719543455],["▁হওয়া",-13.526835441589355],["匹",-13.526841163635254],["▁kêu",-13.526853561401367],["帶領",-13.52686595916748],["ਤਮ",-13.526866912841797],["火箭",-13.526870727539062],["▁المسؤول",-13.526893615722656],["สืบ",-13.526897430419922],["ستراتيجي"
,-13.526900291442873],["พรีเมียร์ลีก",-13.526900291442873],["▁dispoñible",-13.526900291442873],["▁múltiples",-13.526900291442873],["▁nộp",-13.526900291442873],["▁pertemuan",-13.526900291442873],["▁társadalom",-13.526900291442873],["▁întâmplă",-13.526900291442873],["▁Міністерства",-13.526900291442873],["▁интервју",-13.526900291442873],["▁следеће",-13.526900291442873],["▁церкви",-13.526900291442873],["▁բազմաթիվ",-13.526900291442873],["▁ուրիշ",-13.526900291442873],["▁بنسبة",-13.526900291442873],["▁आत्महत्या",-13.526900291442873],["▁घायल",-13.526900291442873],["▁नहुने",-13.526900291442873],["▁बिरामी",-13.526900291442873],["▁বিশ্বকাপ",-13.526900291442873],["▁algemeen",-13.526901245117188],["▁bâtiment",-13.526901245117188],["▁cuvânt",-13.526901245117188],["▁සභාපති",-13.526902198791504],["🙏",-13.526904106140137],["▁csillag",-13.52690601348877],["▁μάτια",-13.52690601348877],["▁महोत्सव",-13.52690601348877],["▁هیڅ",-13.526906967163086],["▁ગયું",-13.526906967163086],["▁felice",-13.526907920837402],["▁ներկայացնում",-13.526910781860352],["▁оптуж",-13.526911735534668],["▁진짜",-13.526911735534668],["▁σκληρ",-13.526914596557615],["▁նշված",-13.526917457580566],["▁magazyn",-13.526920318603516],["▁බිහි",-13.526920318603516],["▁Polici",-13.526930809020996],["▁மோடி",-13.526931762695312],["▁መሬት",-13.526936531066896],["▁našla",-13.526941299438477],["▁δείτε",-13.526942253112791],["▁पशु",-13.52694320678711],["hetjük",-13.52694606781006],["▁hinge",-13.526947021484377],["▁اداري",-13.526948928833008],["▁memegang",-13.52695083618164],["▁opravi",-13.526954650878906],["▁ഞാനും",-13.526955604553224],["餐厅",-13.52696132659912],["mån",-13.526965141296388],["▁توفير",-13.526966094970703],["專案",-13.526968002319336],["▁Σα",-13.526973724365234],["▁ସ୍ଥାନରେ",-13.526979446411133],["▁понуда",-13.526986122131348],["▁اوسه",-13.526986122131348],["▁شمیر",-13.526989936828612],["ulet",-13.527003288269045],["▁Energe",-13.527005195617676],["▁aceita",-13.52700901031494],["▁задржана",-13.527012825012209],["▁sær",-13.527017593383787],["▁लाग्छ",-13.527026176452637],["▁proberen",-13.527033805847168],["ympäristö",-13.527042388916016],["אישור",-13.527043342590332],["ుకోండి",-13.527060508728027],["▁تشکر",-13.527084350585938],["▁İşlər",-13.527087211608888],["brott",-13.527092933654783],["▁causes",-13.527118682861328],["▁모습을",-13.527121543884276],["▁طويلة",-13.527125358581545],["▁praticamente",-13.52713680267334],["▁xx",-13.527141571044922],["▁appartamento",-13.527165412902832],["▁Cerc",-13.52717113494873],["▁Δρ",-13.527176856994627],["ологія",-13.527180671691896],["▁trouverez",-13.527181625366213],["▁Приказ",-13.527183532714844],["▁presidi",-13.52719020843506],["▁tegema",-13.527195930480955],["▁ноќ",-13.52721118927002],["izácia",-13.527229309082031],["wka",-13.527230262756348],["▁милион",-13.527264595031738],["▁никого",-13.527283668518066],["▁인식",-13.527292251586914],["үт",-13.52729320526123],["▁vjetër",-13.527351379394531],["▁piese",-13.527365684509276],["▁lebt",-13.527377128601074],["കൂ",-13.527393341064451],["▁impianti",-13.527408599853516],["▁gjer",-13.527436256408691],["▁ଦେଲେ",-13.527442932128906],["שול",-13.52744960784912],["▁Selasa",-13.527461051940918],["、5",-13.52746295928955],["▁inniu",-13.5274658203125],["▁лимон",-13.527487754821776],["ប្រើ",-13.527488708496094],["▁ڈو",-13.52749729156494],["▁وتح",-13.52750015258789],["λόγου",-13.527501106262209],["▁vreemde",-13.527583122253418],["TAM",-13.527585983276367],["▁каком",-13.52760410308838],["▁guda",-13.527606010437012],["▁başlamış",-13.52764892578125],["▁jugu",-13.527653694152832],["ιότητα",-13.5
27654647827148],["가능",-13.527669906616213],["itinerari",-13.527694702148438],["▁ناو",-13.527703285217283],["▁Vanaf",-13.527714729309082],["歴史",-13.527716636657717],["▁одигра",-13.527722358703612],["Mes",-13.527724266052246],["材質",-13.52775764465332],["▁sugan",-13.527771949768066],["ECE",-13.527775764465332],["реал",-13.527779579162598],["вјет",-13.527799606323242],["▁Pili",-13.527819633483888],["иад",-13.527828216552734],["▁მიზეზი",-13.52783203125],["▁Čern",-13.527851104736328],["▁جزو",-13.527851104736328],["tiamo",-13.527857780456545],["▁cite",-13.52786350250244],["▁ekonomika",-13.52786922454834],["प्पा",-13.527884483337402],["ritur",-13.52790355682373],["增加了",-13.52792263031006],["▁масла",-13.52793025970459],["сущ",-13.527941703796388],["▁qarşısını",-13.527947425842283],["▁Atas",-13.527952194213867],["▁Ilyen",-13.527960777282717],["දැ",-13.527966499328612],["▁latte",-13.527969360351562],["▁clár",-13.527976989746094],["テン",-13.527983665466309],["▁gazdaság",-13.527996063232422],["연구",-13.527998924255373],["滞",-13.52801513671875],["▁хүлээ",-13.528034210205078],["▁bestimmte",-13.528036117553713],["ലില്",-13.52804946899414],["ódott",-13.528072357177734],["▁malaysia",-13.528112411499023],["▁בינ",-13.52812385559082],["▁Igra",-13.528128623962402],["▁Treball",-13.528128623962402],["▁objekta",-13.528145790100098],["▁velikih",-13.52817153930664],["▁ลอง",-13.528176307678224],["ვალი",-13.52818489074707],["▁ઘરે",-13.52818775177002],["ल्यानंतर",-13.528203964233398],["如有",-13.52821445465088],["▁bicikl",-13.528225898742676],["ผู้ใหญ่",-13.528226852416992],["▁fleur",-13.528233528137209],["▁шок",-13.528233528137209],["ሬት",-13.528239250183104],["▁मालिक",-13.52824592590332],["▁siúl",-13.528260231018066],["ことになります",-13.528260231018066],["Cuál",-13.528276443481444],["лагч",-13.528278350830078],["عدة",-13.528307914733888],["▁partem",-13.528311729431152],["шња",-13.528322219848633],["मास",-13.528327941894531],["▁Visita",-13.528359413146973],["▁حاکم",-13.528364181518556],["▁طرق",-13.528368949890137],["观念",-13.528377532958984],["▁reported",-13.528404235839844],["▁varsta",-13.528409004211426],["▁contratos",-13.528436660766602],["▁ulu",-13.528436660766602],["וקים",-13.52843952178955],["▁Коментар",-13.528457641601562],["队的",-13.528457641601562],["pest",-13.528461456298828],["▁Yleis",-13.52847671508789],["▁esot",-13.528499603271484],["▁najlepszy",-13.528499603271484],["▁ڍ",-13.528512954711914],["ոպ",-13.528515815734863],["▁taq",-13.52853012084961],["▁jednoho",-13.528538703918455],["ANCE",-13.528573036193848],["Аз",-13.528587341308594],["pania",-13.528605461120604],["ეპ",-13.528614044189451],["▁ಸ್ಟ",-13.52863311767578],["▁vatandaşlar",-13.528639793395996],["國外",-13.528645515441896],["▁Scu",-13.52864933013916],["գու",-13.528651237487791],["tywne",-13.528663635253906],["▁ေကာင္း",-13.528672218322754],["▁Економ",-13.52867317199707],["▁bendro",-13.52868366241455],["▁международно",-13.528706550598145],["▁ONG",-13.528716087341309],["linie",-13.52873706817627],["▁Lorsqu",-13.528752326965332],["遷",-13.52875804901123],["glejte",-13.52876091003418],["▁Народна",-13.528767585754396],["敦",-13.528773307800291],["▁conosce",-13.528775215148926],["公斤",-13.52879524230957],["▁verificar",-13.528800010681152],["゚",-13.528806686401367],["▁veľko",-13.528807640075684],["ဖြင့်",-13.52880859375],["ກໍານົດ",-13.528810501098633],["ຮ້າຍ",-13.528810501098633],["듀",-13.528810501098633],["ௌ",-13.528812408447266],["▁ocasiones",-13.528812408447266],["▁született",-13.528812408447266],["▁vähintään",-13.528812408447266],["▁регулярно",-13.528812408447266],["▁
ইতিহাস",-13.528812408447266],["▁ಹೋರಾಟ",-13.528812408447266],["겁",-13.528812408447266],["▁uongozi",-13.528813362121582],["▁حصول",-13.528813362121582],["▁قاتناش",-13.528813362121582],["ဓာတ်",-13.528815269470217],["▁jedinstven",-13.528815269470217],["▁परमेश्वर",-13.528816223144531],["▁настоящего",-13.528817176818848],["阿里",-13.528817176818848],["▁تقاضا",-13.528818130493164],["▁berbahaya",-13.52881908416748],["▁ପ୍ରଦାନ",-13.528822898864746],["▁Michelle",-13.528827667236328],["▁promjene",-13.528830528259276],["▁Beyond",-13.528833389282228],["▁jälgi",-13.528834342956545],["▁ਅੱਗੇ",-13.528836250305176],["▁අරමුණ",-13.528837203979492],["▁त्यांची",-13.528839111328123],["▁мыкты",-13.52884292602539],["▁айқын",-13.528844833374023],["▁Orbán",-13.528846740722656],["▁dzīvi",-13.528846740722656],["▁mokslo",-13.528846740722656],["▁Reklama",-13.528849601745604],["θλ",-13.528850555419922],["▁реакция",-13.528851509094238],["▁özəl",-13.528854370117188],["vienība",-13.528855323791504],["▁Belarus",-13.528857231140137],["教會",-13.52885913848877],["بعة",-13.528865814208984],["ԻՐ",-13.528870582580566],["▁lovely",-13.528879165649414],["▁strategy",-13.528885841369627],["▁कोणी",-13.528890609741213],["▁কাম",-13.528894424438477],["▁phrase",-13.528912544250488],["▁Magistr",-13.528919219970703],["▁grazas",-13.528923034667969],["sexuell",-13.528926849365234],["▁snage",-13.528929710388184],["▁атрымаць",-13.528934478759766],["▁preț",-13.528943061828612],["▁تعتبر",-13.52896213531494],["▁orkestr",-13.52896785736084],["▁Keluarga",-13.52897834777832],["▁salón",-13.52897834777832],["intä",-13.528980255126951],["μαρτ",-13.528986930847168],["▁πολιτικό",-13.528989791870115],["▁орна",-13.529007911682127],["看似",-13.529012680053713],["▁cuidar",-13.529016494750977],["turnering",-13.529029846191406],["▁გუნდ",-13.52903175354004],["▁coklat",-13.52904224395752],["cicli",-13.529047966003418],["riippu",-13.529064178466797],["ಕೂ",-13.529064178466797],["▁keram",-13.52910041809082],["▁tagjai",-13.52911376953125],["▁øverst",-13.529119491577148],["▁използването",-13.529125213623049],["redskap",-13.529128074645996],["▁ଅପ",-13.529130935668944],["נאָ",-13.52914047241211],["▁dasturi",-13.529150009155272],["▁청년",-13.529162406921388],["myndigheten",-13.529166221618652],["▁রাখ",-13.529167175292969],["▁grek",-13.529170036315918],["▁ഇസ്",-13.529192924499512],["өнө",-13.529261589050291],["▁낮",-13.52926254272461],["んですよ",-13.52926254272461],["▁Сири",-13.529276847839355],["र्या",-13.529278755187988],["▁வாழ",-13.529291152954102],["▁پري",-13.52931308746338],["▁kvalifikacij",-13.52932357788086],["▁Вашето",-13.529324531555176],["▁Mehdi",-13.52934741973877],["skutečn",-13.5293550491333],["▁seguit",-13.529356956481934],["ruoka",-13.5293607711792],["क्षी",-13.529373168945312],["▁агро",-13.529393196105955],["మత",-13.529410362243652],["▁මැද",-13.529410362243652],["ज्य",-13.529414176940918],["▁הלו",-13.529417991638184],["▁остали",-13.529422760009766],["▁sykje",-13.529424667358398],["▁rapat",-13.529425621032717],["nței",-13.529457092285156],["分野",-13.529458999633787],["▁jogurt",-13.529484748840332],["▁ئاتا",-13.529500961303713],["▁እያለ",-13.529502868652344],["вантаження",-13.529544830322266],["▁asztal",-13.529553413391112],["▁лошо",-13.529559135437012],["EMP",-13.529562950134276],["▁сусед",-13.52956771850586],["▁heilsu",-13.529570579528809],["ለመ",-13.529585838317873],["▁редко",-13.52959442138672],["▁Близ",-13.529608726501465],["▁направил",-13.529637336730955],["ėj",-13.529653549194336],["▁Arap",-13.529692649841309],["वेश",-13.52971076965332],["▁Сирия",-13.529749870300291],["어
야",-13.529767036437988],["▁друку",-13.52976894378662],["▁redzēt",-13.52978515625],["▁Назва",-13.529802322387695],["▁переклад",-13.52981662750244],["▁šat",-13.529829025268556],["klaring",-13.529858589172363],["▁важко",-13.529889106750488],["turvallisuus",-13.52989387512207],["▁ius",-13.529900550842283],["legger",-13.529903411865234],["уша",-13.529930114746094],["▁situations",-13.529939651489258],["▁mellorar",-13.529973983764648],["සිරි",-13.529994010925291],["durch",-13.530010223388672],["כניס",-13.530019760131836],["Pad",-13.530020713806152],["▁obje",-13.530035018920898],["ท่อ",-13.53005599975586],["▁ansigt",-13.53005599975586],["▁chlo",-13.53006076812744],["我们会",-13.530089378356934],["ијског",-13.530101776123049],["ājas",-13.530120849609377],["▁მოვი",-13.53014087677002],["▁ପ୍ରାୟ",-13.530149459838867],["▁mîna",-13.530157089233398],["telling",-13.530171394348145],["▁buluş",-13.53017520904541],["▁durumunda",-13.530194282531738],["の問題",-13.530196189880373],["▁dřev",-13.530200958251951],["▁послу",-13.5302152633667],["てくれる",-13.53022003173828],["Log",-13.530235290527344],["▁vehículo",-13.530237197875977],["ingizni",-13.530264854431152],["▁රමාණය",-13.530266761779783],["▁television",-13.530268669128418],["лён",-13.53027057647705],["▁vedel",-13.530271530151367],["ឯ",-13.53028392791748],["▁дэх",-13.530287742614746],["▁τύπου",-13.530290603637695],["▁ortalama",-13.530301094055176],["jangan",-13.530303955078123],["▁Torres",-13.530303955078123],["▁maty",-13.530320167541504],["▁sayur",-13.53032112121582],["▁ചോദിച്ചു",-13.53033447265625],["hán",-13.530348777770996],["▁oplever",-13.530354499816896],["▁futbolçu",-13.530367851257324],["▁мереж",-13.530373573303224],["බේ",-13.53040599822998],["овер",-13.530407905578612],["డె",-13.530415534973145],["▁інтер",-13.530427932739258],["▁апошні",-13.530454635620115],["▁nárok",-13.530455589294434],["跟着",-13.530467987060549],["ивно",-13.530484199523926],["▁δισ",-13.53048610687256],["עיני",-13.530489921569824],["èxit",-13.530500411987305],["pravil",-13.53051471710205],["نگر",-13.530525207519531],["द्व",-13.53054141998291],["▁الأخير",-13.530559539794922],["▁pong",-13.530574798583984],["▁apartman",-13.5305757522583],["ksiin",-13.530576705932615],["▁៕",-13.530576705932615],["▁0,6",-13.530583381652832],["▁அறிவிப்பு",-13.530585289001465],["▁kolory",-13.530595779418944],["makers",-13.53060817718506],["▁sister",-13.53061294555664],["的变化",-13.530632019042969],["តូច",-13.530667304992676],["最快",-13.530667304992676],["我们就",-13.530668258666992],["审计",-13.530673027038574],["▁التس",-13.530685424804688],["verkosto",-13.530689239501951],["▁جنگل",-13.530696868896484],["bira",-13.5306978225708],["腾讯",-13.5306978225708],["辺",-13.530701637268066],["президент",-13.530705451965332],["▁өзгөр",-13.530713081359863],["▁ordon",-13.530720710754396],["慈善",-13.530723571777344],["▁Sähkö",-13.53072452545166],["ฝรั่ง",-13.530726432800291],["฿",-13.530728340148926],["▁ietekmē",-13.530728340148926],["▁različitih",-13.530728340148926],["▁مؤتمر",-13.530728340148926],["▁مشہور",-13.530728340148926],["▁ہفتے",-13.530728340148926],["▁ਵਿਭਾਗ",-13.530728340148926],["▁ତେଣୁ",-13.530728340148926],["▁బ్లాగు",-13.530728340148926],["ជំនាញ",-13.530729293823242],["▁spletno",-13.530729293823242],["▁предприятие",-13.530729293823242],["▁kelihatan",-13.530731201171877],["▁záchran",-13.530731201171877],["▁маңызы",-13.530731201171877],["ləmək",-13.530732154846191],["▁പ്രതികരണ",-13.530732154846191],["θούμε",-13.53073501586914],["▁اختلال",-13.53073501586914],["ดอกไม้",-13.530735969543455],["▁وكالة",-13.530736923217772],["▁적극",-13.
530736923217772],["▁караганда",-13.53073787689209],["▁հանդիպում",-13.53074073791504],["دانی",-13.530760765075684],["人で",-13.530767440795898],["▁Dziś",-13.530768394470217],["▁Bahçe",-13.53077220916748],["▁באינטרנט",-13.530773162841797],["▁lệnh",-13.530776023864746],["▁Current",-13.530778884887695],["▁жашы",-13.53078269958496],["▁அன்று",-13.53078269958496],["testület",-13.530783653259276],["两岸",-13.530783653259276],["maqla",-13.530794143676758],["ленд",-13.530797004699709],["▁ወጣቶች",-13.530813217163086],["▁kube",-13.530814170837402],["▁टै",-13.53081512451172],["▁להש",-13.530817985534668],["▁આપો",-13.530835151672363],["▁sağlamak",-13.530841827392578],["▁ਉਪ",-13.530861854553224],["ຂຽນ",-13.530868530273438],["▁الصحية",-13.530877113342283],["▁projektoj",-13.530887603759766],["▁securitate",-13.530896186828612],["▁जुलाई",-13.530896186828612],["134",-13.53089714050293],["گذاری",-13.530906677246094],["多名",-13.530924797058104],["▁Поста",-13.530962944030762],["▁etmeye",-13.530975341796877],["▁بالکل",-13.530991554260254],["▁уголовно",-13.53101634979248],["srapport",-13.531020164489746],["▁veden",-13.53104019165039],["▁அப்பா",-13.531041145324709],["▁රශ්නය",-13.531044960021973],["▁рег",-13.531050682067873],["ान्तर",-13.531071662902832],["زمان",-13.531084060668944],["▁жагы",-13.53109359741211],["▁Miksi",-13.531105041503906],["▁innovador",-13.531110763549805],["saṃ",-13.53111743927002],["することを",-13.531118392944336],["TUL",-13.531121253967283],["▁visuel",-13.531123161315918],["võistlus",-13.531133651733398],["▁사용하는",-13.531136512756348],["▁forskel",-13.531139373779297],["▁huga",-13.53116226196289],["качествен",-13.531164169311523],["▁tỉ",-13.531167030334473],["▁بولا",-13.531182289123535],["▁Договор",-13.531222343444824],["చిన",-13.531230926513672],["▁светло",-13.531234741210938],["Вы",-13.53125],["▁непознат",-13.531255722045898],["▁uporabljajo",-13.531271934509276],["ទេស",-13.53127384185791],["पल",-13.531307220458984],["▁편의",-13.531310081481934],["uðum",-13.531320571899414],["▁සිනමා",-13.531322479248049],["▁намерите",-13.531327247619627],["▁custom",-13.531330108642578],["lberg",-13.531373977661133],["បើក",-13.53138542175293],["وصي",-13.531391143798828],["▁kvalitu",-13.531394004821776],["▁atklāj",-13.531401634216309],["▁eleme",-13.53140354156494],["▁ئاش",-13.53140926361084],["▁แม้",-13.531414985656738],["bamba",-13.531420707702637],["▁mệt",-13.53142261505127],["▁учениците",-13.53142547607422],["▁wareeg",-13.531426429748535],["▁imaginar",-13.531429290771484],["polni",-13.531444549560549],["▁analysis",-13.531445503234863],["boken",-13.531450271606444],["ತೆಗೆ",-13.53147315979004],["▁देखील",-13.531474113464355],["▁indicat",-13.531478881835938],["▁contributo",-13.53149700164795],["ڌو",-13.531501770019531],["▁tõsi",-13.531506538391112],["▁vostri",-13.531512260437012],["дува",-13.53151798248291],["▁menambahkan",-13.531523704528809],["زور",-13.53152561187744],["▁species",-13.531582832336426],["인가",-13.531582832336426],["▁ເຂົາ",-13.531593322753906],["หนา",-13.531617164611816],["たいと思います",-13.531628608703612],["▁제한",-13.531638145446776],["БС",-13.53164291381836],["ເດ",-13.531657218933104],["νονται",-13.531661987304688],["hlut",-13.53169059753418],["ශී",-13.531709671020508],["▁šk",-13.531720161437988],["▁Informasjon",-13.531767845153809],["▁Fay",-13.531785011291504],["выкана",-13.53178882598877],["モン",-13.531793594360352],["izace",-13.531818389892578],["▁видів",-13.531830787658691],["ومی",-13.531853675842283],["▁impress",-13.531864166259766],["खिल",-13.531867027282717],["▁verdadeiro",-13.531886100769045],["你不",-13.5318908
69140623],["ಾಂತ",-13.53189468383789],["ணும்",-13.531963348388672],["ləmə",-13.53199577331543],["दौ",-13.532037734985352],["▁فروخت",-13.532089233398438],["തിനാല്",-13.532106399536133],["පෙන",-13.532130241394045],["▁ଚାଷୀ",-13.53215217590332],["stehen",-13.532180786132812],["▁Spania",-13.53218936920166],["▁gáz",-13.532193183898926],["▁لہ",-13.532207489013672],["مىغان",-13.532217025756836],["▁kors",-13.532235145568848],["ằng",-13.53223705291748],["▁колег",-13.532258987426758],["▁الگ",-13.532273292541504],["▁Макс",-13.53227424621582],["▁llamado",-13.532275199890137],["▁временно",-13.532285690307615],["പേ",-13.53231716156006],["oidh",-13.532320976257324],["ունի",-13.532323837280272],["ឡូ",-13.532342910766602],["roku",-13.532355308532717],["ませ",-13.532357215881348],["punerea",-13.53235912322998],["seitig",-13.532383918762209],["▁единиц",-13.532392501831056],["▁διατ",-13.532401084899902],["ប៉ា",-13.532402992248535],["හම",-13.532403945922852],["▁Slut",-13.532413482666016],["animation",-13.532418251037598],["визија",-13.532426834106444],["ජු",-13.532428741455078],["▁Enna",-13.532444953918455],["▁Johanna",-13.53246021270752],["▁older",-13.532463073730469],["Code",-13.532496452331545],["來了",-13.532503128051758],["owaną",-13.532504081726074],["▁juegos",-13.532505989074709],["หมา",-13.532512664794922],["▁жазу",-13.532512664794922],["wandel",-13.532514572143556],["armen",-13.53251838684082],["▁කැප",-13.532526969909668],["昏",-13.532540321350098],["▁Termo",-13.532554626464844],["▁parco",-13.53256130218506],["ettava",-13.53256893157959],["medicin",-13.53258991241455],["कालीन",-13.53258991241455],["▁pisze",-13.532594680786133],["飼",-13.53260326385498],["▁жыйын",-13.532614707946776],["▁Μπορεί",-13.532621383666992],["豊",-13.532631874084473],["悅",-13.532638549804688],["จุดด่างดํา",-13.532647132873535],["▁Neadaptita",-13.532647132873535],["▁müəllif",-13.532647132873535],["▁neustále",-13.532647132873535],["▁ovšem",-13.532647132873535],["▁paslaugas",-13.532647132873535],["▁söhbət",-13.532647132873535],["▁октобра",-13.532647132873535],["▁септембра",-13.532647132873535],["▁спектакл",-13.532647132873535],["▁توزیع",-13.532647132873535],["▁ویندوز",-13.532647132873535],["ብሔር",-13.532648086547852],["▁kizárólag",-13.532648086547852],["▁ರಿಯಲ್",-13.532648086547852],["ကူညီ",-13.532649040222168],["▁Фестивал",-13.532649040222168],["њује",-13.532649993896484],["▁ହତ୍ୟା",-13.532649993896484],["▁اعتقاد",-13.532651901245115],["ฆ่า",-13.532652854919434],["▁ସ୍କୁଲ",-13.532652854919434],["ซ้ํา",-13.532654762268066],["▁එවැනි",-13.5326566696167],["兩岸",-13.532657623291016],["▁prekybos",-13.53266143798828],["έλη",-13.532662391662598],["▁උසස්",-13.532663345336914],["▁memória",-13.532666206359863],["▁Tòa",-13.532668113708496],["íos",-13.532670021057127],["▁કેન્દ્ર",-13.532674789428713],["▁бажання",-13.53267765045166],["▁турнір",-13.53268051147461],["▁όσους",-13.53268337249756],["▁gleichzeitig",-13.53269100189209],["▁شأن",-13.532691955566406],["▁ছোট",-13.53269386291504],["▁valet",-13.53269863128662],["▁цвета",-13.532699584960938],["▁hivatalos",-13.53270149230957],["▁ક્ર",-13.532703399658203],["وبي",-13.53270435333252],["▁tatlı",-13.532709121704102],["wiedza",-13.532710075378418],["віднов",-13.532710075378418],["▁Piazza",-13.532719612121582],["▁Đi",-13.532721519470217],["▁ਹੁੰਦੀ",-13.532723426818848],["කඩ",-13.532729148864746],["にとっては",-13.532736778259276],["▁Acht",-13.532740592956545],["▁ഇനിയും",-13.532756805419922],["▁senjata",-13.53276252746582],["တုိင္း",-13.532767295837402],["▁crítico",-13.53276824951172],["▁کمیته",-13.532771110534668],["▁l
utego",-13.5327730178833],["▁በሆነ",-13.532788276672363],["▁nashr",-13.532797813415527],["▁ਸਬੰਧੀ",-13.53280258178711],["▁fragen",-13.53280544281006],["▁Torna",-13.532811164855955],["forsk",-13.53281307220459],["がち",-13.532814025878906],["զան",-13.532827377319336],["▁пациенти",-13.532835960388184],["▁моря",-13.5328369140625],["▁Himmel",-13.53285026550293],["▁کیو",-13.532851219177246],["genomen",-13.532852172851562],["▁päässä",-13.532855987548828],["▁चढ",-13.532859802246094],["▁ಚೆನ್ನಾಗಿ",-13.53287124633789],["manden",-13.532875061035156],["▁עשר",-13.532876968383787],["▁diel",-13.532877922058104],["▁bitter",-13.532878875732422],["▁השנים",-13.532907485961914],["▁tāpat",-13.532920837402344],["▁gương",-13.532925605773926],["▁katon",-13.53292751312256],["▁tánc",-13.532954216003418],["▁vorstellen",-13.532967567443848],["▁présentation",-13.532995223999023],["▁ապրանք",-13.532995223999023],["销量",-13.533002853393556],["ரம",-13.533013343811035],["▁Kvalitet",-13.533018112182615],["▁نکرده",-13.533024787902832],["ciña",-13.533028602600098],["▁ripa",-13.533038139343262],["▁Votre",-13.533042907714844],["▁nehod",-13.533069610595703],["▁podrà",-13.53307056427002],["技术的",-13.53307056427002],["▁Tumi",-13.533072471618652],["▁разбере",-13.53309440612793],["▁Эрүүл",-13.533101081848145],["▁rosto",-13.53310775756836],["▁axtar",-13.533109664916992],["▁депутаты",-13.533114433288574],["ожен",-13.533140182495115],["▁Petrus",-13.533148765563965],["аасаа",-13.53314971923828],["▁urheilu",-13.533156394958496],["▁בחור",-13.533163070678713],["پذیر",-13.533170700073242],["hamn",-13.533185005187988],["forbrug",-13.533190727233888],["▁kompis",-13.533197402954102],["いたので",-13.53320026397705],["▁Learning",-13.53321361541748],["▁rīt",-13.533217430114746],["เป็นเรื่อง",-13.533220291137695],["取代",-13.53322696685791],["▁បន្ថែម",-13.533230781555176],["סמ",-13.533242225646973],["▁воли",-13.533247947692873],["曾在",-13.533252716064451],["▁diari",-13.53325653076172],["▁Taxi",-13.533262252807615],["▁(40",-13.533268928527832],["ઈન",-13.533272743225098],["▁непре",-13.533273696899414],["▁كلية",-13.533286094665527],["ЕРИ",-13.53329086303711],["▁članovi",-13.533296585083008],["▁بوس",-13.533300399780272],["▁karakteristik",-13.533306121826172],["гей",-13.53331184387207],["pand",-13.53332805633545],["レイ",-13.533330917358398],["▁Nebo",-13.533336639404297],["popular",-13.533343315124512],["ராம்",-13.533349990844728],["első",-13.533352851867676],["▁liikenne",-13.533354759216309],["ستمر",-13.533366203308104],["优质",-13.533371925354004],["▁مستمر",-13.533380508422852],["▁बैंकले",-13.533391952514648],["gül",-13.53339958190918],["▁เมตร",-13.533407211303713],["▁uken",-13.533411979675291],["बाह",-13.533413887023926],["جوز",-13.53342056274414],["ujúcich",-13.533429145812988],["ूक",-13.533432006835938],["▁hantera",-13.53344440460205],["cream",-13.533447265625],["روف",-13.53345012664795],["▁sjedi",-13.53345012664795],["▁partizan",-13.533452033996582],["pesi",-13.533461570739746],["▁toner",-13.533493041992188],["▁sāka",-13.533499717712402],["▁שיי",-13.533513069152832],["▁Joulu",-13.533526420593262],["olóxico",-13.53354549407959],["ذه",-13.53354835510254],["▁Verwaltung",-13.533551216125488],["▁Malah",-13.533555030822754],["▁fazi",-13.533555030822754],["▁पहला",-13.53355598449707],["▁priliku",-13.53357219696045],["▁дружин",-13.533573150634766],["疲れ",-13.533596992492676],["▁नियुक्त",-13.533597946166992],["▁nemohl",-13.53360652923584],["torp",-13.533609390258787],["guli",-13.53361988067627],["▁қарасты",-13.533623695373535],["▁Push",-13.5336332321167],["▁මාර්ගය",-13.533643
72253418],["▁hmm",-13.533646583557127],["▁способом",-13.533658981323242],["運行",-13.53368091583252],["▁ressources",-13.533686637878418],["▁Christopher",-13.53369140625],["ټې",-13.533692359924316],["▁Provincial",-13.533697128295898],["▁пошта",-13.533705711364746],["▁بیل",-13.533711433410645],["▁soins",-13.533724784851074],["해보",-13.53374195098877],["näitus",-13.53374481201172],["▁updates",-13.533745765686035],["▁ትውልድ",-13.533753395080566],["▁Dual",-13.533757209777832],["järve",-13.533767700195312],["▁Ziya",-13.533768653869627],["▁krah",-13.533781051635742],["▁متنوع",-13.533783912658691],["▁mulier",-13.53380012512207],["iyev",-13.533802032470703],["▁zetu",-13.533808708190918],["krim",-13.533833503723145],["tonta",-13.53384494781494],["▁isugu",-13.533845901489258],["飲料",-13.533869743347168],["ضيع",-13.533870697021484],["כונים",-13.5338716506958],["รก",-13.533881187438965],["ענער",-13.533888816833496],["成为了",-13.533894538879396],["dýr",-13.53390121459961],["▁ие",-13.53391456604004],["▁slette",-13.533965110778809],["▁Retro",-13.533971786499023],["টের",-13.533973693847656],["▁obsa",-13.533987998962402],["▁försäljning",-13.534014701843262],["▁ať",-13.534019470214844],["▁ຊາວ",-13.534029006958008],["▁usada",-13.534042358398438],["部队",-13.534056663513184],["▁പ്രശ്",-13.534066200256348],["▁maganda",-13.53408908843994],["alamu",-13.534097671508787],["களைப்",-13.534102439880373],["▁strange",-13.534111976623535],["▁Kvinner",-13.534113883972168],["pakt",-13.534117698669434],["▁مزار",-13.534132957458496],["▁cable",-13.534150123596191],["▁Dw",-13.534170150756836],["▁hegy",-13.534191131591797],["▁Rød",-13.534197807312012],["▁kalkul",-13.53420639038086],["▁Senti",-13.534212112426758],["▁мужчина",-13.534213066101074],["▁ніч",-13.53424072265625],["▁wenden",-13.53425407409668],["▁creació",-13.534263610839844],["▁തിരിച്ച",-13.534271240234377],["▁visoke",-13.534287452697754],["▁sluša",-13.53432273864746],["ltaan",-13.53432559967041],["îf",-13.534326553344728],["してみてください",-13.534340858459473],["▁ailesi",-13.5343599319458],["▁aðra",-13.534368515014648],["มีโอกาส",-13.534379959106444],["▁HAI",-13.534381866455078],["ാധി",-13.534393310546877],["hoof",-13.53439712524414],["DSL",-13.53441047668457],["▁Satz",-13.534417152404783],["▁postati",-13.534429550170898],["▁آهيون",-13.534433364868164],["才有",-13.534435272216797],["▁konferans",-13.534454345703123],["▁отриман",-13.534457206726074],["lipa",-13.53449535369873],["tóir",-13.534496307373049],["езда",-13.534506797790527],["廊",-13.534523963928224],["瑟",-13.534538269042969],["辰",-13.534538269042969],["ව්ව",-13.534541130065918],["显著",-13.534549713134766],["ങ്ങനെ",-13.534557342529297],["きっかけ",-13.534557342529297],["▁oferir",-13.534558296203612],["ประวัติ",-13.534565925598145],["▁fee",-13.53456974029541],["ឱកាស",-13.534570693969728],["▁Първо",-13.534570693969728],["▁мектебі",-13.534570693969728],["▁קרעטשמע",-13.534570693969728],["▁فہرست",-13.534570693969728],["▁ڪهاڻي",-13.534570693969728],["▁রেখে",-13.534570693969728],["▁ਗੁਰਦੁਆਰਾ",-13.534570693969728],["▁kërkim",-13.534571647644045],["▁ফুটবল",-13.534571647644045],["ก่อสร้าง",-13.534573554992676],["▁ЗМІ",-13.534573554992676],["▁చేస్తే",-13.53457736968994],["▁θερμ",-13.534579277038574],["▁smartphones",-13.53458309173584],["глянути",-13.534587860107422],["kult",-13.534589767456056],["▁конгрес",-13.534590721130373],["▁각각",-13.534592628479004],["▁աշխատանքային",-13.534600257873535],["▁ધરાવ",-13.534603118896484],["▁édition",-13.5346040725708],["▁bhíonn",-13.534605979919434],["▁इक",-13.534611701965332],["▁ഹൈ",-13.534613609313965],["▁verz
amel",-13.534615516662598],["▁बाजू",-13.534622192382812],["▁камсыз",-13.534626960754396],["▁квартиру",-13.53463363647461],["▁жарандар",-13.53464698791504],["▁Eurovision",-13.534655570983888],["▁деякі",-13.534663200378418],["hafte",-13.534683227539062],["▁معاملہ",-13.534693717956545],["▁කියයි",-13.53469467163086],["▁protesto",-13.53470230102539],["給我",-13.534727096557615],["بسيط",-13.534728050231934],["▁točke",-13.534729957580566],["การเล่น",-13.5347318649292],["▁sinema",-13.534743309020996],["▁Материал",-13.53475284576416],["hjælp",-13.53475570678711],["▁အလုပ်",-13.534780502319336],["手に",-13.534784317016602],["▁prijave",-13.534788131713867],["▁Keb",-13.53480625152588],["zag",-13.534812927246094],["▁enese",-13.534825325012209],["Лу",-13.534831047058104],["vaardig",-13.534842491149902],["ข่าวสาร",-13.534842491149902],["TIA",-13.534850120544434],["捷運",-13.534859657287598],["▁PLUS",-13.53486156463623],["īgais",-13.534863471984863],["▁gjak",-13.534878730773926],["▁sokan",-13.534882545471191],["▁Huom",-13.53488540649414],["▁puiki",-13.534900665283203],["خضر",-13.534907341003418],["очного",-13.534926414489746],["▁durable",-13.534929275512695],["▁بوی",-13.534934997558594],["нуў",-13.534955978393556],["▁שיר",-13.534960746765137],["▁batetik",-13.53496551513672],["▁estrella",-13.534968376159668],["Blue",-13.534971237182615],["materiale",-13.535027503967283],["పోవ",-13.535032272338867],["▁افت",-13.535082817077637],["▁cuina",-13.535086631774902],["ხვა",-13.535091400146484],["▁nasib",-13.535094261169434],["ացրել",-13.53510570526123],["▁Lung",-13.535116195678713],["▁قىلىنغان",-13.535120010375977],["▁ໃຫມ່",-13.535141944885254],["▁exakt",-13.535146713256836],["५१",-13.535170555114746],["rtasida",-13.535176277160645],["▁zati",-13.535178184509276],["▁abitu",-13.535184860229492],["▁mínu",-13.535186767578123],["financ",-13.53519344329834],["▁Veter",-13.535195350646973],["ୟାର",-13.535243034362791],["вир",-13.535249710083008],["▁Risk",-13.53526782989502],["▁Noël",-13.535279273986816],["▁Abe",-13.535280227661133],["▁бөлүм",-13.535306930541992],["จําเป็น",-13.535332679748535],["焼",-13.53534984588623],["▁причем",-13.535359382629396],["wein",-13.535362243652344],["эра",-13.53536891937256],["▁ទើប",-13.535371780395508],["▁sørge",-13.535377502441406],["▁tänkt",-13.535383224487305],["pais",-13.535390853881836],["▁erstatte",-13.535399436950684],["▁estados",-13.535412788391112],["වත",-13.535417556762695],["▁၁၂",-13.535436630249023],["▁Ορ",-13.535439491271973],["▁prepare",-13.53544807434082],["▁странице",-13.535462379455566],["▁produción",-13.535463333129885],["▁Profit",-13.535482406616213],["▁xay",-13.53548812866211],["▁खुशी",-13.535505294799805],["ժան",-13.535521507263184],["ważnie",-13.535550117492676],["сэл",-13.535566329956056],["▁رهن",-13.535567283630373],["നില",-13.535573959350586],["▁салт",-13.53559112548828],["ором",-13.53559684753418],["▁Kartu",-13.535600662231444],["並非",-13.535614967346191],["▁işə",-13.535619735717772],["▁necesidade",-13.53563404083252],["▁покрива",-13.535636901855469],["fejlesztési",-13.535653114318848],["▁afar",-13.535653114318848],["MAL",-13.53567886352539],["hjelm",-13.535679817199709],["▁считать",-13.535686492919922],["▁תרבות",-13.535686492919922],["ਲ੍ਹ",-13.53569793701172],["安全的",-13.535711288452148],["▁offrire",-13.53572940826416],["▁tillverka",-13.535743713378906],["▁aparecer",-13.535751342773438],["▁járt",-13.535768508911133],["▁Isaac",-13.535780906677246],["▁voditelj",-13.535783767700195],["gaidh",-13.535784721374512],["▁inimest",-13.535807609558104],["▁berichtet",-13.535832405090332],[
"▁گئیں",-13.535853385925291],["都没",-13.535853385925291],["▁מסת",-13.535858154296877],["undur",-13.535871505737305],["▁MEDIA",-13.535877227783203],["сүз",-13.53587818145752],["▁ମିଛ",-13.535882949829102],["خراج",-13.535884857177734],["▁បោះ",-13.535893440246582],["סיס",-13.535906791687012],["▁Bogu",-13.53593635559082],["▁รอง",-13.535937309265137],["▁trasfer",-13.535955429077148],["prime",-13.535959243774414],["cția",-13.535985946655272],["▁yapmaya",-13.535993576049805],["ಚೆ",-13.535996437072754],["▁květ",-13.53599739074707],["Ég",-13.536004066467283],["ценка",-13.536006927490234],["qada",-13.536048889160156],["НОМ",-13.536054611206056],["助理",-13.53606128692627],["ündə",-13.536062240600586],["印尼",-13.536065101623535],["mics",-13.536066055297852],["▁gesamte",-13.536070823669434],["▁võistlus",-13.536091804504396],["▁mouse",-13.536099433898926],["色的",-13.536112785339355],["▁Због",-13.536147117614746],["▁Παρά",-13.536152839660645],["ോടും",-13.536157608032228],["तार",-13.536175727844238],["ండీ",-13.536178588867188],["ม่า",-13.536178588867188],["▁môžeme",-13.536195755004885],["▁Bumi",-13.536225318908691],["▁ددې",-13.536277770996094],["potent",-13.536287307739258],["কট",-13.53629207611084],["▁Isra",-13.536300659179688],["Ini",-13.53630256652832],["▁vagyis",-13.536306381225586],["▁Mats",-13.536311149597168],["wish",-13.53631591796875],["▁óg",-13.536317825317385],["remmo",-13.536324501037598],["▁appen",-13.536333084106444],["▁المدني",-13.536336898803713],["мелі",-13.536346435546877],["▁acelera",-13.536373138427734],["▁Mulai",-13.53637981414795],["▁vähä",-13.536395072937012],["▁Άρ",-13.536405563354492],["尘",-13.536417961120604],["▁установе",-13.536426544189451],["▁চে",-13.536429405212402],["▁раде",-13.536437034606934],["fact",-13.536443710327148],["ිණි",-13.53644847869873],["爷",-13.536452293395996],["▁האישי",-13.536460876464844],["utės",-13.53646469116211],["▁ആശുപത്രി",-13.536474227905272],["記録",-13.536481857299805],["8°",-13.53648567199707],["gjör",-13.536486625671388],["▁talen",-13.536489486694336],["အသင္း",-13.53649616241455],["▁pinigų",-13.536497116088867],["▁plástico",-13.536497116088867],["▁السلطات",-13.536497116088867],["▁پوهنتون",-13.536497116088867],["▁আন্দোলন",-13.536497116088867],["▁സഹോദര",-13.536497116088867],["▁antioksidan",-13.536498069763184],["▁արդյունավետ",-13.536498069763184],["楽しく",-13.5364990234375],["▁Alþingi",-13.536500930786133],["▁kiwango",-13.53650188446045],["▁przesył",-13.536503791809082],["▁వలన",-13.536504745483398],["▁apmeklēt",-13.536505699157717],["▁цяжка",-13.536505699157717],["▁kávé",-13.536506652832031],["▁riqueza",-13.536506652832031],["▁poesía",-13.536508560180664],["▁නැද්ද",-13.536511421203612],["▁уметничк",-13.536514282226562],["▁কেউ",-13.536519050598145],["▁Conference",-13.536526679992676],["周边",-13.536529541015623],["▁speciellt",-13.53653049468994],["硬件",-13.536540985107422],["▁kullanarak",-13.53655242919922],["מנט",-13.536553382873535],["riat",-13.536564826965332],["ն՝",-13.536564826965332],["▁praktis",-13.536565780639648],["▁lối",-13.536575317382812],["പ്ല",-13.536582946777344],["理性",-13.536592483520508],["kolle",-13.53659439086914],["信じ",-13.53659725189209],["▁अर्थात",-13.53660011291504],["▁rekonstrukci",-13.536609649658203],["▁produktet",-13.536622047424316],["▁Iceland",-13.536627769470217],["▁ticaret",-13.536627769470217],["▁करण्याची",-13.536640167236328],["▁(40)",-13.536641120910645],["▁घरमा",-13.536649703979492],["▁보장",-13.536651611328123],["ישה",-13.536675453186035],["▁മാർ",-13.536676406860352],["bär",-13.536684036254885],["▁enfrentar",-13.536690711975098],
["эрэ",-13.536700248718262],["ंश",-13.53670883178711],["▁Машин",-13.536715507507324],["▁שמש",-13.53672695159912],["ร่าง",-13.536739349365234],["升級",-13.536742210388184],["▁Cir",-13.5367431640625],["▁svakako",-13.536754608154297],["▁päeval",-13.53676414489746],["▁مقابلے",-13.536768913269045],["▁moci",-13.536800384521484],["▁didelis",-13.5368013381958],["kontor",-13.53681182861328],["يزي",-13.536816596984863],["קנה",-13.536831855773926],["该公司",-13.536831855773926],["▁Mujh",-13.536835670471191],["▁вагон",-13.53684425354004],["rescu",-13.53685188293457],["קוב",-13.536852836608888],["▁moich",-13.536855697631836],["▁ಮಾತನಾಡ",-13.536858558654783],["ECT",-13.536884307861328],["▁potenzia",-13.536890983581545],["digi",-13.536953926086426],["▁adesso",-13.53695583343506],["హెచ్",-13.536956787109377],["▁dhaqan",-13.536956787109377],["▁hükümet",-13.536983489990234],["ישראל",-13.536992073059082],["▁rapaz",-13.537010192871094],["▁aquella",-13.537016868591309],["τους",-13.537050247192385],["▁referent",-13.537060737609863],["▁वर्षमा",-13.537091255187988],["▁marrin",-13.537099838256836],["ෙන්ම",-13.537110328674316],["▁KV",-13.537111282348633],["യാണ",-13.537114143371582],["▁kotak",-13.537116050720217],["▁Ένας",-13.537128448486328],["▁أخذ",-13.537134170532228],["▁اچڻ",-13.537139892578123],["▁bryst",-13.537149429321287],["▁හෙට",-13.537171363830566],["κω",-13.537220001220703],["▁møder",-13.537237167358398],["▁izi",-13.537249565124512],["一時",-13.537254333496094],["考える",-13.537260055541992],["owców",-13.53726291656494],["చో",-13.5372953414917],["iadur",-13.537303924560549],["▁kızı",-13.537308692932127],["▁dodatkowe",-13.537312507629396],["ovač",-13.537321090698242],["▁kitus",-13.537351608276367],["ykset",-13.537357330322266],["▁ong",-13.537382125854492],["▁የቀ",-13.537385940551758],["▁dará",-13.537389755249023],["▁430",-13.537407875061035],["▁Moder",-13.537407875061035],["精選",-13.537433624267578],["▁salida",-13.53743839263916],["▁Gou",-13.537446975708008],["▁ibig",-13.537450790405272],["▁vrchol",-13.537453651428224],["μόν",-13.537480354309082],["र्ति",-13.53750228881836],["▁الجي",-13.53752326965332],["▁feta",-13.537534713745115],["▁hacker",-13.537546157836914],["sszük",-13.537565231323242],["illy",-13.537579536437988],["▁vrednosti",-13.53758144378662],["kuvat",-13.537598609924316],["▁anticip",-13.537607192993164],["▁Analys",-13.537626266479492],["▁joilla",-13.537630081176758],["stikke",-13.537639617919922],["▁coñece",-13.53767204284668],["▁incidente",-13.537683486938477],["صديق",-13.537684440612791],["▁Uči",-13.53769302368164],["在於",-13.537700653076172],["▁butikker",-13.537705421447754],["thik",-13.537723541259766],["▁šele",-13.537731170654297],["▁мүлік",-13.537738800048828],["ჯახ",-13.53774070739746],["▁เย็ด",-13.537745475769045],["▁höst",-13.537755966186523],["מרכז",-13.537758827209473],["ពូ",-13.537779808044434],["▁ڄا",-13.537796020507812],["▁vytvor",-13.537796974182127],["▁kontrolu",-13.537806510925291],["二次",-13.537813186645508],["本來",-13.537813186645508],["▁nicio",-13.537814140319824],["的态度",-13.537823677062988],["īnā",-13.53782844543457],["каль",-13.537829399108888],["ਦੂ",-13.537853240966797],["▁allows",-13.53785800933838],["setti",-13.53786563873291],["▁የህ",-13.537901878356934],["▁Artur",-13.537936210632324],["ാക്കിയ",-13.537940979003906],["MAG",-13.537985801696776],["əcə",-13.538002014160156],["藥物",-13.538010597229004],["▁семья",-13.53801441192627],["▁købt",-13.538018226623535],["▁Blom",-13.538047790527344],["νουμε",-13.538058280944824],["▁نعمت",-13.538070678710938],["▁πιστ",-13.538089752197266],["▁бяс",-13.
538092613220217],["をし",-13.538108825683594],["▁등장",-13.538124084472656],["▁süs",-13.538153648376465],["enii",-13.538159370422363],["иваться",-13.538163185119627],["▁брата",-13.538179397583008],["▁Макар",-13.538182258605955],["ద్ర",-13.538199424743652],["ėjęs",-13.538202285766602],["sev",-13.538223266601562],["▁კომენტარი",-13.53822422027588],["的には",-13.538233757019045],["ვდი",-13.538252830505373],["▁inverno",-13.538283348083496],["បែក",-13.538297653198242],["▁jedná",-13.538304328918455],["ኖረው",-13.538307189941406],["3°",-13.538324356079102],["▁IPS",-13.538337707519531],["ministr",-13.53836154937744],["▁চলে",-13.538379669189451],["트를",-13.538379669189451],["▁ռուս",-13.538382530212402],["▁Nagu",-13.538384437561035],["лэж",-13.538393020629885],["▁forståelse",-13.538400650024414],["ລັດຖະມົນຕີ",-13.538426399230955],["▁bezpłatn",-13.538427352905272],["▁dibutuhkan",-13.538427352905272],["▁nümunə",-13.538427352905272],["▁priežiūros",-13.538427352905272],["▁tecrübe",-13.538427352905272],["▁περιοχές",-13.538427352905272],["▁впечатление",-13.538427352905272],["▁знаходзіцца",-13.538427352905272],["▁Արթուր",-13.538427352905272],["▁छोटी",-13.538427352905272],["▁ინფორმაციის",-13.538427352905272],["▁촬영",-13.538427352905272],["ប៉ុណ្ណោះ",-13.53842830657959],["▁Quliyev",-13.53842830657959],["▁izvještaj",-13.53842830657959],["▁완료",-13.53842830657959],["▁jautājums",-13.538430213928224],["▁залежить",-13.538430213928224],["▁ስርዓት",-13.538430213928224],["小さな",-13.53843116760254],["สนามบิน",-13.538432121276855],["႐ုပ္",-13.538432121276855],["لەشتۈر",-13.538436889648438],["▁ਬਣਾਉਣ",-13.53843879699707],["创意",-13.53843879699707],["▁успіх",-13.538442611694336],["▁ଖୋ",-13.538447380065918],["▁విజయ్",-13.538450241088867],["▁péntek",-13.538451194763184],["▁ଅଭି",-13.538458824157717],["ચ્છ",-13.538460731506348],["▁ganar",-13.53846549987793],["▁ಟ",-13.53846549987793],["▁выяв",-13.538467407226562],["▁Arad",-13.538474082946776],["▁Terdapat",-13.538474082946776],["shes",-13.538482666015623],["▁innskudd",-13.538487434387209],["ອາຫານ",-13.538490295410156],["▁студије",-13.53849983215332],["▁wadda",-13.538500785827637],["▁mondja",-13.538509368896484],["▁chwilę",-13.538522720336914],["दर्शन",-13.53852367401123],["▁Norður",-13.538524627685549],["▁ŝajn",-13.538532257080078],["▁കാര്യങ്ങള്",-13.538537979125977],["▁रहते",-13.538541793823242],["▁изберете",-13.538543701171877],["▁קינדער",-13.538548469543455],["▁Febr",-13.538555145263672],["▁factores",-13.538568496704102],["▁לל",-13.538573265075684],["▁शकतो",-13.538573265075684],["nehmer",-13.53857421875],["▁Prečo",-13.53857707977295],["▁skydda",-13.53859043121338],["▁tràn",-13.53859043121338],["േക്കും",-13.538593292236328],["▁grunt",-13.538597106933594],["▁სხეულ",-13.538609504699709],["▁Ошондой",-13.538617134094238],["▁ముఖ్య",-13.53862190246582],["际",-13.53862762451172],["▁לכתוב",-13.5386323928833],["▁жобасы",-13.538634300231934],["▁Zeitpunkt",-13.538647651672363],["ڌي",-13.538650512695312],["क्रा",-13.538654327392578],["▁kokio",-13.538656234741213],["▁წმინდა",-13.53865909576416],["▁default",-13.538670539855955],["▁nagrade",-13.538674354553224],["ărilor",-13.53867530822754],["▁faol",-13.538679122924805],["▁dopust",-13.538684844970703],["▁minimi",-13.53868579864502],["onio",-13.538714408874512],["مەس",-13.53871726989746],["▁markeds",-13.538721084594728],["Mini",-13.538729667663574],["▁Teljes",-13.538729667663574],["720",-13.538734436035156],["▁szakasz",-13.538765907287598],["▁chì",-13.53879737854004],["šanje",-13.53880500793457],["▁മനസ",-13.53880500793457],["▁bertahan",-13.538806915283203],["▁መ
ቶ",-13.538824081420898],["▁vyf",-13.538829803466797],["▁Knight",-13.53883934020996],["=5",-13.538862228393556],["viste",-13.538864135742188],["485",-13.538895606994627],["▁termes",-13.538925170898438],["belah",-13.53892707824707],["▁nömrəli",-13.538949012756348],["▁Nils",-13.538975715637209],["▁പഠിക്ക",-13.538976669311523],["▁Wakati",-13.538983345031738],["ژو",-13.53899097442627],["▁venire",-13.539030075073242],["▁బుక్",-13.539034843444824],["ненні",-13.539036750793455],["జీవ",-13.539124488830566],["ຄູ່",-13.539130210876465],["landırma",-13.539134979248049],["▁développer",-13.539145469665527],["▁Dora",-13.539154052734377],["ջի",-13.539155006408691],["▁Fantasy",-13.539162635803224],["aiky",-13.539180755615234],["wanan",-13.53918743133545],["আর",-13.539191246032717],["▁thôn",-13.539192199707031],["357",-13.539196968078612],["андык",-13.539206504821776],["▁lantai",-13.539240837097168],["▁sedež",-13.539247512817385],["▁logistik",-13.539255142211914],["brief",-13.539265632629396],["▁ລ້ານ",-13.539276123046877],["ජී",-13.539277076721191],["▁cadena",-13.539280891418455],["แย",-13.539283752441406],["▁spokojen",-13.539283752441406],["کور",-13.539301872253418],["ragu",-13.539302825927734],["▁Rs",-13.53931713104248],["▁지정",-13.539334297180176],["▁povus",-13.53935718536377],["▁технологија",-13.5393648147583],["▁정리",-13.539373397827148],["▁Nora",-13.539380073547363],["▁verilmiş",-13.539395332336426],["▁ፖሊስ",-13.539427757263184],["▁festivalu",-13.53944969177246],["Paul",-13.53945541381836],["sever",-13.539472579956056],["▁Kvinne",-13.539477348327637],["▁siguri",-13.539485931396484],["læn",-13.539504051208496],["▁Къ",-13.539555549621582],["▁inject",-13.539558410644531],["▁Mwa",-13.539629936218262],["▁Straf",-13.539640426635742],["cipe",-13.53965950012207],["▁кіру",-13.539661407470703],["▁militari",-13.539665222167969],["WR",-13.539668083190918],["irajo",-13.539681434631348],["преки",-13.539708137512209],["▁seemed",-13.53972625732422],["▁Walk",-13.539766311645508],["▁oggetto",-13.539775848388672],["Stream",-13.539799690246582],["▁хам",-13.53981590270996],["▁секунди",-13.539833068847656],["štvu",-13.539834976196287],["▁François",-13.539857864379885],["▁Psycholog",-13.539864540100098],["ೆಲ್ಲಾ",-13.53986930847168],["centre",-13.539886474609377],["ीने",-13.53989028930664],["▁comentários",-13.539897918701172],["▁साफ",-13.539898872375488],["ದಾನ",-13.539902687072754],["jsza",-13.53990650177002],["Ĝ",-13.53993797302246],["жев",-13.539942741394045],["▁otot",-13.539962768554688],["ড়িয়ে",-13.539987564086914],["▁templom",-13.540002822875977],["▁පද",-13.54000473022461],["▁cesto",-13.540013313293455],["▁passed",-13.540027618408203],["▁ഉമ്മ",-13.540043830871582],["▁Aran",-13.540053367614746],["umist",-13.540054321289062],["▁morala",-13.54006004333496],["▁шам",-13.540061950683594],["▁búin",-13.54008960723877],["▁fotografía",-13.540094375610352],["bedi",-13.540122985839844],["▁денови",-13.540122985839844],["ırıq",-13.54013442993164],["تحر",-13.54013729095459],["▁turista",-13.540141105651855],["▁дорога",-13.540155410766602],["about",-13.540156364440918],["ÉK",-13.540160179138184],["chain",-13.54016399383545],["аттуу",-13.54017448425293],["страда",-13.540209770202637],["ตัวแทน",-13.540230751037598],["캠",-13.540237426757812],["▁apresentado",-13.540244102478027],["시에",-13.540252685546877],["▁gaaray",-13.540290832519531],["▁مجھ",-13.540302276611328],["查询",-13.540312767028809],["書籍",-13.540319442749023],["▁විම",-13.540324211120604],["潭",-13.540340423583984],["▁ਕਰੋੜ",-13.5403413772583],["ଗୁଡ଼ିକ",-13.540348052978516],["สโมสร",-13
.540360450744627],["끌",-13.540361404418944],["😂",-13.540361404418944],["έρχονται",-13.540362358093262],["▁aplikeblas",-13.540362358093262],["▁kesulitan",-13.540362358093262],["▁pernikahan",-13.540362358093262],["▁reprezinta",-13.540362358093262],["▁tähelepanu",-13.540362358093262],["▁électrique",-13.540362358093262],["▁помещение",-13.540362358093262],["▁тэргүүн",-13.540362358093262],["▁միլիոն",-13.540362358093262],["▁आधुनिक",-13.540362358093262],["▁იმიტომ",-13.540362358093262],["細節",-13.540362358093262],["밴",-13.540362358093262],["▁Pilipino",-13.540363311767578],["▁binafsi",-13.540363311767578],["▁lehenengo",-13.540363311767578],["▁Chcete",-13.540364265441896],["▁behøver",-13.540366172790527],["▁ಸತ್ಯ",-13.540367126464844],["▁fantasy",-13.54036808013916],["▁Matthew",-13.540369987487791],["ເຂົ້າຫາ",-13.540371894836426],["▁हुनुहुन्छ",-13.540372848510742],["▁ቀደም",-13.540374755859377],["▁героя",-13.540376663208008],["▁doença",-13.54037857055664],["▁орчим",-13.540379524230955],["▁takaa",-13.540380477905272],["▁מלונות",-13.540380477905272],["▁динара",-13.54038143157959],["▁příští",-13.540386199951172],["▁संजय",-13.540386199951172],["▁Αγίου",-13.540388107299805],["▁ಸೊ",-13.540396690368652],["▁yapılır",-13.540398597717283],["▁ቀናት",-13.540399551391602],["▁ਜਾਵੇ",-13.540404319763184],["▁베이",-13.540407180786133],["▁ANO",-13.540409088134766],["▁vacaciones",-13.540409088134766],["▁сільського",-13.54041862487793],["看完",-13.540435791015623],["▁cv",-13.540438652038574],["▁அமெரிக்கா",-13.54043960571289],["UCH",-13.540454864501951],["▁передаче",-13.540460586547852],["▁deretter",-13.540461540222168],["▁cruce",-13.5404634475708],["▁заниматься",-13.540465354919434],["▁мэр",-13.540465354919434],["▁ექიმ",-13.5404691696167],["▁گور",-13.540481567382812],["missió",-13.540494918823242],["▁staðnum",-13.540494918823242],["րամ",-13.540499687194824],["▁Həm",-13.540502548217772],["▁በመቶ",-13.54051685333252],["▁ਹੱਥ",-13.540544509887695],["గురు",-13.540547370910645],["▁հավատ",-13.540552139282228],["paru",-13.540555953979492],["▁Wilt",-13.540559768676758],["4-1",-13.540563583374023],["▁המקום",-13.54056453704834],["▁konfront",-13.540570259094238],["也許",-13.540570259094238],["CAM",-13.540574073791504],["พนัน",-13.54057502746582],["▁Karel",-13.54060173034668],["plasser",-13.540603637695312],["▁koalici",-13.540603637695312],["_1",-13.54061508178711],["▁inyong",-13.540624618530272],["▁réseaux",-13.540626525878906],["▁දකින්න",-13.540630340576172],["的投资",-13.54064655303955],["▁наследство",-13.5406494140625],["trimeda",-13.54065227508545],["бля",-13.540653228759766],["▁यसै",-13.540677070617676],["▁речима",-13.540688514709473],["gido",-13.540692329406738],["▁hedder",-13.540712356567385],["▁উদ্",-13.5407133102417],["▁তাকে",-13.54075527191162],["▁nemoka",-13.540772438049316],["ચો",-13.540789604187012],["▁pande",-13.54079532623291],["▁taşın",-13.540797233581545],["skjema",-13.540807723999023],["▁намали",-13.540810585021973],["Drejt",-13.540814399719238],["▁حاليا",-13.540814399719238],["▁potrebbero",-13.54081916809082],["▁yapımı",-13.54085922241211],["▁Vaik",-13.54086685180664],["belle",-13.540901184082031],["▁breg",-13.540911674499512],["▁TVA",-13.540922164916992],["▁schools",-13.540934562683104],["izazio",-13.54096221923828],["▁Mother",-13.540971755981444],["▁materiaali",-13.540980339050291],["▁llo",-13.540987014770508],["▁(34)",-13.541024208068848],["▁Creo",-13.54102897644043],["▁קע",-13.541038513183594],["▁mardi",-13.541041374206545],["▁vjero",-13.54104709625244],["▁sudut",-13.541050910949709],["▁шире",-13.541064262390137],["పాద",-13.54
1065216064451],["KING",-13.54106616973877],["▁girdi",-13.54106616973877],["▁қорға",-13.541068077087402],["營運",-13.541077613830566],["▁chcem",-13.541091918945312],["stærk",-13.541122436523438],["pracoval",-13.541123390197754],["▁Tp",-13.54112720489502],["▁വള",-13.54114818572998],["▁قمر",-13.541172981262209],["▁incluir",-13.5411958694458],["▁pienen",-13.541197776794434],["ojne",-13.5412015914917],["▁izvan",-13.541202545166016],["பம்",-13.541208267211914],["確實",-13.541223526000977],["▁története",-13.541247367858888],["פד",-13.541259765625],["▁שינוי",-13.541259765625],["▁เริ่ม",-13.54127025604248],["▁strap",-13.541290283203123],["▁देशभर",-13.541293144226074],["117",-13.541300773620604],["▁هاڻ",-13.541302680969238],["737",-13.541315078735352],["▁серце",-13.541329383850098],["▁JP",-13.541337013244627],["▁ನೋಟ",-13.541346549987791],["ってきた",-13.541373252868652],["▁цене",-13.541380882263184],["міст",-13.541382789611816],["رەك",-13.541386604309082],["צפה",-13.541393280029297],["▁2,6",-13.541417121887209],["عامل",-13.54143238067627],["▁kicsi",-13.54145622253418],["kası",-13.541474342346191],["▁entrepren",-13.541488647460938],["▁PES",-13.54149341583252],["δημο",-13.541496276855469],["▁آرمی",-13.541497230529783],["▁evig",-13.541500091552734],["styring",-13.54153060913086],["▁ආගම",-13.54154109954834],["▁južn",-13.541544914245604],["పాల్",-13.541548728942873],["▁בידי",-13.54155731201172],["▁1902",-13.541560173034668],["ईन",-13.541583061218262],["함을",-13.541584014892578],["ദം",-13.541584968566896],["▁գրում",-13.541601181030272],["▁огляд",-13.541622161865234],["suom",-13.54162311553955],["▁kazal",-13.541624069213867],["ılmıştır",-13.54164981842041],["▁45%",-13.541675567626951],["applique",-13.54167938232422],["▁وای",-13.541685104370115],["年齡",-13.54169750213623],["▁messa",-13.541698455810549],["teista",-13.541707038879396],["▁ಇಂದ",-13.541708946228027],["▁գործել",-13.541711807250977],["▁Pune",-13.541719436645508],["▁니",-13.541726112365724],["▁Lara",-13.541736602783203],["тици",-13.541738510131836],["▁zadanie",-13.541741371154783],["▁ගෝ",-13.541759490966797],["▁നാല്",-13.541787147521973],["▁decisions",-13.541797637939451],["ٽائي",-13.541807174682615],["▁tablets",-13.541829109191896],["▁සුබ",-13.541829109191896],["klær",-13.541834831237791],["ოტ",-13.541855812072754],["▁teor",-13.541866302490234],["▁negócios",-13.54186725616455],["वंश",-13.541868209838867],["อาศัย",-13.541872024536133],["шком",-13.541878700256348],["▁техникалық",-13.541899681091309],["▁слабо",-13.541912078857422],["▁обично",-13.54192352294922],["ሎት",-13.541924476623535],["riju",-13.541945457458496],["irodalom",-13.541951179504396],["▁她",-13.541970252990724],["▁легенда",-13.54197597503662],["▁ruky",-13.541994094848633],["▁juge",-13.541998863220217],["▁ଯ",-13.542027473449709],["▁pior",-13.542031288146973],["린다",-13.542044639587402],["위원",-13.542051315307615],["▁suhte",-13.542061805725098],["▁Kasu",-13.542101860046388],["▁Filem",-13.54210376739502],["yddiaeth",-13.542107582092283],["▁აბ",-13.542123794555664],["▁obez",-13.54212760925293],["▁مئی",-13.54214859008789],["▁fortælle",-13.542155265808104],["▁asan",-13.54218292236328],["泛",-13.542218208312988],["▁Formació",-13.542219161987305],["ተካ",-13.542221069335938],["掘",-13.542253494262695],["▁ქუჩა",-13.54225730895996],["▁putra",-13.542261123657228],["▁circular",-13.542275428771973],["▁ierobežo",-13.542275428771973],["クレジットカード",-13.542284965515137],["чести",-13.542288780212402],["▁제조",-13.542290687561035],["臉書",-13.542299270629885],["▁elsősorban",-13.5423002243042],["▁uzkondiĉoj",-13.5423002243042],["
▁wyjaśni",-13.5423002243042],["▁ödəniş",-13.5423002243042],["▁Википедија",-13.5423002243042],["▁впервые",-13.5423002243042],["▁پیپلز",-13.5423002243042],["▁अचानक",-13.5423002243042],["▁চেষ্টা",-13.5423002243042],["▁ಆಹ್ವಾನ",-13.5423002243042],["▁ጨምሮ",-13.5423002243042],["▁gezondheid",-13.542301177978516],["▁Бак",-13.542301177978516],["▁Yəni",-13.54230499267578],["▁Nhận",-13.542306900024414],["▁venligst",-13.54230785369873],["testületének",-13.542311668395996],["▁मुख्यमन्त्री",-13.542312622070312],["▁නිලධාරි",-13.542313575744627],["▁Metropolitan",-13.542319297790527],["▁ຊາ",-13.542319297790527],["▁политичко",-13.542320251464844],["▁종류",-13.542320251464844],["വന്ന",-13.542330741882324],["▁கீ",-13.542348861694336],["▁euskaldun",-13.542350769042969],["做法",-13.54235553741455],["▁condus",-13.542357444763184],["▁پیوست",-13.5423583984375],["▁מענטשן",-13.542365074157717],["▁qabtay",-13.542366027832031],["▁menyokong",-13.542369842529297],["▁náð",-13.542369842529297],["▁voeding",-13.542372703552246],["▁registrera",-13.542381286621094],["▁आधारित",-13.54238510131836],["吃饭",-13.54238510131836],["▁شوق",-13.542387008666992],["zoj",-13.542399406433104],["▁7.1",-13.542399406433104],["▁Komitəsinin",-13.542402267456056],["できて",-13.5424165725708],["▁ربما",-13.5424222946167],["▁steun",-13.542439460754396],["▁توجہ",-13.542445182800291],["▁kemudahan",-13.54245376586914],["▁વાંચ",-13.542458534240724],["▁silver",-13.542482376098633],["▁beer",-13.542485237121582],["▁priprema",-13.542488098144531],["۱۲",-13.54249095916748],["▁الجمهورية",-13.54249668121338],["▁sikt",-13.542497634887695],["סיר",-13.542506217956545],["▁окружающ",-13.542506217956545],["▁ದಾಳಿ",-13.542508125305176],["▁Väst",-13.542510986328123],["▁३५",-13.5425386428833],["▁falu",-13.542540550231934],["警告",-13.542552947998049],["рге",-13.542558670043944],["▁yerel",-13.542558670043944],["▁майна",-13.542560577392578],["▁عنصر",-13.542566299438477],["▁кала",-13.542574882507324],["ामि",-13.542607307434082],["City",-13.54262924194336],["▁создава",-13.54262924194336],["▁Hånd",-13.542640686035156],["vraag",-13.542659759521484],["მაგ",-13.542672157287598],["íní",-13.542678833007812],["▁estudiar",-13.54270839691162],["▁тасма",-13.542709350585938],["הפך",-13.542723655700684],["anstalt",-13.542724609375],["▁болбойт",-13.542730331420898],["▁Podemos",-13.542750358581545],["▁ስት",-13.54275131225586],["是什麼",-13.542755126953123],["▁सेयर",-13.54276180267334],["▁katoli",-13.54279899597168],["klai",-13.542831420898438],["▁dålig",-13.542852401733398],["ామని",-13.542858123779297],["▁Vilken",-13.542866706848145],["▁establecer",-13.542879104614258],["▁ilmainen",-13.542895317077637],["லாக",-13.542898178100586],["▁terén",-13.542901992797852],["ימת",-13.542912483215332],["hög",-13.542949676513672],["▁Venezia",-13.542956352233888],["▁etapas",-13.542963981628418],["▁రోజంతా",-13.542963981628418],["▁rozhodnutí",-13.542972564697266],["ріг",-13.542977333068848],["▁Vivo",-13.54298210144043],["▁nettsider",-13.54298973083496],["kvarter",-13.542994499206545],["▁estive",-13.542997360229492],["▁εκείνη",-13.543009757995604],["LOT",-13.543010711669922],["▁hár",-13.543025970458984],["▁Stran",-13.5430269241333],["最早",-13.543041229248049],["▁అందరి",-13.54305362701416],["を利用して",-13.54305934906006],["פלא",-13.543062210083008],["▁verseker",-13.54307460784912],["කල්",-13.54308795928955],["▁kampe",-13.543088912963867],["▁normaali",-13.543100357055664],["жить",-13.543102264404297],["4.6",-13.54311180114746],["▁నాలుగు",-13.54312515258789],["▁പ്രസ",-13.543133735656738],["▁konsol",-13.543145179748535],["ょ",-13.5
43157577514648],["▁وعده",-13.543197631835938],["ાશે",-13.543224334716797],["▁Abon",-13.54323387145996],["ლოგ",-13.543251991271973],["131",-13.543272972106934],["XXX",-13.543285369873049],["▁ယခု",-13.543301582336426],["▁peavad",-13.543309211730955],["føl",-13.54333782196045],["的规定",-13.543356895446776],["▁युग",-13.543360710144045],["▁dalo",-13.543391227722168],["سات",-13.543394088745115],["▁смрти",-13.54339599609375],["បង់",-13.543397903442385],["veta",-13.5433988571167],["முக",-13.543400764465332],["▁цагт",-13.54340362548828],["▁paine",-13.543416023254396],["▁ბევრ",-13.543427467346191],["politi",-13.543428421020508],["出來的",-13.54343605041504],["若い",-13.543437004089355],["▁vakit",-13.54344654083252],["▁olingan",-13.543448448181152],["uksien",-13.543451309204102],["ecido",-13.543453216552734],["wakil",-13.54345703125],["határ",-13.543474197387695],["૨૦",-13.543475151062012],["▁kendisi",-13.543496131896973],["нуться",-13.543498992919922],["▁دهم",-13.543505668640137],["▁gaus",-13.543521881103516],["lectiu",-13.543530464172363],["▁muligheter",-13.543532371520996],["▁unngå",-13.543540954589844],["▁procese",-13.54357624053955],["論文",-13.54357624053955],["UPA",-13.543578147888184],["ლოდ",-13.543588638305664],["▁cukru",-13.54359245300293],["▁annos",-13.543594360351562],["▁rinnova",-13.543667793273926],["▁κάνεις",-13.543670654296877],["विक",-13.543706893920898],["ॉन",-13.543708801269531],["▁warten",-13.543710708618164],["shah",-13.54372215270996],["▁pošto",-13.543736457824709],["klage",-13.543746948242188],["ÀN",-13.543792724609377],["▁ರಾಜ್ಯದ",-13.543801307678224],["▁صدق",-13.543808937072754],["בין",-13.543815612792969],["▁njeno",-13.543830871582031],["▁यत्",-13.543830871582031],["▁sef",-13.543838500976562],["альнай",-13.543843269348145],["RSS",-13.54385471343994],["▁soviet",-13.543856620788574],["▁Tsar",-13.543859481811523],["▁במי",-13.543859481811523],["▁Safe",-13.543863296508787],["▁gerð",-13.543869972229004],["的機會",-13.543880462646484],["શું",-13.543886184692385],["大約",-13.54389476776123],["▁Maine",-13.543912887573242],["енс",-13.54392910003662],["nän",-13.543940544128418],["▁Drei",-13.543952941894531],["אכט",-13.543983459472656],["▁العالي",-13.544005393981934],["álnych",-13.544012069702148],["▁sünni",-13.544024467468262],["▁Opret",-13.544032096862791],["▁ДВ",-13.544038772583008],["▁papan",-13.544060707092283],["▁oladi",-13.544079780578612],["მართა",-13.544092178344728],["skí",-13.544100761413574],["▁daban",-13.54411506652832],["ෙල්",-13.544118881225586],["Га",-13.54412078857422],["▁זהו",-13.544148445129396],["డమే",-13.544151306152344],["▁färdig",-13.54415798187256],["İLİ",-13.544160842895508],["迅",-13.544163703918455],["والی",-13.544182777404783],["látás",-13.544189453125],["▁מענטש",-13.54420280456543],["危險",-13.544206619262695],["cogni",-13.54421615600586],["▁Pär",-13.544219017028809],["▁vrat",-13.544222831726074],["โบราณ",-13.544236183166504],["▁தொடர",-13.544236183166504],["▁հանձն",-13.544239044189451],["សត្វ",-13.54423999786377],["롱",-13.544240951538086],["▁Egyesület",-13.544241905212402],["▁câștig",-13.544241905212402],["▁kierunku",-13.544241905212402],["▁līdzekļu",-13.544241905212402],["▁mempengaruhi",-13.544241905212402],["▁nágrenni",-13.544241905212402],["▁payudara",-13.544241905212402],["▁rögzít",-13.544241905212402],["▁scientifique",-13.544241905212402],["▁þurfa",-13.544241905212402],["▁şəhid",-13.544241905212402],["▁реєстрації",-13.544241905212402],["▁үналгысы",-13.544241905212402],["หน้ากาก",-13.54424285888672],["▁разглежда",-13.54424285888672],["▁छोरा",-13.54424285888672],["▁언론",-13.
54424285888672],["你想",-13.544244766235352],["▁አስፈላጊ",-13.544245719909668],["นักเตะ",-13.5442476272583],["▁Sûriyeyê",-13.544248580932615],["▁ներկայացրել",-13.544248580932615],["▁yerleş",-13.544252395629885],["▁Çfarë",-13.544252395629885],["▁distanza",-13.544254302978516],["▁minulosti",-13.544254302978516],["τσα",-13.544260025024414],["▁त्यांचे",-13.544260025024414],["我们将",-13.544260025024414],["lampa",-13.544267654418944],["▁организације",-13.544267654418944],["▁jälki",-13.544271469116213],["ยาม",-13.544273376464844],["▁qısa",-13.544275283813477],["▁إحدى",-13.544278144836426],["▁lähde",-13.54428482055664],["▁тохиолдолд",-13.544286727905272],["▁Mamlaka",-13.544289588928224],["▁цябе",-13.544292449951172],["伟大",-13.544293403625488],["▁हवाई",-13.544294357299805],["▁učitelj",-13.54429817199707],["就業",-13.544322967529297],["ədək",-13.544330596923828],["vju",-13.544336318969728],["▁úvod",-13.544347763061523],["不夠",-13.544351577758787],["ป้าย",-13.544352531433104],["▁oferecer",-13.544371604919434],["▁présenter",-13.544405937194824],["▁temática",-13.544408798217772],["▁تعهد",-13.544421195983888],["▁мысль",-13.544424057006836],["▁ಆಕೆ",-13.544434547424316],["的方向",-13.544434547424316],["古代",-13.544439315795898],["▁voilà",-13.544447898864746],["▁bizə",-13.54445743560791],["▁dennoch",-13.54445743560791],["▁LEGO",-13.544462203979492],["生育",-13.544462203979492],["▁rijden",-13.544466018676758],["▁граници",-13.54447078704834],["▁boljše",-13.544496536254885],["▁sāku",-13.5444974899292],["▁Լի",-13.544499397277832],["▁பண",-13.54450511932373],["▁نمي",-13.544514656066896],["▁reggel",-13.544544219970703],["গঞ্জ",-13.54456901550293],["▁finais",-13.544578552246094],["▁பயிற்சி",-13.544584274291992],["δρομο",-13.54459285736084],["▁कलेज",-13.544599533081056],["▁fantastisch",-13.544601440429688],["▁periculo",-13.544604301452637],["▁سودا",-13.5446195602417],["▁hideg",-13.544644355773926],["toiminnan",-13.54465389251709],["하시면",-13.54465389251709],["▁потребители",-13.544668197631836],["▁tehdy",-13.544687271118164],["▁доо",-13.544689178466797],["ాలతో",-13.544697761535645],["בצע",-13.544708251953123],["※",-13.544718742370604],["▁flink",-13.544719696044922],["전에",-13.544721603393556],["▁ಜೋ",-13.544727325439451],["▁maandag",-13.54473114013672],["reck",-13.544737815856934],["ugur",-13.544769287109377],["ആര്",-13.544782638549805],["ಎಫ್",-13.544803619384766],["▁amator",-13.544805526733398],["kliai",-13.544832229614258],["isaa",-13.544844627380373],["▁සතුට",-13.544845581054688],["▁brede",-13.544846534729004],["ਾਲੀ",-13.54485034942627],["נדס",-13.544852256774902],["▁slovenskih",-13.54486083984375],["▁Kaap",-13.544862747192385],["јеш",-13.544864654541016],["▁ଜାଣିବା",-13.544878959655762],["▁věc",-13.544886589050291],["വിന്റെ",-13.544889450073242],["▁තියනවා",-13.544891357421877],["barth",-13.54490566253662],["▁asegurar",-13.544910430908203],["▁인해",-13.544927597045898],["øvelse",-13.544951438903809],["තින්",-13.544962882995604],["iņām",-13.544987678527832],["عليه",-13.544987678527832],["▁dôležité",-13.544997215270996],["ਤਾਂ",-13.545003890991213],["▁färger",-13.545023918151855],["▁یوم",-13.545026779174805],["▁thuhet",-13.545058250427246],["овский",-13.545059204101562],["▁tong",-13.545138359069824],["leit",-13.545146942138672],["ിലേക്കു",-13.545172691345217],["ग्रे",-13.545184135437012],["ສາດ",-13.545212745666504],["大師",-13.545239448547363],["▁ställa",-13.545246124267578],["▁posebn",-13.54525089263916],["▁Katiba",-13.545254707336426],["ીંગ",-13.545255661010742],["▁miglior",-13.545255661010742],["▁малыш",-13.545263290405272],["▁kanpo",-13
.54526424407959],["▁shape",-13.545299530029297],["ეცა",-13.545310020446776],["▁تقريبا",-13.545317649841309],["ർത്തി",-13.545320510864258],["▁profilo",-13.54535675048828],["▁डाय",-13.545368194580078],["▁இடை",-13.54537868499756],["▁documentación",-13.54538631439209],["▁Tě",-13.545395851135254],["▁سهام",-13.545397758483888],["▁कां",-13.545400619506836],["ভু",-13.545408248901367],["▁Мило",-13.545455932617188],["はもちろん",-13.54547882080078],["دري",-13.545492172241213],["▁реци",-13.545527458190918],["მინისტრი",-13.545534133911133],["▁очі",-13.54554843902588],["pagina",-13.545557975769045],["ൊന്നു",-13.545564651489258],["▁kontrollera",-13.545567512512209],["მულ",-13.545577049255373],["▁формі",-13.545578002929688],["▁usual",-13.545587539672852],["▁औषध",-13.54560375213623],["OGA",-13.545609474182127],["сва",-13.545609474182127],["1-1",-13.545619010925291],["TEA",-13.545619010925291],["Sho",-13.54561996459961],["▁Hend",-13.545632362365724],["עיר",-13.545642852783203],["терінің",-13.545648574829102],["्त",-13.545649528503418],["▁організм",-13.545652389526367],["▁Bazı",-13.545653343200684],["ไม่ค่อย",-13.545713424682615],["▁భాగ",-13.545729637145996],["ԱԳ",-13.545730590820312],["▁चै",-13.54576015472412],["▁خوشی",-13.545777320861816],["大樓",-13.545804977416992],["ሁለተኛ",-13.545805931091309],["That",-13.545811653137209],["▁Sağ",-13.54582977294922],["▁Hvilke",-13.545833587646484],["▁لڳا",-13.545838356018066],["▁behe",-13.545841217041016],["▁szeme",-13.545842170715332],["ပိတ္",-13.545849800109863],["▁kriterij",-13.54585075378418],["პრეზიდენტ",-13.545897483825684],["▁Ügy",-13.54590129852295],["osasto",-13.545916557312012],["▁hüquqlar",-13.545934677124023],["punk",-13.545936584472656],["▁Anita",-13.545940399169922],["شرب",-13.545952796936035],["▁הקב",-13.545953750610352],["▁смог",-13.545969009399414],["▁Căn",-13.545977592468262],["▁Katrin",-13.545979499816896],["простран",-13.545985221862791],["▁ਬਣਾ",-13.546003341674805],["энэхүү",-13.546009063720703],["öhn",-13.546028137207031],["▁bolalar",-13.54603099822998],["▁โทร",-13.546038627624512],["מאה",-13.54605197906494],["▁Independent",-13.546058654785156],["▁বয়স",-13.546070098876951],["▁एप",-13.546090126037598],["توس",-13.546100616455078],["방안",-13.54610538482666],["▁models",-13.54611110687256],["▁доп",-13.546113967895508],["沢",-13.546120643615724],["foss",-13.54612159729004],["கிற",-13.546124458312988],["вив",-13.546130180358888],["▁villkor",-13.54613208770752],["ынып",-13.546133041381836],["▁oficiáln",-13.546133041381836],["▁шести",-13.546149253845217],["фашист",-13.546162605285645],["隠",-13.54616355895996],["Ղ",-13.546172142028809],["▁informaci",-13.546184539794922],["บังคับ",-13.546186447143556],["▁لېنک",-13.546186447143556],["▁daitezke",-13.546187400817873],["▁précédent",-13.546187400817873],["▁tietysti",-13.546187400817873],["▁vasárnap",-13.546187400817873],["▁амбасадор",-13.546187400817873],["▁испытыва",-13.546187400817873],["▁худалдан",-13.546187400817873],["▁බෞද්ධ",-13.546187400817873],["▁አህመድ",-13.546187400817873],["▁számítógép",-13.546188354492188],["▁қаңтар",-13.546189308166504],["▁ಬ್ಯಾಂಕ್",-13.546189308166504],["▁wspiera",-13.54619026184082],["▁permukaan",-13.546191215515137],["▁ittiham",-13.54619312286377],["▁šobrīd",-13.54619312286377],["▁ਮੈਂਬਰ",-13.546196937561035],["▁ਹੋਵੇਗਾ",-13.546198844909668],["chiamo",-13.546199798583984],["▁Aínda",-13.546201705932615],["▁زرداری",-13.546202659606934],["▁ніколи",-13.546205520629885],["▁fulgt",-13.54621124267578],["▁өтүп",-13.546212196350098],["▁ekipi",-13.546218872070312],["▁sarana",-13.546218872070312],["▁Labora
tori",-13.546231269836426],["しましたが",-13.546236038208008],["▁Chuyên",-13.54624080657959],["▁πράγμα",-13.546250343322754],["haku",-13.546252250671388],["жайы",-13.54626178741455],["SIK",-13.546274185180664],["▁روزگار",-13.546277046203612],["▁необходими",-13.546284675598145],["▁גלי",-13.546284675598145],["▁Milyen",-13.546287536621094],["在香港",-13.546295166015623],["▁வங்கி",-13.54631233215332],["▁대해서",-13.546316146850586],["▁והיא",-13.5463228225708],["ليب",-13.546339988708496],["▁Fatih",-13.546340942382812],["▁viața",-13.546354293823242],["▁сути",-13.546358108520508],["▁broker",-13.546363830566406],["▁μιλά",-13.546378135681152],["▁persegui",-13.546382904052734],["валась",-13.546384811401367],["▁absolutely",-13.546403884887695],["safe",-13.546417236328123],["▁ótimo",-13.546417236328123],["ത്തേക്ക്",-13.546436309814451],["▁tarkibiga",-13.54643726348877],["▁membru",-13.54644775390625],["utuminen",-13.54647445678711],["දේශ",-13.546478271484377],["▁જૂન",-13.546478271484377],["▁பண்ண",-13.546496391296388],["▁jaunas",-13.546497344970703],["▁नवी",-13.546502113342283],["▁stretch",-13.546503067016602],["▁kaikkien",-13.546506881713867],["विरोधी",-13.546507835388184],["▁starosti",-13.546510696411133],["主持人",-13.54651165008545],["▁factum",-13.54651927947998],["▁teror",-13.546520233154297],["番号",-13.546557426452637],["líci",-13.546581268310549],["kengät",-13.546608924865724],["▁Possi",-13.546619415283203],["打工",-13.546623229980469],["なんだ",-13.546625137329102],["▁Khalid",-13.546640396118164],["ІІІ",-13.546650886535645],["ઉં",-13.546675682067873],["íteni",-13.546679496765137],["▁နှင့်",-13.546696662902832],["मिल",-13.546703338623049],["▁kostnader",-13.546712875366213],["тивни",-13.546724319458008],["▁Pajak",-13.546728134155272],["დას",-13.546732902526855],["みた",-13.546733856201172],["▁foglal",-13.546737670898438],["ियम",-13.54674243927002],["ităţile",-13.546748161315918],["▁tuota",-13.546759605407717],["▁encantado",-13.546778678894045],["▁Abba",-13.546782493591309],["инвестицион",-13.54678440093994],["மான்",-13.546807289123535],["▁위치한",-13.546808242797852],["lendiği",-13.546809196472168],["▁Storm",-13.546831130981444],["▁Эк",-13.546853065490724],["णारा",-13.54686164855957],["▁коридор",-13.546870231628418],["ご注文",-13.54687213897705],["▁تند",-13.546874046325684],["▁दाम",-13.546883583068848],["▁центрі",-13.546890258789062],["▁ondersteuning",-13.54689121246338],["▁panto",-13.54689121246338],["▁шатны",-13.546893119812012],["▁соба",-13.546910285949709],["▁Qarax",-13.546916961669922],["▁kaal",-13.546918869018556],["gwr",-13.546926498413086],["▁yilning",-13.546932220458984],["▁Сала",-13.5469331741333],["изирани",-13.546950340270996],["ávka",-13.546964645385742],["▁Seuraava",-13.54697608947754],["▁Τρίτη",-13.546998023986816],["▁базы",-13.547004699707031],["▁nachdem",-13.547019004821776],["▁वॉ",-13.547024726867676],["rojen",-13.547039985656738],["κάν",-13.54704761505127],["telný",-13.547063827514648],["▁tiedon",-13.547079086303713],["▁ökad",-13.547080039978027],["iële",-13.547083854675291],["▁Demi",-13.547083854675291],["ТҮ",-13.547121047973633],["形成了",-13.547127723693848],["арч",-13.547141075134276],["▁nabarmen",-13.547141075134276],["ทั้งสอง",-13.54714584350586],["▁дошли",-13.54715061187744],["▁puja",-13.54717254638672],["▁records",-13.547198295593262],["▁palli",-13.547203063964844],["ÍS",-13.547218322753906],["▁sursa",-13.547242164611816],["лөрдү",-13.547245025634766],["īr",-13.54726505279541],["ቃል",-13.54726791381836],["▁elementu",-13.547283172607422],["вілі",-13.547286033630373],["動力",-13.547287940979004],["ૂર",-13.5
4729175567627],["▁ریال",-13.547323226928713],["▁रखा",-13.547323226928713],["▁adauga",-13.54733180999756],["▁aktivt",-13.547340393066406],["▁پلار",-13.547346115112305],["▁انج",-13.54736042022705],["suke",-13.547392845153809],["mandi",-13.547418594360352],["CIN",-13.547430992126465],["▁довго",-13.547445297241213],["▁jury",-13.547467231750488],["ाबाद",-13.547470092773438],["▁carros",-13.547471046447754],["aink",-13.54748249053955],["▁нагляд",-13.547490119934082],["▁falando",-13.547496795654297],["▁kred",-13.547511100769045],["▁лоша",-13.547518730163574],["יאל",-13.547529220581056],["▁précise",-13.547533988952637],["▁wyraz",-13.547544479370115],["១៧",-13.547553062438965],["▁llamada",-13.547554969787598],["▁tutar",-13.547564506530762],["вень",-13.547574043273926],["OKA",-13.54758071899414],["▁ಜಿಲ್ಲೆಯ",-13.547595977783203],["նք",-13.547599792480469],["ണ്ടി",-13.54762363433838],["▁Nicolas",-13.547627449035645],["▁retejo",-13.547627449035645],["▁Rif",-13.547637939453123],["▁Sett",-13.54764175415039],["▁įgyvendin",-13.547643661499023],["▁kuria",-13.54764461517334],["вици",-13.547653198242188],["ഡീ",-13.547653198242188],["▁cihê",-13.547666549682615],["▁Piano",-13.547703742980955],["گھ",-13.547719955444336],["従",-13.547738075256348],["▁høyre",-13.547751426696776],["લુ",-13.54776096343994],["▁കഴിഞ്ഞു",-13.54776096343994],["▁norr",-13.547761917114258],["▁sindikat",-13.54776382446289],["▁menangis",-13.547775268554688],["ttämään",-13.547792434692385],["▁boys",-13.547796249389648],["βερ",-13.547821044921877],["ങ്ങാ",-13.547821044921877],["一代",-13.54783058166504],["▁Servei",-13.547839164733888],["ическом",-13.547849655151367],["▁مارک",-13.547861099243164],["▁الأوروبي",-13.54786777496338],["▁Bam",-13.547871589660645],["მთ",-13.547881126403809],["▁advise",-13.547893524169922],["▁шығарма",-13.547919273376465],["▁нашых",-13.547924041748049],["fragen",-13.547953605651855],["▁삶",-13.54799461364746],["▁தெ",-13.54800033569336],["▁Мало",-13.548009872436523],["▁tölu",-13.548026084899902],["▁invasi",-13.548041343688965],["▁သုံး",-13.548043251037598],["gál",-13.548059463500977],["▁rengini",-13.548080444335938],["▁राख्न",-13.548090934753418],["▁Evet",-13.548093795776367],["▁krab",-13.548113822937012],["大きい",-13.54812240600586],["နစ္",-13.548130989074709],["▁ರೇಟಿಂಗ್",-13.548134803771973],["▁ଫିଲ୍ମ",-13.548135757446287],["áætlun",-13.548136711120604],["ቫ",-13.548136711120604],["វិទ្យា",-13.548136711120604],["▁niektórych",-13.548136711120604],["▁periudhë",-13.548136711120604],["▁εξέλιξη",-13.548136711120604],["▁ομάδες",-13.548136711120604],["▁մլն",-13.548136711120604],["▁נוספות",-13.548136711120604],["▁حیثیت",-13.548136711120604],["▁आचार्य",-13.548136711120604],["▁இறுதி",-13.548136711120604],["▁கல்லூரி",-13.548136711120604],["tingimused",-13.548137664794922],["▁Sejarah",-13.548137664794922],["▁Жаңы",-13.548137664794922],["▁ووژل",-13.548138618469238],["▁ສໍາຫລັບ",-13.548138618469238],["▁lēmumu",-13.548139572143556],["▁hoeveel",-13.548140525817873],["▁उत्पन्न",-13.548140525817873],["▁Загора",-13.548142433166504],["▁грамадска",-13.548142433166504],["试图",-13.548142433166504],["มุ่ง",-13.548144340515137],["▁రచన",-13.548144340515137],["▁rozpozna",-13.548145294189451],["▁zwrot",-13.548145294189451],["▁کورنۍ",-13.548145294189451],["▁सभापति",-13.548145294189451],["▁Phường",-13.548152923583984],["▁передбачає",-13.548155784606934],["ເພ",-13.5481595993042],["▁ఏదో",-13.548161506652832],["베이",-13.54816436767578],["▁ครู",-13.548165321350098],["▁Sicilia",-13.548166275024414],["ቷል፡፡",-13.548171043395996],["▁ಇದಕ್ಕೆ",-13.548174858093262],["▁vi
dieť",-13.548178672790527],["▁כפר",-13.548192024230955],["▁तरीके",-13.548192977905272],["▁makakuha",-13.54819679260254],["ංක",-13.54820156097412],["▁nepři",-13.54820728302002],["dzes",-13.548208236694336],["ებო",-13.548212051391602],["▁гуля",-13.548212051391602],["▁Ministeri",-13.548216819763184],["▁esperanza",-13.548221588134766],["▁elhelyez",-13.548227310180664],["▁விமான",-13.54823112487793],["▁darle",-13.54824447631836],["bolo",-13.548246383666992],["▁uzņēmumu",-13.548251152038574],["тэры",-13.548254013061523],["▁ghế",-13.548260688781738],["וואַ",-13.548262596130373],["▁направля",-13.548264503479004],["▁Terma",-13.54828929901123],["stance",-13.548290252685549],["емый",-13.548309326171877],["▁जाणार",-13.548321723937988],["暮らし",-13.54832363128662],["▁verhouding",-13.548325538635254],["▁podes",-13.548330307006836],["▁составил",-13.54835033416748],["▁במידה",-13.548364639282228],["▁pasaran",-13.548369407653809],["▁Haberler",-13.548377990722656],["▁любого",-13.54838752746582],["▁ดาว",-13.548392295837402],["▁работают",-13.548413276672363],["▁छन",-13.548418045043944],["▁tegevuse",-13.548428535461426],["▁upgrade",-13.548431396484377],["▁پۇ",-13.548444747924805],["▁сказати",-13.548450469970703],["▁ಮಹಿಳೆಯರ",-13.548450469970703],["yyden",-13.548452377319336],["▁Bine",-13.548465728759766],["▁teist",-13.54848575592041],["のであれば",-13.54849338531494],["▁הבאים",-13.548504829406738],["▁grønne",-13.548535346984863],["込む",-13.548542022705078],["អំ",-13.548547744750977],["▁результатов",-13.548562049865724],["▁gazdag",-13.54856300354004],["vielas",-13.548579216003418],["OKO",-13.548582077026367],["▁కారు",-13.548595428466797],["ចាប់",-13.548601150512695],["Suomen",-13.548613548278809],["וטי",-13.548617362976074],["ruumi",-13.548622131347656],["▁وفقا",-13.548624038696287],["早在",-13.548632621765137],["▁ээж",-13.548639297485352],["캐",-13.548644065856934],["▁בשם",-13.548653602600098],["мала",-13.548663139343262],["கொண்ட",-13.548669815063477],["ahnya",-13.54868984222412],["fill",-13.548700332641602],["▁reakci",-13.548705101013184],["▁قصة",-13.548707008361816],["сько",-13.548707962036133],["mitten",-13.548714637756348],["impl",-13.548718452453612],["▁priori",-13.548724174499512],["▁संग्रह",-13.548733711242676],["▁partout",-13.548763275146484],["▁ควร",-13.548775672912598],["▁Пример",-13.548791885375977],["▁6.5",-13.54881477355957],["▁adults",-13.548845291137695],["Mos",-13.548855781555176],["▁Kyse",-13.548861503601074],["▁വിള",-13.548882484436035],["▁fugi",-13.54888916015625],["▁oseba",-13.548891067504885],["ສິນ",-13.548908233642578],["ემი",-13.548911094665527],["▁Papier",-13.548928260803224],["արյան",-13.548935890197754],["afoje",-13.548941612243652],["दायक",-13.54896068572998],["պար",-13.548982620239258],["වන්නේ",-13.549013137817385],["▁vitse",-13.5490140914917],["▁Vụ",-13.549027442932127],["이하",-13.549032211303713],["িস",-13.549034118652344],["▁odnose",-13.549041748046877],["▁راشد",-13.549042701721191],["▁tuomet",-13.549059867858888],["▁boto",-13.549074172973633],["気分",-13.54909896850586],["▁urmări",-13.54910373687744],["mír",-13.54911994934082],["โช",-13.54912567138672],["internet",-13.549132347106934],["ICK",-13.549154281616213],["▁keramik",-13.549171447753906],["▁pigem",-13.549175262451172],["स्थापन",-13.549186706542969],["េត",-13.549190521240234],["coop",-13.549199104309082],["▁acı",-13.549216270446776],["▁osto",-13.549239158630373],["▁vloer",-13.54925537109375],["▁Thaçi",-13.549260139465332],["ктен",-13.549274444580078],["lanta",-13.549301147460938],["▁Loren",-13.54930305480957],["pach",-13.5493745803833],["تو
جه",-13.549409866333008],["▁හේතුවෙන්",-13.549410820007324],["▁777",-13.549456596374512],["плати",-13.549490928649902],["▁lucra",-13.549497604370115],["▁گردن",-13.54950714111328],["סור",-13.549531936645508],["优秀的",-13.54953670501709],["▁taldea",-13.549541473388672],["1984",-13.549564361572266],["වෙන්න",-13.549564361572266],["▁felvétel",-13.549568176269531],["▁கடை",-13.54957675933838],["▁замов",-13.549598693847656],["▁Salg",-13.549623489379885],["▁방식",-13.549631118774414],["นี้เป็น",-13.54964542388916],["___",-13.54965877532959],["▁վաղ",-13.549659729003906],["ヨ",-13.54973602294922],["▁lita",-13.549738883972168],["▁sovit",-13.549757957458496],["▁svarbi",-13.549809455871582],["▁noem",-13.549813270568848],["▁transplant",-13.54985523223877],["クリ",-13.549861907958984],["ण्यास",-13.549880981445312],["▁อาจ",-13.549903869628906],["▁Jaro",-13.549934387207031],["▁потребе",-13.549935340881348],["▁मांड",-13.549936294555664],["HAND",-13.549938201904297],["▁นอกจาก",-13.54994010925293],["▁разів",-13.549945831298828],["ുകളിൽ",-13.54997444152832],["▁παλι",-13.549975395202637],["लले",-13.549983978271484],["รหัส",-13.550020217895508],["ဒေသ",-13.550060272216797],["▁описание",-13.550067901611328],["▁nejaký",-13.550070762634276],["隣",-13.550070762634276],["▁llocs",-13.550071716308594],["▁вважає",-13.55008029937744],["ชื่น",-13.55008316040039],["กระแส",-13.550088882446287],["▁Hắn",-13.550089836120604],["▁campagna",-13.550089836120604],["▁pirsgirêk",-13.550089836120604],["▁vườn",-13.550089836120604],["▁začiatku",-13.550089836120604],["▁équipé",-13.550089836120604],["▁Каждый",-13.550089836120604],["▁Университет",-13.550089836120604],["▁Գլխավոր",-13.550089836120604],["▁انکشاف",-13.550089836120604],["▁تسلیم",-13.550089836120604],["▁مڪمل",-13.550089836120604],["▁நேற்று",-13.550089836120604],["▁පවසයි",-13.550089836120604],["▁behoort",-13.550090789794922],["▁분위기",-13.550090789794922],["علامہ",-13.550091743469238],["▁گورنر",-13.550093650817873],["▁zebra",-13.550094604492188],["▁Youth",-13.550095558166504],["▁İtalya",-13.550097465515137],["oral",-13.550098419189451],["▁proximité",-13.550101280212402],["▁уваги",-13.55010223388672],["▁naiset",-13.550104141235352],["▁лечения",-13.550104141235352],["▁élevé",-13.550107955932615],["▁здійснюється",-13.550108909606934],["ธรรมดา",-13.550119400024414],["▁tổn",-13.550119400024414],["▁жүрүп",-13.550121307373049],["▁چطور",-13.550121307373049],["▁badkamer",-13.550127983093262],["▁dłuższ",-13.550132751464844],["▁obwohl",-13.550138473510742],["▁Abschluss",-13.55014419555664],["▁tujuh",-13.550146102905272],["▁مجاهد",-13.550146102905272],["▁courage",-13.550151824951172],["▁pethau",-13.550165176391602],["▁хараа",-13.550171852111816],["రాజు",-13.550179481506348],["▁bûne",-13.550189971923828],["▁أنهم",-13.55019474029541],["▁mūzikas",-13.550204277038574],["पास",-13.550215721130373],["▁спазва",-13.550219535827637],["ÉP",-13.550230979919434],["▁ചെയ്യുന്നത്",-13.550236701965332],["▁македонската",-13.55024528503418],["▁Traba",-13.550260543823242],["▁таможенн",-13.55026149749756],["▁činnosť",-13.550263404846191],["▁Another",-13.550264358520508],["kunni",-13.550273895263672],["умно",-13.550280570983888],["ਟਕ",-13.550280570983888],["ặt",-13.55029010772705],["ასთან",-13.550328254699709],["▁pravilno",-13.550339698791504],["電源",-13.5503511428833],["sakit",-13.55035400390625],["▁иных",-13.550355911254885],["▁Casal",-13.550358772277832],["έρι",-13.550384521484377],["éndose",-13.550403594970703],["▁Eş",-13.55040454864502],["▁touche",-13.550415992736816],["pija",-13.550469398498535],["ljenih",-13.550473213195
8],["▁chua",-13.55048942565918],["зол",-13.550490379333496],["▁বিস্তারিত",-13.550494194030762],["▁chart",-13.550514221191406],["▁etape",-13.550528526306152],["▁tréning",-13.550528526306152],["१४",-13.550529479980469],["法定",-13.550537109375],["locat",-13.550544738769531],["Ад",-13.550559997558594],["▁conferencia",-13.55056381225586],["бага",-13.550567626953123],["ڪاري",-13.550567626953123],["▁પરિવાર",-13.550570487976074],["▁Ponta",-13.550579071044922],["▁krajiny",-13.550592422485352],["优先",-13.550606727600098],["▁Velg",-13.550612449645996],["▁filan",-13.550618171691896],["▁mwili",-13.55062770843506],["▁පැන",-13.550637245178224],["เหล่า",-13.550641059875488],["▁ಸಾಧ್ಯ",-13.550646781921388],["▁lâm",-13.55067253112793],["▁politiska",-13.550678253173828],["▁δεί",-13.550704956054688],["woh",-13.55070972442627],["existence",-13.550719261169434],["▁reicht",-13.5507230758667],["▁2008)",-13.550724983215332],["közlekedés",-13.550745010375977],["ുമെന്നും",-13.550747871398926],["▁Jerman",-13.55076026916504],["Alb",-13.550772666931152],["мөн",-13.550774574279783],["▁ostatnich",-13.550775527954102],["▁qəza",-13.550803184509276],["電動",-13.5508394241333],["ляга",-13.550848007202148],["▁уур",-13.550863265991213],["▁čtyř",-13.550865173339844],["▁տարածքում",-13.55088233947754],["опште",-13.55088710784912],["▁serata",-13.55088710784912],["▁लक्षण",-13.550896644592283],["118",-13.550897598266602],["▁المغ",-13.550902366638184],["ewî",-13.550923347473145],["▁енді",-13.55092716217041],["こともあります",-13.550947189331056],["▁Izen",-13.550958633422852],["▁jauni",-13.550983428955078],["හරි",-13.550984382629396],["قلق",-13.55099391937256],["▁military",-13.551002502441406],["▁sidomos",-13.551015853881836],["▁labas",-13.55106258392334],["▁aşama",-13.551070213317873],["▁Estonia",-13.551076889038086],["ealaí",-13.551077842712402],["▁taybet",-13.551080703735352],["▁pornografi",-13.551102638244627],["▁rahbari",-13.551108360290527],["μένους",-13.551115036010742],["लैंड",-13.55112648010254],["▁борбор",-13.551136016845703],["FAT",-13.551143646240234],["▁فائد",-13.551149368286133],["▁konsument",-13.55116844177246],["rahkan",-13.551180839538574],["97)",-13.551204681396484],["▁বন্ধু",-13.551204681396484],["▁извори",-13.551215171813965],["santa",-13.551219940185549],["طرق",-13.551220893859863],["不及",-13.551243782043455],["مدرسة",-13.551262855529783],["▁indirect",-13.551267623901367],["lediği",-13.551276206970217],["გრე",-13.551277160644531],["mpaan",-13.55129337310791],["هب",-13.55130386352539],["▁vendor",-13.551308631896973],["יקס",-13.551310539245604],["制造业",-13.551318168640137],["▁SDP",-13.551323890686035],["kkää",-13.551324844360352],["▁Sayın",-13.551335334777832],["yolu",-13.551351547241213],["šili",-13.551369667053224],["▁Yemen",-13.551399230957031],["▁ജില്ല",-13.551405906677246],["นิว",-13.55142879486084],["▁volje",-13.55142879486084],["▁Законом",-13.551444053649902],["▁იუ",-13.551450729370115],["▁litre",-13.551451683044434],["忽",-13.5514554977417],["▁کلن",-13.551467895507812],["ෂි",-13.551472663879396],["▁hozzászólás",-13.551474571228027],["rzymy",-13.551475524902344],["▁горд",-13.551499366760254],["WAS",-13.551525115966797],["ేష్",-13.551542282104492],["præ",-13.551549911499023],["▁대비",-13.551557540893556],["lingar",-13.551560401916504],["▁naiv",-13.551568031311035],["▁Vital",-13.551584243774414],["▁통합",-13.551605224609377],["▁ಮೋ",-13.551624298095703],["யாளர்",-13.5516357421875],["342",-13.551654815673828],["▁фокус",-13.55166244506836],["▁දැක්ක",-13.551671981811523],["▁sabía",-13.551688194274902],["▁induk",-13.55170726776123],["▁
പ്രായ",-13.551709175109863],["ខុស",-13.55171012878418],["▁주는",-13.551725387573242],["▁баба",-13.55173110961914],["ikų",-13.551734924316406],["▁nánar",-13.5517578125],["▁სოფ",-13.551777839660645],["ಕೊಳ್ಳುವ",-13.551787376403809],["▁comercializa",-13.551794052124023],["▁үйлдвэрийн",-13.551806449890137],["▁ფული",-13.551830291748049],["▁ripo",-13.551835060119627],["▁امير",-13.551883697509766],["AKT",-13.551889419555664],["▁называем",-13.551905632019045],["▁Hjem",-13.551913261413574],["▁öğrenme",-13.551915168762209],["▁container",-13.55192756652832],["ಭೂಮಿ",-13.55193328857422],["Più",-13.551955223083496],["หน่วย",-13.551960945129396],["▁uneori",-13.551977157592772],["ୟୁ",-13.55198574066162],["批评",-13.55199146270752],["ньому",-13.551997184753418],["いっぱい",-13.552043914794922],["ชลบุรี",-13.552044868469238],["ሀገሪቱ",-13.552046775817873],["▁lầm",-13.552046775817873],["▁sepenuhnya",-13.552046775817873],["▁veçantë",-13.552046775817873],["▁шыққан",-13.552046775817873],["▁հաջորդ",-13.552046775817873],["▁نتیجے",-13.552046775817873],["▁टक्के",-13.552046775817873],["▁दिवाळी",-13.552046775817873],["▁पंतप्रधान",-13.552046775817873],["▁পাঁচ",-13.552046775817873],["▁ଚର୍ଚ୍ଚିତ",-13.552046775817873],["▁ಫೇಸ್",-13.552046775817873],["▁დაახლოებით",-13.552046775817873],["▁스스로",-13.552046775817873],["▁Seura",-13.552047729492188],["▁Үкімет",-13.552047729492188],["▁नेहमी",-13.552047729492188],["▁költségvetés",-13.552048683166504],["▁रेल्वे",-13.552048683166504],["▁hilabete",-13.55204963684082],["▁Всеукраїнськ",-13.55204963684082],["▁ఎన్నో",-13.552050590515137],["▁vsa",-13.552051544189451],["▁ಎಂಬುದು",-13.55205249786377],["▁memainkan",-13.552054405212402],["▁līdzekļi",-13.552057266235352],["▁sodišče",-13.552057266235352],["▁Yılmaz",-13.552059173583984],["▁rättigheter",-13.55206298828125],["▁бүгінгі",-13.552064895629885],["▁Gleich",-13.5520658493042],["▁کمزور",-13.552069664001465],["▁huippu",-13.552071571350098],["▁Domnului",-13.55207633972168],["▁tarkoittaa",-13.552078247070312],["යයි",-13.552080154418944],["▁großer",-13.552081108093262],["arrière",-13.552082061767578],["▁Belajar",-13.552084922790527],["▁شیوه",-13.55208683013916],["▁эркин",-13.55209255218506],["▁আশা",-13.552101135253906],["▁لطيف",-13.552105903625488],["▁kursu",-13.55210781097412],["▁Нийт",-13.552109718322754],["▁bellezza",-13.55211067199707],["使える",-13.55211353302002],["значення",-13.552119255065918],["哪裡",-13.552123069763184],["的商品",-13.552124977111816],["▁gumi",-13.55215835571289],["▁Quam",-13.552163124084473],["▁mehnat",-13.552175521850586],["▁speech",-13.552176475524902],["▁blokk",-13.552196502685549],["ခန္႔",-13.552215576171877],["▁פּראָ",-13.552227020263672],["ဆယ္",-13.552227973937988],["▁ነዉ",-13.552248001098633],["▁خانگی",-13.552252769470217],["▁vyč",-13.55228042602539],["▁machines",-13.552305221557615],["▁तिला",-13.552309036254885],["▁ושל",-13.552313804626465],["эхээр",-13.55231475830078],["▁મહ",-13.552321434020996],["stolen",-13.552326202392578],["▁سازش",-13.552327156066896],["▁глум",-13.552331924438477],["▁cuprins",-13.552345275878906],["▁mû",-13.552349090576172],["▁hakkab",-13.552352905273438],["ćih",-13.552369117736816],["▁ყურ",-13.55238437652588],["▁akıllı",-13.552412033081056],["▁Yanga",-13.552434921264648],["▁очевид",-13.552452087402344],["hubungan",-13.55245590209961],["κοι",-13.552459716796877],["▁vyks",-13.55246353149414],["▁zapomni",-13.552482604980469],["▁премин",-13.55249309539795],["▁Comedy",-13.552498817443848],["▁ਸਕਦੀ",-13.55251407623291],["이지만",-13.552515983581545],["▁взяти",-13.552523612976074],["▁світов",-13.552523612976074],["ട്രോ
",-13.55252742767334],["279",-13.552536964416504],["▁Eduka",-13.55254077911377],["cíti",-13.55257511138916],["طئ",-13.55258560180664],["ઓમાં",-13.552589416503906],["ທຸລະກິດ",-13.552597045898438],["ვიდეო",-13.552597999572754],["ولات",-13.552637100219728],["▁средни",-13.552638053894045],["Master",-13.552648544311523],["▁पकड़",-13.552648544311523],["十七",-13.552654266357422],["ФА",-13.552679061889648],["또",-13.552690505981444],["委員",-13.552693367004396],["▁egészséges",-13.552708625793455],["الف",-13.552739143371582],["▁kuki",-13.552742004394531],["▁intelekt",-13.552742958068848],["▁místní",-13.552745819091797],["▁2,1",-13.552757263183594],["ליים",-13.55275821685791],["▁चोरी",-13.552785873413086],["▁Attila",-13.552786827087402],["Sex",-13.552844047546388],["sveit",-13.552844047546388],["০২",-13.552850723266602],["schreiben",-13.552854537963867],["▁crack",-13.552854537963867],["▁falleg",-13.552858352661133],["▁Prepara",-13.552878379821776],["ሷ",-13.552885055541992],["პოლი",-13.552897453308104],["▁조건",-13.5529146194458],["▁dubte",-13.552916526794434],["44)",-13.5529203414917],["▁അതില്",-13.552921295166016],["masalah",-13.552929878234863],["▁აფ",-13.552936553955078],["nički",-13.552956581115724],["▁pyet",-13.552958488464355],["▁fül",-13.552978515625],["▁மன்ன",-13.552984237670898],["álja",-13.552986145019531],["▁մասն",-13.552989959716797],["ENTER",-13.553007125854492],["王子",-13.55301570892334],["▁kelias",-13.553064346313477],["▁보이는",-13.553070068359377],["▁Пят",-13.553080558776855],["▁odber",-13.553093910217283],["▁කරා",-13.55309772491455],["意识到",-13.553107261657717],["násob",-13.553117752075195],["▁calm",-13.553140640258787],["▁некоја",-13.553154945373535],["4,8",-13.553157806396484],["ministeriö",-13.553163528442385],["szövetség",-13.553178787231444],["▁erbjuda",-13.553199768066406],["▁থাক",-13.553204536437988],["▁diminta",-13.55320644378662],["Austr",-13.553218841552734],["▁muziki",-13.553224563598633],["tinê",-13.553235054016112],["ūru",-13.553247451782228],["▁chiaro",-13.553247451782228],["忘れ",-13.553248405456545],["ирую",-13.553251266479492],["τρια",-13.553266525268556],["▁derefter",-13.553274154663086],["▁калат",-13.55327606201172],["▁ხელს",-13.553279876708984],["2.4",-13.553282737731934],["ሪዎች",-13.553297996520996],["▁löytyi",-13.553311347961426],["декабр",-13.55332374572754],["čal",-13.55335521697998],["了自己的",-13.553427696228027],["113",-13.55342960357666],["▁állami",-13.553438186645508],["▁Kela",-13.553468704223633],["ΩΣ",-13.55346965789795],["院长",-13.553472518920898],["ုိင္း",-13.553507804870604],["▁анан",-13.553522109985352],["לנד",-13.553524017333984],["নিক",-13.55356788635254],["▁Աբ",-13.553571701049805],["▁Humanos",-13.55357837677002],["▁gair",-13.553601264953612],["▁लड",-13.553613662719728],["▁praktische",-13.55362319946289],["▁Blues",-13.553631782531738],["பட்ட",-13.553640365600586],["arius",-13.553682327270508],["▁سیل",-13.553683280944824],["股市",-13.55369472503662],["▁sagatavo",-13.553701400756836],["▁pike",-13.553702354431152],["▁המצ",-13.553730010986328],["▁frontal",-13.55373477935791],["▁açıqlama",-13.553740501403809],["▁muziko",-13.55380153656006],["▁naucz",-13.553802490234377],["▁taglia",-13.55383014678955],["यति",-13.553834915161133],["▁Islamic",-13.55384635925293],["▁գրավ",-13.553860664367676],["▁Filo",-13.553872108459473],["יגן",-13.553874969482422],["▁fotografer",-13.553882598876951],["▁istoria",-13.55392360687256],["▁članov",-13.553929328918455],["▁200.000",-13.55395221710205],["淋",-13.55395793914795],["忽略",-13.553966522216797],["▁robe",-13.553980827331545],["ιέ",-13.5539
90364074709],["想想",-13.553995132446287],["ପାରିବ",-13.553997993469238],["lasku",-13.554000854492188],["▁آزمایش",-13.554001808166504],["ေဆြးေႏြး",-13.554006576538086],["אוניברסיטת",-13.554007530212402],["మోహన్",-13.554007530212402],["▁Künstler",-13.554007530212402],["▁həmişə",-13.554007530212402],["▁particolarmente",-13.554007530212402],["▁rješenje",-13.554007530212402],["▁schreef",-13.554007530212402],["▁зростання",-13.554007530212402],["▁муслиман",-13.554007530212402],["▁պիտի",-13.554007530212402],["▁آئندہ",-13.554007530212402],["▁শীর্ষ",-13.554007530212402],["▁తండ్రి",-13.554007530212402],["▁రెండో",-13.554007530212402],["▁රීඩක",-13.554007530212402],["▁algemene",-13.55400848388672],["▁компјутер",-13.55400848388672],["▁bersabda",-13.554009437561035],["▁tűnik",-13.554009437561035],["▁ევროპის",-13.554009437561035],["▁ዓላማ",-13.554009437561035],["▁сактоо",-13.554011344909668],["▁մրցույթ",-13.554011344909668],["▁izbe",-13.554014205932615],["▁crazy",-13.554015159606934],["▁ਜਾਵੇਗਾ",-13.55401611328125],["▁Francisc",-13.55402374267578],["▁வெளியே",-13.554024696350098],["▁colección",-13.554025650024414],["▁ambacho",-13.55402946472168],["▁esély",-13.554031372070312],["番組",-13.554032325744627],["▁priemonės",-13.554033279418944],["▁آرایش",-13.554033279418944],["▁لكرة",-13.554041862487791],["牛肉",-13.554043769836426],["▁keittiö",-13.55404567718506],["▁yüksəl",-13.554048538208008],["面臨",-13.554051399230955],["braut",-13.554054260253906],["▁vermeld",-13.554054260253906],["▁இதன்",-13.55405616760254],["▁aðstoð",-13.554059028625488],["▁أصحاب",-13.55406665802002],["▁असल्याने",-13.554067611694336],["▁негативно",-13.554075241088867],["有興趣",-13.554078102111816],["λία",-13.554105758666992],["▁അധികാര",-13.554107666015623],["▁këngë",-13.554110527038574],["▁офіс",-13.554128646850586],["▁ಸುದ್ದಿಗಳನ್ನು",-13.554128646850586],["▁аўта",-13.554134368896484],["▁୧୧",-13.554136276245115],["▁partnera",-13.554141998291016],["▁हेतु",-13.554163932800291],["▁දන්නේ",-13.554166793823242],["▁correspondiente",-13.554183959960938],["▁hào",-13.554190635681152],["回復",-13.554205894470217],["▁Mood",-13.55421257019043],["sichtlich",-13.554231643676758],["▁ئىگە",-13.554243087768556],["ávají",-13.55425262451172],["▁nordisk",-13.554261207580566],["▁పాత",-13.554274559020996],["590",-13.55428981781006],["▁स्थल",-13.55428981781006],["mängu",-13.55430507659912],["ήλ",-13.554311752319336],["▁smartfon",-13.554327964782717],["▁rëndë",-13.554330825805664],["▁veriler",-13.554343223571776],["džiau",-13.554356575012209],["▁садржај",-13.554357528686523],["▁prezo",-13.554407119750977],["▁Grecia",-13.55441951751709],["aktif",-13.55442714691162],["▁Sidoo",-13.554436683654783],["▁Tiếng",-13.554436683654783],["ジャー",-13.554443359375],["▁Boha",-13.554449081420898],["▁iránt",-13.554455757141112],["独自",-13.554457664489746],["▁rozhodne",-13.554458618164062],["▁Quiz",-13.554465293884276],["▁ಎನ್ನುವ",-13.55446720123291],["▁uğur",-13.554469108581545],["▁Стати",-13.554471015930176],["▁политику",-13.554473876953123],["▁Avia",-13.554492950439451],["▁ప్రాంత",-13.554515838623049],["▁tartozó",-13.554522514343262],["аараа",-13.554570198059082],["▁jasne",-13.55461311340332],["▁sny",-13.554637908935549],["▁serca",-13.554657936096191],["որի",-13.554676055908203],["갔",-13.554677963256836],["▁கெ",-13.554692268371582],["Luc",-13.554693222045898],["▁elektronski",-13.55469799041748],["▁Dekh",-13.554702758789062],["▁Орос",-13.55470848083496],["▁свойства",-13.554739952087402],["▁kimia",-13.554744720458984],["filmer",-13.554747581481934],["▁เสื้อ",-13.55474853515625],["時には",-13.554764747619
627],["EVE",-13.554766654968262],["▁băng",-13.554777145385742],["panjang",-13.554819107055664],["▁prestasi",-13.55483055114746],["▁glade",-13.554832458496094],["▁endringer",-13.55486297607422],["hoitaja",-13.554879188537598],["▁אינטרנט",-13.554899215698242],["tīs",-13.554901123046877],["▁byddai",-13.554920196533203],["▁dokázal",-13.554945945739746],["▁সামনে",-13.55495262145996],["▁trink",-13.554956436157228],["▁mieszkania",-13.554993629455566],["▁midden",-13.554994583129885],["▁۲۳",-13.5549955368042],["▁شف",-13.555002212524414],["▁paraules",-13.555004119873049],["과학",-13.55500602722168],["▁lidh",-13.555015563964844],["ilun",-13.555039405822754],["▁Južn",-13.555054664611816],["▁460",-13.555060386657717],["▁المحت",-13.55506420135498],["▁ফের",-13.555075645446776],["მიზ",-13.55508518218994],["▁جيدة",-13.555091857910156],["▁naći",-13.5551176071167],["▁számol",-13.555121421813965],["браць",-13.555134773254396],["കാലത്ത്",-13.555156707763672],["みてください",-13.555185317993164],["▁խնդիրներ",-13.555218696594238],["▁ionad",-13.55522632598877],["▁noul",-13.555230140686035],["▁būna",-13.555248260498049],["▁spełni",-13.555253982543944],["лых",-13.55526351928711],["▁sentire",-13.555272102355955],["ιου",-13.555274963378906],["▁Menye",-13.555274963378906],["▁oppe",-13.555286407470703],["ליה",-13.555310249328612],["▁hopp",-13.555330276489258],["人々",-13.555335998535156],["▁alarak",-13.555337905883787],["করণ",-13.555349349975586],["haug",-13.555354118347168],["是一款",-13.555364608764648],["▁tanıt",-13.555367469787598],["pít",-13.555418014526367],["فال",-13.555419921875],["▁ప్రమాదం",-13.555425643920898],["▁tänkte",-13.555438041687012],["▁seriøs",-13.55544662475586],["ڀر",-13.555449485778809],["NIH",-13.555450439453123],["SERV",-13.555452346801758],["▁даў",-13.555469512939451],["▁móvil",-13.555474281311035],["▁setengah",-13.555493354797363],["טפל",-13.555501937866213],["▁praedic",-13.555534362792969],["▁variere",-13.55553913116455],["техническ",-13.555551528930664],["▁reception",-13.555563926696776],["▁visione",-13.555582046508787],["эст",-13.55559253692627],["▁центъра",-13.555607795715332],["▁klucz",-13.555608749389648],["▁پارلمان",-13.555623054504396],["▁Relation",-13.55563259124756],["רוי",-13.555665016174316],["电力",-13.555665016174316],["▁1396",-13.555689811706545],["▁fér",-13.555696487426758],["▁Дзя",-13.555699348449709],["รัช",-13.555709838867188],["▁pkt",-13.555709838867188],["▁alguses",-13.55571174621582],["audžia",-13.555741310119627],["▁وغير",-13.555746078491213],["▁ມື້",-13.55574893951416],["zsák",-13.555758476257324],["▁tuleks",-13.555764198303224],["Ing",-13.555768013000488],["iniuose",-13.555768966674805],["▁គ្រឿង",-13.555784225463867],["문제",-13.55579662322998],["機車",-13.555805206298828],["IĆ",-13.555809020996094],["▁marginal",-13.55580997467041],["▁کروا",-13.555819511413574],["▁สนใจ",-13.555819511413574],["mayıb",-13.555822372436523],["▁המחיר",-13.55583667755127],["▁banki",-13.555851936340332],["▁2011)",-13.555859565734863],["imli",-13.55586051940918],["▁tyske",-13.555868148803713],["▁адамды",-13.555870056152344],["くなり",-13.55587673187256],["▁അടിസ്ഥാന",-13.555891036987305],["▁esame",-13.555893898010254],["depan",-13.55589771270752],["▁قې",-13.555904388427734],["▁поврат",-13.555916786193848],["Bon",-13.555920600891112],["当你",-13.555936813354492],["ందే",-13.555956840515137],["▁csupán",-13.5559720993042],["▁mögött",-13.5559720993042],["▁mắn",-13.5559720993042],["▁przypomina",-13.5559720993042],["▁səviyyədə",-13.5559720993042],["▁temperatūra",-13.5559720993042],["▁tvrtke",-13.5559720993042],["▁tysięcy",-13
.5559720993042],["▁większości",-13.5559720993042],["▁względem",-13.5559720993042],["▁σχέδιο",-13.5559720993042],["▁Шевченка",-13.5559720993042],["▁մակարդակ",-13.5559720993042],["▁पहचान",-13.5559720993042],["▁पहिला",-13.5559720993042],["▁ડાઉનલોડ",-13.5559720993042],["▁କାହିଁକି",-13.5559720993042],["▁ଗ୍ରହଣ",-13.5559720993042],["ಮೂರ್ತಿ",-13.555973052978516],["ครัว",-13.555973052978516],["▁Pegawai",-13.555973052978516],["▁células",-13.555973052978516],["▁hezkuntza",-13.555973052978516],["▁očakáva",-13.555973052978516],["▁εφημερίδα",-13.555973052978516],["▁छोरी",-13.555973052978516],["▁Nairobi",-13.555974006652832],["▁nedaudz",-13.555974006652832],["▁rehefa",-13.555974006652832],["▁Ubuntu",-13.555974960327148],["▁ágúst",-13.555974960327148],["▁norādīt",-13.555975914001465],["▁pimpinan",-13.555975914001465],["▁კვლავ",-13.555978775024414],["▁przedstawiciel",-13.555989265441896],["▁সবাই",-13.555992126464844],["▁गर्नुपर्छ",-13.55599594116211],["▁колькасць",-13.556001663208008],["▁kāpēc",-13.55600357055664],["▁જ્યાં",-13.55600357055664],["▁etməsi",-13.556010246276855],["▁HAYA",-13.556015968322754],["▁danke",-13.55601978302002],["▁მიე",-13.556026458740234],["عائلة",-13.5560302734375],["▁ಜೆ",-13.556034088134766],["моему",-13.556045532226562],["PAD",-13.55605125427246],["▁істеу",-13.556060791015623],["▁spordi",-13.556076049804688],["คอร์",-13.556089401245115],["▁សហ",-13.556090354919434],["շրջ",-13.556093215942385],["▁posve",-13.556102752685549],["空調",-13.556103706359863],["ചന്ദ്ര",-13.556106567382812],["худ",-13.556107521057127],["▁sağlayan",-13.556109428405762],["▁immers",-13.556116104125977],["▁astronomi",-13.55611801147461],["▁heiß",-13.556127548217772],["▁моћи",-13.55612850189209],["▁sınav",-13.556134223937988],["▁конфлікт",-13.556135177612305],["▁بلغ",-13.556146621704102],["хімічн",-13.556156158447266],["ციები",-13.556159019470217],["▁csat",-13.556160926818848],["▁žije",-13.556167602539062],["▁নাই",-13.556175231933594],["▁домакин",-13.55617618560791],["▁누구",-13.556193351745604],["▁fok",-13.556196212768556],["▁społeczne",-13.556200981140137],["▁yıllar",-13.556204795837402],["▁proiectului",-13.556219100952148],["пека",-13.55622386932373],["▁dietro",-13.556241035461426],["▁빌",-13.556275367736816],["കട",-13.55631160736084],["▁командир",-13.55631160736084],["▁skriftlig",-13.556314468383787],["▁ema",-13.556328773498535],["▁geli",-13.556342124938965],["yacak",-13.556358337402344],["ትር",-13.55640697479248],["不錯",-13.556418418884276],["▁Suu",-13.556437492370604],["▁بهار",-13.556440353393556],["▁vidim",-13.556442260742188],["▁۲۰۰",-13.55644702911377],["身分",-13.55644989013672],["້ວ",-13.556455612182615],["▁dieren",-13.556459426879885],["▁graça",-13.556463241577148],["▁주의",-13.55646800994873],["▁mancare",-13.556468963623049],["حزب",-13.556483268737791],["There",-13.556511878967283],["▁saxlanıl",-13.556516647338867],["▁аң",-13.55652141571045],["ຄົງ",-13.556522369384766],["▁vedem",-13.556527137756348],["▁Jawab",-13.55653953552246],["▁газеты",-13.556565284729004],["ttävän",-13.556594848632812],["IÊN",-13.556642532348633],["▁країні",-13.556676864624023],["▁Karne",-13.556687355041504],["scrizione",-13.5566987991333],["▁Mush",-13.556699752807615],["▁cargos",-13.556710243225098],["võime",-13.55673122406006],["rimas",-13.556736946105955],["▁करवा",-13.556755065917969],["▁pašto",-13.556760787963867],["▁hoos",-13.556769371032717],["▁නිලධාරීන්",-13.55678653717041],["▁गौर",-13.556814193725586],["▁ვინმე",-13.55682373046875],["koaren",-13.556838989257812],["نود",-13.556838989257812],["▁judge",-13.556839942932127],["داشت",-13.
556846618652344],["ည္း",-13.556851387023926],["готвен",-13.55686855316162],["▁breaking",-13.556901931762695],["faire",-13.556909561157228],["▁komuna",-13.556927680969238],["▁நடித்த",-13.55693244934082],["▁Macht",-13.556934356689451],["nčia",-13.556936264038086],["gune",-13.556949615478516],["▁ponovo",-13.556979179382324],["▁هوایی",-13.556979179382324],["haran",-13.556998252868652],["ශි",-13.55700969696045],["▁피부",-13.557010650634766],["▁උත්සාහ",-13.557015419006348],["ագրեր",-13.557034492492676],["▁vehicles",-13.557052612304688],["▁သား",-13.557056427001951],["hoor",-13.5570707321167],["హు",-13.557089805603027],["▁tekemään",-13.557096481323242],["മെന്റ്",-13.557110786437988],["▁olmalı",-13.55711555480957],["update",-13.55712604522705],["▁jarraitu",-13.55713176727295],["ưỡng",-13.557137489318848],["▁Miasta",-13.557143211364746],["喜歡的",-13.557158470153809],["λλο",-13.557162284851074],["▁reklame",-13.557162284851074],["ენს",-13.557165145874023],["▁habitat",-13.557170867919922],["वेल",-13.557173728942873],["▁втрати",-13.557183265686035],["▁þið",-13.557184219360352],["▁pripravi",-13.557207107543944],["▁ကိုယ်",-13.557212829589844],["รองรับ",-13.557215690612791],["▁interaktiv",-13.557225227355955],["▁هاست",-13.557230949401855],["▁გამა",-13.557238578796388],["▁הרכב",-13.55724811553955],["άτι",-13.557260513305664],["已经成为",-13.557275772094728],["▁halad",-13.557315826416016],["യിലൂടെ",-13.557333946228027],["▁bojā",-13.557360649108888],["TOM",-13.55736255645752],["gaasi",-13.557369232177734],["▁daxili",-13.557377815246582],["ร้า",-13.55738925933838],["RIK",-13.55739402770996],["▁каждом",-13.557408332824709],["před",-13.557414054870604],["▁poteka",-13.557435989379885],["▁ادبي",-13.557435989379885],["qesh",-13.557446479797363],["▁descubri",-13.557449340820312],["▁капитала",-13.557449340820312],["ИШ",-13.557462692260742],["جمال",-13.55746841430664],["१६",-13.557470321655272],["▁ишен",-13.557476997375488],["mysł",-13.557482719421388],["технолог",-13.557498931884766],["МЕТ",-13.557503700256348],["▁ويندا",-13.557536125183104],["ivamente",-13.557538032531738],["MAK",-13.557559967041016],["▁elleni",-13.557568550109863],["▁anunt",-13.557583808898926],["കൾക്ക്",-13.557592391967772],["▁dibuka",-13.55759620666504],["▁dealer",-13.557608604431152],["▁mikilvæg",-13.557618141174316],["▁നൽക",-13.557632446289062],["▁लिने",-13.557668685913086],["▁semestre",-13.557682037353516],["skilt",-13.557697296142578],["ኮች",-13.557699203491213],["▁ඇස",-13.55772304534912],["▁الملل",-13.557734489440918],["owość",-13.557754516601562],["ਨੂੰ",-13.557772636413574],["fino",-13.557781219482422],["たま",-13.557795524597168],["▁houve",-13.5577974319458],["стављен",-13.55781078338623],["▁lakó",-13.557818412780762],["疗",-13.557819366455078],["ಪಂ",-13.557827949523926],["▁පෙන්",-13.557828903198242],["stria",-13.557832717895508],["xé",-13.557838439941406],["遲",-13.557888984680176],["攤",-13.5579195022583],["▁Cape",-13.557920455932615],["▁gwybod",-13.557933807373049],["▁nélküli",-13.557934761047363],["ค่อนข้าง",-13.557938575744627],["▁Jacques",-13.557940483093262],["▁dźwięk",-13.557940483093262],["▁eskaintzen",-13.557940483093262],["▁imprescindible",-13.557940483093262],["▁isticmaal",-13.557940483093262],["▁københavn",-13.557940483093262],["▁perkahwinan",-13.557940483093262],["▁zwykle",-13.557940483093262],["▁кампаніі",-13.557940483093262],["▁тусламж",-13.557940483093262],["▁хөдөлгөөн",-13.557940483093262],["▁تکنولوژی",-13.557940483093262],["▁सहायक",-13.557940483093262],["▁મુલાકાત",-13.557940483093262],["▁Frühstück",-13.557941436767578],["▁mercredi",-13
.557941436767578],["▁udeležen",-13.557941436767578],["▁کیلومتر",-13.557941436767578],["អនុវត្ត",-13.557942390441896],["▁Uşaq",-13.557943344116213],["▁nonummy",-13.557943344116213],["▁ยูไนเต็ด",-13.557943344116213],["▁ნიშნავს",-13.557944297790527],["▁continúa",-13.557947158813477],["▁విలువ",-13.557948112487791],["▁מספיק",-13.557952880859377],["върна",-13.557954788208008],["▁Hvernig",-13.557960510253906],["▁wunderbar",-13.557961463928224],["▁užití",-13.557966232299805],["▁11:30",-13.557968139648438],["▁ուղղված",-13.557981491088867],["▁Kishore",-13.557995796203612],["▁плит",-13.558000564575195],["▁quantità",-13.558003425598145],["▁ഫോണ",-13.55800724029541],["▁դարձել",-13.558008193969728],["ığ",-13.55801010131836],["▁мәліметтер",-13.558011054992676],["高雄市",-13.558016777038574],["БОР",-13.558023452758787],["ल्याण्ड",-13.558023452758787],["เม็ด",-13.558030128479004],["සෙන",-13.558035850524902],["▁održan",-13.55803680419922],["dijo",-13.558039665222168],["▁ترڅ",-13.558042526245115],["▁أعلى",-13.558049201965332],["▁힘들",-13.55805206298828],["▁байвал",-13.558056831359863],["▁ليگ",-13.55805778503418],["▁piv",-13.558062553405762],["боле",-13.558079719543455],["▁місцевих",-13.558112144470217],["▁sterkt",-13.558114051818848],["lemme",-13.558115005493164],["▁ሦስት",-13.55811882019043],["▁රපට",-13.558133125305176],["▁ofere",-13.558143615722656],["qini",-13.5581693649292],["▁banan",-13.558184623718262],["▁opinber",-13.55820655822754],["▁distra",-13.558216094970703],["▁забране",-13.558256149291992],["▁ostala",-13.558260917663574],["▁마음을",-13.55826187133789],["▁esforço",-13.558263778686523],["▁Български",-13.558268547058104],["▁sagot",-13.558269500732422],["公正",-13.558272361755373],["▁entsprechend",-13.558277130126951],["▁Боже",-13.558279037475586],["▁الرغم",-13.55828094482422],["▁mērķi",-13.558284759521484],["గారు",-13.558289527893066],["▁Lenovo",-13.558289527893066],["▁двамата",-13.558292388916016],["▁Papua",-13.558298110961914],["▁новую",-13.558334350585938],["▁Tự",-13.558337211608888],["▁prá",-13.55834674835205],["ทีมงาน",-13.558351516723633],["▁പറഞ്ഞ്",-13.558358192443848],["පොළ",-13.55836296081543],["▁გული",-13.558403968811035],["▁Ipak",-13.558405876159668],["▁kljub",-13.558411598205566],["НЯ",-13.558425903320312],["▁брать",-13.558428764343262],["▁diaspor",-13.558430671691896],["▁descubrir",-13.558432579040527],["▁converte",-13.558435440063477],["Thi",-13.558439254760742],["▁polnud",-13.558442115783691],["മ്പു",-13.55844497680664],["384",-13.558466911315918],["▁brist",-13.558467864990234],["▁Rhein",-13.558478355407717],["ਐਮ",-13.558479309082031],["eiß",-13.55848503112793],["жилт",-13.55848503112793],["▁Ngay",-13.558499336242676],["▁сунуш",-13.558506965637209],["▁بنت",-13.558518409729004],["▁ronde",-13.55853271484375],["asutuse",-13.558555603027344],["形容",-13.558566093444824],["▁తెలి",-13.558574676513672],["为此",-13.558586120605469],["hääl",-13.55859088897705],["▁traves",-13.558594703674316],["▁saavut",-13.55860996246338],["▁frokost",-13.558624267578123],["▁voci",-13.558633804321287],["альное",-13.558638572692873],["▁menesty",-13.558643341064451],["▁կարգով",-13.558677673339844],["▁potrebna",-13.558706283569336],["ంటా",-13.558712005615234],["▁تشكيل",-13.558716773986816],["▁inggih",-13.558725357055664],["þjónustu",-13.55872917175293],["▁담당",-13.558745384216309],["ቅዱስ",-13.558754920959473],["▁állt",-13.55878448486328],["▁Espai",-13.558786392211914],["以此",-13.558793067932127],["▁Цр",-13.558795928955078],["▁našega",-13.558799743652344],["ಜ್ಜ",-13.558808326721191],["▁Hø",-13.558815002441406],["▁пәні",-13.558874130
249023],["کاران",-13.558882713317873],["你也",-13.558894157409668],["▁antico",-13.55890655517578],["▁prestar",-13.558913230895996],["▁[7]",-13.558919906616213],["▁Ār",-13.558920860290527],["ونها",-13.55893325805664],["▁labs",-13.558941841125488],["ዘን",-13.55894374847412],["▁글로벌",-13.5589599609375],["كبر",-13.558975219726562],["▁Reddy",-13.558977127075195],["▁карты",-13.55898094177246],["▁2,8",-13.55898666381836],["Plat",-13.558989524841309],["▁منتدى",-13.558999061584473],["▁diagram",-13.559002876281738],["јно",-13.55901050567627],["▁asasi",-13.559017181396484],["सिल",-13.559022903442385],["▁манифест",-13.559025764465332],["소득",-13.559027671813965],["KUM",-13.559029579162598],["▁acabou",-13.559029579162598],["əb",-13.559030532836914],["▁அன்ப",-13.55903148651123],["осмотр",-13.559037208557127],["▁uchel",-13.559062957763672],["▁prvog",-13.559066772460938],["पणा",-13.55908489227295],["▁haus",-13.559094429016112],["▁شڪ",-13.55909538269043],["▁states",-13.559121131896973],["පී",-13.559148788452148],["▁Müslüman",-13.55918025970459],["▁Африка",-13.55918025970459],["▁ساری",-13.55918025970459],["▁에너지",-13.559209823608398],["bhal",-13.559219360351562],["▁agresi",-13.559242248535156],["legung",-13.559260368347168],["hyd",-13.5592622756958],["▁მხარე",-13.559267044067385],["שני",-13.559268951416016],["手指",-13.55927848815918],["DIG",-13.559297561645508],["പ്പറ",-13.559297561645508],["▁Lapse",-13.559297561645508],["▁malaman",-13.55929946899414],["▁Ασ",-13.559310913085938],["рови",-13.559321403503418],["▁مربع",-13.559321403503418],["držiava",-13.55932331085205],["يكية",-13.559334754943848],["▁עובר",-13.559338569641112],["က်င္",-13.559367179870604],["▁Nunca",-13.559368133544922],["ାମ",-13.559370994567873],["▁pidu",-13.559374809265137],["▁Meán",-13.559378623962402],["▁Amy",-13.559401512145996],["пројект",-13.559466361999512],["▁studenten",-13.559484481811523],["ητής",-13.559490203857422],["Жас",-13.559517860412598],["աւ",-13.559551239013672],["▁árs",-13.55955410003662],["▁sulke",-13.559558868408203],["▁paylaşım",-13.559569358825684],["▁besta",-13.55957317352295],["▁kniv",-13.55958080291748],["▁Middel",-13.559581756591797],["عاب",-13.559611320495604],["නට",-13.559638023376465],["时尚",-13.559657096862791],["▁vormen",-13.55966091156006],["પાલ",-13.559672355651855],["▁നേരത്തെ",-13.559687614440918],["▁TD",-13.559691429138184],["kér",-13.55969524383545],["▁kafedra",-13.55970859527588],["▁privato",-13.559715270996094],["Ale",-13.559722900390623],["▁ئىس",-13.559730529785156],["▁ठाउँ",-13.559735298156738],["▁bestilling",-13.559738159179688],["▁prose",-13.559741973876951],["▁GK",-13.559743881225586],["行う",-13.55975341796875],["▁юридическо",-13.559758186340332],["▁uspješno",-13.559762001037598],["▁искать",-13.559788703918455],["▁경쟁",-13.559791564941406],["▁partijen",-13.559804916381836],["润",-13.559819221496582],["▁Талас",-13.559828758239746],["兴奋",-13.559856414794922],["▁considerada",-13.559879302978516],["▁obrigado",-13.559879302978516],["的健康",-13.559882164001465],["สาขา",-13.559884071350098],["▁заказчик",-13.559887886047363],["▁Schal",-13.559897422790527],["▁Кеңеш",-13.559898376464844],["괴",-13.55990505218506],["▁бүгүн",-13.55990982055664],["epektibong",-13.55991268157959],["គេហទំព័រ",-13.55991268157959],["▁Egyesült",-13.55991268157959],["▁dėmesį",-13.55991268157959],["▁gewonnen",-13.55991268157959],["▁сурвалж",-13.55991268157959],["▁събитие",-13.55991268157959],["▁تنقید",-13.55991268157959],["▁दोन्ही",-13.55991268157959],["▁அறிக்கை",-13.55991268157959],["▁დოკუმენტ",-13.55991268157959],["▁Bizottság",-13.559913635253906]
,["▁attiecībā",-13.559913635253906],["▁dixwazin",-13.559913635253906],["▁Ρωσία",-13.559913635253906],["▁шаблон",-13.559913635253906],["▁ئۆتكۈز",-13.559913635253906],["▁মাদক",-13.559913635253906],["▁సంఖ్య",-13.559913635253906],["▁아이디",-13.559914588928224],["tòria",-13.55991554260254],["▁McDonald",-13.55991554260254],["ประวัติศาสตร์",-13.559916496276855],["▁aştept",-13.559917449951172],["▁umjesto",-13.559917449951172],["▁मिनेट",-13.559917449951172],["▁hisoblanadi",-13.55992031097412],["叫做",-13.55992031097412],["▁షో",-13.559924125671388],["oktatás",-13.559926986694336],["▁ప్రకారం",-13.559927940368652],["▁વગર",-13.55993366241455],["▁Хрватска",-13.559938430786133],["▁देण्यात",-13.55993938446045],["▁thượng",-13.559940338134766],["▁Millet",-13.559942245483398],["▁Архив",-13.559944152832031],["▁üzenet",-13.55994701385498],["▁Jess",-13.55994987487793],["రక",-13.559961318969728],["▁خواہش",-13.559965133666992],["▁функції",-13.559977531433104],["түк",-13.559980392456056],["▁Metropol",-13.559996604919434],["▁ይሆን",-13.560001373291016],["が行われ",-13.560003280639648],["▁경우에는",-13.56001091003418],["打印",-13.56003761291504],["▁Before",-13.560040473937988],["▁عوض",-13.560040473937988],["▁يولى",-13.560041427612305],["स्तो",-13.560044288635254],["▁велат",-13.56004524230957],["▁унікальн",-13.560052871704102],["董事长",-13.56005573272705],["▁zili",-13.56005859375],["rapi",-13.560065269470217],["▁Itulah",-13.56007194519043],["ികൾ",-13.560076713562012],["ԵԼ",-13.560077667236328],["▁כהן",-13.560081481933594],["▁створити",-13.560084342956545],["મૂ",-13.56008529663086],["▁subotu",-13.560086250305176],["▁көрсөтүү",-13.56009006500244],["耐心",-13.560091972351074],["▁daļu",-13.560118675231934],["住所",-13.560120582580566],["▁finansiell",-13.5601224899292],["▁sentyabr",-13.560128211975098],["▁ខ្លាំង",-13.560142517089844],["▁tugeva",-13.56014633178711],["▁gofyn",-13.560165405273438],["▁greater",-13.560181617736816],["מינ",-13.560189247131348],["▁syyskuuta",-13.560198783874512],["▁โร",-13.56020736694336],["▁Justice",-13.560211181640623],["▁ihminen",-13.560239791870115],["▁വിര",-13.560264587402344],["▁одбрана",-13.560266494750977],["מיה",-13.560267448425291],["శాస్త్ర",-13.560269355773926],["▁clubs",-13.560276985168455],["▁രാമ",-13.560276985168455],["ገነ",-13.560280799865724],["კერ",-13.560283660888672],["▁grunnlag",-13.560290336608888],["ിക്കാൻ",-13.560304641723633],["düyü",-13.560311317443848],["46)",-13.560314178466797],["▁indicate",-13.560335159301758],["▁Reykjavíkur",-13.560340881347656],["▁risico",-13.560359001159668],["▁otpad",-13.56036376953125],["▁adquiri",-13.560365676879885],["چىسى",-13.560383796691896],["кій",-13.56039333343506],["▁drap",-13.560395240783691],["▁کلک",-13.560409545898438],["यम्",-13.560410499572754],["ENCIA",-13.560413360595703],["خه",-13.56041431427002],["pista",-13.56044101715088],["―",-13.560442924499512],["▁naziv",-13.560443878173828],["千萬",-13.560447692871094],["▁1897",-13.560452461242676],["czyk",-13.56047534942627],["document",-13.56047534942627],["meyeceği",-13.560528755187988],["▁Hmm",-13.560528755187988],["વત",-13.560532569885254],["▁Naga",-13.560545921325684],["▁Olu",-13.56055736541748],["▁ಉಳಿದ",-13.560565948486328],["slev",-13.560601234436035],["となりました",-13.56062126159668],["為何",-13.560638427734377],["দ্য",-13.56064796447754],["▁જરૂરી",-13.56064796447754],["145",-13.560702323913574],["▁doklad",-13.560707092285156],["ಸ್ಥಿತಿ",-13.560708045959473],["首相",-13.560711860656738],["mbong",-13.560712814331056],["πουλ",-13.560797691345217],["泰国",-13.560798645019531],["▁yog",-13.560820579528809],["재단",-13.560
820579528809],["▁песен",-13.56082248687744],["ीची",-13.560826301574709],["ORES",-13.560829162597656],["kolek",-13.560843467712402],["▁अरे",-13.560855865478516],["▁teile",-13.560887336730955],["gino",-13.56089973449707],["▁læring",-13.560908317565918],["▁Formul",-13.560914039611816],["▁كرده",-13.560916900634766],["্টি",-13.560927391052246],["▁Zrt",-13.560935020446776],["▁বু",-13.560948371887209],["▁Grøn",-13.560955047607422],["▁सिक्",-13.560962677001951],["şek",-13.561009407043455],["פרס",-13.561013221740724],["лечен",-13.56103229522705],["हून",-13.561036109924316],["▁Hüseyn",-13.561052322387695],["▁בסדר",-13.56106662750244],["traction",-13.561075210571287],["เลยค่ะ",-13.561079025268556],["▁verko",-13.561079978942873],["▁의사",-13.561079978942873],["▁Аф",-13.561087608337402],["jšie",-13.5610990524292],["▁بدأ",-13.561107635498049],["▁лекува",-13.561116218566896],["ต้องมี",-13.561127662658691],["▁puolesta",-13.561151504516602],["zieren",-13.56115436553955],["▁Tamás",-13.561156272888184],["▁conceito",-13.561163902282717],["▁hastalık",-13.56117820739746],["सन्",-13.561187744140623],["▁templo",-13.56120777130127],["lên",-13.561226844787598],["什么样的",-13.561244010925291],["我覺得",-13.561256408691406],["มีปัญหา",-13.561287879943848],["▁თქვა",-13.561293601989746],["រុ",-13.561315536499023],["äjä",-13.561320304870604],["usele",-13.561322212219238],["▁siajn",-13.561326026916504],["이지",-13.5613374710083],["▁праекта",-13.561338424682615],["▁පෙන්වා",-13.561376571655272],["griež",-13.56137752532959],["력이",-13.561406135559082],["▁катуу",-13.561409950256348],["▁hefð",-13.56142520904541],["▁његова",-13.56143283843994],["▁fid",-13.561436653137209],["పూ",-13.561446189880373],["上映",-13.561460494995115],["▁ඌ",-13.561469078063965],["▁forstår",-13.561476707458496],["ellyt",-13.561480522155762],["▁ለዚህ",-13.561480522155762],["technika",-13.561508178710938],["pell",-13.56152629852295],["▁ընկ",-13.561534881591797],["▁ziem",-13.561545372009276],["▁rebre",-13.561549186706545],["▁schod",-13.561553955078123],["▁chło",-13.561634063720703],["njeno",-13.561657905578612],["▁القو",-13.56168270111084],["ቺ",-13.561692237854004],["πτυ",-13.561697006225586],["740",-13.561723709106444],["teeksi",-13.56175708770752],["▁فیصل",-13.561769485473633],["▁workshops",-13.561777114868164],["▁মামলা",-13.56178092956543],["▁pirmajā",-13.561785697937012],["▁bambu",-13.561813354492188],["▁ஹா",-13.56181812286377],["贯",-13.561832427978516],["bhá",-13.56185245513916],["韦",-13.56186294555664],["マンション",-13.561863899230955],["▁Amerîka",-13.561872482299805],["ncies",-13.56187629699707],["▁sərəncam",-13.561888694763184],["▁ব্যবসা",-13.561888694763184],["힌",-13.561888694763184],["Ѓ",-13.5618896484375],["▁Felhasználás",-13.5618896484375],["▁Gaillimh",-13.5618896484375],["▁Küçük",-13.5618896484375],["▁UBND",-13.5618896484375],["▁Zoltán",-13.5618896484375],["▁memungkinkan",-13.5618896484375],["▁menyenangkan",-13.5618896484375],["▁življenju",-13.5618896484375],["▁Правительства",-13.5618896484375],["▁получение",-13.5618896484375],["▁последнее",-13.5618896484375],["▁эксперимент",-13.5618896484375],["▁інвалід",-13.5618896484375],["▁تاکنون",-13.5618896484375],["▁कुटुंब",-13.5618896484375],["▁রাজশাহী",-13.5618896484375],["▁ಆಡಳಿತ",-13.5618896484375],["μπι",-13.561890602111816],["▁ಉದ್ದೇಶ",-13.561890602111816],["▁ተቋማት",-13.561890602111816],["▁bhfeidhm",-13.56189250946045],["▁ցանկացած",-13.561893463134766],["▁aduna",-13.56190013885498],["▁Αθηνών",-13.561906814575195],["▁обикновено",-13.561906814575195],["túl",-13.561907768249512],["▁Despois",-13.561909675598145],["▁dóibh",
-13.561909675598145],["▁മുഹമ്മദ്",-13.561909675598145],["▁puslapis",-13.56191062927246],["▁główne",-13.561914443969728],["היי",-13.56191635131836],["▁menganggap",-13.56191635131836],["▁බෙදා",-13.561919212341309],["▁piccole",-13.561920166015623],["▁אינטער",-13.561920166015623],["▁podmiot",-13.561922073364258],["▁yada",-13.56192398071289],["▁epizod",-13.561925888061523],["▁отнася",-13.561927795410156],["▁okanye",-13.561935424804688],["▁sublim",-13.561935424804688],["▁Tujuan",-13.561939239501951],["▁habitaciones",-13.56194305419922],["sukan",-13.561945915222168],["▁שגם",-13.561945915222168],["▁гэтую",-13.5619478225708],["▁режима",-13.5619478225708],["啤酒",-13.561951637268066],["tekniikka",-13.561957359313965],["▁(2005)",-13.56195831298828],["▁iekār",-13.561968803405762],["129",-13.561972618103027],["альную",-13.56198501586914],["▁pengaruh",-13.561986923217772],["▁тили",-13.56198787689209],["▁bewusst",-13.561993598937988],["ຈຸດ",-13.56199550628662],["▁Kena",-13.561999320983888],["▁посмотрел",-13.56201171875],["▁Gebiet",-13.562018394470217],["▁Kingdom",-13.562026977539062],["▁پرشین",-13.562030792236328],["▁visse",-13.56203556060791],["▁التم",-13.562037467956545],["▁משנה",-13.56203842163086],["чуп",-13.562087059020996],["▁ταξίδι",-13.562087059020996],["▁Odbor",-13.56209945678711],["▁vznikl",-13.562103271484377],["จอม",-13.562108039855955],["▁lovit",-13.562114715576172],["▁Chapter",-13.562122344970703],["▁показали",-13.562156677246094],["▁возрасте",-13.562166213989258],["這家",-13.562182426452637],["▁Caroline",-13.562185287475586],["▁spécialisé",-13.562189102172852],["સન",-13.562204360961914],["▁zbiera",-13.562207221984863],["▁ótima",-13.56222438812256],["▁цитира",-13.56222438812256],["സ്റ്റി",-13.562225341796877],["▁tendens",-13.562226295471191],["▁رائعة",-13.562249183654783],["▁істот",-13.56225872039795],["▁पदों",-13.562260627746582],["▁женски",-13.562289237976074],["▁rebut",-13.56229019165039],["▁масса",-13.562294006347656],["▁یوں",-13.562301635742188],["ратын",-13.56230354309082],["▁Helle",-13.562305450439451],["▁лоб",-13.562314987182615],["▁وانت",-13.562320709228516],["▁საერთო",-13.56232452392578],["អត់",-13.562332153320312],["ročno",-13.562333106994627],["▁פאל",-13.562350273132324],["▁acordat",-13.562363624572754],["ulduğu",-13.562376022338867],["▁Senado",-13.562384605407717],["▁siarad",-13.56238842010498],["▁मृत",-13.562397956848145],["▁pleť",-13.562414169311523],["▁שלוש",-13.562439918518066],["▁avsnitt",-13.56245231628418],["stwie",-13.562455177307127],["▁arís",-13.562463760375977],["▁savai",-13.562480926513672],["يەت",-13.562499046325684],["%20",-13.562503814697266],["▁Fry",-13.562533378601074],["ෙමි",-13.56253719329834],["▁Field",-13.562545776367188],["▁Koska",-13.562564849853516],["▁Naime",-13.562566757202148],["보드",-13.562569618225098],["stró",-13.562582969665527],["▁තාත්තා",-13.562585830688477],["▁مخاطب",-13.562615394592283],["ാനായി",-13.56262493133545],["ようと",-13.562636375427246],["ציות",-13.56264591217041],["ไล่",-13.562661170959473],["റിന്",-13.562684059143066],["▁bereid",-13.562684059143066],["▁Käsi",-13.562699317932127],["▁UZ",-13.56273078918457],["▁lokaler",-13.562732696533203],["▁sidst",-13.562736511230469],["brud",-13.562737464904783],["▁трохи",-13.562762260437012],["▁پیشین",-13.562772750854492],["▁جنت",-13.562779426574709],["▁eadar",-13.562796592712402],["▁અમારા",-13.562801361083984],["▁użytkow",-13.56280517578125],["▁Formular",-13.562826156616213],["主張",-13.562828063964844],["▁irure",-13.562835693359377],["ଧ୍ୟ",-13.562843322753906],["▁дэмж",-13.5628662109375],["တရ",-13.56286907
196045],["טיב",-13.562893867492676],["hooldus",-13.562906265258787],["▁เห็น",-13.562908172607422],["▁GHz",-13.562931060791016],["冬季",-13.562955856323242],["ագիրը",-13.562958717346191],["्मा",-13.562960624694824],["ขอบคุณ",-13.562962532043455],["▁Parece",-13.562962532043455],["▁továbbra",-13.562965393066406],["ीट",-13.562975883483888],["wiet",-13.562983512878418],["piir",-13.562984466552734],["▁סוגי",-13.562984466552734],["▁Львівськ",-13.562986373901367],["คาด",-13.56302261352539],["▁المقال",-13.563023567199709],["▁შავი",-13.563032150268556],["▁bychom",-13.563037872314451],["த்துறை",-13.563051223754885],["Ray",-13.56307315826416],["▁فورم",-13.56307601928711],["talous",-13.56308937072754],["ديو",-13.563104629516602],["മ്യ",-13.563105583190918],["▁Modi",-13.563106536865234],["dug",-13.563127517700195],["▁कसे",-13.563127517700195],["agri",-13.563151359558104],["▁tarafı",-13.563180923461914],["▁sunuyor",-13.56320095062256],["Че",-13.563228607177734],["▁genital",-13.563231468200684],["rojn",-13.563236236572266],["▁భూ",-13.563237190246582],["▁Bogdan",-13.563244819641112],["▁передбачен",-13.56324577331543],["tafel",-13.563275337219238],["以上に",-13.56328010559082],["ijske",-13.563287734985352],["▁сұрақ",-13.5632905960083],["εθν",-13.563297271728516],["▁reports",-13.563304901123049],["ቂያ",-13.563312530517578],["▁Pantai",-13.563315391540527],["▁adulti",-13.563323974609377],["▁prope",-13.563340187072754],["misessa",-13.563343048095703],["ohjeet",-13.563353538513184],["▁ಸಮಾಜ",-13.563374519348145],["jdou",-13.563383102416992],["▁مغرب",-13.56338596343994],["තරම්",-13.563395500183104],["▁işe",-13.563396453857422],["▁לאי",-13.563401222229004],["▁осуществ",-13.563404083251951],["וול",-13.563411712646484],["▁প্রধানমন্ত্রীর",-13.563421249389648],["▁alpin",-13.56342887878418],["如果有",-13.563434600830078],["kázal",-13.563443183898926],["市中心",-13.563446044921877],["▁Kole",-13.563457489013672],["▁даними",-13.563464164733888],["വിടെ",-13.563465118408203],["▁1898",-13.563474655151367],["▁מזרח",-13.563474655151367],["раны",-13.563475608825684],["▁personales",-13.563483238220217],["▁серија",-13.563483238220217],["โท",-13.563501358032228],["▁mugav",-13.563505172729492],["もらった",-13.563507080078123],["349",-13.563511848449709],["▁почина",-13.56354522705078],["數字",-13.563569068908691],["▁αλλαγή",-13.563570022583008],["▁сумын",-13.563570022583008],["▁विद्युत",-13.56358814239502],["▁консерв",-13.563596725463867],["▁tramita",-13.563597679138184],["(18",-13.563650131225586],["▁zgjedh",-13.563653945922852],["كمل",-13.563664436340332],["ବାହ",-13.56366729736328],["AUT",-13.56369686126709],["ဟူ",-13.563712120056152],["▁بشه",-13.563712120056152],["فری",-13.563716888427734],["▁female",-13.563720703125],["▁priemer",-13.56373119354248],["▁dokaza",-13.563758850097656],["मती",-13.563764572143556],["▁боловсрол",-13.563773155212402],["▁distinct",-13.563776969909668],["▁leter",-13.563776969909668],["福祉",-13.563819885253906],["▁публичн",-13.563828468322754],["▁permanece",-13.563834190368652],["▁לוקח",-13.56383991241455],["回憶",-13.563849449157717],["वां",-13.563852310180664],["яких",-13.56385326385498],["▁haji",-13.56385326385498],["నై",-13.563854217529297],["പര്യ",-13.56385612487793],["お届け",-13.563857078552246],["委托",-13.563859939575195],["ၵ",-13.563864707946776],["führer",-13.56386661529541],["▁සම්පූර්ණ",-13.563868522644045],["▁Amennyiben",-13.56386947631836],["▁Gràcies",-13.56386947631836],["▁betydelig",-13.56386947631836],["▁irgendwie",-13.56386947631836],["▁izkušnje",-13.56386947631836],["▁memanfaatkan",-13.56386947631836],["▁məxsus",-13
.56386947631836],["▁pérdida",-13.56386947631836],["▁richieste",-13.56386947631836],["▁zależności",-13.56386947631836],["▁πέντε",-13.56386947631836],["▁Сергій",-13.56386947631836],["▁пропозиції",-13.56386947631836],["▁протяжении",-13.56386947631836],["▁شەھەر",-13.56386947631836],["▁खड्का",-13.56386947631836],["▁सम्बोधन",-13.56386947631836],["▁પાછળ",-13.56386947631836],["▁ఢిల్లీ",-13.56386947631836],["▁თითქმის",-13.56386947631836],["▁მადლობა",-13.56386947631836],["▁behöva",-13.563870429992676],["▁călători",-13.563871383666992],["▁ξέρει",-13.563871383666992],["▁хабарлайды",-13.563871383666992],["▁pieredze",-13.563872337341309],["▁spletna",-13.563872337341309],["ឯកសារ",-13.563873291015623],["▁ապր",-13.56387424468994],["▁भनाइ",-13.563876152038574],["▁സമ്മാന",-13.563876152038574],["▁स्टेशन",-13.563878059387209],["▁ଟିମ୍",-13.563886642456056],["▁szal",-13.563895225524902],["▁प्रकल्प",-13.563897132873535],["▁ብዬ",-13.563901901245115],["▁الدراسة",-13.56390380859375],["שילוב",-13.56391143798828],["എന്ന",-13.563912391662598],["▁ఉందని",-13.563912391662598],["▁Silahkan",-13.563918113708496],["▁разрешение",-13.563920974731444],["▁yhtään",-13.563928604125977],["▁પૂછ",-13.563932418823242],["物を",-13.563935279846191],["vidas",-13.563941955566406],["▁بەزى",-13.563944816589355],["▁tuule",-13.563952445983888],["တြဲ",-13.563955307006836],["▁программасы",-13.563958168029783],["রাম",-13.563973426818848],["hù",-13.56397819519043],["▁auks",-13.563980102539062],["▁rodziców",-13.563986778259276],["พลังงาน",-13.56399154663086],["สีแดง",-13.563995361328123],["▁henüz",-13.56400203704834],["▁βιο",-13.564006805419922],["▁мұра",-13.564010620117188],["▁البيانات",-13.564014434814451],["▁fólki",-13.56401538848877],["యాల",-13.564032554626465],["АМА",-13.564053535461426],["▁batalla",-13.564080238342283],["▁ଦି",-13.56408977508545],["baw",-13.564098358154297],["▁proszę",-13.56410789489746],["VIII",-13.56411075592041],["▁tjedna",-13.56411838531494],["▁اکبر",-13.564129829406738],["▁olha",-13.56413745880127],["தொகு",-13.564143180847168],["▁sjever",-13.5641450881958],["▁بهشت",-13.564146995544434],["မား",-13.564153671264648],["▁Apartament",-13.564154624938965],["▁крайней",-13.56415843963623],["חדש",-13.564173698425291],["▁Aidha",-13.564175605773926],["▁19.30",-13.564179420471191],["9.5",-13.564186096191406],["áért",-13.56419277191162],["▁සිහි",-13.564193725585938],["នេះ។",-13.564199447631836],["▁sonuna",-13.564221382141112],["这就",-13.564233779907228],["สัม",-13.564234733581545],["▁удари",-13.564284324645996],["▁Kuid",-13.564288139343262],["▁Fortuna",-13.564295768737791],["▁සමත්",-13.564297676086426],["▁Hills",-13.564302444458008],["▁fortsette",-13.56430435180664],["▁hundra",-13.56432056427002],["▁وسایل",-13.564334869384766],["▁кодекса",-13.564338684082031],["pating",-13.56434154510498],["▁Św",-13.564367294311523],["▁naturelle",-13.564382553100586],["▁સરળ",-13.564385414123535],["▁швидк",-13.564387321472168],["のでしょう",-13.564387321472168],["презент",-13.5643949508667],["▁súlyos",-13.564395904541016],["▁פּאָ",-13.56439971923828],["酒吧",-13.564404487609863],["4.2",-13.564406394958496],["felt",-13.564407348632812],["▁scal",-13.564438819885254],["▁civili",-13.56445026397705],["ნორ",-13.564459800720217],["▁vestig",-13.56446361541748],["kindlustus",-13.564475059509276],["աֆ",-13.56448459625244],["▁Okres",-13.56448459625244],["▁débat",-13.564488410949709],["▁teacher",-13.564491271972656],["▁elementów",-13.564494132995604],["krieg",-13.56450080871582],["وعة",-13.56450080871582],["▁hoida",-13.56452178955078],["方を",-13.564550399780272],["▁arsim",-13.5
64557075500488],["▁Валер",-13.56456470489502],["▁սպաս",-13.564566612243652],["▁Național",-13.564582824707031],["ቆይ",-13.56458854675293],["ደገ",-13.564600944519045],["μόρ",-13.564605712890623],["▁ενα",-13.564608573913574],["statyti",-13.564616203308104],["βη",-13.564640045166016],["05)",-13.564648628234863],["▁ECO",-13.564678192138672],["▁изгуби",-13.564696311950684],["ફર",-13.564702033996582],["▁unless",-13.564705848693848],["知って",-13.564720153808594],["gelin",-13.564733505249023],["▁هوندا",-13.564736366271973],["▁sustenta",-13.564743041992188],["▁특정",-13.564763069152832],["国の",-13.564773559570312],["ιακά",-13.564781188964844],["94)",-13.564785957336426],["lerimizi",-13.564794540405272],["▁cəza",-13.56480312347412],["▁empeza",-13.564810752868652],["isty",-13.564826965332031],["డర్",-13.564870834350586],["▁أولا",-13.564873695373535],["तम्",-13.5648775100708],["ліктер",-13.56488037109375],["chino",-13.56490421295166],["▁aceeasi",-13.564908981323242],["ටා",-13.564915657043455],["▁TAM",-13.564919471740724],["luhur",-13.56492805480957],["diana",-13.56493854522705],["valy",-13.56495475769043],["tämä",-13.56495761871338],["▁الاخ",-13.564959526062012],["większy",-13.564979553222656],["អ៊",-13.5650053024292],["▁ľahko",-13.565020561218262],["找到了",-13.565024375915527],["▁huomaa",-13.56502628326416],["▁signe",-13.565051078796388],["UŽ",-13.56507968902588],["▁Chaque",-13.565083503723145],["тарында",-13.565093994140623],["▁Factor",-13.565110206604004],["▁ciljev",-13.56512451171875],["▁gjetur",-13.565126419067385],["▁lokala",-13.565142631530762],["raithe",-13.565145492553713],["▁wasir",-13.565155029296877],["▁540",-13.565155982971191],["likni",-13.565157890319824],["ਨਿ",-13.565157890319824],["▁nė",-13.565166473388672],["▁germ",-13.565195083618164],["▁având",-13.565196990966797],["мыш",-13.565220832824709],["▁المعا",-13.565231323242188],["▁günlerde",-13.565235137939451],["щими",-13.565258026123049],["▁našeg",-13.565260887145996],["▁aktivist",-13.565293312072754],["▁osnovne",-13.565296173095703],["▁हस्त",-13.565306663513184],["▁delivery",-13.56531047821045],["▁skøn",-13.565312385559082],["▁diantara",-13.565316200256348],["▁bás",-13.56534194946289],["▁کہاں",-13.565342903137209],["peita",-13.565352439880373],["▁barve",-13.565384864807127],["tallen",-13.565400123596191],["ריאל",-13.565403938293455],["zyc",-13.565410614013672],["▁محقق",-13.56541347503662],["вського",-13.565422058105469],["▁ühte",-13.565428733825684],["▁тіла",-13.565437316894531],["▁drah",-13.565439224243164],["lakukan",-13.565449714660645],["markedet",-13.56545352935791],["▁හරිම",-13.56546401977539],["▁വരുന്നു",-13.5654935836792],["xhe",-13.565542221069336],["▁compétences",-13.565558433532717],["▁dobiva",-13.565577507019045],["▁kommentoi",-13.565579414367676],["מתי",-13.565581321716309],["▁мале",-13.565584182739258],["વડ",-13.565611839294434],["▁честно",-13.565630912780762],["广东省",-13.56564712524414],["▁altın",-13.565665245056152],["將在",-13.565672874450684],["▁líon",-13.565696716308594],["クラス",-13.56570816040039],["▁totam",-13.565711975097656],["▁znaš",-13.565716743469238],["▁açıklamada",-13.565717697143556],["ಹಳ್ಳಿ",-13.56572437286377],["נדר",-13.565751075744627],["▁novaj",-13.56575870513916],["नार",-13.565760612487791],["▁പോലീസ",-13.565760612487791],["▁제출",-13.565768241882324],["勁",-13.565793991088867],["▁Yesus",-13.565799713134766],["舒适",-13.565804481506348],["▁పేజీ",-13.56580638885498],["派遣",-13.56582260131836],["hånd",-13.565825462341309],["льц",-13.565832138061523],["廣泛",-13.56583309173584],["メディア",-13.56584930419922],["យុវជន",-13.56585121
1547852],["ต้อนรับ",-13.565853118896484],["๙",-13.565853118896484],["မိသားစု",-13.565853118896484],["▁Kimataifa",-13.565853118896484],["▁Savukārt",-13.565853118896484],["▁fevereiro",-13.565853118896484],["▁hiệp",-13.565853118896484],["▁istraživanja",-13.565853118896484],["▁mendorong",-13.565853118896484],["▁qələbə",-13.565853118896484],["▁sərhəd",-13.565853118896484],["▁terbabit",-13.565853118896484],["▁обсужда",-13.565853118896484],["▁ଜାତୀୟ",-13.565853118896484],["▁fábrica",-13.5658540725708],["▁increasing",-13.5658540725708],["▁πρόταση",-13.5658540725708],["▁менеджмент",-13.5658540725708],["KF",-13.565855026245115],["▁meddwl",-13.565855026245115],["▁зусім",-13.565855026245115],["▁снежня",-13.565855026245115],["▁वालों",-13.565855979919434],["▁հանձնաժողովի",-13.565857887268066],["▁рэжым",-13.565858840942385],["▁افسوس",-13.565858840942385],["▁stopped",-13.565860748291016],["หาร",-13.565861701965332],["▁internacionais",-13.565862655639648],["ngkung",-13.565863609313965],["▁необходимость",-13.565869331359863],["▁СК",-13.565871238708496],["fassung",-13.565872192382812],["čević",-13.565876007080078],["əşə",-13.565879821777344],["▁prináša",-13.565884590148926],["▁ערך",-13.565889358520508],["▁moeilik",-13.56589412689209],["તંત્ર",-13.565902709960938],["▁Bereichen",-13.56590461730957],["▁gewinnen",-13.565911293029783],["▁Rezept",-13.565914154052734],["▁седница",-13.565921783447266],["▁bestu",-13.56593418121338],["▁लाग्ने",-13.56593418121338],["▁irti",-13.565937042236328],["▁버전",-13.565945625305176],["▁기관",-13.565948486328123],["▁Конституция",-13.56594944000244],["ባይ",-13.565990447998049],["êrî",-13.565991401672363],["ဝိ",-13.565999031066896],["▁Fondo",-13.565999984741213],["▁ሓ",-13.565999984741213],["▁riikide",-13.566012382507324],["▁alcanza",-13.566017150878906],["▁ienāk",-13.566021919250488],["▁самите",-13.566025733947754],["▁ल्याउन",-13.566034317016602],["▁masaž",-13.566073417663574],["▁딸",-13.566089630126951],["सै",-13.566095352172852],["▁කොටස්",-13.566105842590332],["▁humans",-13.56610870361328],["பூர்",-13.566110610961914],["▁մարտի",-13.566116333007812],["▁думать",-13.566128730773926],["рөө",-13.566152572631836],["▁írt",-13.566152572631836],["▁етіп",-13.566176414489746],["ոկ",-13.566197395324709],["▁दुर्घटनामा",-13.566213607788086],["▁लंड",-13.566228866577148],["кис",-13.566239356994627],["▁التالية",-13.566241264343262],["▁mesiacov",-13.566244125366213],["គោល",-13.566251754760742],["▁võime",-13.566261291503906],["▁அர",-13.566265106201172],["▁بۇنىڭ",-13.566278457641602],["▁зара",-13.566289901733398],["ចង់",-13.56629753112793],["▁kunda",-13.566299438476562],["▁утро",-13.56630516052246],["的经济",-13.566329002380373],["▁Minas",-13.566330909729004],["▁multipli",-13.56634521484375],["▁الحدود",-13.566346168518066],["grenzen",-13.566363334655762],["▁Þess",-13.566376686096191],["ୃତ",-13.56639003753662],["યું",-13.566400527954102],["ാകും",-13.566400527954102],["▁تحرير",-13.566401481628418],["업계",-13.566411018371582],["чилж",-13.56641674041748],["▁vinnig",-13.566424369812012],["ЗД",-13.566431999206545],["ध्द",-13.566442489624023],["▁вижу",-13.566450119018556],["▁jami",-13.566472053527832],["看的",-13.566499710083008],["▁gauche",-13.56650161743164],["▁पूछ",-13.566514015197754],["▁işler",-13.56653118133545],["▁местах",-13.566553115844728],["κερ",-13.566558837890623],["▁administrativo",-13.566572189331056],["▁зборови",-13.566574096679688],["▁truck",-13.566576957702637],["▁կայքում",-13.566585540771484],["▁kushtet",-13.566598892211914],["▁gads",-13.566603660583496],["▁২৩",-13.566604614257812],["alisme",-13.56662
2734069824],["▁книгу",-13.566633224487305],["స్తారు",-13.566638946533203],["▁aponta",-13.566640853881836],["으로서",-13.566645622253418],["▁veloce",-13.566658020019531],["ੂਲ",-13.56666374206543],["▁tagoj",-13.566673278808594],["૦૦",-13.566680908203123],["ເປີດ",-13.566682815551758],["ព្រឹក",-13.56668472290039],["▁മഹ",-13.56668758392334],["▁heure",-13.566688537597656],["PPP",-13.5667085647583],["▁अहिलेसम्म",-13.566709518432615],["▁cercare",-13.56671905517578],["ասի",-13.566741943359377],["▁হাতে",-13.566744804382324],["▁numër",-13.56675148010254],["UME",-13.566757202148438],["▁მოკლე",-13.566797256469728],["erên",-13.566800117492676],["▁герои",-13.566804885864258],["traf",-13.566817283630373],["ເລີ",-13.566837310791016],["▁Persona",-13.566840171813965],["▁végz",-13.566849708557127],["▁boro",-13.566856384277344],["▁တွင်",-13.566868782043455],["すぎ",-13.566876411437988],["▁különösen",-13.566884994506836],["ुस्",-13.566887855529783],["მინისტრ",-13.566888809204102],["ឃើញ",-13.566908836364746],["▁balon",-13.566917419433594],["ದಲ್ಲೇ",-13.566929817199709],["szerkeszt",-13.566936492919922],["▁Baja",-13.566954612731934],["ıstan",-13.5669584274292],["▁Šil",-13.5669584274292],["ကယ္",-13.56696605682373],["▁ფილმი",-13.566970825195312],["▁giden",-13.566975593566896],["пеціальн",-13.566997528076172],["▁Денис",-13.567002296447754],["būv",-13.567004203796388],["▁goberna",-13.56700611114502],["arrow",-13.567034721374512],["▁المدير",-13.567038536071776],["ております",-13.567051887512209],["▁Арт",-13.567057609558104],["івці",-13.567068099975586],["▁schimbat",-13.5670747756958],["▁nisy",-13.567083358764648],["▁magnific",-13.567089080810549],["▁Grau",-13.567140579223633],["нял",-13.5671968460083],["▁şərtlər",-13.5671968460083],["αίνει",-13.567198753356934],["▁아파트",-13.567228317260742],["▁ଲୋକେ",-13.56723690032959],["НЕТ",-13.567242622375488],["ሄደ",-13.567255020141602],["पह",-13.567286491394045],["▁saludable",-13.567290306091309],["▁სტ",-13.567296028137209],["▁දෙනෙක්",-13.567302703857422],["ቅር",-13.5673246383667],["▁מאר",-13.567341804504396],["トリ",-13.567380905151367],["▁drugog",-13.56739616394043],["▁gesla",-13.567400932312012],["▁තුන්",-13.567408561706545],["kyky",-13.56741428375244],["STAR",-13.567422866821287],["▁перші",-13.567435264587402],["▁הפל",-13.567481994628906],["▁dichiara",-13.567493438720703],["▁leggja",-13.567496299743652],["▁Pure",-13.56752872467041],["ající",-13.56753921508789],["▁bunların",-13.567549705505373],["▁Hamma",-13.56755828857422],["vanta",-13.567569732666016],["▁aplicativo",-13.567580223083496],["▁त्याचे",-13.567583084106444],["эп",-13.567584037780762],["යත්",-13.56758975982666],["nizde",-13.567612648010254],["▁minat",-13.567617416381836],["Mate",-13.567641258239746],["昨年",-13.56764316558838],["ganja",-13.567696571350098],["▁әдебиеті",-13.567703247070312],["▁detrás",-13.567715644836426],["▁Shri",-13.567736625671388],["▁بنياد",-13.56776523590088],["▁aiheutta",-13.567766189575195],["▁ويلي",-13.567768096923828],["凶",-13.56777000427246],["夥伴",-13.56778049468994],["▁Chine",-13.56778335571289],["誇",-13.567808151245115],["taisi",-13.567809104919434],["▁hp",-13.567816734313965],["▁эзэмш",-13.567831039428713],["城镇",-13.567835807800291],["▁nimel",-13.567838668823242],["บาคาร่า",-13.567840576171877],["▁Cenedlaethol",-13.567841529846191],["▁Nhất",-13.567841529846191],["▁mérkőzés",-13.567841529846191],["▁preskaŭ",-13.567841529846191],["▁البرامج",-13.567841529846191],["▁डिसेंबर",-13.567841529846191],["▁పత్రిక",-13.567841529846191],["▁వ్యవస్థ",-13.567841529846191],["▁ಜೆಡಿಎಸ್",-13.567841529846191],["▁ඇමැති",-13.567
841529846191],["▁මුස්ලිම්",-13.567841529846191],["▁მაგალითად",-13.567841529846191],["▁bonheur",-13.567842483520508],["▁preliminar",-13.567842483520508],["▁ගිණුම",-13.567842483520508],["▁സോഷ്യല",-13.567843437194824],["▁براساس",-13.56784439086914],["👍",-13.567845344543455],["▁Leder",-13.567846298217772],["▁Spotify",-13.567846298217772],["▁troviĝas",-13.567846298217772],["▁строительство",-13.567851066589355],["▁breakfast",-13.567852020263672],["▁герой",-13.567852020263672],["▁개념",-13.567852020263672],["▁саласындағы",-13.567853927612305],["▁بازاریابی",-13.567853927612305],["▁મધ્ય",-13.567853927612305],["dóttur",-13.56785488128662],["▁만족",-13.567855834960938],["▁Nazirlər",-13.567858695983888],["▁publisert",-13.567859649658203],["▁הילד",-13.567862510681152],["▁mengganggu",-13.567863464355469],["▁ఎలాంటి",-13.567864418029783],["▁prejav",-13.567873001098633],["▁technologies",-13.567876815795898],["▁койгон",-13.56788444519043],["▁susijusi",-13.567888259887695],["▁céanna",-13.567903518676758],["▁제외",-13.567906379699709],["▁дійсно",-13.567912101745604],["▁bébé",-13.56792163848877],["كاتب",-13.567922592163086],["▁együttműködés",-13.567925453186035],["▁giây",-13.567925453186035],["κατά",-13.5679292678833],["▁půjčky",-13.5679292678833],["▁Miroslav",-13.567930221557615],["▁bergabung",-13.56794261932373],["▁മരിച്ചു",-13.56794261932373],["▁regole",-13.567947387695312],["▁samodzieln",-13.567954063415527],["▁Effekt",-13.56796169281006],["▁göstərici",-13.567965507507324],["devi",-13.567985534667969],["小米",-13.567987442016602],["▁අස්",-13.5679931640625],["身邊",-13.567997932434082],["▁අපගේ",-13.567998886108398],["▁mercados",-13.568012237548828],["RAW",-13.568023681640623],["▁hökm",-13.568023681640623],["▁موعد",-13.568036079406738],["▁konfrans",-13.568056106567385],["▁kommunikáció",-13.56806182861328],["▁vlastný",-13.568062782287598],["▁المصدر",-13.568063735961914],["▁MAHA",-13.568077087402344],["▁kolei",-13.568078994750977],["▁ດາ",-13.568090438842772],["▁Mình",-13.56809139251709],["▁klokken",-13.568097114562988],["▁ľah",-13.568109512329102],["▁Dura",-13.568113327026367],["cināt",-13.568115234375],["▁ლევან",-13.568138122558594],["▁američki",-13.568140983581545],["▁torta",-13.568151473999023],["▁වඩාත්",-13.56815242767334],["的基础",-13.56815242767334],["科技有限公司",-13.568154335021973],["▁funkcion",-13.568156242370604],["miel",-13.568161964416504],["▁förklara",-13.56818675994873],["āšanā",-13.568193435668944],["▁joriy",-13.56820011138916],["▁Rice",-13.56821060180664],["uwar",-13.568230628967283],["▁अध",-13.568230628967283],["▁telepon",-13.568236351013184],["▁සටහන්",-13.568236351013184],["كام",-13.568239212036133],["színű",-13.568248748779297],["inske",-13.56825351715088],["▁сацыяльна",-13.56825351715088],["פרש",-13.568256378173828],["kirche",-13.56825828552246],["▁olacağını",-13.568294525146484],["▁nobene",-13.56829833984375],["▁dolari",-13.568303108215332],["▁කන්න",-13.568317413330078],["▁Vissza",-13.568324089050291],["▁parlamento",-13.568325996398926],["▁Қаз",-13.568329811096191],["▁Suður",-13.568346977233888],["រង",-13.568354606628418],["ჭე",-13.568370819091797],["▁स्थापित",-13.568371772766112],["▁honako",-13.568379402160645],["keskuse",-13.568391799926758],["她們",-13.568399429321287],["३८",-13.56840705871582],["▁botës",-13.568416595458984],["▁хэлж",-13.568429946899414],["olie",-13.568436622619627],["مجلس",-13.56844997406006],["▁posluša",-13.568469047546388],["uotų",-13.5684814453125],["یې",-13.5684814453125],["court",-13.56849479675293],["१७",-13.568524360656738],["▁ülkede",-13.568558692932127],["▁იდეა",-13.568561553955
078],["出行",-13.568592071533203],["ətlər",-13.568604469299316],["▁napot",-13.568625450134276],["ਹਾਂ",-13.568629264831545],["▁tvrdí",-13.568652153015137],["ographie",-13.568681716918944],["isiún",-13.56869888305664],["▁finir",-13.568724632263184],["▁anong",-13.56873607635498],["万美元",-13.568750381469728],["kering",-13.568755149841309],["▁hören",-13.568758010864258],["▁samar",-13.568760871887209],["hanap",-13.568764686584473],["完善的",-13.568764686584473],["▁அன்பு",-13.56877326965332],["▁mì",-13.568802833557127],["kassen",-13.568806648254396],["▁Fær",-13.568814277648926],["▁Szó",-13.56883716583252],["▁Onu",-13.568841934204102],["janju",-13.568857192993164],["▁olive",-13.568857192993164],["▁neniam",-13.568859100341797],["خار",-13.56886386871338],["▁ஃப",-13.568875312805176],["▁taxe",-13.56889820098877],["ଜୀ",-13.568902969360352],["▁roti",-13.568909645080566],["▁Problema",-13.5689115524292],["▁полици",-13.568922996520996],["ànic",-13.568929672241213],["▁mojego",-13.568931579589844],["▁Uí",-13.568958282470703],["მც",-13.568991661071776],["لاد",-13.569002151489258],["▁medtem",-13.569010734558104],["мера",-13.569029808044434],["্রে",-13.569058418273926],["बै",-13.56906032562256],["▁marido",-13.569074630737305],["▁পাতা",-13.569087982177734],["▁नका",-13.569113731384276],["കൃത്യ",-13.569119453430176],["▁eolas",-13.569127082824709],["▁grim",-13.56913948059082],["▁הינו",-13.569150924682615],["itetin",-13.569159507751465],["dense",-13.569180488586426],["也不能",-13.56920337677002],["▁raska",-13.569207191467283],["ຕ່າງປະເທດ",-13.56922721862793],["▁חסיד",-13.569231986999512],["ženo",-13.569232940673828],["gått",-13.56923484802246],["▁తెచ్చ",-13.569238662719728],["ለን።",-13.569246292114258],["▁ເພາະ",-13.569257736206056],["▁שעבר",-13.56926441192627],["▁실제로",-13.569278717041016],["▁ጥያቄዎች",-13.56928253173828],["▁Zil",-13.569300651550291],["▁bakarra",-13.569303512573242],["▁Halo",-13.569331169128418],["▁lumtur",-13.569377899169922],["▁Marek",-13.569391250610352],["▁età",-13.56940460205078],["▁ניסיון",-13.569411277770996],["▁filozof",-13.569432258605955],["▁жобалар",-13.569483757019045],["▁flertal",-13.569507598876951],["▁සභා",-13.5695219039917],["једно",-13.569531440734863],["då",-13.569543838500977],["▁massor",-13.569557189941406],["താമസ",-13.569558143615724],["▁ነፃ",-13.569558143615724],["ОЈ",-13.569567680358888],["အပ္",-13.569580078125],["▁notari",-13.569581985473633],["▁soko",-13.569584846496582],["▁liki",-13.569604873657228],["▁edilme",-13.569608688354492],["သင္",-13.569613456726074],["▁Dyma",-13.56964874267578],["წყო",-13.569676399230955],["▁שער",-13.569684982299805],["▁ибо",-13.569730758666992],["rodni",-13.569751739501951],["statakse",-13.569758415222168],["▁hverdagen",-13.569761276245115],["▁הציבור",-13.569765090942385],["▁Production",-13.569768905639648],["ଇଲେ",-13.56977081298828],["bava",-13.569787979125977],["▁smell",-13.569788932800291],["▁خاتم",-13.569790840148926],["翻译",-13.569809913635254],["叔",-13.569819450378418],["သွင်း",-13.569832801818848],["ដំណឹង",-13.569833755493164],["▁Düsseldorf",-13.569833755493164],["▁skúsenosti",-13.569833755493164],["▁zuletzt",-13.569833755493164],["▁сравнению",-13.569833755493164],["▁تحویل",-13.569833755493164],["▁आक्रमण",-13.569833755493164],["▁लक्ष्मी",-13.569833755493164],["▁চেয়ারম্যান",-13.569833755493164],["▁ଚିକିତ୍ସା",-13.569833755493164],["▁தொழில்நுட்பம்",-13.569833755493164],["▁ఎవరు",-13.569833755493164],["▁సోషల్",-13.569833755493164],["▁දුම්රිය",-13.569833755493164],["▁ලිංගික",-13.569833755493164],["▁ሳምንት",-13.569833755493164],["▁muvofiq",-13.56983470916748],["▁əha
tə",-13.56983470916748],["▁Kryeministri",-13.569835662841797],["▁kemaluan",-13.569835662841797],["▁lopulta",-13.569835662841797],["▁líquido",-13.569835662841797],["▁ਇੱਥੇ",-13.569835662841797],["ลาน",-13.569836616516112],["▁பிரதமர்",-13.569836616516112],["▁ticarət",-13.56983757019043],["▁riche",-13.569838523864746],["▁წლებში",-13.569838523864746],["ワーク",-13.56984043121338],["▁ખાતે",-13.569841384887695],["▁ఆత్మ",-13.569842338562012],["teinen",-13.569844245910645],["▁मिलेगा",-13.569863319396973],["▁टिकट",-13.569864273071287],["▁qëndrim",-13.569865226745604],["▁רגע",-13.569873809814451],["▁Qadri",-13.569880485534668],["▁służb",-13.56988525390625],["▁ਦੋਸ਼",-13.56988525390625],["ੁਲ",-13.569900512695312],["▁asker",-13.569902420043944],["▁lägger",-13.569903373718262],["▁فرمود",-13.56990909576416],["▁жыр",-13.569911003112791],["▁దృష్టి",-13.56991481781006],["▁присутні",-13.569916725158691],["很難",-13.569917678833008],["▁ବଦଳ",-13.56992244720459],["▁suposa",-13.569923400878906],["▁capelli",-13.569942474365234],["▁brīdi",-13.569952011108398],["▁VY",-13.569964408874512],["▁کھول",-13.569965362548828],["在全球",-13.569969177246094],["▁overheid",-13.569975852966309],["івського",-13.569981575012209],["显得",-13.56999397277832],["▁చూపించ",-13.570003509521484],["▁गेला",-13.57000732421875],["▁Skulle",-13.57005500793457],["տը",-13.570067405700684],["▁달라",-13.570082664489746],["ليو",-13.57009506225586],["▁lieux",-13.570104598999023],["1/2",-13.570123672485352],["371",-13.570125579833984],["ለብ",-13.570128440856934],["▁කිහිපයක්",-13.570137977600098],["▁concorda",-13.570144653320312],["▁zkušenosti",-13.570160865783691],["▁seuran",-13.570165634155272],["גרות",-13.570168495178224],["▁подорож",-13.570185661315918],["ılabilir",-13.570198059082031],["▁Hehe",-13.570239067077637],["▁preco",-13.570242881774902],["▁butun",-13.570253372192385],["▁बोला",-13.570261001586914],["Market",-13.57026195526123],["مائة",-13.570267677307127],["▁Posiada",-13.570268630981444],["▁Rafa",-13.570268630981444],["▁установки",-13.570272445678713],["▁myslel",-13.570279121398926],["David",-13.570298194885254],["▁прояви",-13.570340156555176],["▁መሪዎች",-13.570347785949709],["▁كشف",-13.57034969329834],["▁білді",-13.5703763961792],["▁referens",-13.570385932922363],["▁penki",-13.570388793945312],["▁کلید",-13.570389747619627],["▁električn",-13.570399284362791],["▁Fuji",-13.57040023803711],["▁پائ",-13.570406913757324],["▁186",-13.57040786743164],["▁synlig",-13.570414543151855],["టై",-13.570427894592283],["齡",-13.570429801940918],["āļu",-13.57043170928955],["стало",-13.57046604156494],["▁አባል",-13.570472717285156],["▁والله",-13.57050609588623],["どうしても",-13.570514678955078],["സന്",-13.570516586303713],["▁هواپیما",-13.570527076721191],["转化",-13.570544242858888],["▁बैठकमा",-13.570548057556152],["▁الکترونیکی",-13.570555686950684],["SION",-13.57055950164795],["ędz",-13.570565223693848],["ମୋ",-13.570575714111328],["▁kontakter",-13.57057762145996],["▁dba",-13.57058048248291],["ησή",-13.570581436157228],["▁Члан",-13.570584297180176],["▁tinut",-13.570585250854492],["aukš",-13.570589065551758],["▁deveria",-13.570623397827148],["▁రాజు",-13.570629119873049],["▁সিটি",-13.570643424987791],["▁planifica",-13.570645332336426],["▁dixi",-13.570669174194336],["േഴ്",-13.570677757263184],["▁Koper",-13.570682525634766],["▁Buka",-13.570696830749512],["▁Ամ",-13.570709228515623],["▁கேட்டு",-13.570721626281738],["▁simpla",-13.57073974609375],["▁شديد",-13.57073974609375],["តម្លៃ",-13.570746421813965],["▁alacak",-13.570771217346191],["ጥም",-13.57077407836914],["▁putting",-13.570789337158203]
,["▁bordet",-13.570815086364746],["orët",-13.570829391479492],["▁Virginia",-13.570834159851074],["▁veto",-13.57083797454834],["▁носа",-13.570841789245604],["▁навчально",-13.570844650268556],["昇",-13.570863723754885],["37)",-13.57088851928711],["▁napisał",-13.57089138031006],["baby",-13.570899963378906],["áig",-13.570903778076172],["▁rekonstru",-13.570905685424805],["▁közösségi",-13.57090950012207],["mās",-13.57095432281494],["読んで",-13.57095432281494],["oikeuden",-13.570960998535156],["ajiri",-13.570962905883787],["RZ",-13.570964813232422],["кової",-13.570965766906738],["स्थल",-13.570965766906738],["▁ଅଧିକାରୀ",-13.570985794067385],["▁отворено",-13.57099151611328],["हीन",-13.571000099182127],["ရော",-13.571002960205078],["▁vivant",-13.571011543273926],["▁пръст",-13.57101345062256],["▁fevral",-13.571072578430176],["تحرك",-13.57109260559082],["teľný",-13.571093559265137],["千万",-13.571105003356934],["▁Saturn",-13.571117401123049],["▁kelis",-13.571122169494627],["▁zaposleni",-13.57114028930664],["๊ะ",-13.571142196655272],["jatele",-13.571147918701172],["▁फुट",-13.571176528930664],["Як",-13.57118320465088],["▁tiri",-13.57118320465088],["hiye",-13.571185111999512],["ေတြနဲ႔",-13.57119846343994],["ाइज",-13.571206092834473],["▁kodo",-13.571207046508787],["言える",-13.571213722229004],["flow",-13.57123565673828],["▁instituciones",-13.571244239807127],["▁valle",-13.571250915527344],["pür",-13.571269035339355],["▁Dane",-13.571281433105469],["▁praesent",-13.571283340454102],["भो",-13.571290969848633],["palle",-13.571306228637695],["▁contemporane",-13.571328163146973],["▁Uniti",-13.571330070495604],["没有什么",-13.571364402770996],["اين",-13.571388244628906],["ప్పటికీ",-13.571414947509766],["avdelning",-13.571417808532717],["tambul",-13.571420669555664],["ológ",-13.571425437927246],["▁petak",-13.571428298950195],["▁Heel",-13.571441650390623],["▁gutes",-13.571441650390623],["▁vrátil",-13.571483612060549],["▁conserve",-13.571496963500977],["▁সকাল",-13.571508407592772],["▁Vip",-13.571516036987305],["▁NGO",-13.571517944335938],["ніку",-13.571521759033203],["▁बनी",-13.571521759033203],["▁santu",-13.571548461914062],["палі",-13.571555137634276],["▁ענק",-13.57155704498291],["▁ስላ",-13.57157039642334],["▁bancos",-13.571578025817873],["▁ਟੀ",-13.571589469909668],["▁аф",-13.571593284606934],["▁дисциплин",-13.571612358093262],["▁Åh",-13.57163906097412],["بقاء",-13.571640014648438],["▁դատ",-13.571640968322754],["▁mudança",-13.571648597717283],["▁Վար",-13.571662902832031],["tjeneste",-13.571674346923828],["ELO",-13.57167625427246],["▁হামলা",-13.571721076965332],["仰",-13.571760177612305],["▁rengi",-13.57176113128662],["▁חברי",-13.57177448272705],["▁Stoff",-13.571776390075684],["▁незалежна",-13.571782112121582],["▁વખત",-13.57178783416748],["▁Kitap",-13.571789741516112],["卓越",-13.571789741516112],["▁소식",-13.57179355621338],["淺",-13.57180404663086],["▁επιχειρ",-13.571820259094238],["▁нерухом",-13.571828842163086],["▁հանրագիտարան",-13.571828842163086],["▁Această",-13.571829795837402],["▁CÔNG",-13.571829795837402],["▁Peygamber",-13.571829795837402],["▁atnaujin",-13.571829795837402],["▁pràctica",-13.571829795837402],["▁тиждень",-13.571829795837402],["▁متاسفانه",-13.571829795837402],["▁ଅନୁସାରେ",-13.571829795837402],["▁ସଂସ୍କୃତି",-13.571829795837402],["▁frecvent",-13.57183074951172],["▁towarzysz",-13.57183074951172],["▁ଅଫିସ",-13.57183074951172],["▁გვაქვს",-13.57183074951172],["▁Orleans",-13.571831703186035],["▁보인다",-13.571831703186035],["▁spodbuja",-13.571836471557615],["Skyscanner",-13.571837425231934],["▁gedeelte",-13.571837425231934],["
▁irabazi",-13.571840286254885],["▁берилген",-13.5718412399292],["▁ancaq",-13.571845054626465],["▁يېقىن",-13.57184886932373],["▁dnešní",-13.571849822998049],["▁Frankreich",-13.57185173034668],["▁εντός",-13.57185173034668],["▁یکدیگر",-13.57185173034668],["▁promjena",-13.571852684020996],["shukuru",-13.571853637695312],["▁الأهلي",-13.571856498718262],["▁perfeita",-13.571859359741213],["▁әрбір",-13.571859359741213],["ტიკური",-13.571866035461426],["▁تبصرہ",-13.571866035461426],["▁sitios",-13.571879386901855],["▁ಸಣ್ಣ",-13.571881294250488],["▁abierto",-13.571887969970703],["▁ផ្តល់",-13.571889877319336],["▁Muta",-13.571892738342283],["mbuk",-13.571908950805664],["▁найбольш",-13.571911811828612],["▁Xavier",-13.571913719177246],["▁ჭა",-13.571913719177246],["▁гэхэд",-13.571916580200195],["СІ",-13.57192039489746],["▁получается",-13.57193374633789],["priek",-13.57193660736084],["▁sellainen",-13.57194995880127],["مثال",-13.57196044921875],["》《",-13.571979522705078],["聞く",-13.571988105773926],["▁magandang",-13.572020530700684],["宪法",-13.572029113769531],["▁Jinsi",-13.572031021118164],["▁baint",-13.572038650512695],["▁Ć",-13.57205295562744],["▁българския",-13.572071075439451],["▁appeared",-13.572086334228516],["▁caldo",-13.572088241577148],["▁håndtere",-13.572091102600098],["▁примене",-13.572098731994627],["▁Breast",-13.572121620178224],["▁מקבל",-13.572125434875488],["▁odstavka",-13.57212734222412],["▁Ángel",-13.5721435546875],["▁Pirk",-13.572153091430664],["▁Пъ",-13.572161674499512],["ุ่ม",-13.572162628173828],["▁англійськ",-13.572163581848145],["売り",-13.57217788696289],["▁īsteno",-13.572187423706056],["▁истина",-13.572210311889648],["▁RED",-13.572216987609863],["كسب",-13.572235107421877],["▁болсын",-13.57224464416504],["▁cijene",-13.572248458862305],["▁istediğiniz",-13.572265625],["▁sizə",-13.57228946685791],["▁especialista",-13.572296142578123],["▁Sahib",-13.572299003601074],["बळ",-13.572301864624023],["oyat",-13.572310447692873],["▁شته",-13.572314262390137],["ေဟာ",-13.572320938110352],["▁209",-13.572321891784668],["▁płac",-13.572328567504885],["▁føle",-13.572343826293944],["oaie",-13.572360038757324],["ậy",-13.572361946105955],["▁সর্ব",-13.572362899780272],["▁दोष",-13.572368621826172],["סביבה",-13.572382926940918],["▁reformas",-13.572386741638184],["Jak",-13.5723876953125],["خدم",-13.572392463684082],["investissement",-13.57240390777588],["▁häl",-13.572415351867676],["▁balení",-13.572428703308104],["обор",-13.572439193725586],["▁আসা",-13.572443962097168],["▁zasady",-13.57246208190918],["▁smør",-13.572463035583496],["▁clan",-13.572467803955078],["▁ছিলেন",-13.572477340698242],["▁০১৭",-13.572486877441406],["▁Див",-13.572505950927734],["คนอื่น",-13.57254409790039],["▁такому",-13.572547912597656],["centimetr",-13.572565078735352],["ignon",-13.572566032409668],["▁തുറന്ന",-13.5725736618042],["パス",-13.572582244873049],["學會",-13.572587966918944],["udesta",-13.572599411010742],["▁talagang",-13.572607040405272],["хор",-13.572613716125488],["시고",-13.572620391845703],["▁žinoma",-13.572628021240234],["▁mbyll",-13.572646141052246],["ຄຸນ",-13.572649002075195],["▁อาคาร",-13.572649955749512],["▁vyama",-13.572661399841309],["▁ухвал",-13.572671890258787],["▁Taş",-13.57270336151123],["▁paş",-13.572710990905762],["▁Tennis",-13.572757720947266],["AMENTO",-13.572759628295898],["ஷன்",-13.572773933410645],["▁Worm",-13.572773933410645],["alcalde",-13.572779655456545],["▁වරද",-13.57280158996582],["▁Coleg",-13.57281494140625],["首頁",-13.57281494140625],["tică",-13.57282543182373],["我们可以",-13.572832107543944],["പൂര്",-13.57283973693
8477],["▁Shugaban",-13.572851181030272],["▁התא",-13.572853088378906],["ేది",-13.572858810424805],["▁pocos",-13.57287311553955],["▁ვართ",-13.572876930236816],["▁Žal",-13.572881698608398],["▁zonke",-13.572896003723145],["▁regarding",-13.572904586791992],["training",-13.57291030883789],["ယား",-13.572911262512209],["▁хурал",-13.572920799255373],["厂家",-13.57294750213623],["▁muzikos",-13.57295036315918],["▁Meister",-13.57297134399414],["▁vendeve",-13.572975158691406],["nagy",-13.572994232177734],["રસ",-13.572994232177734],["ってしまう",-13.572996139526367],["▁kansa",-13.573002815246582],["ിലാ",-13.573003768920898],["▁majitel",-13.573017120361328],["▁أهداف",-13.573019981384276],["झे",-13.573031425476074],["▁ରି",-13.573043823242188],["▁хуралдаан",-13.573077201843262],["zining",-13.573090553283691],["▁Våre",-13.573095321655272],["▁Adat",-13.573098182678224],["▁tudtam",-13.57310676574707],["ဆဲ",-13.573122024536133],["ГД",-13.573125839233398],["▁fomentar",-13.573126792907717],["▁Reserve",-13.573128700256348],["બંધ",-13.573137283325195],["▁اٹھ",-13.573143005371094],["▁Պար",-13.573159217834473],["▁kennt",-13.57317066192627],["▁उड़",-13.573182106018066],["▁kryesor",-13.5731840133667],["▁Riks",-13.57319164276123],["stev",-13.573202133178713],["▁Ču",-13.573214530944824],["▁drží",-13.573222160339355],["podstatn",-13.573224067687988],["很多人都",-13.573235511779783],["▁arrangere",-13.57324504852295],["▁서울시",-13.573248863220217],["▁2009)",-13.57326889038086],["河南",-13.57326889038086],["▁salgs",-13.573272705078123],["▁veseli",-13.573272705078123],["ผู้เล่น",-13.573278427124023],["▁Bhar",-13.573286056518556],["ذاك",-13.573294639587402],["▁Rū",-13.57329559326172],["▁çağrı",-13.573312759399414],["▁viha",-13.573322296142578],["▁antud",-13.57334041595459],["овувати",-13.573346138000488],["165",-13.573347091674805],["▁उनीहरु",-13.573358535766602],["▁البنك",-13.573369979858398],["致します",-13.57338809967041],["组合",-13.573390007019045],["に入れ",-13.573410987854004],["thra",-13.573452949523926],["▁слага",-13.57346248626709],["▁याची",-13.573463439941406],["صحاب",-13.573468208312988],["▁зазначив",-13.573474884033203],["ଇନ",-13.573487281799316],["▁дорог",-13.573495864868164],["ിരുന്നത്",-13.573511123657228],["▁meðferð",-13.573528289794922],["▁نیازمند",-13.57353973388672],["있다",-13.573552131652832],["ေယာက္",-13.573586463928224],["halu",-13.573601722717283],["聖誕",-13.573637008666992],["▁agricole",-13.573671340942385],["ໄດ້ຮັບການ",-13.573711395263672],["▁çıkıyor",-13.57371711730957],["带领",-13.573718070983888],["▁տր",-13.573719024658203],["必要がある",-13.57373046875],["▁મોત",-13.573734283447266],["schal",-13.573737144470217],["▁pogodbe",-13.573750495910645],["TUN",-13.573755264282228],["▁waith",-13.57376194000244],["▁جمهوري",-13.57378101348877],["āties",-13.5737886428833],["ก์",-13.573802947998049],["رفض",-13.573803901672363],["変わり",-13.573819160461426],["३६",-13.573822021484377],["▁दस",-13.573823928833008],["▁fylgja",-13.57382869720459],["ဓမၼ",-13.573829650878906],["▁Aftenposten",-13.573829650878906],["▁Hargeysa",-13.573829650878906],["▁өкілдері",-13.573829650878906],["▁աստիճան",-13.573829650878906],["▁سڑک",-13.573829650878906],["▁هميشه",-13.573829650878906],["▁ক্ষমতা",-13.573829650878906],["▁അയാള്",-13.573829650878906],["▁ዶክተር",-13.573829650878906],["▁खराब",-13.573830604553224],["▁Kennedy",-13.57383155822754],["▁Xperia",-13.573832511901855],["สมอง",-13.573833465576172],["▁मंत्रालय",-13.573833465576172],["▁ocazia",-13.573834419250488],["ଥାନ୍ତି",-13.573835372924805],["▁دقيقة",-13.57383632659912],["ಿಸಿದ್ದ",-13.57383918762207],["▁التطبيق",-13.57
3840141296388],["▁мэдлэг",-13.573841094970703],["▁ବିଜୟ",-13.573843955993652],["▁Uwanja",-13.573844909667969],["▁Роберт",-13.573844909667969],["▁ffenest",-13.573845863342283],["▁грошов",-13.573845863342283],["Bay",-13.573850631713867],["▁tovuti",-13.573858261108398],["▁संकट",-13.573860168457031],["▁Päivä",-13.573862075805664],["▁kiderül",-13.573862075805664],["▁ഉറപ്പ",-13.573871612548828],["▁nocturn",-13.57387351989746],["▁Archiv",-13.573882102966309],["▁Digər",-13.573888778686523],["▁herêma",-13.573890686035156],["▁Комитет",-13.573890686035156],["മുഖ",-13.573891639709473],["▁structura",-13.573896408081056],["▁Amurka",-13.573904037475586],["▁eating",-13.5739107131958],["▁ვის",-13.573918342590332],["▁allerlei",-13.573925018310549],["▁eftermiddag",-13.57392692565918],["▁kazne",-13.573932647705078],["▁رضایت",-13.573945999145508],["▁vēlas",-13.57394790649414],["▁ਧਰਮ",-13.573948860168455],["बाहेक",-13.573952674865724],["szeit",-13.573954582214355],["▁służy",-13.57396125793457],["ਬਾਦ",-13.573963165283203],["▁وڃ",-13.573963165283203],["▁Азыр",-13.573966979980469],["▁fimm",-13.573973655700684],["▁netværk",-13.573978424072266],["ላዊ",-13.573984146118164],["▁Nachdem",-13.573991775512695],["▁núna",-13.573991775512695],["▁хотят",-13.57399559020996],["▁lijn",-13.574002265930176],["▁tarjoa",-13.574004173278809],["尽量",-13.574012756347656],["▁хороший",-13.574017524719238],["也将",-13.574051856994627],["▁ideál",-13.574053764343262],["planer",-13.57408332824707],["▁aneh",-13.574090957641602],["▁Zašto",-13.574108123779297],["▁האחר",-13.574110984802246],["▁બનાવી",-13.574115753173828],["▁stemning",-13.574127197265623],["▁бериш",-13.574132919311523],["▁fornire",-13.574141502380373],["▁ຍິງ",-13.574146270751951],["स्वी",-13.57418441772461],["ruše",-13.574196815490724],["▁šiol",-13.574213027954102],["▁kompren",-13.574261665344238],["▁evolución",-13.574265480041504],["პოლ",-13.57427978515625],["▁ouverte",-13.574284553527832],["▁парче",-13.57430648803711],["▁оор",-13.574311256408691],["▁вибір",-13.574325561523438],["▁Novel",-13.574338912963867],["גזר",-13.574347496032717],["▁федера",-13.574370384216309],["▁saol",-13.574379920959473],["გრძნობ",-13.574389457702637],["▁Karol",-13.574390411376951],["ζαν",-13.574393272399902],["1.8",-13.57439422607422],["ึก",-13.57439422607422],["▁princípio",-13.57439422607422],["ிங்",-13.57440185546875],["▁сакате",-13.57441234588623],["ovaným",-13.574417114257812],["▁tjenesten",-13.574423789978027],["▁អាន",-13.574423789978027],["▁asgjë",-13.574433326721191],["▁tiempos",-13.574438095092772],["▁erneut",-13.574440002441406],["▁װ",-13.574444770812988],["▁mentioned",-13.574447631835938],["▁Yeah",-13.574457168579102],["▁گلو",-13.574457168579102],["▁kvällen",-13.574464797973633],["▁డె",-13.574466705322266],["▁MMC",-13.57447910308838],["▁معمولی",-13.574482917785645],["▁Obchodní",-13.574492454528809],["▁осећа",-13.574498176574709],["▁прошлом",-13.574501991271973],["lize",-13.574505805969238],["▁տուր",-13.574506759643556],["▁السبب",-13.574509620666504],["οποίησης",-13.574527740478516],["▁Ministr",-13.574563026428224],["▁හේතු",-13.574565887451172],["▁botën",-13.574567794799805],["▁taasisi",-13.574573516845703],["ความรู้สึก",-13.574603080749512],["حول",-13.57460880279541],["▁Avy",-13.5746488571167],["家裡",-13.574650764465332],["▁anyagok",-13.57468032836914],["▁rydym",-13.574722290039062],["▁lettura",-13.574737548828123],["intégr",-13.57474422454834],["ББ",-13.574750900268556],["▁discurs",-13.5747709274292],["▁ranta",-13.57479190826416],["Camp",-13.574795722961426],["▁Pagka",-13.574798583984377],["ഈ",-13.
57480812072754],["యిన",-13.574823379516602],["SED",-13.574825286865234],["▁rakt",-13.57482624053955],["▁Ulaya",-13.5748291015625],["▁hafif",-13.574832916259766],["▁Gus",-13.574857711791992],["▁κατηγορ",-13.574858665466309],["▁سلاح",-13.574864387512209],["പ്പറ്റി",-13.574868202209473],["വിദ്യ",-13.574874877929688],["▁mesazh",-13.574884414672852],["▁maakte",-13.574901580810549],["▁örül",-13.574923515319824],["他にも",-13.574932098388672],["▁поступа",-13.574941635131836],["▁mulige",-13.574953079223633],["▁తు",-13.574978828430176],["▁Skand",-13.574995040893556],["ујемо",-13.575029373168944],["▁Ihana",-13.575030326843262],["если",-13.575031280517578],["▁Parker",-13.575057983398438],["▁ကား",-13.575067520141602],["інші",-13.575074195861816],["▁Ustav",-13.575079917907717],["▁melhorar",-13.575093269348145],["MZ",-13.57509708404541],["選舉",-13.575105667114258],["▁Қор",-13.575109481811523],["▁Leuk",-13.575115203857422],["▁seolah",-13.57512378692627],["ITS",-13.575140953063965],["raich",-13.575145721435549],["▁सर्वे",-13.575164794921877],["▁Kano",-13.575206756591797],["▁Глава",-13.575207710266112],["▁maçı",-13.57522201538086],["▁Kern",-13.575227737426758],["自分に",-13.575234413146973],["▁متهم",-13.575300216674805],["▁uzima",-13.575323104858398],["▁Ігор",-13.575358390808104],["▁iqtisodiy",-13.575410842895508],["▁supo",-13.57541847229004],["נעם",-13.57542896270752],["لاندى",-13.575430870056152],["▁mhí",-13.575441360473633],["▁padahal",-13.575457572937012],["андаа",-13.575467109680176],["fühl",-13.575478553771973],["▁filha",-13.575504302978516],["ۋۇ",-13.575514793395996],["▁repeat",-13.575539588928224],["चाल",-13.575560569763184],["▁ولل",-13.57556438446045],["夫婦",-13.57559585571289],["పక్ష",-13.575604438781738],["bura",-13.575605392456056],["úcháin",-13.575605392456056],["водите",-13.57561492919922],["▁Rho",-13.575626373291016],["▁prošlo",-13.575639724731444],["▁proposte",-13.575644493103027],["само",-13.57564640045166],["▁تفصیل",-13.575651168823242],["▁عزيز",-13.575654983520508],["▁Først",-13.575661659240724],["צרפת",-13.57566738128662],["顏",-13.575706481933594],["▁sød",-13.575716018676758],["▁ତାରିଖ",-13.57574462890625],["dóm",-13.575745582580566],["▁Bulgari",-13.575750350952148],["▁sedam",-13.575756072998049],["▁отиде",-13.575765609741213],["djela",-13.57576847076416],["▁organizada",-13.575769424438477],["饱",-13.575769424438477],["▁קונ",-13.575773239135742],["▁ഫോണ്",-13.575777053833008],["奖励",-13.575779914855955],["的男人",-13.575783729553224],["膚",-13.575793266296388],["▁зорилго",-13.575799942016602],["る事",-13.575801849365234],["▁চৌধুরী",-13.57580280303955],["ništva",-13.575809478759766],["▁Things",-13.575819969177246],["有沒有",-13.575822830200195],["ირი",-13.57582664489746],["קסנומקס",-13.575833320617676],["ប្រហែល",-13.575833320617676],["▁TANZANIA",-13.575833320617676],["▁antioxidant",-13.575833320617676],["▁febrúar",-13.575833320617676],["▁niektorých",-13.575833320617676],["▁oherwydd",-13.575833320617676],["▁tilgængelig",-13.575833320617676],["▁ساياھەت",-13.575833320617676],["▁وظیفه",-13.575833320617676],["▁চিকিৎসা",-13.575833320617676],["▁ችግሮች",-13.575833320617676],["옆",-13.575833320617676],["흑",-13.575833320617676],["Хмельницьк",-13.575834274291992],["▁bezmaksas",-13.575834274291992],["▁pöördu",-13.575834274291992],["흐",-13.575834274291992],["▁मौका",-13.575836181640623],["plej",-13.57583713531494],["▁Deluxe",-13.57583713531494],["▁핵심",-13.57583713531494],["▁лютага",-13.575841903686523],["فير",-13.575844764709473],["▁různých",-13.575844764709473],["▁شناسایی",-13.575851440429688],["promo",-13.57585906982422],["
▁Trainer",-13.575860023498535],["ພາຍໃນ",-13.575868606567385],["▁गर्नुभएको",-13.575868606567385],["▁Maqedonisë",-13.5758695602417],["▁വിവാദ",-13.575870513916016],["▁Medicine",-13.57588005065918],["▁hyviä",-13.575894355773926],["skrav",-13.575895309448242],["▁geluid",-13.575908660888672],["▁käsittely",-13.57591724395752],["▁Farben",-13.575922966003418],["ÖS",-13.575925827026367],["▁výskum",-13.575940132141112],["▁agrupa",-13.57594871520996],["▁گرامی",-13.575949668884276],["▁kolesterol",-13.575952529907228],["יטים",-13.575961112976074],["▁взема",-13.575974464416504],["▁wykonania",-13.57597541809082],["лната",-13.575990676879885],["▁Einstein",-13.575992584228516],["▁সালের",-13.575994491577148],["前後",-13.576000213623049],["▁ರೀತಿಯ",-13.576027870178224],["▁abita",-13.57603931427002],["maschine",-13.576044082641602],["▁फुल",-13.576045989990234],["isatie",-13.576050758361816],["▁Ainsi",-13.576051712036133],["▁Елена",-13.576057434082031],["▁מאשר",-13.576057434082031],["▁nóg",-13.576068878173828],["▁Παπα",-13.576068878173828],["▁നടന്നു",-13.576071739196776],["▁ξανά",-13.576080322265623],["േക്ക",-13.576107025146484],["▁മറ്റ്",-13.576109886169434],["▁בקרב",-13.576111793518066],["▁eingesetzt",-13.576115608215332],["▁עובדים",-13.57612419128418],["▁Социал",-13.576128959655762],["▁работят",-13.576135635375977],["▁výrobky",-13.576152801513672],["្ន",-13.576156616210938],["▁Yra",-13.576165199279783],["▁जसले",-13.576172828674316],["្ឋ",-13.576189994812012],["▁pemilihan",-13.57619285583496],["▁मिश्र",-13.576194763183594],["ävä",-13.57619857788086],["iyum",-13.576213836669922],["▁retin",-13.576220512390137],["▁aviso",-13.576223373413086],["য়েল",-13.57624626159668],["▁පැය",-13.576264381408691],["لاردا",-13.57627010345459],["kill",-13.576306343078612],["▁eylem",-13.576333045959473],["▁Obr",-13.576364517211914],["無限",-13.576367378234863],["▁माह",-13.576375961303713],["▁समितिको",-13.576390266418455],["▁watan",-13.576393127441406],["▁Servizo",-13.576394081115724],["がいる",-13.576415061950684],["人體",-13.576433181762695],["▁dikir",-13.576461791992188],["båt",-13.57646369934082],["עסט",-13.576470375061035],["▁yarışma",-13.576476097106934],["▁meghatározott",-13.576489448547363],["▁stället",-13.57651424407959],["▁nümayəndələri",-13.57654094696045],["▁kansan",-13.57655429840088],["โป๊",-13.576560020446776],["▁بيع",-13.576562881469728],["шылардың",-13.57656478881836],["▁boš",-13.57657241821289],["منح",-13.57658576965332],["▁semnat",-13.57659149169922],["▁ռե",-13.576602935791016],["ಸೂ",-13.576603889465332],["▁ünvanı",-13.576605796813965],["▁miliki",-13.57660675048828],["▁матеріалу",-13.576616287231444],["тандыру",-13.57662582397461],["ராம",-13.57662868499756],["CENTR",-13.576642990112305],["▁rajt",-13.576648712158203],["▁liep",-13.576664924621582],["▁nikah",-13.576675415039062],["▁Tools",-13.576695442199709],["▁zmaga",-13.57671070098877],["▁altamente",-13.57671356201172],["▁атлет",-13.576726913452148],["yard",-13.576741218566896],["目的地",-13.57674503326416],["▁വിട്ടു",-13.576751708984377],["▁Moore",-13.57676124572754],["▁Libri",-13.576836585998535],["▁Minimal",-13.576855659484863],["▁stöðu",-13.576870918273926],["▁сати",-13.576895713806152],["letti",-13.576898574829102],["szolgáltatás",-13.576912879943848],["▁řek",-13.576945304870604],["▁Lehrer",-13.576949119567873],["זון",-13.576956748962402],["▁भूल",-13.576964378356934],["▁संवाद",-13.5769681930542],["▁gofal",-13.576969146728516],["▁занять",-13.576970100402832],["လိုအပ်",-13.576996803283691],["▁sastāv",-13.57699966430664],["▁ටී",-13.577011108398438],["વારે",-13.57701396942138
8],["لاک",-13.577024459838867],["зраст",-13.57703971862793],["▁реално",-13.577054977416992],["స్తున్నారు",-13.577107429504396],["▁penjualan",-13.577157020568848],["▁2.6",-13.577173233032228],["ຽວ",-13.577180862426758],["▁aktiva",-13.577181816101074],["▁OH",-13.577188491821287],["▁juru",-13.577199935913086],["ycznego",-13.577223777770996],["▁mysli",-13.577225685119627],["▁ndërkombëtare",-13.577250480651855],["▁inteligen",-13.577263832092283],["▁swart",-13.577275276184082],["▁ደስ",-13.577275276184082],["▁بگو",-13.577277183532717],["就被",-13.577281951904297],["▁әдіс",-13.577284812927246],["▁производители",-13.577292442321776],["くれて",-13.577292442321776],["▁SANT",-13.57732105255127],["▁Tror",-13.577325820922852],["ологиялық",-13.577387809753418],["▁səh",-13.577404975891112],["สุดๆ",-13.577436447143556],["▁பெற்று",-13.57744312286377],["▁počasí",-13.57746124267578],["▁Nationale",-13.57748317718506],["ఫల",-13.577488899230955],["▁क्षेत्रको",-13.577494621276855],["制限",-13.577507972717283],["▁Iskola",-13.577515602111816],["▁lagunak",-13.577524185180664],["វែង",-13.577587127685549],["▁Αμ",-13.577601432800291],["교사",-13.577606201171877],["▁potere",-13.577635765075684],["талып",-13.577644348144531],["аттан",-13.57765769958496],["نحو",-13.57766056060791],["ようになる",-13.577666282653809],["▁аттуу",-13.57767105102539],["סטן",-13.577681541442873],["▁العين",-13.577706336975098],["▁Həsənov",-13.577719688415527],["ເບ",-13.577720642089844],["▁մեկն",-13.577730178833008],["ګه",-13.577731132507324],["▁ආවා",-13.57773208618164],["▁fragil",-13.577733039855955],["შის",-13.57773494720459],["▁ظرف",-13.57773780822754],["telemaan",-13.57774543762207],["习近平总书记",-13.577749252319336],["台灣的",-13.577749252319336],["സ്ട്ര",-13.577762603759766],["ห้องน้ํา",-13.577768325805664],["匠",-13.57778549194336],["▁votat",-13.577788352966309],["▁Самар",-13.57779026031494],["szenia",-13.577805519104004],["嗯",-13.577805519104004],["ESI",-13.577817916870115],["ująca",-13.577818870544434],["▁187",-13.577818870544434],["きちんと",-13.577824592590332],["アイテム",-13.577824592590332],["타임",-13.577825546264648],["そのような",-13.577826499938965],["怎樣",-13.577829360961914],["asiantuntija",-13.577841758728027],["Ị",-13.577841758728027],["‪.‬‬",-13.577841758728027],["▁fjölskyldu",-13.577841758728027],["▁lehetséges",-13.577841758728027],["▁nxjerr",-13.577841758728027],["▁İctimai",-13.577841758728027],["▁ředitel",-13.577841758728027],["▁Бүх",-13.577841758728027],["▁Լուրեր",-13.577841758728027],["▁خەلقئارا",-13.577841758728027],["▁এখানে",-13.577841758728027],["▁முழுவதும்",-13.577841758728027],["▁ukweli",-13.577842712402344],["認定",-13.577842712402344],["▁bại",-13.57784366607666],["▁verskillende",-13.57784366607666],["▁भन्नुभयो",-13.57784366607666],["▁soorten",-13.577844619750977],["▁Bhosle",-13.577845573425291],["▁මෙවැනි",-13.577845573425291],["თბ",-13.577847480773926],["在上海",-13.577852249145508],["フリー",-13.577853202819824],["▁કોર્ટ",-13.57785415649414],["▁доаѓа",-13.577857971191406],["▁stoljeća",-13.577858924865724],["▁අවසාන",-13.577858924865724],["▁Argentin",-13.577862739562988],["▁ажилла",-13.577866554260254],["▁bahçe",-13.57787036895752],["▁կգ",-13.577875137329102],["▁드러",-13.577881813049316],["לר",-13.57788372039795],["▁casă",-13.57788372039795],["▁आकाश",-13.577888488769531],["▁kreatif",-13.577892303466797],["bizottság",-13.577902793884276],["▁नजिक",-13.57790756225586],["▁Śląsk",-13.577909469604492],["▁ئۇلارنىڭ",-13.577919960021973],["▁alegria",-13.577920913696287],["▁voisin",-13.577923774719238],["▁กรุงเทพมหานคร",-13.577924728393556],["▁связанных",-13.5779409408569
34],["מעמד",-13.577942848205566],["▁Chiesa",-13.577954292297363],["▁Eftersom",-13.577957153320312],["rrr",-13.577970504760742],["▁රව්",-13.57797145843506],["▁సంగతి",-13.577972412109377],["시기",-13.577974319458008],["▁தினம்",-13.577980041503906],["▁təmir",-13.577996253967283],["▁сукоб",-13.57800006866455],["були",-13.578014373779297],["▁apresentação",-13.578025817871094],["▁świetnie",-13.578036308288574],["▁தவற",-13.578039169311523],["還會",-13.578044891357422],["▁အမွတ္",-13.578046798706056],["tissimo",-13.578060150146484],["▁हामीलाई",-13.578063011169434],["▁brown",-13.57807159423828],["▁sidde",-13.57807159423828],["▁ରହିବ",-13.578084945678713],["抵達",-13.57811164855957],["လယ္",-13.578129768371582],["▁nazywa",-13.578134536743164],["导演",-13.57814121246338],["сіі",-13.578142166137695],["ставки",-13.578143119812012],["▁meyve",-13.578146934509276],["▁Мора",-13.578149795532228],["▁olmuştur",-13.57816219329834],["lioni",-13.578164100646973],["talouden",-13.578164100646973],["▁fedt",-13.57819652557373],["istiske",-13.578203201293944],["जीव",-13.578208923339844],["してしまった",-13.578210830688477],["പ്പെടുന്നു",-13.578218460083008],["▁pieejama",-13.578235626220703],["▁versus",-13.578241348266602],["βλη",-13.57826042175293],["155",-13.57826328277588],["krishna",-13.578269958496094],["それで",-13.57831859588623],["ಿಸಿದ್ದಾರೆ",-13.578324317932127],["▁námi",-13.578333854675291],["▁Lava",-13.578351020812988],["▁síma",-13.57835292816162],["▁ករណី",-13.578373908996582],["▁fusion",-13.578387260437012],["wiesz",-13.57839298248291],["▁ನಾಗ",-13.578402519226074],["clear",-13.57845401763916],["ન્ન",-13.578455924987791],["▁לשם",-13.578500747680664],["▁എല്",-13.578516006469728],["▁emblem",-13.57852840423584],["▁ສອງ",-13.578557014465332],["sýning",-13.578577041625977],["▁لوٹ",-13.578588485717772],["落在",-13.578588485717772],["▁Amanda",-13.578593254089355],["▁vindue",-13.578595161437988],["topic",-13.578605651855469],["・・",-13.578608512878418],["իկի",-13.578621864318848],["▁Деца",-13.578627586364746],["▁చేసుకోవ",-13.57863712310791],["▁келі",-13.578662872314451],["▁bøger",-13.57866382598877],["vinnu",-13.5786771774292],["▁vlog",-13.578688621520996],["ತಂತ್ರ",-13.578689575195312],["черк",-13.578701972961426],["šenja",-13.578704833984377],["诚信",-13.578704833984377],["▁ఉంటా",-13.578707695007324],["公主",-13.578718185424805],["detail",-13.578760147094728],["▁профессионально",-13.578763961791992],["Audi",-13.578779220581056],["Radio",-13.578794479370115],["ình",-13.578800201416016],["▁corpore",-13.578803062438965],["брос",-13.578805923461914],["▁javnega",-13.578815460205078],["αρκ",-13.578834533691406],["environnement",-13.57885456085205],["▁konkurrence",-13.578861236572266],["▁לבן",-13.578864097595217],["▁sztuk",-13.578869819641112],["310",-13.578871726989746],["▁пошуку",-13.578875541687012],["шното",-13.57888126373291],["▁ойла",-13.578886032104492],["▁रखें",-13.578886032104492],["▁געווא",-13.578913688659668],["सम्बन्धी",-13.5789213180542],["läinen",-13.578937530517578],["กลม",-13.578937530517578],["▁краю",-13.578956604003906],["▁Morris",-13.578965187072754],["ማል",-13.57896614074707],["▁finalidade",-13.578972816467283],["እግዚአብሔር",-13.57897663116455],["▁spore",-13.578978538513184],["▁degrada",-13.579009056091309],["zhou",-13.57907772064209],["वन्",-13.579083442687988],["▁blaga",-13.57912540435791],["▁internacionales",-13.579126358032228],["▁இர",-13.579126358032228],["buhay",-13.579167366027832],["▁Pran",-13.579171180725098],["беле",-13.579178810119627],["rejse",-13.579183578491213],["▁Bilo",-13.579191207885742],["▁þótt",-13.579203605651855]
,["▁جدیدی",-13.579214096069336],["▁regen",-13.579230308532717],["▁rhy",-13.579273223876951],["เทศ",-13.579288482666016],["ojums",-13.579305648803713],["атрак",-13.579310417175291],["▁แต่ก็",-13.57931423187256],["▁αλλ",-13.579319953918455],["▁továbbá",-13.579363822937012],["енько",-13.579368591308594],["βάν",-13.579383850097656],["▁Oko",-13.579389572143556],["▁gjëra",-13.579405784606934],["ள்ளி",-13.579407691955566],["▁Abril",-13.579439163208008],["▁Industria",-13.57944107055664],["▁dny",-13.57944107055664],["ทําไม",-13.579459190368652],["▁parve",-13.57946491241455],["▁Ulko",-13.579473495483398],["▁dîsa",-13.57948398590088],["▁Μπα",-13.579495429992676],["göra",-13.579496383666992],["▁kulay",-13.57950210571289],["▁speaker",-13.57950210571289],["▁غرف",-13.579514503479004],["▁そこで",-13.579520225524902],["▁ditulis",-13.579524993896484],["▁అవి",-13.579533576965332],["לידה",-13.579562187194824],["kém",-13.579564094543455],["▁Christo",-13.579567909240724],["▁magac",-13.579572677612305],["▁Greg",-13.579601287841797],["АДА",-13.579604148864746],["ได้ดี",-13.57961082458496],["▁הצג",-13.57961082458496],["▁auxiliar",-13.579614639282228],["▁mote",-13.57961654663086],["▁ശക്തി",-13.579638481140137],["るか",-13.579656600952148],["အေပၚ",-13.579668998718262],["▁Љу",-13.579692840576172],["▁νόμο",-13.579693794250488],["philosoph",-13.579706192016602],["を開催",-13.579718589782717],["Në",-13.579719543457031],["पार्ट",-13.579750061035156],["▁гармон",-13.579751968383787],["BIN",-13.579753875732422],["▁Gefahr",-13.579761505126951],["NOSTI",-13.57977294921875],["Лю",-13.579777717590332],["调研",-13.579790115356444],["晒",-13.579792976379396],["▁originál",-13.57979965209961],["膝",-13.579803466796877],["καθ",-13.579804420471191],["מכון",-13.579822540283203],["截止",-13.579828262329102],["恐怕",-13.579829216003418],["บ่อย",-13.57984447479248],["冠軍",-13.57984447479248],["kového",-13.579849243164062],["唷",-13.579849243164062],["伦敦",-13.579851150512695],["힐",-13.579852104187012],["វិស័យ",-13.579853057861328],["សកម្មភាព",-13.579854011535645],["▁Kebijakan",-13.579854011535645],["▁wykorzystywan",-13.579854011535645],["▁ЕООД",-13.579854011535645],["▁исключительно",-13.579854011535645],["▁ذخیره",-13.579854011535645],["▁बिहीबार",-13.579854011535645],["▁ઓળખ",-13.579854011535645],["▁കോഴിക്കോട്",-13.579854011535645],["▁ወንጀል",-13.579854011535645],["▁아름다운",-13.579854011535645],["▁프랑스",-13.579854011535645],["▁განვითარება",-13.57985496520996],["សញ្ញា",-13.57985782623291],["▁säännö",-13.57985782623291],["▁በተመለከተ",-13.57985782623291],["▁позволит",-13.579863548278809],["▁മോദി",-13.579867362976074],["▁បន្ទាប់ពី",-13.57986831665039],["資產",-13.579872131347656],["▁sottolinea",-13.579877853393556],["▁Shayari",-13.579879760742188],["▁tapauksessa",-13.579879760742188],["obraz",-13.579891204833984],["▁здоровье",-13.579907417297363],["▁mpanao",-13.57991886138916],["▁centrā",-13.579930305480955],["ਐ",-13.579933166503906],["▁വയസ്സ",-13.579936027526855],["▁منصوبہ",-13.579960823059082],["▁болдог",-13.579967498779297],["▁cruci",-13.57999038696289],["▁Keres",-13.579998970031738],["တိုင်",-13.580010414123535],["▁ואח",-13.580039024353027],["▁रहता",-13.580056190490724],["先进的",-13.580059051513672],["▁কৰা",-13.58006191253662],["▁teksta",-13.580066680908203],["Tek",-13.58006763458252],["gabung",-13.58006763458252],["▁отворен",-13.580093383789062],["दय",-13.58009910583496],["▁luule",-13.58010196685791],["▁mudanças",-13.580103874206545],["▁prodaja",-13.580121040344238],["▁konzum",-13.58012866973877],["▁napraw",-13.580134391784668],["▁못한",-13.580137252807615],["▁Trou",-13.5
80138206481934],["▁Sense",-13.5801420211792],["▁fylke",-13.580144882202148],["▁wau",-13.58017349243164],["▁65%",-13.580207824707031],["▁medicale",-13.58022117614746],["▁реакции",-13.580240249633787],["振り",-13.580248832702637],["ليا",-13.580280303955078],["▁ກາ",-13.580284118652344],["▁berdua",-13.58028507232666],["▁ஆய்வு",-13.580300331115724],["▁разгляд",-13.580307960510254],["യോട്",-13.580368041992188],["▁Aha",-13.58036994934082],["오는",-13.580379486083984],["▁felhasználói",-13.580384254455566],["ॅन",-13.580405235290527],["永久",-13.580414772033691],["ၿမ",-13.5804443359375],["▁jarayoni",-13.58049488067627],["▁Сауд",-13.580510139465332],["ल्याने",-13.580524444580078],["▁afero",-13.580530166625977],["▁बेहद",-13.580531120300291],["女性の",-13.580533027648926],["十四",-13.580570220947266],["▁Dez",-13.58060359954834],["▁ірі",-13.58061695098877],["▁देशों",-13.580634117126465],["▁spirito",-13.58063507080078],["▁emplea",-13.580642700195312],["ЦІ",-13.580645561218262],["યર",-13.580649375915527],["▁alama",-13.58065128326416],["▁негізі",-13.580656051635742],["▁මේවා",-13.580656051635742],["▁ştiu",-13.58065700531006],["ttelin",-13.580668449401855],["▁ગમ",-13.580700874328612],["සය",-13.580703735351562],["▁raac",-13.58072280883789],["▁جج",-13.580729484558104],["▁παγ",-13.580735206604004],["命运",-13.580755233764648],["▁انتخابی",-13.58076286315918],["▁правим",-13.58081340789795],["װ",-13.580814361572266],["▁serviciile",-13.580818176269531],["monda",-13.58082389831543],["ುವುದನ್ನು",-13.580824851989746],["▁තිය",-13.58084201812744],["පේ",-13.580854415893556],["▁zbira",-13.580860137939451],["▁mezu",-13.580881118774414],["уват",-13.580906867980955],["▁وعن",-13.580914497375488],["▁Babe",-13.58095359802246],["ເດືອນ",-13.580967903137209],["▁thờ",-13.580984115600586],["බර",-13.58100414276123],["шов",-13.581016540527344],["▁scenari",-13.581037521362305],["▁viktige",-13.58104133605957],["▁gamit",-13.58105182647705],["ΣΤ",-13.58106517791748],["▁హిట్",-13.581101417541504],["▁ነጻ",-13.58110237121582],["Ві",-13.581127166748049],["▁spannend",-13.581128120422363],["ганда",-13.581140518188477],["нскиот",-13.581167221069336],["▁Palu",-13.581168174743652],["ක්කු",-13.581171989440918],["може",-13.58117389678955],["▁запуск",-13.581184387207031],["álneho",-13.58123016357422],["țiilor",-13.581232070922852],["aĵon",-13.581235885620115],["▁хвороб",-13.581279754638672],["▁такіх",-13.581287384033203],["▁sedih",-13.581292152404783],["▁ตอนนี้",-13.581320762634276],["ГИЙН",-13.581323623657228],["씨는",-13.58132553100586],["략",-13.581326484680176],["▁Мел",-13.581329345703123],["▁تفسير",-13.581332206726074],["▁lépe",-13.581343650817873],["▁ਲੜ",-13.58135223388672],["ходил",-13.581358909606934],["▁терористич",-13.581363677978516],["ნგ",-13.581388473510742],["と言われ",-13.581388473510742],["နီး",-13.581405639648438],["▁ਦਰਜ",-13.581412315368652],["ERRA",-13.58142375946045],["▁začali",-13.58143424987793],["mænd",-13.58144187927246],["▁ஒப்ப",-13.58144474029541],["дур",-13.581470489501951],["▁layihələr",-13.581480026245115],["ittele",-13.58148193359375],["▁പ്രദേശ",-13.581494331359863],["plikasi",-13.581501007080078],["▁confirmar",-13.581501960754396],["▁išče",-13.581502914428713],["НЗ",-13.581510543823242],["▁намын",-13.58151149749756],["7°",-13.581537246704102],["дөй",-13.581550598144531],["loup",-13.581565856933594],["ળે",-13.581575393676758],["▁لینڈ",-13.5816068649292],["擔",-13.581612586975098],["ılmasını",-13.581622123718262],["ໂລ",-13.581622123718262],["▁inferiore",-13.581628799438477],["行った",-13.581659317016602],["を持っている",-13.581663131713867],["uojamas",-1
3.5816650390625],["שומר",-13.581672668457031],["▁herbergi",-13.581701278686523],["▁Brat",-13.58170223236084],["fryd",-13.581703186035156],["▁anlamda",-13.58171844482422],["ाउँदा",-13.581719398498535],["▁килограм",-13.581745147705078],["▁şirketler",-13.581750869750977],["ਧਰ",-13.581768035888672],["给我们",-13.581798553466797],["▁biju",-13.58180046081543],["▁струва",-13.58180046081543],["▁жон",-13.581806182861328],["lister",-13.58181858062744],["نور",-13.581819534301758],["▁запрет",-13.581822395324709],["傲",-13.581827163696287],["កូ",-13.581830024719238],["廷",-13.581830978393556],["ブランド",-13.581857681274414],["▁Οικονομ",-13.581862449645996],["お勧め",-13.581864356994627],["ထောက်",-13.581868171691896],["គម្រោង",-13.581869125366213],["ເຊື້ອ",-13.581870079040527],["▁adecuado",-13.581870079040527],["▁caractère",-13.581870079040527],["▁kekuasaan",-13.581870079040527],["▁mükemmel",-13.581870079040527],["▁spettacolo",-13.581870079040527],["▁βοήθεια",-13.581870079040527],["▁фитнес",-13.581870079040527],["▁तथ्य",-13.581870079040527],["▁मायबोली",-13.581870079040527],["▁ਯੂਨੀਵਰਸਿਟੀ",-13.581870079040527],["▁ନିର୍ମାଣ",-13.581870079040527],["▁සෞඛ්",-13.581870079040527],["▁shampoo",-13.581871032714844],["▁હજાર",-13.581871032714844],["▁احکام",-13.581872940063477],["▁zadarmo",-13.58187484741211],["▁игровые",-13.581880569458008],["tər",-13.581884384155272],["▁melkein",-13.581887245178224],["▁الدنيا",-13.581887245178224],["บนใบหน้า",-13.58188819885254],["▁հասարակության",-13.581889152526855],["▁músico",-13.581890106201172],["▁Evropa",-13.581894874572754],["▁အဆိုပါ",-13.581894874572754],["▁الرقم",-13.58189582824707],["▁regji",-13.581904411315918],["ปก",-13.581929206848145],["▁(2003)",-13.58193588256836],["▁дверь",-13.58194637298584],["አዲሱ",-13.581948280334473],["▁आभार",-13.581954002380373],["▁ਡਰ",-13.581960678100586],["▁igazság",-13.581963539123535],["▁vedieť",-13.581965446472168],["▁Дис",-13.581974029541016],["▁segítség",-13.581975936889648],["规定的",-13.581978797912598],["▁fali",-13.581984519958496],["▁cerebral",-13.581992149353027],["uré",-13.581993103027344],["▁jabatan",-13.58199977874756],["גאַ",-13.582012176513672],["▁réel",-13.58204174041748],["389",-13.582077026367188],["▁Origin",-13.582086563110352],["lässig",-13.582094192504885],["▁witamin",-13.582097053527832],["▁ശാസ്ത്ര",-13.582107543945312],["ვილი",-13.582113265991213],["ξεις",-13.582123756408691],["▁berperan",-13.582139015197754],["ザー",-13.582141876220703],["▁саясий",-13.58214282989502],["athiri",-13.582168579101562],["▁evenement",-13.582183837890623],["▁gesond",-13.582188606262209],["ドラマ",-13.582196235656738],["▁nastup",-13.58220386505127],["gall",-13.582205772399902],["tədqiqat",-13.582218170166016],["▁osobu",-13.582221031188965],["德里",-13.582221031188965],["▁незаконно",-13.58222484588623],["▁ଅସ",-13.582232475280762],["▁миллионов",-13.582246780395508],["▁immens",-13.582249641418455],["KOV",-13.582253456115724],["▁អន្តរជាតិ",-13.582253456115724],["forestilling",-13.582262992858888],["▁ನಿಯಮ",-13.582292556762695],["ಮನ",-13.582304954528809],["грев",-13.58230686187744],["▁größte",-13.582318305969238],["▁таван",-13.58232879638672],["▁Gordon",-13.582357406616213],["▁leaders",-13.582382202148438],["увани",-13.5823974609375],["▁Fig",-13.58240032196045],["ಣ್",-13.582411766052246],["▁جە",-13.582427024841309],["گاہ",-13.582429885864258],["▁GER",-13.582444190979004],["▁คา",-13.582452774047852],["▁organizaciji",-13.5824613571167],["▁жайлы",-13.582475662231444],["的心情",-13.582487106323242],["▁qaadan",-13.582501411437988],["hallinto",-13.58250904083252],["▁Генеральн",-13.58
2518577575684],["▁stutt",-13.58254623413086],["എല്",-13.58256721496582],["leştirme",-13.582568168640137],["ብሪ",-13.582574844360352],["▁convenio",-13.582579612731934],["ઓની",-13.582584381103516],["шэн",-13.58258819580078],["५५",-13.582602500915527],["▁contraire",-13.582605361938477],["▁vim",-13.582605361938477],["▁litet",-13.582622528076172],["▁restos",-13.582642555236816],["▁mesafe",-13.582670211791992],["▁εξο",-13.582687377929688],["ರಾಯ",-13.582708358764648],["▁службов",-13.582717895507812],["وژن",-13.58272647857666],["▁polí",-13.582746505737305],["▁신고",-13.582759857177734],["▁Styl",-13.582782745361328],["alternativ",-13.582789421081545],["మన్",-13.582799911499023],["▁Qay",-13.582825660705566],["▁Tarih",-13.582855224609377],["▁കയറി",-13.58285903930664],["procent",-13.582865715026855],["▁hayoti",-13.58286952972412],["ोद",-13.582883834838867],["rör",-13.582898139953612],["▁fərqli",-13.582904815673828],["▁izstrādā",-13.582921981811523],["▁wurk",-13.582944869995115],["▁smjer",-13.582975387573242],["▁имейл",-13.58298110961914],["▁Anzahl",-13.583012580871582],["cku",-13.583026885986328],["РЫН",-13.583033561706545],["▁sanitar",-13.583047866821287],["ของฉัน",-13.583109855651855],["▁మానవ",-13.583133697509766],["▁בדי",-13.58320426940918],["fila",-13.583223342895508],["schef",-13.583240509033203],["▁Kde",-13.58324909210205],["paja",-13.583258628845217],["▁četrt",-13.583264350891112],["ദൂര",-13.583295822143556],["ləşmə",-13.583335876464844],["füg",-13.583372116088867],["කුල",-13.583393096923828],["▁بنی",-13.583410263061523],["IDO",-13.583415031433104],["33)",-13.583422660827637],["AKE",-13.583436965942385],["χθηκε",-13.583452224731444],["งา",-13.583490371704102],["▁imkanları",-13.583499908447266],["读书",-13.583503723144531],["▁вынік",-13.583518981933594],["▁Individu",-13.58352756500244],["▁ранг",-13.583548545837402],["▁spatiu",-13.583562850952148],["eeda",-13.583576202392578],["▁υγεία",-13.58360767364502],["رنگ",-13.58364963531494],["३४",-13.58366870880127],["rohet",-13.583673477172852],["권을",-13.583681106567385],["əsinin",-13.583700180053713],["▁civitate",-13.583760261535645],["োজ",-13.58376121520996],["јуће",-13.583782196044922],["▁zbav",-13.583784103393556],["▁සිද්ධිය",-13.583785057067873],["▁Odpo",-13.583805084228516],["怨",-13.58382511138916],["▁Hóa",-13.583829879760742],["chú",-13.58383560180664],["वृ",-13.583839416503906],["垂",-13.58384609222412],["俩",-13.583847045898438],["szony",-13.583847999572754],["dhara",-13.5838623046875],["▁රෝහල",-13.583867073059082],["ကၡ",-13.583877563476562],["ିଲା",-13.58388614654541],["姿勢",-13.58388614654541],["fesztivál",-13.583889961242676],["๘",-13.583889961242676],["ເກົາຫຼີ",-13.583889961242676],["▁flexibel",-13.583889961242676],["▁mengakibatkan",-13.583889961242676],["▁najprej",-13.583889961242676],["▁neposredno",-13.583889961242676],["▁skikkelig",-13.583889961242676],["▁szczęście",-13.583889961242676],["▁wrażenie",-13.583889961242676],["▁řidič",-13.583889961242676],["▁ωστόσο",-13.583889961242676],["▁серьезно",-13.583889961242676],["▁ಸಾರ್ವಜನಿಕ",-13.583889961242676],["▁desaparece",-13.583890914916992],["▁donderdag",-13.583890914916992],["▁ushtarak",-13.583890914916992],["▁şüphe",-13.583890914916992],["▁سلسلے",-13.583890914916992],["נקודות",-13.583891868591309],["▁көрсетілетін",-13.583891868591309],["▁aștept",-13.583892822265623],["▁pembangkang",-13.58389377593994],["▁zamiast",-13.583895683288574],["▁संरचना",-13.58389663696289],["联邦",-13.583897590637209],["▁чиглэлээр",-13.583900451660156],["▁اليهود",-13.583903312683104],["▁Polecam",-13.583914756774902],["▁последний"
,-13.583917617797852],["▁аудио",-13.583919525146484],["▁Frederic",-13.58393096923828],["▁منہ",-13.583935737609863],["lasting",-13.583939552307127],["▁dóna",-13.583939552307127],["▁ವಿವರಗಳು",-13.583941459655762],["▁ritrova",-13.583943367004396],["▁pitanju",-13.583948135375977],["▁Bav",-13.58395004272461],["▁കുറവ",-13.583953857421877],["▁شبیه",-13.583956718444824],["▁утверждении",-13.583958625793455],["▁Spon",-13.58396053314209],["▁tänne",-13.583975791931152],["用の",-13.583984375],["suhde",-13.583986282348633],["▁hubiera",-13.583996772766112],["▁Koos",-13.584003448486328],["▁мовою",-13.584007263183594],["здание",-13.584019660949709],["▁ভারতের",-13.58402156829834],["▁આપવામાં",-13.584022521972656],["▁volumen",-13.584028244018556],["တမ်း",-13.584035873413086],["▁fiican",-13.584036827087402],["昭和",-13.58405303955078],["iệm",-13.584064483642578],["▁miközben",-13.584077835083008],["▁Θέμα",-13.584092140197754],["▁پلا",-13.584115982055664],["▁રસ",-13.584115982055664],["▁lokacij",-13.584135055541992],["▁Đã",-13.584153175354004],["रासस",-13.584157943725586],["▁خار",-13.584157943725586],["▁예술",-13.5841646194458],["▁Leia",-13.584187507629396],["▁поклон",-13.584188461303713],["▁borroka",-13.584190368652344],["สิน",-13.584197998046877],["ходзяць",-13.584200859069824],["▁uniquement",-13.584222793579102],["▁armen",-13.584223747253418],["grid",-13.584228515625],["▁Patria",-13.584229469299316],["▁extreme",-13.58423137664795],["▁मात",-13.584233283996582],["▁زنی",-13.584236145019531],["▁anvendes",-13.584242820739746],["▁मिलने",-13.58425521850586],["▁chien",-13.58426284790039],["ത്വം",-13.584267616271973],["▁avar",-13.584290504455566],["▁tautas",-13.5842924118042],["ioara",-13.584295272827148],["১১",-13.584306716918944],["▁mgr",-13.584314346313477],["▁kvalitātes",-13.584317207336426],["toodete",-13.58431911468506],["ንቅ",-13.584341049194336],["▁sertifika",-13.58435344696045],["▁अवसरमा",-13.584365844726562],["▁függ",-13.584375381469728],["lular",-13.584388732910156],["▁konsekvenser",-13.584389686584473],["ambiance",-13.584392547607422],["▁mecz",-13.584412574768066],["▁ਟੀਮ",-13.584416389465332],["▁shahrida",-13.584424018859863],["▁دير",-13.584424018859863],["▁ప్రధాని",-13.584425926208496],["्यावर",-13.584434509277344],["▁capacita",-13.584444046020508],["▁Градск",-13.58444881439209],["▁пляж",-13.584454536437988],["▁elején",-13.58445644378662],["同時也",-13.584465026855469],["paino",-13.584470748901367],["smart",-13.584485054016112],["▁factors",-13.58449363708496],["を入れ",-13.58452033996582],["▁pü",-13.584555625915527],["gevoer",-13.584574699401855],["▁αθλ",-13.584595680236816],["▁earste",-13.584607124328612],["▁Бес",-13.58462905883789],["NIM",-13.584630012512209],["േജ്",-13.584635734558104],["▁паралел",-13.58466339111328],["▁Länder",-13.584677696228027],["(19",-13.584694862365724],["▁លាន",-13.584696769714355],["▁थीं",-13.584711074829102],["▁АВ",-13.584717750549316],["▁imtahan",-13.58471965789795],["▁спектр",-13.58472728729248],["▁staje",-13.584729194641112],["▁лекарства",-13.584729194641112],["▁watak",-13.584735870361328],["▁aksiya",-13.584761619567873],["чаў",-13.584786415100098],["▁Дана",-13.58479118347168],["mates",-13.584797859191896],["安い",-13.584806442260742],["▁стає",-13.584809303283691],["▁tutvu",-13.584814071655272],["▁సహ",-13.584821701049805],["JAK",-13.584846496582031],["рыць",-13.58485507965088],["▁හිටිය",-13.584860801696776],["▁Lula",-13.584887504577637],["▁موقعیت",-13.584888458251951],["یدو",-13.584917068481444],["ոյան",-13.58493995666504],["▁911",-13.584943771362305],["解析",-13.584946632385254],["▁ווערט",-13.
584948539733888],["▁prevención",-13.584968566894531],["คิ",-13.58497142791748],["კლ",-13.584989547729492],["▁nápoj",-13.58499813079834],["▁ցուց",-13.585023880004885],["Специал",-13.58502960205078],["▁tematik",-13.58503532409668],["POL",-13.585041046142578],["▁frit",-13.585042953491213],["ძლევ",-13.585108757019045],["▁Chic",-13.585113525390623],["Dec",-13.585122108459473],["▁लिएको",-13.58513355255127],["▁انتها",-13.585140228271484],["▁ocurre",-13.5851411819458],["▁tepi",-13.585150718688965],["▁masaya",-13.58515167236328],["▁ئەزا",-13.58515739440918],["ுகின்றன",-13.585165977478027],["▁αυτ",-13.585195541381836],["зит",-13.585206031799316],["▁avaliku",-13.585210800170898],["▁notika",-13.585217475891112],["▁periodic",-13.58522129058838],["თის",-13.585271835327148],["▁rivela",-13.58527660369873],["▁benefícios",-13.585278511047363],["▁самата",-13.585296630859377],["ੀਏ",-13.585309982299805],["ilmoitus",-13.585330963134766],["▁გაე",-13.585347175598145],["thú",-13.585368156433104],["cendi",-13.58537769317627],["▁Bahn",-13.58539581298828],["▁estuda",-13.585409164428713],["▁Адже",-13.58542823791504],["▁rasi",-13.585456848144531],["pois",-13.585461616516112],["κληση",-13.585464477539062],["ಭಿ",-13.585464477539062],["ьное",-13.58546543121338],["ācī",-13.58547306060791],["्छु",-13.585474014282228],["κληρ",-13.585530281066896],["路线",-13.58553695678711],["以内",-13.58555507659912],["▁Wissenschaft",-13.585562705993652],["▁자동",-13.585573196411133],["的人生",-13.585582733154297],["kilo",-13.585597038269045],["▁معها",-13.585626602172852],["▁tvarka",-13.585627555847168],["▁आखिर",-13.585640907287598],["▁serbest",-13.585646629333496],["▁услове",-13.585657119750977],["▁buldu",-13.585659980773926],["affiche",-13.585663795471191],["스를",-13.585665702819824],["▁chant",-13.58566951751709],["▁thự",-13.585683822631836],["льнай",-13.585699081420898],["▁אָפּ",-13.585699081420898],["zila",-13.585731506347656],["▁தேவ",-13.585736274719238],["▁بردن",-13.585747718811035],["▁ආණ්ඩු",-13.5857515335083],["रोग",-13.585768699645996],["▁ராம",-13.585768699645996],["kuru",-13.585771560668944],["▁ڀر",-13.585782051086426],["▁valide",-13.58581256866455],["▁uyarı",-13.585833549499512],["▁pět",-13.585844993591309],["כביש",-13.58584976196289],["▁ریلی",-13.585857391357422],["赔偿",-13.585861206054688],["mått",-13.585862159729004],["勿",-13.58586311340332],["纪录",-13.585870742797852],["لڪ",-13.585872650146484],["▁ερε",-13.58588409423828],["委託",-13.585892677307127],["värdering",-13.585894584655762],["応募",-13.58590602874756],["削除",-13.58591079711914],["oldvideochat",-13.585914611816406],["ແກ້ໄຂ",-13.585914611816406],["▁működik",-13.585914611816406],["▁piękny",-13.585914611816406],["▁privacitat",-13.585914611816406],["▁Ελλήνων",-13.585914611816406],["▁κάνετε",-13.585914611816406],["▁συνάντηση",-13.585914611816406],["▁идентитет",-13.585914611816406],["▁персонаж",-13.585914611816406],["▁ситуацию",-13.585914611816406],["▁Өнөөдөр",-13.585914611816406],["▁آگهی",-13.585914611816406],["▁ਵਿਕੀਪੀਡੀਆ",-13.585914611816406],["▁สิงหาคม",-13.585914611816406],["▁წარმოადგენს",-13.585914611816406],["👉",-13.585914611816406],["▁حيدرآباد",-13.585915565490724],["▁விடுதலை",-13.585915565490724],["헬",-13.585915565490724],["▁изделия",-13.58591651916504],["▁بېرى",-13.58591651916504],["▁Beiträge",-13.585917472839355],["▁działalność",-13.585918426513672],["▁წლიდან",-13.585919380187988],["▁obyvatel",-13.585920333862305],["▁vadītāja",-13.585923194885254],["異なる",-13.585923194885254],["▁Favorit",-13.585925102233888],["▁ciudadanos",-13.585927963256836],["テスト",-13.585933685302734],["▁uçuş",-
[model/sentence-transformer/unigram.json: SentencePiece unigram tokenizer vocabulary, a long list of ["piece", log_probability] pairs sorted by descending probability; the scores in this span run from roughly -13.586 down to -13.605. The full listing is omitted for brevity.]
755373],["▁администратор",-13.605162620544434],["緊急",-13.605168342590332],["▁цирк",-13.60517120361328],["いなかった",-13.605172157287598],["ውቅ",-13.605198860168455],["ெட்",-13.605234146118164],["ացնելու",-13.60523509979248],["▁প্রকাশক",-13.605246543884276],["▁Comunicació",-13.60525894165039],["▁მამ",-13.605267524719238],["▁memorie",-13.605286598205566],["▁குரு",-13.605292320251465],["▁lasku",-13.60529613494873],["298",-13.605298042297363],["一辆",-13.60533046722412],["▁dhanka",-13.60533332824707],["hiếp",-13.605342864990234],["undar",-13.605350494384766],["▁değişim",-13.605388641357422],["ноз",-13.605406761169434],["▁ampia",-13.6054105758667],["ขี้",-13.60543155670166],["▁PORT",-13.60544776916504],["▁búa",-13.605454444885254],["▁häst",-13.605470657348633],["に来て",-13.605476379394531],["▁тече",-13.605486869812012],["▁walking",-13.605487823486328],["▁tò",-13.605502128601074],["▁Volk",-13.605515480041504],["▁густ",-13.605570793151855],["▁offered",-13.60558032989502],["කේ",-13.605582237243652],["▁경기도",-13.605630874633787],["▁montar",-13.605631828308104],["▁sendes",-13.605631828308104],["▁සංවර්ධන",-13.605649948120115],["▁ERP",-13.605690956115724],["ाएका",-13.60570240020752],["▁Kõ",-13.605706214904783],["▁מהר",-13.605708122253418],["byen",-13.605711936950684],["pół",-13.605714797973633],["▁pericolo",-13.605714797973633],["ጥበብ",-13.605735778808594],["tësi",-13.60574436187744],["她说",-13.605772018432615],["生命的",-13.6057767868042],["▁245",-13.605789184570312],["▁fibre",-13.605794906616213],["▁299",-13.60581874847412],["▁trov",-13.605843544006348],["▁6.2",-13.605874061584473],["▁Binnen",-13.605879783630373],["▁сябра",-13.605891227722168],["የን",-13.605907440185549],["ПРА",-13.60590934753418],["▁dodatki",-13.605932235717772],["▁rakenda",-13.605939865112305],["ເກີດ",-13.605942726135254],["ଆସ",-13.60594367980957],["এল",-13.605945587158203],["▁տեղադր",-13.605950355529783],["ျမင့္",-13.605955123901367],["mbin",-13.60596752166748],["▁شيئا",-13.60597801208496],["▁నటించ",-13.605982780456545],["tauti",-13.6060152053833],["ОВИ",-13.606023788452148],["▁ਸਰਕਾਰੀ",-13.606027603149414],["▁Viên",-13.606040954589844],["▁ყოველი",-13.606048583984377],["▁Рам",-13.606073379516602],["出版社",-13.606074333190918],["一切都",-13.606077194213867],["▁localidade",-13.606083869934082],["stanie",-13.606090545654297],["▁ግንባታ",-13.606091499328612],["▁experienced",-13.60610008239746],["чном",-13.606101036071776],["itwa",-13.60611343383789],["లింగ్",-13.606118202209473],["φυσ",-13.606133460998535],["▁પુર",-13.606167793273926],["იზმი",-13.606188774108888],["▁svjetski",-13.606200218200684],["ņus",-13.606202125549316],["มันเป็น",-13.60622215270996],["▁jāiz",-13.606234550476074],["▁Şurası",-13.606253623962402],["piirkond",-13.606268882751465],["▁читав",-13.606268882751465],["ywanie",-13.606289863586426],["▁IMP",-13.606292724609377],["ваючы",-13.60629940032959],["▁آمیز",-13.606310844421388],["ஃப்",-13.606317520141602],["劫",-13.606322288513184],["tehnika",-13.60632610321045],["samband",-13.606330871582031],["▁Диа",-13.60633373260498],["▁משתמש",-13.60633373260498],["獸",-13.606342315673828],["▁serbe",-13.606345176696776],["▁προϊόν",-13.606345176696776],["▁மாணவர்",-13.606350898742676],["▁জে",-13.606358528137209],["▁ឆ្នាំ២០១៧",-13.60637092590332],["シリーズ",-13.606371879577637],["ционер",-13.606380462646484],["ยิ้ม",-13.606385231018066],["ជប៉ុន",-13.606385231018066],["amministrazione",-13.606386184692385],["▁Podmienky",-13.606386184692385],["▁deelnemers",-13.606386184692385],["▁mahdollisuus",-13.606386184692385],["▁mệnh",-13.606386184692385],["▁נוספת",-13.6063861
84692385],["▁ملګرو",-13.606386184692385],["▁शैली",-13.606386184692385],["▁हमारा",-13.606386184692385],["▁আপডেট",-13.606386184692385],["▁উল্লেখ",-13.606386184692385],["▁വിദ്യാര്",-13.606386184692385],["릿",-13.606386184692385],["▁তুমি",-13.6063871383667],["▁സ്വകാര്യ",-13.606389045715332],["▁enostavno",-13.606389999389648],["▁चेहरे",-13.606389999389648],["▁αλλαγές",-13.606392860412598],["▁синдром",-13.606392860412598],["▁ប្រភព",-13.606392860412598],["▁dovrà",-13.606395721435549],["某种",-13.606395721435549],["▁trebao",-13.60639762878418],["▁ጥረት",-13.60639762878418],["▁обясни",-13.606398582458496],["▁Bohu",-13.606405258178713],["▁посао",-13.606412887573242],["냈다",-13.606420516967772],["▁అతను",-13.606422424316406],["ക്കണം",-13.606432914733888],["▁وسیع",-13.606433868408203],["▁demostrar",-13.60643482208252],["▁zostaną",-13.606443405151367],["▁Sundhed",-13.606447219848633],["▁технологій",-13.606451988220217],["▁хэлсэн",-13.606453895568848],["▁ādas",-13.606456756591797],["▁файлы",-13.60648250579834],["werkzaamheden",-13.606487274169922],["▁महानगर",-13.60649871826172],["▁ആരും",-13.606504440307615],["▁снаге",-13.606505393981934],["inneadh",-13.606523513793944],["交往",-13.606524467468262],["▁Рах",-13.606527328491213],["▁integrantes",-13.606557846069336],["▁මරණ",-13.606561660766602],["▁Fantastisk",-13.6065673828125],["▁vaihta",-13.6065673828125],["▁ہاتھوں",-13.606574058532717],["▁republica",-13.606575965881348],["▁utilizzato",-13.606586456298828],["▁technické",-13.606595039367676],["▁ціна",-13.606605529785156],["▁الطرق",-13.606605529785156],["▁sjen",-13.606614112854004],["ກີ",-13.606619834899902],["▁בסוף",-13.60662078857422],["▁geur",-13.606623649597168],["▁harreman",-13.606634140014648],["▁شكرا",-13.606634140014648],["suojelu",-13.606657028198242],["を迎え",-13.606694221496582],["▁arkitektur",-13.606718063354492],["▁החדשה",-13.606728553771973],["388",-13.606729507446287],["▁થયેલ",-13.60673999786377],["▁الدورة",-13.606741905212402],["▁چندین",-13.60674285888672],["▁염",-13.606752395629885],["ilha",-13.606759071350098],["▁көрүн",-13.606762886047363],["中间",-13.60676383972168],["essaan",-13.606767654418944],["▁Pick",-13.606775283813477],["ณา",-13.606801986694336],["ващ",-13.6068115234375],["சூ",-13.606837272644045],["gold",-13.606843948364258],["▁Komisioni",-13.606846809387209],["▁спирт",-13.606854438781738],["▁ඉටු",-13.606858253479004],["▁stava",-13.606893539428713],["yaasha",-13.606915473937988],["お客様に",-13.606916427612305],["▁პროექტი",-13.60692024230957],["送到",-13.606928825378418],["▁Lagt",-13.60693359375],["gazi",-13.606938362121582],["▁toplantısı",-13.606942176818848],["▁اتهام",-13.606947898864746],["▁Midt",-13.60696792602539],["djur",-13.60698413848877],["ФОР",-13.606988906860352],["▁kampus",-13.606992721557615],["▁belanja",-13.606996536254885],["▁типов",-13.607003211975098],["服务器",-13.607015609741213],["▁dezvoltare",-13.607017517089844],["ങ്ങളില",-13.607019424438477],["▁llawn",-13.60702896118164],["▁Får",-13.607035636901855],["mbana",-13.607040405273438],["श्वास",-13.607051849365234],["язання",-13.6070556640625],["▁स्वामी",-13.607088088989258],["▁daraja",-13.607105255126951],["▁दर्शक",-13.607108116149902],["organització",-13.607115745544434],["▁funcional",-13.60711669921875],["▁Ник",-13.607131958007812],["ക്കള്",-13.607141494750977],["▁ljubitelj",-13.607154846191406],["▁ផង",-13.607157707214355],["დამ",-13.607187271118164],["tapos",-13.607206344604492],["▁mentale",-13.607207298278809],["▁bake",-13.607215881347656],["▁parlamenti",-13.607229232788086],["▁добави",-13.607243537902832],["лый",-13.607258796691896
],["▁వారం",-13.607276916503906],["▁בעוד",-13.607308387756348],["▁próximas",-13.607319831848145],["▁රික්",-13.60733127593994],["▁saaray",-13.607359886169434],["▁econòmic",-13.607364654541016],["▁Český",-13.60736846923828],["خم",-13.607369422912598],["ទ្",-13.60737419128418],["логу",-13.607391357421877],["▁montaña",-13.607430458068848],["▁Brīv",-13.60744285583496],["▁통신",-13.607473373413086],["▁какого",-13.607479095458984],["▁включая",-13.607481002807615],["خلاص",-13.607481956481934],["ተዋል።",-13.6074857711792],["▁тран",-13.607503890991213],["▁Şti",-13.60751724243164],["lýsing",-13.607519149780272],["▁ഏക",-13.607521057128906],["▁expertos",-13.607528686523438],["דין",-13.60754680633545],["▁منصب",-13.607547760009766],["▁мән",-13.607548713684082],["▁usuários",-13.607551574707031],["▁artistes",-13.607561111450195],["▁uNkulunkulu",-13.607562065124512],["እም",-13.607572555541992],["▁нарко",-13.60758113861084],["ကာလ",-13.60759449005127],["▁masura",-13.60760498046875],["دلال",-13.607611656188965],["њење",-13.607619285583496],["▁związan",-13.607630729675291],["▁Reader",-13.60763168334961],["▁రాత్రి",-13.607645988464355],["לוגי",-13.60765266418457],["▁charme",-13.607680320739746],["▁estén",-13.607694625854492],["ÍN",-13.607708930969238],["▁işlemi",-13.607747077941896],["تطبيق",-13.607748031616213],["ੱਠ",-13.60775089263916],["▁fantasia",-13.60775661468506],["▁organik",-13.60776710510254],["乃至",-13.607789039611816],["▁ክፍ",-13.607802391052246],["▁vatan",-13.607820510864258],["▁müsste",-13.607827186584473],["▁forrige",-13.607833862304688],["שבת",-13.60783576965332],["▁haridus",-13.6078519821167],["▁കഴിയുന്ന",-13.607864379882812],["జం",-13.607876777648926],["▁සැලක",-13.607885360717772],["▁alleine",-13.607900619506836],["னார்",-13.607904434204102],["▁Unido",-13.60791015625],["▁trec",-13.607934951782228],["▁ਕੋਰਟ",-13.607938766479492],["시스템",-13.607962608337402],["▁המא",-13.60796356201172],["▁සමු",-13.607964515686035],["անալու",-13.607976913452148],["▁гола",-13.607985496520996],["▁покуша",-13.607999801635742],["გია",-13.608010292053224],["yaka",-13.60802173614502],["かい",-13.608040809631348],["▁mbetur",-13.608050346374512],["లేదని",-13.60806941986084],["школ",-13.608073234558104],["▁Жү",-13.608076095581056],["▁responsabili",-13.608078956604004],["▁அறிவு",-13.6080904006958],["▁Escorte",-13.608092308044434],["▁klær",-13.60810089111328],["▁øre",-13.608101844787598],["fermo",-13.608118057250977],["威尼斯",-13.608141899108888],["дек",-13.608150482177734],["▁Arne",-13.608166694641112],["▁слот",-13.608183860778809],["▁سابقه",-13.60818576812744],["▁velikim",-13.608186721801758],["történet",-13.608187675476074],["▁Existen",-13.608193397521973],["ാണെന്നും",-13.608194351196287],["ဖိ",-13.608206748962402],["TEX",-13.608238220214844],["മ്മി",-13.608263969421388],["▁സുഖ",-13.608264923095703],["ባበ",-13.608275413513184],["▁vidis",-13.608285903930664],["ফু",-13.60828971862793],["▁Altın",-13.608302116394045],["kirurgi",-13.608304977416992],["বির",-13.608311653137209],["▁Pain",-13.60831356048584],["drog",-13.608317375183104],["ിനി",-13.608338356018066],["▁osobné",-13.60836124420166],["nünk",-13.608381271362305],["89)",-13.608393669128418],["τήρι",-13.608404159545898],["▁망",-13.608406066894531],["嚴格",-13.60841178894043],["懸",-13.60841178894043],["岳",-13.608436584472656],["▁całe",-13.608441352844238],["เกือบ",-13.608452796936035],["ೋತ್ಸವ",-13.6084566116333],["▁Evropë",-13.6084566116333],["▁grundsätzlich",-13.6084566116333],["▁ketentuan",-13.6084566116333],["▁përfaqësues",-13.6084566116333],["▁sredstava",-13.6084566116333],["▁παρελθόν",-
13.6084566116333],["▁притежава",-13.6084566116333],["▁інформаційно",-13.6084566116333],["▁քվեարկ",-13.6084566116333],["▁ترجیح",-13.6084566116333],["▁निर्वाचित",-13.6084566116333],["▁সৌদি",-13.6084566116333],["▁ხოლმე",-13.6084566116333],["▁inglês",-13.608457565307615],["▁smještaj",-13.608457565307615],["▁símbolo",-13.608457565307615],["▁благодарение",-13.608457565307615],["▁суботу",-13.608457565307615],["▁Kanskje",-13.608458518981934],["▁lihtne",-13.608458518981934],["▁uitdaging",-13.608458518981934],["▁πρόσβαση",-13.608458518981934],["▁Hivatal",-13.6084623336792],["▁بحيث",-13.6084623336792],["▁szociális",-13.608464241027832],["▁juuni",-13.608465194702148],["▁உயிர்",-13.608465194702148],["▁ditemui",-13.608469009399414],["▁pulau",-13.608469009399414],["▁מדוע",-13.608470916748049],["してきた",-13.608474731445312],["▁tháinig",-13.608475685119627],["▁valyuta",-13.608477592468262],["▁rygg",-13.60848331451416],["▁வார்த்தை",-13.60848617553711],["指標",-13.60848617553711],["▁طولانی",-13.60849952697754],["แพง",-13.608514785766602],["પે",-13.6085205078125],["▁slide",-13.6085205078125],["จะช่วย",-13.60853099822998],["▁קבוצת",-13.608540534973145],["▁پیوند",-13.608548164367676],["▁lækre",-13.608549118041992],["овій",-13.60855770111084],["▁zunanji",-13.608559608459473],["▁sülh",-13.608560562133787],["▁sýna",-13.608567237854004],["▁səhifələrində",-13.608570098876951],["▁atribui",-13.608572006225586],["▁ehhez",-13.608580589294434],["▁plne",-13.6085844039917],["ົ່າ",-13.608590126037598],["▁myndigheter",-13.608591079711914],["▁معالج",-13.608599662780762],["▁dysgu",-13.608613014221191],["▁የትግራይ",-13.608617782592772],["ສັນຍາ",-13.608625411987305],["national",-13.60862636566162],["▁चाहता",-13.608628273010254],["▁무단",-13.60862922668457],["ిగా",-13.608641624450684],["▁concrete",-13.608644485473633],["▁Савет",-13.608647346496582],["▁sidang",-13.608651161193848],["▁miongon",-13.608654022216797],["▁חס",-13.608657836914062],["▁강의",-13.608683586120604],["▁Његов",-13.608688354492188],["ത്തിലൂടെ",-13.608691215515137],["tulog",-13.608712196350098],["▁preserva",-13.608729362487791],["▁upea",-13.608738899230955],["▁좋다",-13.608746528625488],["нист",-13.608750343322754],["▁piramid",-13.608752250671388],["නං",-13.608763694763184],["uğu",-13.608768463134766],["janak",-13.608774185180664],["▁Тър",-13.608781814575195],["▁חופש",-13.60878562927246],["▁rahatsız",-13.60883331298828],["▁Gelo",-13.608856201171877],["▁actief",-13.608875274658203],["ément",-13.608893394470217],["▁finished",-13.608909606933594],["▁estudia",-13.60891819000244],["▁வலை",-13.608941078186035],["▁FER",-13.6089448928833],["▁ಬಡ",-13.60897159576416],["вечер",-13.608976364135742],["ানোর",-13.609000205993652],["▁скро",-13.609006881713867],["illant",-13.609014511108398],["כאב",-13.609036445617676],["gxi",-13.609064102172852],["▁Stort",-13.60906982421875],["▁pāris",-13.609086990356444],["▁Patro",-13.60909938812256],["▁Reply",-13.609105110168455],["▁hodnoty",-13.609108924865724],["▁1899",-13.609149932861328],["▁Iris",-13.609185218811035],["▁Komment",-13.6091890335083],["▁contesto",-13.609193801879885],["▁болка",-13.609195709228516],["ىستان",-13.609210968017578],["ЕСТ",-13.609235763549805],["▁културата",-13.609254837036133],["ुत्",-13.609256744384766],["శీ",-13.609271049499512],["▁frikë",-13.609289169311523],["תחרות",-13.609336853027344],["▁газу",-13.609342575073242],["valdkonna",-13.60935878753662],["▁Парк",-13.609360694885254],["▁باتیں",-13.609376907348633],["▁yapılmış",-13.60937786102295],["▁efectivo",-13.609389305114746],["▁vermək",-13.609397888183594],["▁tekstil",-13.609
42840576172],["▁Rossi",-13.609450340270996],["▁Euskara",-13.609451293945312],["▁шири",-13.609457969665527],["prez",-13.60948657989502],["▁පත්ව",-13.609488487243652],["▁birga",-13.609489440917969],["▁Evans",-13.609498977661133],["▁فورا",-13.609503746032717],["räkning",-13.609515190124512],["▁plantes",-13.60952377319336],["▁Janne",-13.609524726867676],["▁дошло",-13.60953140258789],["▁metodologi",-13.609533309936523],["▁kohale",-13.609541893005373],["▁tekita",-13.60955047607422],["▁складі",-13.609567642211914],["actuació",-13.60956859588623],["人大",-13.60956859588623],["▁põhjust",-13.60958766937256],["cò",-13.60959243774414],["tarë",-13.60959529876709],["▁କମ୍",-13.60959529876709],["क्रिया",-13.609599113464355],["▁სალ",-13.60960865020752],["▁döneminde",-13.609613418579102],["▁fogja",-13.609613418579102],["▁Ստաց",-13.60962963104248],["ਗ਼",-13.609634399414062],["▁ಇದರಿಂದ",-13.609651565551758],["የር",-13.609671592712402],["▁zvon",-13.609688758850098],["دعا",-13.60970973968506],["▁trö",-13.609731674194336],["לג",-13.609735488891602],["▁besoek",-13.609761238098145],["чысы",-13.609764099121094],["ెన్స్",-13.609804153442385],["▁Märk",-13.609811782836914],["▁keskusta",-13.609820365905762],["▁такім",-13.609838485717772],["სუ",-13.609840393066406],["▁டீ",-13.60986042022705],["▁inclui",-13.609877586364746],["інформ",-13.609888076782228],["▁වහ",-13.609889030456545],["More",-13.609896659851074],["әрекет",-13.609903335571287],["รณ์",-13.609908103942873],["▁එතන",-13.60991382598877],["▁diens",-13.609992027282717],["▁પાડ",-13.609992027282717],["▁የሚመ",-13.609992027282717],["0.6",-13.60999584197998],["समोर",-13.60999584197998],["▁održati",-13.610003471374512],["▁канц",-13.610005378723145],["▁ជីវិត",-13.610011100769045],["▁ڏسڻ",-13.610024452209473],["nieść",-13.610039710998535],["смотреть",-13.610062599182127],["sögu",-13.610079765319824],["liyoruz",-13.610082626342772],["ดับ",-13.610084533691406],["▁Uzman",-13.610116958618164],["ពាក្យ",-13.61016845703125],["ాలలో",-13.610191345214844],["เรียบ",-13.61019229888916],["▁profesionale",-13.610248565673828],["▁roliga",-13.61025619506836],["▁celem",-13.610270500183104],["▁reconstru",-13.610274314880373],["▁lelki",-13.61028003692627],["▁coll",-13.610285758972168],["▁şart",-13.610307693481444],["ಾತ",-13.610319137573242],["ခဲ့ရ",-13.610321998596191],["▁toimus",-13.61032485961914],["ಿಸಿಕೊಂಡ",-13.610345840454102],["र्ध",-13.610355377197266],["γία",-13.610365867614746],["▁ფრა",-13.610369682312012],["▁bøker",-13.610370635986328],["▁francia",-13.610373497009276],["▁харж",-13.61038589477539],["▁bosan",-13.610389709472656],["▁nəticələri",-13.610392570495604],["▁agency",-13.610401153564451],["었는데",-13.610404014587402],["▁etsin",-13.61042022705078],["gadi",-13.610430717468262],["▁shift",-13.61044216156006],["▁belas",-13.610461235046388],["▁bixin",-13.610462188720703],["渴望",-13.610468864440918],["高達",-13.610474586486816],["▁Lamb",-13.610477447509766],["▁ویبپاڼ",-13.610477447509766],["膨",-13.61049747467041],["▁Kies",-13.610502243041992],["抖",-13.61050510406494],["▁قرب",-13.61050796508789],["▁ravna",-13.610508918762209],["キャンペーン",-13.610520362854004],["កំពង់",-13.610529899597168],["ေသြး",-13.610530853271484],["▁Bedürfnisse",-13.610530853271484],["▁CỦA",-13.610530853271484],["▁jumătate",-13.610530853271484],["▁kräver",-13.610530853271484],["▁tergantung",-13.610530853271484],["▁κράτος",-13.610530853271484],["▁πήρε",-13.610530853271484],["▁अश्लील",-13.610530853271484],["▁সম্মেলন",-13.610530853271484],["▁જિલ્લા",-13.610530853271484],["▁مصاحبه",-13.6105318069458],["▁хамгаалагдсан",-13.610532760
620115],["▁ұзақ",-13.610532760620115],["▁ಸಂಪೂರ್ಣ",-13.610532760620115],["▁scelto",-13.610533714294434],["▁magnífic",-13.610535621643066],["▁perilaku",-13.610538482666016],["▁опрема",-13.610538482666016],["▁طبیعت",-13.610538482666016],["zinho",-13.610539436340332],["▁Ludwig",-13.610540390014648],["▁esperantistoj",-13.610540390014648],["▁letošní",-13.610540390014648],["▁Ireland",-13.610546112060549],["▁jaanuar",-13.610555648803713],["▁шаардык",-13.610555648803713],["▁gıda",-13.610557556152344],["どうぞ",-13.610557556152344],["▁Crystal",-13.610562324523926],["▁өттү",-13.610565185546877],["▁ಬಣ್ಣ",-13.610569953918455],["▁Останні",-13.610570907592772],["▁लक्षात",-13.610570907592772],["nictví",-13.61057186126709],["тқан",-13.61057186126709],["▁terpercaya",-13.61058521270752],["▁politico",-13.610597610473633],["▁کرلیا",-13.610602378845217],["אנג",-13.61060619354248],["pey",-13.61062240600586],["▁Ensino",-13.610623359680176],["▁최소",-13.610624313354492],["▁நோய்",-13.610651969909668],["▁olumsuz",-13.610658645629885],["▁Прва",-13.610661506652832],["▁pulsuz",-13.610676765441896],["▁Login",-13.610688209533691],["流れ",-13.610689163208008],["▁овозможува",-13.610692024230955],["▁Pihak",-13.610709190368652],["බෝ",-13.610722541809082],["▁convince",-13.610726356506348],["▁위해서",-13.610750198364258],["なっている",-13.610763549804688],["▁prý",-13.610764503479004],["▁јавност",-13.61076545715332],["不一样",-13.610773086547852],["ότερη",-13.61078929901123],["▁phobl",-13.61078929901123],["签订",-13.61078929901123],["▁persoonlijk",-13.610799789428713],["41)",-13.610804557800291],["лски",-13.61080551147461],["▁risal",-13.610806465148926],["کیا",-13.610817909240724],["پال",-13.610831260681152],["▁horoskop",-13.610833168029783],["▁provinsi",-13.61083698272705],["▁stein",-13.610868453979492],["▁कार्",-13.610869407653809],["▁pohybu",-13.610901832580566],["▁이야기를",-13.610917091369627],["▁अंत",-13.610939025878906],["▁usiku",-13.610943794250488],["▁македон",-13.610943794250488],["ىقى",-13.610955238342283],["▁győz",-13.610960006713867],["ειών",-13.610971450805664],["▁pobres",-13.610973358154297],["▁clearly",-13.610974311828612],["▁продолжа",-13.611003875732422],["▁गरीब",-13.611019134521484],["liczyć",-13.611023902893066],["▁бави",-13.611037254333496],["▁Interior",-13.61106777191162],["車輛",-13.61106777191162],["▁čuva",-13.61107063293457],["▁ось",-13.611085891723633],["▁outils",-13.611092567443848],["ಾಂತರ",-13.61109447479248],["▁종합",-13.611135482788086],["▁ნატო",-13.6111478805542],["▁لپ",-13.611187934875488],["िके",-13.611209869384766],["▁pyar",-13.61122226715088],["▁ZDA",-13.611225128173828],["▁වෙන්නෙ",-13.611230850219728],["ലാണ്",-13.611241340637209],["▁доходи",-13.611260414123535],["▁betaler",-13.61126708984375],["▁umsókn",-13.61127758026123],["deep",-13.611284255981444],["▁Διά",-13.611284255981444],["▁xweş",-13.611303329467772],["▁ගල",-13.611303329467772],["▁खुले",-13.611306190490724],["▁relief",-13.611315727233888],["配送",-13.611322402954102],["790",-13.611329078674316],["टॉप",-13.611330032348633],["एच",-13.611342430114746],["ზონ",-13.611343383789062],["福建",-13.61137580871582],["ängen",-13.611377716064451],["▁comité",-13.611384391784668],["▁names",-13.611414909362791],["▁କମ",-13.611448287963867],["수의",-13.611454010009766],["▁Lär",-13.611456871032717],["▁maestro",-13.61146068572998],["▁ബന്ധപ്പെട്ട",-13.611464500427246],["σφαλ",-13.61147689819336],["ส่งฟรี",-13.611485481262209],["regnskab",-13.611507415771484],["▁жерден",-13.611530303955078],["tettua",-13.611532211303713],["▁operador",-13.61156940460205],["alaihi",-13.611573219299316],["▁Tea
tre",-13.611592292785645],["▁cubi",-13.611594200134276],["▁германски",-13.611595153808594],["▁lögum",-13.611597061157228],["czności",-13.611612319946287],["ערע",-13.611613273620604],["லின்",-13.611618995666504],["tawi",-13.61161994934082],["▁ühes",-13.611620903015137],["anomena",-13.61162281036377],["inimą",-13.611634254455566],["celer",-13.61166286468506],["CAS",-13.61167335510254],["daļa",-13.61167812347412],["▁prejel",-13.611689567565918],["שפיע",-13.611713409423828],["róp",-13.611726760864258],["▁uygulaması",-13.611738204956056],["▁αερο",-13.611746788024902],["▁happening",-13.611760139465332],["在你",-13.611776351928713],["▁Provide",-13.611783027648926],["▁обязательств",-13.611791610717772],["iñeiro",-13.611797332763672],["ijoita",-13.611804008483888],["▁солдат",-13.611817359924316],["▁näita",-13.611830711364746],["▁πρωτ",-13.611830711364746],["かれた",-13.611830711364746],["▁സൈ",-13.611835479736328],["▁članak",-13.61184310913086],["▁Rubi",-13.611878395080566],["სამ",-13.611882209777832],["energi",-13.611899375915527],["acaksınız",-13.611931800842283],["מיש",-13.611942291259766],["▁плен",-13.611950874328612],["▁utilizat",-13.611960411071776],["▁Mercur",-13.61196231842041],["▁lucrat",-13.611968040466309],["▁rana",-13.611976623535156],["ਚਾਰ",-13.611978530883787],["interromp",-13.611981391906738],["▁드라마",-13.611989974975586],["▁இப்ப",-13.612030982971191],["▁پخت",-13.61203384399414],["▁დეტალ",-13.612048149108888],["വിട്ട",-13.612074851989746],["▁yürüt",-13.612125396728516],["ကြယ္",-13.61212921142578],["▁Balta",-13.612131118774414],["śmy",-13.61215114593506],["დარი",-13.612164497375488],["▁▷",-13.612168312072754],["սում",-13.612174987792969],["nehm",-13.612197875976562],["▁těm",-13.612200736999512],["▁stvarno",-13.612205505371094],["▁выпуска",-13.612222671508787],["▁тексеру",-13.612237930297852],["332",-13.612259864807127],["▁стоп",-13.612260818481444],["ប្រកាស",-13.612287521362305],["כיר",-13.612316131591797],["▁precedent",-13.61232852935791],["لفت",-13.612330436706545],["▁двама",-13.612333297729492],["นัน",-13.61234188079834],["▁adierazi",-13.6123628616333],["▁dhammaan",-13.612363815307615],["▁Suku",-13.61237335205078],["նից",-13.612377166748049],["gamot",-13.612382888793944],["435",-13.612396240234377],["cao",-13.612397193908691],["▁cambiare",-13.612399101257324],["▁жақын",-13.61244773864746],["נאי",-13.612466812133787],["যোগ",-13.612471580505373],["▁напиша",-13.6124849319458],["賭",-13.6124849319458],["▁পায়",-13.612488746643066],["գո",-13.61250114440918],["▁expectativas",-13.612505912780762],["踩",-13.612536430358888],["幽",-13.61255168914795],["畢竟",-13.61256504058838],["▁permiten",-13.612567901611328],["▁будемо",-13.612584114074709],["駕駛",-13.612588882446287],["▁देशमा",-13.612589836120604],["綺麗",-13.612592697143556],["継",-13.612595558166504],["標籤",-13.612605094909668],["відповідальність",-13.61260986328125],["ዑ",-13.61260986328125],["ጐ",-13.61260986328125],["▁aprendizaje",-13.61260986328125],["▁behavior",-13.61260986328125],["▁benötigen",-13.61260986328125],["▁bitartez",-13.61260986328125],["▁ovviamente",-13.61260986328125],["▁propriétaire",-13.61260986328125],["▁sündmus",-13.61260986328125],["▁ηλεκτρονικού",-13.61260986328125],["▁будівництва",-13.61260986328125],["▁тайлбар",-13.61260986328125],["▁ترلاسه",-13.61260986328125],["▁ہفتہ",-13.61260986328125],["▁मुताबिक",-13.61260986328125],["▁ప్రాజెక్టు",-13.61260986328125],["▁ಕಂಪನಿ",-13.61260986328125],["▁ಮೈಸೂರು",-13.61260986328125],["▁എങ്കിലും",-13.61260986328125],["▁ተግባር",-13.61260986328125],["▁matatizo",-13.612610816955566],["▁իրադարձություն",-
13.612610816955566],["▁میکند",-13.612610816955566],["▁කට්ටිය",-13.612610816955566],["▁Богородиц",-13.612611770629885],["מדריך",-13.6126127243042],["▁বিজয়",-13.6126127243042],["▁Geburtstag",-13.612613677978516],["▁género",-13.612616539001465],["▁قواعد",-13.612618446350098],["▁मराठा",-13.61262321472168],["▁μαθητές",-13.612624168395996],["▁претходни",-13.612625122070312],["▁problemes",-13.612626075744627],["▁மலர்",-13.612627029418944],["▁przynajmniej",-13.612627983093262],["▁Vallès",-13.612631797790527],["▁עניין",-13.612631797790527],["▁Duitsland",-13.612632751464844],[":«",-13.612640380859377],["เก่ง",-13.612640380859377],["เชียงราย",-13.612640380859377],["ልቅ",-13.612640380859377],["▁சேவை",-13.612642288208008],["▁контент",-13.61264705657959],["▁رياست",-13.612648010253906],["Под",-13.612650871276855],["▁پیگیری",-13.612650871276855],["訂單",-13.612650871276855],["▁Εκεί",-13.61266040802002],["tăm",-13.612661361694336],["▁мамандар",-13.612661361694336],["▁irmão",-13.612664222717283],["KN",-13.612669944763184],["▁ಮಾಡಿಕೊಳ್ಳ",-13.612674713134766],["▁karyawan",-13.61268138885498],["▁демон",-13.612698554992676],["▁15.000",-13.612722396850586],["▁қысқа",-13.612722396850586],["▁прекин",-13.61273193359375],["打ち",-13.612737655639648],["▁בשעה",-13.612741470336914],["▁phú",-13.612765312194824],["▁їй",-13.612768173217772],["▁이곳",-13.612770080566406],["▁जरुरी",-13.612771034240724],["ฎ",-13.612780570983888],["ttēlus",-13.612810134887695],["ДЕН",-13.61281681060791],["▁земята",-13.612825393676758],["▁жинақ",-13.61284351348877],["▁solução",-13.61284637451172],["▁jätku",-13.612859725952148],["ሃገር",-13.612873077392578],["DIR",-13.612887382507324],["▁проби",-13.612887382507324],["▁dedim",-13.61289119720459],["▁ഗ്രസ്",-13.612893104553224],["▁Попов",-13.612909317016602],["▁дзіця",-13.612924575805664],["▁augusta",-13.612948417663574],["▁ಸಹಾಯ",-13.61296272277832],["íček",-13.61298656463623],["▁باري",-13.612988471984863],["▁ospiti",-13.61298942565918],["サービスを",-13.612990379333496],["▁فروغ",-13.612991333007812],["ల్డ్",-13.612994194030762],["▁Kosovo",-13.613006591796877],["▁düşünür",-13.613008499145508],["تاز",-13.613019943237305],["FG",-13.613029479980469],["▁ചോദിച്ച",-13.613038063049316],["▁ക്ഷേത്ര",-13.613039016723633],["▁indique",-13.613049507141112],["▁ভে",-13.61306381225586],["▁IES",-13.613082885742188],["全民",-13.613085746765137],["▁postre",-13.613101959228516],["▁huruf",-13.613102912902832],["തരം",-13.61312198638916],["▁preuve",-13.613144874572754],["▁giusta",-13.613147735595703],["▁ڪافي",-13.613149642944336],["特別是",-13.613151550292969],["▁Bonn",-13.613160133361816],["▁wadau",-13.61317539215088],["ીમાં",-13.61318588256836],["registro",-13.613204002380373],["▁tipuri",-13.613208770751951],["▁생명",-13.613248825073242],["▁politice",-13.613251686096191],["ARY",-13.613252639770508],["003",-13.613265991210938],["▁Siemens",-13.61327075958252],["click",-13.613274574279783],["▁ipar",-13.61330223083496],["▁Aisce",-13.613316535949709],["▁써",-13.613323211669922],["سق",-13.6133451461792],["▁ernstig",-13.6133451461792],["▁svako",-13.613348007202148],["ह्र",-13.613364219665527],["χρονη",-13.613368034362791],["▁Régi",-13.61338710784912],["▁cilt",-13.613388061523438],["impianto",-13.61339282989502],["علاج",-13.613399505615234],["▁домой",-13.613408088684082],["▁බන්",-13.61342430114746],["▁gaitu",-13.613425254821776],["사람",-13.613429069519045],["▁qurban",-13.613445281982422],["▁ٻنهي",-13.613455772399902],["▁מכן",-13.613457679748535],["▁mielestäni",-13.613459587097168],["▁medo",-13.613483428955078],["eyri",-13.613487243652344],["เกินไป",
-13.613494873046877],["▁sezono",-13.61349868774414],["▁ykkur",-13.613506317138672],["യെന്ന്",-13.613508224487305],["▁veritatis",-13.61351490020752],["ቷል",-13.613523483276367],["ෙනි",-13.61352825164795],["▁პირი",-13.613530158996582],["▁functional",-13.613536834716797],["figura",-13.613551139831545],["▁cogita",-13.61355686187744],["indik",-13.61356258392334],["стай",-13.613564491271973],["▁nê",-13.613570213317873],["ējuši",-13.613582611083984],["דרג",-13.613587379455566],["strar",-13.613592147827148],["▁병원",-13.613602638244627],["▁бен",-13.613614082336426],["▁රථ",-13.613627433776855],["▁Гэр",-13.613629341125488],["▁šel",-13.613636016845703],["▁настай",-13.613639831542969],["▁əsr",-13.613641738891602],["▁pw",-13.613653182983398],["應該是",-13.613662719726562],["sivat",-13.613666534423828],["ինը",-13.613666534423828],["יקער",-13.613670349121094],["▁этапе",-13.613719940185549],["を利用する",-13.613723754882812],["いろいろな",-13.61374282836914],["йся",-13.613743782043455],["vého",-13.613749504089355],["olaj",-13.613767623901367],["vula",-13.613771438598633],["Пер",-13.613774299621582],["▁გარე",-13.61378002166748],["▁начали",-13.61379051208496],["▁국회",-13.613802909851074],["▁pijn",-13.613824844360352],["milli",-13.613826751708984],["▁моделі",-13.6138277053833],["ڀي",-13.61383056640625],["第七",-13.6138334274292],["tiivne",-13.613844871520996],["▁lancia",-13.613852500915527],["▁упозна",-13.613855361938477],["ልኝ",-13.613861083984377],["ობისა",-13.61386775970459],["endis",-13.613871574401855],["ဒို",-13.613871574401855],["▁naţional",-13.61387538909912],["▁Praza",-13.613896369934082],["▁Thật",-13.613905906677246],["▁ರೀತಿ",-13.613916397094728],["Свобод",-13.61392593383789],["нікам",-13.613990783691406],["▁Flores",-13.613991737365724],["▁Pievienot",-13.613994598388672],["្ម",-13.614005088806152],["▁inkişafına",-13.614031791687012],["▁520",-13.614041328430176],["یزه",-13.614044189453123],["валу",-13.614065170288086],["2.00",-13.614078521728516],["▁rechten",-13.614084243774414],["▁domeniu",-13.614087104797363],["▁peper",-13.61408805847168],["▁statusu",-13.614089012145996],["ūra",-13.614099502563477],["▁köra",-13.61410427093506],["formand",-13.614109992980955],["▁таки",-13.614116668701172],["aglia",-13.614117622375488],["▁компанията",-13.61411952972412],["ტონი",-13.614121437072754],["▁Chen",-13.614133834838867],["▁düzenlen",-13.614134788513184],["▁ორივე",-13.614160537719728],["▁Kein",-13.614166259765623],["▁otroka",-13.61417293548584],["sızlık",-13.614184379577637],["▁ہوگئے",-13.614228248596191],["паган",-13.61424160003662],["ովին",-13.614264488220217],["▁הטבע",-13.614275932312012],["ທິການ",-13.61428165435791],["▁بأس",-13.614286422729492],["iliselt",-13.614295959472656],["▁verschil",-13.614298820495604],["▁Elkar",-13.614315032958984],["▁regener",-13.614316940307615],["szól",-13.614336967468262],["▁Prek",-13.614337921142578],["श्या",-13.614351272583008],["▁Geografi",-13.61435890197754],["▁voorkom",-13.614362716674805],["▁simbolo",-13.614376068115234],["▁nýtt",-13.614380836486816],["▁сказали",-13.61438274383545],["০৭",-13.61439323425293],["▁જેના",-13.61439323425293],["ಈ",-13.61439609527588],["色々な",-13.614402770996094],["▁ryggen",-13.61441421508789],["▁არაა",-13.61441707611084],["▁различните",-13.61442756652832],["▁prsa",-13.614439010620115],["▁italiensk",-13.61445140838623],["▁llaw",-13.614466667175291],["ឬ",-13.614472389221191],["▁спин",-13.61450481414795],["合作的",-13.614506721496582],["əsində",-13.614519119262695],["keeda",-13.614531517028809],["▁општество",-13.614543914794922],["▁Intre",-13.614554405212402],["▁અન",-13.6
14557266235352],["▁իրականաց",-13.614561080932615],["▁Šte",-13.614568710327148],["▁køn",-13.61457633972168],["駕",-13.614579200744627],["▁kertoi",-13.61458683013916],["aois",-13.614602088928224],["▁Mathe",-13.614611625671388],["▁zemlja",-13.614616394042969],["▁पहुंचे",-13.614630699157717],["▁সেবা",-13.61464023590088],["▁tény",-13.614641189575195],["▁jakich",-13.61464500427246],["商标",-13.61465549468994],["仿",-13.614662170410156],["遂",-13.614663124084473],["IOR",-13.614665985107422],["▁දුන්න",-13.61467170715332],["▁بافت",-13.6146821975708],["▁valamit",-13.614683151245115],["171",-13.6146879196167],["▁հանդիս",-13.614688873291016],["ଭେ",-13.614689826965332],["การ์ตูน",-13.614691734313965],["เกียรติ",-13.61469268798828],["ပစ္စည်း",-13.61469268798828],["▁Ukoliko",-13.614693641662598],["▁baxmayaraq",-13.614693641662598],["▁növekedés",-13.614693641662598],["▁δωρεάν",-13.614693641662598],["▁зовнішні",-13.614693641662598],["▁հետեւանքով",-13.614693641662598],["▁אברהם",-13.614693641662598],["បញ្ចប់",-13.614694595336914],["▁രക്ത",-13.614694595336914],["▁Veränderung",-13.61469554901123],["לפעמים",-13.614696502685549],["▁പ്രധാനമന്ത്രി",-13.614696502685549],["▁disponíveis",-13.614697456359863],["▁doctrina",-13.61469841003418],["▁УК",-13.614701271057127],["气候",-13.614702224731444],["▁vähemmän",-13.614704132080078],["▁Vairāk",-13.614705085754396],["operazione",-13.614706993103027],["▁پریشان",-13.614706993103027],["▁κύριο",-13.614713668823242],["▁Stabil",-13.614717483520508],["▁erwarten",-13.61472225189209],["▁Önkormányzata",-13.61472225189209],["[16]",-13.614723205566406],["▁Engineering",-13.614728927612305],["▁sheegeen",-13.614728927612305],["▁তুলে",-13.614733695983888],["တိုင္",-13.614736557006836],["▁vlera",-13.614738464355469],["▁განათლების",-13.614742279052734],["utrustning",-13.614751815795898],["▁بطريقة",-13.614752769470217],["▁ആരോപണ",-13.61475658416748],["ដើរ",-13.614760398864746],["▁Меркел",-13.614768028259276],["▁Андрій",-13.61477279663086],["▁Верховної",-13.614775657653809],["▁nesreč",-13.614776611328123],["▁привык",-13.614782333374023],["▁ווערן",-13.61478328704834],["▁уклад",-13.614786148071287],["LİK",-13.614791870117188],["іце",-13.614791870117188],["▁البشر",-13.614815711975098],["▁bahar",-13.61481761932373],["▁எங்கள்",-13.614818572998049],["▁сърцето",-13.614823341369627],["▁utmärkt",-13.614826202392578],["▁גייט",-13.614829063415527],["НОВА",-13.614869117736816],["▁මූලික",-13.614886283874512],["▁provare",-13.614912986755373],["▁fundera",-13.614920616149902],["▁인증",-13.614953994750977],["▁கிராம",-13.61496925354004],["▁мода",-13.614978790283203],["రాల",-13.61501407623291],["פעם",-13.61502456665039],["▁verlassen",-13.615039825439451],["ነትና",-13.615042686462402],["حاكم",-13.615046501159668],["igante",-13.615052223205566],["が入って",-13.615060806274414],["▁scoate",-13.615072250366213],["вила",-13.615073204040527],["рива",-13.615074157714844],["▁эске",-13.61508083343506],["▁Penn",-13.61508560180664],["fikiri",-13.615092277526855],["пишете",-13.615103721618652],["▁приватно",-13.615104675292969],["чной",-13.615106582641602],["▁anteriormente",-13.615124702453612],["▁జె",-13.61513900756836],["▁politiques",-13.615147590637209],["έθ",-13.61515998840332],["kunden",-13.615160942077637],["▁potovanj",-13.615160942077637],["vijesti",-13.61517333984375],["▁dadkii",-13.615179061889648],["ਵਾਈ",-13.615190505981444],["اخر",-13.615191459655762],["ટન",-13.61520004272461],["nduse",-13.615201950073242],["စြဲ",-13.615220069885254],["озно",-13.61522388458252],["▁Поред",-13.615225791931152],["क्ष्य",-13.615228652954102],["是否有",
-13.615228652954102],["▁faller",-13.615229606628418],["▁elementy",-13.615241050720217],["▁poros",-13.615248680114746],["экскурс",-13.615254402160645],["▁Agosto",-13.61525535583496],["▁prasa",-13.61526584625244],["▁대화",-13.61527156829834],["▁bestimmten",-13.615279197692873],["▁кыздар",-13.615285873413086],["▁mmg",-13.61528778076172],["వులు",-13.615288734436035],["тельство",-13.615301132202148],["історичн",-13.61530303955078],["ほどの",-13.61530876159668],["▁يوما",-13.615313529968262],["▁жазуучу",-13.615318298339844],["四十",-13.615318298339844],["▁INFO",-13.615348815917969],["ماد",-13.6153564453125],["▁Blir",-13.61535930633545],["டோ",-13.615370750427246],["▁ייִדיש",-13.615391731262209],["ครั้งแรก",-13.61539363861084],["вернуть",-13.615397453308104],["מילי",-13.615398406982422],["▁Events",-13.615410804748535],["▁Kolon",-13.615410804748535],["夏季",-13.615413665771484],["▁odborník",-13.615416526794434],["▁cantar",-13.61543083190918],["▁lahenda",-13.615434646606444],["이면",-13.615435600280762],["สู้",-13.615449905395508],["▁ਅੱ",-13.615453720092772],["әй",-13.61545753479004],["▁erkekler",-13.615459442138672],["γουν",-13.615467071533203],["ESCO",-13.615484237670898],["▁Command",-13.615520477294922],["tsioonid",-13.615521430969238],["▁탐",-13.615538597106934],["menni",-13.615554809570312],["▁събра",-13.61556339263916],["▁პირობები",-13.61557674407959],["vrije",-13.615586280822754],["▁очер",-13.615594863891602],["ကစား",-13.615614891052246],["▁۹۷",-13.615615844726562],["ături",-13.61562442779541],["કળ",-13.61569595336914],["▁tapos",-13.615704536437988],["▁بتوان",-13.61571216583252],["කළ",-13.615720748901367],["的家庭",-13.615723609924316],["leyin",-13.61573314666748],["▁амжилттай",-13.61573886871338],["▁ولن",-13.615742683410645],["▁Erotik",-13.615750312805176],["▁инстал",-13.615767478942873],["▁adja",-13.615771293640137],["▁velo",-13.615771293640137],["▁kynna",-13.615779876708984],["▁réunion",-13.6157808303833],["てしまいます",-13.61582088470459],["1972",-13.615845680236816],["▁περιβάλλον",-13.61586570739746],["▁APC",-13.615891456604004],["▁ሸ",-13.61589241027832],["▁အကြောင်း",-13.615907669067385],["▁extrema",-13.61591625213623],["▁Olimpiya",-13.615930557250977],["న్య",-13.615933418273926],["▁lesbi",-13.615938186645508],["▁अन्त्य",-13.615962028503418],["▁दत्त",-13.61596393585205],["▁Owen",-13.615965843200684],["▁negre",-13.615970611572266],["▁யார",-13.615972518920898],["verfahren",-13.615973472595217],["▁amplo",-13.615974426269531],["áshoz",-13.615978240966797],["▁CAL",-13.615978240966797],["▁Mwaka",-13.615995407104492],["▁стоят",-13.61605453491211],["▁පස්",-13.616061210632324],["ნახავ",-13.616081237792969],["▁líderes",-13.616084098815918],["▁Сел",-13.61608600616455],["▁თანამ",-13.616128921508787],["ілася",-13.616132736206056],["▁CER",-13.616161346435549],["▁dijela",-13.616164207458496],["▁valiku",-13.616167068481444],["▁ugari",-13.616188049316406],["прям",-13.616205215454102],["▁tål",-13.6162109375],["ండు",-13.616217613220217],["دۇق",-13.616230010986328],["▁commodi",-13.616236686706545],["▁ești",-13.616250038146973],["▁muscle",-13.616266250610352],["ttomia",-13.616271018981934],["▁пишет",-13.616283416748049],["الن",-13.616302490234377],["▁ٽر",-13.616314888000488],["راع",-13.61634922027588],["88)",-13.616358757019045],["빠",-13.616361618041992],["▁અસર",-13.616363525390623],["▁respon",-13.616369247436523],["dømme",-13.616390228271484],["▁techniek",-13.61642837524414],["▁माना",-13.616435050964355],["သြ",-13.616442680358888],["▁χρησιμοποιεί",-13.616482734680176],["ខ្លាំង",-13.616497993469238],["▁материјала",-13.61650562286
377],["▁Realiza",-13.616510391235352],["这类",-13.616514205932615],["죽",-13.616533279418944],["▁VYA",-13.616534233093262],["▁Standart",-13.616538047790527],["szív",-13.616562843322754],["지지",-13.616591453552246],["▁realizados",-13.616604804992676],["lack",-13.616606712341309],["▁minél",-13.616612434387209],["ርባ",-13.616636276245115],["ებაზე",-13.616639137268066],["fadh",-13.616643905639648],["məyən",-13.616643905639648],["▁adapter",-13.616647720336914],["▁milles",-13.616657257080078],["▁ernst",-13.616662979125977],["වන්නට",-13.616690635681152],["MOBIL",-13.616700172424316],["▁δημοτικ",-13.616705894470217],["▁මිතුර",-13.616705894470217],["kufa",-13.61671543121338],["▁రోజులు",-13.616734504699709],["▁pohodln",-13.616737365722656],["▁Arbeiten",-13.616738319396973],["累積",-13.616750717163086],["累计",-13.616762161254885],["▁chala",-13.61677074432373],["เทียบ",-13.616777420043944],["▁lỗ",-13.616779327392578],["▁beschäftigt",-13.616781234741213],["▁différence",-13.616781234741213],["▁hjørne",-13.616781234741213],["▁nấu",-13.616781234741213],["▁opinião",-13.616781234741213],["▁өкүлдөрү",-13.616781234741213],["▁խոշոր",-13.616781234741213],["▁अभिनय",-13.616781234741213],["▁దాదాపు",-13.616781234741213],["▁şikayət",-13.616782188415527],["끄",-13.616782188415527],["▁Ferdinand",-13.616783142089844],["▁vērtība",-13.616785049438477],["▁danışıqlar",-13.61678695678711],["▁mustaqil",-13.616787910461426],["▁خیبر",-13.616787910461426],["სარგებლ",-13.616792678833008],["▁ଦେଇଥିଲେ",-13.61679458618164],["▁отдельных",-13.616795539855955],["感兴趣",-13.61679744720459],["▁roulette",-13.616799354553224],["▁особливості",-13.616801261901855],["▁raziskav",-13.61680507659912],["▁операції",-13.61680507659912],["▁Pangeran",-13.616808891296388],["▁അവൾ",-13.61681079864502],["▁département",-13.616814613342283],["▁ավանդ",-13.616816520690918],["▁Faj",-13.616817474365234],["▁கொலை",-13.616819381713867],["ស្វែងរក",-13.6168212890625],["▁साझा",-13.6168212890625],["▁sofistic",-13.616836547851562],["▁실행",-13.616840362548828],["預期",-13.61685562133789],["▁هرگز",-13.616869926452637],["▁чланова",-13.616877555847168],["ដឹក",-13.616883277893066],["对其",-13.616887092590332],["▁ముందుకు",-13.616915702819824],["egészségügyi",-13.616923332214355],["zeichen",-13.616943359375],["ໄລຍະ",-13.616957664489746],["kosta",-13.616981506347656],["▁fonctionnement",-13.616981506347656],["▁cinnte",-13.616985321044922],["lności",-13.616990089416504],["▁ndryshe",-13.617003440856934],["adolescent",-13.617029190063477],["▁reduz",-13.61703109741211],["ETO",-13.617036819458008],["▁ກຸ່ມ",-13.617063522338867],["▁టాక్",-13.61706829071045],["▁uvjetima",-13.617084503173828],["▁אָ",-13.617085456848145],["قاطع",-13.617094039916992],["물이",-13.617119789123535],["waran",-13.61715316772461],["Ако",-13.61715316772461],["▁هلي",-13.61716079711914],["▁регіон",-13.617167472839355],["▁imej",-13.617170333862305],["▁vänster",-13.617170333862305],["▁frivillig",-13.61717700958252],["▁pasando",-13.61719799041748],["▁muisti",-13.61721420288086],["▁Ruk",-13.617216110229492],["ريان",-13.61721897125244],["▁szerez",-13.617228507995604],["こともある",-13.617250442504885],["▁എൻ",-13.617257118225098],["▁ചുറ്റ",-13.61725902557373],["▁presion",-13.617259979248049],["▁parter",-13.617260932922363],["▁သူတို့",-13.617265701293944],["▁అంది",-13.617266654968262],["▁pavyzdžiui",-13.617301940917969],["buff",-13.61733627319336],["aynta",-13.61734676361084],["十三",-13.617351531982422],["▁camion",-13.617358207702637],["▁kelt",-13.617362022399902],["▁Sail",-13.61736297607422],["▁Felix",-13.617382049560549],["jelző",-13.61738395
690918],["နိုင္",-13.617386817932127],["▁නොකර",-13.617402076721191],["හී",-13.617408752441406],["niejszy",-13.617411613464355],["▁กรม",-13.617430686950684],["▁prišla",-13.617457389831545],["▁танк",-13.617459297180176],["▁bleu",-13.617460250854492],["▁prekių",-13.617462158203123],["▁učiniti",-13.617462158203123],["átky",-13.617465019226074],["▁gebruikers",-13.617473602294922],["▁aportar",-13.617480278015137],["▁formulaire",-13.617483139038086],["▁සන්",-13.617504119873049],["▁Selon",-13.617511749267578],["▁frig",-13.617536544799805],["▁Áll",-13.617541313171388],["képp",-13.617571830749512],["▁efeitos",-13.617573738098145],["▁фикс",-13.617594718933104],["▁oppleve",-13.617595672607422],["▁frame",-13.61760425567627],["▁بالن",-13.617616653442385],["▁gyvūn",-13.61762523651123],["▁stave",-13.61762809753418],["▁ජනතා",-13.617636680603027],["▁arfer",-13.61763858795166],["tuwa",-13.617642402648926],["▁guardar",-13.617643356323242],["правлен",-13.617656707763672],["ಿದ್ದಾನೆ",-13.61769676208496],["άνια",-13.617700576782228],["管理者",-13.617755889892578],["このように",-13.61776065826416],["▁आपला",-13.617770195007324],["lənir",-13.617782592773438],["▁Guan",-13.617795944213867],["▁консул",-13.617813110351562],["▁упо",-13.617837905883787],["лост",-13.617839813232422],["▁მარტი",-13.617841720581056],["sıdır",-13.617863655090332],["ፈጥ",-13.61786651611328],["▁Ог",-13.617874145507812],["▁liefst",-13.617884635925291],["обще",-13.61791706085205],["▁позната",-13.617935180664062],["▁löydät",-13.61799430847168],["kód",-13.618019104003906],["转移",-13.618021965026855],["▁നേരെ",-13.618035316467283],["աչափ",-13.61806869506836],["▁అధిక",-13.618083000183104],["▁основан",-13.618087768554688],["▁חז",-13.618102073669434],["▁defende",-13.6181058883667],["▁anus",-13.618119239807127],["▁novca",-13.618123054504396],["▁ପ୍ରସ୍ତୁତ",-13.618138313293455],["sõna",-13.618151664733888],["МАТ",-13.61815357208252],["▁Child",-13.618168830871582],["स्मा",-13.61819076538086],["▁Кро",-13.618240356445312],["▁जातो",-13.618246078491213],["▁иштери",-13.618270874023438],["▁протеин",-13.618273735046388],["▁သတိ",-13.618281364440918],["UGA",-13.618293762207031],["ہری",-13.618314743041992],["▁formë",-13.618315696716309],["▁populer",-13.618338584899902],["▁draws",-13.618371963500977],["ntium",-13.618382453918455],["▁Vino",-13.618412971496582],["多样",-13.618437767028809],["אנס",-13.618438720703123],["▁sikkerhed",-13.618440628051758],["▁ස්ව",-13.618450164794922],["▁जस्तै",-13.618453025817873],["вото",-13.618462562561035],["▁edip",-13.618463516235352],["▁Philipp",-13.6184720993042],["도시",-13.618487358093262],["▁bosib",-13.618488311767578],["▁seien",-13.61850357055664],["▁yazma",-13.618515014648438],["▁بدور",-13.618526458740234],["brudd",-13.618528366088867],["▁členov",-13.618539810180664],["Tube",-13.61854648590088],["▁گذري",-13.618553161621094],["μός",-13.618556022644045],["▁밖에",-13.618559837341309],["▁посто",-13.618570327758787],["さが",-13.618574142456056],["▁англ",-13.618592262268066],["▁birini",-13.618596076965332],["▁căng",-13.61861515045166],["▁falls",-13.618621826171877],["توقف",-13.618626594543455],["▁metody",-13.618637084960938],["돈",-13.618669509887695],["▁ഉപയോഗ",-13.618671417236328],["풀",-13.618684768676758],["▁ብሔራዊ",-13.618696212768556],["正好",-13.618700981140137],["▁proportion",-13.6187105178833],["cioso",-13.6187162399292],["gailu",-13.618721961975098],["▁പണ",-13.618730545043944],["▁tîm",-13.618741035461426],["▁fritids",-13.618748664855955],["ေရႊ",-13.61875343322754],["▁വിഭാഗ",-13.61876106262207],["▁ಉದ್ಯೋಗ",-13.618769645690918],["നാള",-13.618770599365234],
["201",-13.618797302246094],["세기",-13.618800163269045],["▁Elektri",-13.618803024291992],["▁kiegészítő",-13.618806838989258],["鄰",-13.618828773498535],["▁assina",-13.618830680847168],["骂",-13.618834495544434],["▁leilighet",-13.618842124938965],["▁ਵਰਗ",-13.618846893310549],["çik",-13.618858337402344],["▁एकत्र",-13.618858337402344],["挖掘",-13.618860244750977],["▁ھازىر",-13.61886215209961],["エネルギー",-13.61886215209961],["▁tuotanto",-13.618865966796877],["▁fontes",-13.618866920471191],["卫星",-13.618871688842772],["แถว",-13.61887264251709],["▁dreapta",-13.61887264251709],["▁сувязі",-13.61887264251709],["▁انٹرنیٹ",-13.61887264251709],["▁दिवशी",-13.61887264251709],["▁ଘୋଷଣା",-13.61887264251709],["▁მიხეილ",-13.61887264251709],["ថ្លៃ",-13.618873596191406],["▁Gossip",-13.618873596191406],["▁положај",-13.618873596191406],["▁رپورٽ",-13.618873596191406],["▁కొద్ది",-13.618873596191406],["▁ಘಟಕ",-13.618873596191406],["▁මට්ටම",-13.61887550354004],["sikhathi",-13.618876457214355],["▁письменник",-13.618876457214355],["▁geplaatst",-13.618877410888672],["▁Түүний",-13.618878364562988],["▁پیروزی",-13.618879318237305],["▁обстоятельства",-13.61888027191162],["▁දියුණු",-13.61888027191162],["ებდნენ",-13.618881225585938],["▁júlí",-13.618881225585938],["▁adhuc",-13.61888313293457],["▁Laporan",-13.61888599395752],["▁Zentrum",-13.618890762329102],["▁объявлен",-13.618894577026367],["▁Тогава",-13.618895530700684],["▁gündüz",-13.618897438049316],["▁возврат",-13.618901252746582],["▁навик",-13.618902206420898],["▁اتحادیه",-13.618905067443848],["▁пријави",-13.618910789489746],["хаж",-13.618919372558594],["▁ବିଷୟରେ",-13.61892318725586],["▁sinasabi",-13.618931770324709],["小区",-13.618943214416504],["▁התורה",-13.61894416809082],["▁יוכל",-13.61894416809082],["abilitat",-13.618947982788086],["▁ведет",-13.618955612182615],["wissenschaftlich",-13.61895751953125],["▁обавља",-13.618971824645996],["▁целу",-13.618996620178224],["▁rosso",-13.619010925292969],["▁למעלה",-13.6190185546875],["სგან",-13.61902141571045],["▁Rusland",-13.619022369384766],["▁alimentación",-13.619027137756348],["▁ureja",-13.619035720825195],["▁दान",-13.619035720825195],["▁početkom",-13.619038581848145],["▁ограничения",-13.619053840637209],["▁दावा",-13.61905574798584],["▁cigaret",-13.619063377380373],["▁ਸੌ",-13.619072914123535],["kitab",-13.619078636169434],["▁אינם",-13.619078636169434],["▁landskap",-13.61910343170166],["▁xoves",-13.61910343170166],["▁значна",-13.619107246398926],["казывает",-13.619123458862305],["▁בכך",-13.61912441253662],["ดังนี้",-13.61915683746338],["▁Aksi",-13.61917495727539],["Дмитр",-13.619187355041504],["▁powietrz",-13.619187355041504],["ል፤",-13.619193077087402],["▁uttrykk",-13.619196891784668],["верз",-13.619203567504885],["▁ұлы",-13.619205474853516],["授業",-13.619213104248049],["lıkla",-13.619229316711426],["▁corredor",-13.61923885345459],["ရက္ေန႔",-13.619239807128906],["▁därmed",-13.619245529174805],["▁ছেলে",-13.619257926940918],["▁Panga",-13.619272232055664],["▁والط",-13.619285583496094],["مرة",-13.61928653717041],["▁desconto",-13.619296073913574],["▁ոլորտում",-13.619297981262209],["kwenda",-13.619318008422852],["▁Пес",-13.619319915771484],["داش",-13.6193208694458],["▁kazanma",-13.619330406188965],["שורה",-13.61933708190918],["▁egongo",-13.619339942932127],["augs",-13.61934757232666],["látó",-13.61934757232666],["▁반대",-13.619364738464355],["▁заасан",-13.619365692138672],["▁मार्फत",-13.619380950927734],["ුර",-13.619385719299316],["▁Process",-13.619394302368164],["▁bought",-13.619396209716797],["▁আলো",-13.619396209716797],["▁Pesca",-13.61941719055
1758],["▁koyu",-13.619417190551758],["▁horizontal",-13.619418144226074],["வற்றை",-13.619421005249023],["▁Fonte",-13.61943531036377],["െത്തി",-13.619441986083984],["▁alussa",-13.61945915222168],["なければならない",-13.619466781616213],["▁घेतले",-13.61949634552002],["▁dvojic",-13.619513511657717],["▁ರೂಪಾಯಿ",-13.61953067779541],["▁jiro",-13.619542121887209],["▁abone",-13.619543075561523],["▁مەدەنىيەت",-13.61955738067627],["▁Hrvati",-13.6195707321167],["piirkonna",-13.619577407836914],["ભુ",-13.619588851928713],["▁Μου",-13.619614601135254],["▁christian",-13.619621276855469],["的重要性",-13.61964225769043],["▁interviu",-13.619662284851074],["època",-13.619665145874023],["▁sürede",-13.619668006896973],["▁Cepat",-13.619668960571287],["ですし",-13.619670867919922],["▁транспорта",-13.619685173034668],["▁konur",-13.619688987731934],["大规模",-13.619691848754885],["किन",-13.619698524475098],["örer",-13.619705200195312],["▁negozio",-13.61971950531006],["▁aggressiv",-13.61972713470459],["▁Deve",-13.619752883911133],["работва",-13.61976718902588],["▁حكومة",-13.619773864746094],["▁سینیٹ",-13.619789123535156],["▁uplatn",-13.619832992553713],["▁בעיני",-13.619833946228027],["ບັນ",-13.619839668273926],["కంగా",-13.619848251342772],["▁המג",-13.619868278503418],["▁करुन",-13.619876861572266],["ائش",-13.619890213012695],["▁Дъ",-13.619894981384276],["▁Él",-13.619905471801758],["▁resistente",-13.619937896728516],["कुर",-13.619961738586426],["▁Artigo",-13.61996364593506],["hetik",-13.61998462677002],["▁ઈન્",-13.620020866394045],["▁tecido",-13.62003231048584],["▁vrasje",-13.620035171508787],["▁Այ",-13.620059967041016],["▁segar",-13.620061874389648],["行われ",-13.620075225830078],["▁ለእ",-13.620086669921877],["神社",-13.620088577270508],["援助",-13.620091438293455],["▁artırılması",-13.620116233825684],["▁отлича",-13.620141983032228],["stjo",-13.620184898376465],["▁തുറന്നു",-13.620189666748049],["សិន",-13.62021541595459],["▁tons",-13.620221138000488],["merke",-13.62022590637207],["▁Uj",-13.620230674743652],["ଷ୍ଟା",-13.620243072509766],["▁سرگرمی",-13.620375633239746],["slovenske",-13.62037754058838],["tyminen",-13.62038803100586],["ಾದರೆ",-13.620433807373049],["ก็ตาม",-13.620445251464844],["▁augš",-13.620457649230955],["▁ölümü",-13.620489120483398],["ಥಾ",-13.620503425598145],["▁8,5",-13.62050724029541],["▁беш",-13.620508193969728],["laigh",-13.62051486968994],["нып",-13.620525360107422],["▁Linie",-13.620532989501951],["brú",-13.620549201965332],["مک",-13.620558738708496],["▁યોજના",-13.620590209960938],["▁Sath",-13.620595932006836],["▁betalning",-13.620627403259276],["你好",-13.620628356933594],["inaweza",-13.62063694000244],["សោ",-13.620657920837402],["▁машину",-13.620691299438477],["▁एज",-13.620692253112791],["▁mellé",-13.620707511901855],["▁Terminal",-13.620710372924805],["足够",-13.62071132659912],["▁realizará",-13.620721817016602],["4,4",-13.620723724365234],["▁პროცეს",-13.620723724365234],["▁nødt",-13.620728492736816],["ニング",-13.62074089050293],["ochtend",-13.620741844177246],["ならば",-13.620760917663574],["▁Faci",-13.620768547058104],["▁కాల్",-13.62077808380127],["▁Chandra",-13.620782852172852],["▁வந்தது",-13.620789527893066],["▁Laurent",-13.620792388916016],["▁Xwe",-13.620831489562988],["ဘယ္",-13.620840072631836],["▁곧",-13.620850563049316],["▁алкоголь",-13.62087059020996],["▁yanıt",-13.62087345123291],["▁baut",-13.620889663696287],["纵",-13.62089729309082],["▁milk",-13.620899200439451],["▁بسيار",-13.620916366577148],["悦",-13.620927810668944],["詳",-13.620936393737791],["侯",-13.620942115783691],["詢問",-13.620946884155272],["洗衣",-13.620952606201172],[
"تصل",-13.620954513549805],["బాద్",-13.620962142944336],["▁မာ",-13.620965003967283],["สมาคม",-13.620966911315918],["เรื่อย",-13.62096881866455],["ប្រធានាធិបតី",-13.62096881866455],["ᅲᅲ",-13.620969772338867],["▁Gelegenheit",-13.620969772338867],["▁companhia",-13.620969772338867],["▁països",-13.620969772338867],["▁жогору",-13.620969772338867],["▁табиғат",-13.620969772338867],["▁філософ",-13.620969772338867],["▁कर्नाटक",-13.620969772338867],["▁মার্কিন",-13.620969772338867],["▁ಎಸ್ಟೇಟ್",-13.620969772338867],["▁സേവന",-13.620969772338867],["▁აუცილებლად",-13.620969772338867],["▁fegyver",-13.620970726013184],["简直",-13.620970726013184],["▁meliputi",-13.620972633361816],["▁våld",-13.620972633361816],["▁стигне",-13.620973587036133],["▁dilengkapi",-13.620975494384766],["▁διότι",-13.620980262756348],["▁నిర్మించ",-13.620980262756348],["▁belirterek",-13.620988845825195],["ຄາດ",-13.620989799499512],["▁დუ",-13.620989799499512],["▁Amerîkayê",-13.620990753173828],["▁понаша",-13.620991706848145],["ONDA",-13.621000289916992],["▁झापा",-13.621007919311523],["▁یوسف",-13.621010780334473],["▁मॉ",-13.621010780334473],["▁Segunda",-13.621015548706056],["外交部",-13.621016502380373],["▁شاخص",-13.62102222442627],["▁زندگيء",-13.621023178100586],["ītis",-13.621024131774902],["כשיר",-13.62102508544922],["▁внесении",-13.621031761169434],["▁ਪੰਜ",-13.62103271484375],["▁transaction",-13.621033668518066],["▁Muna",-13.6210355758667],["▁natuurlik",-13.621036529541016],["▁nettopp",-13.621038436889648],["cresc",-13.621044158935549],["▁Wong",-13.621051788330078],["▁شاندار",-13.621054649353027],["1977",-13.621071815490724],["▁تستطيع",-13.621074676513672],["▁हस",-13.621078491210938],["▁соціального",-13.621079444885254],["▁Stella",-13.621088981628418],["▁vizito",-13.621089935302734],["▁SMP",-13.62109661102295],["río",-13.621101379394531],["ήμα",-13.621129035949709],["するには",-13.621146202087402],["▁tõesti",-13.621155738830566],["▁umor",-13.621170997619627],["Andorra",-13.621175765991213],["▁médio",-13.621191024780272],["▁descobert",-13.621200561523438],["▁prosa",-13.62120246887207],["itheoirí",-13.621222496032717],["▁casal",-13.621230125427246],["在台灣",-13.62123680114746],["場合があります",-13.621238708496094],["▁Aaron",-13.62124252319336],["▁വേദന",-13.621246337890623],["▁սեպտեմբերի",-13.621256828308104],["▁փորձում",-13.621260643005373],["ПАР",-13.621267318725586],["▁Pirms",-13.621272087097168],["පන",-13.621275901794434],["משפחות",-13.621283531188965],["പം",-13.621285438537598],["Nam",-13.621342658996582],["天地",-13.621345520019531],["▁Зашто",-13.621382713317873],["▁sindical",-13.621394157409668],["▁крові",-13.621404647827148],["▁mengandungi",-13.62140655517578],["▁មតិ",-13.621413230895996],["เอฟ",-13.621423721313477],["▁النوم",-13.621431350708008],["salam",-13.621460914611816],["▁RPG",-13.621482849121094],["öpp",-13.62148666381836],["бала",-13.621488571166992],["严重的",-13.62149143218994],["▁жеңіл",-13.62152099609375],["▁تحصیلی",-13.621537208557127],["▁vyhod",-13.621540069580078],["Qo",-13.621546745300291],["videnskab",-13.621559143066406],["ഹിത",-13.62156105041504],["▁šiame",-13.62157154083252],["▁discount",-13.621577262878418],["▁kelti",-13.621587753295898],["▁filmas",-13.621590614318848],["▁DPH",-13.621594429016112],["ignan",-13.621597290039062],["▁χαρά",-13.621649742126465],["èche",-13.621668815612791],["▁torrent",-13.621695518493652],["▁ዘር",-13.621723175048828],["യുടെയും",-13.621729850769045],["▁içerik",-13.621774673461914],["Ја",-13.62180233001709],["正常的",-13.621808052062988],["迎接",-13.621816635131836],["អាហារ",-13.621829986572266],["动画",-13.6218
35708618164],["ящих",-13.62186050415039],["▁ರಲ್ಲಿ",-13.62187385559082],["NAM",-13.621874809265137],["▁Канада",-13.621875762939451],["▁spannende",-13.621903419494627],["▁melakukannya",-13.621907234191896],["▁urmat",-13.62192153930664],["▁ពិ",-13.621940612792969],["▁ගාන",-13.621973037719728],["▁პროცესი",-13.622011184692385],["arske",-13.622047424316406],["▁സംസാരിക്ക",-13.62209129333496],["▁xilas",-13.622093200683594],["▁birinin",-13.622106552124023],["▁woorden",-13.622129440307615],["▁levy",-13.622148513793944],["▁beint",-13.622166633605955],["▁අරගෙන",-13.622173309326172],["▁geleceği",-13.622214317321776],["zanim",-13.622228622436523],["वृत्त",-13.62224006652832],["вот",-13.622246742248535],["сіп",-13.622260093688965],["▁trage",-13.622265815734863],["▁פרסום",-13.622268676757812],["▁viszony",-13.622279167175291],["pezi",-13.622288703918455],["▁உள்",-13.62229061126709],["واري",-13.62232780456543],["▁Articol",-13.62233543395996],["▁වනු",-13.622336387634276],["是谁",-13.62234115600586],["▁Biro",-13.622365951538086],["50.000",-13.622370719909668],["norsk",-13.622370719909668],["▁Bikin",-13.62238311767578],["меняется",-13.622386932373049],["ικότητα",-13.622389793395996],["▁Akademik",-13.622406005859377],["لاردىن",-13.622419357299805],["▁paža",-13.622428894042969],["čara",-13.622430801391602],["brek",-13.622435569763184],["▁Universitetinin",-13.622438430786133],["▁yhteisö",-13.62244987487793],["დგომა",-13.622453689575195],["altre",-13.622477531433104],["▁Terme",-13.62249755859375],["▁Италија",-13.622499465942385],["▁entrare",-13.622517585754396],["▁katibi",-13.622533798217772],["今年的",-13.622536659240724],["▁хүрээ",-13.622572898864746],["▁trainer",-13.622576713562012],["▁filius",-13.622598648071287],["ມອບ",-13.622599601745604],["DEA",-13.62261199951172],["▁interesat",-13.622629165649414],["▁Temel",-13.622637748718262],["новите",-13.622642517089844],["гэрэл",-13.62264347076416],["▁kryss",-13.622657775878906],["▁valimis",-13.62266731262207],["▁Jakub",-13.622668266296388],["▁singler",-13.62267780303955],["tishi",-13.622679710388184],["окот",-13.62269687652588],["▁resultats",-13.622699737548828],["▁Ruth",-13.62270736694336],["வர்களுக்கு",-13.62271499633789],["▁підвищ",-13.622724533081056],["▁училища",-13.622737884521484],["нська",-13.622748374938965],["▁kapo",-13.622761726379396],["gies",-13.62277126312256],["▁tartoz",-13.62281608581543],["▁hlavu",-13.622849464416504],["▁pelaaja",-13.622849464416504],["▁oczekiwa",-13.62289333343506],["گىن",-13.622919082641602],["▁elkészít",-13.62293529510498],["гава",-13.622960090637209],["ğini",-13.622969627380373],["ក្រសួង",-13.622979164123535],["▁bază",-13.622981071472168],["挤",-13.622984886169434],["금을",-13.622994422912598],["晋",-13.622998237609863],["▁nyilvános",-13.623001098632812],["ლებით",-13.623004913330078],["cock",-13.623008728027344],["▁ਚਾਰ",-13.623011589050291],["txt",-13.62301254272461],["ไบ",-13.623016357421877],["ющиеся",-13.62304401397705],["▁џ",-13.623046875],["ቀረበ",-13.623048782348633],["引擎",-13.623050689697266],["吋",-13.62306308746338],["грес",-13.623064994812012],["รับผิดชอบ",-13.62306785583496],["▁வளர்",-13.623068809509276],["ຫຼິ້ນ",-13.623069763183594],["▁Конференц",-13.623069763183594],["▁Moscow",-13.62307071685791],["▁menguasai",-13.62307071685791],["▁température",-13.62307071685791],["▁zagrebačk",-13.62307071685791],["▁Šiandien",-13.62307071685791],["▁најбоље",-13.62307071685791],["▁մուտք",-13.62307071685791],["▁أوروبا",-13.62307071685791],["▁ସ୍ମାର୍ଟ",-13.62307071685791],["▁ရန်ကုန်",-13.62307071685791],["▁Ընդ",-13.623071670532228],["▁ღვთის",-13.62
3071670532228],["▁Führung",-13.623072624206545],["▁gespeichert",-13.623072624206545],["▁perfetto",-13.623072624206545],["▁ਛੱਡ",-13.62307357788086],["▁نوازشریف",-13.623074531555176],["▁സ്വീകരിക്ക",-13.623074531555176],["▁انگریز",-13.623075485229492],["▁працюють",-13.623077392578123],["ликвид",-13.623080253601074],["▁essencial",-13.623085021972656],["▁Кеңештин",-13.623085975646973],["▁ಪ್ರೇಮ",-13.623089790344238],["▁નજર",-13.623091697692873],["▁단순",-13.623091697692873],["▁mreža",-13.623093605041504],["▁zichzelf",-13.623093605041504],["▁Қытай",-13.623096466064451],["клу",-13.62310028076172],["▁වැනිදා",-13.623102188110352],["▁märts",-13.62311840057373],["▁Világ",-13.623119354248049],["▁შესაძლებელია",-13.623123168945312],["▁Baltijas",-13.623126029968262],["▁учебно",-13.62313175201416],["▁routine",-13.623132705688477],["▁घेतली",-13.623133659362791],["▁específico",-13.62313747406006],["ටෝ",-13.623147010803224],["राष्ट्र",-13.62314796447754],["▁दृष्टि",-13.623148918151855],["barát",-13.623150825500488],["እኛ",-13.62315273284912],["▁avaliação",-13.623159408569336],["回复",-13.623163223266602],["▁darparu",-13.623165130615234],["ビル",-13.62316608428955],["▁mezcla",-13.623172760009766],["ходящ",-13.623183250427246],["▁Vatikan",-13.623188972473145],["▁akumul",-13.623188972473145],["▁velice",-13.623188972473145],["▁menerusi",-13.623197555541992],["▁නිර්",-13.623197555541992],["ռի",-13.623198509216309],["teck",-13.623199462890623],["00,00",-13.623212814331056],["▁kylmä",-13.623218536376951],["láin",-13.62322235107422],["▁الوزارة",-13.6232271194458],["▁relationer",-13.623230934143066],["▁Jadran",-13.623263359069824],["ضاع",-13.62326431274414],["общи",-13.623268127441406],["מרי",-13.62327003479004],["▁ከነ",-13.62327766418457],["ราก",-13.623279571533203],["▁Дори",-13.62329387664795],["▁turėti",-13.623294830322266],["ソー",-13.623299598693848],["what",-13.623320579528809],["ποίηση",-13.623329162597656],["▁exerce",-13.6233549118042],["▁해도",-13.62336254119873],["rijke",-13.62336540222168],["▁leverans",-13.623373985290527],["úchán",-13.623382568359377],["▁joint",-13.623382568359377],["▁predsa",-13.623431205749512],["▁Emirat",-13.623472213745115],["▁envío",-13.62348461151123],["▁umumiy",-13.623485565185549],["שיטת",-13.623497009277344],["▁सम्बन्धमा",-13.623505592346191],["▁değeri",-13.62352180480957],["▁tkanin",-13.623528480529783],["děla",-13.623529434204102],["▁سحر",-13.623530387878418],["ਵੀਂ",-13.623564720153809],["הלכה",-13.623578071594238],["dū",-13.623579025268556],["ும",-13.623614311218262],["▁ទុក",-13.62362289428711],["ঙ্গা",-13.623638153076172],["▁सामान",-13.623652458190918],["відомі",-13.623656272888184],["▁počinje",-13.623669624328612],["▁Kosovën",-13.623679161071776],["ណ្",-13.62368392944336],["ਰਿ",-13.62368869781494],["▁propriedade",-13.623697280883787],["▁kieli",-13.62371063232422],["▁настроение",-13.62372589111328],["▁hrozn",-13.623745918273926],["▁zpráv",-13.623767852783203],["▁ورق",-13.62380313873291],["▁Jove",-13.62381362915039],["▁పడి",-13.623823165893556],["▁ইং",-13.623825073242188],["LOC",-13.623848915100098],["ማሪ",-13.623859405517578],["ಕ್ಕಿಂತ",-13.623871803283691],["▁школі",-13.62388801574707],["▁हटा",-13.62388801574707],["chap",-13.62389087677002],["▁Խաղ",-13.623894691467283],["▁bankas",-13.623907089233398],["แต",-13.623930931091309],["льнікаў",-13.623953819274902],["правляю",-13.623963356018066],["▁$3",-13.623970985412598],["▁тарту",-13.623990058898926],["▁الجاري",-13.624001502990724],["கால",-13.624003410339355],["▁плодове",-13.62401294708252],["▁crit",-13.62407684326172],["▁ناب",-13.624107360839
844],["▁რომელი",-13.624110221862791],["▁статут",-13.624123573303224],["▁পাঠ",-13.624136924743652],["▁ರಿಂದ",-13.62414264678955],["תּ",-13.624147415161133],["եցնում",-13.624187469482422],["▁එයට",-13.624192237854004],["▁Haur",-13.624202728271484],["▁пријатели",-13.624218940734863],["▁mərhələ",-13.62425136566162],["▁Мус",-13.62425422668457],["बाजी",-13.624272346496582],["▁ಇಂ",-13.624292373657228],["ราง",-13.62429904937744],["Let",-13.624306678771973],["するための",-13.624313354492188],["▁mision",-13.624319076538086],["▁vigente",-13.624330520629885],["の一つ",-13.62436866760254],["▁Wakil",-13.624396324157717],["▁приходит",-13.624412536621094],["ísimo",-13.62444019317627],["▁رادیو",-13.62445831298828],["มักจะ",-13.624497413635254],["machen",-13.62450122833252],["▁дату",-13.624510765075684],["▁باپ",-13.624526977539062],["ट्री",-13.624552726745604],["▁ადამიანები",-13.624561309814451],["▁joueurs",-13.624576568603516],["诸",-13.624578475952148],["leider",-13.624582290649414],["▁bright",-13.62458324432373],["▁levende",-13.624588966369627],["κοπή",-13.624610900878906],["▁ingresos",-13.624610900878906],["ေနၾက",-13.624624252319336],["▁Schl",-13.6246337890625],["なんです",-13.624649047851562],["▁פועל",-13.624661445617676],["▁одби",-13.624693870544434],["▁лето",-13.624701499938965],["დგე",-13.624731063842772],["▁ढु",-13.62482452392578],["zile",-13.624855041503906],["▁Stora",-13.624863624572754],["▁taju",-13.62486743927002],["年生",-13.62490177154541],["▁аналог",-13.62491226196289],["ปริ",-13.624917030334473],["стигну",-13.624926567077637],["▁gösterme",-13.624933242797852],["▁مانگ",-13.624970436096191],["▁lines",-13.624972343444824],["বাজার",-13.624991416931152],["▁συλλ",-13.625028610229492],["▁соціально",-13.625029563903809],["遊び",-13.625038146972656],["original",-13.625040054321287],["5.6",-13.625085830688477],["เข้ม",-13.62511157989502],["ડિયા",-13.625125885009766],["▁básica",-13.625128746032717],["▁Lean",-13.625131607055664],["景色",-13.625133514404297],["▁ਔਰਤ",-13.625139236450195],["▁Ceci",-13.625146865844728],["陕西",-13.625160217285156],["(5)",-13.625166893005373],["▁değerlendir",-13.625168800354004],["ທໍາອິດ",-13.625173568725586],["겼",-13.625174522399902],["깔",-13.625174522399902],["॰",-13.62517547607422],["▁Eternulo",-13.62517547607422],["▁Qeveria",-13.62517547607422],["▁bertujuan",-13.62517547607422],["▁gemütlich",-13.62517547607422],["▁notícies",-13.62517547607422],["▁ફોટો",-13.62517547607422],["▁Wuxuu",-13.625176429748535],["▁існує",-13.625176429748535],["▁խումբ",-13.625176429748535],["▁جنرال",-13.625176429748535],["▁Protection",-13.625177383422852],["▁браузер",-13.625177383422852],["▁ଉଚ୍ଚ",-13.625179290771484],["▁necessária",-13.6251802444458],["▁понятие",-13.625181198120115],["▁مؤثر",-13.625182151794434],["▁Stuur",-13.62518310546875],["▁ukoliko",-13.62518310546875],["▁دەۋر",-13.625185012817385],["▁قضية",-13.625185012817385],["▁Neymar",-13.625186920166016],["▁ट्रम्प",-13.625186920166016],["▁सहायता",-13.625186920166016],["▁თვითონ",-13.625186920166016],["▁չունի",-13.625187873840332],["▁හිමිකම්",-13.625188827514648],["▁અંદર",-13.625189781188965],["▁ilmselt",-13.625197410583496],["▁ਕਿਉਂ",-13.625198364257812],["▁түсінік",-13.625201225280762],["▁tələbə",-13.625202178955078],["ลงทะเบียน",-13.62520694732666],["შაბათ",-13.625210762023926],["▁Сепак",-13.625213623046877],["▁десять",-13.625221252441406],["▁Argent",-13.625222206115724],["▁Huu",-13.625224113464355],["▁várható",-13.625228881835938],["▁گیلان",-13.625229835510254],["百貨",-13.62523365020752],["▁nangyari",-13.625237464904783],["▁betydelse",-13.625258445739746],["▁ав
торитет",-13.625259399414062],["▁exclusivamente",-13.625283241271973],["▁తయారు",-13.625283241271973],["▁шағын",-13.625288009643556],["บันเทิง",-13.62529182434082],["▁החינוך",-13.62529468536377],["▁informatic",-13.625304222106934],["▁الغربية",-13.625317573547363],["ሳብ",-13.625330924987791],["▁الرسول",-13.625338554382324],["ማርያም",-13.625348091125488],["▁promovare",-13.625361442565918],["▁Süper",-13.625364303588867],["▁sziget",-13.625388145446776],["-42",-13.62539768218994],["相反",-13.625409126281738],["▁দলের",-13.62541389465332],["PUT",-13.62541675567627],["▁болжээ",-13.62541675567627],["▁대신",-13.625421524047852],["▁inclou",-13.625423431396484],["▁науке",-13.625429153442385],["▁Quality",-13.625431060791016],["ვნის",-13.625432968139648],["▁фактически",-13.625439643859863],["έλευση",-13.625448226928713],["quier",-13.625455856323242],["▁braku",-13.62545680999756],["ÉN",-13.625460624694824],["qol",-13.625473022460938],["▁saksi",-13.625473022460938],["▁יודעים",-13.625473022460938],["ႏို",-13.625475883483888],["▁Zahn",-13.625494003295898],["IGE",-13.625497817993164],["اڙي",-13.62549877166748],["334",-13.625511169433594],["▁aizsarg",-13.625514030456545],["▁פורסם",-13.625528335571287],["▁Bước",-13.625551223754885],["▁poena",-13.625563621520996],["▁питания",-13.625564575195312],["▁istifadəsi",-13.62557315826416],["νιο",-13.625576972961426],["▁седам",-13.625576972961426],["taulu",-13.625587463378906],["ിയില്",-13.625595092773438],["▁лют",-13.625606536865234],["بيت",-13.625617980957031],["▁артисти",-13.625619888305664],["▁worse",-13.625649452209473],["vení",-13.625672340393066],["▁parkir",-13.625689506530762],["▁पीडित",-13.62571144104004],["▁לחצו",-13.625718116760254],["▁жолда",-13.625723838806152],["▁Akhir",-13.62572956085205],["▁اعضا",-13.625734329223633],["varia",-13.62573528289795],["▁страну",-13.625741004943848],["ليس",-13.625804901123049],["▁montage",-13.62580680847168],["▁Ант",-13.62582778930664],["▁rendelete",-13.625856399536133],["ımızda",-13.62587070465088],["heq",-13.625874519348145],["ნომ",-13.625874519348145],["▁gelap",-13.62588119506836],["▁szabadság",-13.625882148742676],["տնտես",-13.625889778137209],["▁sobiva",-13.625893592834473],["▁άλλοι",-13.625900268554688],["▁नको",-13.625917434692385],["▁szerinti",-13.6259183883667],["▁aastast",-13.625931739807127],["都合",-13.625983238220217],["▁Пир",-13.62598991394043],["ოპ",-13.625990867614746],["▁Garcia",-13.625993728637695],["EMENT",-13.626004219055176],["वर्ग",-13.626005172729492],["ІМ",-13.626033782958984],["▁kendt",-13.626036643981934],["▁fordele",-13.62604808807373],["▁charges",-13.626070022583008],["तुर",-13.626091003417969],["했을",-13.626115798950195],["▁görüşme",-13.626141548156738],["тири",-13.626154899597168],["▁mariage",-13.626161575317385],["▁dejligt",-13.62616729736328],["▁Gründen",-13.62617301940918],["вога",-13.626180648803713],["▁konflik",-13.626185417175291],["ታሪ",-13.626191139221191],["▁oliy",-13.626222610473633],["▁tām",-13.626230239868164],["果然",-13.626230239868164],["▁MJ",-13.626239776611328],["सिया",-13.62624168395996],["▁מדינת",-13.626245498657228],["▁pics",-13.62624740600586],["▁VAT",-13.626253128051758],["▁करीत",-13.626255989074709],["▁เกมส์",-13.626258850097656],["城乡",-13.62627410888672],["▁дэвш",-13.626277923583984],["▁Feed",-13.626290321350098],["▁کارگران",-13.626310348510742],["▁fasad",-13.626327514648438],["▁funciones",-13.626333236694336],["Mir",-13.626350402832031],["▁hevos",-13.626354217529297],["▁večje",-13.626359939575195],["房价",-13.626368522644045],["▁открыл",-13.626398086547852],["▁министра",-13.626415252685549],
["▁Terapi",-13.62642765045166],["ມັກ",-13.62643337249756],["▁mcm",-13.626440048217772],["▁naravni",-13.62644100189209],["▁lehetőségek",-13.62645149230957],["▁minte",-13.626470565795898],["▁Beyaz",-13.626472473144531],["postadresse",-13.62647533416748],["▁정도로",-13.626479148864746],["▁dupla",-13.626484870910645],["ೋರ್",-13.626498222351074],["*******",-13.626508712768556],["alkotás",-13.62651252746582],["ಚಲ",-13.626523971557615],["▁اینڈ",-13.626526832580566],["▁Kwe",-13.626531600952148],["▁speelt",-13.626537322998049],["▁básicos",-13.62656021118164],["รับประกัน",-13.626561164855955],["мисле",-13.62657070159912],["▁pericol",-13.626583099365234],["▁nécessaires",-13.626594543457031],["چہ",-13.62660026550293],["monaster",-13.626632690429688],["▁jornal",-13.626642227172852],["▁דיר",-13.626646995544434],["▁Soli",-13.626659393310549],["▁røde",-13.626659393310549],["خلق",-13.62667465209961],["なくなった",-13.62667465209961],["▁маја",-13.62667751312256],["sakė",-13.626687049865724],["мад",-13.626689910888672],["▁tök",-13.626708030700684],["సై",-13.62671184539795],["นด์",-13.626728057861328],["▁increased",-13.62672996520996],["▁እውነት",-13.626736640930176],["ininko",-13.626748085021973],["▁hyödyn",-13.626750946044922],["រងគ្រោះ",-13.626768112182615],["זול",-13.62678050994873],["קדש",-13.626806259155272],["szła",-13.626840591430664],["▁ବଡ",-13.626842498779297],["Дз",-13.62686824798584],["▁rož",-13.626912117004396],["бори",-13.626916885375977],["dále",-13.626924514770508],["▁Dennis",-13.626925468444824],["Лі",-13.626927375793455],["▁Federación",-13.626956939697266],["▁রাত",-13.626959800720217],["▁къща",-13.626973152160645],["▁поряд",-13.626983642578123],["▁prehod",-13.626988410949709],["▁jautājumi",-13.627023696899414],["Kir",-13.62702465057373],["▁ilo",-13.627026557922363],["łania",-13.627032279968262],["▁ngạc",-13.627046585083008],["▁ଆପ୍",-13.627050399780272],["▁پاڪ",-13.627073287963867],["▁skál",-13.62709903717041],["跟你",-13.627100944519045],["▁Треб",-13.62710189819336],["▁Customer",-13.627104759216309],["▁vette",-13.627111434936523],["▁veb",-13.627120018005373],["非常的",-13.627121925354004],["njom",-13.627126693725586],["NEK",-13.627135276794434],["ਰਸ",-13.627150535583496],["쉽",-13.627167701721191],["▁programy",-13.627168655395508],["▁مہین",-13.62718391418457],["▁והש",-13.627185821533203],["膏",-13.62718677520752],["ēšanās",-13.627191543579102],["陸續",-13.627196311950684],["ಿದ್ದೆ",-13.627198219299316],["زيل",-13.62721061706543],["▁ชิ้น",-13.627219200134276],["แร",-13.62722110748291],["добро",-13.627223014831545],["hiyo",-13.62722396850586],["珍惜",-13.627224922180176],["чкиот",-13.627236366271973],["žený",-13.62724781036377],["竭",-13.62725067138672],["▁negle",-13.627251625061035],["წამ",-13.627267837524414],["▁akta",-13.627269744873049],["วิตามิน",-13.62728500366211],["▁eléctrica",-13.62728500366211],["▁menyaksikan",-13.62728500366211],["▁śmierci",-13.62728500366211],["▁алынған",-13.62728500366211],["▁ժողովուրդ",-13.62728500366211],["▁القضية",-13.62728500366211],["▁मजबूत",-13.62728500366211],["▁მხრივ",-13.62728500366211],["▁förändra",-13.627285957336426],["▁Michezo",-13.627286911010742],["▁kiongozi",-13.627286911010742],["▁ملف",-13.627286911010742],["▁শুধু",-13.627286911010742],["▁Keputusan",-13.62728786468506],["▁całości",-13.627288818359377],["▁грип",-13.627288818359377],["▁αποτελούν",-13.627289772033691],["▁недвижимости",-13.627289772033691],["▁Herbst",-13.62729263305664],["▁Доналд",-13.627293586730955],["▁gaña",-13.62729549407959],["▁Mirë",-13.627297401428224],["▁բուժ",-13.627297401428224],["▁تابستان",-13.6273
01216125488],["映像",-13.627301216125488],["▁Định",-13.62730312347412],["▁संसदीय",-13.62730884552002],["▁સહિત",-13.627310752868652],["keten",-13.627312660217283],["ລັດຖະ",-13.627313613891602],["▁questões",-13.627313613891602],["▁ได้แก่",-13.627317428588867],["▁záznam",-13.627318382263184],["▁Firenze",-13.627321243286133],["lihan",-13.627326011657717],["▁apprezza",-13.627327919006348],["▁Sjáðu",-13.627331733703612],["▁Hillary",-13.627334594726562],["▁Qaranka",-13.627336502075195],["▁положення",-13.627336502075195],["▁sapi",-13.627341270446776],["wehr",-13.627351760864258],["▁manokana",-13.627351760864258],["▁रवि",-13.627362251281738],["少しでも",-13.627362251281738],["すこと",-13.62736988067627],["hoff",-13.627378463745115],["▁понуди",-13.627389907836914],["ప్రసాద్",-13.62739372253418],["▁góry",-13.627398490905762],["學員",-13.627400398254396],["▁liikunta",-13.62740421295166],["▁creado",-13.627408027648926],["▁കാണാന്",-13.627419471740724],["aftale",-13.627442359924316],["様の",-13.627445220947266],["▁lipnja",-13.627446174621582],["▁characters",-13.627456665039062],["▁reúne",-13.62746524810791],["▁აზრ",-13.627492904663086],["COR",-13.627508163452148],["وضح",-13.627508163452148],["▁формула",-13.627511024475098],["ffor",-13.62753963470459],["▁procesul",-13.627552032470703],["▁целта",-13.627558708190918],["▁Gənclər",-13.627559661865234],["tukan",-13.627561569213867],["▁ലക്ഷ",-13.627561569213867],["▁sisteminin",-13.627591133117676],["▁родител",-13.627591133117676],["▁muzica",-13.627592086791992],["▁Eile",-13.627601623535156],["kilde",-13.627602577209473],["izmo",-13.627605438232422],["πού",-13.627609252929688],["▁праграмы",-13.627609252929688],["စပ်",-13.627629280090332],["▁увеличение",-13.62763214111328],["▁tanggung",-13.62763786315918],["נסה",-13.627639770507812],["▁طریقہ",-13.627647399902344],["ບໍ",-13.62768268585205],["▁нейкі",-13.62768268585205],["vöru",-13.627699851989746],["▁prišlo",-13.627711296081545],["▁ನವ",-13.627714157104492],["khan",-13.627731323242188],["頁面",-13.627734184265137],["▁probléma",-13.62774658203125],["േഷൻ",-13.6277494430542],["▁redor",-13.627752304077148],["稍微",-13.627756118774414],["▁إنها",-13.627769470214844],["ΜΗ",-13.627830505371094],["trygg",-13.62784194946289],["етите",-13.627842903137209],["ložené",-13.627848625183104],["▁tuman",-13.627854347229004],["▁حفل",-13.627857208251951],["ಂಡೆ",-13.627890586853027],["▁Gdańsk",-13.62790870666504],["▁Mette",-13.62791919708252],["éag",-13.627920150756836],["สยาม",-13.627925872802734],["▁fath",-13.627947807312012],["▁तिन",-13.627947807312012],["גבר",-13.62795066833496],["▁whose",-13.62795639038086],["ช่องทาง",-13.627976417541504],["filia",-13.62798309326172],["▁verdien",-13.627992630004885],["▁حجاب",-13.62801456451416],["JPG",-13.628016471862791],["▁Vsi",-13.628016471862791],["▁εξε",-13.6280517578125],["▁Vē",-13.62806797027588],["ենի",-13.628091812133787],["▁κυκλοφορ",-13.628095626831056],["ಪಡಿಸಿ",-13.628101348876951],["▁працуе",-13.628106117248535],["▁પુસ્તક",-13.628106117248535],["▁თამაშის",-13.6281099319458],["290",-13.628150939941406],["▁българи",-13.628164291381836],["▁впливу",-13.628164291381836],["为何",-13.628173828125],["正如",-13.628185272216797],["جانب",-13.628202438354492],["▁화면",-13.62822151184082],["▁Woord",-13.62822437286377],["เราก็",-13.628228187561035],["▁jawaab",-13.62826156616211],["▁tosin",-13.628273963928224],["▁казах",-13.628280639648438],["扶贫",-13.628286361694336],["▁bedoeld",-13.628290176391602],["wców",-13.628291130065918],["▁astronom",-13.628299713134766],["ketten",-13.628303527832031],["▁notas",-13.628305435180664],[
"▁подходящи",-13.628318786621094],["▁tarkka",-13.628321647644045],["▁منطق",-13.628332138061523],["▁åker",-13.628334045410156],["God",-13.628339767456056],["▁wuxuuna",-13.62834358215332],["0,5",-13.628347396850586],["▁Brasileiro",-13.628363609313965],["prowadza",-13.628372192382812],["عذر",-13.628379821777344],["▁toleran",-13.62839412689209],["▁vrienden",-13.628398895263672],["▁Kampf",-13.628400802612305],["▁לשמור",-13.628408432006836],["▁tvö",-13.628411293029783],["ಂಟು",-13.628413200378418],["▁sözleri",-13.628414154052734],["miyorum",-13.628424644470217],["iglio",-13.628447532653809],["jelölt",-13.6284761428833],["▁Våra",-13.628503799438477],["▁spen",-13.628504753112791],["ካን",-13.628507614135742],["▁långa",-13.628512382507324],["▁සිරි",-13.628528594970703],["▁VIA",-13.628548622131348],["evro",-13.628581047058104],["三星",-13.628589630126951],["▁ignore",-13.6286039352417],["もらえる",-13.628606796264648],["▁Mű",-13.62861442565918],["sivusto",-13.628625869750977],["енность",-13.62865161895752],["▁farita",-13.628660202026367],["▁háború",-13.628665924072266],["ватися",-13.628667831420898],["▁Kamal",-13.628667831420898],["Хамгийн",-13.628670692443848],["▁артист",-13.62867259979248],["▁kvap",-13.628677368164062],["▁Орталық",-13.628680229187012],["▁परिसर",-13.628682136535645],["▁můžeme",-13.628684043884276],["أمل",-13.628690719604492],["▁హాట్",-13.62869358062744],["няття",-13.628695487976074],["▁Бө",-13.628707885742188],["▁குற்ற",-13.628707885742188],["みると",-13.628710746765137],["DIO",-13.628739356994627],["▁Moral",-13.628759384155272],["▁Затоа",-13.628759384155272],["oskus",-13.628762245178224],["▁onthou",-13.62877082824707],["▁skráð",-13.628771781921388],["到達",-13.62878704071045],["νεται",-13.628789901733398],["VERS",-13.628806114196776],["ांतर",-13.628808975219728],["▁Nein",-13.62881088256836],["tietoja",-13.628820419311523],["hên",-13.628829956054688],["▁companie",-13.628866195678713],["▁Şen",-13.62886905670166],["ировании",-13.628876686096191],["camera",-13.628884315490724],["▁κολ",-13.62889575958252],["▁insula",-13.628907203674316],["اهر",-13.628926277160645],["▁laborat",-13.628934860229492],["▁Бру",-13.62895679473877],["ોએ",-13.628966331481934],["øs",-13.6289701461792],["▁məqsəd",-13.628979682922363],["▁සටහන",-13.62898063659668],["กลับไป",-13.628993034362791],["thaí",-13.62899684906006],["ņēma",-13.628997802734377],["落地",-13.629010200500488],["үүсү",-13.629013061523438],["hallgató",-13.62901496887207],["▁Investiga",-13.62901496887207],["▁Adel",-13.629015922546388],["▁barber",-13.629037857055664],["▁dövrü",-13.629051208496094],["服用",-13.629057884216309],["kese",-13.629067420959473],["▁обмежен",-13.629068374633787],["▁setahun",-13.629082679748535],["▁Swydd",-13.629090309143066],["வான்",-13.629100799560549],["▁போர்",-13.62913703918457],["ាច់",-13.629140853881836],["രാമ",-13.629144668579102],["▁atinge",-13.629152297973633],["▁medije",-13.629156112670898],["ثلاث",-13.62920379638672],["prawidłow",-13.629230499267578],["stúpen",-13.629233360290527],["бије",-13.629233360290527],["▁provision",-13.629242897033691],["▁為",-13.629262924194336],["▁получать",-13.629291534423828],["暫",-13.629294395446776],["ກອງ",-13.62929630279541],["АНД",-13.62932586669922],["▁איכות",-13.629326820373535],["▁lavere",-13.629329681396484],["banding",-13.6293363571167],["ضخم",-13.629345893859863],["мэ",-13.62934684753418],["▁wêze",-13.629350662231444],["宜蘭",-13.629356384277344],["艇",-13.629358291625977],["喻",-13.62936019897461],["콜걸",-13.629369735717772],["フォーム",-13.629379272460938],["kuthi",-13.629387855529783],["ランキング",-13.62938
8809204102],["▁чиг",-13.629390716552734],["▁müalicə",-13.629399299621582],["▁δείχνει",-13.629399299621582],["▁δυνάμεις",-13.629399299621582],["▁χθες",-13.629399299621582],["▁Здравствуйте",-13.629399299621582],["▁апликација",-13.629399299621582],["▁зөвлөгөө",-13.629399299621582],["▁наоборот",-13.629399299621582],["▁حوادث",-13.629399299621582],["▁فورسز",-13.629399299621582],["▁هکله",-13.629399299621582],["▁ਸਮਾਗਮ",-13.629399299621582],["▁រំលែក",-13.629399299621582],["맘",-13.629399299621582],["▁espécie",-13.629400253295898],["▁رضوی",-13.629400253295898],["▁ڪڍي",-13.629400253295898],["▁Цэнтр",-13.629401206970217],["▁whatsapp",-13.629402160644531],["▁առանձնա",-13.629402160644531],["▁მიზანი",-13.629402160644531],["標題",-13.629402160644531],["▁ictimaiyyət",-13.629404067993164],["▁otomatis",-13.629404067993164],["▁övriga",-13.629406929016112],["▁যান",-13.629406929016112],["▁ස්තුතියි",-13.62940788269043],["▁utakmica",-13.629409790039062],["grave",-13.62941074371338],["ถวาย",-13.629411697387695],["เลิก",-13.629411697387695],["▁Quyết",-13.629413604736328],["▁තවමත්",-13.62941551208496],["▁mladý",-13.629423141479492],["▁пытанні",-13.629423141479492],["▁erzählt",-13.629424095153809],["▁objev",-13.629424095153809],["▁uwch",-13.62942600250244],["▁సత్య",-13.629429817199709],["▁йдеться",-13.629430770874023],["ซัก",-13.62943172454834],["▁τιμές",-13.629436492919922],["trzymuje",-13.629438400268556],["▁الأكثر",-13.629443168640137],["▁mure",-13.629449844360352],["▁ସରକାରୀ",-13.629449844360352],["ណ្ត",-13.629454612731934],["▁기초",-13.629457473754885],["探し",-13.62946891784668],["▁jeudi",-13.629480361938477],["▁ostatnio",-13.629481315612791],["▁vlhk",-13.629483222961426],["▁сајта",-13.62948513031006],["▁šogad",-13.629493713378906],["▁Seán",-13.629501342773438],["▁қаласының",-13.629504203796388],["▁trovano",-13.629508972167969],["▁coordena",-13.629509925842283],["উদ্দিন",-13.629518508911133],["uddannelse",-13.629521369934082],["ตํา",-13.629528045654297],["▁получает",-13.629528045654297],["▁wobei",-13.62952995300293],["Kommission",-13.629535675048828],["▁영향을",-13.629541397094728],["▁չենք",-13.629549026489258],["▁retirar",-13.629563331604004],["▁gezi",-13.629566192626951],["▁etiqueta",-13.62956714630127],["▁Şehir",-13.62957000732422],["▁Benar",-13.629573822021484],["▁хэрэглэгч",-13.629609107971191],["▁būtent",-13.62961196899414],["۳۰",-13.629624366760254],["▁مخدر",-13.62964153289795],["▁საშუალება",-13.629667282104492],["▁Суб",-13.629678726196287],["▁барлығы",-13.629681587219238],["▁බය",-13.629681587219238],["▁dodá",-13.629698753356934],["▁kesäkuuta",-13.629703521728516],["▁pohval",-13.629704475402832],["40)",-13.62971019744873],["tämällä",-13.629751205444336],["▁ungdomar",-13.629754066467283],["қыт",-13.629755020141602],["▁സമര",-13.6297607421875],["ন্স",-13.629768371582031],["▁refus",-13.62978172302246],["igoj",-13.62978458404541],["safiri",-13.629801750183104],["潜力",-13.629803657531738],["▁opportun",-13.629809379577637],["УРА",-13.629817962646484],["ສະຖານ",-13.6298189163208],["BLE",-13.629820823669434],["គួរ",-13.629841804504396],["無い",-13.629841804504396],["▁വിമാന",-13.629853248596191],["▁معدل",-13.629858016967772],["▁인터뷰",-13.629858016967772],["太空",-13.629905700683594],["▁гараж",-13.62990665435791],["3,9",-13.629919052124023],["133",-13.629921913146973],["▁gamitin",-13.629925727844238],["▁مشتری",-13.629928588867188],["▁ekonomiska",-13.629931449890137],["mængde",-13.629932403564451],["▁тәжірибе",-13.629937171936035],["ಷ್ಟೇ",-13.629938125610352],["kuvia",-13.629944801330566],["thyr",-13.629949569702148],["▁нагород",-13.6
29953384399414],["казывают",-13.629956245422363],["▁gelişme",-13.629997253417969],["▁kombinaci",-13.629998207092283],["▁იქნა",-13.6300048828125],["ម៉ូ",-13.630020141601562],["▁Eropa",-13.630027770996094],["corrido",-13.630046844482422],["īšu",-13.63005256652832],["कृती",-13.63007926940918],["▁Limit",-13.630088806152344],["തേ",-13.63009262084961],["▁Կր",-13.630169868469238],["হো",-13.630186080932615],["ेली",-13.630188941955566],["ကန်",-13.63023281097412],["▁Drug",-13.630241394042969],["▁comprende",-13.630248069763184],["▁inovacij",-13.6302490234375],["أدوات",-13.630261421203612],["▁Koç",-13.630261421203612],["▁Ungdom",-13.63026523590088],["▁decisiv",-13.630300521850586],["统",-13.630331039428713],["▁imani",-13.630331993103027],["▁parecido",-13.630365371704102],["кг",-13.630376815795898],["▁visade",-13.630377769470217],["▁expression",-13.630379676818848],["▁közt",-13.630414962768556],["▁կազմակերպության",-13.6304292678833],["▁šaty",-13.630488395690918],["보자",-13.63050937652588],["สมเด็จพระ",-13.630510330200195],["▁Rond",-13.630515098571776],["ұқ",-13.630517959594728],["тарынын",-13.630521774291992],["▁qrupu",-13.630528450012209],["▁impedir",-13.630529403686523],["▁produtor",-13.630534172058104],["的形式",-13.6305570602417],["Esport",-13.630606651306152],["▁uvid",-13.63062858581543],["▁подне",-13.630630493164062],["释",-13.630669593811035],["គាត់",-13.630681991577148],["્લ",-13.63071346282959],["▁третира",-13.630732536315918],["kuqala",-13.630738258361816],["oise",-13.630742073059082],["బోయ",-13.630748748779297],["▁споруд",-13.630752563476562],["的不同",-13.630752563476562],["不去",-13.63075828552246],["ജന്",-13.630769729614258],["▁shty",-13.630782127380373],["みよう",-13.630810737609863],["▁astazi",-13.630812644958496],["▁escrit",-13.630831718444824],["午餐",-13.630839347839355],["६५",-13.630849838256836],["▁cok",-13.630853652954102],["տել",-13.630857467651367],["▁Значи",-13.630858421325684],["一間",-13.630860328674316],["kodás",-13.630870819091797],["▁Amit",-13.630884170532228],["▁escreve",-13.630887031555176],["▁carton",-13.630903244018556],["јам",-13.630964279174805],["1985",-13.630990982055664],["రస",-13.631022453308104],["▁crédit",-13.631022453308104],["็น",-13.63103199005127],["cjami",-13.631061553955078],["少ない",-13.631068229675291],["र्जी",-13.631072998046877],["▁размещен",-13.631073951721191],["▁ІІІ",-13.631097793579102],["scherm",-13.631101608276367],["▁Dzieci",-13.631101608276367],["คําตอบ",-13.63112449645996],["뷰",-13.63112449645996],["▁198",-13.63113784790039],["schie",-13.631144523620604],["▁خاور",-13.631155967712402],["▁WHO",-13.631157875061035],["obec",-13.63117504119873],["▁bestaande",-13.631185531616213],["▁ತಿರು",-13.631192207336426],["▁Шы",-13.631248474121094],["žov",-13.631267547607422],["ೆಂದರೆ",-13.631288528442385],["ври",-13.631355285644531],["▁مرزا",-13.63136863708496],["دولة",-13.63138484954834],["ڭى",-13.63138484954834],["sendt",-13.631387710571287],["手上",-13.631390571594238],["▁потребителя",-13.631393432617188],["ANIA",-13.631414413452148],["▁сваё",-13.631429672241213],["▁پڇ",-13.631454467773438],["Edit",-13.631468772888184],["▁предложил",-13.631471633911133],["▁ниско",-13.63149070739746],["cibus",-13.631497383117676],["▁famili",-13.631500244140623],["コンテンツ",-13.631508827209473],["▁ჩანაწერ",-13.63151741027832],["яўляюцца",-13.631518363952637],["▁esperienze",-13.631518363952637],["▁posljednje",-13.631518363952637],["▁včasih",-13.631518363952637],["▁ανθρώπων",-13.631518363952637],["▁збільшення",-13.631518363952637],["▁осъществява",-13.631518363952637],["▁لدينا",-13.631518363952637],["▁مر
حلة",-13.631518363952637],["▁वायरल",-13.631518363952637],["묘",-13.631518363952637],["▁orixinal",-13.631519317626951],["▁фантаз",-13.631519317626951],["▁посредством",-13.63152027130127],["▁მიღება",-13.63152027130127],["▁прийняття",-13.63152313232422],["▁جرګې",-13.63152313232422],["▁магчымасць",-13.631528854370115],["utunut",-13.631532669067385],["▁recognize",-13.631535530090332],["平臺",-13.631535530090332],["साहेब",-13.631537437438965],["▁бичсэн",-13.631537437438965],["▁olukord",-13.63153839111328],["▁سلول",-13.63153839111328],["fanikiwa",-13.631540298461914],["▁dürfte",-13.63154125213623],["▁lediglich",-13.631543159484863],["▁예정이다",-13.631543159484863],["ແຫ່ງຊາດ",-13.631546020507812],["▁montáž",-13.631552696228027],["▁памяти",-13.631553649902344],["▁4.000",-13.631558418273926],["▁अनुमान",-13.63156032562256],["▁زمرہ",-13.63158893585205],["▁Srbija",-13.631591796875],["▁kahwin",-13.631603240966797],["фаќа",-13.631627082824709],["▁tosiaan",-13.631635665893556],["▁Εάν",-13.631646156311035],["हां",-13.631648063659668],["▁vakre",-13.631648063659668],["မှန်",-13.631681442260742],["▁zsír",-13.63168716430664],["▁öljy",-13.63169765472412],["▁реду",-13.631708145141602],["ຊຸດ",-13.631714820861816],["▁عمومي",-13.631732940673828],["▁sorriso",-13.631733894348145],["▁временем",-13.63173484802246],["▁পড়",-13.631742477416992],["БАЛ",-13.631769180297852],["▁pronta",-13.631770133972168],["▁જીત",-13.631817817687988],["▁проводиться",-13.631823539733888],["▁מתו",-13.631829261779783],["▁decenni",-13.631836891174316],["▁اثرات",-13.631841659545898],["▁εκπαιδευ",-13.631845474243164],["▁Seveda",-13.631847381591797],["llisten",-13.631853103637695],["▁തയ്യാറ",-13.631857872009276],["▁polskiej",-13.631877899169922],["一开始",-13.631898880004885],["йтін",-13.631900787353516],["▁كشور",-13.63190460205078],["બન",-13.63190746307373],["▁Mới",-13.63190746307373],["▁sohasida",-13.631916999816896],["▁زياد",-13.631927490234377],["▁مىللەت",-13.631946563720703],["បែប",-13.631958961486816],["▁కంటే",-13.631975173950195],["▁vēlāk",-13.631978034973145],["▁المك",-13.631980895996094],["▁ottimo",-13.631994247436523],["▁aviat",-13.631999015808104],["▁Maž",-13.632024765014648],["▁nyolc",-13.632030487060549],["skrid",-13.632049560546877],["▁alındı",-13.632050514221191],["▁morir",-13.632062911987305],["▁arttır",-13.632070541381836],["नव",-13.632073402404783],["▁nafas",-13.632083892822266],["▁Trading",-13.63209056854248],["▁Graz",-13.632099151611328],["ován",-13.632137298583984],["vask",-13.632140159606934],["▁promosi",-13.632161140441896],["apte",-13.632162094116213],["▁pojawia",-13.632172584533691],["vjeçari",-13.632180213928224],["▁말이",-13.632183074951172],["тою",-13.632219314575195],["ანტ",-13.632238388061523],["▁kainos",-13.63224983215332],["وحد",-13.632254600524902],["কর্মী",-13.63225555419922],["▁pozabi",-13.63225555419922],["چین",-13.632268905639648],["▁Bund",-13.632277488708496],["cidade",-13.632284164428713],["▁गोष्ट",-13.632288932800291],["▁aren",-13.63230323791504],["▁तद्",-13.632311820983888],["▁만들기",-13.632318496704102],["▁បំ",-13.632323265075684],["▁postea",-13.632331848144531],["▁humaine",-13.632341384887695],["▁furi",-13.632349967956545],["▁unabhängig",-13.632351875305176],["生成",-13.632369995117188],["關心",-13.6323881149292],["ювали",-13.632390975952148],["▁தொடங்க",-13.632396697998049],["▁Ziele",-13.632404327392578],["σού",-13.632406234741213],["▁մեկնարկ",-13.63240909576416],["▁kreative",-13.63242530822754],["▁şand",-13.632451057434082],["პერ",-13.632464408874512],["osok",-13.632519721984863],["▁qadınlar",-13.632524490356444],["▁출연",
-13.632525444030762],["▁Kulit",-13.632556915283203],["বাই",-13.632561683654783],["▁vzduchu",-13.632561683654783],["▁გადავ",-13.632563591003418],["▁commerciali",-13.632568359375],["kundi",-13.632588386535645],["午前",-13.63259506225586],["▁390",-13.632597923278809],["▁заиста",-13.632597923278809],["▁يەنى",-13.63259983062744],["▁garantizar",-13.63260269165039],["▁association",-13.632612228393556],["▁ordem",-13.632623672485352],["exercice",-13.632651329040527],["▁Papild",-13.632658004760742],["▁бирөө",-13.632661819458008],["▁жағы",-13.63266944885254],["ਲੁ",-13.632675170898438],["▁xilli",-13.63267993927002],["▁ترتيب",-13.632699012756348],["▁heman",-13.632709503173828],["情形",-13.632709503173828],["IEM",-13.632712364196776],["▁insanı",-13.632715225219728],["美丽的",-13.63274097442627],["нуцца",-13.632763862609863],["▁miałam",-13.632766723632812],["▁ASC",-13.632780075073242],["Манас",-13.63278579711914],["arreta",-13.632787704467772],["▁krydd",-13.63278865814209],["▁газеті",-13.63280200958252],["▁raggi",-13.632804870605469],["98)",-13.632813453674316],["အဖ",-13.632816314697266],["ційний",-13.632840156555176],["幼兒",-13.632857322692873],["ბერი",-13.632877349853516],["экономикалык",-13.63288116455078],["besi",-13.632892608642578],["▁됐다",-13.632896423339844],["▁aký",-13.632898330688477],["▁ລະຫວ່າງ",-13.632906913757324],["国会",-13.632908821105955],["systemen",-13.63291072845459],["▁sark",-13.632930755615234],["▁นม",-13.632938385009766],["▁مستند",-13.632953643798828],["347",-13.63304042816162],["יחת",-13.633047103881836],["tumaan",-13.633069038391112],["ತಿಯ",-13.63307285308838],["बारी",-13.633097648620604],["▁tradisjon",-13.633111000061035],["▁എന്തിന",-13.633171081542969],["哲学",-13.633174896240234],["math",-13.63319206237793],["▁spomen",-13.633234977722168],["한다면",-13.6332426071167],["takt",-13.633255004882812],["▁sonraí",-13.633255004882812],["KATA",-13.633256912231444],["kerk",-13.633292198181152],["ገባው",-13.633294105529783],["▁сонин",-13.633296966552734],["▁патент",-13.633310317993164],["▁halli",-13.633315086364746],["▁sól",-13.633340835571287],["▁beban",-13.633345603942873],["▁трима",-13.633382797241213],["▁voglio",-13.6334228515625],["ຸງ",-13.633427619934082],["心态",-13.633431434631348],["▁luftës",-13.633432388305664],["▁ಹರ",-13.633438110351562],["സ്സ്",-13.633440971374512],["2%)",-13.633458137512209],["ējums",-13.633458137512209],["▁көші",-13.633467674255373],["▁ملے",-13.63347053527832],["▁rapida",-13.63347339630127],["▁እንድ",-13.633484840393066],["กิจการ",-13.633489608764648],["▁قائل",-13.633493423461914],["▁bột",-13.63349723815918],["▁מוצ",-13.633502960205078],["ńców",-13.633506774902344],["uuuu",-13.633512496948242],["区别",-13.63351821899414],["▁νέου",-13.633520126342772],["▁លក់",-13.633522987365724],["▁ficam",-13.633526802062988],["▁açılış",-13.63353157043457],["损害",-13.633544921875],["▁αίμα",-13.633557319641112],["▁අදහස",-13.63355827331543],["▁helaas",-13.633563995361328],["RNA",-13.63356590270996],["▁virksomheden",-13.633570671081545],["必须要",-13.633570671081545],["赏",-13.633573532104492],["▁opdage",-13.633594512939451],["涉嫌",-13.633610725402832],["輯",-13.633615493774414],["▁এলাকা",-13.633617401123049],["棟",-13.63362979888916],["▁DEC",-13.633631706237791],["járás",-13.633634567260742],["开启",-13.633637428283691],["▁колега",-13.63364028930664],["▁ಆಸ್ಪತ್ರೆ",-13.63364028930664],["⚫",-13.63364028930664],["開展",-13.63364028930664],["วุฒิ",-13.633641242980955],["សិស្ស",-13.633641242980955],["▁Yogyakarta",-13.633641242980955],["▁nämligen",-13.633641242980955],["▁τραγουδ",-13.633641242980955],["▁χιλιάδες",-13.6
33641242980955],["▁инспектор",-13.633641242980955],["▁офицер",-13.633641242980955],["▁נאָך",-13.633641242980955],["▁اشتغال",-13.633641242980955],["▁اهڙو",-13.633641242980955],["▁فروردین",-13.633641242980955],["▁प्राकृतिक",-13.633641242980955],["▁বিদ্যালয়",-13.633641242980955],["▁ପ୍ରକଳ୍ପ",-13.633641242980955],["▁반드시",-13.633641242980955],["▁beträgt",-13.63364315032959],["▁kijamii",-13.63364315032959],["▁પ્રકાશ",-13.63364315032959],["▁ikastaro",-13.633644104003906],["▁प्रतिनिधिसभा",-13.633644104003906],["▁ಯಾಕೆ",-13.633644104003906],["▁Telangana",-13.633645057678224],["▁slapukus",-13.633645057678224],["▁ਭਗਤ",-13.633646965026855],["▁akinek",-13.633647918701172],["▁отримав",-13.633648872375488],["▁চেয়ে",-13.633649826049805],["▁메뉴",-13.633649826049805],["▁labiausiai",-13.63365077972412],["▁تسجيل",-13.633651733398438],["▁वित्तीय",-13.633652687072754],["▁materiālu",-13.633659362792969],["▁වෙඩි",-13.63366413116455],["▁مهمترین",-13.633668899536133],["▁repülő",-13.633670806884766],["▁Sergi",-13.633679389953612],["tidningen",-13.633681297302246],["▁equilibrio",-13.633682250976562],["▁பின்னர்",-13.633682250976562],["වෝ",-13.633688926696776],["▁Kuko",-13.633689880371094],["▁reclame",-13.633694648742676],["▁районунун",-13.633697509765623],["lerimizin",-13.633700370788574],["▁väntar",-13.633702278137209],["द्वितीय",-13.633710861206056],["▁រូបភាព",-13.633718490600586],["עיל",-13.63372039794922],["▁lakosság",-13.633733749389648],["-33",-13.633737564086914],["充実",-13.633739471435549],["▁grec",-13.633748054504396],["▁promote",-13.633781433105469],["ציוד",-13.633797645568848],["▁çeken",-13.633806228637695],["▁विगत",-13.633806228637695],["▁ddwy",-13.633808135986328],["作为一个",-13.633814811706545],["▁риз",-13.633816719055176],["▁kiekviena",-13.633822441101074],["▁ạ",-13.633828163146973],["▁Siria",-13.633830070495604],["向き",-13.63383674621582],["מינה",-13.63385009765625],["▁ақыл",-13.63385772705078],["▁төслийн",-13.633862495422363],["▁menikah",-13.633864402770996],["▁gato",-13.633872032165527],["чниот",-13.633892059326172],["▁falas",-13.633904457092283],["ዕድ",-13.633905410766602],["tohen",-13.633914947509766],["▁nocleg",-13.63392734527588],["用餐",-13.633931159973145],["帰り",-13.633940696716309],["▁sollicit",-13.633949279785156],["5,5",-13.633967399597168],["▁talabalar",-13.633970260620115],["івської",-13.63397216796875],["Kalau",-13.633980751037598],["γέν",-13.633986473083496],["▁medali",-13.633996963500977],["區的",-13.633997917175291],["▁mural",-13.634021759033203],["▁Lehet",-13.634037971496582],["▁තිබුණේ",-13.63405704498291],["没什么",-13.634077072143556],["▁Λέ",-13.63408374786377],["▁ممثل",-13.634100914001465],["▁κανονι",-13.634117126464844],["▁خودتان",-13.63413429260254],["▁Lajme",-13.634136199951172],["▁scrive",-13.634161949157717],["tänyt",-13.634170532226562],["▁верш",-13.634170532226562],["avaram",-13.634172439575195],["▁ontwikkelen",-13.634174346923828],["▁මොකක්ද",-13.634175300598145],["▁конститу",-13.634178161621094],["▁sonraki",-13.634187698364258],["▁අන",-13.634211540222168],["▁Марин",-13.634222030639648],["▁rusa",-13.63424301147461],["кыл",-13.634267807006836],["▁นโยบาย",-13.634281158447266],["täisi",-13.634283065795898],["തിന",-13.634284019470217],["வருக்கு",-13.634300231933594],["้ย",-13.634315490722656],["▁contactar",-13.634321212768556],["ୀନ",-13.634337425231934],["▁авіа",-13.63434886932373],["▁тааныш",-13.634361267089844],["▁deixando",-13.634380340576172],["▁fiyatı",-13.634385108947754],["▁ፊት",-13.634389877319336],["▁ekarri",-13.634394645690918],["始めた",-13.634394645690918],["▁справі",-13.63440799713134
8],["نظ",-13.63442039489746],["▁güncel",-13.634422302246094],["skatīt",-13.63442611694336],["vajte",-13.63442611694336],["▁formulier",-13.634445190429688],["▁односе",-13.634478569030762],["endid",-13.63448429107666],["▁nadležn",-13.63449001312256],["ליין",-13.63450527191162],["ธี",-13.63451862335205],["▁გრძნობ",-13.634525299072266],["சர்",-13.63453483581543],["▁Wester",-13.634561538696287],["▁malá",-13.634564399719238],["▁පුතා",-13.634567260742188],["▁Wannan",-13.634570121765137],["▁чаш",-13.63457202911377],["kendt",-13.634578704833984],["ريو",-13.634591102600098],["▁Ağa",-13.634598731994627],["▁heerlijke",-13.634602546691896],["ບິນ",-13.63463020324707],["ЫЗ",-13.634685516357422],["bás",-13.63471221923828],["▁అర",-13.634726524353027],["▁mielen",-13.634730339050291],["cón",-13.63473415374756],["MARK",-13.634736061096191],["▁extract",-13.634758949279783],["▁český",-13.634761810302734],["metru",-13.634771347045898],["▁Forbes",-13.634779930114746],["▁träna",-13.63479709625244],["େଲ",-13.634809494018556],["▁Dung",-13.63483428955078],["っても",-13.634854316711426],["Џ",-13.634858131408691],["▁rahi",-13.634858131408691],["Was",-13.634860038757324],["▁ത്ഥി",-13.63487148284912],["到底是",-13.63487434387207],["▁Ушул",-13.634881973266602],["ШУ",-13.634882926940918],["▁ഇന്ത്യൻ",-13.63489818572998],["ाचं",-13.634922981262209],["EVO",-13.634932518005373],["▁занятия",-13.634932518005373],["gation",-13.634953498840332],["▁maande",-13.634963035583496],["地板",-13.634969711303713],["jamā",-13.634977340698242],["▁desuden",-13.63499641418457],["istika",-13.635001182556152],["▁butter",-13.635010719299316],["▁مارك",-13.635015487670898],["ිනි",-13.635027885437012],["▁Mazda",-13.635038375854492],["▁NË",-13.63506317138672],["▁seboj",-13.635088920593262],["▁drugs",-13.635136604309082],["▁dobrym",-13.635138511657717],["▁sucre",-13.635149002075195],["▁туризма",-13.635149955749512],["▁לקו",-13.635159492492676],["▁посока",-13.635169982910156],["қыл",-13.635191917419434],["▁ئات",-13.635202407836914],["▁სანამ",-13.635229110717772],["szuk",-13.635238647460938],["▁схож",-13.635257720947266],["▁Biar",-13.635272979736328],["▁bredt",-13.635272979736328],["▁leverera",-13.635282516479492],["▁slavi",-13.635282516479492],["▁ವಿಧಾನ",-13.635284423828123],["▁diskutera",-13.63528823852539],["▁tír",-13.635316848754885],["женні",-13.635323524475098],["はこちら",-13.635334014892578],["▁સ્વા",-13.635334968566896],["یزو",-13.635335922241213],["félagið",-13.635337829589844],["▁Carter",-13.635347366333008],["chadh",-13.63535213470459],["्क",-13.635360717773438],["▁avand",-13.635367393493652],["ండే",-13.635380744934082],["Пра",-13.635393142700195],["fati",-13.635397911071776],["król",-13.635408401489258],["▁Glede",-13.635424613952637],["▁Flag",-13.635438919067385],["▁emango",-13.635442733764648],["track",-13.635449409484863],["▁kapta",-13.635452270507812],["▁Gré",-13.635462760925291],["让她",-13.63546371459961],["▁geldt",-13.635489463806152],["organi",-13.635491371154783],["を見た",-13.63549518585205],["▁wahrscheinlich",-13.635498046875],["წინ",-13.63552951812744],["下記の",-13.635538101196287],["▁மலை",-13.635540008544922],["▁uniforme",-13.635542869567873],["▁banken",-13.635550498962402],["▁Nõ",-13.635560989379885],["άμα",-13.635586738586426],["▁വോട്ട",-13.635587692260742],["不错的",-13.635597229003906],["▁рећи",-13.63560676574707],["bair",-13.635614395141602],["▁napriek",-13.635625839233398],["୍ର",-13.63564682006836],["merkin",-13.635647773742676],["തെന്നും",-13.635655403137209],["▁skogen",-13.63565731048584],["isellä",-13.635659217834473],["我去",-13.635689735412598
],["OFF",-13.635714530944824],["阻止",-13.635719299316406],["说是",-13.635722160339355],["PRA",-13.635724067687988],["▁Kır",-13.635726928710938],["汪",-13.6357421875],["饮食",-13.635751724243164],["素晴らしい",-13.635754585266112],["口コミ",-13.635757446289062],["薇",-13.635757446289062],["▁Jl",-13.635759353637695],["セミナー",-13.635759353637695],["ทําความสะอาด",-13.63576602935791],["▁Bevölkerung",-13.63576889038086],["▁Dnevnik",-13.63576889038086],["▁Secretaría",-13.63576889038086],["▁erhöht",-13.63576889038086],["▁eteenpäin",-13.63576889038086],["▁gefällt",-13.63576889038086],["▁kvôli",-13.63576889038086],["▁ΤΗΝ",-13.63576889038086],["▁υπηρεσιών",-13.63576889038086],["▁последната",-13.63576889038086],["▁քննչական",-13.63576889038086],["▁ਸਿੱਧੂ",-13.63576889038086],["▁મેનેજમેન્ટ",-13.63576889038086],["▁କର୍ମଚାରୀ",-13.63576889038086],["▁ଦୁର୍ଘଟଣା",-13.63576889038086],["▁விருது",-13.63576889038086],["▁ತಿಳಿಯ",-13.63576889038086],["▁ආරක්ෂක",-13.63576889038086],["▁ασχολ",-13.635769844055176],["▁shilingi",-13.635771751403809],["▁נסיעה",-13.635772705078123],["▁figyelembe",-13.635775566101074],["▁žinios",-13.635775566101074],["▁жінка",-13.635778427124023],["▁قاچاق",-13.635780334472656],["▁opremljen",-13.635781288146973],["▁είδη",-13.635787010192873],["激情",-13.635787010192873],["▁Hrvatskog",-13.635788917541504],["▁diagnóstico",-13.635788917541504],["संख्य",-13.635793685913086],["▁Hastanesi",-13.635793685913086],["▁ଲେଖା",-13.635793685913086],["▁1895",-13.63579559326172],["▁Толькі",-13.63579559326172],["основ",-13.63580322265625],["इरहेका",-13.635807037353516],["▁اتنی",-13.635807037353516],["▁टीवी",-13.635807991027832],["▁iniziative",-13.635814666748049],["▁vybrať",-13.635815620422363],["▁गर्छन्",-13.63581657409668],["▁दुकान",-13.635817527770996],["דרש",-13.635826110839844],["▁pagalbos",-13.635826110839844],["▁framtíð",-13.635828971862791],["▁yoldaşı",-13.63584041595459],["野菜",-13.635844230651855],["▁Johansson",-13.635849952697754],["ELLA",-13.635851860046388],["▁skupno",-13.635862350463867],["▁etiraz",-13.6358642578125],["两个人",-13.6358642578125],["▁poduze",-13.635868072509766],["▁vendu",-13.635872840881348],["▁tarbiya",-13.63587474822998],["▁paiement",-13.63587760925293],["▁знань",-13.635889053344728],["▁مجید",-13.635893821716309],["▁마이",-13.635899543762209],["▁variedad",-13.635906219482422],["▁bilind",-13.635912895202637],["▁Benedict",-13.635916709899902],["טס",-13.635920524597168],["演讲",-13.635926246643066],["369",-13.6359281539917],["Mobile",-13.63593292236328],["▁carrière",-13.635942459106444],["▁kolmanda",-13.635946273803713],["▁மாதம்",-13.635950088500977],["▁jedného",-13.635953903198242],["▁فقير",-13.63595485687256],["tologi",-13.635964393615724],["▁уметност",-13.635981559753418],["▁assortiment",-13.635986328125],["▁ચી",-13.635990142822266],["▁কারণ",-13.635994911193848],["méter",-13.63599967956543],["▁Baadhi",-13.636006355285645],["boden",-13.63600730895996],["▁lapā",-13.636042594909668],["▁Plaça",-13.6360445022583],["▁உலகம்",-13.63605499267578],["▁जरूरी",-13.636058807373049],["▁കൊല്ല",-13.636061668395996],["anvisning",-13.636075973510742],["の関係",-13.636079788208008],["▁iesnieg",-13.63609218597412],["بنا",-13.63609504699707],["▁hoga",-13.636096954345703],["ບໍລິການ",-13.636098861694336],["▁besuchen",-13.636104583740234],["▁Kivi",-13.636109352111816],["ที่ได้รับ",-13.636115074157717],["▁barrier",-13.636133193969728],["▁týmto",-13.636144638061523],["只需要",-13.636150360107422],["▁genu",-13.636159896850586],["ungguh",-13.636176109313965],["▁Chcem",-13.63618278503418],["вшихся",-13.636187553405762],["87)",-13.6361885070800
78],["▁pokušava",-13.63620662689209],["▁Puli",-13.636228561401367],["▁barev",-13.63623046875],["ृत",-13.636239051818848],["▁ભાઈ",-13.63624095916748],["▁mükafat",-13.63625717163086],["▁العلمية",-13.63625717163086],["▁Elles",-13.636277198791504],["▁FOTOLAR",-13.636284828186035],["▁ጦርነት",-13.636289596557615],["▁branży",-13.636290550231934],["водять",-13.636300086975098],["▁värske",-13.636305809020996],["ላስ",-13.636306762695312],["▁gosod",-13.636324882507324],["ЕКТ",-13.636337280273438],["▁كاملة",-13.636338233947754],["▁Två",-13.636340141296388],["belo",-13.636346817016602],["०८",-13.636357307434082],["理财",-13.636358261108398],["ದಾಗ",-13.636362075805664],["াধ্য",-13.636395454406738],["მედ",-13.636404037475586],["broek",-13.636418342590332],["essere",-13.636420249938965],["▁Ubi",-13.636429786682127],["दुर",-13.636431694030762],["יקע",-13.636434555053713],["▁effettuare",-13.636451721191406],["▁യോഗ",-13.636452674865724],["▁წერ",-13.636489868164062],["OVE",-13.636513710021973],["ওয়ে",-13.636544227600098],["DNA",-13.636556625366213],["үнөн",-13.63659381866455],["▁창업",-13.6365966796875],["▁tenham",-13.636603355407717],["▁gaming",-13.636605262756348],["▁Első",-13.636653900146484],["▁rašo",-13.636658668518066],["גנים",-13.636669158935549],["上傳",-13.63670825958252],["▁मधेश",-13.636717796325684],["▁අයිය",-13.636719703674316],["ことから",-13.636719703674316],["▁FORMA",-13.636754989624023],["▁պահին",-13.636757850646973],["▁igrača",-13.636775970458984],["▁રાખી",-13.636784553527832],["▁emme",-13.636786460876465],["▁tulong",-13.636796951293944],["▁Ethiopian",-13.63680362701416],["▁کاربردی",-13.636829376220703],["▁границе",-13.63683032989502],["▁lục",-13.636837005615234],["▁Във",-13.636839866638184],["КІ",-13.636845588684082],["fft",-13.636889457702637],["葡萄",-13.636890411376951],["▁elämää",-13.63696575164795],["▁هوء",-13.636975288391112],["▁ট্র",-13.636988639831545],["ریم",-13.637030601501465],["▁TAG",-13.637043952941896],["मराठ",-13.637056350708008],["کول",-13.63706111907959],["1979",-13.637062072753906],["▁satis",-13.637064933776855],["▁калтыр",-13.637077331542969],["विर",-13.637104988098145],["▁саласы",-13.63710880279541],["▁salan",-13.637141227722168],["אקט",-13.6371431350708],["ියට",-13.637155532836914],["ніть",-13.63716983795166],["ඩුව",-13.637171745300291],["▁uffici",-13.63720989227295],["▁Gerard",-13.63723850250244],["студ",-13.637269973754885],["إنجاز",-13.637272834777832],["었던",-13.637279510498049],["▁mesura",-13.637284278869627],["▁Zeiten",-13.637286186218262],["scrição",-13.637292861938477],["プレイ",-13.637325286865234],["するか",-13.637331008911133],["வற்ற",-13.637347221374512],["▁根據",-13.637347221374512],["▁адзнач",-13.637372016906738],["წყვე",-13.637381553649902],["basis",-13.637391090393066],["▁Bölüm",-13.637395858764648],["▁qadar",-13.637402534484863],["تسو",-13.637428283691406],["▁slobodno",-13.637453079223633],["▁Tourism",-13.637465476989746],["▁משפטי",-13.637478828430176],["▁فاز",-13.637478828430176],["▁գրել",-13.63748264312744],["▁விதி",-13.637495994567873],["▁введен",-13.63751983642578],["kõr",-13.637578964233398],["stavili",-13.637608528137209],["ئیے",-13.637609481811523],["▁imobil",-13.637609481811523],["▁Əl",-13.637617111206056],["İYE",-13.637618064880373],["رست",-13.637624740600586],["ናል፡፡",-13.6376314163208],["▁חצי",-13.637639045715332],["▁চাল",-13.637639045715332],["▁деректер",-13.637655258178713],["▁შეგ",-13.637656211853027],["▁Tradu",-13.637677192687988],["▁pročita",-13.637697219848633],["省委",-13.637699127197266],["▁الاتصال",-13.63771915435791],["▁rind",-13.637761116027832],["▁تىر",-13.6
37768745422363],["▁алатын",-13.637782096862791],["▁gyermekek",-13.637789726257324],["ستطيع",-13.637803077697754],["▁যুক্তরাষ্ট্র",-13.637812614440918],["лька",-13.63782024383545],["挡",-13.63783359527588],["贺",-13.637835502624512],["▁ቃለ",-13.637837409973145],["▁entsprechende",-13.637845039367676],["▁франц",-13.637860298156738],["蓮",-13.637860298156738],["▁етаж",-13.637866020202637],["ອິ",-13.637868881225586],["▁presentat",-13.637877464294434],["激励",-13.637877464294434],["簿",-13.637882232666016],["返済",-13.637882232666016],["土耳其",-13.63788604736328],["épített",-13.63788890838623],["សំខាន់",-13.637893676757812],["▁učinkovitost",-13.637894630432127],["즘",-13.637898445129396],["ປັບປຸງ",-13.637900352478027],["▁veebruar",-13.637900352478027],["▁gjithçka",-13.637901306152344],["▁melepaskan",-13.637901306152344],["▁γίνονται",-13.637901306152344],["▁Доколку",-13.637901306152344],["▁осигура",-13.637901306152344],["▁рішень",-13.637901306152344],["▁цалкам",-13.637901306152344],["▁घण्टा",-13.637901306152344],["▁ඡායාරූප",-13.637901306152344],["▁ዶላር",-13.637901306152344],["▁숙박",-13.637901306152344],["▁Botëror",-13.63790225982666],["▁kewajiban",-13.63790225982666],["▁شەرقىي",-13.637903213500977],["▁изпраща",-13.637904167175291],["▁ಪರಿಹಾರ",-13.637904167175291],["▁atbilstoši",-13.637906074523926],["▁பிரபல",-13.637906074523926],["공무원",-13.637906074523926],["bescherming",-13.63790798187256],["▁Rodrigo",-13.63790798187256],["▁Simone",-13.637908935546877],["▁dúbida",-13.637909889221191],["▁समाप्त",-13.637909889221191],["▁AIR",-13.637910842895508],["▁قضایی",-13.63791275024414],["▁Fußball",-13.637913703918455],["装置",-13.637914657592772],["▁corn",-13.63791847229004],["▁pomóc",-13.637924194335938],["▁rahvus",-13.637924194335938],["▁אריין",-13.637930870056152],["窗口",-13.637931823730469],["▁إنتاج",-13.637935638427734],["▁значително",-13.637939453125],["▁Секој",-13.63794231414795],["▁опубликован",-13.637943267822266],["▁ಇಂದಿನ",-13.637944221496582],["▁दाबी",-13.637954711914062],["▁गंगा",-13.637956619262695],["▁ശിക്ഷ",-13.637962341308594],["信念",-13.637965202331545],["▁Bow",-13.63796615600586],["▁വീട്ടില്",-13.637967109680176],["▁يتعلق",-13.63797664642334],["▁മുന്നില്",-13.637978553771973],["▁Rabatt",-13.637982368469238],["තක",-13.63798713684082],["▁그것은",-13.63799285888672],["isci",-13.637996673583984],["▁nekünk",-13.6379976272583],["bama",-13.638016700744627],["▁aplicaciones",-13.638019561767578],["▁освои",-13.63802719116211],["▁narahat",-13.638031959533691],["▁জেনে",-13.638049125671388],["▁posiadają",-13.638055801391602],["िरहेका",-13.63807201385498],["▁ככה",-13.638072967529297],["قمة",-13.63807773590088],["▁cillum",-13.638089179992676],["▁Romeo",-13.638093948364258],["▁жената",-13.638094902038574],["▁통한",-13.638099670410156],["▁चलते",-13.638103485107422],["▁Halamang",-13.638120651245115],["овина",-13.63812255859375],["▁লক্ষ্য",-13.638128280639648],["▁ואין",-13.638155937194824],["▁upad",-13.638188362121582],["▁kinacho",-13.638198852539062],["situasjon",-13.638205528259276],["▁програму",-13.638206481933594],["incontro",-13.638223648071287],["३७",-13.638233184814451],["▁пуно",-13.638260841369627],["▁ks",-13.638282775878906],["▁Polonia",-13.638300895690918],["PIC",-13.638303756713867],["ភ្ល",-13.638307571411133],["▁Schol",-13.638310432434082],["▁решить",-13.638313293457031],["սու",-13.638341903686523],["▁vorbit",-13.638346672058104],["▁nalaze",-13.638351440429688],["banka",-13.638355255126951],["▁динамик",-13.638361930847168],["▁سین",-13.638367652893066],["▁yazılım",-13.63838005065918],["▁затвердження",-13.638381958007812],[
"▁Ου",-13.638382911682127],["zentr",-13.638384819030762],["▁поїзд",-13.638386726379396],["▁ortam",-13.638412475585938],["fugl",-13.638421058654783],["даюць",-13.638422966003418],["IEC",-13.638435363769531],["▁новыя",-13.638442993164062],["авыя",-13.638444900512695],["▁Honey",-13.638453483581545],["▁линија",-13.6384916305542],["▁சின்ன",-13.638511657714844],["ोस",-13.638524055480955],["nämnden",-13.638535499572754],["▁gila",-13.638538360595703],["schuss",-13.63857078552246],["કોમ",-13.63858127593994],["▁başlar",-13.638586044311523],["▁entradas",-13.63859748840332],["▁запіс",-13.638601303100586],["िण",-13.638604164123535],["▁Statist",-13.63861083984375],["não",-13.638644218444824],["▁butikken",-13.63864517211914],["▁Ті",-13.638653755187988],["ಮಲ",-13.638677597045898],["▁Grupa",-13.638701438903809],["▁Pasti",-13.638712882995604],["実感",-13.638718605041504],["teller",-13.638727188110352],["▁hwyl",-13.638752937316896],["▁articolul",-13.63876724243164],["▁Мек",-13.638778686523438],["▁cancella",-13.63878345489502],["ෙමින්",-13.638790130615234],["льной",-13.638811111450195],["▁ARM",-13.638819694519045],["▁Army",-13.63882541656494],["▁міг",-13.638826370239258],["tuminen",-13.638833999633787],["▁zprávy",-13.638838768005373],["▁scaun",-13.638863563537598],["▁zgjedhje",-13.638870239257812],["▁ਜਾਂਦੀ",-13.638870239257812],["चार्ज",-13.638873100280762],["ไม่เป็น",-13.638896942138672],["なども",-13.63889980316162],["是可以",-13.63890266418457],["▁priču",-13.638906478881836],["▁Итак",-13.638937950134276],["რულ",-13.638945579528809],["▁meteo",-13.638945579528809],["▁Ruu",-13.638951301574709],["▁publicitat",-13.638958930969238],["▁Versuch",-13.639007568359377],["▁대전",-13.639023780822754],["機票",-13.639023780822754],["▁Bwana",-13.639042854309082],["来て",-13.639043807983398],["▁aktyvi",-13.639073371887209],["▁Umgang",-13.639076232910156],["Mən",-13.639100074768066],["▁Strip",-13.639114379882812],["▁детал",-13.639131546020508],["▁humbur",-13.639150619506836],["に合わせて",-13.639162063598633],["▁Filmen",-13.639177322387695],["පෑ",-13.63918113708496],["قلة",-13.639182090759276],["ധാ",-13.639188766479492],["▁kaming",-13.639189720153809],["▁berjochten",-13.639206886291504],["▁0.2",-13.639219284057615],["▁අධික",-13.639278411865234],["▁visszatér",-13.639283180236816],["▁ошо",-13.639288902282717],["觀看",-13.639293670654297],["▁เหรียญ",-13.639294624328612],["▁Jää",-13.63929843902588],["▁besonderen",-13.63931369781494],["બ્લ",-13.639330863952637],["▁vaikutus",-13.639368057250977],["▁çocukların",-13.639369010925291],["▁Katedr",-13.639395713806152],["小學",-13.639426231384276],["6-1",-13.639447212219238],["▁مذہب",-13.639449119567873],["yordu",-13.639461517333984],["▁trø",-13.639470100402832],["▁مغل",-13.639472007751465],["laşdı",-13.639486312866213],["mesti",-13.639488220214844],["तया",-13.63951301574707],["Wer",-13.639580726623535],["▁показывает",-13.639593124389648],["▁givet",-13.63959503173828],["▁lepingu",-13.639616012573242],["autó",-13.63963222503662],["ଚାର",-13.63963222503662],["ည်",-13.63963222503662],["венци",-13.639656066894531],["▁meddelande",-13.639662742614746],["rì",-13.63967990875244],["フォー",-13.639681816101074],["▁naziva",-13.63968563079834],["▁tarika",-13.639689445495604],["▁Ката",-13.639705657958984],["▁kreis",-13.639728546142578],["कत",-13.639731407165527],["▁potrzebuje",-13.639747619628906],["次第",-13.639752388000488],["strada",-13.639777183532717],["▁hàm",-13.639782905578612],["▁chcela",-13.639802932739258],["▁ภาค",-13.639819145202637],["▁Sibiu",-13.639820098876951],["గీత",-13.639825820922852],["▁stemmer",-13.6398763656
6162],["▁aurka",-13.639904022216797],["举措",-13.639968872070312],["ທຶນ",-13.63999366760254],["რბ",-13.64000415802002],["▁Արամ",-13.64000415802002],["忙碌",-13.64000415802002],["亭",-13.640007019042969],["进来",-13.640007972717283],["ኗ",-13.640015602111816],["▁קאר",-13.640016555786133],["糖尿病",-13.640018463134766],["检验",-13.640023231506348],["新たな",-13.640026092529297],["hash",-13.640029907226562],["恵",-13.640033721923828],["▁Cymdeithas",-13.640037536621094],["▁WAZIRI",-13.640037536621094],["▁göteborg",-13.640037536621094],["▁iechyd",-13.640037536621094],["▁samhället",-13.640037536621094],["▁Ωστόσο",-13.640037536621094],["▁Анткени",-13.640037536621094],["▁приобрета",-13.640037536621094],["▁دهشتگرد",-13.640037536621094],["▁अतिरिक्त",-13.640037536621094],["▁menuntut",-13.64003849029541],["▁амерички",-13.64003849029541],["▁расмий",-13.64003849029541],["حتياجات",-13.64004135131836],["▁эхэлсэн",-13.640042304992676],["▁դիրք",-13.640043258666992],["▁innovación",-13.640045166015623],["трымлівае",-13.640048027038574],["▁పెరుగు",-13.640048027038574],["▁ermöglicht",-13.640049934387209],["קנים",-13.640050888061523],["▁чекор",-13.640050888061523],["嘉義",-13.640052795410156],["▁rəisi",-13.640054702758787],["▁Mental",-13.640057563781738],["▁ماہرین",-13.640057563781738],["법인",-13.640058517456056],["▁дазваляе",-13.640061378479004],["▁sheegtay",-13.64006233215332],["▁ludzki",-13.640063285827637],["▁vojo",-13.640063285827637],["▁käytön",-13.640066146850586],["沢山",-13.64006805419922],["▁Lääne",-13.640073776245115],["▁ملوث",-13.640074729919434],["▁tēmu",-13.640077590942385],["▁ಹೇಳಿದ್ದಾರೆ",-13.640079498291016],["▁ಅದಕ್ಕೆ",-13.640082359313965],["▁թվական",-13.64008331298828],["▁технології",-13.640092849731444],["▁అన్నాడు",-13.640098571777344],["utvikling",-13.64009952545166],["▁libertà",-13.640107154846191],["▁звільн",-13.640107154846191],["JÄ",-13.640117645263672],["プレ",-13.640118598937988],["▁qarax",-13.640122413635254],["▁shaxslar",-13.640125274658203],["▁पूछा",-13.64012622833252],["▁çeşit",-13.64013671875],["▁աշխատել",-13.640138626098633],["제를",-13.64014720916748],["నాడు",-13.64015007019043],["▁bijdrage",-13.640154838562012],["を選ぶ",-13.640158653259276],["શક્તિ",-13.64016056060791],["▁रहेगा",-13.640179634094238],["▁Dinamo",-13.640180587768556],["▁τρία",-13.640183448791504],["▁شہریوں",-13.640185356140137],["▁역할을",-13.640191078186035],["▁भाषण",-13.640196800231934],["▁yağış",-13.640219688415527],["▁المحكمة",-13.64022445678711],["▁യുവ",-13.64022731781006],["▁اداکار",-13.640254020690918],["Ու",-13.640277862548828],["huolto",-13.640278816223145],["▁മുഖം",-13.64027976989746],["▁ຖະ",-13.64029312133789],["▁aibă",-13.64029598236084],["▁valodas",-13.640302658081056],["▁النادي",-13.640308380126951],["だって",-13.640338897705078],["▁kadhaa",-13.640345573425291],["บทความ",-13.64034938812256],["▁торг",-13.640350341796877],["▁sví",-13.640351295471191],["▁палата",-13.640365600585938],["NEN",-13.64036750793457],["치를",-13.64037036895752],["▁паслуг",-13.64037799835205],["▁contido",-13.640387535095217],["прошу",-13.640437126159668],["▁плав",-13.6404390335083],["▁desmit",-13.64044189453125],["▁dolgot",-13.640450477600098],["့်",-13.640474319458008],["▁изследвания",-13.640484809875488],["▁រៀន",-13.640487670898438],["terie",-13.640491485595703],["▁oktyabr",-13.640494346618652],["▁లేదని",-13.640499114990234],["rechnen",-13.64050579071045],["▁земли",-13.640510559082031],["▁سورية",-13.640517234802246],["ಯಲ್ಲ",-13.64052391052246],["▁ആക്രമണ",-13.64052391052246],["▁büntet",-13.640532493591309],["▁안정",-13.640532493591309],["αστικό",-13.640559196472168],["
Tamil",-13.640569686889648],["▁vurğu",-13.640572547912598],["▁कसै",-13.64059352874756],["other",-13.640604972839355],["thā",-13.640605926513672],["▁tini",-13.640621185302734],["▁aperis",-13.640633583068848],["▁chodí",-13.640634536743164],["▁измене",-13.640636444091797],["ИМА",-13.640637397766112],["▁Dauer",-13.640645027160645],["▁दौर",-13.640649795532228],["RAC",-13.640666961669922],["▁nyere",-13.640673637390137],["fully",-13.640676498413086],["▁саналат",-13.640694618225098],["▁ilmesty",-13.640697479248049],["▁خطوط",-13.640706062316896],["▁218",-13.640708923339844],["kumen",-13.640739440917969],["▁citā",-13.640740394592283],["▁осим",-13.640754699707031],["▁Cùng",-13.64077091217041],["▁HARI",-13.640806198120115],["živjeti",-13.64081573486328],["▁ilçesinde",-13.64084815979004],["▁dolgozó",-13.640851974487305],["▁غار",-13.640871047973633],["passion",-13.640873908996582],["的一切",-13.640881538391112],["▁Afrik",-13.640886306762695],["ਅਲ",-13.64089012145996],["▁ბაღ",-13.64089584350586],["فرض",-13.640907287597656],["▁pieces",-13.640921592712402],["ељ",-13.640928268432615],["▁ansin",-13.640965461730955],["▁skid",-13.640969276428224],["▁Заң",-13.640979766845703],["▁تیل",-13.640983581542969],["▁straně",-13.640986442565918],["▁sklepu",-13.640995979309082],["שתו",-13.641003608703612],["▁obdela",-13.641011238098145],["▁cards",-13.641021728515623],["ИНА",-13.641031265258787],["फु",-13.641033172607422],["443",-13.641045570373535],["▁सरकारने",-13.641050338745115],["▁önskar",-13.64112377166748],["▁zaveda",-13.641124725341797],["İn",-13.641131401062012],["шых",-13.641139030456545],["▁udføre",-13.64115047454834],["étranger",-13.641155242919922],["ტვ",-13.641161918640137],["▁individuo",-13.64116668701172],["▁maf",-13.641172409057615],["ંબ",-13.641173362731934],["▁трка",-13.641247749328612],["ለይ",-13.641267776489258],["▁sesiapa",-13.64126968383789],["መኪና",-13.641310691833496],["ብረት",-13.641327857971191],["▁MX",-13.641334533691406],["▁kalitesi",-13.641334533691406],["▁இல்லாத",-13.641345977783203],["▁чарба",-13.641366004943848],["▁sjø",-13.641371726989746],["пушти",-13.64138412475586],["▁ünvan",-13.64142608642578],["ፈታ",-13.64143180847168],["▁getiren",-13.641441345214844],["▁loopt",-13.641451835632324],["▁aquelas",-13.64145851135254],["બળ",-13.641472816467283],["▁účely",-13.641485214233398],["əbə",-13.64148998260498],["▁දමා",-13.64149284362793],["▁offering",-13.641502380371094],["▁гүл",-13.641520500183104],["▁Pelajar",-13.64153003692627],["ificazione",-13.641542434692385],["ষ্",-13.64157009124756],["عوام",-13.641573905944824],["hage",-13.64157772064209],["▁stavební",-13.641583442687988],["▁šķ",-13.641631126403809],["▁projektov",-13.641646385192873],["ικη",-13.641647338867188],["▁Vaša",-13.641648292541504],["▁кайрыл",-13.641674041748049],["णि",-13.641732215881348],["▁здійс",-13.641735076904297],["▁ideer",-13.64173698425293],["▁nefa",-13.641746520996094],["的做法",-13.641748428344728],["▁Позна",-13.641768455505373],["新品",-13.641791343688965],["hjul",-13.641797065734863],["▁bốn",-13.641803741455078],["ûnder",-13.64180850982666],["▁φύση",-13.641810417175291],["ционни",-13.64181137084961],["qad",-13.64181900024414],["▁Hydro",-13.64182186126709],["▁Eliza",-13.64182472229004],["▁додава",-13.641826629638672],["▁GRAN",-13.641830444335938],["ဴး",-13.641836166381836],["▁کارگردان",-13.64184284210205],["ferencia",-13.641849517822266],["▁Bros",-13.641889572143556],["▁العسكري",-13.641912460327148],["▁지지",-13.641948699951172],["胆",-13.641948699951172],["▁اذ",-13.641949653625488],["വായ",-13.641965866088867],["িব",-13.641966819763184],
["▁nohy",-13.641987800598145],["▁לקבלת",-13.641987800598145],["▁stiller",-13.64198875427246],["いう",-13.641990661621094],["▁nitko",-13.64201831817627],["جە",-13.642037391662598],["▁изп",-13.64207649230957],["အချက်အလက်",-13.642094612121582],["▁pacjent",-13.642117500305176],["ékeny",-13.642131805419922],["摔",-13.642136573791504],["袖",-13.64214038848877],["租赁",-13.642155647277832],["巧克力",-13.642159461975098],["໋",-13.642176628112791],["▁məsləhət",-13.642178535461426],["therapeut",-13.642179489135742],["бактылуу",-13.642179489135742],["ዌ",-13.642179489135742],["▁conexión",-13.642179489135742],["▁qershor",-13.642179489135742],["▁ravishda",-13.642179489135742],["▁seterusnya",-13.642179489135742],["▁περιπτώσεις",-13.642179489135742],["▁Сооронбай",-13.642179489135742],["▁изпълнява",-13.642179489135742],["▁Դավիթ",-13.642179489135742],["▁هڅه",-13.642179489135742],["▁ਸਾਬਕਾ",-13.642179489135742],["▁შეეხება",-13.642179489135742],["▁Ponadto",-13.64218044281006],["▁известен",-13.64218044281006],["▁ਅਨੁਸਾਰ",-13.64218044281006],["▁mponina",-13.642183303833008],["▁Необходимо",-13.642183303833008],["▁نگهداری",-13.642184257507324],["▁волонтер",-13.642186164855955],["▁француски",-13.64218807220459],["เซ็น",-13.642189025878906],["▁könnyű",-13.642189979553224],["▁poslo",-13.64219093322754],["▁עורך",-13.642192840576172],["งบประมาณ",-13.64219570159912],["▁Konflikt",-13.642197608947754],["ിനിടെ",-13.642200469970703],["▁रात्री",-13.64220142364502],["▁backup",-13.642202377319336],["▁великі",-13.642203330993652],["▁престиж",-13.642207145690918],["▁жасы",-13.642215728759766],["▁структуры",-13.642217636108398],["▁Gates",-13.642223358154297],["▁tietokone",-13.64222812652588],["▁tokio",-13.64223575592041],["jimą",-13.642239570617676],["▁படங்கள்",-13.642245292663574],["kovou",-13.64225959777832],["▁vraća",-13.642266273498535],["lığına",-13.642268180847168],["▁jafnvel",-13.6422700881958],["▁минулого",-13.642284393310549],["sarjan",-13.642285346984863],["▁bezoekers",-13.642300605773926],["▁туруп",-13.642300605773926],["▁ඉල්ලීම",-13.642312049865724],["▁exempelvis",-13.642319679260254],["▁Ndërsa",-13.642324447631836],["▁pálya",-13.642325401306152],["талды",-13.642328262329102],["Ran",-13.642345428466797],["▁дешава",-13.64236545562744],["▁Hobby",-13.642366409301758],["▁پورے",-13.642373085021973],["▁коротко",-13.642374992370604],["▁costos",-13.642375946044922],["▁trotz",-13.642376899719238],["▁düşünüyorum",-13.642383575439451],["▁моего",-13.64238452911377],["▁puerta",-13.642386436462402],["▁brigad",-13.642391204833984],["thur",-13.642411231994627],["دىڭ",-13.642436027526855],["▁vaovao",-13.642443656921388],["دین",-13.64245319366455],["គ្រប់",-13.642457962036133],["▁соғыс",-13.64245891571045],["▁volontari",-13.642460823059082],["分泌",-13.642476081848145],["grepp",-13.642488479614258],["очные",-13.64250373840332],["AIR",-13.642504692077637],["提起",-13.642505645751951],["▁यासाठी",-13.642512321472168],["▁sjanse",-13.642522811889648],["gøre",-13.642528533935549],["▁ରାଜ୍ୟରେ",-13.642529487609863],["▁जवळ",-13.642533302307127],["▁provoc",-13.642541885375977],["▁þín",-13.642549514770508],["▁galime",-13.642550468444824],["▁априла",-13.64255142211914],["▁mahad",-13.642556190490724],["entusiast",-13.642579078674316],["çî",-13.642590522766112],["▁lebar",-13.642592430114746],["▁mild",-13.642601013183594],["▁Aristo",-13.642633438110352],["Οι",-13.642655372619627],["▁pensado",-13.64266300201416],["▁реп",-13.642664909362791],["卒業",-13.642666816711426],["▁Habita",-13.642678260803224],["हृ",-13.642698287963867],["▁Αι",-13.642701148986816],["mųjų",-13.64
271068572998],["ेच्या",-13.64271354675293],["簡單的",-13.642739295959473],["▁собственник",-13.64276885986328],["ज्योत",-13.642772674560549],["▁brukere",-13.642783164978027],["▁Mikäli",-13.64279079437256],["▁banane",-13.642802238464355],["▁seçimler",-13.642826080322266],["▁ಜಾರಿ",-13.642834663391112],["▁پیشرفته",-13.642840385437012],["▁diretta",-13.642851829528809],["▁عش",-13.64285373687744],["▁gelişim",-13.642900466918944],["▁ಗಾಯ",-13.642914772033691],["▁pogut",-13.642927169799805],["layıp",-13.642931938171388],["▁məsələsi",-13.642946243286133],["werf",-13.64294719696045],["ніцтва",-13.64295768737793],["▁جیسا",-13.64296531677246],["▁обзир",-13.642967224121094],["▁පියා",-13.642974853515623],["ებებს",-13.643001556396484],["▁խորհրդարան",-13.643004417419434],["▁આવતા",-13.643007278442385],["▁adjust",-13.643024444580078],["Start",-13.64302921295166],["▁нысан",-13.64303207397461],["可見",-13.643038749694824],["好玩",-13.643048286437988],["▁ມະ",-13.64305019378662],["▁actualidade",-13.643068313598633],["▁معنای",-13.643099784851074],["▁средине",-13.643111228942873],["のだが",-13.643115997314451],["▁populära",-13.643120765686035],["▁Samas",-13.643121719360352],["▁ຫົວຫນ້າ",-13.643122673034668],["▁Peace",-13.643131256103516],["-2007",-13.643143653869627],["▁hösten",-13.643147468566896],["ল্ল",-13.643148422241213],["დებული",-13.643162727355955],["▁настави",-13.64317226409912],["ポー",-13.64317512512207],["▁ສຽງ",-13.643200874328612],["મ્બ",-13.643210411071776],["търс",-13.643242835998535],["▁internationalen",-13.643256187438965],["▁Основна",-13.643256187438965],["▁tutma",-13.643270492553713],["шня",-13.643280029296877],["bincang",-13.64328670501709],["dají",-13.643293380737305],["▁geliştirme",-13.643296241760254],["▁كەل",-13.64332103729248],["റാം",-13.643332481384276],["чө",-13.643343925476074],["▁সমাজ",-13.64337921142578],["▁þessar",-13.643383026123049],["▁звучи",-13.643402099609377],["▁عمد",-13.64341640472412],["นุ",-13.643427848815918],["▁sual",-13.643434524536133],["SAK",-13.64345645904541],["зелен",-13.643463134765623],["detta",-13.643465042114258],["▁presă",-13.64346694946289],["幾天",-13.643495559692385],["коммун",-13.64350700378418],["▁ukazuje",-13.643526077270508],["▁Bath",-13.643534660339355],["ทิศ",-13.643537521362305],["aalaha",-13.643549919128418],["ՍՏ",-13.64355182647705],["borð",-13.643555641174316],["▁autore",-13.643567085266112],["نشط",-13.643577575683594],["കിട",-13.643577575683594],["ጾ",-13.643580436706545],["וטר",-13.64358615875244],["fikia",-13.643594741821287],["▁مذهبی",-13.643610000610352],["άζονται",-13.643621444702148],["મ્",-13.643624305725098],["フル",-13.643633842468262],["▁Efekt",-13.643646240234377],["▁आते",-13.643654823303224],["bremen",-13.643669128417969],["▁schuld",-13.643671989440918],["▁sådana",-13.643686294555664],["จาน",-13.643692016601562],["▁militant",-13.643693923950195],["บัว",-13.643694877624512],["▁HAM",-13.64370059967041],["▁pól",-13.643709182739258],["▁svarer",-13.643712997436523],["▁plné",-13.64371395111084],["▁обман",-13.643718719482422],["▁Tylko",-13.643733024597168],["▁နေ",-13.6437349319458],["▁роду",-13.64374828338623],["beskrivning",-13.643754005432127],["нуты",-13.643756866455078],["▁රයි",-13.643757820129396],["latura",-13.643768310546877],["gondol",-13.643771171569824],["▁inspirera",-13.64377784729004],["▁якими",-13.64377784729004],["ग्राम",-13.643792152404783],["▁справы",-13.643799781799316],["▁сунушта",-13.643841743469238],["▁förlora",-13.643872261047363],["▁Result",-13.64387321472168],["Школ",-13.643891334533691],["▁teritorija",-13.643908500671388],["▁anatomi",-13.6
43918991088867],["работодател",-13.643945693969728],["▁Волин",-13.643953323364258],["▁دیو",-13.643956184387209],["аралық",-13.643975257873535],["forsker",-13.643978118896484],["▁ໃຫຍ່",-13.644030570983888],["▁තමන්ට",-13.644065856933594],["▁Nogle",-13.644073486328123],["وريا",-13.644079208374023],["▁పాప",-13.644084930419922],["реждане",-13.644094467163086],["чдын",-13.644119262695312],["ריב",-13.64415454864502],["368",-13.644171714782717],["େଶ",-13.644177436828612],["▁ആഗ്രഹ",-13.64418888092041],["▁дж",-13.644213676452637],["음을",-13.644230842590332],["▁ସ୍ଥାନ",-13.644231796264648],["▁sesso",-13.644241333007812],["สร",-13.644250869750977],["斥",-13.64425277709961],["dingen",-13.644254684448242],["▁Kahit",-13.644259452819824],["queira",-13.64426040649414],["▁ágy",-13.644261360168455],["▁singlar",-13.64427089691162],["▁scrivere",-13.64427661895752],["畏",-13.644280433654783],["▁envolv",-13.644283294677734],["廃",-13.644305229187012],["▁إس",-13.644306182861328],["إقليم",-13.644307136535645],["騙",-13.644307136535645],["การเรียนรู้",-13.64431381225586],["懶",-13.644314765930176],["▁саясаты",-13.644315719604492],["కపోతే",-13.644323348999023],["กระบวนการ",-13.64432430267334],["តំណាង",-13.64432430267334],["▁Wasaaradda",-13.644325256347656],["▁begrijp",-13.644325256347656],["▁datblygu",-13.644325256347656],["▁tyvärr",-13.644325256347656],["▁tādēļ",-13.644325256347656],["▁začátku",-13.644325256347656],["▁ευθύνη",-13.644325256347656],["▁բաժան",-13.644325256347656],["▁लॉन्च",-13.644325256347656],["▁পাকিস্তান",-13.644325256347656],["▁ଦିବସ",-13.644325256347656],["▁받으",-13.644325256347656],["▁Fórum",-13.644326210021973],["▁Rudolf",-13.644326210021973],["▁olcsó",-13.644326210021973],["▁osôb",-13.644326210021973],["▁кандыдат",-13.644326210021973],["▁ਪੂਰੀ",-13.644326210021973],["▁저녁",-13.644326210021973],["阅读全文",-13.644326210021973],["▁bármilyen",-13.644328117370604],["▁magiging",-13.644328117370604],["▁исключением",-13.644328117370604],["▁bhaineann",-13.644329071044922],["▁αιώνα",-13.644330024719238],["▁vuxna",-13.644332885742188],["▁මහජන",-13.644332885742188],["mønster",-13.64433479309082],["▁Arnold",-13.644335746765137],["▁dukungan",-13.64434814453125],["▁естественно",-13.64434814453125],["▁العليا",-13.64434814453125],["▁istorijos",-13.6443510055542],["▁sīkdatnes",-13.64435577392578],["▁Зворотн",-13.644363403320312],["▁મહત્વ",-13.644363403320312],["▁ഇപ്പോൾ",-13.644364356994627],["▁mõlema",-13.644365310668944],["▁жителей",-13.644366264343262],["▁மாட்ட",-13.644369125366213],["▁artiklar",-13.644372940063477],["број",-13.644380569458008],["▁Баб",-13.644389152526855],["▁מתאים",-13.644389152526855],["▁жеткен",-13.644401550292969],["▁Hannover",-13.644411087036133],["▁kiemelt",-13.644415855407717],["▁пътуване",-13.644416809082031],["▁Тен",-13.644417762756348],["▁sogn",-13.64441967010498],["▁freagra",-13.644421577453612],["ପାରିବେ",-13.644436836242676],["▁изложен",-13.644444465637209],["იონ",-13.644457817077637],["▁fuqarolar",-13.644461631774902],["▁еколог",-13.644461631774902],["▁әдебиет",-13.644464492797852],["▁mangel",-13.644485473632812],["▁Anyway",-13.64450454711914],["職員",-13.644505500793455],["envoyer",-13.644509315490724],["plab",-13.64451026916504],["▁سبحانه",-13.644513130187988],["ాది",-13.644515991210938],["▁изграден",-13.644522666931152],["▁Çağ",-13.644526481628418],["НП",-13.644532203674316],["▁орден",-13.64453411102295],["▁અંત",-13.644566535949709],["▁לימודי",-13.644577980041504],["▁اجل",-13.644598960876465],["rachadh",-13.644606590270996],["▁বস",-13.644614219665527],["ناز",-13.644627571105955],["▁nuostab",-13
.64462947845459],["ισμα",-13.644635200500488],["▁იგივე",-13.644662857055664],["▁դեկտեմբերի",-13.644665718078612],["▁contracte",-13.644667625427246],["leyerek",-13.64466953277588],["▁другую",-13.64466953277588],["▁developed",-13.64467716217041],["▁مطلق",-13.644681930541992],["kinder",-13.644691467285156],["▁szépen",-13.644713401794434],["還能",-13.6447172164917],["▁Cerca",-13.644750595092772],["ΡΕ",-13.644755363464355],["štine",-13.644768714904783],["▁കാലം",-13.644776344299316],["ದೇವ",-13.644782066345217],["▁buenos",-13.644792556762695],["▁mennessä",-13.644792556762695],["▁האח",-13.644794464111328],["ակն",-13.64480209350586],["äisen",-13.64481258392334],["年後",-13.644815444946287],["nefndar",-13.644821166992188],["▁portion",-13.64482593536377],["▁mampi",-13.644843101501465],["▁მიმ",-13.64484405517578],["ेच",-13.644852638244627],["▁صلح",-13.644853591918944],["▁impegna",-13.644869804382324],["▁توضیحات",-13.64487075805664],["GLE",-13.64488410949707],["ევის",-13.644887924194336],["ங்கை",-13.644889831542969],["▁מוכר",-13.644895553588867],["▁מאו",-13.644896507263184],["ంతి",-13.644912719726562],["▁Procura",-13.644927024841309],["JET",-13.644930839538574],["也沒有",-13.64493179321289],["▁लागले",-13.644960403442385],["▁жарым",-13.644964218139648],["TUT",-13.644983291625977],["iola",-13.644991874694824],["krot",-13.64499568939209],["blich",-13.64500904083252],["▁316",-13.64501953125],["▁ప్రచారం",-13.645037651062012],["▁włosy",-13.645038604736328],["иком",-13.64504051208496],["▁lastnik",-13.645042419433594],["▁contenuto",-13.645049095153809],["▁gemäß",-13.645055770874023],["▁snima",-13.645058631896973],["гуз",-13.645059585571287],["▁svaka",-13.645064353942873],["识别",-13.645071983337402],["▁वही",-13.645085334777832],["pelit",-13.645095825195312],["していると",-13.645098686218262],["▁schimbare",-13.645102500915527],["spiele",-13.64511013031006],["▁чланак",-13.645116806030272],["▁(37)",-13.64513111114502],["▁SEG",-13.645139694213867],["▁Gera",-13.645158767700195],["▁tutvust",-13.645201683044434],["قضايا",-13.64520263671875],["▁অন",-13.64522933959961],["љев",-13.645240783691406],["▁leyfi",-13.645241737365724],["▁Русе",-13.64524269104004],["▁divertido",-13.645263671875],["▁vlastné",-13.645286560058594],["rakstā",-13.645288467407228],["▁470",-13.645297050476074],["▁Към",-13.645305633544922],["০৯",-13.64531421661377],["▁eläin",-13.645318984985352],["บางคน",-13.645322799682615],["▁Maaari",-13.645323753356934],["▁375",-13.6453275680542],["-36",-13.645334243774414],["लम",-13.645336151123049],["▁ខណ្ឌ",-13.64534854888916],["▁өнөр",-13.645358085632324],["▁potrebne",-13.64535903930664],["▁ئېلى",-13.64536190032959],["▁аднак",-13.645365715026855],["▁Neder",-13.645370483398438],["▁зору",-13.645370483398438],["▁erineva",-13.645374298095703],["▁добио",-13.645392417907717],["▁բար",-13.645394325256348],["▁жатканын",-13.645402908325195],["Hotels",-13.645413398742676],["▁inflama",-13.645415306091309],["▁suomen",-13.645438194274902],["inzi",-13.6454496383667],["▁ទំនាក់ទំនង",-13.645458221435549],["▁haqqı",-13.645467758178713],["дарға",-13.64548397064209],["▁retorno",-13.645490646362305],["▁vro",-13.64549160003662],["▁designet",-13.645529747009276],["actif",-13.645548820495604],["▁Fö",-13.645549774169922],["плачу",-13.645569801330566],["▁payer",-13.64559268951416],["▁parlamentare",-13.645602226257324],["▁יקר",-13.6456298828125],["ڪڻ",-13.645654678344728],["▁ettiğini",-13.645658493041992],["▁அவரை",-13.645659446716309],["▁اسين",-13.64566707611084],["▁वाटते",-13.645674705505373],["▁უზრუნველყო",-13.645695686340332],["▁படித்த",-13.64569664001
4648],["▁dk",-13.645727157592772],["▁ប្រចាំ",-13.645734786987305],["아웃",-13.64573574066162],["પલ",-13.645739555358888],["▁כוס",-13.645764350891112],["▁כמ",-13.645811080932615],["7.00",-13.6458158493042],["▁سلا",-13.645824432373049],["▁белгіле",-13.645837783813477],["ጥላ",-13.64584732055664],["▁사회적",-13.645854949951172],["▁okraj",-13.645859718322754],["▁nonché",-13.6458740234375],["▁Mikel",-13.64588451385498],["סטו",-13.645885467529297],["▁quién",-13.645915031433104],["▁TEK",-13.645936012268066],["graphique",-13.645960807800291],["▁реконстру",-13.645971298217772],["ทัพ",-13.645999908447266],["condition",-13.646005630493164],["▁спрем",-13.646005630493164],["▁Arme",-13.646008491516112],["▁universitari",-13.646010398864746],["ည့္",-13.64601707458496],["ذور",-13.646035194396973],["▁stair",-13.646042823791504],["▁detalle",-13.646052360534668],["▁thirrje",-13.646063804626465],["▁dilində",-13.646081924438477],["▁членове",-13.646088600158691],["ದಲ್ಲಿರುವ",-13.646099090576172],["▁hospodár",-13.646126747131348],["zap",-13.64614200592041],["▁maguk",-13.646151542663574],["免疫",-13.646156311035156],["▁bénéficier",-13.646193504333496],["▁russiske",-13.64620304107666],["▁mendatang",-13.64621353149414],["▁cartell",-13.64622402191162],["დოთ",-13.646240234375],["▁ويس",-13.646240234375],["tività",-13.646249771118164],["ىتىش",-13.646249771118164],["▁ခ်စ္",-13.64626407623291],["▁వర్",-13.646316528320312],["tissima",-13.646317481994627],["▁lotta",-13.646322250366213],["שקע",-13.646329879760742],["▁tillhör",-13.646339416503906],["▁Seal",-13.646347045898438],["ებელ",-13.646364212036133],["vonat",-13.64637279510498],["幼儿园",-13.646388053894045],["岗",-13.646397590637209],["漂",-13.646407127380373],["▁зөвлөл",-13.646411895751951],["▁dwy",-13.646425247192385],["tiwa",-13.646428108215332],["लौ",-13.646431922912598],["▁turística",-13.646431922912598],["冲击",-13.646435737609863],["▁fortalte",-13.646437644958496],["▁രംഗത്ത",-13.646438598632812],["狭",-13.646438598632812],["贵州",-13.646446228027344],["וקה",-13.646450996398926],["natur",-13.646455764770508],["ລາຍການ",-13.646458625793455],["geschiedenis",-13.646475791931152],["▁audiovisual",-13.646475791931152],["▁godkänner",-13.646475791931152],["▁hyvinvointi",-13.646475791931152],["▁iglesia",-13.646475791931152],["▁πόλεμο",-13.646475791931152],["▁обеспечить",-13.646475791931152],["▁वैज्ञानिक",-13.646475791931152],["▁ਇਤਿਹਾਸ",-13.646475791931152],["▁ตุลาคม",-13.646475791931152],["▁Nàng",-13.646476745605469],["▁technológia",-13.646476745605469],["▁ανοιχτ",-13.646476745605469],["▁الواحد",-13.646476745605469],["批判",-13.646476745605469],["▁říct",-13.646477699279783],["▁ಅತ್ಯಂತ",-13.646477699279783],["▁스마트폰",-13.646477699279783],["เจาะ",-13.646478652954102],["Chan",-13.646479606628418],["▁blockchain",-13.646479606628418],["▁поэзия",-13.646480560302734],["▁эксплуатации",-13.646482467651367],["▁nisbatan",-13.646483421325684],["▁đỉnh",-13.646483421325684],["▁použiť",-13.646485328674316],["▁կարիք",-13.646486282348633],["▁ئىنسان",-13.646486282348633],["kutu",-13.646490097045898],["▁Cảnh",-13.646491050720217],["▁ଜମି",-13.646491050720217],["▁kviečia",-13.646493911743164],["▁compositor",-13.64649486541748],["▁đế",-13.646498680114746],["キャン",-13.646498680114746],["▁تنظيف",-13.64650821685791],["▁నవ్వ",-13.646512031555176],["▁thaobh",-13.646512985229492],["▁сексуальн",-13.646525382995604],["በሳ",-13.646529197692873],["ຮ້ອງ",-13.64653491973877],["▁Ďalšie",-13.64653491973877],["▁నీటి",-13.64653491973877],["▁požiada",-13.646551132202148],["▁kaybetti",-13.64655303955078],["▁dať",-13.646553993225098],["▁му
зея",-13.64656925201416],["▁Эгер",-13.64657974243164],["سيم",-13.646591186523438],["祖国",-13.646592140197754],["▁أف",-13.64659309387207],["พักผ่อน",-13.646600723266602],["▁erinevaid",-13.64661693572998],["▁SZER",-13.646618843078612],["αϊ",-13.64663028717041],["▁conocimientos",-13.646632194519045],["研究中心",-13.646632194519045],["▁الثورة",-13.646656036376951],["▁مباحث",-13.646663665771484],["▁մամուլի",-13.646669387817385],["вчин",-13.646673202514648],["1987",-13.64667797088623],["▁односа",-13.646679878234863],["清理",-13.64669704437256],["▁Црне",-13.646702766418455],["ഷീ",-13.64670753479004],["▁Rachel",-13.646710395812988],["▁întreb",-13.646714210510254],["▁kujenga",-13.64671516418457],["▁tanınmış",-13.64671516418457],["ЪР",-13.646730422973633],["▁gehouden",-13.646737098693848],["▁chakula",-13.646739959716797],["טות",-13.646742820739746],["givet",-13.646746635437012],["▁właściwości",-13.64675235748291],["Türk",-13.646773338317873],["悔",-13.64679718017578],["▁south",-13.646805763244627],["HTML",-13.646806716918944],["▁مونکي",-13.646815299987791],["ყურებ",-13.646862983703612],["▁अॅप",-13.646868705749512],["▁کرم",-13.646872520446776],["KLA",-13.646880149841309],["რილ",-13.646881103515623],["งวด",-13.64688491821289],["மம்",-13.646889686584473],["ျဖဴ",-13.646889686584473],["▁Objav",-13.646891593933104],["▁clasic",-13.64689826965332],["▁kjole",-13.64690399169922],["schwe",-13.646918296813965],["▁Projekti",-13.646928787231444],["годишната",-13.646931648254396],["▁tarmoq",-13.646943092346191],["ıllı",-13.646944046020508],["▁SERVI",-13.64694595336914],["អំពី",-13.646946907043455],["▁fı",-13.64695644378662],["▁0.3",-13.646960258483888],["▁polisen",-13.646961212158203],["▁tiszta",-13.646981239318848],["▁segueix",-13.646982192993164],["ическата",-13.646986961364746],["ในงาน",-13.647010803222656],["folio",-13.647025108337402],["▁जै",-13.647029876708984],["▁оноос",-13.647031784057615],["▁கட்டு",-13.64704132080078],["▁tukang",-13.647050857543944],["ຜະລິດ",-13.647051811218262],["batas",-13.647062301635742],["▁محلی",-13.64706325531006],["ଜୟ",-13.647071838378906],["▁китай",-13.647077560424805],["▁ตร",-13.647083282470703],["▁correcto",-13.647086143493652],["væg",-13.647089958190918],["▁Молод",-13.64709758758545],["љиви",-13.647128105163574],["▁имеется",-13.64714241027832],["كول",-13.647147178649902],["tarak",-13.647156715393066],["▁خوښ",-13.647183418273926],["▁Woj",-13.647184371948242],["papir",-13.647205352783203],["▁súbory",-13.647214889526367],["صاحب",-13.647257804870604],["▁puterea",-13.64726448059082],["例外",-13.647289276123049],["Ռուսաստան",-13.647303581237791],["Sloven",-13.647316932678224],["▁heild",-13.64732837677002],["▁massiv",-13.647347450256348],["▁sore",-13.64735507965088],["▁challenges",-13.647356033325195],["289",-13.647363662719728],["▁riep",-13.647366523742676],["మల",-13.647371292114258],["▁Glob",-13.647380828857422],["▁darbinieki",-13.64739227294922],["tarëve",-13.647393226623535],["ుకుంది",-13.64740753173828],["▁MGA",-13.647411346435549],["נוף",-13.647418975830078],["▁Baga",-13.647421836853027],["▁nærmeste",-13.647425651550291],["▁посебн",-13.647433280944824],["prisen",-13.647454261779783],["▁هاشمی",-13.647465705871582],["▁بالق",-13.647486686706545],["▁kosmi",-13.647499084472656],["▁битка",-13.647513389587402],["▁њихове",-13.647515296936035],["стъпва",-13.647516250610352],["ësia",-13.647534370422363],["▁tepung",-13.647534370422363],["ຕິດ",-13.647542953491213],["▁druhy",-13.647586822509766],["▁Nega",-13.647589683532717],["uyorum",-13.64759349822998],["▁جنم",-13.647604942321776],["▁Division",-13.64
7613525390623],["▁పది",-13.647628784179688],["▁اطلاعاتی",-13.647639274597168],["▁යක්ෂ",-13.647642135620115],["संघ",-13.647662162780762],["▁vội",-13.647679328918455],["▁didesni",-13.647680282592772],["лгүй",-13.647714614868164],["▁kitabın",-13.64771556854248],["ተዋል",-13.647719383239746],["knapp",-13.647723197937012],["▁keçən",-13.647724151611328],["▁Cale",-13.647765159606934],["اهو",-13.647775650024414],["▁गर्भ",-13.647783279418944],["Siz",-13.64780616760254],["boer",-13.647822380065918],["▁Journalist",-13.647826194763184],["▁आयो",-13.647829055786133],["▁تعليق",-13.647836685180664],["▁необходимы",-13.64784812927246],["についての",-13.647863388061523],["▁onay",-13.647871971130373],["▁باخت",-13.64787769317627],["▁podpora",-13.647903442382812],["सरा",-13.647931098937988],["▁tájékoztató",-13.647931098937988],["▁fluor",-13.647940635681152],["тыс",-13.64794635772705],["ចោល",-13.647954940795898],["▁säkerhet",-13.647974014282228],["ாவது",-13.647985458374023],["jedná",-13.647992134094238],["ترنت",-13.647994995117188],["เพล",-13.64799690246582],["▁наложи",-13.648001670837402],["激烈",-13.64801025390625],["▁planera",-13.648016929626465],["▁okien",-13.648048400878906],["▁chóir",-13.64807415008545],["örf",-13.648094177246094],["▁Транс",-13.648096084594728],["hammer",-13.648098945617676],["ίσουμε",-13.648101806640623],["▁బై",-13.648117065429688],["▁ставка",-13.648138046264648],["kaat",-13.648146629333496],["coba",-13.648158073425291],["punten",-13.648180961608888],["▁pozostaw",-13.648197174072266],["となっている",-13.648208618164062],["▁büdcə",-13.648234367370604],["خال",-13.648249626159668],["▁খেলা",-13.648277282714844],["▁szék",-13.648282051086426],["ፉት",-13.64831829071045],["▁මමත්",-13.64831829071045],["מנ",-13.64832592010498],["▁విశ్వ",-13.648341178894045],["▁घरी",-13.648347854614258],["вео",-13.648356437683104],["▁അപ്പ",-13.648394584655762],["DOM",-13.648399353027344],["▁жолдары",-13.648401260375977],["▁ATP",-13.64840316772461],["Քաղաք",-13.648412704467772],["▁Patent",-13.648415565490724],["communica",-13.648418426513672],["▁Nikon",-13.648431777954102],["זיקה",-13.648438453674316],["ยั",-13.64845085144043],["▁3,6",-13.648460388183594],["монт",-13.648462295532228],["▁netinu",-13.648463249206545],["▁ផ្នែក",-13.648467063903809],["▁afaka",-13.648481369018556],["▁Mercat",-13.648507118225098],["▁talar",-13.648512840270996],["tojams",-13.648513793945312],["غذ",-13.64852523803711],["▁glæder",-13.648530006408691],["▁комунальн",-13.648530006408691],["▁valdība",-13.648534774780272],["▁ਬਚ",-13.64853572845459],["▁դիմել",-13.648537635803224],["▁jeito",-13.648558616638184],["仿佛",-13.648569107055664],["ολογική",-13.64857006072998],["窮",-13.648589134216309],["MANI",-13.64859676361084],["럴",-13.648599624633787],["▁volem",-13.648601531982422],["▁Consiliul",-13.648605346679688],["▁kodeks",-13.64860725402832],["דף",-13.648609161376951],["濟",-13.648615837097168],["▁என்றார்",-13.648618698120115],["离婚",-13.6486234664917],["▁۱۳۹۴",-13.648630142211914],["▁मुश्किल",-13.648630142211914],["አውሮፓ",-13.64863109588623],["បារាំង",-13.64863109588623],["▁diwrnod",-13.64863109588623],["▁doświadczenia",-13.64863109588623],["▁funcionários",-13.64863109588623],["▁proizvođač",-13.64863109588623],["▁puheenjohtaja",-13.64863109588623],["▁víťaz",-13.64863109588623],["▁wählen",-13.64863109588623],["▁zhruba",-13.64863109588623],["▁сүүлийн",-13.64863109588623],["▁विष्णु",-13.64863109588623],["▁संलग्न",-13.64863109588623],["▁දුරකථන",-13.64863109588623],["묵",-13.64863109588623],["ೀನ್",-13.648632049560549],["▁ایجنسی",-13.648632049560549],["▁مزدور",-13.6486320495
60549],["▁altceva",-13.648633003234863],["▁Төв",-13.648633003234863],["ယာဥ္",-13.64863395690918],["▁exklusiv",-13.648634910583496],["▁নির্দেশ",-13.648635864257812],["▁حماية",-13.648636817932127],["▁silicone",-13.648638725280762],["▁තෝරා",-13.648639678955078],["▁recuerdo",-13.648641586303713],["ζωνταν",-13.648643493652344],["▁kjendis",-13.648643493652344],["▁осындай",-13.648645401000977],["试点",-13.648645401000977],["ЛЫҚ",-13.648646354675291],["▁завърши",-13.64864730834961],["▁रेलवे",-13.64864730834961],["프로",-13.648648262023926],["▁fórmula",-13.64865016937256],["黑暗",-13.648652076721191],["ਰਜ",-13.648653030395508],["▁folosesc",-13.648653030395508],["▁Restoran",-13.64865493774414],["▁بیٹے",-13.648659706115724],["▁प्रदेशसभा",-13.648662567138672],["▁구축",-13.64866828918457],["מטי",-13.648673057556152],["▁ਹਾਈ",-13.648674964904783],["▁riesce",-13.648679733276367],["▁therefore",-13.648679733276367],["▁संपूर्ण",-13.648683547973633],["▁항공권",-13.648683547973633],["rædd",-13.648703575134276],["▁Было",-13.648706436157228],["▁duży",-13.648712158203123],["▁temveč",-13.648712158203123],["▁beweging",-13.648726463317873],["每一位",-13.64874267578125],["社群",-13.648743629455566],["▁highlight",-13.648755073547363],["▁ඉහත",-13.648758888244627],["▁staw",-13.648761749267578],["▁nepra",-13.648774147033691],["Инфо",-13.648781776428224],["▁Orienta",-13.648781776428224],["▁circuito",-13.648788452148438],["▁Dafür",-13.648811340332031],["யைப்",-13.648818016052246],["▁Posebn",-13.648820877075195],["▁представить",-13.64883804321289],["▁birth",-13.648841857910156],["▁تۈرك",-13.648850440979004],["▁المثال",-13.64885425567627],["吃到",-13.648863792419434],["▁höll",-13.648869514465332],["▁जति",-13.648874282836914],["▁Whit",-13.64887523651123],["▁vähene",-13.64889144897461],["▁احسن",-13.64889144897461],["▁Mesto",-13.64892578125],["▁либерал",-13.648932456970217],["▁խոսքով",-13.648945808410645],["결과",-13.648953437805176],["▁izraz",-13.64895725250244],["▁zabaldu",-13.648961067199709],["▁repetir",-13.648982048034668],["▁режиме",-13.6489896774292],["خدمات",-13.64899444580078],["▁formă",-13.648995399475098],["stává",-13.649009704589844],["ቱም",-13.649009704589844],["▁sajandi",-13.649011611938477],["回目",-13.64901638031006],["▁समोर",-13.649036407470703],["юються",-13.649077415466309],["▁energ",-13.649077415466309],["American",-13.649093627929688],["▁Bildung",-13.649102210998535],["LOM",-13.649114608764648],["▁സൂ",-13.649134635925291],["ଦୀପ",-13.649147033691406],["▁störf",-13.649149894714355],["495",-13.649157524108888],["พิษ",-13.64915943145752],["▁минус",-13.649164199829102],["▁Nori",-13.649179458618164],["就職",-13.649203300476074],["▁ხალხი",-13.64920425415039],["▁1850",-13.649210929870604],["▁obrazu",-13.649215698242188],["▁conosco",-13.649216651916504],["▁necessidades",-13.64922046661377],["კეთილ",-13.649224281311035],["كرم",-13.6492280960083],["ワイ",-13.649243354797363],["▁herlig",-13.649253845214844],["▁لنکس",-13.649253845214844],["▁saksa",-13.649259567260742],["▁موسی",-13.649269104003906],["▁цял",-13.649291038513184],["场所",-13.649291038513184],["▁acquistare",-13.649296760559082],["անոց",-13.64930248260498],["▁Nesse",-13.64930248260498],["▁xir",-13.649321556091309],["▁hallarda",-13.649332046508787],["▁แนะนํา",-13.649332046508787],["▁Zə",-13.649348258972168],["ovič",-13.649353981018066],["▁palvelut",-13.649356842041016],["▁имамо",-13.64939785003662],["ΙΚΑ",-13.649429321289062],["▁jelöl",-13.64944076538086],["▁있지",-13.64944839477539],["大家的",-13.64946460723877],["▁સમયે",-13.649474143981934],["▁ammu",-13.649496078491213],["▁શબ્દ",-13.649502754
211426],["skort",-13.64951229095459],["▁fokusere",-13.649542808532717],["ARGA",-13.649544715881348],["得意",-13.649547576904297],["▁ПОД",-13.649576187133787],["ในประเทศไทย",-13.64958953857422],["▁asigură",-13.649611473083496],["▁Louise",-13.649617195129396],["mellett",-13.64963150024414],["状態で",-13.649651527404783],["גברים",-13.649670600891112],["નની",-13.649673461914062],["LIO",-13.649682998657228],["▁vënë",-13.64971160888672],["kileyo",-13.649721145629885],["▁refund",-13.649723052978516],["▁înalt",-13.649724960327148],["▁ගී",-13.649737358093262],["isera",-13.649765014648438],["জো",-13.649765968322754],["▁rendelkezés",-13.649765968322754],["▁Jasa",-13.64978313446045],["▁passt",-13.649784088134766],["▁සතු",-13.649785041809082],["▁dzieje",-13.649799346923828],["▁lectores",-13.649802207946776],["ovaly",-13.649820327758787],["▁sû",-13.649832725524902],["Tam",-13.6498384475708],["▁quebra",-13.64984130859375],["▁Walang",-13.649842262268066],["▁ireny",-13.649843215942385],["▁вика",-13.649845123291016],["▁спам",-13.649850845336914],["▁побара",-13.649866104125977],["▁részletes",-13.649869918823242],["▁Виж",-13.649869918823242],["▁polizia",-13.64988613128662],["▁Vrou",-13.649895668029783],["dako",-13.649911880493164],["EAR",-13.649924278259276],["的品牌",-13.64995002746582],["▁pravico",-13.649950981140137],["ฮิ",-13.649954795837402],["chasi",-13.649982452392578],["▁느낌",-13.649991035461426],["ัท",-13.650017738342283],["▁kpl",-13.650020599365234],["▁участия",-13.650023460388184],["▁потребни",-13.650042533874512],["JJ",-13.650050163269045],["▁Ninja",-13.650059700012209],["▁Σημ",-13.650059700012209],["科学家",-13.650074005126951],["▁સરસ",-13.650096893310549],["▁சொ",-13.650096893310549],["▁обязательн",-13.650097846984863],["HAK",-13.650121688842772],["▁krije",-13.650126457214355],["▁templ",-13.650136947631836],["一邊",-13.650147438049316],["hjemme",-13.650153160095217],["▁komerci",-13.650161743164062],["▁цікаві",-13.650169372558594],["▁apartment",-13.650172233581545],["▁pokol",-13.65018081665039],["обича",-13.650192260742188],["▁Leistungs",-13.650192260742188],["▁lagundu",-13.650217056274414],["▁فلسف",-13.650221824645996],["▁Sauna",-13.65023708343506],["▁لاست",-13.650248527526855],["EKS",-13.65025520324707],["▁סביב",-13.650266647338867],["▁מיד",-13.650270462036133],["▁millise",-13.650274276733398],["▁leeg",-13.650276184082031],["ਚੋਂ",-13.65027904510498],["ствена",-13.65028476715088],["▁potpisa",-13.650294303894045],["▁toimiva",-13.650301933288574],["▁преврат",-13.65030288696289],["గ్గు",-13.650322914123535],["心地",-13.650334358215332],["பக",-13.650347709655762],["▁critique",-13.650347709655762],["▁faktura",-13.650349617004396],["▁polni",-13.650352478027344],["тіп",-13.650370597839355],["▁contenidos",-13.650381088256836],["▁દિલ",-13.650386810302734],["▁divertida",-13.65038776397705],["▁ครั้งที่",-13.650389671325684],["ptica",-13.650390625],["▁дође",-13.650396347045898],["korv",-13.650399208068848],["▁yayi",-13.650400161743164],["▁станал",-13.650409698486328],["▁toiminnan",-13.65041446685791],["▁Präsident",-13.650419235229492],["มล",-13.650443077087402],["▁तद",-13.650445938110352],["CTA",-13.650477409362791],["اتك",-13.650486946105955],["▁อายุ",-13.650490760803224],["▁manns",-13.650492668151855],["▁фракц",-13.65051555633545],["▁ဖတ္",-13.650538444519045],["的思想",-13.650604248046877],["▁დარჩე",-13.65060806274414],["ულმა",-13.65064525604248],["华人",-13.650654792785645],["▁୧୮",-13.650683403015137],["▁၁၅",-13.65068817138672],["▁gender",-13.650693893432615],["▁κυρ",-13.65070915222168],["讨",-13.65070915222168],["氛围",-13.65071
3920593262],["ъци",-13.650727272033691],["konstruktion",-13.650731086730955],["cello",-13.650734901428224],["探讨",-13.650738716125488],["ર્થ",-13.650741577148438],["لازم",-13.650748252868652],["棚",-13.65075397491455],["紛",-13.650758743286133],["nicama",-13.650764465332031],["▁ఫ్ల",-13.65077018737793],["ମୀ",-13.650778770446776],["迷惑",-13.65078353881836],["弾",-13.65078830718994],["▁Pembangunan",-13.650790214538574],["▁całkiem",-13.650790214538574],["▁consommation",-13.650790214538574],["▁oavsett",-13.650790214538574],["▁priekšsēdētāj",-13.650790214538574],["▁оплаты",-13.650790214538574],["▁подразумева",-13.650790214538574],["▁превръща",-13.650790214538574],["▁निवडणूक",-13.650790214538574],["▁ਕੈਨੇਡਾ",-13.650790214538574],["▁ਜ਼ਿਲ੍ਹਾ",-13.650790214538574],["īšanās",-13.65079116821289],["▁достижения",-13.65079116821289],["▁රතිඵල",-13.65079116821289],["▁Депутат",-13.650792121887209],["▁ምላሽ",-13.650792121887209],["몽",-13.650793075561523],["▁ražošana",-13.65079402923584],["▁tudok",-13.65079402923584],["▁ausreichend",-13.650794982910156],["▁встановлення",-13.650794982910156],["▁kuvvet",-13.650796890258787],["▁συμβαίνει",-13.650796890258787],["▁průmysl",-13.650797843933104],["▁ఎన్నికల్లో",-13.650797843933104],["▁Xukuumadda",-13.650799751281738],["▁Χρυσ",-13.650799751281738],["صمت",-13.650806427001951],["▁závěr",-13.650806427001951],["▁фігур",-13.65081024169922],["▁Teilnahme",-13.650814056396484],["▁Տես",-13.650827407836914],["▁sorta",-13.65082836151123],["ここまで",-13.65082836151123],["▁بزرگترین",-13.65083122253418],["▁በአሁኑ",-13.650833129882812],["▁kosovar",-13.650834083557127],["kaut",-13.650839805603027],["▁ceļa",-13.650839805603027],["▁assistance",-13.650845527648926],["▁lasīt",-13.65086269378662],["▁selskapet",-13.650864601135254],["▁настани",-13.650869369506836],["▁توانایی",-13.650875091552734],["▁לעזור",-13.65087604522705],["▁pugui",-13.650880813598633],["▁vlastnosti",-13.650884628295898],["▁Gis",-13.650890350341797],["志愿者",-13.650893211364746],["زوج",-13.650894165039062],["▁voucher",-13.65089988708496],["▁jumla",-13.650900840759276],["▁पवार",-13.650912284851074],["భ్య",-13.650914192199709],["▁llegó",-13.65092945098877],["▁Աղ",-13.650934219360352],["▁ଟ୍ର",-13.6509370803833],["blá",-13.650941848754885],["▁bồi",-13.6509428024292],["▁Biasanya",-13.65095329284668],["▁mühit",-13.650954246520996],["▁מספק",-13.650955200195312],["▁ബാക്കി",-13.650958061218262],["teoir",-13.650960922241213],["▁считается",-13.650961875915527],["רוט",-13.650970458984377],["▁Comfort",-13.650973320007324],["esterno",-13.650975227355955],["ใดๆ",-13.650976181030272],["▁animaux",-13.650983810424805],["ลอน",-13.650993347167969],["▁міського",-13.650996208190918],["▁одмор",-13.650997161865234],["▁behera",-13.651000022888184],["▁Herkes",-13.651039123535156],["хім",-13.651040077209473],["▁Doppel",-13.651047706604004],["▁залу",-13.651061058044434],["▁Advanced",-13.651077270507812],["▁немало",-13.651081085205078],["taneet",-13.65108585357666],["火车",-13.65108585357666],["တုိ",-13.65108871459961],["▁вирі",-13.65108871459961],["ేంద్ర",-13.651090621948242],["▁paix",-13.651117324829102],["▁engedély",-13.651123046875],["▁automatik",-13.651142120361328],["▁අධිකරණය",-13.651144981384276],["RAR",-13.651169776916504],["▁Чому",-13.65117359161377],["▁vášho",-13.651178359985352],["せず",-13.651178359985352],["▁Hamid",-13.651182174682615],["所需的",-13.651190757751465],["manni",-13.65119457244873],["غط",-13.651205062866213],["õnn",-13.651216506958008],["▁ikkagi",-13.65122890472412],["aynaa",-13.651230812072754],["-150",-13.651273727416992],["situation",-13.65
1273727416992],["▁toimuva",-13.65127658843994],["isenä",-13.651283264160156],["ಾಯ್",-13.651312828063965],["▁UPS",-13.65131664276123],["MÄ",-13.651321411132812],["▁açıkladı",-13.651321411132812],["▁Компани",-13.651331901550291],["▁ప్రారంభించ",-13.651347160339355],["▁hallituksen",-13.651348114013672],["posten",-13.651351928710938],["ยินดี",-13.65136432647705],["ЛАД",-13.65138816833496],["▁проведено",-13.65139389038086],["ುತ್ತೆ",-13.651406288146973],["onate",-13.651419639587402],["алагічны",-13.65142822265625],["▁upis",-13.651451110839844],["▁بننے",-13.651456832885742],["▁Marca",-13.65146827697754],["sorti",-13.651473999023438],["▁płat",-13.651473999023438],["▁بري",-13.651488304138184],["Pop",-13.651495933532717],["▁құқықтары",-13.651530265808104],["▁बग",-13.651541709899902],["▁подобра",-13.65157699584961],["▁స్వామి",-13.651578903198242],["▁ծրագր",-13.651609420776367],["రెడ్డి",-13.651613235473633],["گزار",-13.651633262634276],["▁успеа",-13.651634216308594],["ືອ",-13.651647567749023],["▁smr",-13.651651382446287],["▁приложен",-13.651676177978516],["▁Баян",-13.651689529418944],["▁ədəd",-13.651694297790527],["▁описан",-13.651694297790527],["جنب",-13.651695251464844],["▁нејзини",-13.651697158813477],["▁geçerli",-13.651703834533691],["Wie",-13.651718139648438],["▁plantea",-13.651728630065918],["קשר",-13.6517333984375],["▁fiction",-13.651739120483398],["▁multiplica",-13.651744842529297],["▁branitelj",-13.65174674987793],["▁ONU",-13.651750564575195],["▁նյութեր",-13.651759147644045],["▁şirkət",-13.651762008666992],["sivät",-13.651769638061523],["▁የምና",-13.651777267456056],["วาน",-13.651793479919434],["▁zaupa",-13.651798248291016],["AAT",-13.651814460754396],["ERING",-13.651850700378418],["▁dosah",-13.65185260772705],["เหมาะสําหรับ",-13.651869773864746],["▁dünyasını",-13.65188980102539],["ώνονται",-13.651896476745604],["▁Sveta",-13.65190887451172],["zata",-13.651921272277832],["▁السكر",-13.651957511901855],["മം",-13.6519775390625],["▁neuer",-13.651997566223145],["ינער",-13.651999473571776],["vojo",-13.652019500732422],["▁Wichtig",-13.652020454406738],["бисквитки",-13.65202522277832],["ڏو",-13.65204620361328],["▁usage",-13.652060508728027],["▁жою",-13.652060508728027],["ladigan",-13.652069091796877],["баган",-13.652094841003418],["▁찬",-13.652106285095217],["▁testat",-13.65213108062744],["▁agricol",-13.652154922485352],["పాలు",-13.65216827392578],["▁Kurse",-13.65216827392578],["دقة",-13.652192115783691],["▁Цена",-13.652194023132324],["悉",-13.65220832824707],["▁hesabı",-13.652223587036133],["LAMA",-13.652227401733398],["▁rozmer",-13.652236938476562],["▁नाममा",-13.652241706848145],["▁uyum",-13.652249336242676],["내고",-13.652262687683104],["▁Ох",-13.652280807495115],["▁Raš",-13.652305603027344],["νία",-13.652308464050291],["גיא",-13.652321815490724],["▁замени",-13.652348518371582],["patuloy",-13.652377128601074],["დრ",-13.652423858642578],["▁натпревар",-13.652427673339844],["▁harbiy",-13.652433395385742],["▁kello",-13.652457237243652],["ალო",-13.652461051940918],["▁nebylo",-13.652462005615234],["▁Ál",-13.652462005615234],["GAM",-13.65246868133545],["૧૨",-13.652509689331056],["▁conflito",-13.652514457702637],["▁аялал",-13.652541160583496],["▁gündən",-13.652558326721191],["대를",-13.65256118774414],["ทริป",-13.652570724487305],["یسم",-13.652581214904783],["▁ungu",-13.652609825134276],["明明",-13.65261459350586],["▁لاره",-13.652615547180176],["דא",-13.652626037597656],["דרה",-13.652629852294922],["▁egenskaper",-13.652645111083984],["▁флот",-13.65264892578125],["▁baixar",-13.652650833129885],["ӨӨ",-13.65265560150146
5],["▁registraci",-13.652666091918944],["▁səviyyəsi",-13.652679443359377],["処",-13.652684211730955],["याः",-13.652690887451172],["▁toets",-13.652697563171388],["▁спрямо",-13.652700424194336],["habitude",-13.652737617492676],["ificação",-13.652738571166992],["▁порталы",-13.652750015258787],["▁zračn",-13.652771949768066],["prostřed",-13.65280818939209],["▁ډير",-13.652817726135254],["▁വായന",-13.65281867980957],["senti",-13.652826309204102],["ものと",-13.652832984924316],["▁contribution",-13.652847290039062],["貿",-13.652860641479492],["▁открыты",-13.652871131896973],["▁мистер",-13.652873992919922],["ძახ",-13.652874946594238],["▁Краљ",-13.6528959274292],["绘",-13.652896881103516],["ತರ",-13.652898788452148],["进了",-13.652918815612791],["喉",-13.652921676635742],["plí",-13.65293025970459],["ምሳሌ",-13.652934074401855],["ხედავ",-13.65294075012207],["▁visat",-13.652953147888184],["річчя",-13.652955055236816],["ພະແນກ",-13.652955055236816],["ແມ່ຍິງ",-13.652955055236816],["သဘော",-13.652955055236816],["អញ្ជើញ",-13.652955055236816],["▁Uchaguzi",-13.652955055236816],["▁qytetarët",-13.652955055236816],["▁tomorrow",-13.652955055236816],["▁ákvörðun",-13.652955055236816],["▁ΠΑΣΟΚ",-13.652955055236816],["▁Испания",-13.652955055236816],["▁дэпутат",-13.652955055236816],["▁предварително",-13.652955055236816],["▁розповів",-13.652955055236816],["▁ضوابط",-13.652955055236816],["▁वर्ल्ड",-13.652955055236816],["▁हिस्सा",-13.652955055236816],["▁ਵੱਡਾ",-13.652955055236816],["▁ಇಂಡಿಯಾ",-13.652955055236816],["▁አስተያየት",-13.652955055236816],["▁Bewerbung",-13.652956008911133],["▁پۈتۈن",-13.652956008911133],["▁актывіст",-13.65295696258545],["▁külföldi",-13.652957916259766],["▁udělat",-13.652957916259766],["▁msaada",-13.652958869934082],["лтернатив",-13.652959823608398],["ทั่วโลก",-13.652959823608398],["Just",-13.652963638305664],["▁többször",-13.65296745300293],["▁вооружен",-13.65296745300293],["▁finansiering",-13.652968406677246],["▁හම්බ",-13.652973175048828],["靠近",-13.652974128723145],["▁මල",-13.652978897094728],["▁לפנות",-13.652981758117676],["▁불편",-13.65298843383789],["哪个",-13.65300178527832],["▁дитина",-13.653005599975586],["▁дърво",-13.653005599975586],["ခဲ့ပါတယ်။",-13.653008460998535],["▁болуу",-13.653009414672852],["기의",-13.653017044067385],["▁kļūst",-13.6530179977417],["▁тижні",-13.653026580810549],["▁धाव",-13.653046607971191],["ചര",-13.65305233001709],["▁គិត",-13.653078079223633],["▁Hochzeit",-13.65307903289795],["▁посвящен",-13.653082847595217],["▁Erkek",-13.65309715270996],["▁poteva",-13.653099060058594],["▁2014-2020",-13.653101921081545],["ගය",-13.653118133544922],["▁omsorg",-13.653120994567873],["▁tisoč",-13.653125762939451],["▁feci",-13.653128623962402],["▁akumulator",-13.653135299682615],["▁wisa",-13.653136253356934],["▁películas",-13.653142929077148],["▁റൂ",-13.653154373168944],["אָו",-13.653155326843262],["klara",-13.653160095214844],["▁εργασ",-13.65317440032959],["endorf",-13.653175354003906],["▁अगले",-13.653178215026855],["▁selkeä",-13.653180122375488],["每位",-13.653180122375488],["▁හොඳම",-13.653186798095703],["תקן",-13.653191566467283],["▁اباد",-13.6531982421875],["verseny",-13.653200149536133],["▁ተደ",-13.653203964233398],["▁Peste",-13.653210639953612],["فرح",-13.653212547302246],["▁precipit",-13.653216361999512],["▁caminha",-13.65322208404541],["BIS",-13.653225898742676],["нтов",-13.653227806091309],["▁Камен",-13.653231620788574],["မွတ္",-13.653242111206056],["▁hôtel",-13.65324878692627],["▁meccs",-13.653249740600586],["▁Patron",-13.65325164794922],["▁রোড",-13.65325164794922],["િર",-13.653257369995115],["用意",-13.653
261184692385],["▁többség",-13.653271675109863],["▁návod",-13.653280258178713],["▁өлкөлөр",-13.65328311920166],["▁państwa",-13.653287887573242],["விடும்",-13.653310775756836],["▁телевизия",-13.653315544128418],["▁다만",-13.653318405151367],["▁निघ",-13.653322219848633],["iškė",-13.653331756591797],["▁prvič",-13.653331756591797],["▁виси",-13.653337478637695],["▁существенно",-13.653362274169922],["▁nauki",-13.653376579284668],["▁jammer",-13.653377532958984],["▁ಹಂತ",-13.653380393981934],["对待",-13.653403282165527],["▁alaturi",-13.65341567993164],["▁cild",-13.65342617034912],["▁vág",-13.653433799743652],["bibi",-13.65343952178955],["▁Chief",-13.653441429138184],["biler",-13.653446197509766],["353",-13.65345573425293],["▁teised",-13.653460502624512],["▁runs",-13.653465270996094],["▁បក្ស",-13.653467178344728],["овича",-13.653472900390623],["▁baterie",-13.653474807739258],["▁lenta",-13.653494834899902],["toimen",-13.653501510620115],["biał",-13.653529167175291],["▁гарын",-13.653547286987305],["reya",-13.653558731079102],["▁méně",-13.653564453125],["▁shown",-13.653586387634276],["▁bayaran",-13.653590202331545],["ktet",-13.65359115600586],["▁Toch",-13.653594970703123],["▁Тэг",-13.653597831726074],["256",-13.653600692749023],["▁praznik",-13.653637886047363],["ത്തിലുള്ള",-13.653657913208008],["ština",-13.653663635253906],["▁Prüfung",-13.653672218322754],["▁Βρ",-13.653672218322754],["▁ប្រព័ន្ធ",-13.653677940368652],["장에서",-13.653678894042969],["▁Vader",-13.653682708740234],["▁reaktion",-13.653687477111816],["VEZ",-13.653690338134766],["غوط",-13.653693199157717],["▁أخي",-13.653725624084473],["▁tashkilotlar",-13.653759002685549],["▁toimia",-13.65376091003418],["▁வாங்கி",-13.65377140045166],["стеріг",-13.65377426147461],["▁Sicher",-13.653825759887695],["▁отсто",-13.653841972351074],["tiap",-13.653844833374023],["áček",-13.65384578704834],["ค่าใช้จ่าย",-13.65384578704834],["▁טובים",-13.653852462768556],["4,9",-13.653854370117188],["▁uzoq",-13.65386199951172],["▁ventas",-13.653863906860352],["▁covered",-13.653867721557615],["▁فخر",-13.65387725830078],["▁പട്ട",-13.653878211975098],["žino",-13.653884887695312],["venant",-13.653887748718262],["▁rike",-13.653894424438477],["▁vervoer",-13.653903007507324],["▁පාර්ලිමේන්තුව",-13.653911590576172],["מדיה",-13.653918266296388],["▁Alum",-13.653925895690918],["aattori",-13.65394115447998],["सान",-13.65395450592041],["▁ordea",-13.653964042663574],["kapp",-13.65396499633789],["изації",-13.653966903686523],["▁مقال",-13.653971672058104],["▁talon",-13.653995513916016],["▁Wohl",-13.654000282287598],["▁duhur",-13.654034614562988],["cā",-13.654050827026367],["бою",-13.65406894683838],["նային",-13.65408706665039],["ξο",-13.654093742370604],["▁خمس",-13.654098510742188],["Protect",-13.654119491577148],["క్త",-13.654132843017578],["SUM",-13.65414047241211],["របស់អ្នក",-13.654142379760742],["ຈາ",-13.654146194458008],["▁serva",-13.654146194458008],["的味道",-13.654150009155272],["стрем",-13.654186248779297],["້ອງ",-13.654194831848145],["▁क्व",-13.654197692871094],["▁ascenso",-13.654200553894045],["▁చేపట్ట",-13.65420913696289],["contro",-13.65421199798584],["▁güvenli",-13.654213905334473],["сүрэн",-13.654219627380373],["▁przestrzeni",-13.65422248840332],["▁செல்",-13.654224395751951],["ทอด",-13.6542329788208],["▁ફિ",-13.6542329788208],["▁спын",-13.654237747192385],["시키",-13.654239654541016],["这部",-13.654263496398926],["ไม่เกิน",-13.654266357421877],["▁impulso",-13.654274940490724],["lapse",-13.654308319091797],["▁شپ",-13.654326438903809],["363",-13.6543607711792],["▁préfér",-13.65436553955078
],["ដូច",-13.654367446899414],["ໄຟ",-13.654375076293944],["▁გარ",-13.654400825500488],["素敵な",-13.654406547546388],["▁turistik",-13.65445327758789],["▁amics",-13.654470443725586],["▁iyul",-13.654470443725586],["շին",-13.654471397399902],["まだまだ",-13.654473304748535],["▁келгенде",-13.65450668334961],["енција",-13.654516220092772],["▁Clip",-13.654540061950684],["▁organisationer",-13.654545783996582],["ισμένη",-13.65455722808838],["tusega",-13.654560089111328],["ზიან",-13.65456199645996],["▁doza",-13.654573440551758],["cencia",-13.654576301574709],["その他",-13.654576301574709],["▁raisons",-13.654590606689451],["▁blogging",-13.654600143432615],["▁karaa",-13.654621124267578],["வால்",-13.654623031616213],["ທ່າ",-13.654623031616213],["▁nikakv",-13.654632568359377],["moder",-13.654656410217283],["लिन",-13.65466594696045],["കെട്ട",-13.654680252075195],["▁necessaria",-13.654680252075195],["全然",-13.654695510864258],["▁предлагает",-13.65471649169922],["▁ಅಡಿ",-13.654732704162598],["▁uspeh",-13.654733657836914],["kasten",-13.654744148254396],["▁संस्थान",-13.65475082397461],["▁വിളിച്ചു",-13.654760360717772],["▁konsul",-13.65478801727295],["▁تدريب",-13.654790878295898],["zeichnet",-13.654800415039062],["▁Miro",-13.654800415039062],["▁போக",-13.65480899810791],["ehdot",-13.654828071594238],["wok",-13.654829025268556],["ुभयो",-13.65483856201172],["▁njuta",-13.65484619140625],["▁izgled",-13.654851913452148],["ငယ်",-13.654862403869627],["▁tiska",-13.654866218566896],["▁BAŞ",-13.654874801635742],["shell",-13.65487575531006],["▁eguna",-13.654878616333008],["сајт",-13.654881477355955],["rriak",-13.654887199401855],["▁могућности",-13.654894828796388],["স্ব",-13.654901504516602],["ryhmän",-13.6549072265625],["اول",-13.654909133911133],["▁മാന്",-13.654918670654297],["▁kurser",-13.654919624328612],["▁capul",-13.654924392700195],["▁مانع",-13.654964447021484],["івна",-13.654980659484863],["যা",-13.655038833618164],["▁sukar",-13.655041694641112],["joja",-13.655067443847656],["धारण",-13.655071258544922],["▁foydalan",-13.655092239379885],["縁",-13.655112266540527],["▁প্রয়োজন",-13.655118942260742],["ភ្នំ",-13.655121803283691],["▁बालबालिका",-13.655123710632324],["▁ಪರಿಣಾಮ",-13.655123710632324],["déjeuner",-13.65512466430664],["▁Gwybodaeth",-13.65512466430664],["▁INDONESIA",-13.65512466430664],["▁gwiazd",-13.65512466430664],["▁kerugian",-13.65512466430664],["▁menegaskan",-13.65512466430664],["▁skuespiller",-13.65512466430664],["▁įrašai",-13.65512466430664],["▁διατροφή",-13.65512466430664],["▁Несмотря",-13.65512466430664],["▁милијарди",-13.65512466430664],["▁սխալ",-13.65512466430664],["▁برطانیہ",-13.65512466430664],["▁حاشیه",-13.65512466430664],["▁مەسئۇل",-13.65512466430664],["▁وبالتالي",-13.65512466430664],["▁સામાન્ય",-13.65512466430664],["▁Faqja",-13.655125617980955],["▁kapcsolódó",-13.655125617980955],["▁renuncia",-13.655125617980955],["▁viikolla",-13.655126571655272],["▁дрехи",-13.655126571655272],["▁urmează",-13.655128479003906],["▁සුදුසු",-13.655131340026855],["▁dechrau",-13.65513515472412],["▁вивчення",-13.65513515472412],["ထည့္",-13.655137062072754],["▁miercuri",-13.65513801574707],["▁izlaz",-13.655142784118652],["häuser",-13.655144691467283],["▁zərər",-13.655146598815918],["▁இருந்தால்",-13.655150413513184],["▁토론",-13.655153274536133],["▁רואה",-13.65516185760498],["▁అడ్డ",-13.655163764953612],["▁użytkowników",-13.655176162719728],["▁ಸೌ",-13.655180931091309],["▁هېوادونو",-13.655186653137209],["▁rãi",-13.655200958251951],["▁எனவே",-13.65520191192627],["▁новая",-13.655224800109863],["נגד",-13.655228614807127],["▁ໂຕ",-13.6552295
68481444],["▁ಇವರು",-13.655241012573242],["▁پتہ",-13.655253410339355],["▁idealny",-13.65525722503662],["▁ნახევარ",-13.655261039733888],["▁попыта",-13.655261993408203],["▁жатып",-13.655264854431152],["▁красиво",-13.655271530151367],["▁ειδικά",-13.655274391174316],["ດຽວ",-13.655282020568848],["▁مهمة",-13.655284881591797],["▁БСП",-13.655285835266112],["▁स्या",-13.655287742614746],["▁журам",-13.65529727935791],["▁vækst",-13.65530014038086],["▁znaczenie",-13.65530014038086],["▁Gje",-13.65531063079834],["▁конференција",-13.655312538146973],["kkede",-13.65532112121582],["ነህ",-13.65532398223877],["▁кръст",-13.655328750610352],["räv",-13.655341148376465],["▁ຊົນ",-13.655356407165527],["▁איכותי",-13.655359268188477],["▁связь",-13.655384063720703],["▁Naken",-13.655388832092283],["▁darbų",-13.655426025390623],["▁Adri",-13.655438423156738],["▁визу",-13.655440330505373],["۸۰",-13.6554536819458],["▁comptes",-13.655454635620115],["▁moderní",-13.65545654296875],["▁Konstru",-13.655472755432127],["ליש",-13.65549659729004],["▁zainteresowani",-13.65549659729004],["▁skot",-13.655515670776367],["▁refuse",-13.655517578125],["募",-13.65554141998291],["ାଳୟ",-13.655543327331545],["Tbilisi",-13.65555191040039],["▁Gesundheits",-13.65555477142334],["▁ukončen",-13.655560493469238],["रौ",-13.655564308166504],["▁Lina",-13.655570030212402],["▁kannalta",-13.655588150024414],["▁tilstand",-13.655599594116213],["▁সাই",-13.655611991882324],["▁Onda",-13.655617713928224],["▁جيدا",-13.655625343322754],["ជួប",-13.65562915802002],["▁покрив",-13.655653953552246],["年以来",-13.655657768249512],["▁1860",-13.655662536621094],["කරණය",-13.65566349029541],["шілігі",-13.655667304992676],["▁syge",-13.655669212341309],["▁indray",-13.655672073364258],["UZI",-13.655684471130373],["▁Plas",-13.655705451965332],["▁foregår",-13.655720710754396],["▁tegne",-13.655725479125977],["ແອ",-13.65573024749756],["▁อันดับที่",-13.655736923217772],["▁الحمد",-13.65574073791504],["vuotiaa",-13.655741691589355],["▁Giả",-13.655750274658203],["▁jotenkin",-13.655779838562012],["rujuk",-13.655780792236328],["▁chán",-13.655784606933594],["▁Pump",-13.655800819396973],["は何",-13.655834197998049],["▁Крис",-13.65587043762207],["▁utána",-13.655876159667969],["dyk",-13.655885696411133],["▁منهن",-13.655887603759766],["పోర్",-13.655893325805664],["▁protestant",-13.655893325805664],["시대",-13.655912399291992],["жақты",-13.655932426452637],["tasid",-13.65593719482422],["▁kendimi",-13.65593719482422],["▁қана",-13.6559419631958],["ட்சி",-13.655942916870115],["ಿಸಿದೆ",-13.655954360961914],["حركات",-13.655961990356444],["▁teaching",-13.655961990356444],["штаб",-13.655965805053713],["ենտ",-13.655980110168455],["laganje",-13.65598201751709],["▁көргөн",-13.65602684020996],["▁сказаць",-13.65604305267334],["▁үзүүлэх",-13.65607738494873],["▁השם",-13.656079292297363],["ificació",-13.656089782714844],["logio",-13.65609073638916],["ნაირი",-13.65610122680664],["营业",-13.656102180480955],["▁เส้นเลือดขอด",-13.656105041503906],["▁Concept",-13.656128883361816],["jährigen",-13.656135559082031],["άκου",-13.65615177154541],["▁faoliyatini",-13.65615463256836],["▁काले",-13.656163215637209],["も多い",-13.656164169311523],["3,0",-13.656190872192385],["▁എഴുത്ത",-13.656201362609863],["▁протек",-13.65621852874756],["4,6",-13.656235694885254],["▁ընտրություններ",-13.656254768371582],["ਤੁ",-13.656257629394531],["faciliteter",-13.656265258789062],["▁تمكن",-13.656265258789062],["იძის",-13.65627670288086],["▁Fisk",-13.656280517578123],["Diagnos",-13.656292915344238],["▁შვილი",-13.656295776367188],["▁Geist",-13.656332015991213
],["▁symboli",-13.656350135803224],["īns",-13.656354904174805],["▁хаан",-13.65635871887207],["▁osteo",-13.656362533569336],["ddf",-13.65638256072998],["සින්",-13.656404495239258],["▁σωματ",-13.656408309936523],["▁219",-13.656415939331056],["▁Ausgabe",-13.656424522399902],["katzeko",-13.656427383422852],["▁미디어",-13.656458854675291],["▁Miño",-13.656471252441406],["▁kripto",-13.656474113464355],["बीर",-13.656490325927734],["▁bener",-13.65649127960205],["ത്തര",-13.65650749206543],["ясне",-13.65651035308838],["ଏନ",-13.656523704528809],["▁telling",-13.656529426574709],["▁ത്തക",-13.656529426574709],["મન",-13.6565580368042],["338",-13.65658187866211],["▁Ties",-13.656583786010742],["▁၁၃",-13.656583786010742],["▁ఏర్పడ",-13.65659999847412],["▁кругл",-13.65660285949707],["▁určené",-13.65661334991455],["把我",-13.65661907196045],["ķer",-13.656622886657717],["▁يىغىنى",-13.656624794006348],["▁maneno",-13.656627655029297],["▁gaisa",-13.656631469726562],["▁پټ",-13.656645774841309],["とっても",-13.656658172607422],["▁العاب",-13.656715393066406],["லெ",-13.656720161437988],["知り",-13.656731605529783],["tykset",-13.656732559204102],["binding",-13.656750679016112],["БЛ",-13.656750679016112],["ില്ലേ",-13.65675449371338],["▁Առաջին",-13.65675449371338],["ဥ္",-13.65675926208496],["yritykse",-13.65676212310791],["▁الرب",-13.656781196594238],["▁grupu",-13.656789779663086],["▁Legge",-13.656794548034668],["▁основні",-13.656797409057615],["▁Tiere",-13.656813621520996],["కుమార",-13.656829833984377],["▁இருக்கிற",-13.656841278076172],["نشان",-13.65684413909912],["షే",-13.65684700012207],["▁ouvir",-13.656858444213867],["acompanya",-13.656872749328612],["▁sljedeće",-13.65688133239746],["▁texnika",-13.656900405883787],["(((",-13.656914710998535],["க்கல்",-13.656939506530762],["▁uzlabot",-13.656940460205078],["peto",-13.65694522857666],["▁vakker",-13.656946182250977],["▁semacam",-13.656984329223633],["해도",-13.65701675415039],["ค่าย",-13.65705108642578],["안전",-13.657052993774414],["▁Haku",-13.65705680847168],["elyje",-13.657061576843262],["ვამ",-13.65706729888916],["▁Lucr",-13.65706729888916],["నంద",-13.657100677490234],["bolu",-13.657109260559082],["▁സംശയ",-13.65716552734375],["▁quarter",-13.657179832458496],["tietojen",-13.657185554504396],["▁Koulu",-13.65719223022461],["@@",-13.65720272064209],["▁тос",-13.65720558166504],["ιστή",-13.657207489013672],["▁khe",-13.657211303710938],["▁kozm",-13.657211303710938],["▁spui",-13.65722370147705],["ministeriet",-13.657246589660645],["怀疑",-13.657254219055176],["톡",-13.657262802124023],["pakka",-13.657270431518556],["▁μικρο",-13.657280921936035],["▁кости",-13.657292366027832],["ရီးယား",-13.657297134399414],["Ļ",-13.65729808807373],["▁Shanghai",-13.65729808807373],["▁Yıldırım",-13.65729808807373],["▁atbalstu",-13.65729808807373],["▁hábitos",-13.65729808807373],["▁manutenção",-13.65729808807373],["▁τραυματ",-13.65729808807373],["▁событий",-13.65729808807373],["▁ഗ്രൂപ്പ",-13.65729808807373],["▁ირაკლი",-13.65729808807373],["▁러시아",-13.65729808807373],["▁rejstřík",-13.657299041748049],["▁otwarci",-13.657299995422363],["▁kockázat",-13.65730094909668],["▁безпосередньо",-13.65730094909668],["▁говорити",-13.65730094909668],["▁dostarcza",-13.657301902770996],["▁vytvára",-13.657301902770996],["▁කවුද",-13.657301902770996],["▁ବିଦ୍ୟା",-13.657302856445312],["▁competenze",-13.657304763793944],["▁କରୁଛି",-13.657309532165527],["បណ្",-13.65731143951416],["▁ететін",-13.65731716156006],["▁трае",-13.65731716156006],["▁ಪೊಲೀಸರು",-13.65731716156006],["▁nedavno",-13.657320022583008],["▁продолжува",-13.657320976257324],["▁ज
ंगल",-13.657322883605955],["فطر",-13.65732765197754],["▁sukurti",-13.65733242034912],["▁رہتے",-13.657337188720703],["▁ağrı",-13.657339096069336],["▁kisiasa",-13.657341957092283],["▁chýba",-13.65734577178955],["吨",-13.657347679138184],["▁včera",-13.657349586486816],["▁చేయాలి",-13.657349586486816],["▁کشورها",-13.65735912322998],["▁Vintage",-13.657367706298828],["ІС",-13.657370567321776],["▁professionele",-13.657371520996094],["▁szeretne",-13.657376289367676],["▁undersøkelse",-13.657381057739258],["ानं",-13.6574068069458],["▁bröd",-13.657408714294434],["▁ایشیا",-13.657410621643066],["▁spala",-13.657415390014648],["▁regras",-13.65742015838623],["တွေက",-13.657431602478027],["連接",-13.657437324523926],["787",-13.65744400024414],["▁Catal",-13.657466888427734],["▁dövründə",-13.657477378845217],["▁prodhim",-13.657480239868164],["理想的",-13.657508850097656],["トル",-13.657520294189451],["▁Ermənistanın",-13.65755558013916],["ngela",-13.657562255859377],["▁príde",-13.657567977905272],["▁nãeste",-13.657573699951172],["▁etmir",-13.657574653625488],["▁andmete",-13.657580375671388],["▁криви",-13.65758228302002],["▁Mikael",-13.657597541809082],["álnej",-13.657612800598145],["sindi",-13.65761375427246],["্রা",-13.657614707946776],["▁жый",-13.65761947631836],["▁Roche",-13.65764045715332],["大道",-13.657642364501951],["▁Uyg",-13.657645225524902],["▁медичн",-13.657674789428713],["聚集",-13.657684326171877],["▁באש",-13.657719612121582],["▁Nieuw",-13.65772533416748],["▁Oak",-13.657726287841797],["4.00",-13.657737731933594],["álacha",-13.657737731933594],["ხდი",-13.657740592956545],["▁Bersama",-13.657740592956545],["▁Kecil",-13.657752990722656],["ပန်း",-13.657766342163086],["▁metafor",-13.657770156860352],["▁roep",-13.65777587890625],["ँग",-13.657780647277832],["իթ",-13.657785415649414],["▁Jari",-13.657794952392578],["▁10.30",-13.657798767089844],["▁представитель",-13.657804489135742],["▁አክ",-13.657825469970703],["▁баб",-13.657843589782717],["▁Inspir",-13.657845497131348],["▁ફોર",-13.657856941223145],["▁dəstəyi",-13.657859802246094],["▁Şahin",-13.657907485961914],["▁ample",-13.657923698425291],["▁silencio",-13.657928466796877],["fuatilia",-13.657943725585938],["▁Lauk",-13.657952308654783],["സ്റ്റോ",-13.657970428466797],["餐飲",-13.657974243164062],["第一個",-13.657983779907228],["ຫັດ",-13.657984733581545],["▁způsobem",-13.65798568725586],["▁جلب",-13.65799617767334],["unting",-13.65801239013672],["▁kikao",-13.65802001953125],["ေမာ",-13.658028602600098],["လွ်င္",-13.658032417297363],["▁тоног",-13.658039093017578],["▁annunci",-13.65804958343506],["tăţii",-13.658050537109377],["の情報",-13.65807056427002],["▁conductor",-13.658071517944336],["path",-13.658084869384766],["ဖြေ",-13.658087730407717],["зно",-13.658099174499512],["▁amatör",-13.65811538696289],["чица",-13.658119201660156],["▁Вашата",-13.658120155334473],["赤ちゃん",-13.658123970031738],["türk",-13.658137321472168],["▁სკოლა",-13.6581392288208],["▁kupiga",-13.658140182495115],["/50",-13.65814208984375],["čnog",-13.65815258026123],["▁нямаше",-13.658159255981444],["ЛАР",-13.65818214416504],["▁pierd",-13.658183097839355],["▁muß",-13.658197402954102],["▁ciudades",-13.658202171325684],["▁βαθμ",-13.65820598602295],["8.2",-13.658206939697266],["tzako",-13.658212661743164],["▁قلت",-13.658223152160645],["▁фен",-13.65825653076172],["▁बाय",-13.6582670211792],["čká",-13.658267974853516],["airean",-13.658276557922363],["▁опасности",-13.658288955688477],["▁abuse",-13.65830421447754],["▁पर्वत",-13.658350944519045],["▁idéer",-13.658365249633787],["ดอน",-13.658384323120115],["şad",-13.658404350280
762],["jakso",-13.65842628479004],["▁страст",-13.658432006835938],["▁பகிர்",-13.658432006835938],["▁sahil",-13.658432960510254],["ЭК",-13.658438682556152],["මාර",-13.658452033996582],["▁dużym",-13.658476829528809],["ulatus",-13.658480644226074],["яны",-13.658493041992188],["▁ایچ",-13.658503532409668],["پیش",-13.658507347106934],["▁Ισ",-13.658510208129885],["ώδη",-13.658512115478516],["▁perlahan",-13.658514976501465],["▁प्रजा",-13.658520698547363],["▁моје",-13.65852165222168],["ILLE",-13.658533096313477],["▁förstås",-13.658535957336426],["▁ciidanka",-13.65854549407959],["biskup",-13.658559799194336],["จะมา",-13.658587455749512],["了个",-13.658598899841309],["▁Krom",-13.6586332321167],["▁Мили",-13.658636093139648],["▁temelju",-13.658637046813965],["▁încerc",-13.658641815185549],["shuv",-13.658662796020508],["▁ढा",-13.658665657043455],["▁botani",-13.658673286437988],["▁particulares",-13.658677101135254],["ustaja",-13.658693313598633],["ଡାହା",-13.65870761871338],["png",-13.65871810913086],["▁ضرورة",-13.658721923828123],["महि",-13.658754348754885],["▁religioso",-13.6587553024292],["mmig",-13.658766746520996],["▁consulter",-13.658793449401855],["▁এত",-13.65882968902588],["權利",-13.658860206604004],["▁aquilo",-13.658868789672852],["▁হ্যা",-13.6588716506958],["▁पान",-13.658875465393066],["▁एकै",-13.658881187438965],["пост",-13.658921241760254],["ητικά",-13.658946990966797],["opia",-13.658950805664062],["▁busnes",-13.658950805664062],["ルの",-13.658957481384276],["▁peccato",-13.658963203430176],["▁walau",-13.658981323242188],["▁Reserva",-13.658987045288086],["plaz",-13.659032821655272],["▁primitiv",-13.659034729003906],["▁baud",-13.659046173095703],["▁ranka",-13.659050941467283],["▁Års",-13.65908432006836],["ادا",-13.659104347229004],["▁manifesto",-13.659107208251951],["와의",-13.65910816192627],["lógico",-13.659138679504396],["147",-13.659140586853027],["▁Shko",-13.659173011779783],["ڭىز",-13.659191131591797],["▁techno",-13.659199714660645],["техничк",-13.659202575683594],["▁الإلكتروني",-13.659208297729492],["Peter",-13.65921688079834],["向前",-13.659229278564451],["IKK",-13.659239768981934],["旦",-13.65924072265625],["▁kilala",-13.659269332885742],["上方",-13.65928077697754],["上次",-13.659286499023438],["sikiliza",-13.659296989440918],["▁zdravnik",-13.659364700317385],["▁overall",-13.65939712524414],["▁সাহিত্য",-13.659398078918455],["以下简称",-13.659419059753418],["▁מצליח",-13.659421920776367],["溫暖",-13.659424781799316],["▁श्रम",-13.659461975097656],["ကိစၥ",-13.659475326538086],["▁يونيورسٽي",-13.659476280212402],["ጧ",-13.65947723388672],["▁Iohannis",-13.65947723388672],["▁Najnovšie",-13.65947723388672],["▁complément",-13.65947723388672],["▁müsbət",-13.65947723388672],["▁permohonan",-13.65947723388672],["▁úroveň",-13.65947723388672],["▁διαχείριση",-13.65947723388672],["▁Перейти",-13.65947723388672],["▁Солонгос",-13.65947723388672],["▁правительства",-13.65947723388672],["▁جزئیات",-13.65947723388672],["▁شرايط",-13.65947723388672],["▁जिंदगी",-13.65947723388672],["▁ठाकरे",-13.65947723388672],["▁ਜੁਲਾਈ",-13.65947723388672],["▁ਸਤੰਬਰ",-13.65947723388672],["spetsialist",-13.659478187561035],["▁Written",-13.659478187561035],["▁bekväm",-13.659478187561035],["שטעלן",-13.659479141235352],["▁khiển",-13.659479141235352],["▁biztanle",-13.659480094909668],["▁mööda",-13.659480094909668],["▁יהיו",-13.659480094909668],["ფოსტა",-13.659481048583984],["▁slaapkamer",-13.659481048583984],["ျပည္နယ္",-13.6594820022583],["ฟอร์ม",-13.659482955932615],["▁Брисел",-13.659482955932615],["▁بھٹو",-13.659482955932615],["▁ଦେଖିବାକୁ",-13.65948295593261
5],["▁ვიყავი",-13.659482955932615],["▁sådant",-13.659483909606934],["▁ущерб",-13.659483909606934],["▁banorë",-13.65948486328125],["▁галоўны",-13.65948486328125],["▁реагира",-13.65948486328125],["▁истовремено",-13.659491539001465],["▁жасалған",-13.65949249267578],["▁svētku",-13.659493446350098],["▁aproveitar",-13.659494400024414],["▁referitoare",-13.659497261047363],["espectacle",-13.65949821472168],["▁мулти",-13.65949821472168],["▁SET",-13.659499168395996],["▁ჩანს",-13.659503936767578],["▁таасир",-13.659505844116213],["▁menonton",-13.65950870513916],["▁ھۆ",-13.65950870513916],["▁Voilà",-13.659512519836426],["▁талапкер",-13.65951442718506],["▁فما",-13.659525871276855],["▁மின்",-13.659525871276855],["▁aplikacji",-13.659526824951172],["▁clicca",-13.659549713134766],["▁የአፍሪካ",-13.65956687927246],["▁yèn",-13.659574508666992],["▁ወጣት",-13.659575462341309],["▁klasės",-13.659581184387209],["▁origem",-13.659581184387209],["▁مبا",-13.659582138061523],["カン",-13.659584999084473],["▁höfðu",-13.659589767456056],["▁supermarket",-13.659589767456056],["▁gespeel",-13.659590721130373],["▁возило",-13.6596040725708],["ічним",-13.65960693359375],["▁uguns",-13.65960693359375],["▁Alimenta",-13.659613609313965],["▁руководи",-13.659624099731444],["▁عارف",-13.659624099731444],["ഹം",-13.659640312194824],["▁kauan",-13.659647941589355],["▁ਜੂਨ",-13.659671783447266],["▁aicina",-13.659709930419922],["▁добу",-13.659709930419922],["трол",-13.65972137451172],["کړ",-13.659733772277832],["▁copilului",-13.65974235534668],["రువు",-13.659743309020996],["▁ръцете",-13.659747123718262],["▁بدء",-13.659765243530272],["лицей",-13.659783363342283],["▁ಹುಡುಕ",-13.659785270690918],["५२",-13.65980052947998],["▁povedala",-13.659812927246094],["2021",-13.659822463989258],["▁cunosc",-13.659822463989258],["▁bonum",-13.659829139709473],["▁perheen",-13.659845352172852],["▁करताना",-13.659846305847168],["kkia",-13.6598539352417],["▁순간",-13.659857749938965],["場面",-13.659871101379396],["食べる",-13.65987777709961],["ctos",-13.65988063812256],["▁అందు",-13.659890174865724],["▁پولیسو",-13.659893035888672],["▁градове",-13.659902572631836],["▁Nimi",-13.659904479980469],["▁arvamus",-13.65990924835205],["▁søn",-13.659912109375],["▁Мой",-13.659915924072266],["ΝΕ",-13.659927368164062],["mög",-13.659934043884276],["tyvi",-13.65993881225586],["▁Gloria",-13.65996265411377],["▁немец",-13.659994125366213],["▁הגר",-13.660005569458008],["▁ნაკრებ",-13.660012245178224],["▁Siinä",-13.66001796722412],["ୟର",-13.660036087036133],["▁dysku",-13.660038948059082],["ିୟ",-13.660051345825195],["▁ධන",-13.660053253173828],["▁stelt",-13.660066604614258],["ລັດຖະບານ",-13.66006851196289],["▁კლ",-13.660082817077637],["▁crestere",-13.660111427307127],["Design",-13.660136222839355],["▁Maio",-13.660146713256836],["▁Experten",-13.66015911102295],["注意到",-13.660178184509276],["ලක්",-13.66019058227539],["▁marques",-13.660191535949709],["▁alimento",-13.66019344329834],["したもの",-13.660194396972656],["▁culturel",-13.660198211669922],["▁escrever",-13.660204887390137],["▁ರೈ",-13.660205841064451],["▁भोग",-13.66021728515625],["▁rigid",-13.66022491455078],["शः",-13.66022777557373],["▁bù",-13.660228729248049],["▁europeu",-13.660234451293944],["សុ",-13.66024398803711],["ৰত",-13.66029930114746],["ေနရ",-13.66031551361084],["improv",-13.660320281982422],["▁Fonda",-13.660320281982422],["þe",-13.660321235656738],["▁dydis",-13.660330772399902],["ဝေ",-13.660338401794434],["Від",-13.660346031188965],["եցնել",-13.66036605834961],["ներկայ",-13.660372734069824],["yub",-13.66037368774414],["▁symptom",-13.660374641418455
],["▁skrivit",-13.66039752960205],["▁registrat",-13.660405158996582],["Україн",-13.660411834716797],["▁neðan",-13.660419464111328],["▁condición",-13.660445213317873],["▁KOS",-13.660465240478516],["سىپ",-13.660470962524414],["▁العشر",-13.660476684570312],["čený",-13.660496711730955],["▁തിരു",-13.660507202148438],["فرد",-13.660513877868652],["sänd",-13.660530090332031],["▁పార్",-13.660534858703612],["민주",-13.660551071166992],["▁militaire",-13.66055965423584],["▁ơi",-13.66055965423584],["रः",-13.660571098327637],["ിപ്പിച്ച",-13.660582542419434],["OUS",-13.660606384277344],["▁Kesä",-13.66061782836914],["▁функција",-13.660626411437988],["▁događa",-13.660634994506836],["▁habían",-13.660656929016112],["▁proprietate",-13.66066074371338],["dracht",-13.660672187805176],["▁لاسه",-13.66069793701172],["▁vonë",-13.660710334777832],["geir",-13.660737991333008],["▁ආදර",-13.660780906677246],["ேஷ்",-13.660783767700195],["▁Вечер",-13.660784721374512],["▁blanda",-13.660795211791992],["▁उसी",-13.660804748535156],["▁uzņem",-13.660836219787598],["යෙන්ම",-13.660844802856444],["▁siten",-13.660860061645508],["▁Musk",-13.660871505737305],["▁YAP",-13.660877227783203],["တြ",-13.660879135131836],["▁Пап",-13.660881042480469],["riño",-13.660921096801758],["ได้ง่าย",-13.660922050476074],["▁सत्",-13.660938262939451],["▁охот",-13.660944938659668],["▁oxid",-13.660954475402832],["在国内",-13.660983085632324],["▁shiga",-13.660993576049805],["▁destinada",-13.661029815673828],["▁پرته",-13.661050796508787],["▁बोली",-13.661056518554688],["▁بالع",-13.66106128692627],["ចាំ",-13.661067962646484],["گون",-13.6610746383667],["כז",-13.66107940673828],["▁ALLE",-13.661090850830078],["▁biztonság",-13.661105155944824],["முடியாத",-13.661107063293455],["pät",-13.66111946105957],["▁захід",-13.66112995147705],["ਮਾਂ",-13.661131858825684],["ลัก",-13.661136627197266],["▁classique",-13.661165237426758],["îrê",-13.66117000579834],["▁hunde",-13.661170959472656],["▁ଅନ୍",-13.661178588867188],["제주",-13.661187171936035],["▁свете",-13.661224365234377],["▁фотографија",-13.661273956298828],["▁მონაწილეობა",-13.661274909973145],["▁கிடைக்க",-13.661283493041992],["▁പാട്ട",-13.66128635406494],["▁оглас",-13.661294937133787],["▁plé",-13.661296844482422],["▁ಗಾ",-13.661319732666016],["miljøet",-13.661371231079102],["▁Balkon",-13.66137409210205],["▁documentar",-13.661384582519531],["▁aspects",-13.661385536193848],["▁پات",-13.661396026611328],["▁רפואי",-13.661399841308594],["▁የለ",-13.661437034606934],["פֿר",-13.661441802978516],["▁આં",-13.661446571350098],["▁ప్రాణ",-13.661450386047363],["▁унут",-13.661452293395996],["▁የሚለውን",-13.661457061767578],["▁название",-13.661462783813477],["▁намалява",-13.661484718322754],["ገሩ",-13.6614990234375],["山西",-13.661505699157717],["bogen",-13.661529541015623],["▁clasifica",-13.661535263061523],["მკ",-13.661540031433104],["▁سارا",-13.661545753479004],["▁periodi",-13.661550521850586],["لەت",-13.661553382873535],["ទាន់",-13.6615629196167],["▁elastic",-13.661569595336914],["▁sincs",-13.66157341003418],["θέσεων",-13.661581993103027],["▁iyaga",-13.661585807800291],["schaften",-13.661593437194824],["颇",-13.66161060333252],["惑",-13.661619186401367],["piac",-13.661632537841797],["括",-13.661632537841797],["毀",-13.661639213562012],["▁hjálpa",-13.661645889282228],["Ņ",-13.661649703979492],["▁banga",-13.661650657653809],["▁devices",-13.661654472351074],["137",-13.66165828704834],["DOWNLOAD",-13.661660194396973],["▁Gàidhealtachd",-13.661660194396973],["▁Verbraucher",-13.661660194396973],["▁hứng",-13.661660194396973],["▁işbirliği",-13.661660194396973],["
▁pendaftaran",-13.661660194396973],["▁pomeriggio",-13.661660194396973],["▁trưng",-13.661660194396973],["▁άμεσα",-13.661660194396973],["▁λάθος",-13.661660194396973],["▁μεγάλες",-13.661660194396973],["▁адразу",-13.661660194396973],["▁податків",-13.661660194396973],["▁църква",-13.661660194396973],["▁ہسپتال",-13.661660194396973],["▁ਹੁੰਦੇ",-13.661660194396973],["Ù",-13.661661148071287],["▁desværre",-13.661661148071287],["▁mėgsta",-13.661661148071287],["▁κάποιες",-13.661662101745604],["▁војске",-13.661663055419922],["▁edozein",-13.661664009094238],["▁கிடைத்த",-13.661664009094238],["▁ആശയ",-13.661664009094238],["▁๒๕",-13.661664962768556],["▁betreffende",-13.661665916442873],["▁حقیقی",-13.661665916442873],["▁bermakna",-13.661666870117188],["▁सहकार्य",-13.661667823791504],["▁Федерального",-13.661669731140137],["▁221",-13.661670684814451],["ေကာက္",-13.66167163848877],["▁katakan",-13.66167163848877],["▁Kompleks",-13.66167449951172],["▁постановление",-13.661675453186035],["▁maanantai",-13.661677360534668],["▁दिनुभयो",-13.661683082580566],["▁Enerji",-13.66169261932373],["▁එහිදී",-13.661693572998049],["▁Dzień",-13.661696434020996],["感覚",-13.661698341369627],["▁자세한",-13.661699295043944],["▁अंदर",-13.661703109741213],["ินทร์",-13.661704063415527],["▁seriös",-13.661705017089844],["bruch",-13.661712646484377],["▁omfattende",-13.66171932220459],["ဖို",-13.661720275878906],["▁(2002)",-13.66172218322754],["ที่กําลัง",-13.661724090576172],["ிறது",-13.661730766296388],["差異",-13.661730766296388],["▁poderán",-13.6617431640625],["▁úton",-13.661752700805664],["▁travaille",-13.661765098571776],["▁obliku",-13.661772727966309],["▁زندہ",-13.661773681640623],["झी",-13.66178035736084],["▁gradual",-13.661784172058104],["▁nafsu",-13.661787033081056],["▁Mycket",-13.661787986755373],["▁شايد",-13.66179084777832],["党委",-13.661803245544434],["▁приводит",-13.661812782287598],["▁Zelf",-13.661824226379396],["▁нашому",-13.661827087402344],["▁edota",-13.661837577819824],["ляе",-13.66185188293457],["SÍ",-13.661872863769531],["▁ഇവര്",-13.661876678466797],["גורמים",-13.66188621520996],["▁कसा",-13.661887168884276],["ლიზ",-13.661895751953123],["▁כדור",-13.661895751953123],["▁පෙනී",-13.661905288696287],["▁nýju",-13.661910057067873],["▁ਦੱਸ",-13.661910057067873],["▁dónde",-13.661913871765137],["▁LOL",-13.661921501159668],["▁relacionamento",-13.661931037902832],["▁fața",-13.66195583343506],["▁رھ",-13.661964416503906],["▁Vorstand",-13.661967277526855],["ሙት",-13.661970138549805],["▁منح",-13.661993980407717],["▁পালন",-13.66199779510498],["入学",-13.662004470825195],["▁hmot",-13.662007331848145],["▁cousa",-13.662013053894045],["▁najlepszych",-13.662017822265623],["▁Carles",-13.662023544311523],["tension",-13.662026405334473],["▁работници",-13.662055015563965],["kaarten",-13.662099838256836],["▁vertellen",-13.662102699279783],["深圳市",-13.66211223602295],["393",-13.662134170532228],["Imp",-13.662134170532228],["váltás",-13.662137031555176],["▁Antón",-13.662153244018556],["مايدۇ",-13.6621675491333],["ପୂ",-13.662179946899414],["uyoruz",-13.66219425201416],["▁химия",-13.6622314453125],["中国人民",-13.662237167358398],["تنظيم",-13.662251472473145],["▁osem",-13.66225242614746],["▁sortzen",-13.66230297088623],["ցե",-13.662311553955078],["▁παρέ",-13.662315368652344],["▁przedstawia",-13.662331581115724],["▁појава",-13.662345886230469],["▁уопште",-13.662349700927734],["മാറ്റ",-13.662355422973633],["▁ক্ল",-13.662370681762695],["▁Filter",-13.662378311157228],["engine",-13.662383079528809],["визи",-13.662388801574709],["▁hää",-13.662410736083984],["vatele",-13.66241359
7106934],["▁verloor",-13.662420272827148],["ზოგ",-13.662434577941896],["aksjonen",-13.662446022033691],["▁etmektedir",-13.662449836730955],["▁nervos",-13.66245937347412],["▁независим",-13.662489891052246],["▁байгаад",-13.662521362304688],["ιστών",-13.662525177001951],["▁razão",-13.662540435791016],["型的",-13.66254425048828],["▁nick",-13.662555694580078],["KIL",-13.66257381439209],["ตรี",-13.662577629089355],["▁będziesz",-13.662582397460938],["▁විඳ",-13.662598609924316],["▁1397",-13.66261386871338],["्जा",-13.662631034851074],["lehe",-13.662637710571287],["377",-13.662650108337402],["תכו",-13.6626558303833],["▁справах",-13.66266918182373],["▁карту",-13.66268539428711],["頂きました",-13.66268539428711],["▁sundheds",-13.662734985351562],["▁MOL",-13.662752151489258],["эргэ",-13.662803649902344],["▁Masse",-13.66286563873291],["ρης",-13.662898063659668],["工藝",-13.662908554077148],["▁chặn",-13.66291332244873],["真人",-13.662919044494627],["عالج",-13.662927627563477],["تىك",-13.662930488586426],["▁Sociedade",-13.662944793701172],["šais",-13.662949562072754],["▁menjar",-13.662961959838867],["▁нераз",-13.662961959838867],["▁Friend",-13.662981986999512],["▁demografi",-13.662989616394045],["ावी",-13.662993431091309],["▁papildus",-13.662996292114258],["വിക",-13.663002014160156],["▁quang",-13.663002967834473],["▁varken",-13.663055419921877],["▁তলা",-13.66306495666504],["▁Bliv",-13.663068771362305],["▁redaktør",-13.663079261779783],["žiem",-13.663115501403809],["▁харин",-13.66311740875244],["▁populaire",-13.663140296936035],["นึง",-13.663142204284668],["▁zatrzyma",-13.66315746307373],["مصر",-13.663162231445312],["ပတ်သက်",-13.663167953491213],["なった",-13.66319179534912],["▁tradisi",-13.663216590881348],["▁마을",-13.66323471069336],["▁וועג",-13.66325569152832],["▁לאו",-13.663268089294434],["jskom",-13.66327667236328],["ଫି",-13.66327667236328],["▁mezzi",-13.663280487060549],["思った",-13.663317680358888],["sebenza",-13.663336753845217],["▁ବସ",-13.663337707519531],["▁edifica",-13.663347244262695],["▁predstavljen",-13.663347244262695],["يٽر",-13.663349151611328],["誕生",-13.663359642028809],["▁keino",-13.663360595703123],["▁verplicht",-13.66336441040039],["▁История",-13.66336441040039],["▁Urząd",-13.663378715515137],["▁neroz",-13.663381576538086],["漂亮的",-13.663397789001465],["rreko",-13.66342067718506],["ရပါ",-13.663430213928224],["▁isimli",-13.663434982299805],["▁කරන්නෙ",-13.66343593597412],["▁parazit",-13.663440704345703],["▁కోట్లు",-13.663450241088867],["▁przewodni",-13.66348361968994],["joče",-13.663485527038574],["▁μυστικ",-13.663488388061523],["брон",-13.663501739501951],["ర్థ",-13.66354274749756],["▁bò",-13.66355323791504],["▁planter",-13.66355800628662],["▁anmeldelse",-13.663576126098633],["клет",-13.663586616516112],["▁إل",-13.663610458374023],["พระพุทธ",-13.663623809814451],["▁tuaja",-13.663626670837402],["▁dispara",-13.663631439208984],["ാഴ്ച",-13.663641929626465],["▁кандидати",-13.663657188415527],["тичного",-13.663676261901855],["щую",-13.663694381713867],["اهل",-13.66370964050293],["▁Ром",-13.663713455200195],["▁письмо",-13.66372299194336],["▁spesifik",-13.663731575012209],["നിര",-13.663734436035156],["▁buying",-13.663745880126951],["▁intenc",-13.663761138916016],["дәстүр",-13.663769721984863],["依赖",-13.663787841796877],["茫",-13.66380500793457],["魏",-13.663808822631836],["دغې",-13.663813591003418],["ବାସୀ",-13.663813591003418],["壊",-13.663814544677734],["▁yanğın",-13.663816452026367],["粘",-13.663817405700684],["義大利",-13.663818359375],["なのですが",-13.663819313049316],["複雜",-13.663820266723633],["▁domů",-13.6638250350
95217],["▁najbliższ",-13.663827896118164],["ستقبل",-13.663833618164062],["άνθρωπο",-13.66384220123291],["▁vaara",-13.66384506225586],["សំឡេង",-13.663847923278809],["▁Często",-13.663847923278809],["올림픽",-13.663847923278809],["κάλυψη",-13.663848876953123],["ؓ",-13.663848876953123],["๗",-13.663848876953123],["▁Genedlaethol",-13.663848876953123],["▁Wettbewerb",-13.663848876953123],["▁homenaxe",-13.663848876953123],["▁kohdalla",-13.663848876953123],["▁limpeza",-13.663848876953123],["▁samochodu",-13.663848876953123],["▁thiểu",-13.663848876953123],["▁urobiť",-13.663848876953123],["▁woensdag",-13.663848876953123],["▁ţară",-13.663848876953123],["▁ασφάλεια",-13.663848876953123],["▁айтымында",-13.663848876953123],["▁гісторыя",-13.663848876953123],["▁ушундай",-13.663848876953123],["▁קידום",-13.663848876953123],["▁استراتژی",-13.663848876953123],["▁محکوم",-13.663848876953123],["▁पुढील",-13.663848876953123],["▁กันยายน",-13.663848876953123],["▁มิถุนายน",-13.663848876953123],["▁გამოყენება",-13.663848876953123],["▁አማርኛ",-13.663848876953123],["▁비즈니스",-13.663848876953123],["빅",-13.663848876953123],["▁Milliarden",-13.66384983062744],["▁ihåg",-13.66384983062744],["▁taariikh",-13.66384983062744],["▁Премьер",-13.66384983062744],["▁하드",-13.66384983062744],["▁biggest",-13.663851737976074],["▁ήθελα",-13.66385269165039],["▁будинок",-13.663853645324709],["▁मंगलवार",-13.663853645324709],["opplæring",-13.663854598999023],["▁Sgwrs",-13.663857460021973],["▁Alguns",-13.663858413696287],["▁Kabinet",-13.663859367370604],["▁νέων",-13.663859367370604],["▁виды",-13.663860321044922],["▁catalans",-13.663864135742188],["▁अमित",-13.66386604309082],["建材",-13.663883209228516],["אַקט",-13.663908004760742],["▁강남",-13.663909912109377],["ulator",-13.663928031921388],["▁Wirkung",-13.66392993927002],["▁повышения",-13.66392993927002],["▁sánh",-13.663933753967283],["šķir",-13.663934707641602],["▁ஓர்",-13.663936614990234],["▁ಮನೆಗೆ",-13.663945198059082],["▁maag",-13.663946151733398],["後は",-13.663948059082031],["▁henhold",-13.66396141052246],["andre",-13.663980484008787],["▁zegoen",-13.663983345031738],["▁gwerth",-13.664022445678713],["▁مصوب",-13.664026260375977],["▁индивидуально",-13.664051055908203],["▁explicou",-13.664054870605469],["blanding",-13.664076805114746],["▁제시",-13.664079666137695],["▁Brin",-13.66408634185791],["▁tokrat",-13.664087295532228],["在我们",-13.664112091064451],["ээсээ",-13.664116859436035],["▁Deixa",-13.664119720458984],["▁юридически",-13.664122581481934],["▁alderdi",-13.66412353515625],["▁197",-13.664129257202148],["▁Trots",-13.66415023803711],["ულო",-13.664170265197754],["を中心に",-13.664175033569336],["מבנה",-13.664190292358398],["ଂଚ",-13.664192199707031],["▁sindicat",-13.66420078277588],["▁સિંહ",-13.664212226867676],["▁किये",-13.664216995239258],["▁össz",-13.664217948913574],["ंबा",-13.664230346679688],["▁nepasi",-13.664246559143066],["▁trošku",-13.664250373840332],["▁samej",-13.664263725280762],["કન",-13.66427230834961],["▁aquells",-13.664273262023926],["▁semifinal",-13.664283752441406],["baini",-13.664286613464355],["▁anggaran",-13.664299964904783],["▁eraman",-13.664299964904783],["работи",-13.664325714111328],["Alike",-13.664331436157228],["დებს",-13.66433811187744],["▁નાખ",-13.664341926574709],["liwanag",-13.664349555969238],["▁njihovi",-13.664361953735352],["▁continente",-13.664372444152832],["ooni",-13.66437530517578],["▁memnun",-13.664379119873049],["▁ahir",-13.664381980895996],["GUE",-13.664384841918944],["▁провели",-13.664392471313477],["▁პრეზიდენტის",-13.664401054382324],["▁заключение",-13.664410591125488],["▁kum
pa",-13.664419174194336],["▁smysl",-13.664422035217283],["hlásenie",-13.66443157196045],["льнага",-13.664435386657717],["czesny",-13.664443969726562],["▁realização",-13.66444492340088],["▁LOVE",-13.664458274841309],["ttavia",-13.664470672607422],["▁punten",-13.664472579956056],["▁učenici",-13.664487838745115],["āko",-13.6644926071167],["▁wapi",-13.66451644897461],["កំពុង",-13.66451930999756],["いるので",-13.664556503295898],["▁ಸರ್ಕಾರದ",-13.664562225341797],["creati",-13.664565086364746],["▁Hinter",-13.664566040039062],["▁வலி",-13.664579391479492],["114",-13.66458511352539],["زات",-13.66459846496582],["▁членів",-13.66459846496582],["▁Abra",-13.66460132598877],["▁Bizi",-13.66460418701172],["árok",-13.664612770080566],["▁njeriut",-13.66461944580078],["▁ಏಕ",-13.664628982543944],["▁норми",-13.664645195007324],["▁sajnos",-13.664648056030272],["▁चुनावी",-13.664664268493652],["▁kooban",-13.664669036865234],["▁заявлен",-13.664671897888184],["▁konča",-13.664676666259766],["以免",-13.664677619934082],["▁פרע",-13.66469669342041],["▁Sprzeda",-13.664703369140623],["ováno",-13.6647310256958],["▁Freedom",-13.664753913879396],["▁interesser",-13.664753913879396],["aften",-13.664761543273926],["tâl",-13.664782524108888],["▁아들",-13.664791107177734],["▁되면",-13.664794921875],["▁жилд",-13.664799690246582],["مرشح",-13.664802551269531],["337",-13.66482162475586],["▁järk",-13.664822578430176],["▁ainoa",-13.664824485778809],["▁конвенц",-13.66482639312744],["▁Detalj",-13.664854049682615],["বাসী",-13.66486644744873],["ርሶ",-13.66486930847168],["▁вимоги",-13.664870262145996],["▁նյութ",-13.664888381958008],["防治",-13.66489315032959],["ಬಿಗ್",-13.664909362792969],["▁frequenta",-13.664923667907717],["▁овакв",-13.664924621582031],["ەتلىك",-13.664965629577637],["κριση",-13.66498565673828],["ម៉ូតូ",-13.66502285003662],["frist",-13.665030479431152],["▁Shkup",-13.665038108825684],["▁הכנסת",-13.6650390625],["yksiä",-13.665069580078123],["▁Ус",-13.665072441101074],["▁образува",-13.665103912353516],["▁øvrige",-13.665116310119627],["ಾಲಯ",-13.665120124816896],["ినా",-13.665141105651855],["▁مادة",-13.665152549743652],["▁vitesse",-13.665154457092283],["▁kostet",-13.665172576904297],["ခြေ",-13.665188789367676],["▁cenário",-13.665201187133787],["▁اجتماعي",-13.665216445922852],["▁글을",-13.66522216796875],["ശീല",-13.665233612060549],["▁prudent",-13.665250778198242],["▁славян",-13.665253639221191],["▁ඒකට",-13.66532039642334],["▁פרופ",-13.665328979492188],["புரி",-13.665331840515137],["▁Label",-13.66536808013916],["▁استاندار",-13.665369987487791],["▁ນອກ",-13.665369987487791],["可能です",-13.665374755859377],["▁퇴",-13.665392875671388],["กิจ",-13.665393829345703],["كاميرا",-13.665401458740234],["▁naisten",-13.665411949157717],["Cloud",-13.665412902832031],["▁mendoj",-13.66543960571289],["▁kõikide",-13.665453910827637],["ቾች",-13.66545867919922],["kilder",-13.66546630859375],["▁treći",-13.66547679901123],["▁Protein",-13.665492057800291],["undangan",-13.665493965148926],["▁जाये",-13.665502548217772],["maskine",-13.66550350189209],["▁майно",-13.665523529052734],["雇",-13.665529251098633],["▁Karten",-13.665562629699709],["専用",-13.665570259094238],["▁dragen",-13.66557502746582],["▁معرفة",-13.665584564208984],["▁milliarder",-13.6655855178833],["驱动",-13.665586471557615],["ുമൊക്കെ",-13.665587425231934],["▁střední",-13.665604591369627],["faat",-13.665618896484377],["regionen",-13.665624618530272],["▁vliv",-13.665626525878906],["களால்",-13.66563606262207],["▁convertido",-13.665637969970703],["wną",-13.665644645690918],["▁sanitari",-13.665647506713867],["పేట",-13.66564
94140625],["▁Skatte",-13.6656494140625],["▁suunta",-13.665651321411133],["քային",-13.665656089782717],["▁үзэл",-13.665660858154297],["Zdroj",-13.665671348571776],["wenang",-13.665672302246094],["▁đài",-13.665698051452637],["川普",-13.665716171264648],["glej",-13.665735244750977],["sidig",-13.665746688842772],["▁påske",-13.66575050354004],["АУ",-13.665753364562988],["kyl",-13.665765762329102],["在那",-13.665766716003418],["▁rental",-13.665771484375],["▁condena",-13.665773391723633],["▁профила",-13.665776252746582],["৩৫",-13.665780067443848],["团体",-13.66578769683838],["▁verf",-13.66580581665039],["▁aniversario",-13.665838241577148],["▁hearing",-13.665851593017578],["iisii",-13.665868759155272],["شرك",-13.66588020324707],["▁5.3",-13.665895462036133],["▁consultant",-13.66589641571045],["ngkok",-13.665903091430664],["▁семейство",-13.665911674499512],["▁guter",-13.665929794311523],["▁existentes",-13.665937423706056],["으나",-13.665939331054688],["轨",-13.665947914123535],["▁timme",-13.665953636169434],["varna",-13.66595458984375],["獄",-13.665969848632812],["▁1-4",-13.665971755981444],["موت",-13.665976524353027],["疫苗",-13.665982246398926],["▁ফেসবুক",-13.66598415374756],["▁РАЗ",-13.66599178314209],["quê",-13.665996551513672],["将其",-13.666001319885254],["抵抗",-13.666004180908203],["▁Hizmet",-13.666010856628418],["रानी",-13.666025161743164],["δικα",-13.66602611541748],["▁చేస్తున్నా",-13.666033744812012],["▁Effect",-13.666040420532228],["သမ္မတ",-13.666041374206545],["បច្ចុប្បន្ន",-13.666041374206545],["▁Biserica",-13.666041374206545],["▁Prefeitura",-13.666041374206545],["▁evropian",-13.666041374206545],["▁gníomh",-13.666041374206545],["▁pieniędzy",-13.666041374206545],["▁později",-13.666041374206545],["▁procédure",-13.666041374206545],["▁Сяргей",-13.666041374206545],["▁прибыли",-13.666041374206545],["▁сарадњу",-13.666041374206545],["▁цахилгаан",-13.666041374206545],["▁موسسه",-13.666041374206545],["▁نٿو",-13.666041374206545],["▁कठिन",-13.666041374206545],["▁निर्माता",-13.666041374206545],["▁முஸ்லிம்",-13.666041374206545],["▁విధానం",-13.666041374206545],["▁პრაქტიკ",-13.666041374206545],["핸",-13.666041374206545],["▁بشپړ",-13.66604232788086],["▁тяжело",-13.666043281555176],["▁menú",-13.666045188903809],["▁느끼",-13.666045188903809],["▁ડૉ",-13.666048049926758],["▁นางสาว",-13.66604995727539],["▁நிறுத்த",-13.666050910949709],["▁გინდა",-13.666050910949709],["▁sözleşme",-13.66605281829834],["▁wydarzenia",-13.66605281829834],["▁сонымен",-13.666055679321287],["▁Kandidat",-13.666057586669922],["▁ritorno",-13.666059494018556],["▁diagnoz",-13.666060447692873],["▁ඉදිරියට",-13.666062355041504],["▁пик",-13.666068077087402],["▁Three",-13.6660795211792],["▁хоногийн",-13.666086196899414],["أنظمة",-13.66608715057373],["▁ورکوي",-13.66608715057373],["▁концов",-13.666089057922363],["▁گھو",-13.666091918945312],["▁използвате",-13.666095733642578],["▁tarvitaan",-13.666098594665527],["▁frihet",-13.66610622406006],["▁modification",-13.666110038757324],["▁السابع",-13.66611671447754],["▁आल्या",-13.666118621826172],["▁ኢን",-13.666120529174805],["▁señal",-13.666122436523438],["▁колони",-13.666128158569336],["▁ലേഖനം",-13.666129112243652],["▁ŝtato",-13.666131019592283],["▁hendaklah",-13.666160583496094],["▁기계",-13.666160583496094],["▁사는",-13.66616153717041],["▁Narayan",-13.66616916656494],["▁Vijesti",-13.666177749633787],["▁sensation",-13.666199684143066],["▁remek",-13.666207313537598],["▁művészet",-13.666213035583496],["▁واله",-13.666219711303713],["ringe",-13.666227340698242],["όσ",-13.666237831115724],["▁luonto",-13.66624927520752],["тус",-13.
666257858276367],["▁olmadı",-13.666265487670898],["ღვე",-13.666268348693848],["▁Psalm",-13.666268348693848],["▁kiiresti",-13.666300773620604],["▁højere",-13.666319847106934],["▁catatan",-13.666329383850098],["ባር",-13.666338920593262],["相場",-13.666339874267578],["▁tulevat",-13.666341781616213],["▁Kawasan",-13.66634750366211],["KZ",-13.666349411010742],["▁dhamaan",-13.66635036468506],["lənmə",-13.666362762451172],["ไม่ให้",-13.666362762451172],["▁prys",-13.66641616821289],["▁Tanto",-13.666420936584473],["gių",-13.666421890258787],["دعي",-13.666428565979004],["קשת",-13.66642951965332],["ggle",-13.66643238067627],["tevi",-13.666434288024902],["▁Bezug",-13.66643524169922],["▁värld",-13.666452407836914],["дору",-13.666473388671877],["ਲਰ",-13.66647720336914],["▁çekti",-13.666481018066406],["▁pitä",-13.666489601135254],["▁Kash",-13.666516304016112],["くれる",-13.666528701782228],["▁საზოგადოების",-13.666534423828123],["▁xoş",-13.666536331176758],["▁ଡେ",-13.666537284851074],["ዓለም",-13.666545867919922],["▁позитивно",-13.66655158996582],["▁hudobn",-13.666553497314451],["စိန္",-13.66655445098877],["▁frække",-13.666555404663086],["▁aparato",-13.666559219360352],["จะทํา",-13.666576385498049],["▁Shën",-13.666579246520996],["২৪",-13.666583061218262],["▁زلزله",-13.666586875915527],["ფან",-13.666593551635742],["▁හිනා",-13.666598320007324],["▁దాడి",-13.66659927368164],["▁genauso",-13.666604042053224],["▁کولای",-13.666610717773438],["ətli",-13.666622161865234],["▁sady",-13.666626930236816],["▁блокира",-13.666630744934082],["▁образовање",-13.666638374328612],["▁속에서",-13.666640281677246],["▁Falle",-13.666641235351562],["▁Vicente",-13.666659355163574],["▁ପ୍ରା",-13.666666030883787],["▁Pey",-13.666668891906738],["▁heft",-13.666675567626951],["▁betroffen",-13.666686058044434],["andishi",-13.666687965393066],["▁عباسی",-13.666691780090332],["▁dán",-13.666702270507812],["▁Денес",-13.66671657562256],["خواست",-13.666742324829102],["姓名",-13.66675090789795],["▁súťaže",-13.666760444641112],["чава",-13.666762351989746],["▁قضا",-13.666767120361328],["iPad",-13.666768074035645],["▁پئ",-13.66676902770996],["▁presentato",-13.666793823242188],["▁Baltic",-13.666796684265137],["/11/2018",-13.666813850402832],["▁mistura",-13.666817665100098],["しているので",-13.666826248168944],["registrer",-13.666842460632324],["▁NAK",-13.666851043701172],["fül",-13.66685962677002],["▁कमा",-13.66685962677002],["ètica",-13.66687297821045],["्नुपर्ने",-13.66692352294922],["ার্থ",-13.666926383972168],["定制",-13.666940689086914],["▁fă",-13.666942596435549],["▁мектептер",-13.66696548461914],["▁Луцьк",-13.666982650756836],["ുന്നതും",-13.666983604431152],["▁দেখতে",-13.66700267791748],["▁ສິ່ງ",-13.66702651977539],["களிடம்",-13.667028427124023],["▁නිම",-13.667034149169922],["▁accor",-13.667088508605955],["▁hande",-13.667106628417969],["wności",-13.66712474822998],["▁tietoja",-13.667140007019045],["▁축구",-13.66715145111084],["▁választott",-13.667166709899902],["స్సు",-13.667197227478027],["きている",-13.667198181152344],["▁부모",-13.667201042175291],["ຢູ",-13.66720485687256],["▁ເບິ່ງ",-13.66726016998291],["täydell",-13.667263984680176],["▁سکے",-13.66727066040039],["▁diligent",-13.667271614074709],["▁provis",-13.66728973388672],["▁ಕಷ್ಟ",-13.667305946350098],["הקמת",-13.667309761047363],["とり",-13.66731071472168],["▁interpretar",-13.667325973510742],["▁vulgar",-13.66734504699707],["ในวันที่",-13.667390823364258],["yhteys",-13.667397499084473],["illeadh",-13.667399406433104],["▁Ղ",-13.667423248291016],["▁höger",-13.667431831359863],["ଟୁ",-13.667445182800291],["▁сајту",-13.6674461
3647461],["▁ಸಂಘ",-13.667449951171877],["നാട്ട",-13.667486190795898],["否定",-13.667491912841797],["annan",-13.667492866516112],["дент",-13.667513847351074],["јић",-13.667513847351074],["മെന്റ",-13.667548179626465],["▁látogat",-13.667558670043944],["▁perusta",-13.667593955993652],["сё",-13.667628288269045],["izou",-13.667641639709473],["▁určený",-13.667659759521484],["▁tangannya",-13.66767406463623],["▁alakít",-13.667695045471191],["▁lekko",-13.667701721191406],["▁Hành",-13.667712211608888],["▁dôvod",-13.667716979980469],["▁ашу",-13.667719841003418],["▁flyttet",-13.66773509979248],["効果が",-13.66774082183838],["▁पैदा",-13.667746543884276],["▁келесі",-13.667750358581545],["▁restaurang",-13.66777801513672],["▁ที่พัก",-13.6677827835083],["▁litų",-13.667789459228516],["45)",-13.667800903320312],["лаад",-13.667802810668944],["klās",-13.667807579040527],["өдө",-13.667807579040527],["ਖੀ",-13.667808532714844],["▁տեղեկաց",-13.66783046722412],["▁للف",-13.667835235595703],["िलो",-13.667840957641602],["ttiği",-13.66786766052246],["zakon",-13.667869567871094],["▁shock",-13.667869567871094],["ाट",-13.667875289916992],["ZIN",-13.667896270751951],["▁pracownik",-13.667920112609863],["▁frukost",-13.667924880981444],["▁Boko",-13.667935371398926],["▁møteplass",-13.667939186096191],["92)",-13.667945861816406],["FIN",-13.667953491210938],["▁tânăr",-13.667954444885254],["ഞ്ഞ്",-13.66795539855957],["zand",-13.667960166931152],["▁ಬಲ",-13.667963027954102],["▁stawia",-13.667986869812012],["▁mørk",-13.668008804321287],["യൊരു",-13.66802978515625],["▁웃",-13.66803741455078],["ේද",-13.66805648803711],["▁Philippines",-13.668073654174805],["▁яах",-13.668082237243652],["▁menings",-13.66808795928955],["తొ",-13.668134689331056],["stici",-13.668163299560549],["杨欢",-13.66819190979004],["瞬间",-13.668193817138672],["婷",-13.66820240020752],["▁idadi",-13.668207168579102],["引领",-13.66822910308838],["тігін",-13.668235778808594],["☞",-13.66823673248291],["ปืน",-13.668237686157228],["ออกกําลังกาย",-13.668237686157228],["▁ajută",-13.668237686157228],["▁रुग्ण",-13.668238639831545],["▁According",-13.66823959350586],["▁sperimenta",-13.66823959350586],["▁εναντίον",-13.66823959350586],["▁επιχείρηση",-13.66823959350586],["▁означає",-13.66823959350586],["▁хэзээ",-13.66823959350586],["▁қазақстандық",-13.66823959350586],["▁तनाव",-13.66823959350586],["▁फायदा",-13.66823959350586],["▁ఉద్యమ",-13.66823959350586],["▁ഉദ്യോഗസ്ഥ",-13.66823959350586],["▁သိရသည်။",-13.66823959350586],["▁ថ្លែង",-13.66823959350586],["ჩემპიონ",-13.668240547180176],["▁Panginoon",-13.668240547180176],["▁ഞങ്ങളുടെ",-13.668240547180176],["▁اتخاذ",-13.668241500854492],["▁۲۰۱۸",-13.668241500854492],["▁dodatkowo",-13.668242454528809],["▁marriage",-13.668242454528809],["▁úspech",-13.668243408203123],["▁Igreja",-13.66824436187744],["▁Bonne",-13.668245315551758],["▁Catholic",-13.668245315551758],["▁zadań",-13.668246269226074],["▁कर्ज",-13.668246269226074],["▁επόμενη",-13.668248176574709],["▁állandó",-13.668249130249023],["▁εξουσία",-13.668249130249023],["帳號",-13.66825008392334],["▁peranan",-13.668251991271973],["▁கடைசி",-13.668252944946287],["▁помогает",-13.668253898620604],["▁میتوانید",-13.668257713317873],["▁ఫేస్",-13.668258666992188],["▁ጥናት",-13.668258666992188],["លាន",-13.66826057434082],["ឃាត់ខ្លួន",-13.668262481689451],["ចា",-13.668265342712402],["voeg",-13.66826629638672],["▁IKEA",-13.668268203735352],["▁Нужно",-13.668275833129885],["▁ಶೋ",-13.668280601501465],["▁דאנא",-13.66828155517578],["▁बेटे",-13.66828155517578],["▁रिलीज",-13.66828155517578],["放松",-13.668285369873049],["▁speciaal",-1
3.668292045593262],["Innen",-13.668292999267578],["▁valdymo",-13.66829776763916],["▁јазици",-13.668298721313477],["เอ็น",-13.668310165405272],["打电话",-13.66832447052002],["ଗୁଡିକ",-13.668336868286133],["▁ಆಗಿದೆ",-13.668336868286133],["▁olduğumuz",-13.66833782196045],["▁innstillinger",-13.668339729309082],["▁Shp",-13.668341636657717],["▁خلاص",-13.668341636657717],["▁итгэл",-13.668344497680664],["▁сучасны",-13.668346405029297],["▁보니",-13.668352127075195],["▁የአንድ",-13.66835880279541],["यन्त",-13.668363571166992],["▁ঘটে",-13.668375968933104],["▁kopš",-13.66838550567627],["▁Chill",-13.668402671813965],["▁įranga",-13.668402671813965],["回报",-13.668416976928713],["▁Informació",-13.668417930603027],["▁التحقيق",-13.668421745300291],["▁българската",-13.66843605041504],["▁Merah",-13.668448448181152],["的情況",-13.66846752166748],["పాటి",-13.668485641479492],["פקיד",-13.66848850250244],["▁sīk",-13.668495178222656],["近く",-13.668497085571287],["ขนม",-13.668512344360352],["▁تبع",-13.668523788452148],["▁मार्",-13.66854763031006],["國民",-13.668553352355955],["▁ग्या",-13.668585777282717],["▁použitie",-13.668604850769045],["▁Ленин",-13.668604850769045],["▁CLA",-13.66861057281494],["staafu",-13.668625831604004],["▁Pika",-13.6686429977417],["▁Bulu",-13.66865348815918],["▁Ð",-13.668673515319824],["▁საუკუნე",-13.668695449829102],["▁informoj",-13.668697357177734],["▁розныя",-13.668709754943848],["पुरा",-13.668713569641112],["▁თეთრი",-13.668745994567873],["มาให้",-13.66875457763672],["కాయ",-13.66876983642578],["▁الحالة",-13.668792724609377],["▁krever",-13.668828964233398],["▁217",-13.668829917907717],["▁міндет",-13.668843269348145],["фарб",-13.668858528137209],["▁negatív",-13.668878555297852],["Sab",-13.6688814163208],["▁드립니다",-13.668893814086914],["BEL",-13.66890811920166],["▁konserva",-13.66892147064209],["▁описа",-13.668922424316406],["Ис",-13.668925285339355],["▁pejy",-13.668943405151367],["▁sirve",-13.6689453125],["fogyasztás",-13.668972969055176],["dhana",-13.668974876403809],["▁lomba",-13.668986320495604],["حدود",-13.668994903564451],["▁susjed",-13.669017791748049],["▁ylös",-13.66904354095459],["инт",-13.66905117034912],["▁Kiek",-13.669058799743652],["bevægelse",-13.669074058532717],["ရှု",-13.66908359527588],["▁бәрі",-13.66909885406494],["▁냉",-13.669100761413574],["▁Arabi",-13.669118881225586],["games",-13.6691255569458],["受理",-13.66913890838623],["वाणी",-13.669149398803713],["6,5",-13.669158935546877],["▁(39)",-13.669158935546877],["▁դեր",-13.66916561126709],["▁šak",-13.669174194335938],["▁fresca",-13.669175148010254],["ጠቅላይ",-13.669198989868164],["▁bloggar",-13.66920280456543],["▁припи",-13.669219017028809],["дайте",-13.669221878051758],["▁kukaan",-13.669243812561035],["cheza",-13.6692476272583],["对他",-13.669264793395996],["شین",-13.669270515441896],["劇情",-13.66927433013916],["kleur",-13.669276237487791],["▁røv",-13.669278144836426],["▁előbb",-13.669296264648438],["▁Olga",-13.669299125671388],["3.0;",-13.669315338134766],["121",-13.669321060180664],["آور",-13.669336318969728],["FW",-13.669337272644045],["▁Karimov",-13.669340133666992],["▁Kredi",-13.669346809387209],["▁займ",-13.669366836547852],["חמישי",-13.669367790222168],["▁kumuha",-13.669374465942385],["▁kterých",-13.66938304901123],["▁1848",-13.669389724731444],["▁વાતો",-13.669398307800291],["альными",-13.669405937194824],["▁açma",-13.669411659240724],["常见的",-13.669416427612305],["▁бош",-13.669441223144531],["▁ತಂದೆ",-13.669443130493164],["ټل",-13.66945457458496],["ត្តិ",-13.669486045837402],["แอน",-13.669493675231934],["▁casse",-13.66950225830078],["herbergi
",-13.669510841369627],["▁fraude",-13.669510841369627],["▁Dentro",-13.66952133178711],["prosessi",-13.6695556640625],["▁Zee",-13.66956615447998],["▁የቅ",-13.66957187652588],["▁meghatároz",-13.669572830200195],["ನರ್",-13.669575691223145],["iskajā",-13.669580459594728],["▁GEL",-13.66958236694336],["קיימת",-13.669590950012209],["mişəm",-13.669594764709473],["நாட்ட",-13.669604301452637],["▁پری",-13.669610023498535],["▁Vlast",-13.669614791870115],["ickú",-13.6696195602417],["жана",-13.669620513916016],["ເມື່ອ",-13.669629096984863],["▁paura",-13.669633865356444],["なか",-13.669669151306152],["▁zwart",-13.66971206665039],["▁Bosna",-13.66976547241211],["ന്മാര്",-13.66976833343506],["ഗം",-13.669770240783691],["を行って",-13.66977310180664],["ሰነ",-13.669788360595703],["قترح",-13.669801712036133],["yıb",-13.669815063476562],["▁advance",-13.669822692871094],["ΑΚ",-13.66985034942627],["▁upraw",-13.66985034942627],["০৩",-13.669864654541016],["zinha",-13.669866561889648],["тувати",-13.669888496398926],["gebruik",-13.669891357421877],["سفر",-13.669901847839355],["јев",-13.669904708862305],["ESH",-13.669916152954102],["▁vyras",-13.669922828674316],["▁Pais",-13.669927597045898],["した場合",-13.669963836669922],["rún",-13.669987678527832],["vajadus",-13.669995307922363],["▁գնում",-13.669995307922363],["▁Europeo",-13.669998168945312],["თითებ",-13.670022010803224],["thé",-13.670023918151855],["▁पड़ा",-13.670032501220703],["eeuw",-13.670036315917969],["▁செல்வ",-13.6700439453125],["▁मेला",-13.670051574707031],["▁बम",-13.670059204101562],["ಸ್ಟ",-13.67006778717041],["▁Ramadan",-13.67007541656494],["▁Министар",-13.670086860656738],["edług",-13.670087814331056],["▁bessere",-13.6701078414917],["طی",-13.670114517211914],["▁добър",-13.67011547088623],["▁предаде",-13.670120239257812],["▁आँ",-13.670160293579102],["ህና",-13.670161247253418],["▁Aç",-13.670162200927734],["▁saioa",-13.670180320739746],["CW",-13.670272827148438],["▁аткаруу",-13.67029094696045],["ของเธอ",-13.670313835144045],["ួរ",-13.670345306396484],["ተቃዋሚ",-13.670352935791016],["пункт",-13.67036247253418],["▁വർഷ",-13.670366287231444],["謀",-13.670388221740724],["▁naredn",-13.67038917541504],["腫",-13.67038917541504],["晃",-13.670405387878418],["槽",-13.670408248901367],["钻",-13.670412063598633],["▁можам",-13.670419692993164],["有关部门",-13.67043113708496],["攻略",-13.67043399810791],["▁општини",-13.670437812805176],["រាជធានី",-13.670440673828123],["Магілёў",-13.67044162750244],["បញ្ជូន",-13.67044162750244],["▁Hưng",-13.67044162750244],["▁Mwalimu",-13.67044162750244],["▁tersendiri",-13.67044162750244],["▁venerdì",-13.67044162750244],["▁webbläsare",-13.67044162750244],["▁гісторыю",-13.67044162750244],["▁друзей",-13.67044162750244],["▁співробітник",-13.67044162750244],["▁علائقي",-13.67044162750244],["▁స్పెషల్",-13.67044162750244],["▁Catherine",-13.670442581176758],["▁Ciidanka",-13.670442581176758],["▁səlahiyyət",-13.670442581176758],["▁მოსახლეობა",-13.670442581176758],["▁министрлиги",-13.670443534851074],["▁الهاتف",-13.670443534851074],["▁Kedves",-13.67044448852539],["▁نیچے",-13.67044448852539],["သို့မဟုတ်",-13.670445442199709],["▁шинжлэх",-13.670445442199709],["▁ବିଶେଷ",-13.670445442199709],["▁දෙදෙනා",-13.670445442199709],["シュ",-13.670445442199709],["▁listopadu",-13.670448303222656],["▁επιχειρήσεις",-13.670449256896973],["▁mantenimiento",-13.670450210571287],["▁recibido",-13.670454025268556],["▁Mumbai",-13.670456886291504],["▁കേള്",-13.670458793640137],["▁Suure",-13.67046070098877],["▁alınıb",-13.670475006103516],["▁Dadka",-13.670487403869627],["▁situata",-13.670488357543944],["IỆN
",-13.670489311218262],["▁фундамент",-13.67049503326416],["▁montagne",-13.670495986938477],["▁العلماء",-13.670498847961426],["шыға",-13.670501708984377],["▁помню",-13.670502662658691],["▁Еўра",-13.670504570007324],["▁לומר",-13.67050838470459],["▁sitting",-13.67051124572754],["نديون",-13.670512199401855],["▁Яр",-13.670513153076172],["▁kenaikan",-13.670536041259766],["ksilla",-13.670539855957031],["▁Левски",-13.670544624328612],["▁analizar",-13.670546531677246],["▁perfekta",-13.670568466186523],["мело",-13.670570373535156],["тыкі",-13.670571327209473],["▁територията",-13.670573234558104],["ფიცი",-13.67059326171875],["▁ISLAM",-13.670597076416016],["▁પતિ",-13.670599937438965],["▁měsíců",-13.670607566833496],["storm",-13.670609474182127],["▁הטיפול",-13.670609474182127],["▁കാര",-13.670621871948242],["▁الجميل",-13.670625686645508],["▁वाचा",-13.670635223388672],["▁níl",-13.670639038085938],["▁زود",-13.670639991760254],["פיות",-13.670644760131836],["회를",-13.670659065246582],["empêche",-13.670665740966797],["▁betingelser",-13.670683860778809],["▁periferi",-13.67070770263672],["ÇE",-13.67072296142578],["τηρίου",-13.670724868774414],["forsvar",-13.670737266540527],["▁потребна",-13.670756340026855],["ബ്രാ",-13.670759201049805],["▁Bilboko",-13.670787811279297],["▁szállás",-13.67079734802246],["▁manakala",-13.670801162719728],["ಸದ",-13.670828819274902],["▁hundre",-13.670833587646484],["ක්ද",-13.670843124389648],["▁लगाने",-13.670853614807127],["▁policie",-13.670867919921877],["▁wprowadza",-13.670872688293455],["کام",-13.67087459564209],["7.0",-13.670876502990724],["▁Geno",-13.670892715454102],["▁Serveis",-13.670924186706545],["▁мастак",-13.67092514038086],["▁విజయం",-13.670930862426758],[".’’",-13.670933723449709],["▁MHz",-13.670934677124023],["נין",-13.670942306518556],["АВА",-13.670947074890137],["▁talált",-13.670950889587402],["▁mennyire",-13.670964241027832],["ارے",-13.670968055725098],["▁daquele",-13.67096996307373],["的活動",-13.670977592468262],["▁огр",-13.670979499816896],["▁obeh",-13.6710205078125],["źnie",-13.671022415161133],["ขับรถ",-13.671038627624512],["▁హె",-13.671049118041992],["ሕዝቡ",-13.671050071716309],["▁optar",-13.67105770111084],["▁სამინისტრო",-13.671058654785156],["räum",-13.671093940734863],["ляюць",-13.671100616455078],["▁-->",-13.671110153198242],["▁начинает",-13.671142578125],["▁лоши",-13.67115592956543],["ጹ",-13.67117404937744],["▁cartes",-13.671199798583984],["ေကာ",-13.671208381652832],["לוק",-13.671236991882324],["各方",-13.67123794555664],["वू",-13.671238899230955],["▁умения",-13.671239852905272],["▁полной",-13.671242713928224],["grind",-13.671244621276855],["▁itsas",-13.671247482299805],["▁sekva",-13.671269416809082],["の世界",-13.671270370483398],["▁екстре",-13.671277046203612],["▁もちろん",-13.671278953552246],["▁tudung",-13.671283721923828],["▁iniziato",-13.671306610107422],["▁differ",-13.671310424804688],["حية",-13.671343803405762],["▁iyada",-13.671343803405762],["क्यू",-13.67136287689209],["▁evidencia",-13.671368598937988],["▁ہار",-13.67138385772705],["Big",-13.671385765075684],["تعيين",-13.671393394470217],["사랑",-13.671394348144531],["▁cultivo",-13.671396255493164],["十字",-13.671404838562012],["▁이르",-13.671411514282228],["pesu",-13.671414375305176],["szolgálat",-13.671414375305176],["३९",-13.67142391204834],["▁jendea",-13.671451568603516],["▁දවස්",-13.671453475952148],["▁Argi",-13.671454429626465],["▁තරම",-13.671462059020996],["▁giet",-13.671468734741213],["相手の",-13.671469688415527],["不见",-13.67148780822754],["一直是",-13.67149829864502],["పుడు",-13.67152500152588],["▁дијалог",-13.67152
500152588],["▁nyeri",-13.671533584594728],["▁இந்து",-13.671533584594728],["笑容",-13.671536445617676],["ಗೌಡ",-13.671549797058104],["▁Rig",-13.671573638916016],["▁τρόπος",-13.671578407287598],["▁پلو",-13.671587944030762],["▁profundo",-13.671592712402344],["▁poznati",-13.67159938812256],["▁odvija",-13.671601295471191],["టెక్",-13.671602249145508],["▁гриб",-13.671615600585938],["yrityksi",-13.671626091003418],["▁Lyd",-13.671640396118164],["▁Laki",-13.671656608581545],["▁Hüseynov",-13.671661376953123],["▁հե",-13.671673774719238],["EEE",-13.671714782714844],["▁palk",-13.671730041503906],["356",-13.671744346618652],["öð",-13.67176342010498],["дира",-13.671788215637209],["ляк",-13.671813011169434],["▁дайын",-13.671829223632812],["▁lahku",-13.671831130981444],["haupt",-13.671857833862305],["▁വെച്ച",-13.67186164855957],["▁fortë",-13.671871185302734],["sufficient",-13.67188835144043],["▁patogu",-13.671896934509276],["▁කොර",-13.671940803527832],["▁piros",-13.671942710876465],["ULE",-13.671957969665527],["▁인도",-13.67196273803711],["▁süreç",-13.671971321105955],["▁Zugang",-13.672006607055664],["ตู",-13.672012329101562],["▁تجاه",-13.672012329101562],["▁سکول",-13.672014236450195],["nostjo",-13.672026634216309],["▁særlige",-13.67204761505127],["▁вопроса",-13.672054290771484],["▁hoff",-13.67207145690918],["지에",-13.672086715698242],["▁MHP",-13.672118186950684],["транс",-13.67215061187744],["หนาว",-13.67217254638672],["▁výkonu",-13.672178268432615],["▁ಇನ್ನ",-13.672183990478516],["▁গেল",-13.672184944152832],["▁संस्कृत",-13.67219066619873],["ėta",-13.67219352722168],["▁naslednji",-13.672194480895996],["लाइट",-13.672211647033691],["käynti",-13.672215461730955],["▁Acho",-13.672234535217283],["▁wachten",-13.67223834991455],["titel",-13.6722412109375],["тримати",-13.672273635864258],["▁લિ",-13.672286033630373],["cykel",-13.672296524047852],["▁Groen",-13.672300338745115],["▁исследова",-13.672307014465332],["tarin",-13.672308921813965],["▁ragu",-13.672322273254396],["ញ្ញ",-13.672332763671877],["▁زخم",-13.672370910644531],["好奇",-13.672380447387695],["▁võimaluse",-13.672386169433594],["▁देह",-13.672402381896973],["▁სააგენტო",-13.67241096496582],["gelassen",-13.672417640686035],["▁полиција",-13.672419548034668],["▁экологическ",-13.67245388031006],["▁Godt",-13.672487258911133],["▁227",-13.67249870300293],["▁LINE",-13.67250633239746],["听说",-13.672552108764648],["▁статистик",-13.672561645507812],["▁aliud",-13.67257308959961],["▁mjek",-13.67257595062256],["▁খেল",-13.672579765319824],["袭",-13.672589302062988],["柴",-13.67259120941162],["譽",-13.672598838806152],["途径",-13.672607421875],["约翰",-13.672611236572266],["实验室",-13.672614097595217],["挨",-13.672627449035645],["▁балл",-13.672630310058594],["卑",-13.672630310058594],["▁хэлэлц",-13.672635078430176],["▁вечера",-13.672643661499023],["российско",-13.672646522521973],["ฤดูกาล",-13.672647476196287],["มวล",-13.672648429870604],["လုပ္ငန္း",-13.672649383544922],["▁alespoň",-13.672649383544922],["▁ausgestattet",-13.672649383544922],["▁deepthroat",-13.672649383544922],["▁sodelovanje",-13.672649383544922],["▁акыркы",-13.672649383544922],["▁осигури",-13.672649383544922],["▁пользователя",-13.672649383544922],["▁ریڈیو",-13.672649383544922],["▁عبارة",-13.672649383544922],["▁নিরাপত্তা",-13.672649383544922],["▁விவசாய",-13.672649383544922],["▁మళ్ళీ",-13.672649383544922],["▁ಕಚೇರಿ",-13.672649383544922],["▁განსაკუთრებით",-13.672649383544922],["▁ትርጉም",-13.672649383544922],["简介",-13.672649383544922],["▁Keuangan",-13.672650337219238],["▁poskytovan",-13.672650337219238],["▁موزیک",-13.672650337219238],
["▁نیشنل",-13.672650337219238],["▁bynnag",-13.672651290893556],["▁Όμως",-13.672651290893556],["▁присвячен",-13.672651290893556],["▁कट्टर",-13.672652244567873],["▁ಇನ್ನಷ್ಟು",-13.672652244567873],["▁заболевания",-13.672653198242188],["▁sipariş",-13.672654151916504],["▁допринос",-13.672654151916504],["έτ",-13.67265510559082],["▁Which",-13.67265510559082],["▁ரூபாய்",-13.67265510559082],["▁පෙරමුණ",-13.67265510559082],["▁інвестицій",-13.672657012939451],["▁азыркы",-13.67265796661377],["▁스마트",-13.67265796661377],["▁خواجہ",-13.672662734985352],["▁동시에",-13.672663688659668],["▁કવિતા",-13.672667503356934],["▁தமிழில்",-13.672670364379885],["▁partecipanti",-13.672672271728516],["▁இதற்கு",-13.672672271728516],["▁juice",-13.672674179077148],["cházejí",-13.672675132751465],["▁სრულიად",-13.67267608642578],["nictwo",-13.672677040100098],["▁රටවල්",-13.672679901123049],["▁উপায়",-13.672682762145996],["ҒЫ",-13.672683715820312],["▁mengangkat",-13.672683715820312],["เที่ยวบิน",-13.672688484191896],["▁அவர்களின்",-13.672689437866213],["▁שאנחנו",-13.672691345214844],["▁sportovní",-13.672693252563477],["▁случилось",-13.672698974609377],["하십시오",-13.672699928283691],["▁Tiếp",-13.672703742980955],["▁դատական",-13.672707557678224],["▁onderdeel",-13.672714233398438],["▁zvierat",-13.672719955444336],["ដាច់",-13.672721862792969],["ผี",-13.672725677490234],["▁உரை",-13.672725677490234],["▁речиси",-13.67272663116455],["▁svjetlo",-13.672727584838867],["قنوات",-13.672728538513184],["کنی",-13.672731399536133],["▁следећи",-13.672735214233398],["▁ابلاغ",-13.67275047302246],["▁rusak",-13.672754287719728],["▁Особенно",-13.672755241394045],["▁அழி",-13.672757148742676],["iwas",-13.672758102416992],["▁კავშირი",-13.672760009765623],["мість",-13.672765731811523],["▁වේවා",-13.67276668548584],["കളാണ്",-13.67278289794922],["מסחר",-13.672825813293455],["▁lubię",-13.672828674316406],["▁proizvodnje",-13.672829627990724],["的最大",-13.672834396362305],["▁საბჭოს",-13.67283821105957],["▁இல்",-13.672840118408203],["▁utifrån",-13.672845840454102],["▁državne",-13.672859191894531],["▁bbw",-13.672860145568848],["▁olduğundan",-13.67287254333496],["▁vegetar",-13.67287254333496],["ພັດ",-13.672882080078123],["పిల్ల",-13.672889709472656],["ထု",-13.672889709472656],["▁شرقی",-13.672890663146973],["សួរ",-13.672891616821287],["යාට",-13.672896385192873],["▁Nước",-13.672911643981934],["metal",-13.6729154586792],["MW",-13.67292594909668],["▁아동",-13.672927856445312],["▁svētki",-13.672929763793944],["▁punën",-13.672935485839844],["▁adicionais",-13.67294216156006],["▁فكرة",-13.672951698303224],["▁понимать",-13.672962188720703],["▁آنکه",-13.67299461364746],["▁oraş",-13.673030853271484],["▁tajemnic",-13.67303466796875],["▁לינק",-13.67303466796875],["Кол",-13.673044204711914],["წოდება",-13.673067092895508],["▁landsins",-13.673073768615724],["▁պետության",-13.673087120056152],["複数の",-13.67310905456543],["▁vitória",-13.673113822937012],["▁εύ",-13.673133850097656],["VIT",-13.673140525817873],["▁gho",-13.67314624786377],["▁zyrtare",-13.673151016235352],["するため",-13.673152923583984],["▁Haw",-13.6731538772583],["▁ಹಾಗ",-13.67318630218506],["▁breytingar",-13.673202514648438],["▁dicks",-13.673203468322754],["▁אונטער",-13.67320442199707],["005",-13.67323112487793],["żał",-13.673232078552246],["▁роля",-13.673233032226562],["ábel",-13.67324161529541],["вагі",-13.673254013061523],["▁sơn",-13.673262596130373],["▁Лог",-13.673266410827637],["øse",-13.6732816696167],["▁čerp",-13.67329216003418],["ংক",-13.67330551147461],["▁Đồ",-13.67332363128662],["kkeiden",-13.67334270477295],["TAJ",-13.67334
3658447266],["▁магистр",-13.673344612121582],["▁sölu",-13.673348426818848],["ිල්ල",-13.673361778259276],["individu",-13.673367500305176],["느",-13.673409461975098],["▁Ifølg",-13.67341136932373],["▁Lạc",-13.67344093322754],["▁graviditet",-13.67344093322754],["▁ELS",-13.67344856262207],["▁អតីត",-13.673449516296388],["▁අති",-13.673453330993652],["借金",-13.673469543457031],["International",-13.673474311828612],["գնա",-13.673495292663574],["▁fisica",-13.673502922058104],["્તિ",-13.67350959777832],["▁verbind",-13.67352294921875],["ဖြစ်ပါတယ်။",-13.673529624938965],["याँ",-13.673544883728027],["quera",-13.673548698425291],["تعل",-13.673562049865724],["▁domicilio",-13.673569679260254],["ាន់",-13.673572540283203],["▁Københavns",-13.673579216003418],["▁iştirakı",-13.673590660095217],["▁prato",-13.673590660095217],["▁знакомы",-13.673640251159668],["▁ກໍາລັງ",-13.67365550994873],["▁ਚਲ",-13.673662185668944],["ňujú",-13.673672676086426],["រស់នៅ",-13.673726081848145],["यत",-13.673742294311523],["▁ачуу",-13.673752784729004],["たちが",-13.67375659942627],["▁행위",-13.67378044128418],["qvist",-13.673784255981444],["വം",-13.673805236816406],["▁Quali",-13.67383098602295],["ැති",-13.673846244812012],["▁Сын",-13.673863410949709],["ologica",-13.673870086669922],["liğinde",-13.673872947692873],["▁koselig",-13.67387866973877],["▁लॉ",-13.673891067504885],["▁നിന്റെ",-13.673906326293944],["▁ຈໍານວນ",-13.673907279968262],["▁คอนโด",-13.67391586303711],["▁оставил",-13.673919677734377],["▁ottima",-13.673931121826172],["车型",-13.673932075500488],["▁आती",-13.673933029174805],["▁kurias",-13.673938751220703],["▁spiser",-13.673943519592283],["▁раждане",-13.673951148986816],["▁Rene",-13.673959732055664],["▁команды",-13.673961639404297],["▁çand",-13.673998832702637],["▁מאות",-13.67400074005127],["▁pracovné",-13.67404079437256],["▁вряд",-13.674043655395508],["▁плаж",-13.67405605316162],["▁äly",-13.674060821533203],["сине",-13.67406177520752],["▁სახლ",-13.674071311950684],["ೇರ",-13.67409896850586],["▁хоч",-13.674113273620604],["▁console",-13.674182891845703],["▁cercano",-13.674201011657717],["172",-13.674220085144045],["дердин",-13.674222946166992],["ganga",-13.674236297607422],["▁чија",-13.67428970336914],["дзяржаў",-13.67430591583252],["▁ክርስቲያ",-13.674308776855469],["Rİ",-13.674315452575684],["▁Kuul",-13.67432689666748],["▁uvod",-13.67434787750244],["▁မွတ္",-13.6743745803833],["+4",-13.674381256103516],["▁انتظامی",-13.674396514892578],["▁الاقتصاد",-13.674415588378906],["idean",-13.67442798614502],["tulis",-13.674442291259766],["đeni",-13.674442291259766],["เชื่อว่า",-13.674442291259766],["▁respectiva",-13.67444896697998],["▁Beer",-13.674466133117676],["▁ngayo",-13.674468040466309],["öllä",-13.67446994781494],["סות",-13.674480438232422],["šū",-13.674483299255373],["▁Schiff",-13.674504280090332],["ගත්ත",-13.67451286315918],["ΔΑ",-13.67452907562256],["▁nasza",-13.674531936645508],["prova",-13.674546241760254],["▁visão",-13.674555778503418],["tasunaren",-13.674561500549316],["不是很",-13.674565315246582],["▁Ingin",-13.674617767333984],["▁najem",-13.674633026123049],["्नुहोस",-13.674635887145996],["heshin",-13.674638748168944],["▁Informācija",-13.674639701843262],["ступить",-13.674654006958008],["▁kauba",-13.674663543701172],["توم",-13.674667358398438],["▁vallen",-13.674681663513184],["ښه",-13.674687385559082],["集合",-13.674690246582031],["▁kubo",-13.674691200256348],["▁பீ",-13.67470932006836],["LIV",-13.67471981048584],["有一定的",-13.674720764160156],["▁multo",-13.674721717834473],["▁construcció",-13.674724578857422],["веда",-13.674737930297852],["▁घ
ुस",-13.674739837646484],["▁ruske",-13.674757957458496],["wissenschaft",-13.67476749420166],["▁feia",-13.674771308898926],["喷",-13.674814224243164],["▁critical",-13.67482566833496],["▁fortelle",-13.674837112426758],["訪れ",-13.67484188079834],["ทดลอง",-13.674860000610352],["พัทยา",-13.674861907958984],["▁begeistert",-13.674861907958984],["▁fenómeno",-13.674861907958984],["▁giấc",-13.674861907958984],["▁kľúč",-13.674861907958984],["▁trầm",-13.674861907958984],["▁ανακοίνωση",-13.674861907958984],["▁διεύθυνση",-13.674861907958984],["▁Димитър",-13.674861907958984],["▁рұқсат",-13.674861907958984],["▁مظلوم",-13.674861907958984],["▁ট্রাম্প",-13.674861907958984],["▁ਬੱਚਿਆਂ",-13.674861907958984],["▁ଆହତ",-13.674861907958984],["▁షూటింగ్",-13.674861907958984],["▁დასავლეთ",-13.674861907958984],["▁zpracování",-13.6748628616333],["▁বুঝ",-13.6748628616333],["▁פרויקט",-13.674864768981934],["エリア",-13.674864768981934],["▁detaylı",-13.674866676330566],["▁почиње",-13.674866676330566],["▁վերադարձ",-13.674867630004885],["聲音",-13.674867630004885],["▁гезит",-13.674869537353516],["▁atpūta",-13.674872398376465],["▁возникает",-13.674872398376465],["ကွက်",-13.674874305725098],["▁ਜਿੱਤ",-13.674874305725098],["を実施",-13.674878120422363],["▁araşdırma",-13.67487907409668],["▁לציון",-13.67487907409668],["▁Menarik",-13.674880981445312],["▁mnohem",-13.674885749816896],["專門",-13.674890518188477],["▁persoană",-13.674891471862791],["▁ነበሩ",-13.674893379211426],["▁версии",-13.67489528656006],["▁නිතර",-13.674897193908691],["公尺",-13.674901008605955],["raid",-13.674903869628906],["▁jarraitzen",-13.674904823303224],["▁torr",-13.674904823303224],["▁pagină",-13.674908638000488],["▁तिमी",-13.674909591674805],["चन्द्र",-13.674922943115234],["öyü",-13.67492389678955],["▁നാലു",-13.674927711486816],["▁ተማሪዎች",-13.674927711486816],["▁кінця",-13.674931526184082],["▁opptatt",-13.674935340881348],["▁yetiştir",-13.674942016601562],["▁vhodná",-13.674944877624512],["▁פארק",-13.674962997436523],["版权",-13.674962997436523],["▁schauen",-13.67498016357422],["язків",-13.674981117248535],["▁citizen",-13.674983978271484],["▁îţi",-13.675004959106444],["▁Heer",-13.675010681152344],["▁circles",-13.675016403198242],["▁இன",-13.675016403198242],["હું",-13.675020217895508],["▁Answer",-13.675026893615724],["▁helbest",-13.675028800964355],["▁Nexus",-13.675033569335938],["▁өңдеу",-13.675041198730469],["▁будучы",-13.675043106079102],["ტეხ",-13.675044059753418],["▁lektor",-13.675050735473633],["▁большую",-13.675055503845217],["▁jaminan",-13.675058364868164],["▁بالر",-13.675058364868164],["▁chętnie",-13.675067901611328],["▁művel",-13.67507553100586],["▁ফেল",-13.675081253051758],["▁бодит",-13.675082206726074],["▁राख्ने",-13.675090789794922],["▁аварга",-13.675091743469238],["▁abuso",-13.67509651184082],["▁глобальн",-13.675106048583984],["▁casca",-13.675122261047363],["▁Павлов",-13.675140380859377],["これからも",-13.675148010253906],["▁الرسمية",-13.675152778625488],["▁uključi",-13.6751708984375],["並沒有",-13.675182342529297],["▁Систем",-13.675196647644045],["ချစ်",-13.675203323364258],["▁fylgi",-13.675216674804688],["Roma",-13.675220489501951],["високи",-13.67522144317627],["▁Watt",-13.675226211547852],["▁Drop",-13.6752290725708],["եին",-13.6752347946167],["▁обед",-13.675235748291016],["▁හදන්න",-13.675243377685549],["▁senden",-13.675260543823242],["ીત",-13.675265312194824],["îyan",-13.675267219543455],["▁ndodhi",-13.67527961730957],["▁Sektor",-13.675292015075684],["▁competition",-13.675293922424316],["▁igualmente",-13.675310134887695],["ДК",-13.675311088562012],["▁OBS",-13.67531108856
2012],["▁சர்",-13.675320625305176],["▁dotyczą",-13.675321578979492],["▁Март",-13.675321578979492],["bajnokság",-13.675333976745604],["ਿੰਦ",-13.675339698791504],["▁맞는",-13.675344467163086],["▁pripremi",-13.67534637451172],["▁نیستند",-13.675371170043944],["RSA",-13.675373077392578],["▁Iulia",-13.675419807434082],["pall",-13.67543601989746],["▁materialet",-13.675443649291992],["▁კანონ",-13.675454139709473],["න්ස්",-13.675457000732422],["▁Млади",-13.675464630126951],["▁Trail",-13.67547607421875],["▁plz",-13.675514221191406],["فحص",-13.675518035888672],["ទាន",-13.67552089691162],["न्ज",-13.675538063049316],["ಶು",-13.675538063049316],["▁kõigile",-13.67555046081543],["изму",-13.675557136535645],["ілу",-13.67556381225586],["kaisi",-13.675570487976074],["ባህ",-13.675575256347656],["停車",-13.675593376159668],["▁chcel",-13.6755952835083],["▁Pró",-13.675607681274414],["▁ເຄື່ອງ",-13.67560863494873],["▁Migra",-13.67561149597168],["▁தோன்ற",-13.675643920898438],["خیر",-13.675655364990234],["▁Apartments",-13.67565631866455],["▁enfermedades",-13.675658226013184],["▁aldaketa",-13.67566967010498],["▁යුද්ධය",-13.675671577453612],["▁Televi",-13.675676345825195],["▁Sierra",-13.675679206848145],["కాలం",-13.67568016052246],["▁নিতে",-13.675683975219728],["▁پوچھ",-13.675702095031738],["▁куә",-13.675705909729004],["άριο",-13.675725936889648],["ائيء",-13.675726890563965],["Į",-13.67572784423828],["的相簿",-13.675761222839355],["▁पाप",-13.675786018371582],["ฟี",-13.675788879394531],["להלן",-13.67582893371582],["λοκ",-13.67585563659668],["ေပါ",-13.67589282989502],["▁niekada",-13.675894737243652],["▁godinama",-13.67591381072998],["▁уеб",-13.67592716217041],["ložka",-13.67593002319336],["▁જાણવા",-13.67593002319336],["▁hill",-13.675949096679688],["policy",-13.675951957702637],["ឌី",-13.675969123840332],["ೇಗೌಡ",-13.67597484588623],["▁Леп",-13.675996780395508],["▁odpravi",-13.67599868774414],["▁ಸಾಧ್ಯತೆ",-13.676005363464355],["▁Pharma",-13.67601490020752],["▁worst",-13.676019668579102],["▁ಹೂ",-13.676023483276367],["ဒိ",-13.676024436950684],["▁العقار",-13.676051139831545],["skriuw",-13.676063537597656],["▁शाम",-13.67607307434082],["jének",-13.676093101501465],["šali",-13.676095962524414],["▁zmeny",-13.676140785217283],["inājums",-13.676167488098145],["▁هجر",-13.676170349121094],["▁일상",-13.676180839538574],["ドル",-13.676199913024902],["▁भगव",-13.67620086669922],["gîn",-13.676225662231444],["მართლ",-13.67625617980957],["電車",-13.676258087158203],["menes",-13.676270484924316],["▁ਵੇ",-13.676284790039062],["ΜΟ",-13.676289558410645],["пік",-13.676313400268556],["▁välj",-13.676331520080566],["▁медицинско",-13.676339149475098],["បរិ",-13.676340103149414],["дадзены",-13.67637538909912],["ფაქტ",-13.6763916015625],["▁ricevis",-13.676395416259766],["▁없었다",-13.676399230957031],["▁acidente",-13.67640495300293],["점은",-13.67642307281494],["▁монопол",-13.67642879486084],["▁определено",-13.67642879486084],["әс",-13.676478385925291],["1-6",-13.676483154296877],["▁poté",-13.676514625549316],["▁Kürt",-13.676556587219238],["▁mennyi",-13.676556587219238],["âl",-13.676558494567873],["▁alî",-13.676579475402832],["▁kB",-13.676590919494627],["▁शीर्ष",-13.676594734191896],["▁ກວ່າ",-13.676600456237791],["▁européen",-13.676605224609377],["بې",-13.67662525177002],["игры",-13.6766357421875],["ΥΠ",-13.676636695861816],["ızı",-13.676639556884766],["boz",-13.67665195465088],["íska",-13.676652908325195],["ਸ਼ੇ",-13.676652908325195],["▁námskeið",-13.676654815673828],["▁ерте",-13.676671981811523],["ଯାଉ",-13.676691055297852],["▁anticipa",-13.676692962646484],["؟“",-13.67670
440673828],["所谓的",-13.676718711853027],["你说",-13.676751136779783],["poana",-13.67676830291748],["०१",-13.67677879333496],["ketta",-13.676788330078123],["щення",-13.676793098449709],["JC",-13.676800727844238],["▁dres",-13.676802635192873],["▁الفرنسي",-13.676812171936035],["▁ويون",-13.676817893981934],["▁dental",-13.676825523376465],["▁çekme",-13.676838874816896],["▁3:1",-13.676841735839844],["▁Temat",-13.676870346069336],["ത്തോളം",-13.676871299743652],["fyd",-13.676880836486816],["ยอม",-13.676888465881348],["▁yatak",-13.676898002624512],["▁नियन्त्रण",-13.67690086364746],["რში",-13.676913261413574],["▁pievienot",-13.676918029785156],["▁преговори",-13.676920890808104],["ואלי",-13.676921844482422],["▁крут",-13.67693328857422],["▁husker",-13.676947593688965],["ዓይ",-13.67695426940918],["▁vojne",-13.676957130432127],["▁cài",-13.676965713500977],["▁imatges",-13.676966667175291],["putus",-13.67696762084961],["▁वजन",-13.676972389221191],["滾",-13.676993370056152],["▁спя",-13.677003860473633],["▁dispozitiv",-13.677005767822266],["運作",-13.67701244354248],["▁කවුරු",-13.677021026611328],["译",-13.677027702331545],["指挥",-13.67703342437744],["▁Byd",-13.677042961120604],["逮捕",-13.677046775817873],["凭借",-13.677047729492188],["පං",-13.677053451538086],["訓",-13.677059173583984],["▁luften",-13.677071571350098],["элсэн",-13.677072525024414],["牛奶",-13.67707633972168],["បញ្ជី",-13.677078247070312],["ኢትዮ",-13.677079200744627],["ពិនិត្យ",-13.677079200744627],["▁Bizkaiko",-13.677079200744627],["▁Québec",-13.677079200744627],["▁pemikiran",-13.677079200744627],["▁udgangspunkt",-13.677079200744627],["▁илјади",-13.677079200744627],["▁помалку",-13.677079200744627],["▁предузећа",-13.677079200744627],["▁регулювання",-13.677079200744627],["▁слободна",-13.677079200744627],["▁экзамен",-13.677079200744627],["▁ծախս",-13.677079200744627],["▁צריכה",-13.677079200744627],["▁स्थायी",-13.677079200744627],["▁ચર્ચા",-13.677079200744627],["▁વિકાસ",-13.677079200744627],["▁నేపథ్యంలో",-13.677079200744627],["▁బాధ్యత",-13.677079200744627],["▁გადაწყვეტილება",-13.677079200744627],["▁მხრიდან",-13.677079200744627],["▁სასტუმრო",-13.677079200744627],["▁съвмест",-13.677080154418944],["▁традиции",-13.677080154418944],["▁behagelig",-13.677081108093262],["▁פתוח",-13.677081108093262],["▁جاوید",-13.677081108093262],["ទស្សនា",-13.677082061767578],["▁olumlu",-13.677082061767578],["▁그래도",-13.677082061767578],["스타그램",-13.677082061767578],["▁ngọt",-13.677083015441896],["ေခါင္း",-13.677083969116213],["▁обставин",-13.677083969116213],["▁prihvati",-13.677084922790527],["▁വിളിച്ച",-13.67708683013916],["▁රමසිංහ",-13.677088737487791],["▁rådgivning",-13.67708969116211],["gusi",-13.677091598510742],["ാനാണ്",-13.677094459533691],["▁տեսնում",-13.677095413208008],["▁mánuði",-13.677096366882324],["▁department",-13.67709732055664],["▁كۆرسىت",-13.677098274230955],["▁орчны",-13.677099227905272],["▁суурь",-13.677102088928224],["▁Verantwortung",-13.677106857299805],["▁ରାଜ୍ୟର",-13.67710781097412],["[17]",-13.677108764648438],["▁müəssisələri",-13.67711067199707],["▁établi",-13.677111625671388],["▁চলছে",-13.677112579345703],["▁pogreš",-13.67712688446045],["▁rýchle",-13.677127838134766],["▁aparılan",-13.677129745483398],["▁аласыз",-13.677138328552246],["▁кезеңі",-13.67715072631836],["росла",-13.677152633666992],["▁уточни",-13.67715835571289],["▁ପୁରୀ",-13.677159309387209],["▁Swift",-13.677193641662598],["▁ինքը",-13.677201271057127],["▁Anul",-13.677205085754396],["▁හොඳයි",-13.677210807800291],["我们也",-13.677212715148926],["รถไฟ",-13.67721939086914],["▁주세요",-13.677231788635254],["دیا",
-13.677239418029783],["▁الآخرين",-13.677240371704102],["▁വെളിപ്പെടുത്ത",-13.677252769470217],["▁responsabil",-13.677254676818848],["▁نشود",-13.677258491516112],["bideak",-13.677261352539062],["▁relativamente",-13.677274703979492],["▁जाग",-13.677278518676758],["▁rahe",-13.677288055419922],["給你",-13.677295684814451],["晚餐",-13.677309036254885],["▁curva",-13.677315711975098],["▁shvati",-13.677315711975098],["▁315",-13.677319526672363],["▁Custom",-13.677326202392578],["▁Večer",-13.677326202392578],["▁voluto",-13.677340507507324],["▁Mesmo",-13.677342414855955],["说过",-13.677345275878906],["▁kıs",-13.67735195159912],["▁الحوار",-13.677359580993652],["便捷",-13.6773681640625],["▁razreda",-13.677379608154297],["▁přátel",-13.677388191223145],["▁Цяпер",-13.677396774291992],["▁забыв",-13.677397727966309],["ተጋ",-13.677410125732422],["සයි",-13.677447319030762],["▁liigu",-13.677456855773926],["▁Devil",-13.67745876312256],["▁שונה",-13.677467346191406],["146",-13.67747688293457],["activitats",-13.677485466003418],["ingumo",-13.677490234375],["▁следствие",-13.677499771118164],["▁paseo",-13.67750358581543],["okuthi",-13.677526473999023],["intelligence",-13.677532196044922],["▁сабой",-13.67753791809082],["ೀಸ್",-13.677538871765137],["▁показатели",-13.677557945251465],["▁höga",-13.677571296691896],["▁rakh",-13.677573204040527],["▁nowym",-13.677593231201172],["▁Xəzər",-13.677603721618652],["▁aia",-13.677606582641602],["konsulent",-13.677611351013184],["سائي",-13.677611351013184],["▁Mahi",-13.677617073059082],["▁pokoju",-13.677618026733398],["پرو",-13.677647590637209],["▁karê",-13.677658081054688],["▁سکه",-13.677658081054688],["▁nettverk",-13.677661895751951],["тийн",-13.677672386169434],["▁اکين",-13.67770004272461],["adeed",-13.677711486816406],["दृश",-13.677725791931152],["▁makke",-13.677725791931152],["jelzés",-13.677728652954102],["▁svæði",-13.677729606628418],["র্ক",-13.677734375],["▁custa",-13.67775058746338],["▁našoj",-13.677757263183594],["kusta",-13.67776107788086],["▁opskrift",-13.677778244018556],["▁Đô",-13.67778205871582],["装饰",-13.677810668945312],["Сан",-13.677813529968262],["▁kilimo",-13.67782974243164],["▁resor",-13.677852630615234],["▁občut",-13.677862167358398],["▁servidores",-13.677863121032717],["走在",-13.677867889404297],["▁226",-13.67786979675293],["▁fenomeno",-13.677873611450195],["▁खेती",-13.67788028717041],["▁posledních",-13.677924156188965],["▁zagra",-13.677931785583496],["▁póst",-13.677947998046877],["▁המר",-13.677949905395508],["▁venue",-13.677956581115724],["▁capabil",-13.67796802520752],["മന്ത്രി",-13.677973747253418],["הרג",-13.677997589111328],["必要があります",-13.677998542785645],["▁domhain",-13.67800998687744],["คอนโด",-13.678034782409668],["▁ضمانت",-13.678045272827148],["▁доля",-13.678055763244627],["▁fuego",-13.678062438964844],["▁областа",-13.678064346313477],["דעת",-13.67808723449707],["ჩნდა",-13.678091049194336],["▁biuro",-13.678093910217283],["באר",-13.678139686584473],["▁raia",-13.678160667419434],["RUN",-13.678171157836914],["▁связано",-13.678177833557127],["ոխ",-13.678182601928713],["▁condiții",-13.67821216583252],["▁trgovin",-13.678230285644531],["סכם",-13.67823314666748],["▁gekry",-13.678236961364746],["▁ਲਿ",-13.678237915039062],["▁Uning",-13.678248405456545],["ळून",-13.678284645080566],["▁různé",-13.678288459777832],["Alex",-13.678302764892578],["▁modtager",-13.678305625915527],["▁ਜੋੜ",-13.67831325531006],["▁نازل",-13.678314208984377],["פלסטיני",-13.678325653076172],["▁vurgu",-13.678329467773438],["▁ਸੋਚ",-13.678337097167969],["스가",-13.678339004516602],["ขนส่ง",-13.678364753723145
],["ලට",-13.678369522094728],["▁utilizados",-13.678407669067385],["▁aiba",-13.678443908691406],["plau",-13.67845344543457],["▁نفرت",-13.678471565246582],["▁warme",-13.678475379943848],["โดยใช้",-13.678479194641112],["▁prenota",-13.678491592407228],["ىدىكەن",-13.678521156311035],["छु",-13.678533554077148],["Mis",-13.678549766540527],["kiewicz",-13.67855453491211],["▁saadaan",-13.678559303283691],["▁policijsk",-13.678574562072754],["usser",-13.678583145141602],["當年",-13.678600311279297],["▁teel",-13.678603172302246],["وفر",-13.678619384765623],["全程",-13.678631782531738],["vrt",-13.678644180297852],["▁සභාව",-13.678661346435549],["▁Libya",-13.678682327270508],["خري",-13.678691864013672],["yadii",-13.678707122802734],["들에",-13.678709030151367],["ഘട്ട",-13.67873764038086],["▁shpejtë",-13.678754806518556],["▁Stig",-13.67877960205078],["ientes",-13.678788185119627],["ეგ",-13.678792953491213],["EDA",-13.678804397583008],["▁ceci",-13.678810119628906],["ෙමු",-13.678811073303224],["▁muddo",-13.678823471069336],["itățile",-13.67883014678955],["ړی",-13.67887020111084],["ιακών",-13.678884506225586],["▁взима",-13.678915977478027],["röst",-13.678921699523926],["▁distingue",-13.67893123626709],["▁abunda",-13.67894172668457],["▁breytt",-13.678948402404783],["▁virhe",-13.678953170776367],["ujoč",-13.678956031799316],["წვა",-13.67896842956543],["▁attitude",-13.678997993469238],["gęs",-13.678998947143556],["mbë",-13.679017066955566],["▁debt",-13.67902660369873],["▁iznad",-13.679062843322754],["ெல்லாம்",-13.679065704345703],["兔",-13.679073333740234],["▁moottori",-13.67909049987793],["వర్గ",-13.67909336090088],["മാർ",-13.679095268249512],["komitee",-13.679104804992676],["▁messi",-13.679117202758787],["▁ປະທານ",-13.67914581298828],["أشخاص",-13.679174423217772],["▁šioj",-13.679180145263672],["förbundet",-13.679197311401367],["sprochen",-13.67921257019043],["▁годишно",-13.67922019958496],["ciach",-13.679238319396973],["膽",-13.679258346557615],["貧",-13.679265022277832],["▁груб",-13.67928695678711],["▁leiding",-13.679296493530272],["เดี๋ยว",-13.679299354553224],["ធ្លាប់",-13.67930030822754],["ชนะเลิศ",-13.679301261901855],["▁Gheorghe",-13.679301261901855],["▁bermaksud",-13.679301261901855],["▁hurrengo",-13.679301261901855],["▁perjanjian",-13.679301261901855],["▁γονείς",-13.679301261901855],["▁веднъж",-13.679301261901855],["▁новембра",-13.679301261901855],["▁уколико",-13.679301261901855],["▁الجمعية",-13.679301261901855],["▁राजकीय",-13.679301261901855],["▁ବ୍ୟୁରୋ",-13.679301261901855],["▁ವಿಡಿಯೋ",-13.679301261901855],["▁කාර්යාලය",-13.679301261901855],["▁permasalahan",-13.679302215576172],["▁грошы",-13.679302215576172],["ጎንደር",-13.679303169250488],["▁ადგილზე",-13.679303169250488],["▁gairebé",-13.679306030273438],["▁ईश्वर",-13.679306983947754],["▁kancelář",-13.679308891296388],["▁Görüş",-13.679315567016602],["▁похоже",-13.679316520690918],["▁aparelho",-13.679317474365234],["▁виробнич",-13.67931842803955],["低下",-13.679319381713867],["▁തകര്",-13.679320335388184],["▁trasporto",-13.67932415008545],["▁හොයා",-13.67933464050293],["▁වෙමින්",-13.679336547851562],["કરણ",-13.67934513092041],["หุ่น",-13.679347038269045],["▁Baltijos",-13.679350852966309],["一覧",-13.679350852966309],["▁الشريف",-13.679351806640623],["▁benessere",-13.67935848236084],["▁telpā",-13.67935848236084],["▁дзве",-13.679361343383787],["ிருக்கிறார்",-13.679373741149902],["▁sencilla",-13.679373741149902],["▁Representa",-13.679391860961914],["ەڭ",-13.679393768310549],["▁بتاريخ",-13.679394721984863],["▁jossain",-13.679396629333496],["▁Yine",-13.679397583007812],["加州",-13.
679399490356444],["▁soluções",-13.679402351379396],["正しい",-13.679402351379396],["▁Uri",-13.67940616607666],["▁президенти",-13.67941665649414],["ตัวเลือก",-13.67942237854004],["指數",-13.679424285888672],["▁başladığı",-13.67944049835205],["▁සැර",-13.67945384979248],["▁Britain",-13.679462432861328],["▁Valla",-13.679471015930176],["▁tendência",-13.67947483062744],["▁TXXX",-13.679512977600098],["▁ühel",-13.679522514343262],["▁ապագա",-13.679522514343262],["১৭",-13.679523468017578],["იცა",-13.679529190063477],["trekking",-13.679535865783691],["▁isključivo",-13.679539680480955],["▁tojás",-13.679539680480955],["▁اصلاحات",-13.679540634155272],["leniyor",-13.679542541503906],["▁Beatles",-13.679555892944336],["ብዙ",-13.679571151733398],["▁razumije",-13.679576873779297],["ٻا",-13.679587364196776],["▁encontramos",-13.679587364196776],["▁අවස්ථා",-13.67958927154541],[">>>",-13.679606437683104],["▁ciep",-13.679628372192385],["▁يذكر",-13.6796293258667],["3-4",-13.679640769958496],["тельное",-13.679642677307127],["▁opprette",-13.67969799041748],["▁adil",-13.679707527160645],["▁הזוג",-13.679709434509276],["▁በጥ",-13.679709434509276],["▁пълна",-13.67971134185791],["▁పుట్ట",-13.67971420288086],["▁ouvi",-13.679753303527832],["▁Executive",-13.67975902557373],["▁допустим",-13.679800987243652],["▁פרשת",-13.679819107055664],["▁😛",-13.679885864257812],["ıyı",-13.679890632629396],["പ്പുറ",-13.679896354675291],["▁주민",-13.67991542816162],["בלע",-13.67995834350586],["กู้",-13.679964065551758],["▁jūras",-13.679975509643556],["▁intérieur",-13.680007934570312],["▁hallitus",-13.680024147033691],["▁ölkəmizdə",-13.680033683776855],["▁ਮਾਂ",-13.680078506469728],["▁(41)",-13.680091857910156],["врши",-13.680094718933104],["▁familja",-13.680096626281738],["▁මටත්",-13.680110931396484],["pressió",-13.68012237548828],["▁fizice",-13.680129051208496],["▁հն",-13.680137634277344],["▁Kategorioj",-13.680145263671877],["مشروع",-13.68014907836914],["वाइ",-13.680158615112305],["男友",-13.680158615112305],["▁സാഹചര്യ",-13.680160522460938],["▁பாஸ்",-13.68017578125],["▁കറ",-13.680194854736328],["▁noveller",-13.680203437805176],["▁სამსახურ",-13.680203437805176],["▁черво",-13.68021297454834],["▁assegura",-13.680227279663086],["ಿಕೊಂಡು",-13.68024730682373],["▁complexo",-13.680264472961426],["▁søde",-13.68028163909912],["▁postoj",-13.680315971374512],["▁хозяйства",-13.680322647094728],["حياة",-13.680347442626951],["beskyttet",-13.68034839630127],["▁canale",-13.68034839630127],["따",-13.680353164672852],["▁emenda",-13.68038845062256],["válasz",-13.68040370941162],["▁starfi",-13.680407524108888],["▁Ghost",-13.680408477783203],["kék",-13.680411338806152],["▁mapu",-13.680421829223633],["하기도",-13.680429458618164],["▁ambition",-13.680462837219238],["ордон",-13.680465698242188],["šlja",-13.680466651916504],["ومن",-13.680505752563477],["padi",-13.68050765991211],["biasa",-13.680520057678224],["ಭಾಗ",-13.680538177490234],["ጀመር",-13.68055248260498],["ായിരുന്ന",-13.68055534362793],["شارة",-13.680557250976562],["რებულ",-13.68057346343994],["ثنا",-13.680591583251951],["▁Lietuv",-13.68061351776123],["▁andel",-13.680633544921877],["Balt",-13.680636405944824],["▁mīl",-13.680638313293455],["▁ম্যা",-13.680644989013672],["▁изграждане",-13.680656433105469],["מינו",-13.680672645568848],["ให้มี",-13.68067455291748],["جديد",-13.680692672729492],["געקומען",-13.68071460723877],["▁početka",-13.6807279586792],["▁Указ",-13.680730819702148],["vragen",-13.680733680725098],["を作り",-13.680747032165527],["▁kožo",-13.68074893951416],["060",-13.680788040161133],["▁ଆଦି",-13.680790901184082],["álo
m",-13.680791854858398],["▁ឈ",-13.680800437927246],["▁برم",-13.68080711364746],["סקר",-13.68080997467041],["มีคน",-13.680815696716309],["▁legati",-13.680816650390623],["ቻችን",-13.680846214294434],["ಾಡ",-13.68085765838623],["▁Budd",-13.680859565734863],["ทับ",-13.68087387084961],["▁pluraj",-13.68087673187256],["treden",-13.680893898010254],["▁importance",-13.680898666381836],["▁குறித்த",-13.680906295776367],["当代",-13.68091869354248],["▁યોજ",-13.680923461914062],["▁ໄຟ",-13.68092441558838],["▁տեղեկություններ",-13.68094253540039],["▁obliko",-13.680950164794922],["kedve",-13.68097686767578],["നന്",-13.680986404418944],["▁miasto",-13.680996894836426],["▁complicado",-13.680997848510742],["Ñ",-13.68099880218506],["ጠሩ",-13.681011199951172],["▁тверд",-13.681023597717283],["▁xornal",-13.681032180786133],["▁уюм",-13.681035041809082],["änä",-13.681036949157717],["▁publikuar",-13.68104362487793],["uće",-13.68104648590088],["اسلام",-13.68106746673584],["▁स्वस्थ",-13.68107795715332],["▁chiave",-13.681078910827637],["ovský",-13.681082725524902],["susi",-13.681082725524902],["ودي",-13.68111801147461],["ګا",-13.681130409240724],["▁derket",-13.68115234375],["hâlde",-13.681161880493164],["▁mikään",-13.681178092956545],["VED",-13.68118953704834],["عها",-13.681192398071287],["家に",-13.681192398071287],["▁шляху",-13.681200981140137],["יטעט",-13.681208610534668],["▁uvjeti",-13.681219100952148],["▁kezdte",-13.68122386932373],["▁götürül",-13.681230545043944],["בחן",-13.681234359741213],["▁tuam",-13.681241989135742],["лагдсан",-13.681251525878906],["▁ใกล้",-13.681278228759766],["فئة",-13.681282997131348],["▁వైపు",-13.681282997131348],["▁عزم",-13.681307792663574],["թար",-13.681340217590332],["▁Laufe",-13.681344985961914],["ОСТ",-13.681376457214355],["магнит",-13.681382179260254],["▁semmit",-13.681384086608888],["▁recomandat",-13.681404113769531],["▁Wê",-13.68141746520996],["▁nổ",-13.68142795562744],["كرر",-13.681434631347656],["pět",-13.68144702911377],["تىل",-13.68144989013672],["▁dolgok",-13.68146514892578],["▁વિના",-13.681477546691896],["talon",-13.681478500366213],["實踐",-13.681478500366213],["▁ہل",-13.681480407714844],["▁observat",-13.681486129760742],["▁marcar",-13.68150520324707],["寿命",-13.681507110595703],["셜",-13.681513786315918],["オンライン",-13.681517601013184],["แขก",-13.681524276733398],["▁Tyvärr",-13.681528091430664],["▁gedurende",-13.681528091430664],["▁impotriva",-13.681528091430664],["▁livraison",-13.681528091430664],["▁mazungumzo",-13.681528091430664],["▁mengucapkan",-13.681528091430664],["▁mëngjes",-13.681528091430664],["▁δικαίωμα",-13.681528091430664],["▁հատկապես",-13.681528091430664],["▁כלומר",-13.681528091430664],["▁دیے",-13.681528091430664],["▁سىياسىي",-13.681528091430664],["▁ਮੀਟਿੰਗ",-13.681528091430664],["▁ஜனாதிபதி",-13.681528091430664],["▁카테고리",-13.681528091430664],["éducation",-13.68152904510498],["▁Králové",-13.68152904510498],["▁Olomouc",-13.68152904510498],["▁bliadhna",-13.68152904510498],["▁bosqich",-13.68152904510498],["▁bərabər",-13.68152904510498],["▁gyönyörű",-13.68152904510498],["▁leeyihiin",-13.68152904510498],["▁pressupost",-13.68152904510498],["▁yesterday",-13.68152904510498],["▁Ақтөбе",-13.68152904510498],["▁مریض",-13.68152904510498],["▁ਐਲਾਨ",-13.68152904510498],["▁ଫଟୋ",-13.681529998779297],["▁persónu",-13.681530952453612],["▁ելույթ",-13.68153190612793],["▁spectacle",-13.68153476715088],["▁סעיף",-13.681535720825195],["▁فوائد",-13.681535720825195],["▁südame",-13.681538581848145],["▁പ്രകാശ",-13.68153953552246],["ဖြယ္",-13.681540489196776],["▁PICHA",-13.68154239654541],["▁Terpercaya",-13.68
1543350219728],["ျမစ္",-13.68154525756836],["▁ixrac",-13.681546211242676],["▁yhteensä",-13.681547164916992],["ข้อเสนอ",-13.68155002593994],["▁vadītājs",-13.68155002593994],["▁गेली",-13.681551933288574],["▁reklám",-13.681554794311523],["▁कार्यरत",-13.681554794311523],["▁dimarts",-13.681560516357422],["▁spletu",-13.681560516357422],["▁crowd",-13.68156623840332],["▁tüdruk",-13.68156623840332],["▁Komputer",-13.681572914123535],["▁cumprir",-13.681575775146484],["બહેન",-13.681577682495115],["▁الجنوب",-13.681585311889648],["▁էջից",-13.68159008026123],["▁Sokol",-13.681591987609863],["▁заснован",-13.681591987609863],["ဂ္",-13.681595802307127],["▁сувязь",-13.681601524353027],["▁yazdı",-13.681602478027344],["대가",-13.681605339050291],["▁mondani",-13.681612014770508],["▁nauji",-13.681615829467772],["▁Toro",-13.681621551513672],["ختص",-13.68162727355957],["TORI",-13.681628227233888],["▁ម៉ា",-13.681639671325684],["tolera",-13.681644439697266],["andha",-13.681645393371582],["▁branca",-13.681657791137695],["▁meant",-13.681659698486328],["▁लाइफ",-13.681676864624023],["▁Summa",-13.681702613830566],["▁အသက်",-13.681702613830566],["▁والسلام",-13.681739807128906],["▁рисков",-13.681742668151855],["▁ადამიანს",-13.681743621826172],["▁polskiego",-13.681745529174805],["▁Företag",-13.68174648284912],["▁geçer",-13.681747436523438],["▁bolečin",-13.681756019592283],["dığım",-13.68175983428955],["▁Vereins",-13.68177604675293],["▁Republican",-13.681787490844728],["▁الفرنسية",-13.681798934936523],["ដូ",-13.681802749633787],["▁diantaranya",-13.681814193725586],["ائهم",-13.6818265914917],["няты",-13.681849479675291],["dělal",-13.681852340698242],["▁organise",-13.681859016418455],["▁skrywer",-13.68186378479004],["▁کتب",-13.68186855316162],["▁zagotavljanj",-13.681869506835938],["強烈",-13.681879997253418],["▁estación",-13.681897163391112],["▁frequente",-13.681926727294922],["▁Edel",-13.681931495666504],["▁Balance",-13.68193244934082],["▁Jinak",-13.68193817138672],["▁Abdu",-13.6819486618042],["stämma",-13.681951522827148],["▁postao",-13.681952476501465],["ଦାର",-13.68195343017578],["▁Rate",-13.681958198547363],["▁زند",-13.681965827941896],["ĺ",-13.681971549987791],["▁hitrost",-13.68198013305664],["▁مودی",-13.681987762451172],["▁senhor",-13.681989669799805],["▁rolul",-13.682008743286133],["▁tärkeää",-13.682015419006348],["▁광주",-13.682026863098145],["▁expresa",-13.682032585144045],["▁194",-13.682034492492676],["▁uile",-13.682034492492676],["▁overleg",-13.682039260864258],["▁finants",-13.682040214538574],["▁grasa",-13.682066917419434],["scritto",-13.682085990905762],["▁አገ",-13.682098388671877],["ობების",-13.682109832763672],["▁Illustr",-13.68211555480957],["▁kolorze",-13.68211555480957],["▁udziału",-13.682117462158203],["▁своја",-13.682147979736328],["▁corretta",-13.682154655456545],["கிழமை",-13.68216037750244],["▁užtikrinti",-13.68216037750244],["▁बैठकले",-13.682161331176758],["werth",-13.682171821594238],["▁naturlige",-13.68218994140625],["▁234",-13.68220043182373],["▁Howard",-13.68220329284668],["ेत्",-13.682211875915527],["альном",-13.682225227355955],["ຟັງ",-13.682245254516602],["▁хлад",-13.682252883911133],["▁opinions",-13.682268142700195],["▁vəzifələr",-13.682291030883787],["▁hermano",-13.68229866027832],["▁foorumi",-13.682306289672852],["▁сэргийлэх",-13.682317733764648],["ዮች",-13.682329177856444],["▁dnevni",-13.682331085205078],["▁Веб",-13.682336807250977],["▁Rally",-13.682337760925291],["prostred",-13.682340621948242],["ოლოგი",-13.682347297668455],["▁moduan",-13.68234920501709],["▁parcela",-13.682353973388672],["▁verwys",-13.6
82357788085938],["▁Termeni",-13.682360649108888],["կայ",-13.682364463806152],["▁Kı",-13.682369232177734],["▁Данас",-13.68238925933838],["nnosta",-13.682393074035645],["▁ഇത്ര",-13.682398796081545],["வாதி",-13.682411193847656],["ுகிற",-13.68242359161377],["▁ప్రేక్షకుల",-13.68243408203125],["▁heslo",-13.682448387145996],["صاص",-13.682461738586426],["เอาไว้",-13.682461738586426],["цыяй",-13.68246841430664],["നിധി",-13.682473182678224],["▁kommande",-13.682494163513184],["შინ",-13.6824951171875],["cã",-13.682506561279297],["ttelee",-13.682517051696776],["▁korpo",-13.682518005371094],["сник",-13.682519912719728],["servis",-13.68253231048584],["▁కన్న",-13.682533264160156],["كوك",-13.682540893554688],["▁ተመ",-13.682559967041016],["ੈਸ",-13.682564735412598],["ারি",-13.682570457458496],["▁늘어",-13.682571411132812],["▁Goo",-13.682581901550291],["گيري",-13.682598114013672],["▁comic",-13.682634353637695],["▁стабилност",-13.68264389038086],["▁geçirme",-13.682650566101074],["▁Banja",-13.682661056518556],["▁Carbon",-13.682671546936035],["▁aims",-13.682671546936035],["ಾಗುವ",-13.682682991027832],["ναλ",-13.682692527770996],["当て",-13.682693481445312],["▁죄",-13.68270492553711],["▁jāie",-13.682720184326172],["▁Aps",-13.682726860046388],["▁obchodu",-13.68273639678955],["ವನ್ನೂ",-13.682771682739258],["▁Lähe",-13.682778358459473],["▁legit",-13.682784080505373],["▁համալսարան",-13.682787895202637],["▁लुक",-13.682816505432127],["ในเด็ก",-13.682831764221191],["▁Priv",-13.682844161987305],["▁sürət",-13.682853698730469],["▁lyckas",-13.682881355285645],["136",-13.682896614074709],["▁knulle",-13.682905197143556],["რებით",-13.682907104492188],["▁yaratish",-13.682931900024414],["teckning",-13.682940483093262],["▁chans",-13.682940483093262],["▁내부",-13.682944297790527],["早い",-13.682950019836426],["csok",-13.682965278625488],["38)",-13.682966232299805],["▁հանդիպման",-13.682975769042969],["▁പിടിച്ചു",-13.682976722717283],["гүү",-13.683002471923828],["▁dönt",-13.683012962341309],["▁científicos",-13.683027267456056],["▁කෙර",-13.683038711547852],["▁сни",-13.683040618896484],["/2002",-13.683046340942385],["ूस",-13.683058738708496],["▁react",-13.683098793029783],["▁เที่ยว",-13.683103561401367],["▁estivo",-13.683106422424316],["▁našeho",-13.683114051818848],["つもり",-13.683116912841797],["▁Adem",-13.683120727539062],["▁øye",-13.683140754699709],["▁ଫି",-13.683149337768556],["▁জিয়ার",-13.683160781860352],["Napoca",-13.683189392089844],["▁дайым",-13.68319320678711],["тичен",-13.683207511901855],["elkészül",-13.683234214782717],["виток",-13.683259963989258],["▁Sisä",-13.68326187133789],["▁مراقبت",-13.683268547058104],["▁pediatr",-13.683341026306152],["▁پیچ",-13.683347702026367],["արվեստ",-13.683348655700684],["▁helyet",-13.683353424072266],["▁Bryn",-13.683356285095217],["▁nijak",-13.683356285095217],["ようになって",-13.68337345123291],["▁bæta",-13.68338394165039],["▁ស្ថិត",-13.683387756347656],["▁genannten",-13.683399200439451],["くれました",-13.683405876159668],["REK",-13.6834077835083],["▁sensual",-13.68341064453125],["ανά",-13.683412551879885],["▁השק",-13.68341827392578],["▁komandos",-13.683475494384766],["ngkak",-13.68349266052246],["▁askeri",-13.683566093444824],["▁vienam",-13.683573722839355],["▁میٹر",-13.683581352233888],["парк",-13.683585166931152],["▁Gaming",-13.683591842651367],["▁праблем",-13.683639526367188],["رافق",-13.68366241455078],["有意",-13.68367862701416],["ائى",-13.68368434906006],["▁ядерн",-13.683695793151855],["弟弟",-13.683700561523438],["▁skum",-13.683701515197754],["▁Lasten",-13.68370246887207],["▁craft",-13.683722496032717],["▁લઇ",
-13.683728218078612],["ไง",-13.68372917175293],["喂",-13.683730125427246],["csap",-13.683743476867676],["веројатно",-13.683744430541992],["▁außer",-13.68374729156494],["にくい",-13.683752059936523],["Категорија",-13.683758735656738],["▁duomenis",-13.683760643005373],["▁zastanawia",-13.683760643005373],["▁ανάγκες",-13.683760643005373],["▁сарадњи",-13.683760643005373],["▁כאילו",-13.683760643005373],["▁کرپشن",-13.683760643005373],["▁कप्तान",-13.683760643005373],["▁উদ্বোধন",-13.683760643005373],["▁চিন্তা",-13.683760643005373],["▁атрымаў",-13.683761596679688],["▁ସକାଳ",-13.683761596679688],["Jährige",-13.683762550354004],["ไลน์",-13.68376350402832],["▁învăţ",-13.68376350402832],["кувати",-13.683764457702637],["ဖြင့္",-13.683764457702637],["▁SharePoint",-13.683764457702637],["คาสิโนออนไลน์",-13.68376636505127],["▁brengt",-13.68376636505127],["▁cuideachd",-13.68376636505127],["▁دسمبر",-13.68376636505127],["▁ఉత్తమ",-13.68376636505127],["▁Gewicht",-13.68376922607422],["▁ثبات",-13.683770179748535],["▁ordförande",-13.683772087097168],["▁நமது",-13.683773040771484],["▁బహు",-13.683774948120115],["週末",-13.683774948120115],["▁əmlak",-13.683775901794434],["ութեան",-13.683777809143066],["▁Überblick",-13.683783531188965],["▁klausimai",-13.683788299560549],["▁כאלה",-13.683793067932127],["▁કરવી",-13.683797836303713],["▁відповідь",-13.683812141418455],["tooda",-13.683819770812988],["▁Загреб",-13.68382167816162],["▁készítés",-13.683822631835938],["▁відкрито",-13.683823585510254],["▁Gale",-13.683836936950684],["dición",-13.683839797973633],["▁چىڭ",-13.683859825134276],["▁שול",-13.683860778808594],["трик",-13.68386173248291],["▁انتظامیہ",-13.683866500854492],["▁دادگاه",-13.683867454528809],["strādā",-13.683874130249023],["▁Мос",-13.68387508392334],["တမ္း",-13.683876037597656],["▁принимает",-13.683887481689451],["▁campion",-13.683889389038086],["▁situaciones",-13.68390655517578],["▁editie",-13.68390941619873],["▁hiba",-13.68392276763916],["▁भोजन",-13.68395709991455],["תרחש",-13.683975219726562],["▁이유는",-13.683985710144045],["ಗೊ",-13.68399715423584],["▁Cabe",-13.68399715423584],["▁ontem",-13.684000968933104],["экономического",-13.684004783630373],["▁مغربی",-13.684017181396484],["ដឹងថា",-13.684033393859863],["▁рол",-13.684062957763672],["航天",-13.684062957763672],["ച്",-13.68407154083252],["▁СТА",-13.684078216552734],["▁യുവതി",-13.684099197387695],["▁Register",-13.684114456176758],["ወድ",-13.684138298034668],["▁syfte",-13.684146881103516],["តេ",-13.68416690826416],["▁توفر",-13.684179306030272],["▁börjat",-13.68419361114502],["▁jokių",-13.68421459197998],["▁qeydə",-13.68421745300293],["含量",-13.684228897094728],["▁الثالثة",-13.684240341186523],["▁دروغ",-13.68424129486084],["會在",-13.684243202209473],["çiler",-13.684260368347168],["ëll",-13.684260368347168],["▁castiga",-13.684264183044434],["▁Fidel",-13.684270858764648],["一定是",-13.684270858764648],["▁trods",-13.684274673461914],["▁ძლიერი",-13.684280395507812],["▁rūši",-13.684281349182127],["▁fotografías",-13.684284210205078],["▁inşaat",-13.684304237365724],["פיס",-13.684308052062988],["▁Kantor",-13.684311866760254],["čiams",-13.68431282043457],["يىتى",-13.684313774108888],["▁ulje",-13.68431568145752],["▁ordini",-13.684326171875],["یکٹر",-13.68433666229248],["▁münaqişə",-13.684348106384276],["drum",-13.684349060058594],["dıklarını",-13.684359550476074],["▁Darwin",-13.684361457824709],["१३",-13.684364318847656],["▁satura",-13.684372901916504],["的關係",-13.684402465820312],["▁көш",-13.684409141540527],["▁හට",-13.684422492980955],["▁gençler",-13.684423446655272],["▁Ebost",-13.68442440
032959],["▁परे",-13.68443202972412],["▁قريب",-13.68444538116455],["ิบ",-13.684449195861816],["▁Coach",-13.684459686279297],["▁generacij",-13.684463500976562],["▁€/",-13.684465408325195],["▁ඝාතනය",-13.684467315673828],["▁Driver",-13.684477806091309],["▁uporabljamo",-13.684490203857422],["▁labur",-13.684491157531738],["▁varbūt",-13.684493064880373],["▁ბატონ",-13.684500694274902],["▁accade",-13.684507369995115],["▁finales",-13.684508323669434],["rzec",-13.684517860412598],["而成",-13.68453311920166],["▁Verdi",-13.684535026550291],["▁සේවක",-13.684535026550291],["▁MÜ",-13.684551239013672],["▁transparente",-13.684568405151367],["▁moderator",-13.684576034545898],["▁dårlige",-13.684592247009276],["ډې",-13.68460464477539],["▁კლუბ",-13.68463134765625],["якого",-13.684637069702148],["КОВ",-13.684640884399414],["▁Machi",-13.684647560119627],["▁similares",-13.68465805053711],["▁paydo",-13.684659004211426],["xml",-13.684659957885742],["▁vsebino",-13.684667587280272],["▁здрави",-13.684683799743652],["▁qalan",-13.684694290161133],["비를",-13.684709548950195],["▁Wag",-13.684717178344728],["ываем",-13.68472671508789],["▁alternatív",-13.684747695922852],["▁सकेको",-13.684764862060549],["平板",-13.684767723083496],["▁kompanija",-13.684779167175291],["коменду",-13.68479824066162],["▁nəzər",-13.68480110168457],["▁నగర",-13.684803009033203],["▁σοβαρ",-13.684805870056152],["စၥ",-13.684822082519531],["団体",-13.68482494354248],["▁принципе",-13.684849739074709],["▁ჯგუფი",-13.684855461120604],["နာရီ",-13.684863090515137],["wraca",-13.68487548828125],["ckom",-13.68488311767578],["TIT",-13.684895515441896],["եպ",-13.68492317199707],["▁Hiç",-13.684925079345703],["限量",-13.68492603302002],["339",-13.684928894042969],["witch",-13.684940338134766],["▁ຫມາຍ",-13.684940338134766],["▁ពួកគេ",-13.684943199157717],["▁bateria",-13.684944152832031],["▁ESB",-13.684956550598145],["ziert",-13.684972763061523],["ēšanai",-13.684974670410156],["▁karva",-13.684977531433104],["▁exception",-13.685040473937988],["▁kokë",-13.68504238128662],["▁لخوا",-13.685059547424316],["1/3",-13.685093879699709],["சரி",-13.685099601745604],["▁egység",-13.685101509094238],["räume",-13.685105323791504],["▁Anglia",-13.685110092163086],["ляли",-13.685165405273438],["▁ős",-13.685169219970703],["ційні",-13.685172080993652],["بے",-13.685174942016602],["יפן",-13.685188293457031],["దర్",-13.68519115447998],["▁ڪوٽ",-13.68521213531494],["▁Аляксандра",-13.685213088989258],["▁folosite",-13.685233116149902],["▁Ахмет",-13.685240745544434],["▁TAB",-13.68526554107666],["▁KITA",-13.685270309448242],["一流",-13.685272216796877],["▁이하",-13.68527603149414],["дачу",-13.685312271118164],["▁চান",-13.68531608581543],["浴室",-13.68531608581543],["▁znakov",-13.685342788696287],["ご案内",-13.685385704040527],["▁2,000",-13.685388565063477],["▁జరిగి",-13.68541145324707],["▁glemme",-13.685417175292969],["દે",-13.685428619384766],["▁kenë",-13.685432434082031],["▁arunc",-13.685468673706056],["มาดู",-13.685469627380373],["▁بعيد",-13.68547821044922],["ettevõte",-13.685503005981444],["lıqla",-13.685506820678713],["392",-13.685531616210938],["▁भारतको",-13.68554401397705],["▁teulu",-13.685572624206545],["▁начальника",-13.68559741973877],["▁ուժերի",-13.685656547546388],["▁Questi",-13.685663223266602],["โต้",-13.68566608428955],["పాఠ",-13.68569278717041],["Информ",-13.685720443725586],["▁সাত",-13.685750007629396],["▁lieka",-13.685750961303713],["▁راهنمای",-13.68575668334961],["▁있다면",-13.685768127441406],["▁regarder",-13.685776710510254],["▁cvet",-13.68578052520752],["▁Tokio",-13.685782432556152],["▁spas",-13.68579
8645019531],["▁internazionali",-13.68580436706543],["数年",-13.685809135437012],["▁IMF",-13.68581199645996],["▁bewust",-13.68581771850586],["▁visites",-13.685834884643556],["แต่งงาน",-13.685842514038086],["зил",-13.685864448547363],["літератур",-13.68586540222168],["وڊ",-13.685867309570312],["▁ໂລກ",-13.68587589263916],["▁زاد",-13.685887336730955],["▁afecte",-13.685906410217283],["▁чал",-13.685916900634766],["▁setia",-13.685933113098145],["抑制",-13.685941696166992],["▁පිලි",-13.685942649841309],["针",-13.685942649841309],["ОВЕ",-13.685956954956056],["▁წიგნი",-13.685958862304688],["ማግኘት",-13.685959815979004],["▁ۋاقىت",-13.685970306396484],["▁얼마",-13.685973167419434],["坎",-13.685973167419434],["▁síðast",-13.685994148254396],["▁contrário",-13.68599796295166],["▁dimecres",-13.68599796295166],["▁otázku",-13.68599796295166],["▁razgovara",-13.68599796295166],["▁strokovnjak",-13.68599796295166],["▁suradnji",-13.68599796295166],["▁səmərəli",-13.68599796295166],["▁скільки",-13.68599796295166],["▁ځانګړې",-13.68599796295166],["▁चेतावनी",-13.68599796295166],["▁বিজ্ঞপ্তি",-13.68599796295166],["▁గుర్తించ",-13.68599796295166],["▁మనిషి",-13.68599796295166],["▁കൃഷി",-13.68599796295166],["▁រំលង",-13.68599796295166],["▁uitgevoerd",-13.685998916625977],["▁Комисија",-13.685998916625977],["▁նամակ",-13.685998916625977],["▁Verenigd",-13.685999870300291],["▁eksklusiv",-13.685999870300291],["▁किताब",-13.685999870300291],["▁ਪੁੱਛ",-13.68600082397461],["▁perdagangan",-13.686001777648926],["▁כניסה",-13.686001777648926],["▁uśmiech",-13.686006546020508],["▁تعريف",-13.686009407043455],["▁үүсгэ",-13.686010360717772],["▁కనిపిస్త",-13.686010360717772],["▁земљу",-13.68601894378662],["▁pemberian",-13.686026573181152],["▁ජෝ",-13.686026573181152],["▁많아",-13.686028480529783],["▁මුලින්ම",-13.686033248901367],["märgi",-13.68603801727295],["▁régulièrement",-13.68603801727295],["▁Külön",-13.686043739318848],["花園",-13.68604850769043],["▁गरेपछि",-13.686050415039062],["▁അവന്റെ",-13.686050415039062],["▁kinesisk",-13.686052322387695],["▁달리",-13.686058044433594],["▁કઈ",-13.686060905456545],["лава",-13.68606185913086],["▁மாலை",-13.686065673828123],["▁consumi",-13.686068534851074],["േരി",-13.68607234954834],["▁یکشنبه",-13.68608570098877],["▁સારી",-13.686097145080566],["ପଣ",-13.686098098754885],["▁bibliotēkas",-13.686100959777832],["▁шагнал",-13.686101913452148],["▁Германии",-13.686102867126465],["▁نجاح",-13.686107635498049],["▁проведении",-13.686108589172363],["▁ٹھیک",-13.686110496520996],["ԵՍ",-13.686114311218262],["▁보여주",-13.686126708984377],["▁francés",-13.686128616333008],["144",-13.68614101409912],["▁ଗଣ",-13.686144828796388],["▁dirêj",-13.686150550842283],["▁знаешь",-13.686152458190918],["беген",-13.68615436553955],["folyam",-13.686176300048828],["▁abierta",-13.686185836791992],["▁kigge",-13.68618869781494],["▁tikslas",-13.686189651489258],["ेगी",-13.686199188232422],["▁رنج",-13.686200141906738],["▁вътре",-13.686206817626951],["▁AW",-13.686209678649902],["▁domače",-13.686223983764648],["▁могућност",-13.686234474182127],["লাইন",-13.686237335205078],["▁annarra",-13.686245918273926],["▁07:00",-13.686261177062988],["четком",-13.68626308441162],["kić",-13.686269760131836],["▁PRU",-13.686275482177734],["時光",-13.686279296875],["▁برائے",-13.686285972595217],["▁mērķ",-13.686297416687012],["▁249",-13.686304092407228],["▁Скоп",-13.686309814453123],["▁Seznam",-13.686311721801758],["▁населението",-13.686311721801758],["▁Мына",-13.686321258544922],["period",-13.686330795288086],["ର୍ଟ",-13.686330795288086],["▁ହେଉ",-13.68634796142578],["חוויה",-13.68635082244
873],["▁nemmeno",-13.686360359191896],["▁boði",-13.686361312866213],["▁hoone",-13.68637752532959],["▁turha",-13.686407089233398],["▁tretji",-13.686408042907717],["▁přitom",-13.686419486999512],["integr",-13.686434745788574],["▁laws",-13.686443328857422],["▁молча",-13.686446189880373],["▁Fenster",-13.686458587646484],["▁hakikat",-13.686458587646484],["▁туды",-13.686469078063965],["reikning",-13.686473846435549],["▁citliv",-13.686488151550291],["▁Команд",-13.686492919921877],["▁Karachi",-13.686497688293455],["▁wawili",-13.686530113220217],["▁republik",-13.686545372009276],["▁الهدف",-13.686562538146973],["395",-13.686582565307615],["▁materjali",-13.686583518981934],["üstə",-13.686593055725098],["▁રચના",-13.686595916748049],["▁робить",-13.686601638793944],["▁skille",-13.686607360839844],["velmente",-13.68662452697754],["DAM",-13.686625480651855],["▁বাংলাদেশে",-13.686637878417969],["ziak",-13.686662673950195],["▁великий",-13.686662673950195],["▁يقين",-13.686676025390623],["▁Tästä",-13.686677932739258],["▁potřeba",-13.68667984008789],["▁умерен",-13.686681747436523],["▁можеби",-13.686724662780762],["▁custos",-13.686729431152344],["сък",-13.686731338500977],["▁pirmais",-13.686737060546877],["үбүз",-13.68674373626709],["రొ",-13.686744689941406],["日本的",-13.68675136566162],["▁kuingia",-13.686753273010254],["icorum",-13.686760902404783],["UJI",-13.686761856079102],["▁Европи",-13.686763763427734],["IVI",-13.686767578125],["▁බැඳ",-13.686777114868164],["▁ប្រាប់",-13.686781883239746],["ათა",-13.686817169189451],["▁medija",-13.686817169189451],["▁universi",-13.686827659606934],["ሚኒ",-13.68683624267578],["▁जनताको",-13.686837196350098],["▁verdt",-13.68690013885498],["▁treće",-13.68694305419922],["▁szeroko",-13.686963081359863],["▁porusza",-13.686978340148926],["elevat",-13.686983108520508],["росли",-13.686999320983888],["潜在",-13.687031745910645],["▁Kvar",-13.687051773071287],["▁hazırlık",-13.687053680419922],["▁оруулалт",-13.68705940246582],["▁otvoreno",-13.687063217163086],["АО",-13.68707275390625],["▁പെണ്ണ",-13.687077522277832],["צד",-13.687078475952148],["上市公司",-13.68708038330078],["▁ସିଂହ",-13.687085151672363],["の状態",-13.687093734741213],["▁ສູງ",-13.687095642089844],["мант",-13.687101364135742],["jį",-13.687116622924805],["ಗಳನ್ನ",-13.687118530273438],["ലിന്",-13.68714714050293],["▁взят",-13.687152862548828],["жила",-13.687183380126951],["▁porez",-13.68718433380127],["▁බර",-13.687192916870115],["kjøp",-13.687193870544434],["▁asenta",-13.687200546264648],["ซอ",-13.687212944030762],["▁ညီ",-13.687215805053713],["ującym",-13.68723964691162],["වෙයි",-13.687261581420898],["ძვ",-13.687275886535645],["▁punctul",-13.687283515930176],["▁Cumhuriyeti",-13.687299728393556],["▁especialment",-13.687325477600098],["▁Cảm",-13.68732738494873],["▁מערכות",-13.687342643737791],["▁дисп",-13.68736457824707],["hastighet",-13.687365531921388],["ອົງ",-13.687376022338867],["找出",-13.687386512756348],["▁måneden",-13.687405586242676],["eneralforsamling",-13.687439918518066],["▁Bowl",-13.68747615814209],["▁وشي",-13.68752384185791],["aatti",-13.687541961669922],["ылатын",-13.687561988830566],["▁Vár",-13.687577247619627],["▁konsept",-13.68759536743164],["▁رده",-13.687602043151855],["သူေတြ",-13.687626838684082],["plete",-13.687639236450195],["MAI",-13.687641143798828],["rope",-13.687650680541992],["▁අයිතිය",-13.687667846679688],["hoidon",-13.68766975402832],["▁Bean",-13.687702178955078],["▁نيست",-13.687710762023926],["ጠላ",-13.687725067138672],["सेवा",-13.687738418579102],["▁Зим",-13.687742233276367],["siapa",-13.687767028808594],["▁povezani
",-13.68779182434082],["剛好",-13.687807083129885],["▁Зі",-13.68782901763916],["+5",-13.687838554382324],["ману",-13.68784523010254],["▁kısmı",-13.68784523010254],["sheni",-13.687856674194336],["飛機",-13.687859535217283],["ফিক",-13.687877655029297],["нските",-13.687884330749512],["entwicklung",-13.687914848327637],["pró",-13.687947273254396],["චන",-13.687947273254396],["▁નિર્",-13.687983512878418],["▁промислов",-13.68798828125],["▁spurt",-13.687995910644531],["▁болуш",-13.68800163269043],["▁jeshi",-13.68802261352539],["▁elementar",-13.688027381896973],["графія",-13.688033103942873],["ფის",-13.688055038452148],["eiben",-13.688063621520996],["▁Зар",-13.688068389892578],["▁grandaj",-13.688085556030272],["drug",-13.688093185424805],["▁Drum",-13.688106536865234],["▁почеток",-13.688108444213867],["tamisel",-13.688124656677246],["zisz",-13.688127517700195],["▁suporte",-13.688129425048828],["การผลิต",-13.68813133239746],["人们的",-13.688150405883787],["▁Өкмөт",-13.688156127929688],["▁ಸಿನಿ",-13.688165664672852],["களா",-13.688189506530762],["账户",-13.68820571899414],["癒",-13.68821907043457],["യറ്റ",-13.688220977783203],["장의",-13.688222885131836],["ljni",-13.688227653503418],["摂取",-13.68823528289795],["इंग्रजी",-13.688240051269531],["▁Jamhuuriyadda",-13.688240051269531],["▁cymdeithasol",-13.688240051269531],["▁farmaceut",-13.688240051269531],["▁nhắn",-13.688240051269531],["▁şampiyon",-13.688240051269531],["▁БНХАУ",-13.688240051269531],["▁ықпал",-13.688240051269531],["▁Ռոբերտ",-13.688240051269531],["▁آمريڪا",-13.688240051269531],["▁گلگت",-13.688240051269531],["▁ଡାଉନଲୋଡ଼",-13.688240051269531],["▁ಅಮೆರಿಕ",-13.688240051269531],["▁තරගාවලිය",-13.688240051269531],["▁Peraturan",-13.688241004943848],["▁خىزمىتى",-13.688241004943848],["▁අවධානය",-13.688241004943848],["▁досліджень",-13.688241958618164],["▁интелектуал",-13.688241958618164],["▁తేదీ",-13.688243865966797],["▁utseende",-13.68824577331543],["▁համատեղ",-13.68824577331543],["▁πολλοί",-13.688247680664062],["▁ڀٽو",-13.68824863433838],["▁өлкөнүн",-13.688249588012695],["크로",-13.688254356384276],["▁příběh",-13.688261985778809],["▁הללו",-13.688262939453123],["agung",-13.68826389312744],["▁Apalagi",-13.688273429870604],["▁активни",-13.688273429870604],["▁waana",-13.688278198242188],["▁знаеш",-13.688278198242188],["▁wasaarad",-13.688279151916504],["▁आमदार",-13.688281059265137],["avenir",-13.68828582763672],["ຊື້",-13.688287734985352],["blomst",-13.688289642333984],["Ін",-13.68829345703125],["▁crème",-13.688297271728516],["▁Aktuell",-13.68830394744873],["尽快",-13.68830394744873],["▁نباشد",-13.688305854797363],["▁DUNIA",-13.688309669494627],["▁étudiants",-13.68831729888916],["▁도입",-13.68832015991211],["▁Symbol",-13.68832778930664],["▁obračun",-13.688349723815918],["▁minimalis",-13.688353538513184],["▁Média",-13.68835735321045],["▁najboljše",-13.68836784362793],["▁Break",-13.688379287719728],["▁übernehmen",-13.688380241394045],["▁элдин",-13.688396453857422],["▁delež",-13.688416481018066],["▁கூறிய",-13.688416481018066],["を作成",-13.688416481018066],["▁көрө",-13.68842601776123],["▁András",-13.688431739807127],["ରିକ",-13.688438415527344],["▁بیس",-13.688443183898926],["әк",-13.688448905944824],["▁cyfrif",-13.68845558166504],["এফ",-13.688462257385254],["উন",-13.688469886779783],["いたり",-13.688470840454102],["ēna",-13.688474655151367],["▁torneo",-13.68848991394043],["▁органу",-13.688498497009276],["▁Niels",-13.688499450683594],["000.000",-13.68850040435791],["▁jakiej",-13.688521385192873],["▁Neza",-13.688542366027832],["▁manifestazione",-13.688556671142578],["ణా",-13.68856716156006],
["കൊ",-13.688572883605955],["▁പുഴ",-13.688589096069336],["နတ္",-13.688594818115234],["▁Lucy",-13.688602447509766],["▁química",-13.688603401184082],["▁მიწის",-13.688608169555664],["កាន់",-13.68860912322998],["맛집",-13.68863010406494],["▁чувствува",-13.68863582611084],["ട്ടാ",-13.688641548156738],["▁غالب",-13.688644409179688],["łabym",-13.688691139221191],["ີ້",-13.688691139221191],["ыма",-13.68869400024414],["iklis",-13.688695907592772],["щихся",-13.688698768615724],["متر",-13.68870449066162],["کرد",-13.688724517822266],["▁потребителите",-13.688724517822266],["▁Carp",-13.688727378845217],["▁نواب",-13.688737869262695],["▁Отже",-13.688743591308594],["ੇਗੀ",-13.688749313354492],["ლიანი",-13.68877410888672],["▁हाँ",-13.688802719116213],["▁vybral",-13.688838958740234],["▁الفني",-13.688847541809082],["▁شارك",-13.68885326385498],["▁balta",-13.688865661621094],["ർമ്മ",-13.688915252685549],["մը",-13.68892765045166],["▁huts",-13.688939094543455],["جلة",-13.688946723937988],["გულ",-13.688946723937988],["▁itaque",-13.688966751098633],["▁представники",-13.688994407653809],["▁stale",-13.689000129699709],["▁දන්න",-13.689008712768556],["光明",-13.689010620117188],["ITAT",-13.689041137695312],["▁постигна",-13.689051628112791],["▁banen",-13.689092636108398],["▁adolesce",-13.689168930053713],["▁жасаған",-13.68917179107666],["▁ranije",-13.689173698425291],["▁любовта",-13.689176559448242],["▁дегенде",-13.689183235168455],["ຫະ",-13.68918514251709],["▁büyüme",-13.68918514251709],["ดึง",-13.689210891723633],["▁لږ",-13.689224243164062],["▁ወዳ",-13.689227104187012],["▁billige",-13.689249038696287],["▁војска",-13.6892671585083],["kommt",-13.68927764892578],["▁invert",-13.689287185668944],["wurk",-13.689292907714844],["عنوان",-13.68929386138916],["స్తున్నా",-13.689298629760742],["▁tygodniu",-13.68929958343506],["භාවය",-13.689301490783691],["▁представници",-13.689324378967283],["▁býval",-13.68932819366455],["▁nemaju",-13.689332962036133],["▁Cita",-13.689388275146484],["▁vaya",-13.689393997192385],["빛",-13.68941593170166],["leysi",-13.689451217651367],["золо",-13.689453125],["▁yimid",-13.689456939697266],["▁облусу",-13.689460754394531],["▁spark",-13.689462661743164],["▁ಕೃಷ್ಣ",-13.689464569091797],["▁මුදල",-13.689468383789062],["▁Mannschaft",-13.68949031829834],["▁feleség",-13.689502716064451],["tycznej",-13.68952178955078],["▁Beide",-13.68954086303711],["มูลค่า",-13.68956184387207],["▁الشي",-13.689570426940918],["▁pastar",-13.689584732055664],["▁Pepe",-13.68958568572998],["Bru",-13.689590454101562],["▁kérdése",-13.689598083496094],["fié",-13.689620018005373],["▁ಬದಲಿಸಿ",-13.68962287902832],["▁garantia",-13.689637184143066],["פחד",-13.689653396606444],["маил",-13.689669609069824],["овани",-13.689669609069824],["▁təşkilatı",-13.68967342376709],["աձայն",-13.68968105316162],["ెత్త",-13.689689636230469],["▁listë",-13.689689636230469],["共识",-13.689691543579102],["▁DHA",-13.689698219299316],["▁Coro",-13.689715385437012],["▁אָנ",-13.689738273620604],["▁ouers",-13.689752578735352],["счет",-13.689770698547363],["▁fühlt",-13.689774513244627],["શ્રી",-13.689775466918944],["bränd",-13.689794540405272],["jõu",-13.689796447753906],["▁Viki",-13.689805030822754],["ійні",-13.689813613891602],["▁риска",-13.68983268737793],["▁ຕິດ",-13.68984031677246],["▁эне",-13.689858436584473],["▁Espoo",-13.689859390258787],["▁extremo",-13.689861297607422],["خلص",-13.689875602722168],["коле",-13.6898775100708],["▁balso",-13.689882278442385],["▁tinere",-13.689882278442385],["خطر",-13.689887046813965],["聞いた",-13.68989372253418],["нева",-13.689896583557127],["▁aza
",-13.68990421295166],["▁मागे",-13.68990993499756],["alderen",-13.689919471740724],["▁विष",-13.689929008483888],["▁големите",-13.689956665039062],["aksjon",-13.689967155456545],["▁памер",-13.68998908996582],["yuan",-13.690000534057615],["▁калк",-13.690059661865234],["▁560",-13.690074920654297],["▁ഒരേ",-13.69011116027832],["▁duhovni",-13.690117835998535],["▁Perus",-13.690118789672852],["pv",-13.690123558044434],["▁zemí",-13.69012451171875],["▁отива",-13.690131187438965],["▁කරුණ",-13.690131187438965],["ర్డ్",-13.690133094787598],["زرع",-13.69014835357666],["ჯან",-13.690150260925291],["αίων",-13.690160751342772],["870",-13.690187454223633],["naise",-13.690187454223633],["▁superbe",-13.690216064453123],["可以说",-13.69021987915039],["ുണ്ടാക",-13.69022274017334],["▁Barva",-13.690230369567873],["▁courses",-13.690234184265137],["րած",-13.6902494430542],["zű",-13.690251350402832],["frac",-13.690269470214844],["агдсан",-13.690281867980955],["rückt",-13.690288543701172],["况",-13.690296173095703],["發出",-13.690301895141602],["▁Albani",-13.690305709838867],["们的",-13.690313339233398],["▁grati",-13.690325736999512],["▁управља",-13.69033432006836],["每周",-13.69034481048584],["▁žalost",-13.690346717834473],["▁философия",-13.690352439880373],["ಖ್ಯಾತ",-13.690366744995115],["▁evel",-13.690373420715332],["▁катышуу",-13.690387725830078],["▁അധിക",-13.69040298461914],["자와",-13.690409660339355],["人民群众",-13.690412521362305],["▁оформлен",-13.690430641174316],["▁సంవత్సరాల",-13.690431594848633],["▁storage",-13.69043254852295],["▁תה",-13.690442085266112],["顾问",-13.690442085266112],["缴",-13.690443992614746],["ార్థ",-13.69044589996338],["posisjon",-13.690465927124023],["輔導",-13.690473556518556],["сік",-13.690478324890137],["▁Mineral",-13.69048309326172],["▁snaga",-13.690484046936035],["ဓာတ္",-13.690485954284668],["▁Entfernung",-13.690486907958984],["▁berlebihan",-13.690486907958984],["▁celelalte",-13.690486907958984],["▁eksempler",-13.690486907958984],["▁ristorante",-13.690486907958984],["▁warunkach",-13.690486907958984],["▁Χρήσης",-13.690486907958984],["▁адбылося",-13.690486907958984],["▁лауазым",-13.690486907958984],["▁سەۋەب",-13.690486907958984],["▁இலவச",-13.690486907958984],["▁అంతర్జాతీయ",-13.690486907958984],["▁ಈಗಾಗಲೇ",-13.690486907958984],["﴾",-13.690486907958984],["gesproken",-13.6904878616333],["psykiatri",-13.6904878616333],["ṉ",-13.6904878616333],["▁shembull",-13.6904878616333],["▁పెరిగి",-13.6904878616333],["▁Hüquq",-13.690488815307615],["▁બેઠક",-13.690488815307615],["▁понятия",-13.690489768981934],["ティング",-13.690489768981934],["grader",-13.690492630004885],["▁sèrie",-13.690494537353516],["▁idazle",-13.690495491027832],["▁সদর",-13.690496444702148],["▁hétfő",-13.690497398376465],["▁ഉച്ച",-13.690497398376465],["താര",-13.690500259399414],["▁ndugu",-13.69050121307373],["▁замовлення",-13.69050121307373],["▁первым",-13.690505981445312],["▁Sleep",-13.690508842468262],["▁крім",-13.690511703491213],["▁parfaitement",-13.690512657165527],["පර",-13.690519332885742],["照明",-13.690519332885742],["進步",-13.690521240234377],["ន្ធ",-13.690524101257324],["▁छोड",-13.690524101257324],["▁заједничк",-13.690528869628906],["▁francesa",-13.690536499023438],["▁وهناك",-13.69053840637207],["▁разработки",-13.690544128417969],["▁సౌ",-13.690552711486816],["▁תפ",-13.690558433532717],["▁foreslå",-13.690567016601562],["ბეჭ",-13.690574645996094],["▁தாய்",-13.690577507019045],["▁ნინო",-13.690591812133787],["▁مطلع",-13.690594673156738],["劇場",-13.690596580505373],["▁पढ़े",-13.690598487854004],["▁yerda",-13.690601348876951],["▁supplémentaire",-13.69060420
9899902],["▁لاهور",-13.690607070922852],["น่าสนใจ",-13.690617561340332],["ናገሩ",-13.690619468688965],["▁Област",-13.690627098083496],["▁Cả",-13.690628051757812],["▁ayrılma",-13.690632820129396],["EZI",-13.690634727478027],["兩年",-13.690635681152344],["▁eskualde",-13.69064712524414],["▁ਨੋਟ",-13.69066333770752],["▁öppen",-13.690675735473633],["▁ΣΤΟ",-13.69067668914795],["kkende",-13.690677642822266],["นส์",-13.690677642822266],["▁cyklist",-13.690679550170898],["▁mødt",-13.690685272216797],["▁dividend",-13.69068717956543],["تخصص",-13.690706253051758],["▁organizaciones",-13.69070816040039],["ቀም",-13.690711975097656],["▁детали",-13.690730094909668],["นิค",-13.690732955932615],["રૂપ",-13.690744400024414],["▁रोजगारी",-13.690754890441896],["▁दरम्यान",-13.690756797790527],["▁Миш",-13.690773010253906],["▁vlees",-13.690777778625488],["▁Asya",-13.690788269042969],["RAP",-13.690789222717283],["▁riamh",-13.690791130065918],["റ്റര്",-13.690826416015623],["▁ბრალ",-13.690831184387209],["▁садржи",-13.690834999084473],["▁Bhag",-13.690839767456056],["▁tərkib",-13.690852165222168],["▁좋겠",-13.690855979919434],["▁præsentere",-13.690869331359863],["▁הכנס",-13.69088077545166],["ávanie",-13.69089412689209],["▁kavram",-13.690895080566406],["▁NOVA",-13.690899848937988],["मेन्ट",-13.690902709960938],["▁तहको",-13.690903663635254],["주신",-13.690910339355469],["▁wyraż",-13.690935134887695],["0.8",-13.690936088562012],["ೆಗೆ",-13.69094467163086],["èse",-13.690958976745604],["ពាន់",-13.690958976745604],["എൽ",-13.690959930419922],["▁מאיר",-13.690961837768556],["ALLI",-13.690962791442873],["പോയ",-13.690963745117188],["▁selection",-13.69096851348877],["desh",-13.69098949432373],["组织的",-13.69098949432373],["paketti",-13.690998077392578],["strek",-13.691001892089844],["ságát",-13.69100284576416],["▁obniż",-13.691007614135742],["▁κύκλο",-13.69100856781006],["▁hindu",-13.691020011901855],["▁muutu",-13.691020965576172],["rrêté",-13.691027641296388],["▁կատարելու",-13.691038131713867],["▁метри",-13.69105052947998],["▁कौ",-13.69105052947998],["schreib",-13.691088676452637],["▁основной",-13.691091537475586],["▁nahiko",-13.691095352172852],["▁두고",-13.691112518310549],["▁agradecer",-13.691147804260254],["tendent",-13.69114875793457],["igien",-13.691152572631836],["▁представника",-13.691162109375],["เพื่อนๆ",-13.691166877746582],["▁rulle",-13.691166877746582],["kkaita",-13.691194534301758],["▁ndoa",-13.691205978393556],["nošću",-13.691217422485352],["▁quidam",-13.69122314453125],["млади",-13.691231727600098],["▁පූජ",-13.69125747680664],["ลอย",-13.691274642944336],["▁любит",-13.691277503967283],["▁ilgilen",-13.69128704071045],["▁beriman",-13.691289901733398],["Говор",-13.691291809082031],["JD",-13.691299438476562],["пес",-13.691340446472168],["ೆಂದು",-13.691347122192385],["Тех",-13.69136905670166],["pladsen",-13.691370964050291],["▁நம்ப",-13.691393852233888],["ടിച്ചു",-13.69140338897705],["▁PSG",-13.691411972045898],["сиң",-13.691415786743164],["▁landing",-13.691425323486328],["▁gleder",-13.691431045532228],["▁proiectul",-13.691436767578123],["ावली",-13.69144344329834],["▁dalin",-13.691452026367188],["3.0",-13.691454887390137],["▁жеткізу",-13.691458702087402],["的经验",-13.69149112701416],["▁Studies",-13.691510200500488],["มาถึง",-13.691516876220703],["▁이용자",-13.691532135009766],["lüyü",-13.691536903381348],["Европ",-13.691536903381348],["▁اسکی",-13.691542625427246],["▁Investment",-13.691556930541992],["czesne",-13.691563606262209],["toihin",-13.691587448120115],["▁رکھے",-13.691598892211914],["▁පනත",-13.69159984588623],["▁gjuhë",-13.69162464141845
5],["▁waayo",-13.691651344299316],["▁экономически",-13.691652297973633],["▁សមត្ថកិច្ច",-13.691652297973633],["▁missing",-13.691655158996582],["▁исключен",-13.691673278808594],["osztály",-13.691686630249023],["▁კლასი",-13.691690444946287],["│",-13.691702842712402],["శ్రీ",-13.691716194152832],["▁sibh",-13.691730499267578],["▁koncertu",-13.691747665405272],["▁Liviu",-13.691771507263184],["也被",-13.6917724609375],["仲間",-13.691778182983398],["▁gonna",-13.691789627075195],["▁særligt",-13.691794395446776],["esercizio",-13.691807746887209],["tender",-13.691816329956056],["▁vẽ",-13.691824913024902],["▁בשבוע",-13.691854476928713],["nesc",-13.691861152648926],["▁وجل",-13.69190502166748],["projekti",-13.691927909851074],["жаргал",-13.691965103149414],["แห่งนี้",-13.691967964172363],["أتي",-13.69198226928711],["kick",-13.691987037658691],["▁verranno",-13.691990852355955],["▁szolgálat",-13.69199275970459],["របស់ខ្លួន",-13.692008018493652],["જ્ઞ",-13.692012786865234],["ቀና",-13.69201374053955],["USD",-13.692017555236816],["ෙට",-13.692051887512209],["▁competencias",-13.692058563232422],["▁இரா",-13.6920747756958],["ίζω",-13.692076683044434],["▁pateikti",-13.692079544067385],["▁அங்கு",-13.692084312438965],["▁કોણ",-13.69209098815918],["▁bhon",-13.69210147857666],["рык",-13.692111015319824],["せい",-13.692112922668455],["▁meski",-13.692138671875],["▁Haushalt",-13.692160606384276],["inkite",-13.692179679870604],["▁destpê",-13.692184448242188],["تعاون",-13.69219970703125],["لوج",-13.692204475402832],["▁maddesi",-13.692219734191896],["▁kamion",-13.692220687866213],["▁ingrediente",-13.692225456237791],["▁osaamis",-13.692252159118652],["▁voldoen",-13.692252159118652],["▁spełnia",-13.692283630371094],["▁கிள",-13.692296981811523],["▁মূল",-13.692299842834473],["czność",-13.69233512878418],["daily",-13.69234848022461],["▁Страх",-13.69234848022461],["review",-13.692364692687988],["แน่น",-13.69236946105957],["▁Håll",-13.692380905151367],["පුද්ගල",-13.692381858825684],["▁hoia",-13.692383766174316],["бна",-13.692402839660645],["大切に",-13.692402839660645],["▁béo",-13.692405700683594],["ుకుంటే",-13.692408561706545],["▁שלישי",-13.692408561706545],["HOL",-13.69241428375244],["▁entsprechenden",-13.69241428375244],["ြဲ",-13.692421913146973],["▁filtro",-13.69245719909668],["▁მც",-13.692469596862791],["▁القادم",-13.692482948303224],["▁කන",-13.692512512207031],["▁fikirlər",-13.69252109527588],["▁AFF",-13.692522048950195],["▁hasilnya",-13.692530632019045],["▁yollar",-13.692530632019045],["▁befolkningen",-13.692533493041992],["АРТ",-13.69253921508789],["154",-13.692543029785156],["▁vacker",-13.692550659179688],["feest",-13.692560195922852],["▁יחסי",-13.692588806152344],["▁Fahrt",-13.69262409210205],["▁adieraz",-13.692641258239746],["有個",-13.692649841308594],["硕士",-13.692662239074709],["лівы",-13.692671775817873],["叛",-13.692676544189451],["纠",-13.69268035888672],["▁permes",-13.692682266235352],["▁گردید",-13.692691802978516],["▁noći",-13.692697525024414],["▁spek",-13.692699432373049],["粉丝",-13.692704200744627],["杆",-13.69271755218506],["netz",-13.692718505859377],["▁রোহিঙ্গা",-13.69272232055664],["捲",-13.692724227905272],["იყ",-13.692734718322754],["ຕ້ານ",-13.69273853302002],["អប់រំ",-13.69273853302002],["▁Мыйзам",-13.69273853302002],["ቁጥጥር",-13.692739486694336],["▁Benutzer",-13.692739486694336],["▁Cependant",-13.692739486694336],["▁Cầu",-13.692739486694336],["▁Repubblica",-13.692739486694336],["▁pròpies",-13.692739486694336],["▁război",-13.692739486694336],["▁έδωσε",-13.692739486694336],["▁повърхност",-13.692739486694336],["▁Հանրապետությ
ուն",-13.692739486694336],["▁արտասանություն",-13.692739486694336],["▁բիզնես",-13.692739486694336],["▁फाइनल",-13.692739486694336],["▁विकिपीडिया",-13.692739486694336],["▁ఎంపిక",-13.692739486694336],["▁ኩባንያ",-13.692739486694336],["쾌",-13.692739486694336],["▁Silahlı",-13.692740440368652],["▁clásico",-13.692740440368652],["▁hẹn",-13.692740440368652],["▁ویدئو",-13.692740440368652],["▁ব্যবসায়ী",-13.692740440368652],["▁젊은",-13.692740440368652],["ຫຼວງ",-13.692741394042969],["▁zəruri",-13.692741394042969],["▁fuoco",-13.692742347717283],["▁ਸੈਕਸ",-13.692742347717283],["▁тарбия",-13.692743301391602],["▁मृत्यू",-13.692743301391602],["جدیدترین",-13.692744255065918],["konsult",-13.692745208740234],["▁specjalne",-13.692745208740234],["▁velocità",-13.69274616241455],["▁मार्केट",-13.69274616241455],["▁Andrzej",-13.692747116088867],["▁సంతోష",-13.692747116088867],["▁ukvarja",-13.692750930786133],["▁consenso",-13.69275188446045],["▁Fjöl",-13.692752838134766],["▁wiadomości",-13.692753791809082],["▁अनुदान",-13.692754745483398],["▁содержит",-13.692760467529297],["▁minuuttia",-13.692761421203612],["▁Spezial",-13.69276237487793],["▁դաշտ",-13.69276237487793],["льцы",-13.692763328552246],["▁אַזוי",-13.692767143249512],["▁kavuş",-13.692769050598145],["XU",-13.692774772644045],["▁nebūs",-13.692774772644045],["▁znajdziesz",-13.692776679992676],["▁dhashay",-13.692778587341309],["▁zdaniem",-13.69278049468994],["▁ਸਾਈਟ",-13.69278335571289],["ιάζει",-13.692785263061523],["▁driving",-13.692789077758787],["ARIO",-13.692798614501951],["▁soomaali",-13.69280242919922],["▁influenza",-13.6928071975708],["▁იცის",-13.6928071975708],["颜色",-13.692808151245115],["kõ",-13.6928129196167],["▁corde",-13.692814826965332],["▁DAIŞê",-13.692818641662598],["ካር",-13.692821502685549],["จะไป",-13.69283390045166],["▁soubor",-13.692837715148926],["▁орсон",-13.692838668823242],["▁zajmuje",-13.692840576171877],["▁економија",-13.69285488128662],["▁jardí",-13.692867279052734],["▁Obecnie",-13.69287109375],["▁Medizin",-13.692873001098633],["▁Rû",-13.69288158416748],["▁Buss",-13.692888259887695],["▁шинээр",-13.692900657653809],["▁بخصوص",-13.69291877746582],["▁kış",-13.69293212890625],["▁käigus",-13.6929349899292],["▁Cate",-13.69294548034668],["전주",-13.69294548034668],["▁spremembe",-13.69295883178711],["▁Γιώργος",-13.692968368530272],["▁prefeito",-13.692975997924805],["ကုိယ္",-13.69299030303955],["▁посетители",-13.692992210388184],["▁elokuuta",-13.692999839782717],["▁Точно",-13.692999839782717],["▁paksu",-13.693017959594728],["▁Vakar",-13.69302749633789],["▁компаниясы",-13.693047523498535],["▁Norveg",-13.693053245544434],["ājā",-13.693066596984863],["ớt",-13.693069458007812],["astronomi",-13.69308090209961],["izadas",-13.693085670471191],["્રિ",-13.693092346191406],["▁funzioni",-13.693108558654783],["▁četvrtak",-13.693111419677734],["▁Hương",-13.693113327026367],["▁улуу",-13.69312858581543],["▁Yazar",-13.693133354187012],["propi",-13.693155288696287],["ટિંગ",-13.693155288696287],["სმ",-13.693155288696287],["▁ресурсів",-13.693178176879885],["ышат",-13.693182945251465],["▁kraje",-13.693191528320312],["▁قوا",-13.693197250366213],["▁кредита",-13.693204879760742],["▁Рат",-13.69320583343506],["▁политик",-13.693222999572754],["luvu",-13.693249702453612],["▁प्रती",-13.693283081054688],["▁valge",-13.693291664123535],["▁үйрен",-13.69329833984375],["▁ገና",-13.69331169128418],["४८",-13.693315505981444],["कॉ",-13.693341255187988],["▁पहुंचा",-13.693360328674316],["soon",-13.693361282348633],["iseret",-13.693366050720217],["▁quinto",-13.693367004394531],["▁परा",-13.693376541
137695],["മില്ലാത്ത",-13.693379402160645],["▁zima",-13.693395614624023],["▁قازاق",-13.693401336669922],["▁ndërkombëtar",-13.693402290344238],["▁بينهم",-13.693437576293944],["不清楚",-13.693446159362791],["▁prenosi",-13.693453788757324],["ومة",-13.693455696105955],["不仅仅是",-13.69345760345459],["bordet",-13.69346523284912],["hanam",-13.693466186523438],["▁vormi",-13.693472862243652],["山區",-13.69347858428955],["周り",-13.693483352661133],["▁његово",-13.693489074707031],["মিন",-13.693493843078612],["▁sharci",-13.693495750427246],["▁गराउने",-13.693523406982422],["▁ভাব",-13.693535804748535],["fonds",-13.693538665771484],["▁વાહ",-13.6935453414917],["▁stejné",-13.693558692932127],["▁ijro",-13.693589210510254],["▁bëj",-13.693593978881836],["ரிக்க",-13.693599700927734],["官網",-13.693638801574709],["222",-13.69364070892334],["баў",-13.69364070892334],["▁reaction",-13.693650245666504],["▁ilmasto",-13.693652153015137],["▁პარლამენტ",-13.69365692138672],["/2001",-13.69367218017578],["▁Werbe",-13.69367218017578],["▁پنھنجي",-13.693682670593262],["▁смер",-13.69368839263916],["vinder",-13.693707466125488],["▁elenco",-13.693710327148438],["▁Çar",-13.69372844696045],["▁naturalmente",-13.693758010864258],["▁þeirri",-13.693772315979004],["▁കണ്ട്",-13.693774223327637],["ndrysh",-13.693818092346191],["▁Muzej",-13.69382667541504],["▁Quindi",-13.693855285644531],["್ರಾ",-13.69390106201172],["▁Seven",-13.693914413452148],["▁bunday",-13.693923950195312],["▁මූ",-13.693923950195312],["▁Жеке",-13.693940162658691],["դառնալ",-13.693961143493652],["ໂຄງການ",-13.693981170654297],["vidu",-13.693991661071776],["▁mbështetje",-13.69400405883789],["▁Buddha",-13.69400691986084],["▁İz",-13.69400691986084],["▁delovanja",-13.694008827209473],["гаад",-13.694016456604004],["▁miskin",-13.694021224975586],["далее",-13.6940336227417],["Eco",-13.694035530090332],["odon",-13.694050788879396],["stveno",-13.694055557250977],["▁populo",-13.694073677062988],["河北",-13.694092750549316],["▁पूर्वी",-13.694101333618164],["▁තිබුණ",-13.694107055664062],["▁ផល",-13.694107055664062],["▁urip",-13.694111824035645],["▁Anar",-13.694138526916504],["▁cinsi",-13.694148063659668],["masuk",-13.694159507751465],["▁krug",-13.69416046142578],["-2005",-13.694173812866213],["സ്റ്റാ",-13.694185256958008],["制品",-13.694188117980955],["▁seksuaal",-13.694189071655272],["λάσ",-13.694191932678224],["发电",-13.69419288635254],["▁reál",-13.69420051574707],["▁BANK",-13.694215774536133],["▁valinta",-13.694228172302246],["▁neurri",-13.69423770904541],["▁tekemä",-13.694307327270508],["▁تبلیغ",-13.694327354431152],["报道称",-13.694342613220217],["▁Produkten",-13.694347381591797],["▁agradece",-13.694347381591797],["▁videli",-13.694351196289062],["ថត",-13.694360733032228],["▁یزد",-13.694360733032228],["løy",-13.694361686706545],["▁hiervan",-13.694361686706545],["rã",-13.694363594055176],["ുണ്ടായിരുന്ന",-13.694382667541504],["დგენი",-13.694401741027832],["РАТ",-13.694429397583008],["▁امضا",-13.69443130493164],["▁paina",-13.694445610046388],["ন্দ্র",-13.694478034973145],["▁ponúk",-13.69448471069336],["▁שאל",-13.694488525390623],["dahl",-13.69449234008789],["บุก",-13.69450569152832],["▁Área",-13.694506645202637],["tawan",-13.694512367248535],["▁pregleda",-13.694520950317385],["▁പദ",-13.694522857666016],["▁addım",-13.694524765014648],["▁الكامل",-13.694533348083496],["▁získat",-13.694539070129396],["▁росту",-13.69454288482666],["рент",-13.694574356079102],["ชีว",-13.694608688354492],["臣",-13.694620132446287],["▁encontrará",-13.694631576538086],["▁სახელმწიფოს",-13.694639205932615],["үд",-13.6946601867675
78],["▁класса",-13.694693565368652],["▁단체",-13.6947021484375],["keria",-13.694708824157717],["▁fræk",-13.694726943969728],["ႏု",-13.694740295410156],["▁లుక్",-13.694746017456056],["▁بعمل",-13.694764137268066],["誰か",-13.694771766662598],["▁Konsum",-13.694787979125977],["するという",-13.69480323791504],["ведено",-13.694808959960938],["▁معنى",-13.694823265075684],["▁Fact",-13.694844245910645],["flower",-13.694849014282228],["收費",-13.69486141204834],["loží",-13.694870948791504],["导弹",-13.694901466369627],["▁манастира",-13.69491195678711],["俱",-13.694928169250488],["▁Asli",-13.694934844970703],["▁gados",-13.694955825805664],["▁ekipleri",-13.69495677947998],["▁הראש",-13.694957733154297],["姜",-13.69498062133789],["争议",-13.694988250732422],["▁ночь",-13.694989204406738],["ເທົ່າ",-13.694992065429688],["تېخنىك",-13.69499683380127],["သမၼတ",-13.69499683380127],["▁atbalstīt",-13.69499683380127],["▁kolovoza",-13.69499683380127],["▁riguardo",-13.69499683380127],["▁Хурлын",-13.69499683380127],["▁российских",-13.69499683380127],["▁ситуація",-13.69499683380127],["▁соопшти",-13.69499683380127],["▁تغذیه",-13.69499683380127],["▁गोपनीयतेच",-13.69499683380127],["▁ელექტრო",-13.69499683380127],["▁მთავრობის",-13.69499683380127],["플러스",-13.69499683380127],["▁Debrecen",-13.694997787475586],["▁арбитраж",-13.694997787475586],["▁мінімум",-13.694997787475586],["redaguoti",-13.694998741149902],["▁εσύ",-13.694998741149902],["▁ಮತ್ತೊಂದು",-13.694998741149902],["▁వెనుక",-13.69499969482422],["▁оролцож",-13.695003509521484],["▁વિભાગ",-13.695003509521484],["▁megjelenés",-13.6950044631958],["▁teilweise",-13.695005416870115],["▁шилдэг",-13.695005416870115],["▁працаваць",-13.695012092590332],["معايير",-13.695015907287598],["▁rendimiento",-13.695015907287598],["を引き",-13.695019721984863],["▁правите",-13.695022583007812],["▁freelance",-13.695035934448242],["mmmm",-13.695038795471191],["▁chừng",-13.695046424865724],["▁respeto",-13.695059776306152],["▁speciell",-13.695068359375],["▁mengajak",-13.695085525512695],["▁slid",-13.69508934020996],["▁získá",-13.69509506225586],["▁시청",-13.69509983062744],["▁במחיר",-13.69510269165039],["▁الأقل",-13.695112228393556],["▁wraca",-13.69512176513672],["手续",-13.69512176513672],["▁അന്വേഷണം",-13.695123672485352],["▁roto",-13.695124626159668],["擴大",-13.695124626159668],["▁Slova",-13.695125579833984],["▁sociálne",-13.6951265335083],["ГАН",-13.695141792297363],["▁показать",-13.695144653320312],["▁Shadow",-13.695161819458008],["▁సమయం",-13.695168495178224],["زراعة",-13.695178031921388],["▁днем",-13.695186614990234],["▁(42)",-13.695191383361816],["▁მუშაობა",-13.695198059082031],["▁struktura",-13.695208549499512],["▁తీసుకున్న",-13.695239067077637],["▁խոսել",-13.69524097442627],["▁rugby",-13.695247650146484],["ដែលបាន",-13.69525909423828],["▁memohon",-13.695266723632812],["申报",-13.695270538330078],["▁משום",-13.695310592651367],["ynnu",-13.695319175720217],["▁Porém",-13.69532871246338],["▁kerä",-13.69532871246338],["tkazib",-13.695372581481934],["▁yapılma",-13.69537353515625],["կիր",-13.6953763961792],["▁vicina",-13.6953763961792],["▁Naisten",-13.695380210876465],["店家",-13.695390701293944],["▁баланы",-13.695396423339844],["▁processen",-13.695416450500488],["▁Söder",-13.695428848266602],["जीवन",-13.695432662963867],["▁ඔයාල",-13.695432662963867],["▁cương",-13.695442199707031],["続き",-13.695451736450195],["שמאל",-13.695452690124512],["▁plenum",-13.69545841217041],["ወል",-13.695465087890623],["▁Lahore",-13.695484161376951],["▁هاء",-13.695488929748535],["▁udziela",-13.695499420166016],["▁chleb",-13.695508003234863],["ειας",-13.6
95511817932127],["かつ",-13.69551944732666],["လံ",-13.695526123046877],["381",-13.695533752441406],["▁comentário",-13.695539474487305],["▁zmago",-13.695549964904783],["льную",-13.695568084716797],["ásokat",-13.69557285308838],["▁zwraca",-13.69557285308838],["ρίες",-13.69557762145996],["▁забележи",-13.695595741271973],["▁знаний",-13.695600509643556],["▁verwag",-13.695603370666504],["▁supporter",-13.695627212524414],["▁маці",-13.695630073547363],["▁আগ",-13.69563102722168],["▁ڪرائي",-13.695635795593262],["▁دیوان",-13.695651054382324],["▁הסדר",-13.695673942565918],["دۇرۇش",-13.695695877075195],["unico",-13.69569969177246],["▁разлога",-13.695711135864258],["բե",-13.695714950561523],["▁akak",-13.695714950561523],["▁aventure",-13.695725440979004],["▁түп",-13.695727348327637],["▁überall",-13.695752143859863],["▁հայտարարություն",-13.695754051208496],["ေရွ",-13.695761680603027],["▁سیمو",-13.69576644897461],["▁Monika",-13.695770263671877],["▁confortable",-13.695778846740724],["▁фирмата",-13.69577980041504],["શુ",-13.695796966552734],["客户的",-13.695799827575684],["▁genoemd",-13.695828437805176],["allë",-13.695833206176758],["preso",-13.6958589553833],["身為",-13.69586181640625],["▁команду",-13.695878028869627],["▁ਅੰਤ",-13.695895195007324],["▁পড়া",-13.695916175842283],["ിരുന്നില്ല",-13.69591999053955],["специфич",-13.695920944213867],["▁qytetit",-13.695931434631348],["▁کارشناسان",-13.695932388305664],["salah",-13.69593620300293],["▁Реклама",-13.695950508117676],["▁támad",-13.695960998535156],["▁gelar",-13.695966720581056],["▁гід",-13.69597053527832],["▁prikazan",-13.6959867477417],["ያዙ",-13.69599723815918],["skeho",-13.696000099182127],["ешься",-13.696027755737305],["ങ്ങ്",-13.696039199829102],["▁ចុង",-13.696043014526367],["straf",-13.696057319641112],["▁Déc",-13.696057319641112],["▁때는",-13.696064949035645],["▁pārāk",-13.69607639312744],["▁лепо",-13.696107864379885],["луулах",-13.6961088180542],["▁palīdzēt",-13.69614028930664],["▁oriente",-13.696184158325195],["ទស្ស",-13.696185111999512],["▁Kava",-13.696189880371094],["▁semestr",-13.69619083404541],["hoone",-13.696195602416992],["abilità",-13.696200370788574],["▁ఆనంద",-13.69622039794922],["▁درجہ",-13.696221351623535],["ってくる",-13.696226119995115],["takat",-13.696237564086914],["▁kolumn",-13.696245193481444],["मला",-13.696261405944824],["GAT",-13.696264266967772],["▁הסת",-13.696267127990724],["▁سڌ",-13.69626808166504],["ാണെന്ന",-13.696285247802734],["▁म्हणा",-13.69629192352295],["RIP",-13.696303367614746],["ਪੰਜਾਬ",-13.696314811706545],["Espanya",-13.69631576538086],["名單",-13.696316719055176],["▁தெரிவித்த",-13.696334838867188],["▁Skill",-13.696338653564451],["ımızı",-13.696361541748049],["รน",-13.696364402770996],["жэння",-13.696365356445312],["считыва",-13.696374893188477],["ത്ഥ",-13.696374893188477],["եան",-13.69637680053711],["▁kool",-13.696402549743652],["▁bouwen",-13.696406364440918],["ификация",-13.69641399383545],["▁prvním",-13.696417808532717],["ปฏิบัติการ",-13.696429252624512],["▁asuva",-13.69643497467041],["본부",-13.696447372436523],["ເຮັດວຽກ",-13.696453094482422],["OJE",-13.696460723876951],["▁סול",-13.696460723876951],["łowy",-13.696494102478027],["▁తే",-13.696495056152344],["ANU",-13.696502685546877],["постави",-13.69651222229004],["▁luva",-13.696513175964355],["▁svaret",-13.696525573730469],["▁комплекса",-13.696535110473633],["شیر",-13.696573257446287],["▁విషయ",-13.6965913772583],["marathi",-13.696626663208008],["▁أحب",-13.696636199951172],["zce",-13.696660041809082],["ሰቦች",-13.696677207946776],["▁szkolenia",-13.696700096130373],["▁حصو",-13.696702
95715332],["▁Qanun",-13.69672679901123],["▁Blank",-13.696734428405762],["▁yolları",-13.696763038635254],["handla",-13.696770668029783],["▁Shirka",-13.696771621704102],["कारण",-13.696773529052734],["はその",-13.69678783416748],["yzm",-13.69679832458496],["–18",-13.69679832458496],["▁ມາດ",-13.696800231933594],["▁Gerek",-13.696829795837402],["▁فرزندان",-13.696861267089844],["שלם",-13.696864128112791],["和发展",-13.696871757507324],["▁mababa",-13.69687557220459],["总局",-13.69687843322754],["▁бреме",-13.696884155273438],["▁redus",-13.696893692016602],["mette",-13.696907043457031],["▁aggiunge",-13.696910858154297],["▁bilinen",-13.696925163269045],["可用",-13.696931838989258],["▁животу",-13.69695281982422],["▁Tinder",-13.696953773498535],["vimus",-13.696954727172852],["جمل",-13.696965217590332],["Of",-13.69696807861328],["ന്നാ",-13.696976661682127],["々と",-13.696976661682127],["拥",-13.696980476379396],["▁الرابع",-13.696990966796877],["hế",-13.697004318237305],["ՆԵՐԻ",-13.69700813293457],["invent",-13.697011947631836],["ମାନଙ୍କର",-13.697011947631836],["คุก",-13.697036743164062],["▁الموت",-13.697044372558594],["▁satan",-13.697051048278809],["收取",-13.697054862976074],["wają",-13.697070121765137],["▁बुक",-13.69716453552246],["▁informado",-13.697169303894045],["▁трите",-13.697175979614258],["▁navegació",-13.69718074798584],["ย่าน",-13.69719696044922],["تعارف",-13.697209358215332],["どんどん",-13.697245597839355],["mizden",-13.697253227233888],["สงคราม",-13.697254180908203],["精准",-13.69725513458252],["မ်က္ႏွာ",-13.697257995605469],["អាជ្ញាធរ",-13.697258949279783],["▁Algemene",-13.697258949279783],["▁hủy",-13.697258949279783],["▁sijaitsee",-13.697258949279783],["▁sodelovanju",-13.697258949279783],["▁wówczas",-13.697258949279783],["▁φέτος",-13.697258949279783],["▁եկեղեցի",-13.697258949279783],["▁कारबाही",-13.697258949279783],["▁टाईम्स",-13.697258949279783],["▁দ্বিতীয়",-13.697258949279783],["▁નિર્ણય",-13.697258949279783],["▁అత్యంత",-13.697258949279783],["▁త్వరలో",-13.697258949279783],["▁ግጭት",-13.697258949279783],["verblijf",-13.697259902954102],["▁Гэтэл",-13.697259902954102],["▁مەيدان",-13.697259902954102],["▁අදාල",-13.697261810302734],["▁विवेक",-13.69726276397705],["ໃບ",-13.697263717651367],["▁ନିୟମ",-13.697264671325684],["▁izdelki",-13.697273254394531],["▁жарнама",-13.697275161743164],["▁Gate",-13.697279930114746],["▁izraža",-13.697288513183594],["▁öppet",-13.697293281555176],["▁దీనికి",-13.69730281829834],["▁жатады",-13.697304725646973],["▁Gjennom",-13.697308540344238],["▁kötelezettség",-13.69731616973877],["▁представителей",-13.69731616973877],["solv",-13.697322845458984],["▁складається",-13.697344779968262],["▁parodo",-13.697349548339844],["rechnung",-13.697352409362791],["没有任何",-13.69735336303711],["▁marknaden",-13.69735622406006],["▁آیین",-13.69735622406006],["▁Highlight",-13.697358131408691],["ΙΚ",-13.697378158569336],["▁Брат",-13.697378158569336],["▁licence",-13.697383880615234],["▁trương",-13.697394371032717],["▁decisione",-13.697404861450195],["▁ponude",-13.697406768798828],["▁héten",-13.697410583496094],["▁Dominum",-13.69744110107422],["ляю",-13.697443962097168],["ేసి",-13.6974458694458],["▁substitui",-13.69745922088623],["▁၃။",-13.697460174560549],["▁הישראלית",-13.69746208190918],["▁никаква",-13.69747829437256],["▁തൂ",-13.697489738464355],["▁افغانانو",-13.697494506835938],["▁रहेछ",-13.697515487670898],["が増え",-13.697516441345217],["▁izbora",-13.69752025604248],["▁saama",-13.697529792785645],["▁Львова",-13.697539329528809],["oinnin",-13.697543144226074],["বিশ্ববিদ্যালয়ের",-13.697543144226074],["▁tāds",-13.697
547912597656],["вързан",-13.697549819946287],["អ្វី",-13.697549819946287],["კრე",-13.697554588317873],["▁gėr",-13.69756031036377],["▁الطرف",-13.697593688964844],["▁හිතා",-13.697627067565918],["dinga",-13.697635650634766],["փոստ",-13.697639465332031],["▁šola",-13.697672843933104],["???????",-13.69768238067627],["▁sukob",-13.697690963745115],["acteur",-13.6976957321167],["▁border",-13.697698593139648],["▁Filipp",-13.697704315185549],["▁jakoś",-13.69773006439209],["▁плани",-13.697734832763672],["▁sanitaria",-13.697766304016112],["344",-13.697769165039062],["▁Mikkel",-13.69780445098877],["▁مشت",-13.697806358337402],["▁переко",-13.697818756103516],["各界",-13.69782543182373],["獨特",-13.69783878326416],["▁rrugën",-13.697857856750488],["▁bror",-13.697875022888184],["▁organizzato",-13.69787883758545],["▁žemė",-13.697880744934082],["▁وگو",-13.697898864746094],["▁sebebi",-13.697901725769045],["▁प्रण",-13.697903633117676],["▁۲۶",-13.69792366027832],["▁рейс",-13.697942733764648],["▁228",-13.697945594787598],["と言われています",-13.697956085205078],["▁meisie",-13.69797706604004],["Ly",-13.697996139526367],["ιμα",-13.69800090789795],["的利益",-13.698010444641112],["▁ריי",-13.698018074035645],["1-3",-13.698019981384276],["▁производството",-13.69802951812744],["▁بشار",-13.698030471801758],["切れ",-13.698033332824709],["▁Matri",-13.698043823242188],["▁Kudu",-13.69805145263672],["▁светског",-13.698055267333984],["чевић",-13.698075294494627],["μενο",-13.698100090026855],["▁dodatek",-13.698131561279297],["թեւ",-13.69813346862793],["iirka",-13.698142051696776],["▁نوش",-13.69814395904541],["ذات",-13.698150634765623],["43)",-13.698159217834473],["适当",-13.698168754577637],["уудад",-13.698175430297852],["▁malbon",-13.698183059692385],["▁τόπο",-13.698201179504396],["▁korisnici",-13.69821548461914],["▁usona",-13.69821548461914],["▁розмов",-13.698217391967772],["мэл",-13.69822120666504],["▁रुपयां",-13.69823169708252],["▁веков",-13.69823932647705],["олон",-13.698241233825684],["▁Qərb",-13.698262214660645],["▁ultimii",-13.698265075683594],["يدىغان",-13.69826889038086],["هتم",-13.698283195495604],["ुपर्छ",-13.698291778564451],["ดีมาก",-13.69831085205078],["▁Бла",-13.698341369628906],["▁proizvode",-13.69834327697754],["上手",-13.698365211486816],["индеги",-13.698371887207031],["опу",-13.698373794555664],["म्बा",-13.698397636413574],["▁Restaura",-13.698397636413574],["▁sick",-13.698410034179688],["▁иду",-13.698420524597168],["▁בגיל",-13.698436737060549],["teket",-13.698458671569824],["▁Kelas",-13.69847297668457],["▁المف",-13.69847297668457],["▁panaši",-13.69847583770752],["▁лекови",-13.69848346710205],["ഖാ",-13.69848918914795],["1,2",-13.698494911193848],["ркі",-13.698495864868164],["▁প্রাণ",-13.698495864868164],["പ്പെടുത്തി",-13.69850730895996],["pupu",-13.698518753051758],["ପୋ",-13.698518753051758],["szol",-13.698546409606934],["▁scriu",-13.698558807373049],["▁Respect",-13.698561668395996],["cină",-13.698563575744627],["демократ",-13.698575973510742],["▁Ingat",-13.698579788208008],["▁құжат",-13.698596954345703],["▁Elias",-13.698604583740234],["▁diskret",-13.698609352111816],["völl",-13.69863224029541],["▁adamları",-13.698647499084473],["▁Įmon",-13.698653221130373],["єва",-13.698667526245115],["▁Министер",-13.698680877685549],["▁تخصص",-13.698712348937988],["▁630",-13.69871425628662],["దూ",-13.698716163635254],["▁Красно",-13.698719024658203],["enstein",-13.698737144470217],["vulling",-13.698745727539062],["▁sozialen",-13.698752403259276],["▁dobrá",-13.698755264282228],["ட்ஸ்",-13.698768615722656],["attaque",-13.698772430419922],["▁Dylan",-1
3.698802947998049],["▁peggio",-13.698803901672363],["军队",-13.698805809020996],["බැ",-13.698837280273438],["sdorf",-13.698848724365234],["▁jednotliv",-13.698850631713867],["时间内",-13.698896408081056],["çek",-13.698904037475586],["▁жаштар",-13.698904991149902],["▁parçası",-13.698907852172852],["ლერი",-13.69892120361328],["▁procurar",-13.698945045471191],["每一個",-13.698957443237305],["▁klubov",-13.698963165283203],["外國",-13.698973655700684],["ÁM",-13.698976516723633],["▁امین",-13.699005126953123],["niec",-13.69900894165039],["מאן",-13.69901180267334],["била",-13.699045181274414],["▁unutma",-13.699050903320312],["▁expressa",-13.699058532714844],["ఫా",-13.699064254760742],["▁kvet",-13.699074745178224],["▁autoridad",-13.69907569885254],["လိမ္",-13.699078559875488],["간의",-13.699097633361816],["سٽر",-13.699101448059082],["kusan",-13.699122428894045],["▁ахуй",-13.69912815093994],["vajú",-13.699164390563965],["▁Katanya",-13.699176788330078],["▁dopis",-13.69920539855957],["るので",-13.699228286743164],["▁bogate",-13.699236869812012],["▁οικονομία",-13.699244499206545],["▁vaxtı",-13.699246406555176],["▁3,2",-13.699256896972656],["▁الاجتماع",-13.69926643371582],["મંત્ર",-13.699281692504885],["▁íslenska",-13.69930934906006],["альної",-13.69931411743164],["▁Kvinder",-13.69931983947754],["utė",-13.69932460784912],["น่ะ",-13.699329376220703],["▁공공",-13.699333190917969],["إصابة",-13.699342727661133],["ង្គ",-13.699356079101562],["tropi",-13.699357986450195],["ески",-13.69939422607422],["ด้า",-13.699440956115724],["艳",-13.699442863464355],["▁Tsa",-13.69944667816162],["▁आफै",-13.699451446533203],["烯",-13.699451446533203],["▁идеолог",-13.699453353881836],["Ќ",-13.699458122253418],["疼痛",-13.699461936950684],["▁acci",-13.69946575164795],["墨西哥",-13.699482917785645],["負擔",-13.69948387145996],["▁infekcij",-13.69948959350586],["▁လမ္း",-13.69949722290039],["მდებარე",-13.699503898620604],["总经理",-13.69951629638672],["ゆっくり",-13.699518203735352],["ေက်းဇူး",-13.699525833129885],["▁художеств",-13.699525833129885],["Ń",-13.6995267868042],["ନାହାନ୍ତି",-13.6995267868042],["ః",-13.6995267868042],["▁Näiteks",-13.6995267868042],["▁castelán",-13.6995267868042],["▁conséquence",-13.6995267868042],["▁darbuotojų",-13.6995267868042],["▁decyzji",-13.6995267868042],["▁otázka",-13.6995267868042],["▁përbashkët",-13.6995267868042],["▁εύκολα",-13.6995267868042],["▁арганізацыя",-13.6995267868042],["▁өөрийгөө",-13.6995267868042],["▁دیجیتال",-13.6995267868042],["▁ଉଭୟ",-13.6995267868042],["▁იყიდება",-13.6995267868042],["▁anschließend",-13.699527740478516],["▁১৪২৫",-13.699527740478516],["▁Битола",-13.699528694152832],["▁matangazo",-13.699529647827148],["▁ricordo",-13.699529647827148],["▁গরম",-13.699529647827148],["▁относятся",-13.69953155517578],["fhéadfadh",-13.69953441619873],["ツール",-13.69953441619873],["▁sənaye",-13.699536323547363],["▁људе",-13.699536323547363],["▁ጥበቃ",-13.699536323547363],["▁Lembaga",-13.69953727722168],["▁נאָר",-13.699542045593262],["▁citoyen",-13.699544906616213],["▁બધી",-13.699545860290527],["▁خلیج",-13.69954776763916],["▁کبله",-13.69954776763916],["▁српска",-13.699548721313477],["▁Imagine",-13.69955062866211],["▁arrivé",-13.699559211730955],["▁연락",-13.699560165405272],["▁байгуулсан",-13.699562072753906],["▁imposibil",-13.699563026428224],["▁acusado",-13.699565887451172],["játok",-13.69957160949707],["▁используется",-13.699573516845703],["▁השר",-13.699583053588867],["αση",-13.699591636657717],["▁(2001)",-13.699591636657717],["▁excuse",-13.699594497680664],["රයක්",-13.699597358703612],["љене",-13.699599266052246],["▁Chudai",-13.69
9600219726562],["▁erstellt",-13.69960594177246],["▁παρουσίαση",-13.699618339538574],["က႑",-13.69962215423584],["▁maxaa",-13.69962215423584],["▁Ерөнхийлөгчийн",-13.699636459350586],["spreek",-13.69963836669922],["▁angenehm",-13.699642181396484],["мената",-13.699646949768066],["ຫລັງ",-13.6996488571167],["▁محاولة",-13.699649810791016],["▁сваком",-13.699660301208496],["tsid",-13.699665069580078],["▁precizat",-13.699667930603027],["▁ستون",-13.699679374694824],["വില്ല",-13.69968605041504],["ріс",-13.69969654083252],["▁ofertę",-13.699698448181152],["▁ჰი",-13.699714660644531],["▁njihovim",-13.699724197387695],["▁sesama",-13.69973850250244],["攜帶",-13.699739456176758],["▁επε",-13.699745178222656],["▁주차",-13.699751853942873],["▁ئايرى",-13.699755668640137],["्ना",-13.699784278869627],["▁Hilton",-13.699795722961426],["最後に",-13.699803352355955],["▁consensu",-13.69980812072754],["체계",-13.699823379516602],["klī",-13.69982624053955],["▁sonucunda",-13.699827194213867],["▁dureri",-13.699837684631348],["▁Nö",-13.699875831604004],["▁concurrent",-13.699883460998535],["▁چلے",-13.699892044067385],["▁kişisel",-13.699904441833496],["ллю",-13.699921607971191],["▁чыга",-13.69992446899414],["127",-13.699931144714355],["▁Hour",-13.699932098388672],["▁допълнителни",-13.699945449829102],["▁практике",-13.699954986572266],["bahay",-13.699965476989746],["efekt",-13.699970245361328],["▁resultar",-13.699979782104492],["▁الكلام",-13.700000762939451],["▁namesti",-13.7000093460083],["▁collaboration",-13.70001983642578],["▁Mục",-13.700024604797363],["▁տեսակ",-13.700032234191896],["route",-13.700033187866213],["▁Kazan",-13.70005226135254],["ផ",-13.70005702972412],["▁инструменти",-13.700088500976562],["▁síl",-13.70008945465088],["plac",-13.700095176696776],["kalbi",-13.700105667114258],["▁వర్ష",-13.700105667114258],["▁Kapitel",-13.70011043548584],["▁megfelel",-13.700138092041016],["▁Chau",-13.700145721435549],["▁investigadores",-13.700154304504396],["▁altos",-13.700162887573242],["חזיר",-13.70016384124756],["▁നിന്നുള്ള",-13.70016860961914],["▁näiden",-13.700172424316406],["аналар",-13.700186729431152],["ЈИ",-13.700194358825684],["արա",-13.700213432312012],["▁явц",-13.700218200683594],["badet",-13.70022201538086],["പത്",-13.700239181518556],["▁kombinere",-13.70024871826172],["ميم",-13.700249671936035],["▁ներդրում",-13.700262069702148],["▁maksas",-13.700275421142578],["▁ተቃውሞ",-13.700284004211426],["лса",-13.70029640197754],["λευκ",-13.700298309326172],["▁ചൂട",-13.70030403137207],["▁selected",-13.700304985046388],["ရုပ္",-13.700329780578612],["વાઈ",-13.700337409973145],["▁inzwischen",-13.700339317321776],["▁كردن",-13.700352668762209],["▁εξω",-13.700371742248535],["▁détails",-13.7003812789917],["юючи",-13.700383186340332],["▁없을",-13.700394630432127],["▁пришли",-13.70041561126709],["УВА",-13.700419425964355],["▁nepří",-13.700471878051758],["ධාන",-13.700483322143556],["▁உடை",-13.70048713684082],["דרוש",-13.700493812561035],["kët",-13.700542449951172],["▁Tecno",-13.700562477111816],["▁pismo",-13.700562477111816],["讲述",-13.700616836547852],["▁gril",-13.70063591003418],["алне",-13.70065975189209],["ዳት",-13.700677871704102],["▁bhaile",-13.700688362121582],["▁ඉඳ",-13.700695991516112],["▁hj",-13.700698852539062],["цкіх",-13.700701713562012],["▁مچ",-13.700719833374023],["▁görünüm",-13.700723648071287],["▁konk",-13.700730323791504],["▁දුව",-13.70073127746582],["όρι",-13.700740814208984],["▁vpis",-13.7007417678833],["chado",-13.700743675231934],["כד",-13.700756072998049],["▁workers",-13.700759887695312],["ిస్తుంది",-13.700772285461426],["不像",-13.7
00775146484377],["▁fyldt",-13.700793266296388],["▁apropiere",-13.700801849365234],["▁убива",-13.700812339782717],["▁kristne",-13.70081901550293],["▁креп",-13.700823783874512],["хөө",-13.700839042663574],["那么多",-13.70083999633789],["▁paliek",-13.700850486755373],["రాజ్య",-13.70088005065918],["▁حمام",-13.700883865356444],["▁matchen",-13.700894355773926],["szenie",-13.700895309448242],["▁رها",-13.700898170471191],["ताम्",-13.70090103149414],["ఓ",-13.700902938842772],["▁egyetem",-13.700912475585938],["нської",-13.700928688049316],["لىگەن",-13.700929641723633],["▁авторски",-13.700934410095217],["puxo",-13.700945854187012],["▁Róm",-13.700945854187012],["ليه",-13.700955390930176],["corpora",-13.70095920562744],["Δεν",-13.700969696044922],["تيار",-13.700995445251465],["minta",-13.701010704040527],["รวดเร็ว",-13.70102310180664],["▁udarbejde",-13.701025009155272],["▁gedən",-13.70106315612793],["▁idin",-13.70108413696289],["tinti",-13.701087951660156],["vusi",-13.701098442077637],["बास",-13.701114654541016],["▁dér",-13.701125144958496],["చేయ",-13.701129913330078],["బట్ట",-13.701146125793455],["▁nepriklausom",-13.70114803314209],["▁مجرم",-13.701149940490724],["ಪೂ",-13.70115089416504],["WAK",-13.701151847839355],["сындағы",-13.70116901397705],["ذن",-13.701169967651367],["มีความสุข",-13.70118522644043],["عرق",-13.701194763183594],["▁zaista",-13.701201438903809],["▁presentere",-13.701202392578123],["tiisa",-13.701204299926758],["▁uređen",-13.70120620727539],["▁məhsulları",-13.701229095458984],["▁масти",-13.701242446899414],["▁socios",-13.701249122619627],["▁तामाङ",-13.701251983642578],["▁القط",-13.701268196105955],["cella",-13.701272010803224],["▁şekil",-13.701281547546388],["фест",-13.701290130615234],["▁nekih",-13.70129680633545],["▁pomohl",-13.70130443572998],["ღე",-13.701316833496094],["▁Црна",-13.70134449005127],["ճառ",-13.701345443725586],["lagun",-13.701360702514648],["校園",-13.701398849487305],["viya",-13.701400756835938],["ूट",-13.70140552520752],["▁کولی",-13.70141315460205],["ყუ",-13.701446533203123],["▁znane",-13.701451301574709],["nčios",-13.701456069946287],["СН",-13.701457977294922],["studium",-13.701481819152832],["▁Πρ",-13.701496124267578],["▁Pry",-13.701497077941896],["▁programul",-13.701499938964844],["▁suveren",-13.70150089263916],["субсиди",-13.701504707336426],["▁открити",-13.701507568359377],["зав",-13.70151710510254],["▁코스",-13.701532363891602],["рық",-13.701556205749512],["gró",-13.701567649841309],["▁думка",-13.701568603515623],["▁merket",-13.70157241821289],["калі",-13.701587677001951],["▁vlasnik",-13.701597213745115],["المزيد",-13.701605796813965],["ഥി",-13.701618194580078],["▁Skapa",-13.701623916625977],["▁Clari",-13.701651573181152],["▁слави",-13.701683044433594],["课堂",-13.701688766479492],["октябр",-13.701723098754885],["▁šar",-13.7017240524292],["▁фл",-13.701730728149414],["▁қозға",-13.70175838470459],["树立",-13.701764106750488],["阿姨",-13.701773643493652],["јети",-13.70177936553955],["▁ženska",-13.7017822265625],["▁sacri",-13.701784133911133],["頒",-13.70178508758545],["放鬆",-13.701787948608398],["వన్",-13.701788902282717],["▁hårt",-13.701789855957031],["▁cavall",-13.701796531677246],["▁felelős",-13.701797485351562],["နှုန်း",-13.70179843902588],["ផ្ទៃ",-13.70179843902588],["Ngươi",-13.701799392700195],["▁dilaporkan",-13.701799392700195],["▁mládež",-13.701799392700195],["▁priemonių",-13.701799392700195],["▁sewaktu",-13.701799392700195],["▁tiešām",-13.701799392700195],["▁κατασκευή",-13.701799392700195],["▁дырэктар",-13.701799392700195],["▁לצורך",-13.701799392700195],["▁દક્ષિણ",-1
3.701799392700195],["30,000",-13.701801300048828],["▁nowoczesny",-13.701801300048828],["▁санкции",-13.701801300048828],["冰箱",-13.701801300048828],["▁közzé",-13.701802253723145],["▁riaditeľ",-13.701802253723145],["▁ujumbe",-13.701802253723145],["▁костюм",-13.70180320739746],["▁פונעם",-13.70180320739746],["▁Dejting",-13.701805114746094],["▁Englisch",-13.701805114746094],["ဖို႕",-13.70180892944336],["▁svoja",-13.701810836791992],["▁کيږي",-13.701811790466309],["▁تەسىر",-13.701812744140623],["▁obiettivi",-13.70181369781494],["▁جھوٹ",-13.70181655883789],["▁પડશે",-13.70181655883789],["ስቶ",-13.701817512512209],["▁leadership",-13.701817512512209],["▁Paradise",-13.701818466186523],["▁explotación",-13.701820373535156],["▁Anjeun",-13.701821327209473],["SZT",-13.701825141906738],["▁خادم",-13.701828956604004],["▁жазылған",-13.70182991027832],["▁miłości",-13.701830863952637],["หลับ",-13.701831817626951],["пите",-13.701836585998535],["▁Mwanza",-13.701836585998535],["▁jungen",-13.701836585998535],["▁srpski",-13.701838493347168],["▁Sehat",-13.701844215393066],["[18]",-13.701848030090332],["ဒ်",-13.701848983764648],["▁ახლოს",-13.701848983764648],["▁odgovornosti",-13.70185375213623],["▁മുസ്",-13.701854705810549],["▁Venus",-13.70185661315918],["▁programação",-13.701861381530762],["▁Tore",-13.701863288879396],["▁предпринима",-13.701864242553713],["▁породице",-13.70186996459961],["▁ფაქტი",-13.701870918273926],["فرة",-13.70188045501709],["자들",-13.701886177062988],["合成",-13.701887130737305],["▁газраас",-13.701891899108888],["▁Вардар",-13.701895713806152],["▁उतार",-13.701908111572266],["▁légère",-13.701921463012695],["▁සෝ",-13.701924324035645],["▁жазылган",-13.701936721801758],["द्या",-13.70193862915039],["▁Karşı",-13.701949119567873],["▁militare",-13.70195198059082],["▁Uanset",-13.701974868774414],["在本",-13.701974868774414],["ሱን",-13.701983451843262],["▁verskyn",-13.701983451843262],["引入",-13.701985359191896],["γερ",-13.701993942260742],["АРЫ",-13.702013969421388],["степен",-13.702021598815918],["▁فرصة",-13.70202350616455],["▁వాళ్ళు",-13.702027320861816],["▁zdravo",-13.702032089233398],["posicion",-13.702054977416992],["▁бастады",-13.702070236206056],["HUR",-13.702077865600586],["▁nariai",-13.70209789276123],["▁Munt",-13.702105522155762],["▁1:0",-13.702110290527344],["▁חברים",-13.702140808105469],["ooo",-13.702160835266112],["стыру",-13.70216178894043],["▁salarial",-13.70216464996338],["வரின்",-13.702166557312012],["ਕੁ",-13.702191352844238],["▁minsan",-13.702200889587402],["▁laureat",-13.702210426330566],["▁цифров",-13.702219009399414],["▁Feira",-13.70222282409668],["ироваться",-13.702225685119627],["▁ແຮງ",-13.702239990234377],["еуі",-13.702244758605955],["▁explorar",-13.70224952697754],["▁උනේ",-13.702251434326172],["▁নং",-13.702253341674805],["▁Egg",-13.702258110046388],["▁regtig",-13.702259063720703],["▁վատ",-13.702260971069336],["▁ചിരി",-13.702275276184082],["▁ਸ਼ਾਮਲ",-13.70228385925293],["▁Fresh",-13.702285766601562],["▁nemendur",-13.702298164367676],["▁ዩኒቨርሲቲ",-13.70230770111084],["λακ",-13.70232105255127],["▁loài",-13.702377319335938],["▁iskren",-13.702381134033203],["▁hinc",-13.702384948730469],["▁ГК",-13.702415466308594],["▁ଦେଶର",-13.702422142028809],["▁внаслідок",-13.70244026184082],["▁سرگرم",-13.702445030212402],["▁දෙයි",-13.702455520629885],["▁Tic",-13.702478408813477],["▁адрас",-13.70249843597412],["▁ເງິນ",-13.702509880065918],["▁lapsed",-13.702531814575195],["▁Lucia",-13.702549934387209],["▁ارتقا",-13.702570915222168],["slipp",-13.7025785446167],["▁40.000",-13.702584266662598],["iisip",-13.7025871276855
49],["बंदी",-13.702597618103027],["▁طریقے",-13.702613830566406],["Алтай",-13.702625274658203],["▁ഇന്നത്തെ",-13.702649116516112],["▁najboljši",-13.702665328979492],["▁Lú",-13.702680587768556],["▁ugotovi",-13.702702522277832],["▁Фота",-13.702714920043944],["▁گویی",-13.702720642089844],["▁opłat",-13.702726364135742],["ของตัวเอง",-13.702730178833008],["296",-13.70274829864502],["▁LEG",-13.702750205993652],["אול",-13.702751159667969],["коро",-13.702754974365234],["が必要です",-13.70275592803955],["▁Xã",-13.702759742736816],["▁prvega",-13.70276165008545],["нуўся",-13.702776908874512],["▁περιορισ",-13.702777862548828],["▁Naš",-13.702780723571776],["▁precedenti",-13.702797889709473],["▁Dodaj",-13.702805519104004],["▁voraus",-13.702838897705078],["vosť",-13.702845573425291],["kowego",-13.70284652709961],["▁գալ",-13.702850341796877],["ലിന്റെ",-13.702855110168455],["▁פרטיות",-13.70286464691162],["爱的",-13.702869415283203],["▁ጉዳት",-13.702898979187012],["家伙",-13.702919006347656],["▁טיול",-13.702923774719238],["▁başlama",-13.70293140411377],["▁કેસ",-13.702942848205566],["▁honra",-13.702962875366213],["məklə",-13.703010559082031],["ផល",-13.703015327453612],["▁ይዘት",-13.703018188476562],["▁syna",-13.70301914215088],["▁फोरम",-13.703041076660156],["لىرىدىن",-13.703045845031738],["▁СЪ",-13.703045845031738],["▁auditor",-13.703046798706056],["忘记",-13.703048706054688],["▁ජාති",-13.703049659729004],["▁noudat",-13.703067779541016],["三分",-13.703068733215332],["ราม",-13.703084945678713],["▁పూజ",-13.70309066772461],["lunio",-13.70310878753662],["क्षेप",-13.703110694885254],["បង្កើត",-13.703112602233888],["神经",-13.70311450958252],["を受けた",-13.703115463256836],["ynghor",-13.703134536743164],["▁הרש",-13.703134536743164],["▁sugesti",-13.703137397766112],["及其他",-13.703143119812012],["Sim",-13.703176498413086],["▁আলী",-13.703189849853516],["▁Visst",-13.703200340270996],["▁Група",-13.703205108642578],["▁šeima",-13.703210830688477],["南海",-13.70322036743164],["▁autostrad",-13.703235626220703],["▁знаци",-13.703245162963867],["プラ",-13.703248977661133],["ရောင်",-13.703261375427246],["▁typy",-13.703262329101562],["Aeroport",-13.703265190124512],["▁ידוע",-13.703269004821776],["▁시리즈",-13.703287124633787],["▁जरा",-13.703290939331056],["▁sampah",-13.703293800354004],["മൂന്ന",-13.703299522399902],["ಸುರ",-13.703302383422852],["ставу",-13.703320503234863],["nevezés",-13.703328132629396],["▁शक्य",-13.703335762023926],["▁преноси",-13.703346252441406],["▁Ауыл",-13.70334815979004],["등록",-13.703349113464355],["являється",-13.703351020812988],["ژی",-13.70335578918457],["архитектур",-13.703370094299316],["bail",-13.70338535308838],["modi",-13.703434944152832],["varuste",-13.703449249267578],["▁vět",-13.703450202941896],["ovao",-13.703455924987791],["▁pesë",-13.703469276428224],["▁מכיר",-13.703481674194336],["скуп",-13.703483581542969],["financi",-13.703490257263184],["▁ubat",-13.703500747680664],["▁placering",-13.703518867492676],["देवी",-13.703539848327637],["fiant",-13.703540802001951],["ულს",-13.703550338745115],["ແຜນການ",-13.70355224609375],["▁naprave",-13.703553199768066],["paşa",-13.7035551071167],["▁जाति",-13.703561782836914],["tavate",-13.703582763671877],["▁රජා",-13.703590393066406],["ולוגיה",-13.703595161437988],["▁دیش",-13.703600883483888],["ուռ",-13.70360279083252],["▁construire",-13.703603744506836],["יקן",-13.70361042022705],["▁Radu",-13.703614234924316],["පල්",-13.703640937805176],["ซาน",-13.703649520874023],["يږي",-13.703680038452148],["抜け",-13.703690528869627],["▁stanja",-13.703693389892578],["eeyeen",-13.703718185424805],["▁Kullan",-
13.703730583190918],["ለል",-13.703731536865234],["▁poliitik",-13.70373821258545],["▁rokom",-13.70373821258545],["▁hajó",-13.703750610351562],["0.2",-13.703758239746094],["పర",-13.703763008117676],["不算",-13.703795433044434],["▁произнес",-13.70383358001709],["▁želel",-13.703834533691406],["▁käll",-13.70384693145752],["▁močno",-13.703847885131836],["临床",-13.703913688659668],["ථා",-13.703920364379885],["▁կենտրոնի",-13.703929901123049],["▁jiného",-13.703940391540527],["▁restante",-13.703941345214844],["हं",-13.703969955444336],["DIS",-13.703970909118652],["▁During",-13.703975677490234],["▁revolucionar",-13.70397663116455],["▁લગાવ",-13.703981399536133],["▁Укра",-13.70398998260498],["争取",-13.704017639160156],["▁fatura",-13.704018592834473],["监测",-13.704023361206056],["迴",-13.704026222229004],["▁limites",-13.704031944274902],["▁Petrol",-13.70403289794922],["譯",-13.704034805297852],["▁aritu",-13.70405387878418],["虚拟",-13.704057693481444],["埔",-13.704065322875977],["คาสิโน",-13.70406723022461],["смет",-13.704068183898926],["▁zraka",-13.704068183898926],["▁մշակույթի",-13.704068183898926],["찌",-13.704069137573242],["päätöksen",-13.70407772064209],["▁Latviešu",-13.70407772064209],["▁Misalnya",-13.70407772064209],["▁Phoenix",-13.70407772064209],["▁καλοκαίρι",-13.70407772064209],["▁लखनऊ",-13.70407772064209],["▁ਪਟਿਆਲਾ",-13.70407772064209],["▁તપાસ",-13.70407772064209],["▁సాహిత్య",-13.70407772064209],["▁높이",-13.70407772064209],["▁skutečně",-13.704078674316406],["ပတ္သက္",-13.70408058166504],["▁المواطن",-13.704082489013672],["▁avqust",-13.704083442687988],["▁לעומת",-13.704084396362305],["▁ఫస్ట్",-13.704086303710938],["▁Marianne",-13.704087257385254],["▁mencintai",-13.704087257385254],["▁എല്ലാവരും",-13.704087257385254],["▁cothrom",-13.704089164733888],["▁பேச்சு",-13.704090118408203],["▁töltött",-13.704093933105469],["▁diikuti",-13.704094886779783],["▁નહી",-13.704096794128418],["▁확보",-13.704097747802734],["▁매력",-13.704100608825684],["▁хүмүүсийн",-13.704105377197266],["▁исполнения",-13.704130172729492],["▁Thể",-13.704131126403809],["ចំ",-13.704137802124023],["Hey",-13.704145431518556],["▁zes",-13.70414924621582],["▁צד",-13.704151153564451],["▁bæjar",-13.704161643981934],["▁хоолой",-13.704161643981934],["দর্শ",-13.704163551330566],["▁blíž",-13.704163551330566],["一行",-13.704187393188477],["▁делото",-13.704188346862791],["▁gainean",-13.70419692993164],["▁الصحيح",-13.704197883605955],["▁pavo",-13.704205513000488],["ڪٽ",-13.70420742034912],["1.7",-13.704208374023438],["▁batekin",-13.704216957092283],["▁Srbiji",-13.704235076904297],["▁pradėti",-13.70423984527588],["▁ലാല്",-13.704265594482422],["▁město",-13.704266548156738],["▁măcar",-13.704282760620115],["▁topshir",-13.704285621643066],["▁AMEA",-13.704289436340332],["▁വിവാഹം",-13.704290390014648],["▁cieľom",-13.704296112060549],["▁біле",-13.704299926757812],["aquests",-13.704305648803713],["▁kosztów",-13.70430850982666],["ಾಗಿರುವ",-13.704322814941406],["▁prosty",-13.70434856414795],["اکی",-13.704368591308594],["▁схвати",-13.704370498657228],["oitettu",-13.704385757446287],["▁események",-13.7044038772583],["니아",-13.704407691955566],["torem",-13.704413414001465],["▁platin",-13.704419136047363],["▁నిన్న",-13.704426765441896],["▁suffi",-13.704431533813477],["▁Între",-13.70443344116211],["▁berhak",-13.704438209533691],["▁රපටය",-13.70444679260254],["ავთ",-13.704449653625488],["▁spunea",-13.70445156097412],["不怕",-13.704458236694336],["▁Σι",-13.70447063446045],["▁принос",-13.704477310180664],["▁dritt",-13.704487800598145],["▁çocuğun",-13.704506874084473],["▁خۇ",-13.7045164108276
37],["កង",-13.704517364501951],["▁चिकित्सा",-13.704519271850586],["▁안에",-13.704523086547852],["ვედ",-13.704533576965332],["llur",-13.704545021057127],["▁ишмер",-13.704559326171877],["овою",-13.704599380493164],["▁բնական",-13.704599380493164],["▁건축",-13.704604148864746],["žų",-13.704608917236328],["▁slutte",-13.704611778259276],["luje",-13.704626083374023],["▁ക്യാ",-13.704633712768556],["欧元",-13.704633712768556],["▁პრი",-13.704649925231934],["▁obok",-13.704657554626465],["праве",-13.704659461975098],["▁visage",-13.704660415649414],["ריד",-13.704671859741213],["▁차지",-13.704672813415527],["▁୩୦",-13.704681396484377],["үлөт",-13.704694747924805],["▁hormone",-13.704696655273438],["▁miljoonaa",-13.704710960388184],["▁festivala",-13.704713821411133],["▁агре",-13.704720497131348],["▁частині",-13.704720497131348],["▁pokal",-13.704731941223145],["▁devra",-13.70474624633789],["▁կոն",-13.704769134521484],["139",-13.7047758102417],["ставляет",-13.704778671264648],["▁ផ",-13.704795837402344],["mācība",-13.704797744750977],["▁ກ່ອນ",-13.704805374145508],["▁കാർ",-13.70481777191162],["àrr",-13.70482349395752],["▁معاشی",-13.704827308654783],["▁denarja",-13.704833030700684],["hám",-13.704852104187012],["▁Australian",-13.704853057861328],["ducere",-13.704858779907228],["▁valget",-13.704867362976074],["▁រក្សា",-13.704880714416504],["▁Baker",-13.704896926879885],["матри",-13.704912185668944],["garh",-13.704937934875488],["ποί",-13.704941749572754],["ของท่าน",-13.704955101013184],["ърт",-13.704957962036133],["کال",-13.704983711242676],["▁obiective",-13.705002784729004],["jünk",-13.705016136169434],["▁αλλο",-13.705042839050291],["desk",-13.705045700073242],["▁خوند",-13.70505428314209],["▁օգտագործ",-13.705055236816406],["ေသာ္",-13.705060958862305],["▁Kaar",-13.705074310302734],["▁צעיר",-13.705078125],["▁تجعل",-13.70511245727539],["▁Alexandra",-13.705135345458984],["ടക്കം",-13.705137252807615],["krip",-13.70514965057373],["िष्ट",-13.705158233642578],["▁eiland",-13.70517635345459],["▁نجم",-13.705178260803224],["▁Kila",-13.705181121826172],["▁познато",-13.705182075500488],["एर",-13.705188751220703],["▁पठाउन",-13.705190658569336],["myndir",-13.705202102661133],["▁battle",-13.705207824707031],["०७",-13.70523166656494],["▁familio",-13.70523452758789],["ђ",-13.705238342285156],["▁nosil",-13.705246925354004],["▁баласы",-13.705262184143066],["▁horien",-13.70527458190918],["▁Καμ",-13.705289840698242],["Kad",-13.705294609069824],["panta",-13.705294609069824],["▁جلا",-13.705300331115724],["▁سکر",-13.705313682556152],["▁חוץ",-13.70533275604248],["▁lärare",-13.705341339111328],["եքս",-13.705352783203123],["ੋਨ",-13.705364227294922],["▁aktier",-13.705368995666504],["▁Images",-13.705384254455566],["tissimi",-13.705385208129885],["prire",-13.705391883850098],["▁prepared",-13.705395698547363],["▁हॉ",-13.70541286468506],["▁Farb",-13.705418586730955],["continu",-13.705427169799805],["ସ୍ଥଳ",-13.70546054840088],["▁serba",-13.705464363098145],["▁ម៉ូតូ",-13.70548152923584],["▁ervan",-13.705496788024902],["ախոս",-13.705527305603027],["▁etkinlikler",-13.705527305603027],["▁ئىگى",-13.705531120300291],["▁izbira",-13.705546379089355],["▁ئەن",-13.70555019378662],["▁balet",-13.705551147460938],["▁لور",-13.705560684204102],["▁Feel",-13.705572128295898],["▁měsíce",-13.70557689666748],["ច្បាប់",-13.705577850341797],["▁Наук",-13.705586433410645],["▁FIR",-13.705589294433594],["▁օգտագործել",-13.705609321594238],["ಮನ್",-13.705610275268556],["▁deegaan",-13.705611228942873],["▁बहा",-13.705626487731934],["ininkė",-13.70563507080078],["▁Хоча",-13.7056379318237
3],["▁ਲੇਖ",-13.705646514892578],["▁Korean",-13.70565700531006],["▁билдирүү",-13.705697059631348],["त्ति",-13.705707550048828],["ىتىنى",-13.70571517944336],["дығын",-13.705717086791992],["▁lavar",-13.705737113952637],["▁klassik",-13.705760955810549],["ৰণ",-13.705780982971191],["▁плот",-13.705780982971191],["▁superb",-13.70579719543457],["রুল",-13.705811500549316],["ഞ്ജ",-13.705842971801758],["▁bringe",-13.705848693847656],["▁Nafarroa",-13.705851554870604],["▁widział",-13.705862045288086],["jimu",-13.705870628356934],["してくれ",-13.705873489379885],["ถือว่า",-13.705881118774414],["▁טל",-13.705881118774414],["verantwoordelijk",-13.705886840820312],["suchen",-13.705902099609377],["nitz",-13.70591640472412],["កើត",-13.705924034118652],["▁прах",-13.705925941467283],["▁Boro",-13.705927848815918],["▁roof",-13.705934524536133],["▁تبصره",-13.705934524536133],["毫无",-13.705951690673828],["ประสิทธิภาพ",-13.705967903137209],["Сучасн",-13.705968856811523],["▁massi",-13.705973625183104],["▁Teie",-13.705975532531738],["▁różnic",-13.705992698669434],["▁Νικ",-13.705994606018066],["在日本",-13.705997467041016],["▁Bölge",-13.706000328063965],["ekből",-13.706008911132812],["bakteri",-13.706014633178713],["▁Vinci",-13.706016540527344],["▁특성",-13.706018447875977],["▁obitelj",-13.706029891967772],["้อง",-13.706039428710938],["交易所",-13.706045150756836],["▁Noel",-13.706068992614746],["▁használható",-13.706076622009276],["▁seega",-13.706079483032228],["ورات",-13.706085205078123],["南方",-13.706140518188477],["لوگ",-13.706162452697754],["ίδια",-13.706170082092283],["▁поўна",-13.706183433532717],["محكمة",-13.70618724822998],["націонал",-13.706212043762209],["▁dauert",-13.706225395202637],["▁democrat",-13.706241607666016],["▁लढ",-13.706258773803713],["淚",-13.706266403198242],["▁때문",-13.706270217895508],["▁Turku",-13.706278800964355],["ໄຫ",-13.706286430358888],["യിലുള്ള",-13.706293106079102],["▁Lähi",-13.70629596710205],["▁ይሆናል",-13.706299781799316],["扑",-13.706300735473633],["检察",-13.706307411193848],["奮",-13.706311225891112],["嚇",-13.706318855285645],["擾",-13.706336975097656],["▁krí",-13.706351280212402],["▁vivenda",-13.70635223388672],["ဥပေဒ",-13.70635986328125],["スペース",-13.70635986328125],["'",-13.706360816955566],["Энхболд",-13.706360816955566],["ਬਿਊਰੋ",-13.706360816955566],["▁Lūdzu",-13.706360816955566],["▁coñecido",-13.706360816955566],["▁etibarən",-13.706360816955566],["▁frecuente",-13.706360816955566],["▁odpověď",-13.706360816955566],["▁thriller",-13.706360816955566],["▁thánh",-13.706360816955566],["▁бясплатна",-13.706360816955566],["▁কৃষি",-13.706360816955566],["▁నువ్వు",-13.706360816955566],["▁აცხადებს",-13.706360816955566],["▁მუსიკა",-13.706360816955566],["▁მშვიდ",-13.7063627243042],["▁recipient",-13.706363677978516],["مبادرة",-13.706364631652832],["▁bármely",-13.706365585327148],["ಷ್ಠ",-13.706366539001465],["▁දකුණු",-13.70636749267578],["▁طارق",-13.706369400024414],["▁yukarı",-13.70637321472168],["▁සියළු",-13.70637321472168],["▁идэвх",-13.706375122070312],["挑選",-13.706376075744627],["▁tradizionale",-13.706380844116213],["▁ASEAN",-13.706382751464844],["▁تاسیس",-13.70638656616211],["Энэ",-13.706391334533691],["дугу",-13.706391334533691],["▁ondersoek",-13.706391334533691],["▁Subscribe",-13.706400871276855],["▁သူတို႔",-13.706403732299805],["ထြန္း",-13.706405639648438],["▁êdî",-13.706409454345703],["▁suplement",-13.706411361694336],["▁Disember",-13.706427574157717],["ധര",-13.70644187927246],["▁18%",-13.70644760131836],["▁získať",-13.706454277038574],["▁Επί",-13.70645523071289],["▁يحمل",-13.706469535827637],["ถานที่ท่องเที
่ยว",-13.706470489501951],["▁Learn",-13.706475257873535],["▁करण्याचा",-13.706480979919434],["hegy",-13.706498146057127],["▁bheadh",-13.706514358520508],["▁əhalinin",-13.70652198791504],["▁Кино",-13.706531524658203],["▁покажува",-13.706552505493164],["▁zabawy",-13.706555366516112],["カラー",-13.706555366516112],["▁देखते",-13.706570625305176],["▁сервіс",-13.70659065246582],["▁Miquel",-13.706595420837402],["جبهة",-13.706598281860352],["但是在",-13.706615447998049],["ህል",-13.70661735534668],["▁études",-13.706623077392578],["▁وام",-13.706625938415527],["▁zusätzliche",-13.706639289855955],["उने",-13.70664405822754],["▁قائلا",-13.706645011901855],["În",-13.706652641296388],["命名",-13.706658363342283],["▁discovered",-13.706668853759766],["ämä",-13.706672668457031],["▁айту",-13.70667839050293],["▁Tavo",-13.706701278686523],["▁státní",-13.7067232131958],["▁ਰਹਿ",-13.706727981567385],["▁kiadás",-13.706732749938965],["▁вызыва",-13.706737518310549],["▁Autori",-13.706747055053713],["▁Christina",-13.706748008728027],["他表示",-13.706757545471191],["▁utila",-13.70676040649414],["llón",-13.706762313842772],["▁eelmise",-13.706764221191406],["מיק",-13.706765174865724],["▁Cuir",-13.706767082214355],["povi",-13.706783294677734],["▁기능을",-13.706798553466797],["▁inspect",-13.70680809020996],["权威",-13.70681381225586],["▁извршен",-13.70682144165039],["▁omului",-13.706823348999023],["ődött",-13.706859588623049],["▁gøy",-13.70686149597168],["garten",-13.706866264343262],["NAI",-13.706870079040527],["იდი",-13.70687198638916],["▁groupes",-13.706876754760742],["▁tekin",-13.70688819885254],["வல்",-13.706891059875488],["任何人",-13.706897735595703],["▁پیٹ",-13.706902503967283],["cát",-13.70692253112793],["▁proclama",-13.706927299499512],["▁Huis",-13.706934928894045],["▁ikkuna",-13.706938743591309],["غە",-13.706950187683104],["▁nastave",-13.706960678100586],["▁meisje",-13.706989288330078],["▁качеството",-13.706995010375977],["▁విద్యా",-13.70700740814209],["▁Islom",-13.70701026916504],["ymų",-13.707019805908203],["ग्रस्त",-13.70703125],["▁ökar",-13.70703125],["▁Сед",-13.707043647766112],["▁mělo",-13.70706272125244],["კომ",-13.707066535949709],["ována",-13.707069396972656],["ご予約",-13.707080841064451],["▁litrato",-13.707086563110352],["گرفت",-13.70710563659668],["▁zombie",-13.707108497619627],["▁considerare",-13.707110404968262],["▁cadangan",-13.70712184906006],["▁szkl",-13.707138061523438],["ującego",-13.70715045928955],["▁осв",-13.707183837890623],["▁وراء",-13.707200050354004],["▁lungul",-13.707207679748535],["▁стават",-13.707212448120115],["▁תור",-13.707212448120115],["markkinoi",-13.707220077514648],["▁බිම",-13.707220077514648],["стану",-13.707235336303713],["▁Vej",-13.707237243652344],["▁jämför",-13.70724105834961],["▁חינוך",-13.707258224487305],["▁organlari",-13.707260131835938],["▁ddau",-13.707268714904783],["ماذا",-13.707269668579102],["▁լս",-13.707269668579102],["▁decisiones",-13.707293510437012],["nešio",-13.707297325134276],["▁בוקר",-13.70730972290039],["▁Ķī",-13.707314491271973],["▁нямат",-13.707315444946287],["▁Սեւ",-13.707320213317873],["970",-13.707324981689451],["ასი",-13.707330703735352],["šina",-13.707348823547363],["masligi",-13.707356452941896],["▁stundu",-13.707379341125488],["▁saavat",-13.707389831542969],["▁പറ്റി",-13.707396507263184],["ദു",-13.707405090332031],["▁тарта",-13.707405090332031],["ošana",-13.707417488098145],["ىلىك",-13.70742130279541],["申込",-13.70742893218994],["▁둘",-13.707440376281738],["▁буди",-13.707479476928713],["▁باطل",-13.707514762878418],["てくる",-13.707521438598633],["▁vašu",-13.707529067993164],[
"liver",-13.707548141479492],["▁securi",-13.70755386352539],["▁voeg",-13.70758819580078],["▁areal",-13.707646369934082],["alaking",-13.707663536071776],["كيف",-13.707671165466309],["▁kirjasto",-13.707677841186523],["▁Heimat",-13.707682609558104],["മ്പാ",-13.707716941833496],["iteiten",-13.707718849182127],["ırıl",-13.707721710205078],["▁episodio",-13.707740783691406],["▁priateľov",-13.707765579223633],["licitud",-13.707804679870604],["▁प्राथमिकता",-13.707809448242188],["funga",-13.707813262939451],["շն",-13.707825660705566],["SCI",-13.70785903930664],["tuuri",-13.707868576049805],["▁HPV",-13.707870483398438],["▁незна",-13.707873344421388],["▁oplevelser",-13.707884788513184],["▁القول",-13.70792007446289],["ଣ୍ଡି",-13.707921028137209],["Türkiye",-13.707921981811523],["ضرورة",-13.707924842834473],["ილა",-13.707932472229004],["▁docela",-13.707942962646484],["▁lëvizje",-13.707942962646484],["▁hirdet",-13.7079439163208],["▁نامی",-13.707947731018066],["▁Karolin",-13.7079496383667],["ਦਾਸ",-13.707962036132812],["▁yon",-13.707964897155762],["▁Gerade",-13.707966804504396],["▁jolie",-13.707969665527344],["нога",-13.707999229431152],["zanje",-13.708008766174316],["(12",-13.708011627197266],["▁abertura",-13.708048820495604],["käl",-13.708075523376465],["▁hrát",-13.70809841156006],["▁намного",-13.708121299743652],["ହୋ",-13.708125114440918],["mədən",-13.708135604858398],["שוט",-13.708138465881348],["▁באות",-13.708144187927246],["了吗",-13.708160400390623],["▁دولتي",-13.708170890808104],["▁၁၁",-13.708172798156738],["▁thói",-13.708206176757812],["▁diversifica",-13.708207130432127],["▁içer",-13.708211898803713],["ďalej",-13.708216667175291],["▁lapset",-13.708219528198242],["ოდეს",-13.70822048187256],["▁ბავშვი",-13.708239555358888],["მოც",-13.708255767822266],["▁شخصيت",-13.708256721496582],["▁majeur",-13.708266258239746],["phos",-13.708269119262695],["▁kawo",-13.708277702331545],["ማረ",-13.708280563354492],["▁ਰਹਿਣ",-13.708295822143556],["႐ု",-13.708312034606934],["▁احتمالا",-13.7083158493042],["siirto",-13.70832061767578],["▁땅",-13.708322525024414],["▁लाई",-13.708329200744627],["▁Chao",-13.708332061767578],["▁apropia",-13.708337783813477],["かね",-13.70835304260254],["ありました",-13.7083740234375],["hram",-13.708377838134766],["▁резко",-13.708425521850586],["▁Tiến",-13.70844554901123],["ກ່ອນ",-13.708462715148926],["flict",-13.708466529846191],["▁dicunt",-13.70847225189209],["吸引了",-13.708473205566406],["gwyd",-13.708480834960938],["gged",-13.708494186401367],["▁sumie",-13.708511352539062],["lyck",-13.708550453186035],["▁palīdzība",-13.708551406860352],["csol",-13.708555221557615],["▁testing",-13.708556175231934],["▁chúc",-13.708563804626465],["▁dotter",-13.70858669281006],["珊",-13.708587646484377],["▁bozor",-13.708600997924805],["▁සිදුව",-13.708600997924805],["扯",-13.708611488342283],["▁okamžit",-13.708621978759766],["▁Наста",-13.708623886108398],["arstvo",-13.70863914489746],["駐車場",-13.708640098571776],["名古屋",-13.70864486694336],["สารสนเทศ",-13.708647727966309],["読む",-13.708647727966309],["ວັດທະນະທໍາ",-13.708648681640623],["▁Argazki",-13.708648681640623],["▁hvězd",-13.708648681640623],["▁mövzusunda",-13.708648681640623],["▁underholdning",-13.708648681640623],["▁σχέσεις",-13.708648681640623],["▁Некоторые",-13.708648681640623],["▁српском",-13.708648681640623],["▁լրատվական",-13.708648681640623],["▁կուսակցություն",-13.708648681640623],["▁खत्म",-13.708648681640623],["▁আহমেদ",-13.708648681640623],["▁କିମ୍ବା",-13.708648681640623],["▁ಗೌರವ",-13.708648681640623],["▁ඉංග්",-13.708648681640623],["▁දෙපාර්තමේන්තුව",-13.708648681640623]
,["▁නිවැරදි",-13.708648681640623],["▁მიზნით",-13.708648681640623],["▁같아요",-13.708648681640623],["줘",-13.708648681640623],["▁развитию",-13.70864963531494],["▁اپریل",-13.70864963531494],["▁praec",-13.708650588989258],["▁Նրանք",-13.708650588989258],["▁μάλλον",-13.708651542663574],["▁ਕੁਮਾਰ",-13.708651542663574],["▁ഇന്നലെ",-13.708651542663574],["▁ვერსია",-13.70865249633789],["พันธ์",-13.708653450012209],["▁Nyerere",-13.708653450012209],["▁श्रद्धा",-13.708653450012209],["▁मतदाता",-13.708654403686523],["▁সুপার",-13.708656311035156],["▁ډالر",-13.708660125732422],["▁አዋጅ",-13.708660125732422],["カフェ",-13.708661079406738],["รุ่ง",-13.708662986755373],["▁bjóða",-13.708662986755373],["▁Човек",-13.70866584777832],["▁Nusrat",-13.708669662475586],["▁Beijing",-13.708672523498535],["▁fodbold",-13.708673477172852],["▁eksamen",-13.708683013916016],["უწყებ",-13.70868682861328],["ဝယ်",-13.708694458007812],["▁전쟁",-13.708701133728027],["▁хэт",-13.70870304107666],["▁රීසි",-13.708711624145508],["▁ସାରା",-13.708714485168455],["▁gordura",-13.708717346191406],["▁മണ്ണ",-13.708720207214355],["▁మేము",-13.708723068237305],["▁vuelve",-13.70872402191162],["አል",-13.70872688293457],["▁външни",-13.708728790283203],["▁признании",-13.708735466003418],["gjald",-13.708739280700684],["▁khuyên",-13.708755493164062],["▁musia",-13.70876121520996],["СЬ",-13.70877170562744],["fasi",-13.70877456665039],["▁особенности",-13.70878791809082],["▁trije",-13.708796501159668],["íky",-13.7087984085083],["haidh",-13.7088041305542],["▁Македония",-13.708810806274414],["▁Salzburg",-13.70881462097168],["laşıb",-13.708820343017578],["▁युवती",-13.708822250366213],["ύματα",-13.70884132385254],["▁muszę",-13.70884895324707],["▁සුර",-13.708857536315918],["rekisteri",-13.708858489990234],["▁разы",-13.708860397338867],["▁Infinit",-13.708888053894045],["▁يزيد",-13.70888900756836],["▁piatto",-13.708901405334473],["▁الصوت",-13.708902359008787],["▁anlama",-13.708913803100586],["▁Hunter",-13.708914756774902],["▁kirjoittaa",-13.708916664123535],["▁disponibiliza",-13.7089262008667],["گفت",-13.708946228027344],["▁Brook",-13.708955764770508],["Emp",-13.708965301513672],["▁රටට",-13.708969116210938],["▁Tangan",-13.708972930908203],["▁የሆነው",-13.708988189697266],["лыктар",-13.708995819091797],["લેન્ડ",-13.708999633789062],["risidagi",-13.709007263183594],["▁করছেন",-13.709012985229492],["▁sagatavot",-13.709016799926758],["üsse",-13.709025382995604],["chick",-13.709035873413086],["совет",-13.709039688110352],["▁በማስ",-13.709043502807615],["▁Treff",-13.709057807922363],["▁remit",-13.709080696105955],["Informa",-13.709092140197754],["▁poklad",-13.709107398986816],["▁Soviet",-13.709113121032717],["▁vreun",-13.709118843078612],["的最新",-13.709125518798828],["▁Není",-13.709136962890623],["▁लौट",-13.709141731262209],["▁отпуск",-13.709145545959473],["▁kiên",-13.709147453308104],["ologique",-13.709150314331056],["▁tortur",-13.709152221679688],["▁Forslag",-13.709155082702637],["▁Јовановић",-13.709176063537598],["алдық",-13.709208488464355],["在接受",-13.709239959716797],["ijk",-13.709264755249023],["földi",-13.709274291992188],["operativ",-13.709280967712402],["▁iritzi",-13.709285736083984],["▁PRED",-13.709295272827148],["▁дружество",-13.709298133850098],["▁ika",-13.709325790405272],["shore",-13.709332466125488],["rilgan",-13.709342956542969],["▁cement",-13.709359169006348],["기자",-13.709379196166992],["▁spansk",-13.709389686584473],["控股",-13.709393501281738],["naendelea",-13.709403038024902],["▁Billy",-13.70942211151123],["▁სპ",-13.709433555603027],["▁나와",-13.709446907043455],["▁dhí",-13.709
44881439209],["▁धारा",-13.709452629089355],["▁simplici",-13.709466934204102],["▁неким",-13.709476470947266],["るために",-13.709485054016112],["▁Offer",-13.709514617919922],["▁Ей",-13.70952033996582],["ωρα",-13.709535598754885],["▁проблемите",-13.70954704284668],["▁Dhaq",-13.709587097167969],["DHA",-13.70959186553955],["▁بهینه",-13.709603309631348],["völ",-13.709606170654297],["mbok",-13.709612846374512],["ality",-13.70961570739746],["نظار",-13.70961856842041],["▁ಅಂಗ",-13.70961856842041],["▁විසඳ",-13.70964813232422],["▁wojsk",-13.709651947021484],["istički",-13.709668159484863],["▁banana",-13.70970344543457],["▁بيانات",-13.709725379943848],["▁Zorg",-13.709753036499023],["▁הסיפור",-13.709766387939451],["nosťou",-13.709794044494627],["tiedosto",-13.709797859191896],["▁Rank",-13.709811210632324],["त्स",-13.709815979003906],["▁beror",-13.709837913513184],["fandi",-13.709864616394045],["ភព",-13.709894180297852],["▁szed",-13.70991325378418],["▁Filtr",-13.709915161132812],["▁parket",-13.709915161132812],["▁کشید",-13.709919929504396],["▁wstęp",-13.709935188293455],["▁dalil",-13.709940910339355],["форт",-13.70994472503662],["▁تھیں۔",-13.709969520568848],["▁dilay",-13.709975242614746],["ော့",-13.709982872009276],["1975",-13.710000991821287],["▁ზედა",-13.710027694702148],["▁liaj",-13.710058212280272],["!«",-13.710061073303224],["▁وسلو",-13.710077285766602],["▁kifo",-13.710097312927246],["олов",-13.710124015808104],["ліна",-13.710139274597168],["элтэй",-13.710147857666016],["▁Kwi",-13.710159301757812],["lmoqda",-13.71019458770752],["geria",-13.710219383239746],["▁paio",-13.710219383239746],["▁espais",-13.710223197937012],["▁əhəmiyyətli",-13.710227012634276],["んじゃないか",-13.71023178100586],["นโยบาย",-13.710248947143556],["გარ",-13.7102632522583],["ခ်ီ",-13.710267066955566],["פריט",-13.710277557373049],["▁Национални",-13.710289001464844],["ຕອນ",-13.71031093597412],["▁london",-13.71031093597412],["▁Atlet",-13.710323333740234],["▁المسلم",-13.710323333740234],["ୟୋ",-13.710331916809082],["▁Odası",-13.710331916809082],["宿舍",-13.710339546203612],["mAh",-13.71035861968994],["▁ବସ୍",-13.71035861968994],["वृत्ती",-13.71036434173584],["фесі",-13.710367202758787],["ರೈ",-13.710415840148926],["▁Klip",-13.710427284240724],["▁compromis",-13.710441589355469],["▁Žensk",-13.710443496704102],["▁Export",-13.71045207977295],["സമ്പ",-13.710454940795898],["又は",-13.710474014282228],["落實",-13.710476875305176],["▁코드",-13.710482597351074],["▁investasi",-13.710521697998049],["оўскі",-13.710533142089844],["54)",-13.710537910461426],["דון",-13.710537910461426],["▁conflicto",-13.710565567016602],["▁Президенті",-13.71057415008545],["▁forud",-13.710576057434082],["▁мәлімет",-13.710643768310549],["▁накопи",-13.710653305053713],["▁navigation",-13.710683822631836],["▁assisti",-13.710716247558594],["ફે",-13.710753440856934],["▁tarz",-13.710753440856934],["tiikka",-13.710755348205566],["女の子",-13.710782051086426],["▁Склад",-13.710783004760742],["▁simultan",-13.71079444885254],["ІТ",-13.710824012756348],["泪",-13.710856437683104],["ghas",-13.710858345031738],["araw",-13.710872650146484],["សារព័ត៌មាន",-13.710874557495115],["与此同时",-13.710874557495115],["▁käib",-13.71088695526123],["纹",-13.710887908935549],["▁الاب",-13.710895538330078],["灵活",-13.71091079711914],["▁RAZ",-13.710911750793455],["▁spinn",-13.71091365814209],["येत्",-13.71093463897705],["onfidențialitate",-13.710942268371582],["спеціаліст",-13.710942268371582],["บรรจุ",-13.710942268371582],["▁Plataforma",-13.710942268371582],["▁građevin",-13.710942268371582],["▁inwestycji",-13.710942268371582],
["▁kelahiran",-13.710942268371582],["▁përpjekje",-13.710942268371582],["▁spiaggia",-13.710942268371582],["▁təyyarə",-13.710942268371582],["▁ζήτημα",-13.710942268371582],["▁ділянки",-13.710942268371582],["▁жніўня",-13.710942268371582],["▁اکتوبر",-13.710942268371582],["▁خوزستان",-13.710942268371582],["▁يؤدي",-13.710942268371582],["▁ਤਹਿਤ",-13.710942268371582],["▁ਦਸੰਬਰ",-13.710942268371582],["▁બધું",-13.710942268371582],["▁갑니다",-13.710942268371582],["▁distância",-13.710943222045898],["▁dzīvē",-13.710943222045898],["▁Диплом",-13.710943222045898],["▁алтернатива",-13.710943222045898],["▁վերականգն",-13.710943222045898],["▁ਅਜ਼ਾਦ",-13.710943222045898],["უსტ",-13.71094799041748],["▁bakan",-13.710952758789062],["▁ٻڌايو",-13.710952758789062],["▁ਸਥਾਨਕ",-13.710954666137695],["▁반응",-13.710954666137695],["▁звернення",-13.710957527160645],["▁nowa",-13.710962295532228],["lanjut",-13.710970878601074],["▁folur",-13.710970878601074],["▁చక్క",-13.71097183227539],["▁यावेळी",-13.710973739624023],["▁त्यांचा",-13.710975646972656],["▁любое",-13.710978507995604],["արկում",-13.71098804473877],["气象",-13.710994720458984],["▁profite",-13.7109956741333],["▁Ding",-13.71102523803711],["▁muzeja",-13.711039543151855],["▁mailako",-13.711050033569336],["▁ئۇچۇر",-13.7110595703125],["▁አራት",-13.711068153381348],["/22",-13.711071014404297],["ഹര",-13.711076736450195],["SHOP",-13.711101531982422],["中間",-13.711108207702637],["▁носител",-13.711112022399902],["▁Kalender",-13.711114883422852],["شري",-13.7111177444458],["▁fosfor",-13.711129188537598],["த்தல்",-13.711133003234863],["iños",-13.71113395690918],["▁догляд",-13.711145401000977],["▁Object",-13.711151123046877],["▁befolkning",-13.711153030395508],["venlig",-13.711155891418455],["▁Valde",-13.711164474487305],["ส้ม",-13.711166381835938],["▁существующ",-13.711173057556152],["▁lingvon",-13.711174964904783],["▁하는데",-13.71121311187744],["▁االله",-13.711236953735352],["▁lokakuuta",-13.7112398147583],["iwyd",-13.71126651763916],["غلق",-13.711271286010742],["▁Скоро",-13.711275100708008],["▁ühendust",-13.711288452148438],["▁ಮಗ",-13.711343765258787],["▁texnologiyalari",-13.711349487304688],["▁niat",-13.71135139465332],["▁ambientale",-13.711352348327637],["▁እሱ",-13.71135425567627],["▁gudaha",-13.711362838745115],["▁Gjen",-13.711368560791016],["▁Komitəsi",-13.711384773254396],["โว",-13.71138858795166],["▁ostati",-13.711389541625977],["▁პერიოდში",-13.711393356323242],["▁Nép",-13.711396217346191],["▁étel",-13.711398124694824],["这时",-13.711414337158203],["▁necesarias",-13.711421012878418],["ထဲမှာ",-13.711421966552734],["▁kulcs",-13.711421966552734],["▁компанийн",-13.711431503295898],["▁novej",-13.711434364318848],["▁Κρ",-13.71143913269043],["izuje",-13.711453437805176],["▁خواهش",-13.711453437805176],["ließ",-13.711467742919922],["Рад",-13.711471557617188],["▁ньому",-13.71147346496582],["adequa",-13.711482048034668],["NIKA",-13.711496353149414],["пров",-13.711496353149414],["▁basketbol",-13.711499214172363],["vaiku",-13.711535453796388],["▁Алардын",-13.711540222167969],["ოზ",-13.711541175842283],["▁väikese",-13.711565017700195],["કુ",-13.71158218383789],["▁kesän",-13.711587905883787],["▁supposed",-13.711604118347168],["▁turite",-13.711607933044434],["▁Barça",-13.711626052856444],["მარტ",-13.711639404296877],["යුතු",-13.711641311645508],["ecká",-13.71164321899414],["▁خرم",-13.711645126342772],["▁temperatuur",-13.711664199829102],["▁fritid",-13.711670875549316],["▁מחבר",-13.711670875549316],["lhão",-13.71167278289795],["▁hammas",-13.711677551269531],["其中的",-13.71168041229248],["他还",-13.71172046661377
],["ЫР",-13.711739540100098],["朋友们",-13.711739540100098],["▁gawe",-13.711750984191896],["▁Petru",-13.711770057678224],["▁crush",-13.71177577972412],["▁خواسته",-13.711783409118652],["ട്ടില്",-13.71180248260498],["▁hær",-13.71180534362793],["▁ended",-13.711821556091309],["▁jatkaa",-13.711824417114258],["▁канцелар",-13.711837768554688],["▁šķir",-13.711867332458496],["▁Док",-13.711874961853027],["▁301",-13.711875915527344],["▁nacionais",-13.711884498596191],["تفاعل",-13.711891174316406],["ලත්",-13.711896896362305],["▁followed",-13.711905479431152],["▁vecí",-13.71191692352295],["աստան",-13.711923599243164],["▁народни",-13.711931228637695],["এইচ",-13.71194839477539],["開設",-13.711950302124023],["▁vizibil",-13.71196460723877],["▁Mux",-13.711979866027832],["▁iväg",-13.711981773376465],["ଟୋ",-13.711990356445312],["▁កញ្ញា",-13.711997985839844],["▁цв",-13.711999893188477],["▁մրցանակ",-13.712000846862791],["ováním",-13.712020874023438],["جهي",-13.71203327178955],["▁ರನ್",-13.712058067321776],["▁бронз",-13.71205997467041],["소개",-13.71206760406494],["lasse",-13.712078094482422],["携",-13.71208381652832],["чё",-13.712096214294434],["中介",-13.712096214294434],["▁Globo",-13.712102890014648],["ღერ",-13.712103843688965],["ිම",-13.712105751037598],["▁granit",-13.712108612060549],["Так",-13.712133407592772],["ច្",-13.71213436126709],["▁2-2",-13.712140083312988],["беларуску",-13.71214771270752],["▁دیکھیں",-13.712148666381836],["▁comigo",-13.712149620056152],["之前的",-13.712152481079102],["သင့်",-13.71215534210205],["▁nezá",-13.712173461914062],["▁брани",-13.712177276611328],["paste",-13.712191581726074],["යෝග",-13.712197303771973],["Арх",-13.712202072143556],["▁Gradsk",-13.71220588684082],["▁опасно",-13.712206840515137],["▁сфери",-13.71220874786377],["▁поврзани",-13.712210655212402],["▁nærmest",-13.71222686767578],["▁പെരു",-13.712233543395996],["▁ആവ",-13.71225357055664],["aspx",-13.712268829345703],["▁بعدها",-13.712299346923828],["▁లేద",-13.712307929992676],["mörk",-13.71232795715332],["一開始",-13.712343215942385],["▁ponaša",-13.712346076965332],["▁nótt",-13.712362289428713],["▁දරුවා",-13.712363243103027],["▁itakuwa",-13.712382316589355],["除非",-13.71239185333252],["daryti",-13.712392807006836],["▁базу",-13.712420463562012],["년대",-13.712431907653809],["iausia",-13.712448120117188],["わかり",-13.712468147277832],["▁beskyttelse",-13.712491035461426],["▁εθνική",-13.712493896484377],["ATS",-13.712505340576172],["3.3",-13.712519645690918],["아트",-13.712528228759766],["眼中",-13.712538719177246],["לול",-13.712546348571776],["▁pensato",-13.712556838989258],["▁convo",-13.712565422058104],["cind",-13.712580680847168],["রাই",-13.712580680847168],["▁Integration",-13.712584495544434],["▁lietošanas",-13.712587356567385],["잔",-13.7125883102417],["ບໍລິສັດ",-13.712590217590332],["▁դար",-13.71259593963623],["▁kedalam",-13.712602615356444],["ιστούν",-13.712605476379396],["▁sakini",-13.712605476379396],["शिक्ष",-13.712618827819824],["▁castig",-13.712647438049316],["▁biztosan",-13.712668418884276],["228",-13.712686538696287],["ത്തിനായി",-13.712688446044922],["नेट",-13.712695121765137],["▁falsk",-13.712705612182615],["▁negar",-13.712726593017578],["▁uyg",-13.71273136138916],["▁ارزان",-13.712732315063477],["respons",-13.712776184082031],["釋",-13.71279239654541],["▁фамил",-13.712862014770508],["▁плакат",-13.71286678314209],["▁కాస్త",-13.712876319885254],["avuus",-13.712883949279783],["gène",-13.712890625],["▁dreng",-13.712899208068848],["טענה",-13.712904930114746],["āṃ",-13.71292209625244],["▁suffer",-13.712965965270996],["ष्टा",-13.712969779968262]
,["▁গেলে",-13.712977409362791],["න්ඩ්",-13.712994575500488],["янка",-13.713027954101562],["▁መዝ",-13.713048934936523],["▁зовнішньо",-13.71306037902832],["▁Cord",-13.7130708694458],["ಜ್ಞ",-13.71307373046875],["▁Hrad",-13.713080406188965],["▁Моя",-13.713086128234863],["ckou",-13.71308708190918],["▁الفور",-13.713089942932127],["ţu",-13.713102340698242],["▁Simona",-13.71311378479004],["▁broad",-13.713159561157228],["▁tohle",-13.71317195892334],["▁आसान",-13.713175773620604],["ХИ",-13.71318244934082],["捉",-13.713187217712402],["▁érint",-13.713189125061035],["társai",-13.713194847106934],["▁మొబైల్",-13.713197708129885],["琪",-13.713197708129885],["睡觉",-13.713202476501465],["▁ನೆನಪ",-13.713207244873049],["▁غد",-13.713212966918944],["アウト",-13.713218688964844],["ສະເຫນີ",-13.713238716125488],["▁найближч",-13.71324062347412],["▁նպաստ",-13.71324062347412],["▁Eisteddfod",-13.713241577148438],["▁Inggeris",-13.713241577148438],["▁Lənkəran",-13.713241577148438],["▁ehemalige",-13.713241577148438],["▁kaominina",-13.713241577148438],["▁phẫu",-13.713241577148438],["▁жінки",-13.713241577148438],["▁заједнице",-13.713241577148438],["▁посвећен",-13.713241577148438],["▁انعقاد",-13.713241577148438],["▁पवित्र",-13.713241577148438],["▁हिमाचल",-13.713241577148438],["▁ਰਾਹੀਂ",-13.713241577148438],["▁ස්තූතියි",-13.713241577148438],["▁ምክንያቱም",-13.713241577148438],["덩",-13.713241577148438],["▁7.2",-13.713242530822754],["▁нууц",-13.713242530822754],["▁पोलीस",-13.71324348449707],["▁নিজস্ব",-13.71324348449707],["▁avvicina",-13.713244438171388],["สุวรรณ",-13.713245391845703],["▁desenvolvido",-13.713245391845703],["▁търговски",-13.713245391845703],["▁कार्तिक",-13.713245391845703],["▁nexweş",-13.713247299194336],["▁قبیل",-13.713247299194336],["▁କେହି",-13.713247299194336],["▁délután",-13.713248252868652],["▁الدخول",-13.713248252868652],["▁kinerja",-13.713250160217283],["▁Португал",-13.713250160217283],["▁чотири",-13.713253021240234],["▁සංගමය",-13.71325397491455],["▁obilježava",-13.713254928588867],["▁ഹിന്ദു",-13.713260650634766],["▁설립",-13.713260650634766],["▁Jozef",-13.713269233703612],["▁Процес",-13.713271141052246],["▁번역",-13.713271141052246],["▁vrch",-13.713272094726562],["市长",-13.71327781677246],["▁nemocnice",-13.713278770446776],["▁الأيام",-13.713278770446776],["▁twierdz",-13.71328067779541],["▁Andere",-13.713285446166992],["▁κλείσ",-13.713287353515623],["▁мача",-13.71328830718994],["378",-13.713290214538574],["▁қаласында",-13.713290214538574],["▁Serbisë",-13.71329116821289],["▁retirada",-13.713293075561523],["▁menyala",-13.71329402923584],["▁құрмет",-13.71329402923584],["▁ಹೈ",-13.713302612304688],["▁incentiva",-13.713306427001951],["▁Meyer",-13.713323593139648],["▁nekako",-13.713323593139648],["▁registrert",-13.71333122253418],["▁ਜਾਂਚ",-13.71334171295166],["▁Mobi",-13.713351249694824],["▁මෙහිදී",-13.713358879089355],["Undang",-13.713360786437988],["neuvosto",-13.71336555480957],["▁Antik",-13.713372230529783],["▁mahupun",-13.713386535644531],["▁নিজের",-13.713391304016112],["▁Commissione",-13.713394165039062],["▁рекла",-13.713430404663086],["▁តុលាការ",-13.713433265686035],["▁հոկտեմբերի",-13.71343994140625],["ырақ",-13.713445663452148],["▁њихово",-13.71344757080078],["▁ඇගේ",-13.713448524475098],["▁Endre",-13.713454246520996],["▁shkolla",-13.713475227355955],["เกิดจาก",-13.713488578796388],["▁eventyr",-13.713488578796388],["▁спроведе",-13.713489532470703],["၀ယ္",-13.71349048614502],["或其他",-13.713492393493652],["搬家",-13.713496208190918],["▁maliyet",-13.713505744934082],["▁uzas",-13.713513374328612],["▁звуча",-13.713520050048828],["▁
смъртта",-13.713534355163574],["▁ବାହାର",-13.713536262512209],["▁آمر",-13.713553428649902],["გალ",-13.713560104370115],["▁айтканда",-13.713561058044434],["▁թերթ",-13.713562965393066],["我相信",-13.713584899902344],["▁Pearl",-13.71358585357666],["▁riittää",-13.713603019714355],["▁bequem",-13.713603973388672],["▁colour",-13.71361255645752],["▁novým",-13.713628768920898],["▁Alca",-13.713637351989746],["▁experiences",-13.713647842407228],["▁lág",-13.713659286499023],["▁näistä",-13.713664054870604],["വെച്ച",-13.713665008544922],["පුර",-13.71368408203125],["▁속도",-13.7136869430542],["ပါပဲ။",-13.713716506958008],["agentur",-13.713725090026855],["tým",-13.713726043701172],["tambua",-13.713726997375488],["▁waarheid",-13.71372890472412],["▁şər",-13.71373176574707],["▁поняття",-13.713735580444336],["▁վաղը",-13.71374225616455],["▁essentielle",-13.713749885559082],["TAC",-13.71376895904541],["跟著",-13.71377944946289],["▁ලැබේ",-13.713805198669434],["Эр",-13.71384048461914],["▁केहि",-13.713844299316406],["▁јула",-13.713848114013672],["ტალ",-13.713855743408203],["▁affärs",-13.713863372802734],["ిస్తారు",-13.713873863220217],["▁കീ",-13.713884353637695],["artista",-13.713909149169922],["▁Гэ",-13.713912963867188],["▁ಬಾಲ",-13.713915824890137],["erius",-13.713921546936035],["▁potestate",-13.71393871307373],["네이",-13.713943481445312],["sposo",-13.713945388793944],["▁ගීත",-13.713960647583008],["▁სინ",-13.713966369628906],["▁ପତ୍ର",-13.71400260925293],["beretning",-13.714007377624512],["우스",-13.714028358459473],["▁dokumenty",-13.714044570922852],["▁Pictures",-13.714045524597168],["▁totalement",-13.714051246643066],["mân",-13.714052200317385],["▁നായകന",-13.71406364440918],["688",-13.714068412780762],["笑着",-13.71407413482666],["▁Reme",-13.714099884033203],["kkari",-13.714105606079102],["ცნობი",-13.71412467956543],["ग्रा",-13.71413230895996],["havet",-13.714143753051758],["▁Tiga",-13.714146614074709],["已被",-13.714177131652832],["ilised",-13.714181900024414],["puter",-13.714187622070312],["▁olasz",-13.714189529418944],["▁Topic",-13.71420669555664],["▁жұмысы",-13.714219093322754],["gefühl",-13.714227676391602],["▁taasi",-13.714234352111816],["க்களை",-13.71424674987793],["▁mauvaise",-13.714262962341309],["▁تدخل",-13.71426486968994],["Ху",-13.714274406433104],["ੁਰ",-13.71428394317627],["▁mingit",-13.71428394317627],["కెట్",-13.714303016662598],["▁Gina",-13.714311599731444],["færslu",-13.714313507080078],["我还",-13.714319229125977],["0.7",-13.714320182800291],["▁modem",-13.714324951171877],["▁baas",-13.71434497833252],["TUA",-13.714345932006836],["ේම",-13.714354515075684],["▁spatele",-13.71437931060791],["▁rencontrer",-13.714394569396973],["פורים",-13.714398384094238],["▁elnöke",-13.71441650390625],["▁cumparat",-13.714427947998049],["▁cenie",-13.71445369720459],["▁Хари",-13.714454650878906],["▁الفرق",-13.714454650878906],["▁Suuri",-13.714468955993652],["▁gjuhën",-13.71449089050293],["▁navedeni",-13.714505195617676],["▁Halb",-13.714518547058104],["▁ოჯახის",-13.71452522277832],["▁ويئي",-13.714531898498535],["▁payah",-13.714536666870115],["▁usw",-13.714570045471191],["▁multă",-13.714580535888672],["▁niyə",-13.714611053466797],["▁personalidade",-13.714624404907228],["▁regista",-13.71464729309082],["▁zəngin",-13.714658737182615],["▁бараа",-13.714661598205566],["和你",-13.71467399597168],["▁Интересно",-13.714696884155272],["ೋಣ",-13.714701652526855],["၂၄",-13.714729309082031],["ทั้งหลาย",-13.71473217010498],["▁моите",-13.714741706848145],["iskais",-13.714746475219728],["▁ugled",-13.714747428894045],["صلة",-13.714773178100586],["НГ",-13.71
4818000793455],["▁chci",-13.714825630187988],["▁ഇതില്",-13.714828491210938],["ทุกท่าน",-13.71484088897705],["ในประเทศ",-13.71484661102295],["んですよね",-13.714877128601074],["选手",-13.714884757995604],["зул",-13.71489715576172],["សភា",-13.71494483947754],["▁radova",-13.714945793151855],["ಉದ್ಯೋಗ",-13.714948654174805],["▁קאנ",-13.714951515197754],["glass",-13.714974403381348],["▁Kranj",-13.714990615844728],["▁filla",-13.715018272399902],["เยี่ยม",-13.715022087097168],["▁accedere",-13.715028762817385],["īni",-13.715049743652344],["гуш",-13.715054512023926],["εύεται",-13.71505641937256],["CCC",-13.715060234069824],["▁hartzeko",-13.715068817138672],["ഡിയ",-13.715085983276367],["ത്തിനുള്ള",-13.715092658996582],["▁Arre",-13.715095520019531],["▁приехал",-13.715100288391112],["ТГ",-13.715126037597656],["ىلىق",-13.715149879455566],["▁ୟୁ",-13.715155601501465],["▁إبراهيم",-13.715169906616213],["▁наркотик",-13.715188026428224],["јска",-13.715191841125488],["ְּ",-13.715197563171388],["▁precise",-13.715208053588867],["▁Hindistan",-13.715240478515623],["uesve",-13.71524429321289],["Чи",-13.71524429321289],["▁faudra",-13.71524715423584],["▁chercher",-13.715250015258787],["▁применение",-13.715254783630373],["нута",-13.71525764465332],["▁ఎదురు",-13.715259552001951],["▁Patient",-13.715299606323242],["വിട",-13.715314865112305],["▁йде",-13.715324401855469],["▁bluz",-13.71533203125],["દ્ર",-13.715337753295898],["▁મળ",-13.715359687805176],["schicht",-13.71536636352539],["aider",-13.715377807617188],["▁öğrencilerin",-13.715377807617188],["▁jaký",-13.71538543701172],["▁מפת",-13.715391159057615],["▁iroda",-13.715412139892578],["▁Loca",-13.71545696258545],["ค้น",-13.715459823608398],["认知",-13.71547031402588],["tivní",-13.715474128723145],["എച്ച്",-13.715497016906738],["▁previo",-13.715497016906738],["▁vakna",-13.715503692626951],["罕",-13.715509414672852],["▁domni",-13.715513229370115],["▁Tuma",-13.7155179977417],["oucí",-13.715526580810549],["会被",-13.715527534484863],["监控",-13.715532302856444],["とにかく",-13.715537071228027],["暂时",-13.71554470062256],["룩",-13.71554470062256],["פתרונות",-13.715545654296877],["▁Física",-13.715545654296877],["▁heddiw",-13.715545654296877],["▁medžiagos",-13.715545654296877],["▁pîroz",-13.715545654296877],["▁uiteraard",-13.715545654296877],["▁випадках",-13.715545654296877],["▁рэдактар",-13.715545654296877],["▁المسلحة",-13.715545654296877],["▁تمہارے",-13.715545654296877],["▁گناہ",-13.715545654296877],["▁ଉଦ୍ଧାର",-13.715545654296877],["▁మద్దతు",-13.715545654296877],["▁ಟ್ರೆಂಡಿಂಗ್",-13.715545654296877],["스러운",-13.715545654296877],["▁perlawanan",-13.715546607971191],["▁вирішення",-13.715546607971191],["틱",-13.715546607971191],["▁vurğulayıb",-13.715547561645508],["▁wireless",-13.715547561645508],["▁xarunta",-13.715548515319824],["▁становника",-13.715548515319824],["▁ڪهڙي",-13.71554946899414],["▁स्वप्न",-13.715551376342772],["▁الانترنت",-13.715554237365724],["▁مؤسسة",-13.715554237365724],["▁ਨਹੀ",-13.715554237365724],["▁déroule",-13.71555519104004],["▁rëndësishëm",-13.715556144714355],["▁ક્લ",-13.715556144714355],["▁දිහා",-13.715558052062988],["▁borta",-13.715560913085938],["▁vörös",-13.715560913085938],["▁stavby",-13.71556282043457],["▁३३",-13.715563774108888],["▁அண்ணா",-13.715563774108888],["▁оролцох",-13.71556568145752],["▁Cinayət",-13.715566635131836],["▁prosiect",-13.715567588806152],["casino",-13.715569496154783],["▁מעולה",-13.715569496154783],["kläder",-13.715578079223633],["▁موټر",-13.71557903289795],["▁compter",-13.715580940246582],["tuvieron",-13.715581893920898],["▁apărut",-13.715582847595217],[
"▁Podcast",-13.715596199035645],["սենյակ",-13.71560001373291],["▁uczestników",-13.715601921081545],["▁горадзе",-13.715601921081545],["▁ঘটনায়",-13.715601921081545],["▁جایزه",-13.715603828430176],["▁warzyw",-13.715605735778809],["▁czynności",-13.71560764312744],["▁වෙලාවට",-13.71561050415039],["admin",-13.715614318847656],["▁pakeisti",-13.715614318847656],["▁அறிய",-13.715615272521973],["に入る",-13.715620040893556],["▁Разбира",-13.715620994567873],["ஸா",-13.715621948242188],["▁자격",-13.71562385559082],["▁facilities",-13.715625762939451],["▁maarufu",-13.71562671661377],["▁matang",-13.71562671661377],["စို",-13.7156343460083],["▁çempionatı",-13.7156343460083],["▁canviar",-13.715646743774414],["▁পৰা",-13.715646743774414],["▁koszty",-13.71564769744873],["▁خپلواک",-13.71566104888916],["▁البشرية",-13.71567726135254],["▁trombo",-13.715685844421388],["▁नेम",-13.715699195861816],["піць",-13.715703010559082],["▁čine",-13.71571159362793],["▁보안",-13.715727806091309],["พระเจ้า",-13.715733528137209],["▁skirti",-13.715738296508787],["▁상품명",-13.715747833251951],["▁चिकित्सक",-13.715754508972168],["▁Ereb",-13.715757369995115],["▁ramane",-13.715760231018066],["▁பலி",-13.715763092041016],["▁hallgat",-13.715786933898926],["▁дават",-13.715791702270508],["ענק",-13.715806007385254],["suori",-13.715807914733888],["▁эхний",-13.715819358825684],["웨어",-13.715825080871582],["▁kontinu",-13.715832710266112],["▁jahon",-13.71587371826172],["▁espresso",-13.715887069702148],["▁tanssi",-13.715887069702148],["▁मन्त्रालयले",-13.715896606445312],["ליות",-13.715912818908691],["орун",-13.71594524383545],["▁ਸੰਤ",-13.715948104858398],["वरी",-13.715964317321776],["▁centi",-13.715964317321776],["▁продуктів",-13.715984344482422],["íčka",-13.715987205505373],["▁Ministria",-13.716023445129396],["০৮",-13.716044425964355],["▁මෙතන",-13.716046333312988],["▁stereo",-13.716049194335938],["▁Supra",-13.71606159210205],["ところが",-13.71606159210205],["▁менің",-13.716063499450684],["▁олимп",-13.71606731414795],["▁Putih",-13.716068267822266],["▁ενδιαφέρ",-13.716073036193848],["ಲಿಂಗ",-13.716083526611328],["ेने",-13.716084480285645],["élève",-13.71609592437744],["brem",-13.716100692749023],["▁Berisha",-13.716114044189451],["▁caída",-13.716124534606934],["這位",-13.716130256652832],["فائل",-13.716144561767578],["mayacağı",-13.716171264648438],["▁vetro",-13.716175079345703],["كسر",-13.716198921203612],["▁serije",-13.716242790222168],["▁teoría",-13.716259956359863],["▁commentaires",-13.716268539428713],["▁anlamı",-13.71628475189209],["▁помощта",-13.716288566589355],["järven",-13.71629238128662],["▁mensual",-13.716294288635254],["▁perfeito",-13.716307640075684],["▁воля",-13.716315269470217],["වලදී",-13.716327667236328],["▁Nieuwe",-13.716328620910645],["ovice",-13.71633529663086],["▁lífs",-13.716350555419922],["בלים",-13.716354370117188],["▁Pano",-13.716365814208984],["கிறேன்",-13.7163667678833],["▁ermənilər",-13.716387748718262],["ταξιδ",-13.716389656066896],["▁bờ",-13.71639347076416],["шког",-13.71642017364502],["zusetzen",-13.716447830200195],["はとても",-13.71646785736084],["▁Franca",-13.716470718383787],["เสือ",-13.716480255126951],["▁appears",-13.716487884521484],["▁שמן",-13.71649932861328],["▁Բար",-13.716501235961914],["▁Χα",-13.71654987335205],["▁Tingkat",-13.716557502746582],["шле",-13.716570854187012],["▁véleménye",-13.716570854187012],["▁mediul",-13.716593742370604],["aliya",-13.716605186462402],["▁Serial",-13.716619491577148],["ärer",-13.71662139892578],["væn",-13.71662712097168],["feil",-13.716645240783691],["нацыянальны",-13.716647148132324],["▁עורכי",-13.
716675758361816],["▁peixe",-13.716676712036133],["▁konsumsi",-13.716678619384766],["▁interessert",-13.716681480407717],["▁arbor",-13.716683387756348],["內心",-13.716687202453612],["liyinə",-13.716697692871094],["▁მარა",-13.71670150756836],["ISTE",-13.716713905334473],["▁πιθανό",-13.716715812683104],["▁standing",-13.716730117797852],["▁onderstaande",-13.71674346923828],["▁대중",-13.716769218444824],["Stat",-13.716772079467772],["isilla",-13.716773986816406],["▁Mums",-13.716782569885254],["▁faixa",-13.716795921325684],["▁останува",-13.716797828674316],["▁Lepas",-13.716812133789062],["ย่อม",-13.716814041137695],["▁виплат",-13.716835021972656],["aggiornamento",-13.716854095458984],["▁назив",-13.716870307922363],["▁कॉल",-13.71688461303711],["▁faço",-13.716922760009766],["▁سمو",-13.716947555541992],["ရာဇ",-13.716949462890623],["არო",-13.716954231262209],["▁kitaip",-13.716989517211914],["▁వీరి",-13.717016220092772],["わけで",-13.717041969299316],["151",-13.71705722808838],["ROL",-13.717063903808594],["▁2:0",-13.717070579528809],["▁שלמה",-13.71707820892334],["▁مقدمات",-13.717084884643556],["Суу",-13.717087745666504],["▁होकर",-13.717110633850098],["kiran",-13.717123031616213],["▁интернете",-13.717123031616213],["▁veículos",-13.717127799987791],["▁ውድ",-13.71715259552002],["▁duševn",-13.717153549194336],["▁Rahat",-13.717162132263184],["の内容",-13.717169761657717],["AGO",-13.717174530029297],["सर्व",-13.71719455718994],["▁Træ",-13.717209815979004],["▁পানি",-13.717222213745115],["考え方",-13.7172269821167],["▁решението",-13.717230796813965],["त्काळ",-13.71723461151123],["סוג",-13.717254638671877],["▁Xog",-13.717265129089355],["“",-13.71728801727295],["▁Ciencias",-13.71728801727295],["עיקר",-13.717289924621582],["▁ربط",-13.717321395874023],["▁Autó",-13.717344284057615],["▁Žem",-13.717376708984377],["▁Bēr",-13.71738052368164],["▁legion",-13.717382431030272],["▁തോന്നി",-13.71741008758545],["▁рази",-13.717418670654297],["սին",-13.717429161071776],["▁artması",-13.717443466186523],["▁марш",-13.71744441986084],["▁גבר",-13.717456817626951],["Miss",-13.717459678649902],["▁0,9",-13.7174654006958],["▁সেরা",-13.71747589111328],["▁Kroat",-13.717491149902344],["▁ڇپ",-13.717509269714355],["各類",-13.717522621154783],["কেল",-13.717529296875],["▁ตอนที่",-13.717536926269531],["▁čovjeka",-13.717538833618164],["ช์",-13.717541694641112],["▁akit",-13.717549324035645],["هذه",-13.717554092407228],["žene",-13.71757698059082],["άται",-13.717580795288086],["vén",-13.717584609985352],["▁පට",-13.717611312866213],["ariusz",-13.717616081237791],["gól",-13.71761703491211],["▁بوش",-13.717622756958008],["▁izinto",-13.717633247375488],["▁داشتم",-13.717639923095703],["▁Перво",-13.717655181884766],["▁mövzu",-13.71766757965088],["▁yolla",-13.71767807006836],["▁265",-13.717680931091309],["ुं",-13.717713356018066],["▁Gestió",-13.717719078063965],["ogató",-13.717727661132812],["▁יולי",-13.717756271362305],["მელი",-13.717764854431152],["▁directora",-13.717774391174316],["खत",-13.71777629852295],["祂",-13.717788696289062],["በለ",-13.717792510986328],["Аустр",-13.71780014038086],["▁Esperanta",-13.717809677124023],["▁mások",-13.717812538146973],["济",-13.71782112121582],["拘",-13.7178316116333],["ၢ",-13.717846870422363],["▁Chwarae",-13.717853546142578],["มั้ย",-13.717854499816896],["ဖုိ႔",-13.717854499816896],["ቴክኖሎጂ",-13.717854499816896],["▁săptămâni",-13.717854499816896],["▁Εκκλησία",-13.717854499816896],["▁детьми",-13.717854499816896],["▁енглески",-13.717854499816896],["▁интервю",-13.717854499816896],["▁техніки",-13.717854499816896],["▁शैक्षिक",-13.7178544998168
96],["▁નરેન્દ્ર",-13.717854499816896],["▁కెమెరా",-13.717854499816896],["닉",-13.717854499816896],["쓴",-13.717854499816896],["▁funkcjonaln",-13.717855453491213],["sisitiza",-13.717856407165527],["▁традыцый",-13.717856407165527],["▁վարչության",-13.717856407165527],["美味しい",-13.717856407165527],["Cilvēk",-13.717857360839844],["▁niewielki",-13.717857360839844],["液晶",-13.71785831451416],["▁мобилни",-13.717860221862791],["▁nyílt",-13.71786403656006],["▁האחרונה",-13.71786403656006],["▁Shqiperi",-13.717864990234377],["▁recevoir",-13.717866897583008],["▁придонес",-13.717870712280272],["▁қаражат",-13.717870712280272],["പ്രകടന",-13.717873573303224],["▁무슨",-13.71787452697754],["▁מישהו",-13.717875480651855],["▁لون",-13.717875480651855],["▁вистински",-13.717878341674805],["▁ಬಿಟ್ಟು",-13.71787929534912],["▁Εδώ",-13.71788215637207],["▁취업",-13.71788215637207],["זמנים",-13.717886924743652],["▁Sist",-13.717889785766602],["▁క్రీ",-13.717891693115234],["▁käytetty",-13.71789836883545],["▁membahas",-13.71789836883545],["▁eksisterende",-13.717904090881348],["▁proposons",-13.717915534973145],["क्षे",-13.71791648864746],["▁resnično",-13.717924118041992],["▁წუთი",-13.717924118041992],["▁puteți",-13.717926025390623],["工作室",-13.71792984008789],["▁елдің",-13.717936515808104],["▁sedap",-13.717965126037598],["▁תואר",-13.717971801757812],["▁позиција",-13.717984199523926],["▁layer",-13.717988967895508],["wiązk",-13.717992782592772],["▁करार",-13.718009948730469],["▁Ireo",-13.718015670776367],["▁сгради",-13.718017578125],["ollo",-13.71803855895996],["▁উপজেলার",-13.718055725097656],["เพจ",-13.718061447143556],["വൻ",-13.718072891235352],["▁såväl",-13.71807861328125],["skrip",-13.718079566955566],["▁dagegen",-13.718080520629885],["▁согуш",-13.718084335327148],["▁사람들은",-13.718085289001465],["ಿಯನ್",-13.71809196472168],["île",-13.718093872070312],["▁stärker",-13.718099594116213],["accepta",-13.718100547790527],["▁ręcz",-13.718107223510742],["暑假",-13.718107223510742],["什么时候",-13.718117713928224],["▁kiinnostu",-13.71811866760254],["▁Ρε",-13.718130111694336],["skjerm",-13.718134880065918],["▁выконва",-13.718135833740234],["觀念",-13.71816349029541],["▁comisión",-13.718180656433104],["▁kufikia",-13.718192100524902],["feydd",-13.718195915222168],["▁दिनमा",-13.718198776245115],["▁필요가",-13.718205451965332],["▁betrekking",-13.718209266662598],["▁mbajtur",-13.718221664428713],["▁zoku",-13.718223571777344],["ნახე",-13.71828842163086],["▁뇌",-13.718334197998049],["▁analist",-13.718342781066896],["labot",-13.718361854553224],["▁domaći",-13.718366622924805],["▁Muller",-13.71837329864502],["を食べ",-13.718384742736816],["▁Špe",-13.718385696411133],["utiliser",-13.718399047851562],["PIA",-13.71841526031494],["iktų",-13.718427658081056],["▁ණය",-13.718432426452637],["004",-13.718440055847168],["▁ruime",-13.71844482421875],["きて",-13.718457221984863],["▁petek",-13.71845817565918],["▁incep",-13.718478202819824],["▁Fost",-13.718481063842772],["348",-13.718486785888672],["▁딱",-13.718512535095217],["▁opinto",-13.718527793884276],["▁fornito",-13.718541145324709],["darî",-13.718548774719238],["▁честит",-13.718549728393556],["▁កើត",-13.718555450439451],["得多",-13.718571662902832],["운전",-13.718573570251465],["hnout",-13.71857452392578],["▁പങ്കു",-13.718591690063477],["റിയാ",-13.718607902526855],["▁противник",-13.718620300292969],["▁verktyg",-13.718631744384766],["crimina",-13.718636512756348],["历史上",-13.71865463256836],["liyində",-13.71866512298584],["▁iau",-13.718683242797852],["▁häntä",-13.7186861038208],["▁endring",-13.718687057495115],["ritz",-13.71873664855957],[
"اچ",-13.718738555908203],["слуха",-13.71875],["▁Richter",-13.718757629394531],["▁vaših",-13.71876335144043],["▁Vorteil",-13.718767166137695],["▁íslensk",-13.718771934509276],["▁ເລື່ອງ",-13.71878147125244],["kundig",-13.718788146972656],["▁સ્થળ",-13.718789100646973],["▁parella",-13.718792915344238],["inizin",-13.718806266784668],["luğa",-13.71882438659668],["▁Guard",-13.718831062316896],["▁विल",-13.71883487701416],["▁Larsen",-13.71884822845459],["▁oportunitat",-13.71885108947754],["▁kestävä",-13.718859672546388],["ичното",-13.718871116638184],["▁diretto",-13.718871116638184],["▁Desain",-13.718873023986816],["▁Blanc",-13.718887329101562],["▁szállítás",-13.71889591217041],["▁සෙට්",-13.718901634216309],["▁Crow",-13.718904495239258],["▁මිනිහ",-13.718916893005373],["▁shumta",-13.71891975402832],["▁පිරිමි",-13.718926429748535],["255",-13.718928337097168],["▁Samsun",-13.718942642211914],["connect",-13.71894359588623],["തുകൊണ്ട്",-13.718947410583496],["ქსი",-13.718957901000977],["▁tartışma",-13.71895980834961],["5,7",-13.71896266937256],["ഷൻ",-13.71898078918457],["▁prazer",-13.719000816345217],["▁najväčš",-13.71901512145996],["▁fadhi",-13.719045639038086],["▁Fren",-13.719090461730955],["績",-13.719097137451172],["قيد",-13.719103813171388],["פשט",-13.719106674194336],["▁სესხ",-13.719127655029297],["▁fundar",-13.719138145446776],["▁תקשורת",-13.71916675567627],["▁самоуправ",-13.719168663024902],["▁tourist",-13.719178199768066],["仓",-13.719182014465332],["▁بگذار",-13.719191551208496],["പ്രേ",-13.719193458557127],["▁Karu",-13.719194412231444],["pój",-13.719198226928713],["▁போட",-13.719208717346191],["▁megint",-13.719219207763672],["▁បោះឆ្នោត",-13.719264030456545],["ගන",-13.71926975250244],["▁museli",-13.719283103942873],["hjem",-13.719294548034668],["ADH",-13.719315528869627],["зії",-13.719316482543944],["▁frater",-13.719322204589844],["▁किंमत",-13.71932315826416],["კითხვის",-13.719338417053224],["ایران",-13.71933937072754],["ANDE",-13.719340324401855],["▁Humanit",-13.719340324401855],["▁vâ",-13.71934413909912],["▁ህይወት",-13.719367027282717],["τικοί",-13.719372749328612],["▁Tanya",-13.71937656402588],["Asocio",-13.719382286071776],["▁materiallar",-13.719405174255373],["▁Európske",-13.719408988952637],["▁professionali",-13.719412803649902],["ресурс",-13.719438552856444],["נוכח",-13.719484329223633],["▁нормативно",-13.719493865966797],["groei",-13.719494819641112],["▁anëtar",-13.71949577331543],["▁აღმოსავლეთ",-13.719522476196287],["▁nomini",-13.7195405960083],["甚至是",-13.719550132751465],["cyjną",-13.719569206237791],["▁mache",-13.71959114074707],["▁многи",-13.719598770141602],["JN",-13.719599723815918],["▁bölgede",-13.719647407531738],["ýður",-13.719660758972168],["WB",-13.719672203063965],["โชค",-13.719686508178713],["Fac",-13.719693183898926],["▁Schle",-13.719708442687988],["▁helder",-13.719727516174316],["▁estudar",-13.719732284545898],["Варшав",-13.719740867614746],["▁Conclu",-13.71974277496338],["ার্ট",-13.719743728637695],["▁teini",-13.719751358032228],["tävän",-13.719783782958984],["3.8",-13.719786643981934],["▁خطا",-13.719786643981934],["őbb",-13.719788551330566],["さんと",-13.719792366027832],["▁міні",-13.719794273376465],["▁510",-13.71979522705078],["ాయ్",-13.71980094909668],["бени",-13.719805717468262],["▁djali",-13.719829559326172],["▁смак",-13.719842910766602],["▁bėg",-13.71985912322998],["▁otrā",-13.719865798950195],["▁mövqeyi",-13.719889640808104],["▁ಅಧಿಕಾರಿ",-13.719889640808104],["וקי",-13.719893455505373],["דיל",-13.71989631652832],["shindwa",-13.71992301940918],["▁যারা",-13.719923973083496],[
"poly",-13.719929695129396],["ደቡብ",-13.719941139221191],["这两个",-13.719944953918455],["▁miało",-13.719945907592772],["▁Domu",-13.719947814941406],["▁Кому",-13.719961166381836],["spalv",-13.719964027404783],["▁ඔයාට",-13.720000267028809],["▁használata",-13.720008850097656],["▁İsa",-13.72001838684082],["▁Revolution",-13.720027923583984],["ଦେଇ",-13.720029830932615],["▁пошли",-13.720036506652832],["▁muj",-13.720043182373049],["ಕುಮಾರ",-13.720059394836426],["▁Christoph",-13.72006130218506],["мија",-13.720069885253906],["▁ट्रक",-13.720072746276855],["へと",-13.72009563446045],["郊",-13.720098495483398],["菲律宾",-13.720108032226562],["圖書館",-13.720115661621094],["▁Præ",-13.720123291015623],["ରିଆ",-13.720125198364258],["奶奶",-13.720141410827637],["煎",-13.720142364501951],["മത",-13.720147132873535],["弘",-13.720147132873535],["依據",-13.720155715942385],["გამო",-13.72016143798828],["▁amalan",-13.720162391662598],["มะเร็ง",-13.720168113708496],["ທ້າຍ",-13.720168113708496],["▁адабият",-13.720168113708496],["အာဏာ",-13.720169067382812],["▁అందుబాటులో",-13.720169067382812],["▁సమీక్ష",-13.720169067382812],["έγραψε",-13.720170021057127],["พงษ์",-13.720170021057127],["ማስረጃ",-13.720170021057127],["▁FACEBOOK",-13.720170021057127],["▁esperientzia",-13.720170021057127],["▁expensive",-13.720170021057127],["▁frecuencia",-13.720170021057127],["▁ponedjeljak",-13.720170021057127],["▁zamonaviy",-13.720170021057127],["▁Öffentlichkeit",-13.720170021057127],["▁élelmiszer",-13.720170021057127],["▁γυμν",-13.720170021057127],["▁Резултати",-13.720170021057127],["▁үкімет",-13.720170021057127],["▁րոպե",-13.720170021057127],["▁کررہے",-13.720170021057127],["▁ਉਮਰ",-13.720170021057127],["▁ਗ੍ਰੰਥ",-13.720170021057127],["▁ખર્ચ",-13.720170021057127],["▁થોડા",-13.720170021057127],["▁లైఫ్",-13.720170021057127],["▁ಇತಿಹಾಸ",-13.720170021057127],["▁സ്ഥാപന",-13.720170021057127],["▁ამავე",-13.720170021057127],["▁საშუალო",-13.720170021057127],["▁ყველაფერს",-13.720170021057127],["셋",-13.720170021057127],["▁Mkutano",-13.720170974731444],["▁Xocalı",-13.720170974731444],["▁câştig",-13.720170974731444],["▁núcleo",-13.720170974731444],["▁اسرائيل",-13.720170974731444],["▁ویدیو",-13.720170974731444],["▁ಸಲಹೆ",-13.720170974731444],["▁гісторык",-13.720171928405762],["▁keskiviikko",-13.720172882080078],["▁zespół",-13.720172882080078],["▁Bedarf",-13.720173835754396],["▁ռազմական",-13.720176696777344],["ALDE",-13.72017765045166],["▁địch",-13.72017765045166],["▁ባንክ",-13.72017765045166],["▁оборудование",-13.720182418823242],["▁mięśni",-13.72018337249756],["▁comienza",-13.720186233520508],["▁جرائم",-13.720191955566406],["▁pecunia",-13.720192909240724],["▁Skandinav",-13.720194816589355],["граден",-13.720195770263672],["పాల",-13.720197677612305],["▁Мындай",-13.720197677612305],["▁iepirk",-13.72019863128662],["▁Experience",-13.720202445983888],["▁ونحن",-13.720202445983888],["▁сомнева",-13.720203399658203],["▁mafuta",-13.720206260681152],["▁Konzert",-13.720208168029783],["▁kakšen",-13.720211029052734],["argomento",-13.72021198272705],["rauta",-13.720215797424316],["▁Եղ",-13.720220565795898],["▁உயிர",-13.72022819519043],["▁상승",-13.72022819519043],["▁konung",-13.72023582458496],["▁bahaya",-13.72024631500244],["났다",-13.72024631500244],["▁beställa",-13.720257759094238],["▁hlið",-13.720257759094238],["▁आश",-13.720259666442873],["▁quisquam",-13.720267295837402],["最後の",-13.720267295837402],["baptis",-13.720269203186035],["▁হওয়ার",-13.720270156860352],["▁matkusta",-13.720277786254885],["ตั้งใจ",-13.7202787399292],["▁oraindik",-13.720280647277832],["ጠይቅ",-13.720292091369627],["▁Tubuh",
-13.720306396484377],["▁precisar",-13.720307350158691],["高く",-13.720308303833008],["▁Kaf",-13.720309257507324],["turk",-13.72031307220459],["▁погледа",-13.72032070159912],["▁Armenia",-13.72033405303955],["ປິດ",-13.720335006713867],["違反",-13.720335960388184],["▁Ping",-13.720351219177246],["ιακής",-13.720352172851562],["ystė",-13.72035312652588],["▁raro",-13.720358848571776],["▁çatıb",-13.720359802246094],["њо",-13.72036075592041],["影音",-13.72036361694336],["▁שינויים",-13.720372200012209],["▁objectifs",-13.720396041870115],["▁koble",-13.720398902893066],["▁toisin",-13.720399856567385],["Hand",-13.72041130065918],["加以",-13.720415115356444],["▁kapsul",-13.720417022705078],["ম্যান",-13.720418930053713],["に向けて",-13.720419883728027],["▁actuel",-13.72043514251709],["實驗",-13.720452308654783],["化妝",-13.720471382141112],["▁Tibb",-13.720491409301758],["قاع",-13.720499038696287],["▁şehrin",-13.720504760742188],["▁ይፋ",-13.720514297485352],["▁තියා",-13.720515251159668],["തയും",-13.720520973205566],["નમાં",-13.720532417297363],["▁Лор",-13.720541954040527],["▁ਰੋਕ",-13.720561027526855],["▁acordul",-13.720566749572754],["▁Metsä",-13.720580101013184],["延長",-13.720609664916992],["▁Aztán",-13.720610618591309],["▁بدی",-13.72061824798584],["▁Runde",-13.720633506774902],["▁embarca",-13.720638275146484],["▁մասնակից",-13.720645904541016],["▁appuntamento",-13.720672607421877],["▁определения",-13.720704078674316],["րության",-13.720721244812012],["▁Tiba",-13.720722198486328],["▁украс",-13.72072410583496],["ਲੂ",-13.720726013183594],["▁റോഡ",-13.720735549926758],["▁ქალაქი",-13.72074031829834],["▁квартиры",-13.720741271972656],["▁PNL",-13.720760345458984],["ογράφο",-13.720763206481934],["▁lähtee",-13.720765113830566],["▁מבקש",-13.72077178955078],["▁buscan",-13.720775604248049],["▁pankki",-13.720780372619627],["▁αγαπη",-13.720820426940918],["Rozhod",-13.720829010009766],["▁позов",-13.720830917358398],["ተዋል፡፡",-13.72084140777588],["ները՝",-13.720844268798828],["▁trvá",-13.720849990844728],["יכו",-13.72085189819336],["▁ปรสิตใน",-13.720854759216309],["▁الظ",-13.72085952758789],["▁кийинки",-13.720860481262209],["大批",-13.72086238861084],["▁Gary",-13.720881462097168],["▁organizasyon",-13.720884323120115],["▁dużej",-13.720890045166016],["άτες",-13.720914840698242],["▁житло",-13.720922470092772],["公司在",-13.720924377441406],["lənib",-13.720925331115724],["▁hrano",-13.720934867858888],["▁igrati",-13.720934867858888],["▁senyum",-13.720941543579102],["දෝ",-13.720942497253418],["▁forduló",-13.720945358276367],["▁internasjonale",-13.720951080322266],["▁सङ्",-13.720952033996582],["▁komende",-13.720965385437012],["アー",-13.72096824645996],["treiben",-13.720991134643556],["▁বিয়ে",-13.720993995666504],["▁mówił",-13.721009254455566],["▁giocare",-13.7210111618042],["-38",-13.721031188964844],["▁ਮਹਿ",-13.721038818359377],["▁loppuun",-13.721039772033691],["五月",-13.721039772033691],["▁datë",-13.721054077148438],["▁යාපාරික",-13.721064567565918],["▁pandang",-13.721068382263184],["▁Африк",-13.7210693359375],["jack",-13.721073150634766],["တာကို",-13.721074104309082],["▁taagan",-13.721081733703612],["▁delako",-13.721087455749512],["民警",-13.721092224121094],["▁валюта",-13.721123695373535],["గిన",-13.721129417419434],["361",-13.721136093139648],["օրինակ",-13.721171379089355],["运作",-13.721183776855469],["▁skaffe",-13.721211433410645],["▁شې",-13.72121810913086],["▁labda",-13.721232414245604],["▁वायु",-13.721232414245604],["▁невы",-13.721237182617188],["▁kuriuo",-13.721259117126465],["biye",-13.721261978149414],["MIC",-13.721312522888184],["▁відкрив",-13.72
13134765625],["▁дія",-13.721346855163574],["▁Stiftung",-13.721348762512209],["▁relacionado",-13.721353530883787],["▁kalkula",-13.721354484558104],["▁oficinas",-13.721359252929688],["▁zdroje",-13.72136688232422],["ระวัง",-13.72138214111328],["ಡ್ಡಿ",-13.721385955810549],["ሙን",-13.721394538879396],["笑顔",-13.721397399902344],["▁نقص",-13.721405029296877],["lać",-13.721424102783203],["sgrif",-13.72142505645752],["▁aurkako",-13.721439361572266],["▁تەڭ",-13.721442222595217],["▁indít",-13.721454620361328],["연구소",-13.721455574035645],["ঈ",-13.72146701812744],["▁طويل",-13.721497535705566],["ẠN",-13.721519470214844],["ούλα",-13.721522331237791],["ջան",-13.721534729003906],["▁Saks",-13.721537590026855],["▁glavne",-13.721542358398438],["▁პარლამენტის",-13.721545219421388],["▁මිලියන",-13.721552848815918],["▁službu",-13.721555709838867],["▁حامی",-13.721576690673828],["ėliai",-13.72158145904541],["▁తాజాగా",-13.721583366394045],["▁работно",-13.721609115600586],["گذار",-13.721620559692385],["▁Đang",-13.7216215133667],["ವೋ",-13.721652030944824],["▁responsabilitat",-13.72166919708252],["▁identify",-13.721674919128418],["▁బ్ల",-13.721689224243164],["ありますが",-13.721719741821287],["▁asteroid",-13.721761703491213],["▁največji",-13.721783638000488],["假如",-13.721803665161133],["▁notable",-13.721808433532717],["▁바다",-13.72184944152832],["▁selam",-13.721856117248535],["▁दिइ",-13.721861839294434],["ਵਾਹ",-13.721877098083496],["рылған",-13.721903800964355],["WAL",-13.72192668914795],["▁podíl",-13.72194004058838],["▁جبر",-13.72194004058838],["推行",-13.721985816955566],["▁Move",-13.721994400024414],["īri",-13.72201919555664],["■",-13.72201919555664],["ແຮງ",-13.722021102905272],["English",-13.72202205657959],["ചിത",-13.722036361694336],["▁sending",-13.722058296203612],["اعية",-13.722067832946776],["မေလး",-13.722068786621094],["性感",-13.722075462341309],["льними",-13.722089767456056],["▁marto",-13.722126960754396],["▁rêz",-13.722135543823242],["▁večera",-13.722142219543455],["შენებ",-13.722148895263672],["▁skirta",-13.722156524658203],["引用",-13.722167015075684],["ใช้บริการ",-13.722172737121582],["▁төлөвлө",-13.72218894958496],["ാറുണ്ട്",-13.722192764282228],["▁americké",-13.722195625305176],["ஞ்சு",-13.722200393676758],["lagos",-13.722208976745604],["独特的",-13.722211837768556],["▁சம்ப",-13.722222328186035],["▁datuak",-13.7222261428833],["▁sõltu",-13.722256660461426],["はい",-13.722259521484377],["▁nespo",-13.722267150878906],["泄",-13.722275733947754],["▁پڙه",-13.72227668762207],["წარმო",-13.72227954864502],["▁ያላ",-13.72228717803955],["▁Gerne",-13.72230625152588],["▁Benki",-13.72231674194336],["▁đươ",-13.722329139709473],["▁suurus",-13.722345352172852],["▁የነበረ",-13.72235107421875],["▁देती",-13.722389221191406],["▁Міністр",-13.722397804260254],["鸣",-13.72241497039795],["▁nebolo",-13.722416877746582],["▁वृत्त",-13.722416877746582],["זכור",-13.722423553466797],["默默",-13.722431182861328],["炉",-13.722455024719238],["▁abuur",-13.722456932067873],["▁បញ្ចេញ",-13.72247314453125],["삭",-13.72248363494873],["▁humanist",-13.72248649597168],["干净",-13.72248649597168],["ఢ",-13.722489356994627],["▁обсужден",-13.722489356994627],["▁ചികിത്സ",-13.722489356994627],["▁ბრძოლა",-13.722489356994627],["▁Henkilö",-13.722490310668944],["▁Həmçinin",-13.722490310668944],["▁apdovano",-13.722490310668944],["▁bohužel",-13.722490310668944],["▁cumprimento",-13.722490310668944],["▁geskiedenis",-13.722490310668944],["▁podršku",-13.722490310668944],["▁przyszłości",-13.722490310668944],["▁ugyanakkor",-13.722490310668944],["▁понеделник",-13.722490310668944],["▁қағида",-13.
722490310668944],["▁Աստված",-13.722490310668944],["▁նոյեմբերի",-13.722490310668944],["▁ֆինանսական",-13.722490310668944],["▁اهتمام",-13.722490310668944],["▁खतरा",-13.722490310668944],["▁ਦਿਵਸ",-13.722490310668944],["▁ሶስት",-13.722490310668944],["잉",-13.722490310668944],["주셔서",-13.722490310668944],["튜",-13.722490310668944],["▁dihasilkan",-13.722491264343262],["▁αναζητ",-13.722492218017578],["▁Romsdal",-13.722493171691896],["▁genellikle",-13.72249698638916],["▁gcónaí",-13.72249984741211],["▁Polar",-13.722500801086426],["▁хочешь",-13.722501754760742],["▁ਨਵੇਂ",-13.72250270843506],["məyəcək",-13.722505569458008],["▁HTTP",-13.722506523132324],["▁münasibətilə",-13.722511291503906],["▁नोटिफिकेशन्स",-13.722511291503906],["▁последње",-13.722512245178224],["▁अक्षय",-13.72251319885254],["▁कार्की",-13.722516059875488],["▁daarin",-13.722517013549805],["마트",-13.722518920898438],["▁स्वदेश",-13.722521781921388],["గ్రహ",-13.722524642944336],["▁Palazzo",-13.722530364990234],["▁халықтың",-13.722536087036133],["用於",-13.722540855407717],["▁قيد",-13.722549438476562],["▁verdienen",-13.72256088256836],["▁reunião",-13.722562789916992],["▁cywil",-13.722567558288574],["соль",-13.72257137298584],["체육",-13.722573280334473],["Gene",-13.72258472442627],["ıç",-13.722596168518066],["▁თავისუფალი",-13.722599029541016],["▁kasutamine",-13.72260284423828],["▁לעמוד",-13.722606658935549],["слеп",-13.722607612609863],["▁आयुक्त",-13.722616195678713],["▁smanji",-13.72262954711914],["▁Litera",-13.722634315490724],["▁пределах",-13.722639083862305],["တည္း",-13.72264003753662],["бейт",-13.722657203674316],["▁ferrovia",-13.72265911102295],["▁lahayn",-13.72266674041748],["Bud",-13.722686767578123],["שותף",-13.72268772125244],["▁၂။",-13.72268772125244],["▁notizia",-13.72269058227539],["▁изјава",-13.722713470458984],["▁ginen",-13.722715377807615],["ေယာ",-13.72271728515625],["▁честь",-13.722722053527832],["ТТ",-13.722740173339844],["▁доволно",-13.72274398803711],["▁Burn",-13.722753524780272],["▁වර්ෂ",-13.72275447845459],["гуй",-13.722759246826172],["▁sunan",-13.72279167175293],["▁بچا",-13.72279167175293],["▁Pieter",-13.722818374633787],["த்தது",-13.722824096679688],["ματική",-13.722835540771484],["▁მძიმე",-13.72285270690918],["▁_----",-13.722862243652344],["▁cartea",-13.722864151000977],["▁Prior",-13.722902297973633],["▁samtliga",-13.722925186157228],["▁mechanic",-13.722947120666504],["▁संदर्भ",-13.722949028015137],["காம்",-13.722954750061035],["רכיב",-13.722968101501465],["▁Izraeli",-13.722977638244627],["▁Taller",-13.722982406616213],["▁korrupt",-13.72298812866211],["▁بەك",-13.722989082336426],["стоять",-13.723007202148438],["▁vaihto",-13.72300910949707],["rām",-13.723018646240234],["plata",-13.723021507263184],["見え",-13.723026275634766],["▁なお",-13.723031044006348],["▁افکار",-13.72304916381836],["▁частью",-13.723050117492676],["▁logotip",-13.723062515258787],["▁mantenir",-13.723104476928713],["▁Clima",-13.723122596740724],["▁በቅ",-13.72313117980957],["▁induc",-13.723137855529783],["▁поднял",-13.72316551208496],["▁põhjal",-13.723166465759276],["▁ಮಾಡಿಕೊಂಡ",-13.723173141479492],["▁Forschungs",-13.723188400268556],["▁ardura",-13.72319221496582],["ዉን",-13.723211288452148],["idhinn",-13.72321605682373],["ованого",-13.72321891784668],["▁veículo",-13.723221778869627],["авала",-13.723262786865234],["▁વહે",-13.7232666015625],["▁periodu",-13.723270416259766],["▁verwacht",-13.723273277282717],["ျဖ",-13.723294258117676],["ਦਾਨ",-13.72330093383789],["7,8",-13.723305702209473],["ることで",-13.723310470581056],["不对",-13.723316192626951],["éért",-13.723329544067385]
,["▁souhlas",-13.723332405090332],["▁כלים",-13.723333358764648],["▁chceme",-13.723340034484863],["▁izobraževanj",-13.72337245941162],["بشر",-13.72340488433838],["▁वित्त",-13.72341537475586],["ზღვ",-13.723421096801758],["▁vaut",-13.723432540893556],["▁ಚಂದ್ರ",-13.723438262939451],["▁원인",-13.723468780517578],["주년",-13.72348690032959],["▁ringi",-13.723495483398438],["ИТИ",-13.723505973815918],["bře",-13.723526000976562],["▁بدلا",-13.723578453063965],["▁profili",-13.723589897155762],["▁знищ",-13.723594665527344],["自己在",-13.723626136779783],["▁testet",-13.723639488220217],["▁responsabile",-13.723655700683594],["▁Maior",-13.723663330078123],["ነቱን",-13.723668098449709],["一线",-13.72368335723877],["ੰਗਾ",-13.72368621826172],["▁hazırlanması",-13.723689079284668],["▁регіональн",-13.723692893981934],["▁ધો",-13.723694801330566],["movie",-13.723711967468262],["▁желаю",-13.72371768951416],["ておく",-13.723721504211426],["▁ਵਰਤ",-13.723727226257324],["▁lisas",-13.723739624023438],["ىلار",-13.72374439239502],["кић",-13.723749160766602],["▁Minute",-13.723756790161133],["▁indice",-13.723763465881348],["ត្ត",-13.723773002624512],["▁kärsi",-13.723787307739258],["▁ಪೇ",-13.723797798156738],["▁القومي",-13.723807334899902],["มั่น",-13.723825454711914],["▁Devo",-13.723873138427734],["சிங்க",-13.723882675170898],["▁margir",-13.723882675170898],["laşdırıl",-13.72389030456543],["σώ",-13.72390079498291],["Bol",-13.72390842437744],["က်င္းပ",-13.72391414642334],["stub",-13.723916053771973],["▁samlede",-13.723958969116213],["▁modtage",-13.723974227905272],["വരി",-13.723999977111816],["ТІ",-13.72402286529541],["▁plát",-13.724051475524902],["▁kolam",-13.724055290222168],["▁признава",-13.7240571975708],["หนึ่งใน",-13.724069595336914],["▁قدیم",-13.724085807800291],["ਜੋ",-13.72414493560791],["ํ",-13.724166870117188],["▁природо",-13.724177360534668],["▁парку",-13.724186897277832],["▁ڈرا",-13.724193572998049],["言われ",-13.724204063415527],["▁करावे",-13.72420597076416],["到着",-13.724223136901855],["▁znajo",-13.72422695159912],["▁prvaka",-13.724233627319336],["▁төслий",-13.724235534667969],["▁بقول",-13.724235534667969],["ZEM",-13.724268913269045],["▁sæd",-13.724272727966309],["bundet",-13.724273681640623],["맞",-13.724284172058104],["باي",-13.724292755126951],["vuutta",-13.724302291870115],["▁ඕනෙ",-13.724325180053713],["多元化",-13.724326133728027],["▁절대",-13.72433376312256],["▁rematar",-13.72434902191162],["wurd",-13.724356651306152],["▁είπα",-13.724357604980469],["express",-13.724369049072266],["तुल",-13.724371910095217],["▁გამი",-13.72438144683838],["▁kapalı",-13.724382400512695],["빌",-13.724401473999023],["МБ",-13.72441291809082],["ขณะนี้",-13.7244234085083],["▁граждани",-13.724424362182615],["▁ចូលរួម",-13.724425315856934],["▁Бах",-13.724449157714844],["Карпат",-13.724461555480955],["▁Gaas",-13.724512100219728],["▁megyei",-13.724534034729004],["▁Sugar",-13.724544525146484],["▁המון",-13.7245454788208],["▁pulver",-13.724556922912598],["▁Deklar",-13.724564552307127],["רר",-13.724565505981444],["volo",-13.72458553314209],["▁ဖုန္း",-13.72459316253662],["▁אנא",-13.724647521972656],["▁پریس",-13.724655151367188],["▁협력",-13.724656105041504],["няв",-13.724658966064451],["izeaza",-13.72466278076172],["凍",-13.724713325500488],["▁levo",-13.724719047546388],["மர்",-13.724732398986816],["פסק",-13.724742889404297],["雾",-13.724743843078612],["ਜਰ",-13.72476863861084],["hatás",-13.724771499633787],["▁Strecke",-13.724775314331056],["132",-13.7247896194458],["づくり",-13.724796295166016],["呵呵",-13.724798202514648],["▁Закарпат",-13.724808692932127],["zub",-13.72481
3461303713],["▁аман",-13.724814414978027],["รุนแรง",-13.724815368652344],["လႊာ",-13.724815368652344],["▁Myslím",-13.724815368652344],["▁Ujerumani",-13.724815368652344],["▁possède",-13.724815368652344],["▁predovšetkým",-13.724815368652344],["▁įsigyti",-13.724815368652344],["▁γίνουν",-13.724815368652344],["▁δημοσιογράφο",-13.724815368652344],["▁Шығыс",-13.724815368652344],["▁територији",-13.724815368652344],["▁упражнения",-13.724815368652344],["▁ميڊيا",-13.724815368652344],["▁तुझ्या",-13.724815368652344],["▁सप्टेंबर",-13.724815368652344],["▁প্রাথমিক",-13.724815368652344],["▁ඔක්කොම",-13.724815368652344],["▁Mohamad",-13.72481632232666],["▁Председател",-13.72481632232666],["▁עצמה",-13.72481632232666],["▁ጣቢያ",-13.72481632232666],["▁aanbieding",-13.724817276000977],["▁domicile",-13.724817276000977],["▁કાર્યક્રમ",-13.724817276000977],["ក្តី",-13.72481918334961],["▁વિવિધ",-13.724821090698242],["▁сделки",-13.72482204437256],["▁նույնպես",-13.72482204437256],["▁посещение",-13.724822998046877],["▁Duterte",-13.724824905395508],["▁մայիսի",-13.724824905395508],["▁ثبوت",-13.724824905395508],["▁ऑफिस",-13.724824905395508],["▁diplomatik",-13.724825859069824],["▁geweet",-13.724827766418455],["▁verändert",-13.724830627441406],["反応",-13.724831581115724],["▁sākumā",-13.72483253479004],["▁Lebensmittel",-13.724834442138672],["▁النووي",-13.724846839904783],["▁ବିବାଦ",-13.724847793579102],["▁leerlingen",-13.724848747253418],["▁постапка",-13.724858283996582],["▁યોગ્ય",-13.724859237670898],["▁eleştir",-13.724860191345217],["▁δούμε",-13.724861145019531],["▁Gainera",-13.724862098693848],["▁Associação",-13.724870681762695],["▁sprawdzić",-13.724870681762695],["▁өндіру",-13.724871635437012],["▁blízkosti",-13.724872589111328],["▁чыгуу",-13.724873542785645],["▁ustaw",-13.72487449645996],["▁היינט",-13.72487449645996],["▁ഹരി",-13.724876403808594],["用水",-13.724883079528809],["▁prekin",-13.724884033203123],["קופה",-13.72488498687744],["▁heimili",-13.724886894226074],["▁بارداری",-13.724893569946287],["▁ちなみに",-13.724894523620604],["дневен",-13.724896430969238],["▁cyngor",-13.724905014038086],["فرق",-13.724907875061035],["▁dizayn",-13.72491455078125],["▁pamiętać",-13.724915504455566],["▁shtat",-13.724937438964844],["▁Einstellung",-13.72495174407959],["ගනිමින්",-13.72495460510254],["▁yargı",-13.72496509552002],["عفو",-13.7249755859375],["▁530",-13.7249755859375],["▁तौर",-13.724982261657717],["▁взаємо",-13.724994659423828],["▁mindenkinek",-13.725000381469728],["佛教",-13.72500228881836],["адам",-13.725006103515623],["▁jednoduchý",-13.725028038024902],["fata",-13.725052833557127],["▁حلف",-13.725053787231444],["▁Raven",-13.725057601928713],["▁rádio",-13.725059509277344],["▁الأساسية",-13.72506046295166],["▁profesores",-13.725069999694824],["▁자신이",-13.725077629089355],["▁Meslek",-13.725082397460938],["▁കൂടിയ",-13.725090980529783],["▁prestazioni",-13.72509479522705],["حادث",-13.72509765625],["▁कम्पनीले",-13.725119590759276],["▁експло",-13.72513198852539],["pól",-13.725136756896973],["▁søvn",-13.725147247314451],["жол",-13.72516918182373],["▁පාට",-13.725189208984377],["ыкты",-13.72520923614502],["▁ఏళ్ల",-13.72522258758545],["▁ഗ്ഗ",-13.72524642944336],["▁rodinný",-13.725247383117676],["分类",-13.725249290466309],["▁løbe",-13.725255012512209],["▁poklon",-13.725257873535156],["▁პრობლემები",-13.725259780883787],["▁vendre",-13.725268363952637],["▁प्रभु",-13.725279808044434],["▁Dator",-13.725289344787598],["lność",-13.72529125213623],["ಷನ್",-13.72530460357666],["▁Eyni",-13.725305557250977],["▁meron",-13.725319862365724],["▁Fried",-13.72532081604004],["
▁цяло",-13.725363731384276],["▁مقابلہ",-13.725374221801758],["▁алмай",-13.725382804870604],["▁ልክ",-13.725383758544922],["သလဲ",-13.725391387939451],["унду",-13.725394248962402],["▁magen",-13.725397109985352],["▁actuacións",-13.725423812866213],["كثير",-13.72542667388916],["בוט",-13.725454330444336],["▁వస్తున్న",-13.725454330444336],["μπό",-13.725464820861816],["▁βία",-13.725481986999512],["ెస్",-13.725483894348145],["кир",-13.725492477416992],["ਬੰਦੀ",-13.725496292114258],["▁milionů",-13.725506782531738],["▁FUN",-13.725515365600586],["▁எண்",-13.72552490234375],["aktiiv",-13.725526809692385],["၁၂",-13.725530624389648],["ເອງ",-13.725533485412598],["волі",-13.725545883178713],["ərsə",-13.725555419921877],["TERA",-13.725567817687988],["пълва",-13.72559928894043],["▁автоматично",-13.725605010986328],["▁escuchar",-13.725607872009276],["▁oldum",-13.725608825683594],["▁SVE",-13.72562026977539],["▁телото",-13.725655555725098],["كسي",-13.725672721862791],["▁Харківськ",-13.72567653656006],["▁våga",-13.725702285766602],["ЛС",-13.725717544555664],["▁relaxar",-13.725720405578612],["▁pozwalają",-13.725722312927246],["شهاد",-13.72574234008789],["▁περιοχής",-13.725749015808104],["ాలో",-13.725762367248535],["▁teži",-13.725785255432127],["▁مهد",-13.725790023803713],["▁रहें",-13.72581386566162],["▁процента",-13.725814819335938],["編集",-13.725814819335938],["lamış",-13.725842475891112],["也不知道",-13.72585105895996],["東北",-13.725869178771973],["magazin",-13.72587776184082],["▁Fischer",-13.72587776184082],["▁prego",-13.725896835327148],["▁alimenti",-13.725902557373049],["▁enviado",-13.725908279418944],["▁verità",-13.725910186767578],["▁العر",-13.725918769836426],["團購",-13.72596263885498],["▁பெண்ண",-13.725963592529297],["▁realizou",-13.725967407226562],["▁સુખ",-13.725969314575195],["▁ponder",-13.725996971130373],["ধু",-13.726001739501951],["▁Ekologi",-13.726008415222168],["▁experi",-13.726011276245115],["▁casu",-13.72602081298828],["▁катастрофа",-13.726027488708496],["▁ԵՄ",-13.726055145263672],["uotos",-13.726064682006836],["▁mAh",-13.726086616516112],["▁llyfr",-13.72609043121338],["▁profesora",-13.726131439208984],["▁wiesz",-13.726131439208984],["autore",-13.726147651672363],["出来ない",-13.72617530822754],["reiro",-13.72618579864502],["▁airgead",-13.726188659667969],["нското",-13.726204872131348],["liberal",-13.726222038269045],["一項",-13.726232528686523],["▁скра",-13.72623348236084],["▁בחדר",-13.726237297058104],["▁iniziare",-13.726242065429688],["▁bayramı",-13.726256370544434],["▁tunnin",-13.72625732421875],["lekua",-13.72629165649414],["▁onaj",-13.726296424865724],["▁borish",-13.726305961608888],["▁lipo",-13.72632122039795],["▁요금",-13.726326942443848],["▁cercando",-13.726327896118164],["▁mirip",-13.726333618164062],["нску",-13.72634506225586],["占比",-13.726394653320312],["▁unnið",-13.72640323638916],["ምላ",-13.726407051086426],["ោត",-13.72641658782959],["▁askari",-13.72642993927002],["▁Tanz",-13.726445198059082],["▁päätös",-13.726447105407717],["▁याचा",-13.726455688476562],["▁rabo",-13.726466178894045],["▁Bilde",-13.726476669311523],["▁siekiant",-13.726481437683104],["stamiseks",-13.726489067077637],["ാനാ",-13.726521492004396],["▁puolella",-13.726537704467772],["▁holo",-13.726540565490724],["みる",-13.726544380187988],["▁løn",-13.726555824279783],["үштү",-13.726563453674316],["▁പേരില്",-13.72657299041748],["ጋት",-13.726598739624023],["gräns",-13.726609230041504],["▁emaitza",-13.726644515991213],["▁शुद्ध",-13.72666072845459],["▁bore",-13.726661682128906],["▁galerija",-13.726661682128906],["ेवर",-13.726667404174805],["▁предно
ст",-13.72667121887207],["▁Salve",-13.726679801940918],["▁методы",-13.726696968078612],["szok",-13.72669792175293],["▁koleksi",-13.726703643798828],["త్రం",-13.726713180541992],["늘",-13.726733207702637],["▁assessor",-13.726737022399902],["▁Одлука",-13.726737022399902],["같은",-13.72673797607422],["▁עסקי",-13.726749420166016],["સિંહ",-13.726759910583496],["스포츠",-13.726761817932127],["があると",-13.726798057556152],["دے",-13.726814270019531],["▁Роди",-13.726815223693848],["がありますが",-13.726825714111328],["▁платы",-13.726842880249023],["▁отворена",-13.72686004638672],["▁talp",-13.726893424987791],["▁festes",-13.726923942565918],["特色的",-13.726927757263184],["▁պատմության",-13.726937294006348],["▁Sankta",-13.726940155029297],["▁247",-13.726950645446776],["▁использован",-13.72700309753418],["常委",-13.727033615112305],["නයේ",-13.727036476135254],["▁служе",-13.72704029083252],["อุดม",-13.727105140686035],["▁Vinh",-13.727108001708984],["▁ubytov",-13.727127075195312],["授權",-13.727134704589844],["▁कपिल",-13.72713565826416],["▁Hunt",-13.727137565612791],["▁kulturne",-13.727142333984377],["ডাউনলোড",-13.72714614868164],["▁Brasília",-13.72714614868164],["▁Mawrth",-13.72714614868164],["▁brezhoneg",-13.72714614868164],["▁wanachama",-13.72714614868164],["▁wokół",-13.72714614868164],["▁Öğretmen",-13.72714614868164],["▁сериозно",-13.72714614868164],["▁الكبرى",-13.72714614868164],["▁यातायात",-13.72714614868164],["▁சமையல்",-13.72714614868164],["▁네이버",-13.72714614868164],["럭",-13.72714614868164],["ยื่น",-13.727147102355955],["អភិវឌ្ឍន៍",-13.727147102355955],["▁vazifalar",-13.727147102355955],["▁vücudu",-13.727147102355955],["▁өтініш",-13.727147102355955],["▁معیشت",-13.727147102355955],["▁খুলনা",-13.727147102355955],["▁ക്ലിക്ക്",-13.727147102355955],["▁Książ",-13.727148056030272],["▁klockan",-13.727148056030272],["▁onderneming",-13.727148056030272],["▁পরিবেশ",-13.72714900970459],["▁bãi",-13.727149963378906],["▁ebből",-13.727152824401855],["▁կրակ",-13.727152824401855],["▁Architect",-13.727153778076172],["▁بەرگەن",-13.727153778076172],["▁ubytování",-13.727154731750488],["▁заштити",-13.727157592773438],["▁फिर्ता",-13.727158546447754],["▁קטנים",-13.72715950012207],["ಶಾಸ್ತ್ರ",-13.727160453796388],["▁Книга",-13.72716236114502],["▁ظالم",-13.72716236114502],["▁портрет",-13.727168083190918],["▁часопіс",-13.72716999053955],["▁બતાવ",-13.72716999053955],["▁Tromsø",-13.727171897888184],["▁설계",-13.7271728515625],["▁förvänta",-13.727173805236816],["▁ketahui",-13.727184295654297],["▁giştî",-13.727185249328612],["▁லட்சம்",-13.72718620300293],["▁nebus",-13.727188110351562],["▁közelében",-13.727190971374512],["890",-13.727191925048828],["▁África",-13.727192878723145],["▁довери",-13.72719383239746],["▁effectué",-13.727198600769045],["▁አይደለም፡፡",-13.727205276489258],["▁ցավ",-13.72720718383789],["▁պաշտպանված",-13.727217674255373],["▁vigtigste",-13.727232933044434],["▁දාලා",-13.72724151611328],["אוכל",-13.727243423461914],["▁hA",-13.72727394104004],["▁дээрх",-13.72727394104004],["▁изследване",-13.727275848388672],["▁Schlaf",-13.727276802062988],["▁yoq",-13.727276802062988],["owiec",-13.727279663085938],["ушки",-13.72729778289795],["▁Verhalten",-13.727307319641112],["▁ООД",-13.727307319641112],["▁Ответ",-13.727310180664062],["855",-13.727314949035645],["තාවය",-13.72732639312744],["▁sælge",-13.727330207824709],["▁tematy",-13.727331161499023],["▁אחריות",-13.727347373962402],["ிலும்",-13.727352142333984],["▁нивоу",-13.727357864379885],["▁posteriormente",-13.727360725402832],["▁جبل",-13.727364540100098],["▁wenyewe",-13.727365493774414],["▁melléklet",-1
3.727368354797363],["6.5",-13.727407455444336],["וכח",-13.727407455444336],["▁continued",-13.727410316467283],["▁planlagt",-13.7274169921875],["▁conscience",-13.727426528930664],["▁հի",-13.72742748260498],["▁କରାଯାଇ",-13.72743797302246],["tekijä",-13.727449417114258],["お伝え",-13.727449417114258],["увався",-13.72745132446289],["▁Šar",-13.72745132446289],["▁eftirfarandi",-13.727455139160156],["011",-13.727457046508787],["λεκτρ",-13.727462768554688],["▁гора",-13.727463722229004],["▁kehtesta",-13.727483749389648],["▁옷",-13.727494239807127],["▁ताज",-13.727500915527344],["▁როგორი",-13.727500915527344],["▁ജന്മ",-13.727510452270508],["▁prejšnj",-13.727516174316406],["▁בקו",-13.727521896362305],["▁ais",-13.72757339477539],["▁білікті",-13.727577209472656],["მარი",-13.727595329284668],["GRO",-13.727604866027832],["HARA",-13.727618217468262],["にしても",-13.72764778137207],["ằm",-13.727649688720703],["▁prate",-13.727656364440918],["▁حسینی",-13.727662086486816],["đeno",-13.727686882019045],["▁необходимые",-13.727737426757812],["▁kūno",-13.727758407592772],["uliza",-13.727770805358888],["▁tadbir",-13.727783203125],["عوب",-13.727785110473633],["ከበር",-13.72779369354248],["流通",-13.72779655456543],["▁tänava",-13.727797508239746],["하고자",-13.727805137634276],["▁fiquei",-13.727808952331545],["ਆਰ",-13.72781467437744],["огляд",-13.727853775024414],["▁revenir",-13.727858543395996],["財政",-13.727867126464844],["▁sembuh",-13.727874755859377],["樂團",-13.727877616882324],["▁treneri",-13.727883338928224],["њег",-13.727893829345703],["หิ",-13.72789478302002],["▁pořad",-13.72789478302002],["かかり",-13.7279052734375],["ひとり",-13.72793197631836],["dzię",-13.727940559387209],["ვალე",-13.727946281433104],["▁ütle",-13.727946281433104],["▁puteri",-13.727947235107422],["itatile",-13.727951049804688],["ရိုက္",-13.727959632873535],["▁Farbe",-13.727962493896484],["LEV",-13.7279634475708],["▁ataques",-13.727964401245115],["ຄູ",-13.727967262268066],["റ്റില്",-13.727972030639648],["éry",-13.727978706359863],["▁sikkerhet",-13.727985382080078],["ママ",-13.72799301147461],["▁registaro",-13.728005409240724],["車站",-13.728009223937988],["eceksiniz",-13.72804069519043],["ຫຍັງ",-13.728055000305176],["зображ",-13.728083610534668],["演唱",-13.728086471557615],["تحول",-13.728096961975098],["▁redusere",-13.728118896484377],["برت",-13.72812271118164],["▁Canaria",-13.728143692016602],["▁Вашите",-13.728145599365234],["▁resurse",-13.728153228759766],["વાના",-13.728156089782717],["できるように",-13.72815990447998],["6,7",-13.728184700012209],["有什麼",-13.728194236755373],["▁Ayon",-13.728203773498535],["▁Szép",-13.7282075881958],["▁finalement",-13.728240966796877],["瞭",-13.728243827819824],["▁anglais",-13.72825050354004],["ישות",-13.728282928466797],["7.5",-13.728299140930176],["▁جميعا",-13.728303909301758],["▁whom",-13.728304862976074],["CEP",-13.728334426879885],["ੰਸ",-13.7283353805542],["▁eterna",-13.72834014892578],["的專業",-13.728341102600098],["▁dirigir",-13.72834587097168],["మృత",-13.728363037109377],["ላፊ",-13.728363037109377],["▁Bizkaia",-13.72837257385254],["▁Dildo",-13.728374481201172],["▁mahnı",-13.728375434875488],["GUR",-13.72838306427002],["▁adolescente",-13.728389739990234],["▁saír",-13.728443145751951],["yksessä",-13.728468894958496],["▁underlag",-13.728492736816406],["▁бирж",-13.72850227355957],["обновлен",-13.728510856628418],["▁madera",-13.728530883789062],["보니",-13.728530883789062],["гүүр",-13.728557586669922],["▁felel",-13.728571891784668],["▁kannski",-13.72858428955078],["หลง",-13.728586196899414],["▁prepričan",-13.728593826293944],["▁prvem",-13.72859859
4665527],["▁பரி",-13.728598594665527],["เปลี่ยนแปลง",-13.72862720489502],["ностей",-13.728630065917969],["xesha",-13.728632926940918],["فيل",-13.728638648986816],["ဲြ",-13.728646278381348],["▁eemalda",-13.728646278381348],["才知道",-13.72865867614746],["jesti",-13.728671073913574],["▁මොනවා",-13.728676795959473],["តុ",-13.728680610656738],["あなたは",-13.728682518005373],["▁Қала",-13.728732109069824],["▁телефони",-13.728741645812988],["ؤل",-13.72874641418457],["▁Сава",-13.728748321533203],["PET",-13.728754997253418],["ուխ",-13.728763580322266],["▁stoga",-13.728764533996582],["▁өмірі",-13.728788375854492],["STRO",-13.728796005249023],["េក",-13.728802680969238],["▁reiser",-13.728816032409668],["▁వుండ",-13.728830337524414],["mantel",-13.728833198547363],["▁तिथे",-13.728853225708008],["▁køber",-13.72885513305664],["водни",-13.728864669799805],["ədən",-13.728866577148438],["▁অ্যাপ",-13.728890419006348],["▁опыта",-13.728914260864258],["▁continues",-13.728936195373535],["▁जाऊ",-13.728946685791016],["atriði",-13.728959083557127],["పడి",-13.728970527648926],["▁nádhern",-13.728970527648926],["两次",-13.728974342346191],["他们在",-13.72898769378662],["CAP",-13.728997230529783],["▁qilinadi",-13.729004859924316],["▁décide",-13.72900676727295],["લાય",-13.729039192199709],["▁necesitan",-13.72904109954834],["▁Pien",-13.729043006896973],["幻想",-13.729058265686035],["ພ້ອມ",-13.72906494140625],["कृष्ण",-13.729081153869627],["zunk",-13.729084968566896],["▁харах",-13.72914218902588],["▁starea",-13.729144096374512],["▁munosabat",-13.729147911071776],["▁ulteriori",-13.729148864746094],["▁hurbil",-13.729186058044434],["▁Sains",-13.729191780090332],["▁קני",-13.729196548461914],["▁tasca",-13.729202270507812],["▁скорость",-13.729214668273926],["▁hihi",-13.729242324829102],["▁trebaju",-13.729242324829102],["▁صغير",-13.72924518585205],["▁skoon",-13.729254722595217],["▁sanitario",-13.729291915893556],["YG",-13.729318618774414],["cionin",-13.729358673095703],["▁provodi",-13.729358673095703],["▁convenci",-13.729397773742676],["赔",-13.72940731048584],["Christ",-13.729409217834473],["▁eight",-13.729422569274902],["绑",-13.729433059692385],["侵犯",-13.72943878173828],["▁정확",-13.72944164276123],["▁паўста",-13.729442596435549],["协助",-13.729445457458496],["កម្មករ",-13.72945785522461],["▁sorter",-13.729459762573242],["▁incide",-13.729466438293455],["เยือน",-13.72948169708252],["▁atpakaļ",-13.729482650756836],["▁erklären",-13.729482650756836],["▁khuẩn",-13.729482650756836],["▁kilómetros",-13.729482650756836],["▁súčasnosti",-13.729482650756836],["▁sắm",-13.729482650756836],["▁жайгашкан",-13.729482650756836],["▁напрыклад",-13.729482650756836],["▁прадпрыемства",-13.729482650756836],["▁առանձին",-13.729482650756836],["▁ویکیپیڈیا",-13.729482650756836],["▁ಚರ್ಚೆ",-13.729482650756836],["▁උදව්",-13.729482650756836],["▁පාවිච්චි",-13.729482650756836],["▁ስምምነት",-13.729482650756836],["ປະກາດ",-13.729483604431152],["▁Vandaag",-13.729483604431152],["▁zwierząt",-13.729483604431152],["▁мүшелері",-13.729483604431152],["▁مشاكل",-13.729483604431152],["▁endereço",-13.729484558105469],["▁பொருட்",-13.729485511779783],["▁Mourinho",-13.729486465454102],["▁патрабава",-13.729486465454102],["▁Schmerz",-13.729487419128418],["▁теории",-13.729487419128418],["▁इस्लाम",-13.729487419128418],["เผยแพร่",-13.729488372802734],["▁pódense",-13.729488372802734],["▁lehiaketa",-13.729490280151367],["▁lucrativos",-13.7294921875],["▁توليد",-13.7294921875],["▁відділення",-13.729494094848633],["▁traucē",-13.72949504852295],["▁ڪٿي",-13.72949504852295],["▁ଏଠାରେ",-13.72949504852295],["▁Ministarstv
a",-13.729497909545898],["▁ispira",-13.729497909545898],["▁communicatie",-13.729500770568848],["កែវ",-13.729501724243164],["dled",-13.729504585266112],["เพียงแค่",-13.72950553894043],["□",-13.729510307312012],["[19]",-13.72951316833496],["▁учреждений",-13.729515075683594],["▁необходима",-13.729517936706545],["▁حقيقة",-13.729522705078123],["▁табл",-13.729528427124023],["▁سائيٽ",-13.729530334472656],["▁məhv",-13.729536056518556],["▁Thread",-13.72953987121582],["▁убежден",-13.729540824890137],["▁vidék",-13.729546546936035],["▁halkaas",-13.729548454284668],["▁Totalt",-13.729551315307615],["▁geskryf",-13.729551315307615],["▁നമ്മള്",-13.729552268981934],["▁զրույցում",-13.7295560836792],["▁کړۍ",-13.729557991027832],["▁deseti",-13.729561805725098],["▁এতে",-13.729565620422363],["▁התחת",-13.729576110839844],["▁vnitřní",-13.729592323303224],["ობრივი",-13.729598999023438],["τως",-13.729604721069336],["▁મારો",-13.729605674743652],["▁תוצאות",-13.729619026184082],["請問",-13.72962760925293],["▁የኦሮሞ",-13.72963809967041],["▁развија",-13.72964096069336],["▁အိမ်",-13.729641914367676],["▁öldürül",-13.729643821716309],["发生的",-13.729653358459473],["▁bereiken",-13.729656219482422],["▁likuma",-13.729662895202637],["▁Divin",-13.729668617248535],["▁мањи",-13.729669570922852],["träger",-13.729692459106444],["▁التنظيم",-13.729693412780762],["▁recebeu",-13.729735374450684],["▁rupe",-13.72977352142334],["▁indikator",-13.729783058166504],["సింది",-13.729785919189451],["игна",-13.729795455932615],["فرنس",-13.729796409606934],["▁मनात",-13.729816436767578],["▁ennill",-13.729825973510742],["▁Петар",-13.729833602905272],["शब्द",-13.729840278625488],["ጠቀም",-13.729840278625488],["ביצוע",-13.729846954345703],["ोत्तर",-13.729873657226562],["▁кейбір",-13.729898452758787],["▁possam",-13.729904174804688],["▁jännit",-13.729907035827637],["ຕັດ",-13.729937553405762],["▁minerale",-13.72994327545166],["审批",-13.729944229125977],["▁जिम्मेवारी",-13.72994613647461],["▁жовт",-13.729951858520508],["▁kommunens",-13.72995376586914],["▁Banken",-13.729955673217772],["▁Tochter",-13.72997760772705],["▁ຕອນ",-13.729985237121582],["▁veidu",-13.73000144958496],["▁kencing",-13.73001194000244],["מאס",-13.730015754699709],["▁zuständig",-13.730031967163086],["крес",-13.730057716369627],["แนวทาง",-13.730060577392578],["▁ተራ",-13.730077743530272],["集成",-13.730084419250488],["plní",-13.730110168457031],["忽然",-13.73011589050293],["ဖြစ်ပြီး",-13.730124473571776],["SUN",-13.730125427246094],["ങ്ങൾക്ക്",-13.730134963989258],["▁305",-13.730147361755373],["GEM",-13.730148315429688],["ڊا",-13.730156898498535],["▁dampak",-13.730157852172852],["ห้องนอน",-13.73017120361328],["crypt",-13.730181694030762],["▁அவர்களுக்கு",-13.730195045471191],["▁народи",-13.730195999145508],["▁만에",-13.730210304260254],["▁raam",-13.73021125793457],["▁богатство",-13.730217933654783],["▁ricevere",-13.730238914489746],["seminar",-13.730241775512695],["Për",-13.73026180267334],["▁ofiar",-13.730274200439451],["képző",-13.730287551879885],["▁poznám",-13.730295181274414],["mhain",-13.73029613494873],["kriisi",-13.730302810668944],["▁കത്ത",-13.73031520843506],["▁yna",-13.730327606201172],["▁szerkezet",-13.730331420898438],["▁jirto",-13.73033332824707],["▁grūti",-13.730360984802246],["▁qodob",-13.73037815093994],["▁Genau",-13.730380058288574],["有机会",-13.730382919311523],["vastus",-13.730396270751951],["▁čuti",-13.730402946472168],["▁అభిప్రాయ",-13.7304105758667],["▁ტურის",-13.730424880981444],["▁аларды",-13.730425834655762],["រាម",-13.73043155670166],["▁መብ",-13.730432510375977],["▁Knut",-13.730440139770
508],["यन्त्र",-13.730454444885254],["bûnê",-13.730464935302734],["▁componentes",-13.730475425720217],["▁kombe",-13.730480194091797],["▁ravintola",-13.730484008789062],["KEM",-13.73048496246338],["▁butikk",-13.730494499206545],["▁lancar",-13.730513572692873],["▁عشرة",-13.730515480041504],["▁šalia",-13.73052978515625],["▁Електрон",-13.730531692504885],["▁vasitələri",-13.730588912963867],["▁إعلان",-13.73060417175293],["đenja",-13.73062229156494],["▁поголем",-13.73062229156494],["▁modelleri",-13.730639457702637],["▁मजा",-13.730674743652344],["▁Diplom",-13.730679512023926],["▁आर्",-13.730682373046877],["ONU",-13.730738639831545],["▁بخار",-13.73074722290039],["▁rapporten",-13.730749130249023],["ічне",-13.73076629638672],["▁буц",-13.730768203735352],["věřit",-13.730780601501465],["▁elaborado",-13.730790138244627],["ກຸ່ມ",-13.730792999267578],["▁pà",-13.730792999267578],["▁incorporar",-13.730804443359377],["ávajú",-13.730809211730955],["syukur",-13.730833053588867],["فران",-13.730856895446776],["મેર",-13.730895042419434],["▁TIC",-13.73090362548828],["സിൽ",-13.73092555999756],["▁najmanje",-13.730927467346191],["തെന്ന",-13.730937957763672],["horf",-13.730944633483888],["▁sieť",-13.73094654083252],["▁rialta",-13.730969429016112],["ยว",-13.730971336364746],["▁tarptautini",-13.730999946594238],["రంగా",-13.731056213378906],["აო",-13.731080055236816],["كتشف",-13.731086730957031],["道具",-13.731108665466309],["▁musíme",-13.73111343383789],["▁Hamas",-13.7311429977417],["▁մարմնի",-13.731145858764648],["ທິດ",-13.731149673461914],["Vor",-13.73115062713623],["▁pagalba",-13.731183052062988],["▁rollen",-13.731183052062988],["▁istehsalı",-13.731196403503418],["▁Srebr",-13.731201171875],["פנים",-13.73121166229248],["ικα",-13.731257438659668],["กุม",-13.731266975402832],["ापासून",-13.731298446655272],["▁curioso",-13.731301307678224],["▁ვერც",-13.73130226135254],["gerðar",-13.73130989074707],["tivamente",-13.73130989074707],["▁ایکس",-13.731310844421388],["rganish",-13.731311798095703],["surat",-13.73131275177002],["สร้างความ",-13.731315612792969],["▁בנק",-13.731318473815918],["▁pire",-13.731319427490234],["▁rëndësi",-13.731350898742676],["arzt",-13.731382369995115],["▁동물",-13.7313871383667],["▁begon",-13.731405258178713],["▁deinem",-13.731415748596191],["▁fuar",-13.731426239013672],["▁spør",-13.731426239013672],["▁descargar",-13.73144245147705],["▁Стандарт",-13.731450080871582],["ຈີ",-13.73148250579834],["kabin",-13.73150634765625],["▁programmu",-13.731508255004885],["សង្គម",-13.731524467468262],["的成绩",-13.731529235839844],["见过",-13.73154354095459],["▁αλλα",-13.73154640197754],["ക്ഷണ",-13.73155689239502],["życie",-13.73157024383545],["ଚିତ",-13.731576919555664],["▁setempat",-13.731596946716309],["arter",-13.731599807739258],["ovanou",-13.73160171508789],["▁improvement",-13.731610298156738],["where",-13.73161506652832],["▁bingo",-13.73162841796875],["োনা",-13.731669425964355],["ičnega",-13.73167610168457],["▁acesteia",-13.73169994354248],["branschen",-13.73171615600586],["▁પાર",-13.731719970703123],["▁serios",-13.73173713684082],["ሥልጣ",-13.731741905212402],["当事人",-13.73176383972168],["▁المناسب",-13.731766700744627],["όνη",-13.731767654418944],["闊",-13.731768608093262],["辽宁",-13.731773376464844],["佢",-13.731779098510742],["▁natao",-13.731791496276855],["菊",-13.731791496276855],["▁маалыматтар",-13.731792449951172],["的风险",-13.7318115234375],["КӨ",-13.731812477111816],["닐",-13.731822967529297],["就是在",-13.731823921203612],["drastrecha",-13.73182487487793],["▁Peyğəmbər",-13.73182487487793],["▁cowgirl",-13.73182487487793],["▁k
ecepatan",-13.73182487487793],["▁rješenja",-13.73182487487793],["▁įmonių",-13.73182487487793],["▁χρήστη",-13.73182487487793],["▁місяць",-13.73182487487793],["▁төсвийн",-13.73182487487793],["▁मिळविण्यासाठी",-13.73182487487793],["▁বৈঠক",-13.73182487487793],["▁ବିଧାୟକ",-13.73182487487793],["▁சாதனை",-13.73182487487793],["▁ზუსტად",-13.73182487487793],["엉",-13.73182487487793],["▁exciting",-13.731825828552246],["▁javascript",-13.731825828552246],["▁լուրեր",-13.731825828552246],["▁ମିଶ୍ର",-13.731825828552246],["▁ఇష్టం",-13.731825828552246],["ฉาก",-13.731826782226562],["▁mokykla",-13.731826782226562],["▁Mawalan",-13.73182773590088],["▁мурда",-13.73182773590088],["▁frumoasa",-13.731828689575195],["▁Şirket",-13.731828689575195],["▁dứt",-13.731829643249512],["▁तार",-13.73183250427246],["▁досягнення",-13.731833457946776],["ฉาย",-13.731834411621094],["ПОР",-13.73183822631836],["▁मुद्रा",-13.73183822631836],["▁जैसा",-13.731842041015623],["椅子",-13.731842041015623],["зло",-13.73184871673584],["▁ਮੇਰੀ",-13.731849670410156],["▁eredmények",-13.731851577758787],["▁આભાર",-13.73185920715332],["▁színész",-13.731860160827637],["▁šećer",-13.731861114501951],["ασμού",-13.73186206817627],["▁kohaselt",-13.731863021850586],["▁savukārt",-13.731863021850586],["▁zsidó",-13.731863021850586],["首席",-13.73186492919922],["▁cognosc",-13.731865882873535],["ukum",-13.731866836547852],["▁thoirt",-13.731871604919434],["รวย",-13.731873512268066],["▁Qurban",-13.731887817382812],["▁жилья",-13.731887817382812],["▁نوزاد",-13.731887817382812],["▁სახელობის",-13.731887817382812],["▁ಚೆ",-13.731889724731444],["蛋白",-13.73189926147461],["▁aelodau",-13.731916427612305],["ไขมัน",-13.731925964355469],["▁అక్క",-13.731926918029783],["▁forever",-13.73193073272705],["▁spricht",-13.731952667236328],["▁അഞ്ച്",-13.731953620910645],["▁ნებისმიერი",-13.731953620910645],["▁φωτογραφία",-13.73195457458496],["▁köpte",-13.731959342956545],["غذية",-13.73196029663086],["▁svetovne",-13.731961250305176],["▁στάση",-13.731961250305176],["ТЭ",-13.731964111328123],["simula",-13.731969833374023],["▁үгүй",-13.731969833374023],["اللہ",-13.731978416442873],["▁dievča",-13.7319974899292],["БК",-13.732003211975098],["▁يستخدم",-13.732010841369627],["မောင်",-13.732011795043944],["▁தவிர",-13.732013702392578],["▁Naravno",-13.732017517089844],["▁කළු",-13.732030868530272],["▁preparazione",-13.73203182220459],["▁الأميركية",-13.73203468322754],["▁مقطع",-13.73205852508545],["▁bituka",-13.73207664489746],["사회복지",-13.732083320617676],["▁реф",-13.732088088989258],["സെ",-13.732097625732422],["▁simplesmente",-13.732097625732422],["所有人",-13.732110023498535],["▁давхар",-13.732112884521484],["ແຊ",-13.732131958007812],["▁আসনে",-13.732139587402344],["▁אוויר",-13.73215389251709],["proksim",-13.73216152191162],["ในเรื่อง",-13.732163429260254],["▁Harris",-13.732163429260254],["▁шума",-13.732185363769531],["▁pateik",-13.732187271118164],["투자",-13.73219394683838],["夫人",-13.73221206665039],["▁растения",-13.73222541809082],["▁עליהם",-13.732229232788086],["▁ചേര",-13.732259750366213],["▁สูตร",-13.732270240783691],["▁검사",-13.732284545898438],["ווייַ",-13.732293128967283],["อภิ",-13.732314109802246],["▁ബീ",-13.73232650756836],["▁அடை",-13.732344627380373],["ΑΤ",-13.732362747192385],["logic",-13.732382774353027],["▁klippe",-13.73240852355957],["▁palma",-13.732414245605469],["тычныя",-13.732422828674316],["▁Macam",-13.732425689697266],["▁Latino",-13.732431411743164],["ციული",-13.73243236541748],["▁රූප",-13.732449531555176],["ανο",-13.732450485229492],["▁हमने",-13.732465744018556],["ိုက္",-13.732489585876465],["▁
együttes",-13.732492446899414],["Kha",-13.732500076293944],["xha",-13.73250961303711],["tengono",-13.732525825500488],["▁Rana",-13.732549667358398],["▁naročil",-13.732565879821776],["▁17.30",-13.732583999633787],["▁podium",-13.732589721679688],["最初の",-13.73259449005127],["▁რომელთა",-13.7326021194458],["▁padang",-13.732617378234863],["лугу",-13.732651710510254],["▁ਰੰਗ",-13.732658386230469],["▁varam",-13.732701301574709],["muştur",-13.732710838317873],["好康",-13.732711791992188],["▁Джу",-13.73273754119873],["▁številka",-13.732751846313477],["基督",-13.732752799987791],["▁genomen",-13.732780456542969],["headh",-13.732784271240234],["ІІ",-13.732789993286133],["▁Salle",-13.732796669006348],["▁puder",-13.732796669006348],["АГА",-13.73281192779541],["▁zwaar",-13.73281192779541],["แตกต่าง",-13.732831954956056],["▁bookmark",-13.732837677001951],["▁qüvvələri",-13.732857704162598],["▁вправе",-13.73285961151123],["▁γραφ",-13.732880592346191],["されていた",-13.732924461364746],["下記",-13.732925415039062],["▁devolv",-13.732950210571287],["▁Rashid",-13.732965469360352],["volution",-13.732975959777832],["Пи",-13.73297882080078],["niemiecki",-13.732993125915527],["големите",-13.73302936553955],["▁pyta",-13.733041763305664],["教会",-13.733044624328612],["ાંક",-13.73304557800293],["▁tuuli",-13.733047485351562],["▁Одним",-13.73305606842041],["ујући",-13.733060836791992],["søge",-13.733068466186523],["▁ჰე",-13.733078956604004],["ั่ว",-13.733084678649902],["해주세요",-13.733084678649902],["வுடன்",-13.733095169067385],["▁набира",-13.73310375213623],["പാദ",-13.733113288879396],["tehnic",-13.733114242553713],["▁thousands",-13.733141899108888],["▁డై",-13.733153343200684],["▁гимнази",-13.733159065246582],["▁ເປີດ",-13.73318576812744],["osťou",-13.733186721801758],["ثقافة",-13.733197212219238],["գե",-13.733200073242188],["ことになる",-13.733214378356934],["大地",-13.733238220214844],["▁6.3",-13.73324966430664],["тааны",-13.733308792114258],["▁Vastu",-13.733320236206056],["לוט",-13.733322143554688],["teori",-13.733325958251951],["zito",-13.733348846435549],["▁земе",-13.733358383178713],["วงการ",-13.733362197875977],["रिज",-13.733366012573242],["▁змага",-13.73338794708252],["masından",-13.733392715454102],["אמת",-13.733404159545898],["▁spēku",-13.733405113220217],["▁χρώμα",-13.733405113220217],["▁flore",-13.733406066894531],["▁umjetnosti",-13.73341178894043],["கொடு",-13.733420372009276],["▁voorwaarden",-13.733428001403809],["▁አማ",-13.733428955078123],["성에",-13.733431816101074],["▁британски",-13.73343276977539],["▁externo",-13.733436584472656],["ಶ್ಚ",-13.733439445495604],["klin",-13.733441352844238],["▁Diario",-13.73344612121582],["ுங்க",-13.733461380004885],["OOL",-13.733475685119627],["▁þinn",-13.733492851257324],["▁վարչապետի",-13.733494758605955],["▁Kushtet",-13.733518600463867],["の写真",-13.733528137207031],["▁підстав",-13.733535766601562],["سماء",-13.733542442321776],["tívny",-13.733552932739258],["▁siguen",-13.733555793762209],["ոչ",-13.73357105255127],["▁милост",-13.733590126037598],["OKU",-13.733593940734863],["ტყუ",-13.73361110687256],["sprak",-13.73362159729004],["▁significativo",-13.73363208770752],["▁ಚಿಕ್ಕ",-13.73365306854248],["▁pipe",-13.733657836914062],["▁дълг",-13.733687400817873],["cool",-13.73371124267578],["лаған",-13.733723640441896],["hlobo",-13.733739852905272],["▁håp",-13.73374080657959],["▁scade",-13.73375129699707],["▁کرنی",-13.733781814575195],["いるの",-13.733800888061523],["கல",-13.733806610107422],["▁destiné",-13.733817100524902],["niekam",-13.733820915222168],["▁cyfan",-13.733823776245115],["▁водител",-13.733830451965
332],["▁ಮಧ್ಯ",-13.733845710754396],["▁Еве",-13.733847618103027],["▁znaczy",-13.73385238647461],["function",-13.733861923217772],["ಚಿತ್ರ",-13.733869552612305],["სვენებ",-13.733875274658203],["▁sektoru",-13.733880043029783],["足以",-13.733884811401367],["▁സ്ഥല",-13.733890533447266],["▁도전",-13.733908653259276],["▁postelj",-13.733922004699709],["われる",-13.733925819396973],["▁parker",-13.733929634094238],["gradnja",-13.733931541442873],["最初に",-13.733931541442873],["▁पाउन",-13.733935356140137],["非常重要",-13.7339506149292],["▁спомена",-13.733959197998049],["▁gelişmeler",-13.733979225158691],["▁económicas",-13.733980178833008],["▁koosta",-13.733980178833008],["ிறார்",-13.73398208618164],["の仕事",-13.734001159667969],["▁fundador",-13.734021186828612],["▁құрылысы",-13.734025955200195],["158",-13.734027862548828],["కున్న",-13.734038352966309],["▁கால்",-13.734042167663574],["▁teritorijā",-13.73404312133789],["▁Sprach",-13.734058380126951],["ρυθμ",-13.734067916870115],["১৮",-13.734071731567385],["hjelp",-13.7340726852417],["▁देणार",-13.73407745361328],["▁רגל",-13.734082221984863],["▁راهی",-13.734102249145508],["▁liburua",-13.734111785888672],["抚",-13.734118461608888],["鈴",-13.734135627746582],["▁часта",-13.734136581420898],["广播",-13.734137535095217],["賺",-13.734145164489746],["裤",-13.734149932861328],["▁Kontak",-13.734150886535645],["▁congela",-13.734150886535645],["副主任",-13.734155654907228],["יאַ",-13.73415756225586],["пачатку",-13.734161376953123],["▁sprzętu",-13.73416805267334],["ຈຶ່ງ",-13.734170913696287],["persoonsgegevens",-13.734171867370604],["▁Haradinaj",-13.734171867370604],["▁absoluut",-13.734171867370604],["▁joprojām",-13.734171867370604],["▁pludselig",-13.734171867370604],["▁pròpia",-13.734171867370604],["▁Điểm",-13.734171867370604],["▁Đất",-13.734171867370604],["▁αμέσως",-13.734171867370604],["▁Гісторыя",-13.734171867370604],["▁Таксама",-13.734171867370604],["▁урьдчилан",-13.734171867370604],["▁نېټه",-13.734171867370604],["▁નજીક",-13.734171867370604],["▁inzibati",-13.734172821044922],["▁kehadiran",-13.734172821044922],["▁подтвержден",-13.734172821044922],["▁כנראה",-13.734172821044922],["▁شیطان",-13.734172821044922],["▁სკოლის",-13.734172821044922],["▁cálculo",-13.734173774719238],["▁tërheq",-13.734173774719238],["▁сусрет",-13.734173774719238],["▁الفندق",-13.734176635742188],["▁teigė",-13.734177589416504],["▁көрсетілген",-13.734177589416504],["▁للحصول",-13.734177589416504],["▁ጥቃት",-13.734179496765137],["▁Άγιο",-13.73418140411377],["▁portfolio",-13.734183311462402],["▁uzskata",-13.734183311462402],["▁melewati",-13.734186172485352],["▁kakšno",-13.734190940856934],["▁diseñado",-13.73419189453125],["▁ΔΕΝ",-13.734195709228516],["▁aktuální",-13.734203338623049],["▁Così",-13.734204292297363],["▁प्रकृति",-13.734204292297363],["ադարձ",-13.734212875366213],["▁Einkauf",-13.734212875366213],["▁Stof",-13.734219551086426],["▁sekadar",-13.73422622680664],["▁pilnīgi",-13.734230041503906],["▁MacBook",-13.734237670898438],["▁Fondunun",-13.73423957824707],["garde",-13.734241485595703],["▁національного",-13.734247207641602],["▁האינטרנט",-13.7342529296875],["▁Shqipërinë",-13.734258651733398],["ಸಭೆ",-13.734262466430664],["▁associated",-13.73426914215088],["▁sorumlu",-13.734274864196776],["▁골프",-13.734275817871094],["tööd",-13.73427963256836],["▁postaw",-13.734292984008787],["▁నటి",-13.73430347442627],["▁fenntartva",-13.734310150146484],["同期",-13.734315872192385],["▁கதைகள்",-13.734319686889648],["▁samorząd",-13.73432159423828],["вшая",-13.734350204467772],["ينية",-13.734350204467772],["專屬",-13.734362602233888],["▁постоја
т",-13.734374046325684],["▁hatása",-13.73438835144043],["▁дзеці",-13.734416961669922],["▁ആയിര",-13.734427452087402],["ασπ",-13.734429359436035],["دافع",-13.734436988830566],["▁Sollte",-13.73444366455078],["सुन",-13.734453201293944],["▁Kili",-13.734457969665527],["▁분명",-13.734468460083008],["▁sonuçları",-13.734469413757324],["▁ließ",-13.73447608947754],["▁pecah",-13.734478950500488],["sendelse",-13.734479904174805],["▁internetā",-13.734479904174805],["水準",-13.73448371887207],["▁izleme",-13.734484672546388],["▁выбран",-13.734504699707031],["▁koppla",-13.734508514404297],["▁суми",-13.73451042175293],["▁utilização",-13.734514236450195],["▁imaš",-13.73452377319336],["▁Ezzel",-13.734532356262209],["▁публикуван",-13.734533309936523],["▁айырма",-13.734549522399902],["øya",-13.734553337097168],["▁ორგანიზაცია",-13.734563827514648],["▁Område",-13.734573364257812],["hön",-13.734578132629396],["▁Lore",-13.734586715698242],["▁amewa",-13.734588623046877],["▁මව",-13.734603881835938],["という事",-13.734606742858888],["aqiiq",-13.734607696533203],["▁ಬಾಸ್",-13.734613418579102],["▁concilia",-13.734622955322266],["▁కథలు",-13.734623908996582],["▁vơ",-13.73463535308838],["▁kaua",-13.734667778015137],["jić",-13.734671592712402],["▁Konsult",-13.734673500061035],["മ്മാ",-13.734688758850098],["▁rəng",-13.734689712524414],["ผลไม้",-13.734695434570312],["▁prowadząc",-13.734695434570312],["▁düşman",-13.734696388244627],["▁tåg",-13.734702110290527],["▁schneller",-13.734704971313477],["▁ಸ್ಕ",-13.734728813171388],["жок",-13.734729766845703],["▁तस्",-13.734774589538574],["▁Reus",-13.734787940979004],["▁jogadores",-13.73480224609375],["▁планы",-13.734838485717772],["तोय",-13.734840393066406],["▁Pahang",-13.734868049621582],["▁పైగా",-13.734880447387695],["तला",-13.73488712310791],["petit",-13.734890937805176],["▁ගන්නවා",-13.734912872314451],["十大",-13.734920501708984],["▁patogi",-13.734926223754885],["čini",-13.734955787658691],["agtig",-13.734959602355955],["的责任",-13.734968185424805],["гъ",-13.734990119934082],["רמה",-13.734992980957031],["▁gravita",-13.735004425048828],["▁básico",-13.735011100769045],["▁Kısa",-13.735017776489258],["▁ktp",-13.735018730163574],["▁arrange",-13.73503875732422],["爆发",-13.73504638671875],["မူး",-13.735051155090332],["阿尔",-13.735058784484863],["ánach",-13.735093116760254],["▁entidad",-13.735122680664062],["manje",-13.73514175415039],["▁ichida",-13.735147476196287],["時間は",-13.735148429870604],["▁pekee",-13.735172271728516],["copy",-13.735183715820312],["▁Kolor",-13.735186576843262],["ຫຼື",-13.73520851135254],["的存在",-13.735211372375488],["▁Klassen",-13.735217094421388],["ेंस",-13.735224723815918],["nüz",-13.735251426696776],["▁رکڻ",-13.735258102416992],["▁foundation",-13.735267639160156],["▁законодав",-13.735272407531738],["▁receptor",-13.735306739807127],["אָפּ",-13.735319137573242],["ಾತ್ಮಕ",-13.735321044921877],["▁beperkt",-13.735322952270508],["▁lehetne",-13.73533821105957],["▁அனுப்ப",-13.735343933105469],["▁הרא",-13.735344886779783],["માર",-13.73535442352295],["▁tickets",-13.735371589660645],["▁безбедности",-13.735384941101074],["▁නගර",-13.73538589477539],["ಬೇಡಿ",-13.735422134399414],["▁Kongreso",-13.73542594909668],["背景下",-13.735458374023438],["غوا",-13.735464096069336],["▁البار",-13.735469818115234],["ദേ",-13.7354736328125],["▁ნიშნ",-13.7354736328125],["ולות",-13.735474586486816],["▁කාන්තාව",-13.735475540161133],["▁बैंकको",-13.735477447509766],["▁kujua",-13.735478401184082],["▁Arvo",-13.735492706298828],["▁Consider",-13.73555850982666],["▁berdin",-13.735601425170898],["▁Регионал",-13.73560619354248
],["▁eliminare",-13.735636711120604],["▁trào",-13.735637664794922],["充足",-13.735644340515137],["▁Panta",-13.73566436767578],["ก็สามารถ",-13.735697746276855],["▁novou",-13.73570156097412],["▁tractament",-13.735735893249512],["▁Shafi",-13.735742568969728],["▁осам",-13.735747337341309],["widz",-13.735773086547852],["▁çıkış",-13.735774993896484],["▁Vendar",-13.73583984375],["ഹു",-13.735848426818848],["▁nikto",-13.73587131500244],["ทั",-13.735872268676758],["րում",-13.735878944396973],["доволен",-13.735879898071287],["▁diệt",-13.735891342163086],["日電",-13.7358980178833],["ვნი",-13.735899925231934],["子供の",-13.73590087890625],["小小",-13.735921859741213],["ljivosti",-13.735943794250488],["▁Conti",-13.73594856262207],["wasili",-13.735957145690918],["מפל",-13.735977172851562],["▁luua",-13.736011505126951],["書類",-13.736032485961914],["átka",-13.736041069030762],["സമിതി",-13.736054420471191],["signatur",-13.736089706420898],["▁studiju",-13.736101150512695],["▁건물",-13.736126899719238],["▁ଫେରି",-13.736136436462402],["చ్చు",-13.736173629760742],["क्रो",-13.736180305480955],["放下",-13.736190795898438],["াক",-13.736204147338867],["▁„",-13.736207962036133],["▁чувствовал",-13.73622226715088],["XVIII",-13.73622703552246],["കാര്യ",-13.736234664916992],["▁ръце",-13.736239433288574],["▁контролира",-13.736260414123535],["▁പ്രകാരം",-13.73626708984375],["▁submit",-13.73627471923828],["▁meldt",-13.73627758026123],["▁fiatalok",-13.736291885375977],["▁korteri",-13.736291885375977],["▁مري",-13.736310958862305],["▁działają",-13.736321449279783],["ర్లు",-13.736322402954102],["attenzione",-13.736327171325684],["ട്ടിയ",-13.736331939697266],["ర్ల",-13.736334800720217],["털",-13.736355781555176],["National",-13.736356735229492],["▁ચાલુ",-13.736360549926758],["▁turime",-13.736371040344238],["tamassa",-13.736391067504885],["▁പഠന",-13.7363920211792],["PIE",-13.73639678955078],["▁complète",-13.736406326293944],["▁кетті",-13.736408233642578],["▁skús",-13.736435890197754],["贯彻落实",-13.736451148986816],["▁vzdeláva",-13.736458778381348],["▁Skú",-13.736464500427246],["▁prossima",-13.736467361450195],["veido",-13.736488342285156],["Слобод",-13.736491203308104],["▁గణ",-13.736495018005373],["▁Loan",-13.736496925354004],["▁сестри",-13.736501693725586],["ēģ",-13.736523628234863],["സ്വാതന്ത്ര്യ",-13.73652458190918],["เทศกาล",-13.73652458190918],["▁хуманитар",-13.73652458190918],["▁విద్యార్థుల",-13.73652458190918],["▁cheveux",-13.736525535583496],["▁complesso",-13.736525535583496],["▁cymuned",-13.736525535583496],["▁egyaránt",-13.736525535583496],["▁oblečení",-13.736525535583496],["▁perlindungan",-13.736525535583496],["▁područje",-13.736525535583496],["▁αναμένεται",-13.736525535583496],["▁ποσοστό",-13.736525535583496],["▁присъства",-13.736525535583496],["▁տղամարդ",-13.736525535583496],["▁היטב",-13.736525535583496],["▁المؤسسات",-13.736525535583496],["▁शारीरिक",-13.736525535583496],["▁પૃષ્ઠ",-13.736525535583496],["▁ஆயிரம்",-13.736525535583496],["▁ದಾಖಲೆ",-13.736525535583496],["▁პოლიტიკა",-13.736525535583496],["▁ታላቅ",-13.736525535583496],["▁leginkább",-13.736526489257812],["▁వెంకట",-13.736526489257812],["▁istimewa",-13.736527442932127],["▁հաղթանակ",-13.736528396606444],["▁كەلتۈر",-13.736528396606444],["▁לפרטים",-13.736529350280762],["lokho",-13.736530303955078],["▁γνώμη",-13.736531257629396],["ვარაუდ",-13.736536026000977],["▁Департамент",-13.736536979675291],["▁Зошто",-13.736538887023926],["▁سرمایہ",-13.736538887023926],["▁nettbutikk",-13.73654079437256],["vaatteet",-13.736541748046877],["▁Roth",-13.736543655395508],["▁pokračuj",-13.736543655395508]
,["▁Алайда",-13.73655128479004],["▁οργάνωση",-13.736553192138672],["▁atsaking",-13.736563682556152],["▁ለማግኘት",-13.736565589904783],["▁cosúil",-13.736581802368164],["▁այլեւ",-13.736581802368164],["färg",-13.736584663391112],["保守",-13.73658561706543],["▁उससे",-13.736587524414062],["थु",-13.736589431762695],["▁ቢሮ",-13.736602783203123],["▁Алексей",-13.736608505249023],["▁اقامت",-13.73660945892334],["▁salata",-13.736610412597656],["▁fwyaf",-13.736611366271973],["giem",-13.736615180969238],["▁зграда",-13.736627578735352],["▁Jokowi",-13.736632347106934],["CCM",-13.73667049407959],["▁άμεση",-13.73667049407959],["▁webmaster",-13.736680030822754],["▁mintegy",-13.736693382263184],["▁Bästa",-13.736703872680664],["▁geïn",-13.736711502075195],["ерот",-13.736716270446776],["▁ग्रहण",-13.736723899841309],["▁umożliwia",-13.736724853515623],["typer",-13.736729621887209],["▁frío",-13.736739158630373],["▁inzicht",-13.736745834350586],["▁dringend",-13.736749649047852],["การแสดง",-13.736750602722168],["▁axtarış",-13.736753463745115],["どこか",-13.736773490905762],["▁fremover",-13.736775398254396],["เนื่องจาก",-13.736776351928713],["▁években",-13.736785888671877],["▁numeroase",-13.736809730529783],["ARTA",-13.736811637878418],["▁éco",-13.736811637878418],["▁무대",-13.736811637878418],["ՀԱ",-13.736815452575684],["▁secreto",-13.73681926727295],["▁attól",-13.736834526062012],["▁ክብር",-13.736841201782228],["▁Andres",-13.736856460571287],["५६",-13.736859321594238],["▁suporta",-13.736863136291504],["schoenen",-13.736870765686035],["▁wrap",-13.736883163452148],["▁kushiriki",-13.73689079284668],["▁generally",-13.73692512512207],["▁লীগ",-13.736927032470703],["IŲ",-13.7369384765625],["▁behandlingen",-13.736956596374512],["▁2017/18",-13.73696517944336],["▁secunda",-13.73698616027832],["▁медиумите",-13.7369966506958],["ყავს",-13.737001419067385],["ruba",-13.7370023727417],["బాబు",-13.737003326416016],["ropol",-13.737008094787598],["▁failure",-13.737016677856444],["▁түрк",-13.737041473388672],["▁Rojava",-13.737055778503418],["చర",-13.737079620361328],["▁намет",-13.737089157104492],["▁Uang",-13.737091064453123],["▁నగర్",-13.737103462219238],["gwyl",-13.737104415893556],["niške",-13.737109184265137],["▁maximale",-13.737114906311035],["후기",-13.7371244430542],["clan",-13.737125396728516],["▁Baka",-13.737126350402832],["▁꾸",-13.73713493347168],["കരമായ",-13.737162590026855],["▁brillante",-13.737170219421388],["Now",-13.737211227416992],["▁iniciat",-13.73721694946289],["îkî",-13.737218856811523],["▁Леон",-13.737218856811523],["▁acteurs",-13.737220764160156],["▁uitgebreide",-13.737225532531738],["▁rääkis",-13.737228393554688],["▁siltum",-13.737239837646484],["▁читать",-13.7372407913208],["لىرىغا",-13.737242698669434],["▁хэлэх",-13.737244606018066],["ໃນປີ",-13.7372465133667],["တဲ့အခါ",-13.737300872802734],["▁скарг",-13.737309455871582],["▁situace",-13.737318992614746],["সির",-13.737319946289062],["▁സാഹിത്യ",-13.737332344055176],["भाषा",-13.737339973449709],["▁իրավա",-13.737369537353516],["sziget",-13.737371444702148],["▁redakci",-13.73737907409668],["кулов",-13.737393379211426],["▁ଭାରତର",-13.73740291595459],["▁влак",-13.737408638000488],["▁ನಗರದ",-13.73741054534912],["jušies",-13.737411499023438],["전을",-13.737414360046388],["િટી",-13.73743724822998],["毎",-13.737441062927246],["ІЯ",-13.737443923950195],["▁minulý",-13.73748016357422],["▁Ultimate",-13.737488746643066],["▁wiil",-13.737497329711914],["روج",-13.73751163482666],["▁dhax",-13.737512588500977],["▁свима",-13.737512588500977],["▁खरे",-13.737519264221191],["▁улици",-13.737526893615724],["
▁existir",-13.737557411193848],["▁establish",-13.737570762634276],["▁Arbeids",-13.737576484680176],["▁elección",-13.737597465515137],["မတ်",-13.737625122070312],["▁Artık",-13.737668991088867],["▁partits",-13.73768138885498],["▁келсе",-13.737689018249512],["▁výška",-13.737690925598145],["三人",-13.737699508666992],["▁একই",-13.737703323364258],["▁تمت",-13.73772144317627],["timer",-13.737722396850586],["angebot",-13.737723350524902],["▁эсэх",-13.737746238708496],["▁크기",-13.737751960754396],["台中市",-13.737754821777344],["▁წავი",-13.73776149749756],["▁continuă",-13.737773895263672],["reći",-13.737784385681152],["tsaina",-13.737784385681152],["▁gminy",-13.737808227539062],["▁орындары",-13.737832069396973],["▁സംസ്",-13.737850189208984],["structure",-13.737868309020996],["▁Inner",-13.737900733947754],["ર્ષ",-13.73791790008545],["合适的",-13.737937927246094],["▁muutus",-13.73793888092041],["YAK",-13.737969398498535],["失去了",-13.737974166870115],["స్తున్నాయి",-13.737994194030762],["▁arau",-13.73799991607666],["▁investimentos",-13.738021850585938],["▁gaga",-13.738024711608888],["فقد",-13.738027572631836],["▁जाण",-13.738027572631836],["▁harrasta",-13.738032341003418],["جها",-13.73803424835205],["▁колдоо",-13.738075256347656],["▁αργ",-13.738077163696287],["背後",-13.738080978393556],["▁digitáln",-13.738093376159668],["מתח",-13.738096237182615],["▁தென்",-13.738107681274414],["ումից",-13.738157272338867],["▁prevenire",-13.738178253173828],["▁գազ",-13.73818016052246],["▁vydal",-13.738198280334473],["처리",-13.738224983215332],["KTI",-13.73823070526123],["▁Hinweis",-13.738255500793455],["▁територија",-13.738255500793455],["▁되었",-13.738262176513672],["▁эсте",-13.73826789855957],["uzet",-13.738317489624023],["▁पार्टीको",-13.738344192504885],["▁шк",-13.73834991455078],["ଡେ",-13.738350868225098],["▁шаруашылық",-13.738363265991213],["▁សមាជិក",-13.738364219665527],["عاقب",-13.738370895385742],["済",-13.738381385803224],["▁20-25",-13.738429069519045],["വില",-13.738432884216309],["▁Germanio",-13.738449096679688],["▁davlatlar",-13.738465309143066],["▁tudta",-13.738466262817385],["▁Terrasse",-13.738471031188965],["▁ψυχή",-13.738476753234863],["npr",-13.738479614257812],["少数",-13.738483428955078],["තක්",-13.738492965698242],["istys",-13.738511085510254],["megy",-13.738518714904783],["ഷാ",-13.738525390625],["▁Kras",-13.738554000854492],["▁promete",-13.738563537597656],["▁smink",-13.738567352294922],["ऑ",-13.738574981689451],["ώματος",-13.738584518432615],["▁prius",-13.73858642578125],["ေဖာ္",-13.738590240478516],["▁exclusivo",-13.738591194152832],["passa",-13.73859405517578],["▁የቤተ",-13.73861026763916],["អស់",-13.738639831542969],["▁işleri",-13.73865032196045],["▁утга",-13.738662719726562],["НОГО",-13.73866844177246],["nämnd",-13.738672256469728],["▁қи",-13.738676071166992],["▁jerawat",-13.738690376281738],["▁केलं",-13.73869514465332],["▁రాష్ట్రంలో",-13.73870849609375],["▁vagyon",-13.738710403442385],["ေလးကို",-13.738717079162598],["▁असल्या",-13.738718032836914],["အိမ်",-13.73872184753418],["ткен",-13.738730430603027],["▁അന്വേഷണ",-13.738759994506836],["սպառ",-13.738771438598633],["▁വിശ്വ",-13.738789558410645],["氣氛",-13.73879623413086],["▁دستگیر",-13.73880100250244],["▁vesper",-13.73880386352539],["LJU",-13.73880672454834],["酥",-13.738819122314451],["▁FILM",-13.738823890686035],["▁vähi",-13.738824844360352],["capa",-13.738840103149414],["興奮",-13.738845825195312],["▁diweddar",-13.738853454589844],["ύσει",-13.73886013031006],["▁Вера",-13.738862037658691],["浪费",-13.738863945007324],["濱",-13.73886489868164],["レッスン",-13.738872528076172]
,["쁘",-13.738876342773438],["温泉",-13.73887825012207],["ປ່ຽນ",-13.738880157470703],["влечь",-13.738883972167969],["▁Mitsubishi",-13.738883972167969],["▁conférence",-13.738883972167969],["▁cẩn",-13.738883972167969],["▁dipublikasikan",-13.738883972167969],["▁pemahaman",-13.738883972167969],["▁wachezaji",-13.738883972167969],["▁xildhibaan",-13.738883972167969],["▁Мысалы",-13.738883972167969],["▁коришћење",-13.738883972167969],["▁цэцэрлэг",-13.738883972167969],["▁Լեւոն",-13.738883972167969],["▁יעצט",-13.738883972167969],["▁सेटिंग्ज",-13.738883972167969],["▁পঠিত",-13.738883972167969],["▁মিনিট",-13.738883972167969],["▁ତିଆରି",-13.738883972167969],["▁ಇದೀಗ",-13.738883972167969],["▁ರಾಹುಲ್",-13.738883972167969],["▁ხელისუფლების",-13.738883972167969],["▁ሚኒስቴር",-13.738883972167969],["▁sudėting",-13.738885879516602],["▁Украіны",-13.738885879516602],["▁ندارم",-13.738885879516602],["▁걱정",-13.738885879516602],["▁ล้านบาท",-13.738886833190918],["幫忙",-13.738886833190918],["▁Stáit",-13.73888874053955],["▁predpoklad",-13.73888874053955],["▁эффективны",-13.73888874053955],["▁یونین",-13.73888874053955],["▁psychiatr",-13.738890647888184],["▁zajęć",-13.738890647888184],["▁እጅ",-13.738890647888184],["▁ధర్మ",-13.7388916015625],["budowę",-13.738892555236816],["▁Fico",-13.73889446258545],["▁смогут",-13.73889446258545],["▁ባለፈው",-13.73889446258545],["▁apvieno",-13.738901138305664],["▁amatöör",-13.73890209197998],["▁διαθέτει",-13.738903045654297],["▁атындагы",-13.738903045654297],["▁kompetenci",-13.73890495300293],["豪華",-13.73890495300293],["▁მოგვ",-13.738916397094728],["▁кесип",-13.738917350769045],["▁செயல",-13.738919258117676],["▁Kõige",-13.738920211791992],["▁leaving",-13.738922119140623],["▁అధికారులు",-13.73892307281494],["▁réduction",-13.73892879486084],["▁żadn",-13.73892879486084],["工程师",-13.73892879486084],["▁영업",-13.738930702209473],["空港",-13.738931655883787],["speicher",-13.738943099975586],["อยากให้",-13.738945960998535],["▁والتعليم",-13.73895263671875],["美术",-13.73895263671875],["სხვა",-13.738953590393066],["▁ਕਦੇ",-13.738953590393066],["ANC",-13.738954544067385],["▁Teknisk",-13.7389554977417],["▁связанные",-13.738962173461914],["හන්",-13.738965034484863],["▁ugovora",-13.738966941833496],["▁piedzīvo",-13.738967895507812],["▁gertatu",-13.738973617553713],["ລໍາ",-13.738978385925291],["▁membina",-13.73897933959961],["्थे",-13.738981246948242],["ಲಾಗುವುದು",-13.738984107971191],["ความงาม",-13.738995552062988],["४७",-13.739001274108888],["tərəfli",-13.739002227783203],["कांत",-13.739002227783203],["▁правни",-13.739002227783203],["▁dhulka",-13.739007949829102],["▁балкон",-13.739009857177734],["▁جملہ",-13.739015579223633],["లైన",-13.73902702331543],["ühendus",-13.739028930664062],["▁المجموعة",-13.739028930664062],["▁ചോ",-13.73904800415039],["▁Olsen",-13.739049911499023],["▁øyne",-13.739049911499023],["▁helft",-13.73905086517334],["▁талаарх",-13.739052772521973],["pięt",-13.739058494567873],["овской",-13.73906135559082],["møbler",-13.739072799682615],["▁congrega",-13.739073753356934],["▁karşılık",-13.739076614379885],["▁zgjedhjet",-13.73908519744873],["▁besteak",-13.739105224609377],["ቀረቡ",-13.739117622375488],["çant",-13.73911952972412],["raten",-13.739121437072754],["▁erhältlich",-13.739126205444336],["▁일단",-13.739128112792969],["▁લોકોને",-13.739133834838867],["ljud",-13.739144325256348],["▁ಬಗೆ",-13.73914909362793],["▁Vorteile",-13.739151000976562],["administration",-13.73917293548584],["שיעור",-13.739179611206056],["ంటో",-13.739179611206056],["▁тартыл",-13.739181518554688],["▁şirkətlər",-13.73918628692627],["gelegen",-13.7
39188194274902],["▁تظهر",-13.7391939163208],["▁مواجهة",-13.739200592041016],["မြန္",-13.739225387573242],["ψεις",-13.739227294921877],["▁gidiyor",-13.739230155944824],["▁Studios",-13.739239692687988],["▁борби",-13.73924732208252],["▁ನಾಲ್ಕು",-13.739252090454102],["▁ငါး",-13.73927402496338],["ticamente",-13.73929214477539],["ibhe",-13.739293098449709],["▁славу",-13.739294052124023],["▁modèles",-13.739306449890137],["▁propostes",-13.739312171936035],["知っている",-13.739317893981934],["▁Train",-13.739322662353516],["spieva",-13.739327430725098],["▁décor",-13.739354133605955],["▁зустрічі",-13.739385604858398],["▁важи",-13.739389419555664],["avantage",-13.739392280578612],["次会议",-13.73941421508789],["▁ανακ",-13.739422798156738],["zım",-13.7394437789917],["▁besøge",-13.739453315734863],["ความสามารถ",-13.739468574523926],["▁Staten",-13.739471435546877],["▁šiek",-13.739490509033203],["▁توڑ",-13.739492416381836],["▁włącz",-13.739494323730469],["जिक",-13.73950481414795],["▁Ministra",-13.739511489868164],["▁kolikor",-13.739511489868164],["▁талқыла",-13.739513397216797],["cháin",-13.739520072937012],["▁ಹೇಳಿದ",-13.739523887634276],["▁зауваж",-13.739566802978516],["▁ekspedi",-13.739575386047363],["▁өмүр",-13.73958969116211],["ৃত",-13.739599227905272],["дају",-13.73960781097412],["▁niihin",-13.739618301391602],["▁kevät",-13.739620208740234],["▁차이",-13.739638328552246],["▁großes",-13.739649772644045],["▁benefic",-13.739654541015623],["▁investigador",-13.739678382873535],["ዚሁ",-13.739706039428713],["▁ecran",-13.73974895477295],["ampak",-13.739749908447266],["▁Сталин",-13.739753723144531],["કટ",-13.739792823791504],["วิชาการ",-13.739797592163086],["ក្រុង",-13.739797592163086],["AMPA",-13.739810943603516],["▁comprobar",-13.739839553833008],["ଧନ",-13.739845275878906],["▁Kutoka",-13.739858627319336],["▁kamati",-13.739861488342283],["Gold",-13.739886283874512],["▁parinti",-13.739892959594728],["Bot",-13.739897727966309],["оном",-13.739927291870115],["▁маалым",-13.739933013916016],["▁지구",-13.739957809448242],["ολόγιο",-13.739968299865724],["▁refr",-13.73996925354004],["▁odhod",-13.73997402191162],["ರೂಪ",-13.739974975585938],["trašanās",-13.739975929260254],["වර්ධන",-13.739977836608888],["वटा",-13.73999309539795],["meester",-13.740002632141112],["▁ಕಣ್ಣ",-13.74001693725586],["ණිය",-13.740025520324709],["▁Capo",-13.740032196044922],["▁descobri",-13.740045547485352],["様な",-13.740066528320312],["▁روزها",-13.740072250366213],["▁centrala",-13.74007797241211],["ଭୋ",-13.74009132385254],["▁ආදරේ",-13.740107536315918],["цки",-13.74012565612793],["डु",-13.740138053894045],["▁niezbędn",-13.740205764770508],["TIKA",-13.740230560302734],["észeti",-13.740241050720217],["▁Pazo",-13.740241050720217],["ෂ්ඨ",-13.740263938903809],["laştırma",-13.740267753601074],["kově",-13.740273475646973],["▁izvir",-13.740280151367188],["▁мәні",-13.74028491973877],["▁kooxda",-13.740288734436035],["▁spíš",-13.740291595458984],["krati",-13.740302085876465],["ەش",-13.740309715270996],["▁ድም",-13.740346908569336],["مراقبة",-13.740349769592283],["ówek",-13.740354537963867],["慣れ",-13.7403564453125],["▁الجب",-13.74036693572998],["donis",-13.740371704101562],["คลิก",-13.740371704101562],["▁ፍላጎት",-13.74038314819336],["շե",-13.740413665771484],["▁302",-13.740422248840332],["▁noroc",-13.740434646606444],["▁skada",-13.740440368652344],["▁rindu",-13.74044704437256],["▁גרויס",-13.74044704437256],["まって",-13.74044704437256],["^^;",-13.74047565460205],["▁Tribun",-13.74049949645996],["▁nekik",-13.74050521850586],["▁ღია",-13.740537643432615],["▁tuotteita",-13.7405424118042]
,["本网",-13.74056339263916],["让自己",-13.740564346313477],["▁využití",-13.740568161010742],["łbym",-13.74058437347412],["ਸਾਰ",-13.74061107635498],["▁главе",-13.740614891052246],["öpö",-13.740674018859863],["▁ہندو",-13.740694999694824],["Data",-13.740700721740724],["YER",-13.740720748901367],["▁septem",-13.740726470947266],["▁oblika",-13.740755081176758],["▁Ministru",-13.74077033996582],["▁ئۇيغۇرلار",-13.740775108337402],["▁ngrit",-13.740815162658691],["দ্দ",-13.740818977355955],["年起",-13.74082088470459],["的影響",-13.74082851409912],["устойчив",-13.740837097167969],["چې",-13.74089241027832],["▁ଆବଶ୍ୟକ",-13.740897178649902],["▁чоловіка",-13.740900993347168],["▁erstellen",-13.740911483764648],["▁сабра",-13.740911483764648],["PAL",-13.740918159484863],["▁станц",-13.740918159484863],["yaq",-13.740934371948242],["▁Дон",-13.740946769714355],["▁lucky",-13.740954399108888],["152",-13.740960121154783],["▁бүхэн",-13.740962028503418],["的可能性",-13.740973472595217],["fíci",-13.740988731384276],["▁Gigi",-13.741007804870604],["களுக்கும்",-13.741034507751465],["VOL",-13.741060256958008],["päivän",-13.741064071655272],["▁مخه",-13.741076469421388],["▁തള്ളി",-13.741116523742676],["verksamhet",-13.741125106811523],["ភ្នំពេញ",-13.74112606048584],["udvikling",-13.741130828857422],["▁αρκετ",-13.741155624389648],["▁விஷய",-13.741164207458496],["▁stagn",-13.741167068481444],["▁odhad",-13.74118423461914],["中小企业",-13.74118995666504],["▁වැඩසටහන",-13.741222381591797],["衫",-13.741223335266112],["戦争",-13.74123191833496],["艦",-13.741232872009276],["▁нарийн",-13.74123764038086],["巢",-13.741240501403809],["いろんな",-13.74124240875244],["▁မျှဝေ",-13.74124526977539],["▁ευρωπα",-13.741246223449709],["ေပ်ာ္",-13.741247177124023],["▁സുഹൃത്ത",-13.741247177124023],["윈",-13.741247177124023],["▁Trọng",-13.74124813079834],["▁dhèanamh",-13.74124813079834],["▁giỏi",-13.74124813079834],["▁memperbaiki",-13.74124813079834],["▁mlynedd",-13.74124813079834],["▁podjetju",-13.74124813079834],["▁srpnja",-13.74124813079834],["▁şübhə",-13.74124813079834],["▁Нұрсұлтан",-13.74124813079834],["▁Տիգրան",-13.74124813079834],["▁համաշխարհային",-13.74124813079834],["▁כאמור",-13.74124813079834],["▁अप्रैल",-13.74124813079834],["▁મુંબઈ",-13.74124813079834],["▁ಬ್ರೇಕಿಂಗ್",-13.74124813079834],["▁උත්තර",-13.741249084472656],["▁kewangan",-13.741250038146973],["▁Onneksi",-13.741250991821287],["▁mỏi",-13.741250991821287],["▁Zealand",-13.741252899169922],["ተካሄደ",-13.741253852844238],["▁যখন",-13.741253852844238],["▁ainoastaan",-13.741254806518556],["▁μέλος",-13.741254806518556],["ေဟာင္း",-13.741255760192873],["ေမြး",-13.741257667541504],["▁évfolyam",-13.741257667541504],["▁अर्जुन",-13.741257667541504],["▁Mieszka",-13.74126434326172],["효율",-13.74126434326172],["निमित्त",-13.7412691116333],["ဆိုင္ရာ",-13.741270065307615],["預防",-13.741275787353516],["▁کلتور",-13.741277694702148],["受益",-13.741278648376465],["▁Labarai",-13.74127960205078],["เนอร์",-13.741281509399414],["▁kokonaan",-13.74128246307373],["תקציב",-13.741287231445312],["▁povezave",-13.741297721862791],["spôsob",-13.74130153656006],["福音",-13.741303443908691],["▁എഴുതി",-13.741308212280272],["pinta",-13.741315841674805],["▁tudják",-13.741318702697754],["▁consilier",-13.741320610046388],["应急",-13.741329193115234],["▁Swan",-13.741336822509766],["▁అప్పుడు",-13.741336822509766],["▁мільйонів",-13.741338729858398],["▁ponudnik",-13.741339683532717],["▁coca",-13.741341590881348],["▁المعروف",-13.741341590881348],["õpe",-13.741342544555664],["▁Семей",-13.741345405578612],["▁Müller",-13.74134635925293],["▁છેલ્લા",-13.74134635925293],["
▁þróun",-13.741347312927246],["▁ചിത്രങ്ങള്",-13.741351127624512],["мэн",-13.741353034973145],["▁სრული",-13.741353034973145],["▁xwendin",-13.741354942321776],["▁Државн",-13.74135684967041],["▁tartozik",-13.741358757019045],["▁uzyskać",-13.74135971069336],["▁سيارة",-13.741363525390623],["▁situações",-13.74136734008789],["▁ràpid",-13.741368293762209],["▁سلب",-13.741377830505373],["▁Stanley",-13.741378784179688],["▁přijde",-13.741385459899902],["▁udsigt",-13.741385459899902],["▁чине",-13.74138641357422],["баланс",-13.741395950317385],["▁عادی",-13.741412162780762],["▁артық",-13.741418838500977],["▁Экс",-13.741419792175291],["Ես",-13.741422653198242],["४४",-13.741426467895508],["ենա",-13.74143409729004],["▁Pilih",-13.74143409729004],["மல்",-13.74143886566162],["▁Всё",-13.741443634033203],["▁preprosto",-13.741446495056152],["▁háttér",-13.741454124450684],["ször",-13.741458892822266],["▁أثر",-13.741463661193848],["▁важен",-13.741469383239746],["Membuka",-13.741472244262695],["mitasi",-13.741472244262695],["▁narrativa",-13.741483688354492],["▁skupnosti",-13.74148941040039],["▁семейството",-13.741498947143556],["▁ದೇಹ",-13.741517066955566],["▁відповідає",-13.74152660369873],["息子",-13.74152946472168],["▁Հո",-13.741532325744627],["істів",-13.741549491882324],["施行",-13.741552352905272],["▁माजी",-13.741555213928224],["▁Үнэ",-13.741567611694336],["▁дізна",-13.741569519042969],["▁странке",-13.741578102111816],["▁താരം",-13.741584777832031],["▁nitong",-13.74158763885498],["ডে",-13.741600036621094],["▁веку",-13.74160861968994],["▁избра",-13.741613388061523],["▁дээш",-13.741618156433104],["▁Flott",-13.74162769317627],["▁zachowa",-13.74163055419922],["ريك",-13.741631507873535],["քների",-13.741650581359863],["▁문제를",-13.741652488708496],["▁କଣ",-13.741654396057127],["▁practical",-13.741665840148926],["▁মধ্য",-13.741670608520508],["ଷ୍ଠ",-13.741682052612305],["▁potuto",-13.74169921875],["▁entende",-13.741714477539062],["▁secretario",-13.74173069000244],["首歌",-13.74173641204834],["▁yrityksen",-13.741744041442873],["▁contributi",-13.74174976348877],["▁Jawatan",-13.74177074432373],["▁мужчины",-13.741776466369627],["▁Kreuz",-13.741783142089844],["▁словно",-13.741783142089844],["▁सोध",-13.741791725158691],["▁faktum",-13.74183750152588],["白天",-13.74183750152588],["▁Jerry",-13.74184513092041],["▁delvis",-13.741861343383787],["គ្រោះថ្នាក់",-13.741865158081056],["▁پاسي",-13.74186897277832],["zijde",-13.7418851852417],["όμενα",-13.741886138916016],["产生的",-13.741888999938965],["▁پنجم",-13.74189281463623],["▁njegovim",-13.74189567565918],["▁проходить",-13.741897583007812],["▁Dini",-13.741899490356444],["▁борбата",-13.741903305053713],["يانو",-13.741910934448242],["▁foyda",-13.741910934448242],["▁Wunder",-13.74192237854004],["▁једини",-13.741925239562988],["▁दवा",-13.741935729980469],["itzada",-13.741963386535645],["▁વર્ષે",-13.741963386535645],["▁बदलाव",-13.741968154907228],["๊ด",-13.741991996765137],["▁ardhmen",-13.742002487182615],["aquell",-13.742005348205566],["ציר",-13.74201488494873],["crat",-13.742019653320312],["▁الدراسي",-13.742029190063477],["▁ludo",-13.742056846618652],["▁mashina",-13.742059707641602],["▁метров",-13.742074966430664],["טבח",-13.742093086242676],["▁образованието",-13.742106437683104],["▁aðgang",-13.742110252380373],["▁tehnologija",-13.742111206054688],["Энх",-13.742117881774902],["ถือเป็น",-13.74211883544922],["关注的",-13.742125511169434],["Nar",-13.742149353027344],["▁دومین",-13.742151260375977],["▁ਭਾ",-13.742159843444824],["▁láz",-13.742164611816406],["เจีย",-13.742165565490724],["快捷",-13.74217033386230
5],["த்தையும்",-13.742175102233888],["vrh",-13.742193222045898],["▁sprejet",-13.742195129394531],["▁яаж",-13.742201805114746],["أسف",-13.742242813110352],["▁irakurri",-13.742262840270996],["פסיק",-13.742267608642578],["▁gören",-13.742274284362791],["นุ่ม",-13.742281913757324],["Ту",-13.742308616638184],["द्वार",-13.742313385009766],["▁aplinkos",-13.742329597473145],["▁revizi",-13.742379188537598],["▁ngộ",-13.742386817932127],["kiej",-13.742388725280762],["▁verdik",-13.74242115020752],["▁Shugaba",-13.742425918579102],["▁creato",-13.742441177368164],["▁2.500",-13.74244785308838],["స్వ",-13.74245834350586],["▁perfor",-13.742473602294922],["▁урбан",-13.742490768432615],["▁Democrat",-13.742494583129885],["戶外",-13.742506980895996],["LÍ",-13.74252223968506],["▁ulicy",-13.742527961730955],["؟؟؟؟",-13.742534637451172],["▁produktų",-13.742579460144045],["▁twin",-13.742637634277344],["ପର",-13.742668151855469],["▁почали",-13.742669105529783],["▁herkesin",-13.742670059204102],["msebenzi",-13.742671966552734],["▁kommunist",-13.742671966552734],["日晚",-13.742676734924316],["▁Ultima",-13.742721557617188],["▁программе",-13.742746353149414],["▁dihat",-13.742751121520996],["▁Adnan",-13.742756843566896],["▁jobben",-13.742762565612791],["▁Calvin",-13.74277114868164],["实习",-13.742782592773438],["▁агуу",-13.74281120300293],["▁signature",-13.742828369140623],["▁qartulad",-13.742842674255373],["▁2558",-13.74286651611328],["owałem",-13.74290657043457],["ጠል",-13.742911338806152],["▁tevreden",-13.74296760559082],["▁విద్య",-13.74297046661377],["▁կինո",-13.742982864379885],["▁روسی",-13.742993354797363],["obchod",-13.742997169494627],["▁Khal",-13.742999076843262],["▁mesmas",-13.743041038513184],["▁ապահով",-13.74305820465088],["కై",-13.743082046508787],["▁broja",-13.743108749389648],["West",-13.743123054504396],["byśmy",-13.743131637573242],["▁обеспечив",-13.743165016174316],["▁nechal",-13.74317455291748],["အပ်",-13.743186950683594],["▁Bright",-13.74321460723877],["▁preoccupa",-13.743227005004885],["▁ນຶ່ງ",-13.743228912353516],["▁Lahat",-13.743239402770996],["ొచ్చు",-13.74326229095459],["▁Faro",-13.743274688720703],["▁narių",-13.74328899383545],["kendelse",-13.743293762207031],["مناسب",-13.743295669555664],["▁wert",-13.743304252624512],["▁Açık",-13.743335723876951],["ട്ടം",-13.743374824523926],["Ề",-13.743388175964355],["шёл",-13.743393898010254],["▁motiver",-13.743400573730469],["▁врски",-13.74344539642334],["▁risparmi",-13.74354076385498],["ভিডিও",-13.743542671203612],["ផ្តល់",-13.743549346923828],["密码",-13.743553161621094],["ËS",-13.743555068969728],["桿",-13.743559837341309],["扰",-13.743560791015623],["▁чара",-13.743578910827637],["锅",-13.743579864501951],["▁hinzu",-13.743590354919434],["終わり",-13.743603706359863],["靴",-13.74360466003418],["タイミング",-13.743606567382812],["▁распаўсюд",-13.743616104125977],["٪",-13.743617057800291],["▁Eŭropo",-13.743617057800291],["▁Spørsmål",-13.743617057800291],["▁Zwischen",-13.743617057800291],["▁memudahkan",-13.743617057800291],["▁mõeldud",-13.743617057800291],["▁rugsėjo",-13.743617057800291],["▁بسرعة",-13.743617057800291],["▁अवार्ड",-13.743617057800291],["▁पहिचान",-13.743617057800291],["▁फॅमिली",-13.743617057800291],["▁স্বামী",-13.743617057800291],["▁இயற்கை",-13.743617057800291],["▁Lincoln",-13.74361801147461],["▁колледж",-13.74361801147461],["▁режисер",-13.74361801147461],["▁തവണ",-13.74361801147461],["▁නුවර",-13.74361801147461],["▁მმართველ",-13.74361801147461],["▁Avbryt",-13.743618965148926],["▁sannsynlig",-13.743618965148926],["▁ਖਿਲਾਫ",-13.743618965148926],["ເດັກ",-13.74362087249756],
["歪",-13.74362087249756],["▁kırmızı",-13.743621826171877],["▁شریعت",-13.743621826171877],["▁ስሜት",-13.743621826171877],["▁društvu",-13.743623733520508],["▁tegishli",-13.743623733520508],["▁మరిన్ని",-13.743624687194824],["▁doprinos",-13.74362564086914],["▁जहाज",-13.74362564086914],["▁երկրում",-13.743627548217772],["▁مذمت",-13.743627548217772],["သတင်း",-13.743629455566406],["▁ചെലവ",-13.743629455566406],["▁falou",-13.743630409240724],["▁بازداشت",-13.743633270263672],["▁ярьж",-13.743634223937988],["▁оказалось",-13.74363613128662],["▁İdman",-13.74363899230957],["▁საკითხი",-13.743640899658203],["▁çarp",-13.74364185333252],["▁혼자",-13.743644714355469],["▁belgilangan",-13.74364948272705],["▁전혀",-13.743650436401367],["▁ბათუმი",-13.743651390075684],["▁protams",-13.74365520477295],["▁చేయడానికి",-13.743659973144531],["▁mobiliza",-13.743660926818848],["สงบ",-13.743664741516112],["回归",-13.74366569519043],["▁ବିରୋଧରେ",-13.743667602539062],["▁markmið",-13.743670463562012],["▁Mwenyezi",-13.743671417236328],["▁gwefan",-13.743671417236328],["▁foreldra",-13.74367332458496],["▁książka",-13.74367904663086],["▁баштаган",-13.743680000305176],["▁თავიდან",-13.743682861328123],["หัวหน้า",-13.74368381500244],["▁tasdiq",-13.74368381500244],["ကၠ",-13.743692398071287],["▁완전",-13.743692398071287],["▁remercie",-13.743700981140137],["żół",-13.743701934814451],["▁აზრით",-13.743701934814451],["▁Ավելի",-13.74371337890625],["весни",-13.743727684020996],["▁hazırlanan",-13.743727684020996],["▁valoarea",-13.743727684020996],["▁படு",-13.74374771118164],["▁ఆప",-13.743751525878906],["▁crimes",-13.7437744140625],["▁ավտո",-13.743779182434082],["▁teises",-13.743786811828612],["▁හිටියා",-13.74378776550293],["tengeneza",-13.743793487548828],["▁кооператив",-13.743793487548828],["libro",-13.74379825592041],["▁Δημοκρατίας",-13.743812561035156],["▁догоди",-13.743825912475586],["▁সুযোগ",-13.743825912475586],["▁چىقىر",-13.743828773498535],["▁aktiivi",-13.743829727172852],["▁келин",-13.74384307861328],["887",-13.743844985961914],["▁Hawaii",-13.743847846984863],["▁వారిని",-13.743847846984863],["ακριβ",-13.743849754333496],["有望",-13.743849754333496],["бын",-13.74388027191162],["▁producte",-13.743881225585938],["▁مترجم",-13.743881225585938],["బుల్",-13.743882179260254],["תוכנית",-13.74388599395752],["▁demandas",-13.743901252746582],["▁säkerhets",-13.743903160095217],["▁სისხლის",-13.743908882141112],["stykke",-13.743918418884276],["▁tækni",-13.74392032623291],["tojmë",-13.74392795562744],["▁суроо",-13.743938446044922],["▁empregado",-13.74394416809082],["microsoft",-13.7439546585083],["▁teniendo",-13.743975639343262],["fattning",-13.743980407714844],["▁Paw",-13.74399757385254],["dock",-13.744012832641602],["متع",-13.74401569366455],["▁wypadku",-13.744016647338867],["▁выплаты",-13.744029998779297],["▁читател",-13.74403953552246],["▁mực",-13.744048118591309],["ਉਣ",-13.744049072265623],["▁paremini",-13.744049072265623],["実現",-13.744070053100586],["▁программу",-13.7440824508667],["▁çka",-13.74408721923828],["▁instante",-13.744098663330078],["ησία",-13.744099617004396],["นี้จะ",-13.74410915374756],["▁dodać",-13.744134902954102],["▁doenças",-13.744152069091797],["▁postaja",-13.74416160583496],["▁Mozart",-13.744166374206545],["▁სახის",-13.744207382202148],["tanggung",-13.744214057922363],["旅程",-13.744220733642578],["▁جناح",-13.744266510009766],["▁Fear",-13.74427318572998],["▁opportunità",-13.744274139404297],["▁besucht",-13.74427604675293],["▁사람을",-13.74429702758789],["▁počtu",-13.74429988861084],["Με",-13.744314193725586],["▁నష్ట",-13.74431610107422],["▁pagam
ent",-13.744317054748535],["▁stappen",-13.7443265914917],["മൊഴി",-13.744329452514648],["▁установить",-13.744335174560549],["▁प्रि",-13.744354248046877],["▁fantazi",-13.744357109069824],["▁Król",-13.744365692138672],["ճան",-13.744366645812988],["வத",-13.74439811706543],["▁albuma",-13.744399070739746],["▁oldalán",-13.744409561157228],["▁දැනුම",-13.744413375854492],["יעו",-13.744417190551758],["वृत्ति",-13.744417190551758],["снаб",-13.74443531036377],["▁قالغان",-13.744443893432615],["▁Harri",-13.744444847106934],["ចាស់",-13.7444486618042],["▁manufa",-13.744449615478516],["▁þína",-13.74445629119873],["উল",-13.744457244873049],["utatud",-13.744462013244627],["jaama",-13.744470596313477],["ываются",-13.744479179382324],["▁yetki",-13.74448585510254],["▁వ్యక్తం",-13.744503021240234],["gazdálkodás",-13.7445068359375],["зілі",-13.744521141052246],["一隻",-13.744522094726562],["▁höjd",-13.744526863098145],["یکس",-13.744535446166992],["▁ذرا",-13.744547843933104],["▁זיינע",-13.7445707321167],["▁kakao",-13.744585037231444],["▁sauf",-13.744586944580078],["Мөнх",-13.744626998901367],["یکه",-13.744629859924316],["▁таква",-13.744656562805176],["Финанс",-13.744672775268556],["ሽብር",-13.744677543640137],["▁світло",-13.74468231201172],["▁زمینی",-13.744686126708984],["▁Tibet",-13.7446870803833],["▁sarap",-13.744701385498049],["amerikanische",-13.744702339172363],["▁partenaires",-13.74472713470459],["▁Expedi",-13.744730949401855],["นู",-13.744757652282717],["▁fidei",-13.744760513305664],["hantering",-13.74477195739746],["▁Franse",-13.744781494140623],["્યાં",-13.744784355163574],["▁radno",-13.744784355163574],["▁ziekte",-13.744791984558104],["▁صوتی",-13.744800567626951],["เวียน",-13.744810104370115],["▁avatud",-13.744812965393066],["▁konfor",-13.744824409484863],["шага",-13.74482536315918],["▁rakasta",-13.744827270507812],["▁туршлага",-13.74485206604004],["我家",-13.744854927062988],["غىنى",-13.744860649108888],["▁성인",-13.744865417480469],["४९",-13.74487590789795],["▁drabba",-13.744879722595217],["▁məktəbin",-13.744894981384276],["▁experimentar",-13.744999885559082],["ირება",-13.745017051696776],["▁Verlag",-13.7450532913208],["▁kansalais",-13.74506664276123],["bory",-13.74509048461914],["▁Şube",-13.74509048461914],["ovány",-13.745099067687988],["▁jõulu",-13.745118141174316],["pied",-13.745126724243164],["▁مواطن",-13.74513053894043],["▁oibre",-13.74513339996338],["▁Collect",-13.745158195495604],["▁Ülke",-13.745179176330566],["▁गर्म",-13.745230674743652],["▁laikam",-13.745254516601562],["年來",-13.745254516601562],["▁kucing",-13.745261192321776],["▁škr",-13.745262145996094],["▁Дух",-13.745268821716309],["țul",-13.745279312133787],["خام",-13.745280265808104],["කිය",-13.74528694152832],["ШИ",-13.745306968688965],["▁hugsa",-13.745306968688965],["▁diger",-13.745309829711914],["LR",-13.745343208312988],["▁రాజ్",-13.745355606079102],["ິງ",-13.745359420776367],["▁регулира",-13.745368003845217],["▁rika",-13.745370864868164],["▁těle",-13.74537467956543],["▁Designer",-13.745400428771973],["新兴",-13.745413780212402],["▁прекр",-13.74543285369873],["▁poţi",-13.745439529418944],["▁надад",-13.745450973510742],["▁pieņem",-13.745452880859377],["▁бүрт",-13.74546241760254],["▁adept",-13.745466232299805],["▁мери",-13.74547290802002],["▁Monaco",-13.745473861694336],["▁esprime",-13.745476722717283],["ύριο",-13.74548625946045],["▁මාත්",-13.745494842529297],["买了",-13.74551773071289],["▁अन्न",-13.745540618896484],["▁plocka",-13.745563507080078],["існа",-13.745574951171877],["brechen",-13.74557876586914],["▁Φωτ",-13.745586395263672],["▁birnin",-13.
745599746704102],["有多少",-13.745619773864746],["▁خانہ",-13.745620727539062],["▁прэс",-13.74563217163086],["我认为",-13.745640754699709],["▁finalizar",-13.745643615722656],["▁اخي",-13.745647430419922],["▁härligt",-13.745709419250488],["▁Cry",-13.745753288269045],["тину",-13.745772361755373],["▁దిన",-13.745772361755373],["로드",-13.74581527709961],["▁verklighet",-13.745837211608888],["5-8",-13.745841026306152],["▁preguntar",-13.745841979980469],["▁orgazm",-13.745845794677734],["▁polje",-13.745857238769531],[":1)",-13.745861053466797],["股权",-13.74586582183838],["一定會",-13.745877265930176],["▁rezid",-13.745879173278809],["▁användare",-13.745891571044922],["輸",-13.745916366577148],["▁aynu",-13.745919227600098],["烂",-13.745928764343262],["贾",-13.745938301086426],["逻辑",-13.74594497680664],["马来西亚",-13.745946884155272],["▁kulturelle",-13.745954513549805],["ጀር",-13.74596118927002],["锐",-13.745964050292969],["hôtel",-13.745970726013184],["允許",-13.7459716796875],["ferie",-13.745980262756348],["コミュニケーション",-13.745985984802246],["เน็ต",-13.745990753173828],["ခွင့်",-13.745991706848145],["▁Igrexa",-13.74599266052246],["▁bwysig",-13.74599266052246],["▁mafanikio",-13.74599266052246],["▁mempelajari",-13.74599266052246],["▁poskytnut",-13.74599266052246],["▁wafanyakazi",-13.74599266052246],["▁жывёл",-13.74599266052246],["▁آلبوم",-13.74599266052246],["▁नोव्हेंबर",-13.74599266052246],["▁নিশ্চিত",-13.74599266052246],["▁শ্যামাঙ্গিনী",-13.74599266052246],["▁ਲੁਧਿਆਣਾ",-13.74599266052246],["▁తెలుస్తోంది",-13.74599266052246],["▁വ്യത്യസ്ത",-13.74599266052246],["ນະຄອນຫຼວງ",-13.745993614196776],["▁Besonders",-13.745993614196776],["▁dhaqaalaha",-13.745994567871094],["▁dibêjin",-13.745994567871094],["▁Doğru",-13.74599552154541],["▁២០១៦",-13.745997428894045],["ුණේ",-13.74599838256836],["▁Tāpēc",-13.74599838256836],["▁aelod",-13.74599838256836],["▁điển",-13.74599838256836],["▁Geburt",-13.745999336242676],["▁ស្ត្រី",-13.746000289916992],["▁эркек",-13.74600315093994],["▁తక్షణ",-13.74600315093994],["▁ຫລື",-13.74600315093994],["▁ক্লিক",-13.746004104614258],["▁Dodatkowo",-13.746005058288574],["▁síðasta",-13.746005058288574],["▁pendekatan",-13.74600601196289],["▁այնպիսի",-13.746006965637209],["▁parcursul",-13.746009826660156],["▁చేశాడు",-13.746017456054688],["▁മദ്യ",-13.74602508544922],["▁Arbets",-13.746026992797852],["▁nessun",-13.746034622192385],["▁обязанности",-13.7460355758667],["▁techniczne",-13.746038436889648],["▁જોકે",-13.746038436889648],["▁डेटा",-13.746041297912598],["▁muntanya",-13.74604320526123],["প্রাপ্ত",-13.746047019958496],["▁посади",-13.746057510375977],["▁Schluss",-13.746058464050291],["▁søknad",-13.74606227874756],["▁የተነሳ",-13.74606227874756],["▁Respond",-13.74606990814209],["▁cualquiera",-13.74607276916504],["▁Rajoy",-13.746092796325684],["▁அமைப்பு",-13.74609661102295],["▁ملتونو",-13.746097564697266],["▁mnou",-13.746103286743164],["경영",-13.74610424041748],["aplicació",-13.746105194091797],["▁வயத",-13.746106147766112],["とはいえ",-13.746108055114746],["ುತ್ತಾನೆ",-13.746110916137695],["ନଗର",-13.74612045288086],["▁quieren",-13.746123313903809],["▁policijos",-13.746129989624023],["▁(2000)",-13.74614429473877],["▁garso",-13.74614715576172],["▁בשנים",-13.746148109436035],["ພົນ",-13.746160507202148],["▁جمہوریت",-13.746161460876465],["טכנולוגיה",-13.746166229248049],["输入",-13.746167182922363],["ziko",-13.746179580688477],["▁tárol",-13.746182441711426],["мало",-13.746191024780272],["▁võimaldab",-13.74619960784912],["▁Dün",-13.746203422546388],["▁qaydaları",-13.746204376220703],["rekiko",-13.746206283569336],["富有",-13.74621391296386
7],["絶対に",-13.746214866638184],["제도",-13.746219635009766],["▁Webcam",-13.746231079101562],["cripció",-13.746237754821776],["şıq",-13.746248245239258],["ратор",-13.746274948120115],["▁министрлігі",-13.746286392211914],["つき",-13.746286392211914],["▁pareng",-13.746294021606444],["▁spektr",-13.746323585510254],["kártyá",-13.746335983276367],["停车",-13.746337890625],["▁enfoca",-13.746339797973633],["▁Toàn",-13.74634075164795],["▁nedeni",-13.746341705322266],["sicherheit",-13.746346473693848],["▁16:30",-13.746352195739746],["▁aggiungere",-13.74635887145996],["▁گردوں",-13.74636459350586],["▁ફ્લ",-13.746366500854492],["beheer",-13.74637222290039],["områden",-13.746390342712402],["▁olgu",-13.746394157409668],["áři",-13.746404647827148],["▁Perang",-13.74640941619873],["karya",-13.746410369873049],["▁гульня",-13.746411323547363],["▁yaklaş",-13.74642276763916],["▁الدعم",-13.746474266052246],["▁δυσκολ",-13.74648380279541],["ကြည်",-13.746487617492676],["பொ",-13.7465181350708],["▁Iuda",-13.746533393859863],["的身份",-13.746535301208496],["自動車",-13.746551513671877],["ுற",-13.74656105041504],["Kre",-13.74656581878662],["▁పరిచయం",-13.746574401855469],["▁folgen",-13.74659252166748],["▁Działa",-13.746623992919922],["hija",-13.74662971496582],["▁שפי",-13.746648788452148],["НЭ",-13.746665000915527],["▁Стро",-13.74666976928711],["▁Næst",-13.746671676635742],["▁बनने",-13.746685981750488],["▁बालक",-13.74670696258545],["skyldig",-13.746712684631348],["▁අඩවි",-13.746713638305664],["Serie",-13.74671745300293],["▁generación",-13.746758460998535],["▁نورې",-13.746774673461914],["сынып",-13.746795654296877],["▁accepterer",-13.746804237365724],["▁päivit",-13.746804237365724],["єю",-13.746807098388672],["▁제도",-13.746832847595217],["▁סופר",-13.74685287475586],["▁מצו",-13.74686050415039],["hlaup",-13.746875762939451],["піль",-13.746880531311035],["▁Kula",-13.7468843460083],["▁глави",-13.746885299682615],["اسين",-13.746903419494627],["▁оценива",-13.746908187866213],["那时",-13.746918678283691],["Anh",-13.746920585632324],["▁स्वरूप",-13.746922492980955],["สตาร์",-13.746923446655272],["▁المض",-13.74693775177002],["▁Eisen",-13.746939659118652],["▁frö",-13.746946334838867],["▁պատերազմ",-13.746971130371094],["ကောင်",-13.74699592590332],["▁vojno",-13.74703884124756],["fællesskab",-13.74704360961914],["kaling",-13.74704647064209],["izacijo",-13.747058868408203],["▁Lucky",-13.747072219848633],["講師",-13.747092247009276],["▁resume",-13.747103691101074],["▁провежда",-13.747108459472656],["äuße",-13.747117042541504],["▁dames",-13.74712085723877],["▁ποιο",-13.747137069702148],["▁distrikto",-13.747150421142578],["▁Casi",-13.747156143188477],["ENIE",-13.74715805053711],["גלה",-13.747169494628906],["▁محدودیت",-13.747169494628906],["▁hydrat",-13.747175216674805],["staviť",-13.747214317321776],["ünüz",-13.747214317321776],["ተኞች",-13.747233390808104],["▁چئي",-13.74724578857422],["▁wykonanie",-13.747285842895508],["ในวัน",-13.747325897216797],["▁sanno",-13.747344017028809],["μίας",-13.747347831726074],["置いて",-13.747393608093262],["▁judul",-13.747400283813477],["tatem",-13.747435569763184],["全員",-13.74745750427246],["▁Опис",-13.74747371673584],["▁dawladda",-13.7474946975708],["▁நிறைய",-13.74756145477295],["▁ngoba",-13.747572898864746],["ในร่างกาย",-13.747583389282228],["的项目",-13.74758529663086],["▁کرام",-13.747594833374023],["▁ໂຄງການ",-13.74760627746582],["ਗ੍ਰ",-13.747614860534668],["החלטת",-13.747628211975098],["▁önlem",-13.747637748718262],["▁Kaiser",-13.747663497924805],["+0",-13.747685432434082],["▁раёне",-13.747686386108398],["▁Môn",-13.74768733978
2717],["▁taak",-13.747705459594728],["▁samal",-13.747709274291992],["▁dövrdə",-13.747736930847168],["यें",-13.747783660888672],["▁göç",-13.747783660888672],["enként",-13.74778938293457],["▁curte",-13.747791290283203],["▁Vaba",-13.747802734375],["▁ሆኖም",-13.747809410095217],["▁նշան",-13.74781322479248],["상담",-13.747817039489746],["▁raise",-13.747817993164062],["▁بوي",-13.747820854187012],["▁Gece",-13.747827529907228],["▁definitivt",-13.747840881347656],["163",-13.74785614013672],["▁фасад",-13.747878074645996],["drop",-13.747879981994627],["hluta",-13.747885704040527],["ที่ทํา",-13.747889518737791],["ătorii",-13.747928619384766],["▁تىلى",-13.74797534942627],["▁भन्छ",-13.747979164123535],["▁sikkerhets",-13.7479887008667],["سارة",-13.747990608215332],["തമായ",-13.748026847839355],["פרק",-13.74803352355957],["цах",-13.748037338256836],["▁chłop",-13.748045921325684],["▁achten",-13.748054504394531],["PSI",-13.748066902160645],["ിക്കാന",-13.748080253601074],["تحقيق",-13.748099327087402],["▁укр",-13.748109817504885],["▁düşünüyor",-13.74814796447754],["બેન",-13.74815273284912],["ைத்",-13.748154640197754],["பிள்",-13.748159408569336],["▁audzē",-13.748159408569336],["ಗಲ",-13.748161315917969],["ਤੂ",-13.748167037963867],["▁huolto",-13.748167991638184],["键",-13.748176574707031],["أزمة",-13.748217582702637],["▁כותב",-13.748285293579102],["▁skorzysta",-13.748298645019531],["▁Израил",-13.748300552368164],["▁Peut",-13.74831485748291],["遙",-13.748336791992188],["証拠",-13.748342514038086],["可以通过",-13.748343467712402],["炭",-13.748346328735352],["▁pastinya",-13.748353004455566],["抱怨",-13.7483549118042],["拠",-13.748356819152832],["▁мезгил",-13.74836254119873],["preşedinte",-13.74836540222168],["▁fødsel",-13.748368263244627],["อีเมล",-13.748369216918944],["▁условията",-13.748370170593262],["ប្រមាណ",-13.748372077941896],["allotjament",-13.748373985290527],["▁Bedingungen",-13.748373985290527],["▁Panasonic",-13.748373985290527],["▁adroddiad",-13.748373985290527],["▁espírito",-13.748373985290527],["▁zdjęcie",-13.748373985290527],["▁χαρακτήρα",-13.748373985290527],["▁Александър",-13.748373985290527],["▁Свабода",-13.748373985290527],["▁завдань",-13.748373985290527],["▁орынбасары",-13.748373985290527],["▁сустрэчы",-13.748373985290527],["▁ייעוץ",-13.748373985290527],["▁সিদ্ধান্ত",-13.748373985290527],["▁પૈસા",-13.748373985290527],["▁ప్రకటన",-13.748373985290527],["▁어려운",-13.748373985290527],["зважаючи",-13.748374938964844],["▁Recursos",-13.748374938964844],["▁pazienti",-13.748374938964844],["▁təbliğ",-13.74837589263916],["▁təhlil",-13.74837589263916],["▁برآمد",-13.74837589263916],["Verhältnis",-13.748376846313477],["▁medžiaga",-13.748377799987791],["▁ژمن",-13.748377799987791],["▁ګټه",-13.748377799987791],["▁magkaroon",-13.748379707336426],["▁ኳስ",-13.748379707336426],["▁अनुरोध",-13.748380661010742],["▁তখন",-13.748380661010742],["▁Galipedia",-13.74838161468506],["▁کلا",-13.74838161468506],["▁Kuzey",-13.748383522033691],["▁ühiskonna",-13.748383522033691],["▁Αποστολή",-13.748383522033691],["▁ناروغ",-13.748383522033691],["▁tradicionais",-13.748390197753906],["▁höfum",-13.748391151428224],["Видео",-13.74839210510254],["▁išskirtin",-13.74839210510254],["▁αποστολή",-13.748394012451172],["▁spreminja",-13.748394966125488],["▁vozidla",-13.748397827148438],["▁کمبود",-13.748397827148438],["▁apmeklēj",-13.748407363891602],["▁приватизаци",-13.748411178588867],["▁Преузе",-13.748417854309082],["ИА",-13.748419761657717],["ouvrage",-13.748421669006348],["▁rudaí",-13.748421669006348],["▁వదిల",-13.748424530029297],["▁Бардык",-13.748428344726562],[
"▁Kliknite",-13.74842929840088],["บาน",-13.74843406677246],["▁apuc",-13.748435974121094],["▁неприятно",-13.74843692779541],["EED",-13.748440742492676],["ంటాయి",-13.748441696166992],["ຮອດ",-13.748449325561523],["ለች።",-13.74845027923584],["▁خویش",-13.748458862304688],["▁каттоо",-13.748464584350586],["ffar",-13.748473167419434],["ndry",-13.74848175048828],["ирајте",-13.748483657836914],["▁salvat",-13.748497009277344],["▁pæn",-13.74850368499756],["▁noktası",-13.748505592346191],["čevanje",-13.74850845336914],["ຫນັກ",-13.748513221740724],["paid",-13.74852466583252],["▁بلاول",-13.74852466583252],["▁zentrale",-13.748530387878418],["▁מבין",-13.748533248901367],["íčko",-13.748536109924316],["▁waxba",-13.748541831970217],["▁guests",-13.748543739318848],["คริส",-13.748558044433594],["líon",-13.74856185913086],["メン",-13.748564720153809],["▁annum",-13.74857234954834],["原因是",-13.748602867126465],["長時間",-13.748602867126465],["▁ಖಾತೆ",-13.748604774475098],["▁varietat",-13.748605728149414],["▁장기",-13.74860668182373],["ininku",-13.74862003326416],["ologija",-13.748620986938477],["DIC",-13.748624801635742],["▁غنی",-13.748627662658691],["▁vejledning",-13.74863052368164],["▁moteris",-13.748632431030272],["▁zawodowe",-13.74864673614502],["lød",-13.748650550842283],["房價",-13.748650550842283],["▁hervor",-13.748656272888184],["▁ریاضی",-13.748656272888184],["▁कुर्",-13.748677253723145],["▁fungus",-13.74868106842041],["ምርጫ",-13.74868392944336],["ಬಂಧ",-13.748687744140623],["▁ಸಾಮಾನ್ಯ",-13.748696327209473],["▁vecuma",-13.748705863952637],["▁pyrki",-13.74871063232422],["γκο",-13.748711585998535],["ONY",-13.748724937438965],["maailm",-13.74873161315918],["▁võtab",-13.74874782562256],["▁odchod",-13.748762130737305],["▁постара",-13.748773574829102],["▁məcburi",-13.748778343200684],["▁conservación",-13.748807907104492],["usią",-13.748812675476074],["▁ddiweddar",-13.748814582824709],["▁ziņā",-13.748823165893556],["▁لوله",-13.748833656311035],["État",-13.748847007751465],["▁fuldt",-13.74885368347168],["▁ເກີດ",-13.748855590820312],["▁фоне",-13.748879432678224],["▁Donostiako",-13.748888969421388],["▁запах",-13.748893737792969],["▁להר",-13.748899459838867],["ပစ်",-13.74890422821045],["davė",-13.748916625976562],["▁קומ",-13.748918533325195],["erbjudande",-13.748930931091309],["▁الطالب",-13.748930931091309],["०६",-13.748931884765623],["▁sediul",-13.748937606811523],["ුමක්",-13.748942375183104],["▁సొంత",-13.748942375183104],["▁מפני",-13.74896240234375],["▁ашык",-13.748980522155762],["▁Меня",-13.748987197875977],["▁महानगरपालिका",-13.74901008605957],["▁ଭାଗ",-13.749011993408203],["ативно",-13.749034881591797],["▁системі",-13.749053001403809],["јевић",-13.74905490875244],["айце",-13.749069213867188],["壇",-13.74907112121582],["▁боловсруулах",-13.749080657958984],["▁طوری",-13.749080657958984],["▁Takie",-13.74909782409668],["▁Kasih",-13.749103546142578],["קצב",-13.749110221862791],["▁aparecen",-13.74913501739502],["▁אפשרות",-13.749138832092283],["▁dúirt",-13.74915599822998],["ռան",-13.749157905578612],["phor",-13.749210357666016],["▁boya",-13.749216079711914],["վազ",-13.749234199523926],["▁մշակութային",-13.74923610687256],["▁اللون",-13.749245643615724],["▁მაკ",-13.749252319335938],["реме",-13.749263763427734],["▁acudir",-13.749268531799316],["drob",-13.74927043914795],["тэт",-13.749271392822266],["ENCE",-13.749286651611328],["▁aipatu",-13.749296188354492],["▁suurenda",-13.749300003051758],["▁ਸ਼ਿ",-13.74931526184082],["▁Danny",-13.7493257522583],["관광",-13.749335289001465],["▁kirish",-13.749351501464844],["▁napos",-13.749364852905272],["601",
-13.749377250671388],["▁چالش",-13.749383926391602],["ٹل",-13.749394416809082],["▁volgt",-13.749414443969728],["شركة",-13.74942684173584],["天才",-13.74944305419922],["把他",-13.749473571777344],["मंड",-13.749475479125977],["ящ",-13.74948024749756],["▁mysig",-13.74948501586914],["tjänster",-13.74949836730957],["▁ਬੈ",-13.749499320983888],["▁близко",-13.749500274658203],["саң",-13.74950122833252],["▁సిద్ధ",-13.749507904052734],["▁rohke",-13.74950885772705],["▁Memorial",-13.749526977539062],["▁सरल",-13.74954891204834],["тігі",-13.749558448791504],["▁בזה",-13.749560356140137],["▁korisnik",-13.749566078186035],["ichean",-13.749573707580566],["stawić",-13.749576568603516],["Rom",-13.749622344970703],["eeyaan",-13.74964714050293],["▁capac",-13.749655723571776],["monina",-13.749678611755373],["▁sërish",-13.7496976852417],["▁veszély",-13.749698638916016],["hatsz",-13.749700546264648],["plán",-13.749709129333496],["▁рэк",-13.749717712402344],["▁Gada",-13.749730110168455],["▁Nasze",-13.74973201751709],["▁क्रिया",-13.749748229980469],["▁ପେ",-13.749753952026367],["▁최종",-13.749756813049316],["պիսի",-13.749820709228516],["CEO",-13.749841690063477],["▁programov",-13.749855995178224],["ండా",-13.74985694885254],["বিদ",-13.749881744384766],["▁првите",-13.749906539916992],["ಂಪ್",-13.74990940093994],["▁Riba",-13.749917984008787],["生まれ",-13.74992561340332],["▁разрешен",-13.749935150146484],["▁լուծում",-13.749943733215332],["ศา",-13.749944686889648],["▁kellel",-13.749960899353027],["▁darbojas",-13.749972343444824],["▁alternativas",-13.749987602233888],["აშვილმა",-13.749993324279783],["▁څار",-13.750041961669922],["▁ændringer",-13.750060081481934],["PIN",-13.75007152557373],["▁бій",-13.750073432922363],["▁ແກ່",-13.750073432922363],["宅配",-13.750075340270996],["▁kurash",-13.750089645385742],["kötő",-13.750116348266602],["▁नक्की",-13.75012493133545],["ແຕ່ງ",-13.750151634216309],["UBA",-13.750164985656738],["cího",-13.750165939331056],["падзе",-13.750166893005373],["ลดลง",-13.75017547607422],["747",-13.750179290771484],["▁tarte",-13.750218391418455],["ഥാ",-13.750225067138672],["上がって",-13.750226974487305],["▁لکيو",-13.75023078918457],["มาน",-13.750238418579102],["▁własne",-13.75024127960205],["▁boat",-13.750267028808594],["लिका",-13.750274658203123],["▁flutt",-13.750280380249023],["▁difusión",-13.750288009643556],["чилик",-13.750299453735352],["▁சபை",-13.750314712524414],["▁fruta",-13.750327110290527],["▁بالك",-13.750329971313477],["▁முதல்வர்",-13.750335693359377],["慎",-13.750349044799805],["cour",-13.750367164611816],["talk",-13.75037956237793],["▁Ogos",-13.750380516052246],["пута",-13.750391960144045],["▁فار",-13.750398635864258],["▁psihologi",-13.750412940979004],["▁grønn",-13.750414848327637],["оване",-13.750431060791016],["▁პოსტ",-13.750432014465332],["Pass",-13.750435829162598],["▁Rule",-13.750435829162598],["▁rodas",-13.750458717346191],["фти",-13.750533103942873],["ీకరణ",-13.750536918640137],["ことが多い",-13.750555038452148],["ñez",-13.750564575195312],["▁suppose",-13.750564575195312],["▁ਦੇਖ",-13.750568389892578],["▁поставен",-13.750574111938477],["▁കെട്ടി",-13.750579833984377],["papier",-13.750600814819336],["▁мә",-13.750629425048828],["颜",-13.750632286071776],["hrif",-13.750635147094728],["▁другая",-13.750636100769045],["utilisateur",-13.750642776489258],["คุณสามารถ",-13.75066089630127],["▁друзья",-13.75066375732422],["9,99",-13.750672340393066],["有助于",-13.750679969787598],["▁zhvilluar",-13.750697135925291],["ляється",-13.750699996948242],["▁kvaliteten",-13.75070095062256],["▁नोंद",-13.750701904296877],["鴨",-13.7507047
65319824],["耶穌",-13.750707626342772],["▁Angeli",-13.75072193145752],["▁direcció",-13.750723838806152],["扔",-13.750726699829102],["动态",-13.75072956085205],["▁यसका",-13.750736236572266],["▁Sukan",-13.750740051269531],["もしくは",-13.750743865966797],["軒",-13.75074863433838],["ساس",-13.750754356384276],["▁используем",-13.75075912475586],["നേതാക്കള",-13.750760078430176],["ฉีด",-13.750760078430176],["▁dospěl",-13.750760078430176],["▁doživlja",-13.750760078430176],["▁enthousiast",-13.750760078430176],["▁membolehkan",-13.750760078430176],["▁mengurangkan",-13.750760078430176],["▁άποψη",-13.750760078430176],["▁грошей",-13.750760078430176],["▁понякога",-13.750760078430176],["▁софтуер",-13.750760078430176],["▁المشكلة",-13.750760078430176],["▁علاقہ",-13.750760078430176],["▁وابسته",-13.750760078430176],["▁कतिपय",-13.750760078430176],["▁সৃষ্টি",-13.750760078430176],["▁అసెంబ్లీ",-13.750760078430176],["▁සහෝදර",-13.750760078430176],["▁በቀጥታ",-13.750760078430176],["둥",-13.750760078430176],["▁Savienības",-13.750761032104492],["▁electrónica",-13.750761032104492],["▁аеродром",-13.750761032104492],["▁маусым",-13.750761032104492],["▁کرکټ",-13.750761032104492],["▁ਅਮਰਿੰਦਰ",-13.750761032104492],["▁dagirker",-13.750761985778809],["▁cîhanê",-13.750762939453123],["oppervlak",-13.75076389312744],["▁ülikooli",-13.75076389312744],["▁Hintergrund",-13.750765800476074],["▁ख़बर",-13.75076675415039],["kinta",-13.750767707824709],["▁شپږ",-13.750767707824709],["▁իրավական",-13.750768661499023],["▁menyerahkan",-13.750771522521973],["▁לבדוק",-13.750773429870604],["▁భాగంగా",-13.750774383544922],["▁түскен",-13.750775337219238],["▁түшкөн",-13.750775337219238],["▁ජයග්",-13.750778198242188],["▁főváros",-13.750782012939451],["▁बारिश",-13.75078296661377],["dıktan",-13.750788688659668],["▁cyfnod",-13.750788688659668],["▁средстава",-13.7507905960083],["ចុងក្រោយ",-13.750794410705566],["▁wartość",-13.750794410705566],["▁λόγους",-13.750794410705566],["▁possunt",-13.750797271728516],["▁ഒന്നാം",-13.750799179077148],["▁Aufnahme",-13.750810623168944],["▁tilfeldig",-13.750818252563477],["▁rezerw",-13.750819206237791],["▁Grenzen",-13.750821113586426],["▁سرباز",-13.750822067260742],["▁penerima",-13.750825881958008],["▁Hoppa",-13.750836372375488],["▁devenu",-13.750847816467283],["▁coppia",-13.75085163116455],["▁incrementar",-13.750855445861816],["ഡോ",-13.750873565673828],["▁დედ",-13.75087547302246],["/2000",-13.750876426696776],["▁волос",-13.750876426696776],["▁Chung",-13.750882148742676],["стройств",-13.750886917114258],["▁اوھان",-13.750887870788574],["▁Kodi",-13.750897407531738],["▁המחשב",-13.750920295715332],["ማም",-13.750927925109863],["អាន",-13.750927925109863],["▁فقه",-13.750932693481444],["خش",-13.75093936920166],["▁Hozir",-13.750943183898926],["▁많다",-13.750946998596191],["▁газов",-13.750964164733888],["▁decidiu",-13.750975608825684],["▁لوبه",-13.750975608825684],["▁Måske",-13.750982284545898],["▁tuore",-13.750982284545898],["ダメ",-13.75099277496338],["▁اخرى",-13.75100040435791],["▁Jedná",-13.751002311706545],["▁Kiến",-13.751002311706545],["▁પ્રમાણે",-13.751002311706545],["▁organisations",-13.751005172729492],["并未",-13.75100803375244],["ስል",-13.751009941101074],["алдуу",-13.751012802124023],["▁seguente",-13.751012802124023],["▁definitivamente",-13.75101375579834],["▁Adapun",-13.751033782958984],["▁лікаря",-13.751033782958984],["▁кући",-13.75103759765625],["▁Artis",-13.751052856445312],["akên",-13.751057624816896],["▁արել",-13.75107479095459],["▁ediləcək",-13.751084327697754],["άι",-13.751087188720703],["▁kopie",-13.75111484527588],["▁produselor",-13.7
5111961364746],["▁Gregor",-13.751132011413574],["▁túc",-13.751134872436523],["ခန့်",-13.75113582611084],["лекси",-13.751151084899902],["▁dhinaca",-13.751157760620115],["stoupil",-13.751160621643066],["ုပ္",-13.751164436340332],["▁gestern",-13.751176834106444],["▁போட்ட",-13.75118923187256],["aviy",-13.75119972229004],["▁مٿان",-13.75119972229004],["darbo",-13.751202583312988],["СЯ",-13.751205444335938],["ೊಳಗೆ",-13.751214027404783],["▁Gjith",-13.751230239868164],["▁vilaĝo",-13.75123119354248],["Esc",-13.751232147216797],["▁රිකා",-13.751237869262695],["▁Gandhi",-13.751243591308594],["每个人都",-13.751246452331545],["▁pentsa",-13.751251220703123],["▁Ajánl",-13.751258850097656],["▁morrer",-13.751262664794922],["▁Rahim",-13.751276016235352],["▁ህግ",-13.751333236694336],["▁visitare",-13.751334190368652],["▁mengurus",-13.75135898590088],["▁башчы",-13.751367568969728],["及時",-13.751373291015623],["ریس",-13.751379013061523],["▁празници",-13.751388549804688],["ړۍ",-13.751392364501951],["▁nødvendige",-13.751395225524902],["▁drejtues",-13.7514009475708],["▁ĉefa",-13.751409530639648],["ยัน",-13.751412391662598],["▁określony",-13.751412391662598],["srebrn",-13.751421928405762],["fungwa",-13.751441955566406],["คําว่า",-13.751445770263672],["▁твори",-13.751445770263672],["േട്ടന്",-13.751452445983888],["▁рачуна",-13.751484870910645],["▁твърди",-13.75148868560791],["άδων",-13.751503944396973],["▁хем",-13.751522064208984],["▁Ingrid",-13.751529693603516],["(6",-13.75156593322754],["东北",-13.751567840576172],["178",-13.75159740447998],["klej",-13.751601219177246],["▁zastosowanie",-13.751605033874512],["▁etildi",-13.751622200012209],["มหาชน",-13.75162410736084],["శేష",-13.751626014709473],["ごとに",-13.751638412475586],["▁ശിവ",-13.751651763916016],["টন",-13.751681327819824],["ugnay",-13.751683235168455],["-47",-13.751684188842772],["ГРАД",-13.75169277191162],["▁UFC",-13.751715660095217],["▁सारी",-13.751742362976074],["▁Programu",-13.751748085021973],["▁Läti",-13.751752853393556],["ლოთ",-13.7517728805542],["▁yaşıyor",-13.75179672241211],["▁lytte",-13.75180435180664],["казывал",-13.751816749572754],["επιδ",-13.751832962036133],["▁tunnusta",-13.751850128173828],["▁Tunis",-13.751917839050291],["ਦੇਸ਼",-13.75191879272461],["▁ülkeler",-13.751922607421877],["▁קיים",-13.751922607421877],["▁মূল্য",-13.751924514770508],["▁psyko",-13.75193214416504],["ొక",-13.751935958862305],["ðurinn",-13.75195026397705],["ēnu",-13.751961708068848],["skåp",-13.751964569091797],["▁mühafizə",-13.751975059509276],["▁konge",-13.751991271972656],["▁Mele",-13.751999855041504],["進而",-13.751999855041504],["ાનો",-13.752018928527832],["वः",-13.752055168151855],["▁моих",-13.752057075500488],["▁Sjö",-13.752081871032717],["▁ਮਹਿਲਾ",-13.752097129821776],["▁vodstvo",-13.752103805541992],["minyak",-13.752130508422852],["ėlės",-13.752130508422852],["wunder",-13.752140998840332],["不愿意",-13.752141952514648],["័យ",-13.752154350280762],["forte",-13.752161979675291],["ulong",-13.752171516418455],["▁Padang",-13.752172470092772],["▁possession",-13.75217628479004],["▁pianist",-13.752180099487305],["പിന്ന",-13.752195358276367],["▁tlf",-13.752198219299316],["▁ಒಪ್ಪ",-13.752201080322266],["ιστής",-13.752219200134276],["しやすい",-13.752219200134276],["пустили",-13.752229690551758],["▁ئىشلار",-13.752241134643556],["ბლი",-13.75225067138672],["ערב",-13.752264976501465],["ovanej",-13.75236701965332],["▁úžasn",-13.75237274169922],["klju",-13.752373695373535],["▁направление",-13.752381324768066],["头发",-13.75238800048828],["لند",-13.752391815185549],["მენი",-13.752399444580078],["▁našo",-13.
752421379089355],["aasje",-13.752432823181152],["▁vauva",-13.752453804016112],["سماع",-13.752473831176758],["ేశారు",-13.752508163452148],["▁shtetas",-13.752511024475098],["▁Schä",-13.752518653869627],["分け",-13.752519607543944],["▁pegar",-13.752540588378906],["▁мүмкіндігі",-13.752551078796388],["▁ਚੌ",-13.752556800842283],["▁spíše",-13.752603530883787],["▁упис",-13.752603530883787],["▁üretimi",-13.752606391906738],["▁явуулж",-13.752631187438965],["▁drugimi",-13.752634048461914],["ماقتا",-13.752646446228027],["▁پرده",-13.752693176269531],["▁vibrator",-13.752699851989746],["▁generate",-13.752710342407228],["というか",-13.75271224975586],["כיל",-13.752721786499023],["ಕ್ರಿ",-13.752741813659668],["도는",-13.752741813659668],["เข้าถึง",-13.752752304077148],["ρικά",-13.752767562866213],["▁əlaqələrin",-13.75278377532959],["තික",-13.752785682678224],["▁irland",-13.752785682678224],["▁Пал",-13.75279712677002],["▁Confi",-13.752800941467283],["▁பொருள்",-13.752802848815918],["するもの",-13.752817153930664],["fase",-13.752829551696776],["ກິ",-13.752829551696776],["▁کالج",-13.752829551696776],["▁gaire",-13.752838134765623],["▁agentúr",-13.752848625183104],["▁ematea",-13.752850532531738],["Же",-13.75286102294922],["احي",-13.752877235412598],["▁Verge",-13.752899169921877],["▁Ελληνική",-13.752899169921877],["▁პრეზიდენტი",-13.75290298461914],["▁fenti",-13.75291633605957],["▁споменик",-13.752944946289062],["▁Szeret",-13.752954483032228],["▁voorbij",-13.752958297729492],["yniadau",-13.752970695495604],["▁folle",-13.752992630004885],["▁Душан",-13.753059387207031],["▁Antworten",-13.753097534179688],["ительных",-13.75309944152832],["哪怕",-13.753108024597168],["同比增长",-13.753110885620115],["▁населени",-13.75311279296875],["咁",-13.753121376037598],["katerimi",-13.753133773803713],["▁ningú",-13.753140449523926],["邮件",-13.753144264221191],["အသင်း",-13.75314712524414],["損害",-13.753149032592772],["▁alıp",-13.753150939941406],["▁විවෘත",-13.753150939941406],["▁четвърт",-13.753151893615724],["낭",-13.753151893615724],["▁Akureyri",-13.75315284729004],["▁Pogledajte",-13.75315284729004],["▁mnohých",-13.75315284729004],["▁müdahale",-13.75315284729004],["▁ndërmjet",-13.75315284729004],["▁төмөнкү",-13.75315284729004],["▁характар",-13.75315284729004],["▁وضاحت",-13.75315284729004],["▁ਦਿੱਤੇ",-13.75315284729004],["▁బ్లాక్",-13.75315284729004],["גענומען",-13.753153800964355],["ทัศน",-13.753153800964355],["▁Misschien",-13.753153800964355],["▁Nhìn",-13.753153800964355],["▁þekkt",-13.753153800964355],["▁თეატრ",-13.753154754638672],["لىشىپ",-13.753156661987305],["ペット",-13.75315761566162],["▁Škoda",-13.75316047668457],["▁dopravy",-13.75316333770752],["▁Yêu",-13.753165245056152],["▁ئىكەن",-13.753168106079102],["▁Trình",-13.753173828125],["▁kasneje",-13.753174781799316],["▁үздік",-13.753186225891112],["▁Баткен",-13.753189086914062],["▁השח",-13.753190994262695],["▁φως",-13.753192901611328],["મેન",-13.753195762634276],["▁dlhodob",-13.753198623657228],["▁शाळा",-13.753198623657228],["▁msimu",-13.753201484680176],["▁gewählt",-13.753204345703123],["▁επόμενο",-13.753204345703123],["▁součástí",-13.75320816040039],["▁sisustus",-13.753212928771973],["▁priimti",-13.753215789794922],["▁nálad",-13.753218650817873],["mitglied",-13.753219604492188],["▁contigo",-13.75322437286377],["▁premjer",-13.75322723388672],["▁Jamal",-13.753231048583984],["mnazi",-13.75323486328125],["▁ලදී",-13.75323486328125],["Bukhari",-13.753246307373049],["▁ослободи",-13.753246307373049],["גרו",-13.753247261047363],["十足",-13.753252983093262],["▁темы",-13.75327205657959],["▁нумар",-13.75328254699
707],["ໄກ",-13.753288269042969],["▁ждать",-13.753290176391602],["角落",-13.753290176391602],["bayang",-13.753292083740234],["▁dječj",-13.753292083740234],["▁edebilirsiniz",-13.753292083740234],["▁прогрес",-13.753294944763184],["▁yaqaan",-13.753301620483398],["Û",-13.75331687927246],["ವಾಣಿ",-13.75332260131836],["հրա",-13.753334999084473],["▁helyre",-13.753339767456056],["所需",-13.753345489501951],["▁בשנה",-13.753347396850586],["簡介",-13.7533540725708],["▁izlet",-13.753357887268066],["▁married",-13.753366470336914],["▁кстати",-13.753366470336914],["▁හමුවේ",-13.753366470336914],["verta",-13.753385543823242],["hack",-13.75339412689209],["▁Filipino",-13.753405570983888],["ундагы",-13.753410339355469],["فاع",-13.753411293029783],["▁súper",-13.75341510772705],["▁شراء",-13.75341796875],["▁אויסגע",-13.753427505493164],["▁دورة",-13.753429412841797],["ਨੋ",-13.75343132019043],["hudhu",-13.75344181060791],["▁gudbi",-13.753445625305176],["▁поднесе",-13.753445625305176],["терапевт",-13.75345230102539],["▁Чак",-13.753456115722656],["▁ايمان",-13.753475189208984],["▁ostean",-13.753477096557615],["ieši",-13.75347900390625],["▁Вижте",-13.753479957580566],["dikten",-13.75349235534668],["▁несие",-13.75349235534668],["▁consejos",-13.753493309020996],["▁provincie",-13.753495216369627],["แฟ",-13.75351333618164],["▁2015-2016",-13.75351333618164],["konstrukt",-13.753514289855955],["OVÁ",-13.75352382659912],["▁მოძრაობა",-13.75352668762207],["▁planos",-13.753535270690918],["▁Руски",-13.753560066223145],["▁Asker",-13.75356674194336],["▁chapter",-13.753580093383787],["▁դատարանի",-13.753584861755373],["чане",-13.753585815429688],["▁הַ",-13.753591537475586],["▁hauteur",-13.7535982131958],["▁menyebutkan",-13.753602981567385],["▁وعد",-13.753605842590332],["▁pieds",-13.753607749938965],["ከረ",-13.75361156463623],["▁жұмысын",-13.753615379333496],["▁meitene",-13.753628730773926],["null",-13.753629684448242],["▁görüşüb",-13.753631591796877],["▁odluka",-13.75365161895752],["▁ngerti",-13.753668785095217],["▁অনুষ্ঠানে",-13.75367546081543],["záró",-13.753714561462402],["▁haigus",-13.753718376159668],["بني",-13.75373077392578],["▁kıy",-13.753746032714844],["▁લાભ",-13.75375270843506],["▁niaj",-13.753762245178224],["▁jiných",-13.753768920898438],["ämpi",-13.753783226013184],["▁למח",-13.753785133361816],["▁hayan",-13.753791809082031],["▁მსოფლიოში",-13.75381851196289],["▁результатами",-13.753835678100586],["власт",-13.753839492797852],["STEN",-13.753840446472168],["▁पालन",-13.753849029541016],["player",-13.753849983215332],["▁Kogu",-13.75385284423828],["▁ambulant",-13.753857612609863],["▁zamenja",-13.753867149353027],["這項",-13.753870964050291],["íochtaí",-13.753873825073242],["▁میلاد",-13.753881454467772],["▁prosjektet",-13.753884315490724],["вте",-13.753902435302734],["ктур",-13.753905296325684],["▁सीट",-13.753920555114746],["▁dialekt",-13.753928184509276],["હેર",-13.753938674926758],["schritt",-13.75394344329834],["▁begrip",-13.753947257995604],["Ново",-13.75395965576172],["▁millora",-13.753992080688477],["▁leşkerî",-13.753994941711426],["▁teatru",-13.753998756408691],["нур",-13.754010200500488],["▁tiede",-13.754018783569336],["▁ஊர்",-13.754021644592283],["▁Kuka",-13.754037857055664],["மென்",-13.754057884216309],["▁सदस्यता",-13.754070281982422],["▁ტრ",-13.75408935546875],["duğunu",-13.7540922164917],["的状态",-13.7540922164917],["▁szereg",-13.754101753234863],["▁professionell",-13.754106521606444],["▁رسالت",-13.754130363464355],["彼の",-13.75413703918457],["▁იქნებ",-13.75413990020752],["▁Troll",-13.754142761230469],["▁زمن",-13.754142761230469],[
"体内",-13.754146575927734],["▁Муж",-13.754158020019531],["▁kött",-13.754168510437012],["▁ugn",-13.75418472290039],["▁Kubo",-13.754186630249023],["ելուց",-13.754215240478516],["積極的に",-13.754252433776855],["▁ravnatelj",-13.754260063171388],["▁Ferro",-13.754266738891602],["▁المحلي",-13.75426959991455],["ᄏᄏᄏ",-13.754280090332031],["ოვი",-13.754307746887209],["▁Gana",-13.75430965423584],["টিভি",-13.754312515258787],["▁использованием",-13.754319190979004],["▁Creu",-13.754323959350586],["នារី",-13.754355430603027],["▁staðar",-13.754359245300291],["▁ବସି",-13.75436019897461],["ጆች",-13.754362106323242],["ڇي",-13.75438404083252],["▁Herceg",-13.75439453125],["ayım",-13.754398345947266],["០០០",-13.75440502166748],["steroid",-13.75442123413086],["▁Afin",-13.754425048828123],["цкія",-13.754448890686035],["▁versjon",-13.754505157470703],["клони",-13.754510879516602],["はこの",-13.75451374053955],["▁મૂક",-13.754517555236816],["▁lucu",-13.75452995300293],["▁چال",-13.754536628723145],["ЕШ",-13.754558563232422],["եի",-13.754571914672852],["▁fejlesztése",-13.754573822021484],["päällikkö",-13.754583358764648],["▁potentia",-13.754592895507812],["תיאור",-13.754603385925291],["▁Zoran",-13.754603385925291],["▁കള്ള",-13.754618644714355],["ביה",-13.754621505737305],["▁betra",-13.754639625549316],["▁слуг",-13.754645347595217],["▁рукав",-13.754653930664062],["▁aktris",-13.754658699035645],["▁serveru",-13.754672050476074],["206",-13.75467300415039],["▁datotek",-13.75469207763672],["▁trebuia",-13.754693031311035],["тичних",-13.754693984985352],["რიც",-13.754698753356934],["▁mld",-13.754704475402832],["欧美",-13.754717826843262],["▁መረጃዎች",-13.75474739074707],["▁anadan",-13.754786491394045],["▁Those",-13.754788398742676],["143",-13.754798889160156],["▁páxinas",-13.754813194274902],["félög",-13.754819869995115],["▁შედეგი",-13.754819869995115],["(...)",-13.754858016967772],["การณ์",-13.754859924316406],["▁száj",-13.75486183166504],["הוראות",-13.754870414733888],["حياء",-13.754878044128418],["ქან",-13.754926681518556],["овци",-13.754927635192873],["▁keresés",-13.754934310913086],["▁ПОЛ",-13.754937171936035],["▁Maggio",-13.7549409866333],["höf",-13.754956245422363],["▁ፓርቲዎች",-13.754959106445312],["▁BEZ",-13.754982948303224],["臺北市",-13.754984855651855],["不清",-13.754987716674805],["▁мої",-13.754990577697754],["▁posud",-13.754993438720703],["▁होस्",-13.75501537322998],["ହର",-13.755023956298828],["全ての",-13.755043029785156],["▁miniatur",-13.755050659179688],["▁Meil",-13.755070686340332],["▁μονο",-13.75510597229004],["brana",-13.755122184753418],["▁ಸದಸ್ಯ",-13.755138397216797],["▁profesionist",-13.755176544189451],["器材",-13.755184173583984],["▁Qanunun",-13.7551851272583],["玩法",-13.755193710327148],["מרחב",-13.755221366882324],["▁правилата",-13.755224227905272],["▁Urte",-13.75523853302002],["ացույց",-13.755273818969728],["▁endte",-13.7553071975708],["▁zmien",-13.755316734313965],["Anna",-13.755326271057127],["ложено",-13.755326271057127],["▁видела",-13.755326271057127],["ల్లు",-13.75533390045166],["есс",-13.755334854125977],["▁İdarə",-13.755335807800291],["▁PIB",-13.755337715148926],["гээс",-13.755338668823242],["дэв",-13.755352020263672],["▁працівника",-13.75535774230957],["▁πρόσωπο",-13.75537109375],["▁ശക്തമായ",-13.755378723144531],["▁успя",-13.75538444519043],["▁уюштур",-13.755386352539062],["เสียชีวิต",-13.75539493560791],["▁უკ",-13.75539493560791],["递",-13.755457878112791],["იდა",-13.755471229553224],["▁במצב",-13.75547695159912],["涛",-13.755485534667969],["▁빼",-13.755486488342283],["IKAN",-13.755495071411133],["▁средба",-13.755504608154
297],["เขียว",-13.755507469177246],["食べて",-13.755507469177246],["▁duchov",-13.755513191223145],["▁bodde",-13.755515098571776],["総合",-13.755517959594728],["怀孕",-13.755520820617676],["面白い",-13.755521774291992],["峰会",-13.755525588989258],["арство",-13.755526542663574],["肢",-13.75552749633789],["细胞",-13.755532264709473],["トレーニング",-13.755533218383787],["ազգ",-13.755535125732422],["恆",-13.755536079406738],["▁ਅਨ",-13.755539894104004],["룡",-13.755545616149902],["રાષ્ટ્ર",-13.755548477172852],["ኹ",-13.755550384521484],["▁Lõuna",-13.755550384521484],["▁melaporkan",-13.755550384521484],["▁nangangahulugan",-13.755550384521484],["▁pamięci",-13.755550384521484],["▁straipsniai",-13.755550384521484],["▁vedrørende",-13.755550384521484],["▁většinou",-13.755550384521484],["▁Байгаль",-13.755550384521484],["▁людзьмі",-13.755550384521484],["▁دواړو",-13.755550384521484],["▁ਵਿਕਾਸ",-13.755550384521484],["▁வேண்டாம்",-13.755550384521484],["▁ವ್ಯಕ್ತಪಡಿಸ",-13.755550384521484],["▁සංඛ්",-13.755550384521484],["Хүрэлсүх",-13.7555513381958],["تصنيف",-13.7555513381958],["▁söylüyor",-13.7555513381958],["▁Кеңес",-13.7555513381958],["▁meneruskan",-13.755552291870115],["▁શરૂઆત",-13.755552291870115],["▁선생님",-13.755552291870115],["▁ऑस्ट्रेलिया",-13.755553245544434],["▁українською",-13.75555419921875],["▁سوداګر",-13.755555152893066],["▁gembira",-13.7555570602417],["▁Бугарија",-13.755558013916016],["▁pranzo",-13.755558967590332],["▁заранее",-13.755559921264648],["ໄຟຟ້າ",-13.755560874938965],["▁zainteresira",-13.755560874938965],["▁crkve",-13.75556182861328],["▁ವಿಜ್ಞಾನ",-13.75556468963623],["▁izjemno",-13.755566596984863],["予防",-13.755568504333496],["▁Луганськ",-13.755575180053713],["논",-13.755576133728027],["▁Gizarte",-13.75558090209961],["వలసిన",-13.755581855773926],["▁krejt",-13.755587577819824],["儘管",-13.755595207214355],["▁Dewlet",-13.755597114562988],["▁ಅಂದರೆ",-13.75559902191162],["▁ویژن",-13.75560474395752],["imizning",-13.755606651306152],["ປະສົບ",-13.755610466003418],["ნილ",-13.755620956420898],["princip",-13.755634307861328],["▁ভগ",-13.755634307861328],["▁caput",-13.75563907623291],["▁wapen",-13.75563907623291],["▁ಲೈನ್",-13.755640029907228],["▁grønt",-13.755661964416504],["ERTA",-13.755664825439451],["կոմ",-13.755677223205566],["▁представників",-13.755684852600098],["แปลง",-13.755695343017578],["▁Ayam",-13.755696296691896],["▁Investor",-13.755696296691896],["ibilbide",-13.755704879760742],["▁Krakowie",-13.755704879760742],["వీడియో",-13.755708694458008],["▁mauaji",-13.755708694458008],["hospital",-13.755712509155272],["▁၂၀၀",-13.755722999572754],["▁Því",-13.755724906921388],["▁разказва",-13.755738258361816],["▁Thunder",-13.755741119384766],["▁اللعبة",-13.755743026733398],["▁eskubidea",-13.75575351715088],["▁ගන්නේ",-13.755766868591309],["▁управе",-13.755767822265623],["▁celom",-13.755783081054688],["▁Allan",-13.755785942077637],["ysgrif",-13.755789756774902],["៊",-13.75580596923828],["▁ഞങ്ങൾ",-13.755824089050291],["napaswa",-13.755831718444824],["agence",-13.755852699279783],["▁hlb",-13.75587558746338],["▁Cesar",-13.755881309509276],["▁nasihat",-13.755892753601074],["▁kashe",-13.755898475646973],["નારા",-13.755904197692873],["овыми",-13.755908012390137],["▁osatu",-13.755919456481934],["▁бебето",-13.755921363830566],["منت",-13.755926132202148],["ъчни",-13.75593376159668],["anyagok",-13.755943298339844],["panje",-13.75594425201416],["ાણ",-13.755946159362791],["خود",-13.755953788757324],["▁asmenų",-13.755962371826172],["▁दीपक",-13.755964279174805],["શિયા",-13.755967140197754],["▁الطبية",-13.755969047546388],["▁lääkäri",-13.755973
815917969],["▁jich",-13.755989074707031],["▁vogliono",-13.755989074707031],["▁රැක",-13.755996704101562],["пізна",-13.75599765777588],["▁λειτουργίας",-13.75599765777588],["▁məqalə",-13.75601863861084],["▁creazione",-13.756022453308104],["២នាក់",-13.756023406982422],["▁başlıyor",-13.756027221679688],["▁میلادی",-13.756037712097168],["▁باھا",-13.75604248046875],["▁dagaalka",-13.756060600280762],["TANG",-13.75608253479004],["▁תג",-13.756086349487305],["▁خيار",-13.756101608276367],["▁Olyan",-13.75610637664795],["▁yönəl",-13.756114959716797],["▁Možete",-13.756123542785645],["▁Egil",-13.756141662597656],["▁lastnosti",-13.756149291992188],["▁मंच",-13.75615406036377],["▁Güven",-13.7561674118042],["matricula",-13.756168365478516],["▁gumb",-13.756183624267578],["▁హోం",-13.756183624267578],["▁சக்தி",-13.756186485290527],["▁필수",-13.756190299987791],["বীর",-13.756199836730955],["都沒有",-13.75620174407959],["прозрач",-13.756214141845703],["▁democratic",-13.75621509552002],["ванняў",-13.756219863891602],["▁elektromos",-13.756221771240234],["▁Правил",-13.756221771240234],["WP",-13.756226539611816],["▁Kanunu",-13.756231307983398],["▁능력",-13.75623893737793],["▁औषधि",-13.756270408630373],["▁soporte",-13.756272315979004],["▁чыгаруу",-13.756272315979004],["▁қиын",-13.756275177001951],["▁Kategorija",-13.756306648254396],["したいと思います",-13.756315231323242],["ဖော်ပြ",-13.756336212158203],["1960",-13.756348609924316],["▁noaptea",-13.756356239318848],["ვუ",-13.756370544433594],["▁alueen",-13.75637435913086],["▁dominan",-13.756402969360352],["▁југ",-13.756420135498049],["kevitra",-13.756434440612791],["▁המק",-13.756439208984377],["ttoman",-13.756441116333008],["kulttuuri",-13.756465911865234],["▁1/8",-13.756489753723145],["▁réuni",-13.756492614746094],["▁nekateri",-13.756522178649902],["▁nujno",-13.75654411315918],["▁pacienti",-13.756559371948242],["పత్ర",-13.756589889526367],["を持ち",-13.756593704223633],["▁rafin",-13.75659465789795],["▁projecten",-13.756597518920898],["葡萄酒",-13.75661277770996],["▁Slavon",-13.756622314453123],["▁ölkələrdə",-13.756634712219238],["▁besökare",-13.75663948059082],["不小心",-13.756646156311035],["真心",-13.756673812866213],["قون",-13.756691932678224],["ATH",-13.756699562072754],["▁sauce",-13.756705284118652],["203",-13.756707191467283],["થે",-13.756714820861816],["kaste",-13.75671672821045],["сц",-13.756729125976562],["▁laborator",-13.75673770904541],["▁келтір",-13.756752014160156],["pladser",-13.756755828857422],["▁올리",-13.75676441192627],["ejoj",-13.75676727294922],["▁bogen",-13.756802558898926],["にならない",-13.756802558898926],["ុប",-13.756810188293455],["▁Amerike",-13.756814002990724],["qit",-13.756824493408203],["▁Dankie",-13.756839752197266],["▁الهام",-13.756848335266112],["▁trgovine",-13.756868362426758],["▁ridurre",-13.756880760192873],["▁сторона",-13.756898880004885],["gnac",-13.756930351257324],["०९",-13.756930351257324],["▁лична",-13.756939888000488],["GON",-13.756949424743652],["wolu",-13.756952285766602],["ರಷ್ಟು",-13.756957054138184],["▁Мод",-13.756958961486816],["▁காட்ட",-13.756964683532717],["ຢາກ",-13.756976127624512],["▁דרום",-13.756990432739258],["▁fibro",-13.757000923156738],["โดยไม่",-13.757019996643066],["▁უც",-13.75704574584961],["▁esaten",-13.75705337524414],["▁ಊರ",-13.757054328918455],["דלת",-13.75706386566162],["▁culturali",-13.757071495056152],["▁brugte",-13.75707721710205],["▁Üniversite",-13.757079124450684],["▁indigna",-13.75709056854248],["ීමේ",-13.757102966308594],["两位",-13.757123947143556],["▁Minder",-13.7571439743042],["ğımız",-13.757149696350098],["ησα",-13.757179260253906
],["▁совета",-13.757183074951172],["ակցության",-13.757187843322754],["▁آرامش",-13.757207870483398],["▁kék",-13.75721549987793],["▁zrovna",-13.75721836090088],["တော",-13.757244110107422],["▁Jumat",-13.757253646850586],["לש",-13.757254600524902],["▁zoon",-13.757259368896484],["довольн",-13.757269859313965],["▁mahalle",-13.757269859313965],["ցան",-13.75727081298828],["▁tingene",-13.757278442382812],["律師",-13.75729751586914],["▁osad",-13.757305145263672],["▁መንፈስ",-13.75730800628662],["▁جنسي",-13.757320404052734],["▁Hochschule",-13.757326126098633],["▁Shqiptare",-13.757383346557615],["ให้เลือก",-13.757397651672363],["▁Cham",-13.757400512695312],["lektor",-13.757401466369627],["ტანი",-13.757417678833008],["▁зид",-13.757427215576172],["საფ",-13.75743007659912],["ESE",-13.75746250152588],["శారు",-13.75746250152588],["ობაზე",-13.757474899291992],["দো",-13.757484436035156],["▁ישראלי",-13.757489204406738],["▁јаде",-13.75749969482422],["изол",-13.757500648498535],["▁teño",-13.757515907287598],["Ча",-13.757529258728027],["▁சொல்வ",-13.757543563842772],["▁esok",-13.757582664489746],["▁жары",-13.75760269165039],["▁значаја",-13.75761604309082],["▁Velo",-13.757624626159668],["лц",-13.757627487182615],["▁ստեղծագործ",-13.7576322555542],["দর",-13.757638931274414],["حفاظ",-13.757641792297363],["▁lenda",-13.757678985595703],["▁observe",-13.757680892944336],["▁شادي",-13.7576904296875],["▁verão",-13.757696151733398],["▁mjeri",-13.757709503173828],["▁기분",-13.75771427154541],["各方面",-13.75772476196289],["▁tranquilo",-13.757744789123535],["▁barış",-13.757760047912598],["thathu",-13.757771492004396],["▁അതേ",-13.757783889770508],["issimus",-13.757789611816406],["▁potpis",-13.757804870605469],["योः",-13.757813453674316],["▁ማርያም",-13.757817268371582],["▁achei",-13.757823944091797],["ວິຊາ",-13.757827758789062],["nuť",-13.75783348083496],["▁പഴ",-13.757851600646973],["chód",-13.757857322692873],["४३",-13.757885932922363],["罚",-13.757890701293944],["เปล่า",-13.757914543151855],["怎",-13.75792121887207],["ONDO",-13.757936477661133],["權益",-13.757939338684082],["orienta",-13.757944107055664],["アドバイス",-13.757948875427246],["▁tableau",-13.757953643798828],["ദൃശ്യ",-13.757954597473145],["▁Pesquisa",-13.757954597473145],["▁hevdîtin",-13.757954597473145],["▁tilsvarende",-13.757954597473145],["▁warunków",-13.757954597473145],["▁Југославије",-13.757954597473145],["▁някаква",-13.757954597473145],["▁однієї",-13.757954597473145],["▁հրավիր",-13.757954597473145],["▁להביא",-13.757954597473145],["▁מאנטאג",-13.757954597473145],["▁دونالد",-13.757954597473145],["▁گۈزەل",-13.757954597473145],["▁ਅਦਾਲਤ",-13.757954597473145],["▁විශ්වවිද්",-13.757954597473145],["▁አድራሻ",-13.757954597473145],["▁繼續閱讀",-13.757954597473145],["▁Vždy",-13.75795555114746],["▁eksploat",-13.75795555114746],["▁exatamente",-13.75795555114746],["▁nxënës",-13.75795555114746],["▁volwassen",-13.75795555114746],["▁дівчат",-13.75795555114746],["▁предсједник",-13.75795555114746],["▁Aşağı",-13.75795841217041],["▁nedēļas",-13.757959365844728],["느냐",-13.757959365844728],["คุ้มค่า",-13.75796127319336],["တဲ႕",-13.757962226867676],["▁ταχυδρομείου",-13.757962226867676],["▁мурдагы",-13.757962226867676],["▁접근",-13.757965087890623],["▁بېرته",-13.75796604156494],["ေမာ္",-13.757966995239258],["▁асуулт",-13.75796890258789],["▁برگشت",-13.757970809936523],["▁sekedar",-13.757973670959473],["▁izvēle",-13.757975578308104],["▁ይላሉ",-13.757977485656738],["▁Dry",-13.757980346679688],["▁مشترك",-13.757986068725586],["ОЖ",-13.75798797607422],["▁Sønder",-13.75798797607422],["▁önüne",-13.75798797607422],["▁המה",-1
3.757989883422852],["▁thập",-13.757990837097168],["эры",-13.757997512817385],["▁multumesc",-13.7579984664917],["▁વાંચો",-13.758002281188965],["▁înregistrat",-13.758007049560549],["▁Clu",-13.758009910583496],["▁לקנות",-13.758010864257812],["▁Alemania",-13.758017539978027],["▁የተደረገ",-13.75802230834961],["▁къщи",-13.758026123046877],["▁ਖੇਤਰ",-13.758034706115724],["เครื่องดื่ม",-13.75803565979004],["▁diretamente",-13.758036613464355],["▁لگتا",-13.758045196533203],["▁Daire",-13.758055686950684],["ໃຫ້ແກ່",-13.758056640625],["▁peduli",-13.758057594299316],["െഴുത",-13.75805950164795],["cosa",-13.758060455322266],["▁الكمبيوتر",-13.758071899414062],["▁ടീമ",-13.758086204528809],["เข้าชม",-13.758089065551758],["▁cooperativa",-13.758098602294922],["▁komencis",-13.758098602294922],["▁üretici",-13.758102416992188],["▁shekull",-13.758108139038086],["НИХ",-13.758113861083984],["▁القديمة",-13.7581148147583],["ငါ",-13.758132934570312],["▁Madonna",-13.758142471313477],["ডা",-13.758160591125488],["▁තියෙනව",-13.758160591125488],["▁kahve",-13.758164405822754],["▁scump",-13.758172035217283],["▁stały",-13.758176803588867],["తున్న",-13.758184432983398],["▁דארט",-13.75819206237793],["▁gratuitamente",-13.758196830749512],["ակերպ",-13.758217811584473],["▁хос",-13.758217811584473],["▁syystä",-13.758224487304688],["▁خواهم",-13.758224487304688],["▁القادمة",-13.758240699768066],["▁pierwszego",-13.758291244506836],["▁оруулж",-13.758304595947266],["▁dommage",-13.758350372314451],["▁lohnt",-13.758356094360352],["நகர்",-13.758371353149414],["תגובות",-13.758378982543944],["▁سخنان",-13.758378982543944],["▁merujuk",-13.758389472961426],["ывал",-13.75840187072754],["معرفة",-13.758407592773438],["▁вреда",-13.758411407470703],["aquestes",-13.75841236114502],["▁krizi",-13.758413314819336],["▁shalat",-13.758421897888184],["▁доходов",-13.75842571258545],["▁vacanta",-13.758432388305664],["▁üzv",-13.758432388305664],["▁Tenho",-13.75843906402588],["美麗的",-13.758448600769045],["рый",-13.75844955444336],["▁ڪيون",-13.758451461791992],["כמו",-13.75845718383789],["waji",-13.75847625732422],["▁барилга",-13.758509635925291],["▁giren",-13.758522987365724],["▁pasang",-13.758537292480469],["वाच",-13.758543014526367],["▁aylık",-13.758546829223633],["▁venera",-13.758585929870604],["▁jūt",-13.758587837219238],["▁naroči",-13.758593559265137],["eindruck",-13.758602142333984],["▁titan",-13.758604049682615],["Гол",-13.758606910705566],["▁staré",-13.758620262145996],["გადა",-13.758621215820312],["▁Dallas",-13.758628845214844],["▁študentov",-13.75864601135254],["▁spelers",-13.758657455444336],["▁1024",-13.758665084838867],["▁بدین",-13.758676528930664],["▁Jakie",-13.758696556091309],["κτική",-13.758729934692385],["▁policije",-13.7587308883667],["▁polítics",-13.758734703063965],["توصل",-13.758740425109863],["▁ئاساسى",-13.758761405944824],["▁pompa",-13.758795738220217],["▁tornare",-13.758808135986328],["▁rapporti",-13.758809089660645],["▁voinut",-13.758822441101074],["▁bolet",-13.75882625579834],["▁poroz",-13.75884246826172],["koht",-13.758852005004885],["▁kuud",-13.758874893188477],["шум",-13.758881568908691],["▁fique",-13.758882522583008],["ବେଳେ",-13.758889198303224],["▁కాక",-13.758889198303224],["భావ",-13.758891105651855],["áilte",-13.758913040161133],["▁պատմություն",-13.758913040161133],["▁തൊട്ട",-13.758926391601562],["▁kannatta",-13.758933067321776],["▁थांब",-13.758938789367676],["atorul",-13.758944511413574],["▁chráni",-13.75894832611084],["▁Люд",-13.758956909179688],["▁ايڊ",-13.758956909179688],["▁помисли",-13.75896453857422],["▁riesco",-13.7589654922485
35],["▁pakistan",-13.758984565734863],["▁9.30",-13.758996963500977],["▁stefnu",-13.758997917175291],["Полит",-13.759014129638672],["▁후기",-13.75901699066162],["ነገረ",-13.75902271270752],["▁неделя",-13.759025573730469],["这段",-13.75904369354248],["हरे",-13.759045600891112],["▁SPO",-13.759078025817873],["▁видови",-13.759078979492188],["mbaran",-13.759087562561035],["▁средата",-13.759087562561035],["▁успел",-13.75910186767578],["▁Filed",-13.759136199951172],["▁нивото",-13.759140968322754],["خلو",-13.759157180786133],["▁shaqo",-13.759174346923828],["▁skutočne",-13.759175300598145],["▁Tlf",-13.759196281433104],["▁miały",-13.759202003479004],["▁Vaz",-13.759209632873535],["einket",-13.759215354919434],["▁பிரா",-13.759222030639648],["▁μουσικ",-13.759238243103027],["▁считаю",-13.759242057800291],["▁budući",-13.759243965148926],["▁europa",-13.759272575378418],["▁kategoriji",-13.759294509887695],["▁Lisesi",-13.75930404663086],["▁картина",-13.759305000305176],["▁consultation",-13.759349822998049],["▁kjenne",-13.759360313415527],["alueen",-13.759364128112791],["शय",-13.75937271118164],["▁හොඳට",-13.75937557220459],["▁trivs",-13.759424209594728],["▁vaikus",-13.759428024291992],["ଚିତ୍ର",-13.759432792663574],["ээрэй",-13.759454727172852],["▁이용해",-13.759478569030762],["դու",-13.75948429107666],["▁Czar",-13.759503364562988],["קסי",-13.759539604187012],["▁сектора",-13.759554862976074],["▁ಮನೆಯಲ್ಲಿ",-13.759567260742188],["▁militær",-13.75960636138916],["▁వార్త",-13.759607315063477],["老年人",-13.75961208343506],["ефективни",-13.759614944458008],["▁hahmo",-13.759614944458008],["▁avrà",-13.75963020324707],["▁acolle",-13.759634017944336],["▁adică",-13.759644508361816],["▁այցելել",-13.759644508361816],["лыққа",-13.759649276733398],["ሥጋ",-13.759652137756348],["läis",-13.759658813476562],["قصد",-13.759662628173828],["分公司",-13.759662628173828],["▁kiribû",-13.759685516357422],["ුනේ",-13.759733200073242],["부가",-13.759742736816406],["իլի",-13.75975227355957],["ปร",-13.759754180908203],["くださいね",-13.759757041931152],["▁هجوم",-13.759757995605469],["▁colloca",-13.759807586669922],["▁professora",-13.75982666015625],["▁шинжилгээ",-13.7598295211792],["▁വിളിക്ക",-13.75985050201416],["តិច",-13.759851455688477],["▁občine",-13.759851455688477],["▁ശ്രമിച്ച",-13.759875297546388],["Ադրբեջան",-13.75987720489502],["▁Serge",-13.759881019592283],["▁Reiki",-13.759885787963867],["▁всичките",-13.759913444519045],["▁գնալ",-13.759926795959473],["▁Alarm",-13.759927749633787],["ebilmek",-13.759929656982422],["ረታ",-13.75994110107422],["isuuteen",-13.759944915771484],["▁המרכז",-13.75994873046875],["▁fituar",-13.759954452514648],["२५",-13.759971618652344],["၅၀",-13.759992599487305],["さい",-13.75999355316162],["מיקום",-13.759997367858888],["▁tuntui",-13.760015487670898],["pēc",-13.760016441345217],["▁lingüística",-13.76002025604248],["用来",-13.760031700134276],["▁odpowiednie",-13.760038375854492],["▁province",-13.76007080078125],["ಟೋ",-13.760072708129885],["▁ηλικία",-13.760082244873049],["密度",-13.760103225708008],["▁membuatnya",-13.760104179382324],["▁Udala",-13.760114669799805],["▁osud",-13.760123252868652],["▁bunda",-13.760132789611816],["▁포인트",-13.760161399841309],["▁nafta",-13.760164260864258],["Model",-13.760198593139648],["▁Einen",-13.760205268859863],["▁falë",-13.760208129882812],["▁tapasztal",-13.760231971740724],["陷",-13.76025104522705],["▁tulajdonos",-13.76025390625],["liha",-13.760271072387695],["▁sushi",-13.760271072387695],["▁Llu",-13.760296821594238],["▁нормы",-13.760296821594238],["öket",-13.760306358337402],["▁minic",-13.760311126708984],["▁п
одобро",-13.760313987731934],["순위",-13.760313987731934],["Não",-13.760320663452148],["电子商务",-13.760323524475098],["牽",-13.760330200195312],["оров",-13.760339736938477],["▁бүгін",-13.76034164428711],["ພື້ນ",-13.760348320007324],["▁وسا",-13.760353088378906],["ซอง",-13.760355949401855],["旭",-13.76035976409912],["ຄອບຄົວ",-13.760363578796388],["ព្រះសីហនុ",-13.760363578796388],["សមុទ្រ",-13.760363578796388],["▁ଆଧାର",-13.760363578796388],["▁แนวข้อสอบ",-13.760363578796388],["スーパー",-13.760363578796388],["มูลนิธิ",-13.760364532470703],["▁Febroary",-13.760364532470703],["▁Hälfte",-13.760364532470703],["▁Milletvekili",-13.760364532470703],["▁Vázquez",-13.760364532470703],["▁aģentūra",-13.760364532470703],["▁gezeigt",-13.760364532470703],["▁kecantikan",-13.760364532470703],["▁kehamilan",-13.760364532470703],["▁kvankam",-13.760364532470703],["▁obywatel",-13.760364532470703],["▁žmonėms",-13.760364532470703],["▁υπεύθυν",-13.760364532470703],["▁курулуш",-13.760364532470703],["▁надворешни",-13.760364532470703],["▁препоръчва",-13.760364532470703],["▁церемони",-13.760364532470703],["▁آلودگی",-13.760364532470703],["▁راډیو",-13.760364532470703],["▁উপলক্ষে",-13.760364532470703],["▁ყურადღება",-13.760364532470703],["▁ጋዜጣ",-13.760364532470703],["▁부동산",-13.760364532470703],["▁Gửi",-13.76036548614502],["▁ਕਾਲਜ",-13.76036548614502],["▁საფრანგეთ",-13.76036548614502],["▁ezagutu",-13.760366439819336],["▁طوفان",-13.760366439819336],["▁ئوخشاش",-13.760369300842283],["によると",-13.760369300842283],["▁අනෙක්",-13.760370254516602],["▁тәрбиелеу",-13.76037311553955],["▁vēsture",-13.760374069213867],["inātā",-13.760375022888184],["ಬಾರದು",-13.7603759765625],["ורו",-13.760376930236816],["▁تۇرۇپ",-13.760377883911133],["▁forklare",-13.760379791259766],["重复",-13.760381698608398],["▁dibangun",-13.760383605957031],["▁dispositif",-13.760384559631348],["▁najwięcej",-13.760385513305664],["▁έδρα",-13.76038646697998],["▁فإذا",-13.760390281677246],["thema",-13.760391235351562],["▁navegant",-13.760395050048828],["▁Mbeya",-13.76039695739746],["▁Swinger",-13.760397911071776],["▁Taasisi",-13.760397911071776],["▁ಅಭಿ",-13.760400772094728],["▁fühlen",-13.760405540466309],["▁రోజుల్లో",-13.760406494140623],["▁व्यक्तिगत",-13.760408401489258],["▁Pridal",-13.760416030883787],["▁vníma",-13.760416984558104],["▁thiệt",-13.760427474975586],["▁obtin",-13.760428428649902],["임을",-13.760430335998535],["▁забезпечує",-13.760435104370115],["▁Tengo",-13.76043701171875],["ുവാൻ",-13.76044464111328],["projekte",-13.760445594787598],["णारी",-13.76045036315918],["▁urgente",-13.760456085205078],["▁хуже",-13.76046371459961],["válogatott",-13.760464668273926],["▁தகவல்கள்",-13.760476112365724],["καλού",-13.760478019714355],["▁παρε",-13.76048183441162],["▁ideias",-13.76048469543457],["▁ნახვა",-13.760489463806152],["発展",-13.76049518585205],["▁Kollegen",-13.76051139831543],["አገራችን",-13.760512351989746],["▁ardhi",-13.76051425933838],["▁ажилтан",-13.76051902770996],["读者",-13.760528564453123],["▁τρί",-13.760544776916504],["▁reilu",-13.76055908203125],["▁וכך",-13.760560035705566],["▁sposobnosti",-13.760570526123049],["▁सत्र",-13.760571479797363],["▁lycka",-13.760577201843262],["▁почитува",-13.760578155517578],["▁hyväksyt",-13.760580062866213],["▁የስራ",-13.760597229003906],["▁Proti",-13.760615348815918],["▁turizma",-13.760628700256348],["▁penyu",-13.760631561279297],["▁saqlash",-13.760635375976562],["▁kapatid",-13.76064109802246],["រណ",-13.76064395904541],["▁05:00",-13.76064682006836],["▁logga",-13.760648727416992],["ခိုး",-13.760650634765623],["ЕБ",-13.76065158843994],["▁ٻيا",-13.76065635
6811523],["▁innovatie",-13.760666847229004],["▁موشک",-13.760692596435549],["▁ülkenin",-13.760697364807127],["▁разрешения",-13.760703086853027],["▁brutto",-13.760706901550291],["▁جاں",-13.76071834564209],["▁elefant",-13.76073932647705],["▁Fill",-13.760744094848633],["▁hổ",-13.760746002197266],["▁Echt",-13.760762214660645],["nejších",-13.760767936706545],["ინგი",-13.760775566101074],["közben",-13.76077938079834],["▁2023",-13.760791778564451],["ガン",-13.76081085205078],["▁јавног",-13.760815620422363],["ထပ်",-13.760819435119627],["▁אכן",-13.760819435119627],["▁Swiss",-13.760820388793944],["▁ходи",-13.760820388793944],["▁alvor",-13.76085090637207],["▁Olav",-13.76085376739502],["▁Campan",-13.760860443115234],["Бал",-13.76086711883545],["▁Τό",-13.760869979858398],["‹",-13.760886192321776],["▁Meena",-13.760887145996094],["fc",-13.76089572906494],["люд",-13.76090145111084],["▁milito",-13.76091766357422],["▁لديه",-13.76091766357422],["6,6",-13.760920524597168],["bransjen",-13.760923385620115],["▁hääle",-13.760923385620115],["▁ideig",-13.760934829711914],["마을",-13.760944366455078],["▁politică",-13.760963439941406],["▁hjir",-13.760978698730469],["▁నచ్చ",-13.760981559753418],["lassung",-13.760985374450684],["▁cewek",-13.761009216308594],["▁Emily",-13.761033058166504],["148",-13.761062622070312],["▁ಅಪ್ಪ",-13.761070251464844],["kanie",-13.761101722717283],["bumi",-13.761117935180664],["shirye",-13.761133193969728],["tvrdi",-13.761134147644045],["stora",-13.76113986968994],["▁värd",-13.76116943359375],["ύλι",-13.761212348937988],["▁pintor",-13.76132869720459],["חמים",-13.761340141296388],["โอน",-13.76134204864502],["▁orto",-13.761361122131348],["的重大",-13.761387825012209],["mulo",-13.761397361755373],["▁הקשר",-13.761457443237305],["▁kWh",-13.761459350585938],["▁πιά",-13.761467933654783],["▁životní",-13.761481285095217],["▁esik",-13.761482238769531],["WAY",-13.761486053466797],["▁situācija",-13.761503219604492],["▁पैर",-13.761507034301758],["ଗ୍ରା",-13.761514663696287],["▁estic",-13.761531829833984],["▁простору",-13.761531829833984],["▁denně",-13.761541366577148],["šeno",-13.761560440063477],["▁രസ",-13.761619567871094],["▁wünsche",-13.761652946472168],["pošto",-13.761653900146484],["ಬೇಕೆ",-13.761662483215332],["▁olemaan",-13.76166534423828],["ZIO",-13.761675834655762],["▁acabe",-13.761682510375977],["▁главен",-13.761699676513672],["▁plný",-13.76171875],["חלוק",-13.76173496246338],["bój",-13.761737823486328],["▁prvej",-13.761749267578123],["Smart",-13.761754035949709],["▁lassan",-13.761757850646973],["▁стратегии",-13.761762619018556],["achtig",-13.761767387390137],["▁budžeta",-13.76176929473877],["▁шег",-13.7617769241333],["▁Света",-13.761786460876465],["Hub",-13.761804580688477],["መንግስት",-13.76181411743164],["▁পারবেন",-13.76181411743164],["▁medica",-13.76181983947754],["▁Estate",-13.76182746887207],["▁fonduri",-13.761836051940918],["ນໍາໃຊ້",-13.761848449707031],["▁nastupa",-13.761860847473145],["▁wyciąg",-13.761883735656738],["νγκ",-13.7619047164917],["▁пројекат",-13.761913299560549],["▁cito",-13.761924743652344],["▁Utan",-13.761929512023926],["▁geweldig",-13.76196575164795],["討",-13.76198387145996],["▁інвест",-13.761985778808594],["tråd",-13.762005805969238],["realiz",-13.762011528015137],["ાર્થ",-13.762078285217283],["▁Popol",-13.762083053588867],["보증",-13.76210117340088],["▁claims",-13.762103080749512],["▁завршен",-13.7621431350708],["▁completed",-13.76215362548828],["YAT",-13.762195587158203],["حين",-13.762224197387695],["izmin",-13.76225757598877],["▁gyártó",-13.762279510498049],["▁spending",-13.762287139
892578],["Все",-13.762309074401855],["▁smoothie",-13.762313842773438],["보는",-13.762325286865234],["▁nêu",-13.762331008911133],["少了",-13.762334823608398],["▁կանգ",-13.762368202209473],["▁imád",-13.762369155883787],["ਕਤ",-13.762373924255373],["ghadh",-13.762393951416016],["દો",-13.762393951416016],["▁Экономика",-13.76239776611328],["appoint",-13.76240348815918],["▁254",-13.762408256530762],["▁preveri",-13.762415885925291],["skinn",-13.76241970062256],["▁ragihandin",-13.762421607971191],["▁बंगाल",-13.762434005737305],["african",-13.76243782043457],["▁крови",-13.762442588806152],["▁gemensam",-13.762465476989746],["▁иднина",-13.762469291687012],["ಳ್ಳಿ",-13.76250457763672],["▁oproti",-13.7625093460083],["émotion",-13.762578010559082],["ంటారు",-13.762627601623535],["▁разговора",-13.762638092041016],["▁kuuluvat",-13.762643814086914],["的成功",-13.76264762878418],["рахування",-13.762648582458496],["▁محکم",-13.76265811920166],["来讲",-13.762666702270508],["中国特色社会主义",-13.762690544128418],["187",-13.762701988220217],["vrši",-13.762720108032228],["の原因",-13.762731552124023],["旁边",-13.762738227844238],["償",-13.76274585723877],["▁Wrocław",-13.762747764587402],["▁hulgas",-13.762747764587402],["▁Џон",-13.762747764587402],["കൃഷ്ണന്",-13.762754440307615],["授权",-13.762754440307615],["打了",-13.762763023376465],["▁kedvezmény",-13.762779235839844],["▁lunedì",-13.762779235839844],["▁ourselves",-13.762779235839844],["▁scríobh",-13.762779235839844],["▁waarschijnlijk",-13.762779235839844],["▁þennan",-13.762779235839844],["▁λογαριασμό",-13.762779235839844],["▁митрополит",-13.762779235839844],["▁դառնում",-13.762779235839844],["▁ברחבי",-13.762779235839844],["▁مەكتەپ",-13.762779235839844],["▁وخاصة",-13.762779235839844],["▁कोइराला",-13.762779235839844],["▁ধরনের",-13.762779235839844],["▁ပင်မစာမျက်နှာ",-13.762779235839844],["Ď",-13.76278018951416],["▁nödvändig",-13.76278018951416],["▁pēdējo",-13.76278018951416],["▁szándék",-13.76278018951416],["▁środki",-13.76278018951416],["▁ετοιμ",-13.76278018951416],["▁съхранява",-13.76278018951416],["▁அப்போது",-13.76278018951416],["▁സ്വദേശി",-13.76278018951416],["▁რამდენად",-13.76278018951416],["ওয়ার্ড",-13.762781143188477],["▁zajednica",-13.762781143188477],["▁величезн",-13.762781143188477],["▁забравя",-13.762781143188477],["▁povzroči",-13.762782096862791],["▁ဝင်ရောက်",-13.762782096862791],["▁carattere",-13.76278305053711],["▁erakutsi",-13.76278305053711],["▁nyugdíj",-13.76278305053711],["▁povijesn",-13.76278305053711],["▁рублёў",-13.76278305053711],["▁کارکردگی",-13.762784004211426],["▁nimittäin",-13.76278591156006],["ਖੇ",-13.762786865234377],["▁Телефон",-13.762786865234377],["筋肉",-13.762787818908691],["▁текстови",-13.762789726257324],["▁अधिकांश",-13.762789726257324],["▁Конгрес",-13.76279067993164],["▁Беларускай",-13.76279354095459],["▁SPF",-13.762795448303224],["▁나누",-13.762797355651855],["lisse",-13.762799263000488],["▁cinéma",-13.76280117034912],["▁त्यानंतर",-13.76280117034912],["▁έκαναν",-13.762802124023438],["▁لیڈر",-13.762802124023438],["▁beskerm",-13.762803077697754],["▁phường",-13.762803077697754],["▁Triệu",-13.762804985046388],["▁படத்தின்",-13.762807846069336],["对我们",-13.762807846069336],["▁çalışması",-13.762821197509766],["▁kilometre",-13.762825965881348],["▁menatap",-13.76283073425293],["▁العظيم",-13.762834548950195],["▁hjertet",-13.762836456298828],["▁التنمية",-13.762840270996094],["▁constitucional",-13.762847900390623],["▁novosti",-13.76284885406494],["▁طویل",-13.762856483459473],["▁အမျိုးသား",-13.762858390808104],["▁քաղաքացիական",-13.762859344482422],["▁Париз",-13.7628774642944
34],["ტში",-13.762880325317385],["▁ሴቶች",-13.762882232666016],["ศาสนา",-13.76289176940918],["▁iestādes",-13.762892723083496],["▁принятия",-13.762897491455078],["▁keçirib",-13.76290225982666],["▁finančne",-13.76291275024414],["▁אוהב",-13.762919425964355],["▁Tweet",-13.76292896270752],["toimittaja",-13.762929916381836],["funde",-13.76294231414795],["專利",-13.762944221496582],["相同的",-13.762947082519531],["ცის",-13.762953758239746],["▁imeti",-13.762958526611328],["▁yarar",-13.762968063354492],["স্টার",-13.762979507446287],["▁trafikk",-13.763026237487791],["▁հաղորդում",-13.763032913208008],["▁baleset",-13.763042449951172],["▁රථය",-13.763056755065918],["▁paštas",-13.763066291809082],["YouTube",-13.763072967529297],["▁valokuva",-13.76307487487793],["▁провал",-13.763093948364258],["▁الأمنية",-13.763115882873535],["▁exceptionnel",-13.763117790222168],["پىر",-13.763118743896484],["▁calitatea",-13.763132095336914],["▁съдържанието",-13.763132095336914],["▁HBO",-13.76315212249756],["▁요소",-13.763158798217772],["νές",-13.763174057006836],["▁nuorten",-13.763192176818848],["▁grafika",-13.763202667236328],["程度上",-13.763223648071287],["▁Stay",-13.763225555419922],["번째",-13.76323413848877],["висока",-13.7632474899292],["ګرځ",-13.763260841369627],["폭력",-13.76327419281006],["oài",-13.76327896118164],["▁terkini",-13.763279914855955],["诸多",-13.76328945159912],["сіў",-13.763290405273438],["▁Walker",-13.763294219970703],["▁værd",-13.763304710388184],["▁arquitectura",-13.76332664489746],["▁idéz",-13.763327598571776],["बान",-13.763334274291992],["▁בעבר",-13.76333713531494],["▁juhli",-13.76333999633789],["▁الطريقة",-13.763348579406738],["▁Magna",-13.763381958007812],["ნდი",-13.763382911682127],["▁τραγ",-13.763386726379396],["▁چنان",-13.76339054107666],["▁Freiheit",-13.76339626312256],["▁anlayışı",-13.76341438293457],["▁integrado",-13.763418197631836],["▁barruan",-13.763421058654783],["▁দিল",-13.763421058654783],["▁BES",-13.763425827026367],["▁راهکار",-13.763434410095217],["遅",-13.76343822479248],["▁kādas",-13.76344394683838],["▁በወ",-13.763446807861328],["▁ਅਪ",-13.763447761535645],["神經",-13.763493537902832],["▁חובה",-13.763494491577148],["▁Donna",-13.763504981994627],["▁færre",-13.763520240783691],["▁estancia",-13.763544082641602],["▁potrai",-13.7635498046875],["▁begann",-13.763550758361816],["▁написа",-13.763555526733398],["▁gCo",-13.763561248779297],["▁എന്താണ്",-13.76356601715088],["▁Brenn",-13.763587951660156],["郑州",-13.763601303100586],["▁korras",-13.7636079788208],["▁strade",-13.763615608215332],["▁gatal",-13.763616561889648],["▁şiir",-13.763636589050291],["私たちは",-13.763641357421877],["▁udělal",-13.763643264770508],["इए",-13.76366138458252],["และมี",-13.763683319091797],["▁préféré",-13.76368808746338],["▁margar",-13.76369285583496],["व्हा",-13.763697624206545],["manis",-13.76369857788086],["вођење",-13.763700485229492],["▁ඉවර",-13.763736724853516],["ίνι",-13.763748168945312],["ოკ",-13.763757705688477],["infektion",-13.763758659362791],["wyneb",-13.76376247406006],["▁მაღალ",-13.763764381408691],["▁койду",-13.763765335083008],["דעה",-13.763766288757324],["▁iclası",-13.763774871826172],["▁intervenir",-13.76379680633545],["▁idzie",-13.76380729675293],["▁постао",-13.76381778717041],["ЙТ",-13.763848304748535],["Bos",-13.763853073120115],["発行",-13.763854026794434],["ពីរ",-13.763875961303713],["▁سلو",-13.76388454437256],["▁Дер",-13.763887405395508],["νοια",-13.763890266418455],["带动",-13.763908386230469],["prowadzi",-13.763931274414062],["▁Порт",-13.763934135437012],["的內容",-13.763935089111328],["▁вълн",-13.76395034790039],["c
ijske",-13.763958930969238],["ურთ",-13.764010429382324],["ในราคา",-13.764019966125488],["▁Maza",-13.764034271240234],["▁neuz",-13.764052391052246],["▁Мекен",-13.76405906677246],["▁WIB",-13.7641019821167],["ියන්",-13.764108657836914],["続いて",-13.76410961151123],["▁zaključi",-13.764117240905762],["brow",-13.76414680480957],["कर्त",-13.76415729522705],["▁студентите",-13.764177322387695],["▁마치",-13.764180183410645],["▁gabinet",-13.764209747314451],["▁בשר",-13.764220237731934],["しま",-13.764226913452148],["▁assistenza",-13.764248847961426],["ੈਕਟ",-13.764318466186523],["▁Размер",-13.764331817626951],["EMBA",-13.76433277130127],["ପର୍",-13.764334678649902],["▁hårdt",-13.764334678649902],["▁оронд",-13.764344215393066],["▁kişilik",-13.76435089111328],["beke",-13.764382362365724],["的意见",-13.764387130737305],["▁Tare",-13.76440143585205],["▁Տեր",-13.764432907104492],["▁científico",-13.764446258544922],["stedt",-13.764456748962402],["了几",-13.764463424682615],["▁ஆப்",-13.764467239379885],["morgen",-13.764471054077148],["▁Într",-13.764482498168944],["pře",-13.764493942260742],["▁komitet",-13.76450252532959],["的身体",-13.764506340026855],["▁obiectiv",-13.764530181884766],["▁mirin",-13.764535903930664],["Class",-13.76453971862793],["▁zakaz",-13.764543533325195],["būvē",-13.764562606811523],["ωθούν",-13.764568328857422],["▁modulo",-13.764594078063965],["تني",-13.764601707458496],["▁zarządza",-13.764608383178713],["▁reeks",-13.764613151550291],["ഷ്ഠ",-13.764633178710938],["▁absolutt",-13.764638900756836],["▁опрос",-13.764643669128418],["Ģ",-13.76466178894043],["analytic",-13.76467990875244],["▁موڪلي",-13.76468563079834],["▁curata",-13.76469612121582],["ピン",-13.764702796936035],["▁Theodor",-13.76471996307373],["▁Fabrik",-13.764725685119627],["▁Ballkan",-13.764726638793944],["ராஜ",-13.76475715637207],["șu",-13.764765739440918],["▁ulaştı",-13.764766693115234],["syyttä",-13.764772415161133],["▁શુ",-13.764779090881348],["▁Dwi",-13.76478385925293],["▁ניתוח",-13.764796257019045],["前期",-13.76480197906494],["Ազատ",-13.764815330505373],["ziel",-13.76486110687256],["▁hastalığı",-13.764866828918455],["▁аппараты",-13.764875411987305],["amaha",-13.764890670776367],["LÉ",-13.764910697937012],["▁BIG",-13.76491641998291],["不易",-13.76492404937744],["▁Schweizer",-13.764925956726074],["不錯的",-13.765007972717283],["ићи",-13.765013694763184],["▁דברי",-13.765031814575195],["ตูด",-13.76504611968994],["▁hring",-13.76511287689209],["▁Молдо",-13.765145301818848],["斤",-13.765151023864746],["məyin",-13.76516056060791],["檢測",-13.765170097351074],["承諾",-13.76517105102539],["寻求",-13.765185356140137],["牠",-13.765185356140137],["そもそも",-13.76519012451172],["▁Blade",-13.765192031860352],["▁duzue",-13.76519775390625],["▁വീട്ട",-13.765198707580566],["▁інтэрнэт",-13.765199661254885],["୤",-13.7652006149292],["▁Hiệp",-13.7652006149292],["▁Schmidt",-13.7652006149292],["▁abgeschlossen",-13.7652006149292],["▁compañeiro",-13.7652006149292],["▁croissance",-13.7652006149292],["▁ožujka",-13.7652006149292],["▁pentadbiran",-13.7652006149292],["▁réflexion",-13.7652006149292],["▁Зокрема",-13.7652006149292],["▁հունվարի",-13.7652006149292],["▁דינסטאג",-13.7652006149292],["▁مضامین",-13.7652006149292],["▁इतनी",-13.7652006149292],["▁মোহাম্মদ",-13.7652006149292],["▁සිල්වා",-13.7652006149292],["▁ერთმანეთს",-13.7652006149292],["▁ვარსკვლავ",-13.7652006149292],["▁열심히",-13.7652006149292],["▁Erlebnis",-13.765201568603516],["▁mémoire",-13.765201568603516],["▁претставници",-13.765201568603516],["▁этгээд",-13.765201568603516],["▁சேர்த்து",-13.765202522277832],["แป้ง",-13.765203475
952148],["▁apaixona",-13.765203475952148],["▁Splošn",-13.76520538330078],["▁اندیشه",-13.76520538330078],["▁движи",-13.765206336975098],["▁granul",-13.76520824432373],["▁díreach",-13.765212059020996],["▁христиан",-13.765213012695312],["▁ټاکل",-13.765213966369627],["▁Jona",-13.765214920043944],["▁seminggu",-13.765218734741213],["▁cumpăra",-13.765219688415527],["ပြောင်း",-13.765222549438477],["▁ادارہ",-13.765225410461426],["▁ikusteko",-13.76522731781006],["▁хватает",-13.765229225158691],["▁belast",-13.765238761901855],["▁చేయాలని",-13.76524257659912],["יצירה",-13.765243530273438],["▁ychwanegol",-13.765243530273438],["▁सम्झ",-13.765249252319336],["▁täpselt",-13.765250205993652],["▁Novruz",-13.765251159667969],["▁islahatlar",-13.765252113342283],["▁lambat",-13.765252113342283],["ОХ",-13.765259742736816],["▁atraktiv",-13.76526165008545],["▁찾기",-13.76527214050293],["fahrer",-13.765274047851562],["▁length",-13.765277862548828],["▁Chiến",-13.765280723571776],["▁231",-13.765281677246094],["សុទ្ធ",-13.765287399291992],["▁conmemora",-13.765297889709473],["▁Tasarım",-13.765304565429688],["digital",-13.76530933380127],["▁протести",-13.765317916870115],["▁боловсролын",-13.765318870544434],["▁умре",-13.765318870544434],["▁rexistrada",-13.765325546264648],["ツアー",-13.765336990356444],["▁mélange",-13.765347480773926],["▁првом",-13.765355110168455],["▁Çevre",-13.765360832214355],["mmaksi",-13.765375137329102],["▁megold",-13.76537799835205],["▁locaux",-13.765382766723633],["▁ஆசி",-13.765396118164062],["碰到",-13.765396118164062],["▁يعيش",-13.765416145324709],["▁bəyanat",-13.76543426513672],["▁invoca",-13.765438079833984],["▁Concord",-13.765446662902832],["原油",-13.765447616577148],["େନ୍ଦ୍ର",-13.765457153320312],["၀တ္",-13.765458106994627],["▁вътрешни",-13.765462875366213],["▁láttam",-13.765467643737791],["▁tsarin",-13.765467643737791],["발전",-13.765474319458008],["කුරු",-13.765480995178224],["годдзя",-13.765485763549805],["αρά",-13.76548671722412],["ઝન",-13.765494346618652],["▁sangre",-13.765512466430664],["▁ખુશ",-13.765524864196776],["▁കാണുക",-13.765543937683104],["▁bravo",-13.765554428100586],["အုပ္",-13.765555381774902],["▁Mərkəzinin",-13.76555633544922],["▁говорится",-13.765567779541016],["▁sobrang",-13.765568733215332],["相關的",-13.765572547912598],["үлдү",-13.765596389770508],["▁तहमा",-13.765597343444824],["▁म्ह",-13.765607833862305],["▁քաղաքում",-13.76561164855957],["ίσεις",-13.765623092651367],["▁ezarri",-13.765631675720217],["穿着",-13.765642166137695],["5-1",-13.765650749206545],["▁കരുതി",-13.765670776367188],["▁geleverd",-13.765676498413086],["▁acompañado",-13.765684127807615],["▁sånt",-13.765697479248049],["▁חדרים",-13.765698432922363],["కోసం",-13.765702247619627],["플라",-13.765703201293944],["▁बनाएको",-13.76572608947754],["▁Concurs",-13.76574993133545],["▁demora",-13.765756607055664],["fehér",-13.765758514404297],["риан",-13.765758514404297],["▁någonsin",-13.76576328277588],["▁písomn",-13.765777587890623],["▁Kolla",-13.765802383422852],["▁Lương",-13.765829086303713],["نعم",-13.76584529876709],["▁Section",-13.765877723693848],["▁අවි",-13.765889167785645],["טינג",-13.765893936157228],["პოზი",-13.765893936157228],["▁буквально",-13.765893936157228],["енерго",-13.76590347290039],["ţul",-13.765911102294922],["kalenteri",-13.765918731689451],["▁қазан",-13.765925407409668],["ቅርብ",-13.765929222106934],["▁Miranda",-13.765942573547363],["▁kolejnych",-13.765949249267578],["▁მთა",-13.765966415405272],["OON",-13.765973091125488],["▁output",-13.765978813171388],["▁døds",-13.765981674194336],["yrer",-13.765982627868652],["▁
១៩",-13.765993118286133],["自ら",-13.765993118286133],["ລາດ",-13.766008377075195],["▁Ее",-13.766043663024902],["ៀន",-13.766048431396484],["љо",-13.766063690185549],["▁দেয়",-13.766073226928713],["▁celá",-13.766077041625977],["ničky",-13.76608180999756],["جاه",-13.766122817993164],["▁έσ",-13.766157150268556],["▁kartı",-13.76616096496582],["તરી",-13.766162872314451],["▁tomada",-13.76616382598877],["▁ўлады",-13.76616382598877],["▁ülkemiz",-13.766221046447754],["▁структуру",-13.766254425048828],["Space",-13.76626205444336],["язы",-13.76626682281494],["חשוב",-13.766276359558104],["מפעל",-13.76628303527832],["박스",-13.76632022857666],["▁awali",-13.766350746154783],["▁POLI",-13.766353607177734],["▁Ministero",-13.766366004943848],["ئے۔",-13.766372680664062],["▁өнімдері",-13.766434669494627],["ΙΚΟ",-13.76646327972412],["▁Teater",-13.7664794921875],["▁рассмотрен",-13.76649284362793],["体会",-13.76650619506836],["世界上最",-13.766529083251951],["ғанда",-13.766533851623535],["▁ಆತನ",-13.766546249389648],["CAL",-13.766547203063965],["▁لکڻ",-13.766554832458496],["▁Tira",-13.766561508178713],["▁Rodin",-13.766562461853027],["▁hazard",-13.766571044921877],["▁SEP",-13.766578674316406],["▁nantinya",-13.76659870147705],["▁федерал",-13.766602516174316],["▁trail",-13.76660442352295],["හන",-13.766607284545898],["▁першу",-13.766608238220217],["▁Strik",-13.76662254333496],["య్యా",-13.76662540435791],["▁tisto",-13.766639709472656],["blogger",-13.766646385192873],["ŞI",-13.766657829284668],["▁βρει",-13.766672134399414],["▁spilleautomat",-13.766701698303224],["ეთა",-13.766719818115234],["关税",-13.7667236328125],["▁রান",-13.766735076904297],["آباد",-13.766740798950195],["TIVA",-13.766759872436523],["▁rutina",-13.766765594482422],["сака",-13.766793251037598],["▁tähti",-13.76682949066162],["▁አካ",-13.766831398010254],["▁ਲਗ",-13.766836166381836],["stavljanje",-13.766860008239746],["keena",-13.766863822937012],["ው፣",-13.766879081726074],["ຫຼັງ",-13.766901969909668],["▁akeh",-13.766902923583984],["روق",-13.766926765441896],["▁बेल",-13.766931533813477],["▁شاب",-13.76693344116211],["myndighet",-13.766948699951172],["▁ብለን",-13.766959190368652],["arkiv",-13.76696491241455],["▁Nge",-13.767005920410156],["▁bekomme",-13.767016410827637],["▁Extern",-13.767049789428713],["▁jogar",-13.767050743103027],["იშვილის",-13.76705837249756],["อยากจะ",-13.767067909240724],["▁sobat",-13.76706886291504],["▁Strategie",-13.767072677612305],["vloei",-13.767074584960938],["ዛሬ",-13.767084121704102],["▁malakas",-13.76710319519043],["จะถูก",-13.767112731933594],["▁azaroa",-13.767123222351074],["नन्द",-13.76712417602539],["▁прадстаў",-13.767125129699709],["▁subjek",-13.767139434814451],["▁کہنے",-13.767139434814451],["ssakin",-13.767149925231934],["▁Esco",-13.767168998718262],["▁rescat",-13.767172813415527],["▁필요하다",-13.767176628112791],["▁оқиға",-13.767179489135742],["मंद",-13.76718521118164],["▁pastoral",-13.767189979553224],["سكو",-13.767193794250488],["beurt",-13.767202377319336],["▁aansluit",-13.767218589782717],["▁svolge",-13.767245292663574],["▁Igual",-13.767247200012209],["동안",-13.767251014709473],["▁aktuali",-13.767253875732422],["▁напротив",-13.767257690429688],["bež",-13.767304420471191],["▁vzniku",-13.767308235168455],["▁Lazar",-13.767352104187012],["ztea",-13.767363548278809],["čce",-13.767383575439451],["ণা",-13.767383575439451],["▁tarkibi",-13.767385482788086],["▁גאר",-13.767386436462402],["ENO",-13.767390251159668],["จะใช้",-13.767391204833984],["▁Statens",-13.767400741577148],["▁zdravstvene",-13.76740837097168],["▁учур",-13.767419815063477],["îkê",-
13.767426490783691],["служебн",-13.76744556427002],["▁fortsatte",-13.767457008361816],["▁remedyo",-13.767463684082031],["▁Batt",-13.767470359802246],["▁contidos",-13.767497062683104],["されない",-13.767504692077637],["▁ulykke",-13.767507553100586],["悬",-13.767529487609863],["▁ସମ୍ପର୍କ",-13.767539024353027],["喬",-13.767548561096191],["ប្រជុំ",-13.767580032348633],["赶紧",-13.76760196685791],["銘",-13.767603874206545],["スキル",-13.767606735229492],["幸运",-13.767606735229492],["▁долги",-13.76760959625244],["プロジェクト",-13.76760959625244],["ปรึกษา",-13.767622947692873],["ឆ្នោត",-13.76762580871582],["ทักษะ",-13.767626762390137],["▁Egészség",-13.767627716064451],["▁Mawasiliano",-13.767627716064451],["▁memperhatikan",-13.767627716064451],["▁stjörnu",-13.767627716064451],["▁článků",-13.767627716064451],["▁φιλοξεν",-13.767627716064451],["▁сегодняшний",-13.767627716064451],["▁соответственно",-13.767627716064451],["▁Կայքի",-13.767627716064451],["▁կրթության",-13.767627716064451],["▁مەلۇم",-13.767627716064451],["▁छत्तीसगढ़",-13.767627716064451],["▁পুরুষ",-13.767627716064451],["▁ಬೆಳಗಾವಿ",-13.767627716064451],["▁ಸಾಕಷ್ಟು",-13.767627716064451],["▁ഭീകര",-13.767627716064451],["▁දිස්ත්",-13.767627716064451],["▁ინფორმაციით",-13.767627716064451],["▁օրենսդր",-13.76762866973877],["▁नेटवर्क",-13.76762866973877],["▁કિંમત",-13.76762866973877],["гістарычны",-13.767629623413086],["▁வீரர்",-13.767629623413086],["▁სეზონი",-13.767629623413086],["թանգարան",-13.767630577087402],["▁فیزیک",-13.767630577087402],["▁Zeichen",-13.767632484436035],["▁zobowiąza",-13.767632484436035],["▁پرچم",-13.767632484436035],["▁inscrición",-13.767633438110352],["▁Suomalais",-13.767634391784668],["▁pobijedi",-13.767634391784668],["▁২০১৫",-13.767634391784668],["▁Lowongan",-13.7676362991333],["▁назву",-13.767637252807615],["▁हुनुपर्छ",-13.767638206481934],["▁ұғым",-13.76763916015625],["▁کنکور",-13.7676420211792],["▁rispetta",-13.767642974853516],["▁täältä",-13.767643928527832],["เซลล์",-13.76764678955078],["▁кыска",-13.76764678955078],["▁nawawala",-13.767647743225098],["▁kerralla",-13.767648696899414],["▁aukščiau",-13.767651557922363],["▁שצריך",-13.76765251159668],["▁കൊച്ചു",-13.76765251159668],["▁OnePlus",-13.767654418945312],["▁phức",-13.767654418945312],["▁berpirs",-13.767658233642578],["▁зориулсан",-13.767661094665527],["▁Метод",-13.767663955688477],["▁Leonard",-13.767666816711426],["▁gwrth",-13.76766872406006],["▁дію",-13.76766872406006],["▁తనకు",-13.76766872406006],["▁كنا",-13.767669677734377],["▁ارکان",-13.767671585083008],["▁విమాన",-13.767675399780272],["▁xafiis",-13.76767635345459],["▁Можна",-13.76767635345459],["▁Zeitraum",-13.76767921447754],["轉換",-13.76768398284912],["BOT",-13.76768970489502],["פייה",-13.767690658569336],["แม่น้ํา",-13.767691612243652],["қимыл",-13.767693519592283],["▁کشت",-13.767712593078612],["рылып",-13.767720222473145],["▁Мені",-13.767723083496094],["决心",-13.767730712890623],["원은",-13.767744064331056],["かかって",-13.767763137817385],["▁Komfort",-13.767773628234863],["▁земельного",-13.767776489257812],["▁ötlet",-13.767792701721191],["▁يحتوي",-13.767792701721191],["▁ארץ",-13.767812728881836],["mää",-13.767820358276367],["ološko",-13.767849922180176],["stunden",-13.767850875854492],["税收",-13.76785659790039],["▁машине",-13.767857551574709],["▁पाठक",-13.767871856689451],["रन",-13.76787281036377],["▁Subscriure",-13.76787281036377],["▁Dü",-13.767882347106934],["HIM",-13.767889022827148],["▁өч",-13.76789379119873],["disciplin",-13.767901420593262],["埃及",-13.767906188964844],["▁sälja",-13.76792812347412],["▁қою",-13.767938613891602],["▁Ande
rsson",-13.7679443359375],["ນຶ່ງ",-13.767948150634766],["▁प्रकाशने",-13.767951965332031],["▁taotle",-13.767955780029297],["жење",-13.767962455749512],["▁miteinander",-13.767962455749512],["▁cereal",-13.767983436584473],["კული",-13.767996788024902],["ਮਤ",-13.768009185791016],["▁దశ",-13.768012046813965],["▁gracz",-13.768026351928713],["▁geest",-13.768043518066406],["смислен",-13.768049240112305],["LJA",-13.76805305480957],["▁ışık",-13.768056869506836],["ගුණ",-13.76806354522705],["மலை",-13.768072128295898],["rolle",-13.768085479736328],["萬人",-13.768086433410645],["▁Fare",-13.768096923828123],["ingresso",-13.768115043640137],["ΤΩΝ",-13.76811695098877],["▁részben",-13.768121719360352],["在网上",-13.768132209777832],["ಕಾಯಿ",-13.768134117126465],["baarheid",-13.76813507080078],["▁коллег",-13.768141746520996],["▁alimentaire",-13.768159866333008],["بيب",-13.768169403076172],["▁opnemen",-13.76817798614502],["Ақ",-13.768180847167969],["▁Anja",-13.768202781677246],["▁minuut",-13.768218994140623],["നീയ",-13.76822566986084],["გონი",-13.768226623535156],["▁öreg",-13.768244743347168],["▁Nachbar",-13.768248558044434],["▁електронни",-13.768263816833496],["planet",-13.76828384399414],["▁Karya",-13.768290519714355],["▁uměl",-13.768291473388672],["▁Хүний",-13.768322944641112],["▁punktów",-13.768324851989746],["▁Крст",-13.76834201812744],["▁रहस्य",-13.768353462219238],["▁республик",-13.768360137939451],["ၿပ",-13.768364906311035],["ाम्",-13.76841163635254],["▁такої",-13.768413543701172],["もあるので",-13.768415451049805],["ေခတ္",-13.768441200256348],["uuf",-13.768452644348145],["▁kaalu",-13.76845359802246],["▁Міжнародна",-13.768457412719728],["▁لایه",-13.768471717834473],["▁SUN",-13.768477439880373],["▁støv",-13.768484115600586],["▁Uso",-13.768492698669434],["▁морски",-13.768495559692385],["▁mõjuta",-13.768497467041016],["▁Ең",-13.768499374389648],["การเปลี่ยนแปลง",-13.768518447875977],["týr",-13.768525123596191],["informasjon",-13.768534660339355],["▁qaatay",-13.76853847503662],["▁riječima",-13.76853847503662],["▁leiab",-13.768548011779783],["▁ਰਸ",-13.76858139038086],["▁medicamente",-13.768601417541504],["▁շարունակում",-13.768606185913086],["▁కవిత",-13.768614768981934],["ливі",-13.768616676330566],["▁intensywn",-13.768619537353516],["מוקד",-13.768635749816896],["▁uğurlu",-13.768649101257324],["പാഠ",-13.76866626739502],["▁vizion",-13.768670082092283],["ÁV",-13.76869297027588],["▁saqla",-13.76870059967041],["▁vašej",-13.768733978271484],["▁προηγ",-13.76873779296875],["▁하나의",-13.768750190734863],["▁Élet",-13.768765449523926],["ಗಳಿವೆ",-13.768770217895508],["ਫੈ",-13.768786430358888],["▁שטח",-13.768790245056152],["щени",-13.76879596710205],["▁praktisch",-13.76881504058838],["▁Bedste",-13.768823623657228],["ความสุข",-13.768851280212402],["▁Бойко",-13.768858909606934],["บุรี",-13.768860816955566],["春天",-13.768874168395996],["만을",-13.768879890441896],["න්නෙ",-13.768881797790527],["▁ነበረ",-13.768884658813477],["MEL",-13.76890468597412],["▁၁၈",-13.7689208984375],["transfer",-13.76893424987793],["ріл",-13.768946647644045],["▁Pensi",-13.76895809173584],["გავს",-13.768960952758787],["▁kipu",-13.768976211547852],["▁የግል",-13.768980026245115],["observa",-13.76898193359375],["guera",-13.768982887268066],["▁төменде",-13.768991470336914],["▁ගැනීමේ",-13.769023895263672],["▁smanjen",-13.769039154052734],["界的",-13.769047737121582],["▁gestire",-13.769048690795898],["деше",-13.769057273864746],["бог",-13.769058227539062],["▁profilu",-13.769061088562012],["▁pristop",-13.769071578979492],["▁avevano",-13.769089698791504],["▁കുഞ്ഞു",-13.769096374511
72],["abhängig",-13.769119262695312],["▁dejavnost",-13.76912784576416],["하니",-13.76914405822754],["▁nascut",-13.769145011901855],["стъ",-13.76914882659912],["▁artikelen",-13.769149780273438],["的心理",-13.769161224365234],["յր",-13.769192695617676],["▁многое",-13.769203186035156],["דאר",-13.769205093383787],["รอด",-13.769213676452637],["자리",-13.76921844482422],["רוד",-13.769225120544434],["▁చెబుత",-13.769232749938965],["▁shoir",-13.769248962402344],["るような",-13.769250869750977],["承認",-13.76926612854004],["ଗେ",-13.769268989562988],["▁Trick",-13.769268989562988],["▁బాల",-13.769274711608888],["िकल",-13.769301414489746],["نگے",-13.769327163696287],["▁kuum",-13.769327163696287],["▁vpliv",-13.769338607788086],["▁حافظه",-13.76935577392578],["mbali",-13.769368171691896],["▁diyerek",-13.769371032714844],["▁beharra",-13.769381523132324],["▁מידי",-13.7694091796875],["▁закладу",-13.769434928894045],["▁penjara",-13.769444465637209],["કુમાર",-13.769450187683104],["▁بشري",-13.769461631774902],["▁showing",-13.769474983215332],["škim",-13.769493103027344],["ЕЗ",-13.769498825073242],["を考え",-13.769515991210938],["▁දිවි",-13.769530296325684],["▁인상",-13.76953125],["നാട്",-13.769549369812012],["脏",-13.769554138183594],["▁dilê",-13.769566535949709],["moedig",-13.769570350646973],["▁Jurnal",-13.769573211669922],["▁adoro",-13.769573211669922],["ЛЕД",-13.769590377807615],["时光",-13.769606590270996],["հանգստ",-13.769609451293944],["百万",-13.769611358642578],["▁خلالها",-13.769622802734377],["▁દેખ",-13.769651412963867],["manager",-13.769680976867676],["▁supliment",-13.769713401794434],["▁ролі",-13.769733428955078],["▁지급",-13.769733428955078],["▁kölcsön",-13.769745826721191],["čala",-13.769749641418455],["很快就",-13.769769668579102],["▁വെടി",-13.769770622253418],["ກິນ",-13.769795417785645],["YH",-13.769861221313477],["▁jednání",-13.769903182983398],["下一步",-13.769927024841309],["▁këngëtar",-13.769937515258787],["▁அல",-13.769942283630373],["▁fasz",-13.769965171813965],["▁తీసి",-13.770012855529783],["▁اړخ",-13.770014762878418],["иваются",-13.770015716552734],["客观",-13.770018577575684],["綿",-13.770036697387695],["▁وحدة",-13.770044326782228],["翠",-13.770044326782228],["コーヒー",-13.770052909851074],["kläd",-13.77005672454834],["ขอนแก่น",-13.770057678222656],["燃料",-13.770059585571287],["ዴሞክራሲ",-13.770061492919922],["ጋዜጠኛ",-13.770061492919922],["▁Desktop",-13.770061492919922],["▁Filozof",-13.770061492919922],["▁Giuseppe",-13.770061492919922],["▁Kodėl",-13.770061492919922],["▁Megjithatë",-13.770061492919922],["▁Shacabka",-13.770061492919922],["▁cặp",-13.770061492919922],["▁effeithio",-13.770061492919922],["▁poprvé",-13.770061492919922],["▁təşəkkür",-13.770061492919922],["▁zespołu",-13.770061492919922],["▁Незалежн",-13.770061492919922],["▁изобщо",-13.770061492919922],["▁հստակ",-13.770061492919922],["▁اشتہار",-13.770061492919922],["▁विस्तृत",-13.770061492919922],["▁గాంధీ",-13.770061492919922],["▁ഗാന്ധി",-13.770061492919922],["▁xarxes",-13.770062446594238],["▁родственник",-13.770062446594238],["▁مخلوق",-13.770062446594238],["мисъл",-13.770063400268556],["เพาะ",-13.770065307617188],["▁začíná",-13.770065307617188],["▁шығармашылық",-13.770065307617188],["▁ਸਿਰਫ",-13.770065307617188],["▁электронной",-13.770069122314451],["▁প্রকল্প",-13.770069122314451],["▁þitt",-13.77007007598877],["▁обращения",-13.770071029663086],["▁ਆਇਆ",-13.770071983337402],["▁Đánh",-13.770075798034668],["發佈",-13.7700777053833],["▁tăi",-13.770079612731934],["▁мінімальн",-13.770079612731934],["қым",-13.770082473754885],["▁링크",-13.770090103149414],["▁commencé",-13.77009391784668
],["▁إنشاء",-13.770095825195312],["▁foloseste",-13.770098686218262],["▁ለምሳሌ",-13.77010440826416],["▁ácido",-13.770106315612791],["▁պետություն",-13.77010726928711],["▁دوسرا",-13.770113945007324],["▁ვისაც",-13.770113945007324],["▁የፈ",-13.77011775970459],["þætti",-13.77012062072754],["▁افسان",-13.770126342773438],["مستشار",-13.77013874053955],["在内的",-13.770146369934082],["▁cieľ",-13.770151138305664],["ОШ",-13.770153045654297],["▁tire",-13.770153045654297],["obligatori",-13.770153999328612],["▁ثالث",-13.770164489746094],["▁açıklaması",-13.770177841186523],["▁Bodrum",-13.770197868347168],["▁sezonas",-13.770203590393066],["▁veljače",-13.770204544067385],["ボール",-13.770211219787598],["▁Oriental",-13.770217895507812],["▁Pengarah",-13.770220756530762],["▁parcel",-13.770220756530762],["▁روايت",-13.770222663879396],["▁təmiz",-13.770224571228027],["▁социального",-13.77023983001709],["teltu",-13.770244598388672],["данні",-13.770249366760254],["▁ಪಂಚ",-13.77025318145752],["169",-13.77026081085205],["čaka",-13.770265579223633],["▁Guvernul",-13.77026653289795],["▁kolesar",-13.770282745361328],["▁indici",-13.770288467407228],["▁پذیرش",-13.770297050476074],["▁impozit",-13.770303726196287],["▁Lily",-13.770326614379885],["▁במה",-13.770331382751465],["▁παρόν",-13.770342826843262],["ଚ୍ଛ",-13.770349502563477],["▁puncak",-13.770363807678224],["▁musisz",-13.77039623260498],["▁없고",-13.770398139953612],["▁දරුව",-13.770413398742676],["فيس",-13.770426750183104],["▁glemt",-13.770427703857422],["▁Көк",-13.770429611206056],["▁другата",-13.77043628692627],["Ham",-13.77043914794922],["коруп",-13.77045726776123],["▁မသိ",-13.770467758178713],["▁tecnologías",-13.770478248596191],["▁prestigi",-13.770488739013672],["▁bolji",-13.770492553710938],["맛",-13.77050495147705],["▁Норма",-13.770509719848633],["▁القسم",-13.770515441894531],["379",-13.770533561706545],["▁Celo",-13.77053451538086],["取って",-13.770543098449709],["ለን፡፡",-13.770548820495604],["▁Blind",-13.770563125610352],["ზღ",-13.770564079284668],["▁Рей",-13.77057647705078],["▁වතුර",-13.77057647705078],["മെന്ന",-13.77057933807373],["▁hidr",-13.770587921142578],["▁concorso",-13.770601272583008],["▁powoduje",-13.770601272583008],["profession",-13.77060890197754],["▁навукова",-13.770626068115234],["▁సర్వ",-13.770638465881348],["138",-13.77064323425293],["▁Флор",-13.770652770996094],["▁पाएको",-13.770658493041992],["تظاهر",-13.770697593688965],["▁Обама",-13.770703315734863],["ūti",-13.770705223083496],["之处",-13.770709991455078],["▁θέλω",-13.770739555358888],["▁ontwikkeld",-13.770750999450684],["▁íbúð",-13.770752906799316],["▁سمجهي",-13.77076244354248],["▁seguros",-13.770781517028809],["▁vendimi",-13.770781517028809],["▁знайом",-13.770784378051758],["▁ਅੱਗ",-13.770785331726074],["▁Voda",-13.770794868469238],["▁Gamot",-13.7708101272583],["▁ചൊ",-13.77081298828125],["産業",-13.770822525024414],["मैन",-13.77082347869873],["▁tuš",-13.770824432373049],["▁Nepo",-13.770844459533691],["чнага",-13.770852088928224],["▁депутатов",-13.770853996276855],["▁autism",-13.770864486694336],["ไม่ควร",-13.77087116241455],["رزق",-13.7708740234375],["פרו",-13.770893096923828],["iseren",-13.77090072631836],["עמק",-13.770941734313965],["▁словы",-13.770949363708496],["▁كلمات",-13.770954132080078],["▁изображения",-13.77095890045166],["▁хране",-13.770965576171877],["▁ръко",-13.770974159240724],["▁hladi",-13.770994186401367],["ార్",-13.77099895477295],["▁Freundin",-13.771014213562012],["▁Poti",-13.771016120910645],["צדק",-13.771025657653809],["പൂ",-13.771037101745604],["▁menghindari",-13.771063804626465],["▁cruinn",-
13.771065711975098],["▁۱۰۰",-13.771073341369627],["▁באותו",-13.771099090576172],["▁регионот",-13.771108627319336],["უძ",-13.771111488342283],["zetek",-13.771121978759766],["▁klinika",-13.771130561828612],["dögum",-13.771162033081056],["فاضل",-13.771163940429688],["▁устройств",-13.771169662475586],["▁realizando",-13.771178245544434],["ræða",-13.771180152893066],["▁گروهی",-13.771190643310549],["시키는",-13.771210670471191],["▁severe",-13.771244049072266],["дзю",-13.771245956420898],["▁ضرر",-13.771246910095217],["▁apropiat",-13.771263122558594],["OVI",-13.77127456665039],["▁dochter",-13.771284103393556],["టిక్",-13.771300315856934],["▁Administrativ",-13.771307945251465],["వచ్చ",-13.771309852600098],["▁rekommendera",-13.771334648132324],["▁வாழ்த்து",-13.771340370178224],["▁броја",-13.771357536315918],["▁prestiž",-13.771387100219728],["▁hiszem",-13.771398544311523],["149",-13.77139949798584],["punkti",-13.771418571472168],["▁угодно",-13.771437644958496],["▁ژر",-13.771441459655762],["▁попередні",-13.771444320678713],["▁catering",-13.771461486816406],["▁Ayo",-13.771474838256836],["▁baisse",-13.771492958068848],["lätt",-13.77150821685791],["ንብ",-13.771526336669922],["▁siht",-13.77153491973877],["▁facilite",-13.771552085876465],["▁rezultāti",-13.77155590057373],["கழி",-13.77157211303711],["▁instalacións",-13.77157211303711],["ມື້",-13.771574020385742],["▁capture",-13.77158546447754],["ဇီ",-13.771594047546388],["▁Рок",-13.771599769592283],["▁minutit",-13.77160930633545],["ቧ",-13.771618843078612],["▁universiti",-13.77162742614746],["▁Förder",-13.771644592285156],["主機",-13.771674156188965],["áků",-13.771695137023926],["▁этаж",-13.771698951721191],["ିର",-13.77171230316162],["▁overleve",-13.771714210510254],["本书",-13.771719932556152],["▁irgi",-13.771727561950684],["ธน",-13.771734237670898],["lateral",-13.771738052368164],["ความจริง",-13.771739959716797],["▁brang",-13.771739959716797],["没有人",-13.77174472808838],["▁textura",-13.771769523620604],["▁vyhr",-13.771773338317873],["движен",-13.771780014038086],["utul",-13.771790504455566],["▁ஒளி",-13.77180004119873],["▁wykorzysta",-13.771820068359377],["▁негиз",-13.771859169006348],["▁местно",-13.77187156677246],["▁tollen",-13.771872520446776],["vadász",-13.771879196166992],["זכה",-13.77188205718994],["▁барем",-13.77188205718994],["ここに",-13.771885871887209],["جنس",-13.771910667419434],["იზი",-13.77191162109375],["▁സിനിമാ",-13.77194595336914],["んですね",-13.771953582763672],["▁conclusione",-13.77196216583252],["දාස",-13.77198600769043],["▁Mezi",-13.771990776062012],["▁organizuar",-13.772006034851074],["ଷଣ",-13.772007942199709],["ioară",-13.772019386291504],["jith",-13.77202033996582],["▁Tudor",-13.772069931030272],["▁mantenere",-13.772083282470703],["▁Forse",-13.77209758758545],["Spa",-13.772102355957031],["▁Mise",-13.772134780883787],["▁FIL",-13.772138595581056],["5,8",-13.7721529006958],["jskem",-13.772161483764648],["▁kulo",-13.772167205810549],["mosios",-13.772177696228027],["▁helyes",-13.77217960357666],["▁कुं",-13.772188186645508],["ضعف",-13.772189140319824],["▁مسير",-13.772208213806152],["▁klim",-13.772218704223633],["ционной",-13.772222518920898],["▁закрыт",-13.772222518920898],["▁gustos",-13.77224826812744],["▁ودر",-13.77225112915039],["zoval",-13.772253036499023],["▁kruis",-13.772272109985352],["mæt",-13.772300720214844],["մեղ",-13.772321701049805],["▁شوم",-13.772327423095703],["вель",-13.772356033325195],["ولد",-13.772383689880373],["бекова",-13.772388458251951],["▁custodi",-13.772393226623535],["▁1894",-13.7723970413208],["▁spesialis",-13.7723970413208],["册
",-13.772404670715332],["▁индустрия",-13.772408485412598],["부문",-13.772414207458496],["परी",-13.772418975830078],["λυση",-13.772422790527344],["ຫາກ",-13.77243423461914],["▁zisk",-13.772440910339355],["▁देशका",-13.772456169128418],["━",-13.772473335266112],["zabal",-13.772482872009276],["brä",-13.772490501403809],["ประเด็น",-13.77249813079834],["อํานวยความสะดวก",-13.77249813079834],["เข็ม",-13.772499084472656],["ຕະຫຼາດ",-13.772499084472656],["စွမ်း",-13.772499084472656],["တွဲ",-13.772499084472656],["ƯƠNG",-13.772500038146973],["សេចក្ដី",-13.772500038146973],["▁Aktivitäten",-13.772500038146973],["▁Comercial",-13.772500038146973],["▁Ngoại",-13.772500038146973],["▁Vyriausybė",-13.772500038146973],["▁ditentukan",-13.772500038146973],["▁kërkesë",-13.772500038146973],["▁sgiliau",-13.772500038146973],["▁thầm",-13.772500038146973],["▁اللازمة",-13.772500038146973],["▁لوبډلې",-13.772500038146973],["▁प्रियंका",-13.772500038146973],["▁উপস্থিত",-13.772500038146973],["▁มกราคม",-13.772500038146973],["▁ikastetxe",-13.772500991821287],["▁πλήρη",-13.772500991821287],["▁връзки",-13.772500991821287],["▁भइसकेको",-13.772500991821287],["▁ვფიქრობ",-13.772500991821287],["▁Pedersen",-13.772501945495604],["▁Təbii",-13.772501945495604],["▁Vabariigi",-13.772501945495604],["▁kunstenaar",-13.772501945495604],["▁доларів",-13.772501945495604],["▁piegāde",-13.772502899169922],["▁училиште",-13.772504806518556],["▁गरिरहेका",-13.772504806518556],["▁semnal",-13.772505760192873],["▁համոզ",-13.772505760192873],["▁खनाल",-13.772505760192873],["▁xəstəxana",-13.772507667541504],["▁ξέρω",-13.772509574890137],["接続",-13.77251148223877],["▁1863",-13.772513389587402],["▁துணை",-13.772513389587402],["ोत्",-13.77251434326172],["▁Rûsyayê",-13.77251434326172],["tilsynet",-13.772518157958984],["▁الحلقة",-13.772521018981934],["▁esindaja",-13.77252197265625],["▁thằng",-13.772522926330566],["▁becoming",-13.772526741027832],["▁бичгийн",-13.772530555725098],["▁miniszter",-13.77253246307373],["▁Paldies",-13.772533416748049],["▁вызнача",-13.772533416748049],["高品質",-13.772533416748049],["▁Benedikt",-13.772534370422363],["ტარი",-13.772536277770996],["▁sömu",-13.772537231445312],["nature",-13.772541999816896],["▁מבחינת",-13.772541999816896],["▁ಬೈ",-13.772542953491213],["▁ابعاد",-13.772547721862791],["▁sokszor",-13.772549629211426],["ဆြဲ",-13.77255153656006],["▁رکھتا",-13.77255153656006],["▁bergantung",-13.77255630493164],["▁композитор",-13.772558212280272],["▁ಏರ್",-13.772561073303224],["▁ביצוע",-13.77256202697754],["▁користування",-13.772567749023438],["▁تمرکز",-13.772570610046388],["avour",-13.772571563720703],["▁ئۆس",-13.772581100463867],["▁quibusdam",-13.772582054138184],["▁entertainment",-13.772583961486816],["▁kuula",-13.772600173950195],["एंगे",-13.772604942321776],["▁Турција",-13.77260684967041],["▁campanie",-13.772629737854004],["زواج",-13.772637367248535],["galio",-13.772638320922852],["▁لأنها",-13.772638320922852],["madda",-13.772655487060549],["▁nemzet",-13.772665977478027],["▁أطلق",-13.772665977478027],["▁amenaza",-13.772666931152344],["▁ប្រវត្តិ",-13.772675514221191],["ിയായ",-13.772687911987305],["ghair",-13.772697448730469],["▁кездесу",-13.77270793914795],["心裡",-13.772727012634276],["larımıza",-13.77273178100586],["▁variabil",-13.77275276184082],["▁muffin",-13.772767066955566],["▁మెగా",-13.772767066955566],["▁בכתב",-13.772770881652832],["▁mendaftar",-13.772774696350098],["▁swasta",-13.772774696350098],["▁epok",-13.772782325744627],["лөрдүн",-13.77280330657959],["▁μπορείς",-13.772830963134766],["▁GÜN",-13.772834777832031],["▁υγείας",-13.77284
049987793],["μενων",-13.772844314575195],["▁rivier",-13.772846221923828],["▁Кои",-13.772851943969728],["▁Motto",-13.772882461547852],["臨床",-13.772889137268066],["▁zählt",-13.7728910446167],["Київ",-13.772915840148926],["▁foredrag",-13.772954940795898],["Азаттык",-13.772979736328123],["▁රශ්නයක්",-13.77298641204834],["▁Kaunas",-13.772987365722656],["▁turės",-13.772990226745604],["▁piatti",-13.773005485534668],["ίκα",-13.773006439208984],["tične",-13.773015975952148],["▁(43)",-13.773022651672363],["уецца",-13.773032188415527],["Tidak",-13.773041725158691],["▁утврди",-13.773045539855955],["పురం",-13.773054122924805],["волно",-13.773063659667969],["▁చెంద",-13.773072242736816],["ात्मा",-13.773076057434082],["ထင်",-13.77309513092041],["▁silla",-13.773109436035156],["▁pengurusan",-13.77313995361328],["▁wist",-13.773146629333496],["▁syke",-13.773174285888672],["るもの",-13.773194313049316],["▁javítás",-13.77319622039795],["▁erhält",-13.773197174072266],["▁Ampak",-13.77321434020996],["vnica",-13.773238182067873],["cession",-13.773250579833984],["івський",-13.773259162902832],["▁qida",-13.773265838623049],["▁სოციალური",-13.773265838623049],["▁హరి",-13.77328395843506],["رجاء",-13.773286819458008],["նախ",-13.773303985595703],["ענג",-13.773306846618652],["우는",-13.773310661315918],["ยาน",-13.773332595825195],["▁Dže",-13.773332595825195],["▁gheall",-13.77334213256836],["ЦЕН",-13.773372650146484],["ñado",-13.773378372192385],["▁बजा",-13.773382186889648],["▁esperando",-13.773399353027344],["ziņa",-13.77340602874756],["ॉक",-13.773414611816406],["ásáról",-13.773419380187988],["▁financière",-13.773425102233888],["▁ماهر",-13.773435592651367],["▁хүнийг",-13.773459434509276],["भ्र",-13.7734956741333],["▁brzy",-13.773497581481934],["▁arredor",-13.773507118225098],["▁dalšího",-13.773536682128906],["▁darček",-13.77357578277588],["משחק",-13.773606300354004],["▁Målet",-13.7736234664917],["701",-13.77363109588623],["简称",-13.773648262023926],["▁usuário",-13.77365493774414],["▁ապրում",-13.77366065979004],["▁sayti",-13.773667335510254],["中最",-13.77366828918457],["여성",-13.773669242858888],["▁чиглэл",-13.773685455322266],["▁제공하는",-13.773691177368164],["آپ",-13.77374267578125],["▁לעצטע",-13.77374267578125],["لىكىنى",-13.773749351501465],["▁උදේ",-13.77375030517578],["сіб",-13.773775100708008],["បើ",-13.773781776428224],["▁Организаци",-13.773792266845703],["يسر",-13.773797035217283],["มือสอง",-13.773799896240234],["тез",-13.77380084991455],["▁Administration",-13.773862838745115],["EJA",-13.773863792419434],["▁denove",-13.773869514465332],["▁заврше",-13.773879051208496],["usius",-13.77388858795166],["▁bancar",-13.773896217346191],["সিং",-13.773898124694824],["▁Varje",-13.773908615112305],["nameya",-13.773917198181152],["нз",-13.77394676208496],["▁جرمنی",-13.773947715759276],["ливе",-13.77394962310791],["หน้าจอ",-13.773971557617188],["روئ",-13.773978233337402],["1983",-13.773982048034668],["വിനെ",-13.773996353149414],["▁ذاتی",-13.773996353149414],["ώθηκαν",-13.77401065826416],["▁frost",-13.774027824401855],["▁préparer",-13.774028778076172],["מורים",-13.774055480957031],["னாக",-13.774065017700195],["▁Lola",-13.77408218383789],["▁älska",-13.774083137512209],["▁dibaca",-13.774090766906738],["கிறார்",-13.774107933044434],["▁различно",-13.774140357971191],["fona",-13.77414608001709],["▁basse",-13.77414608001709],["書いた",-13.77415657043457],["▁sæson",-13.77415943145752],["▁hasič",-13.774161338806152],["▁wyniku",-13.774168014526367],["从小",-13.774170875549316],["ohjelman",-13.77417278289795],["眼鏡",-13.774178504943848],["▁هى",-13.774201393127
44],["を使った",-13.774208068847656],["hlav",-13.774214744567873],["▁цэнтра",-13.77422046661377],["рать",-13.774221420288086],["440",-13.774222373962402],["▁gestionar",-13.774225234985352],["saari",-13.774230003356934],["▁siento",-13.77424430847168],["gestion",-13.774249076843262],["valvonta",-13.77425479888916],["▁sjovt",-13.774259567260742],["▁stanoven",-13.774266242980955],["শীল",-13.77426815032959],["كلف",-13.774270057678224],["▁اسرائیلی",-13.774273872375488],["த்தார்",-13.774300575256348],["кнул",-13.774328231811523],["▁harian",-13.774328231811523],["ענד",-13.774332046508787],["▁ricevi",-13.774333000183104],["▁snažil",-13.774380683898926],["ੱਬ",-13.774405479431152],["▁patio",-13.774412155151367],["ดูรายละเอียด",-13.774432182312012],["lectual",-13.774442672729492],["USTA",-13.77445125579834],["ክሮ",-13.774459838867188],["▁جهڙي",-13.774473190307615],["▁әлі",-13.7744779586792],["▁వ్యక్త",-13.77448272705078],["▁öst",-13.774511337280272],["▁knowing",-13.774513244628906],["▁దూరం",-13.774517059326172],["laşan",-13.774540901184082],["වැනි",-13.774554252624512],["තෘ",-13.774555206298828],["бэр",-13.77457332611084],["▁공항",-13.774578094482422],["▁намаз",-13.77461051940918],["▁pato",-13.774616241455078],["▁המכ",-13.774627685546877],["▁ਤਕ",-13.77463150024414],["▁чергов",-13.77463722229004],["▁депутати",-13.77464771270752],["jomis",-13.77467441558838],["▁ನಿರ್ಮಿಸ",-13.77469539642334],["ിരിക്കും",-13.774740219116213],["▁10-20",-13.774744033813477],["▁Чын",-13.774746894836426],["ითა",-13.774768829345703],["ಪ್ತ",-13.774772644042969],["▁široko",-13.77479362487793],["materiaal",-13.774809837341309],["▁शिर",-13.774819374084473],["▁Qan",-13.774825096130373],["▁vatandaş",-13.774827003479004],["പൊ",-13.7748441696167],["foglalás",-13.774847030639648],["▁gese",-13.774849891662598],["▁kodin",-13.77485466003418],["▁balık",-13.774861335754396],["▁lámh",-13.774873733520508],["▁آزادي",-13.774879455566406],["▁പട്ടിക",-13.77488613128662],["ຫນາ",-13.77489185333252],["PUS",-13.77491569519043],["კოვ",-13.774922370910645],["▁Казакстан",-13.774925231933594],["玩笑",-13.774927139282228],["▁onlardan",-13.77492904663086],["タイトル",-13.774931907653809],["とりあえず",-13.774935722351074],["▁Cəmiyyət",-13.774944305419922],["ปัญญา",-13.774945259094238],["▁Arbeitgeber",-13.774945259094238],["▁Derudover",-13.774945259094238],["▁Müəllif",-13.774945259094238],["▁descuento",-13.774945259094238],["▁förutsättningar",-13.774945259094238],["▁maščob",-13.774945259094238],["▁medžiagų",-13.774945259094238],["▁nëpërmjet",-13.774945259094238],["▁ráadásul",-13.774945259094238],["▁безкоштовно",-13.774945259094238],["▁жыцці",-13.774945259094238],["▁революция",-13.774945259094238],["▁чужбина",-13.774945259094238],["▁գրասենյակ",-13.774945259094238],["▁հուլիսի",-13.774945259094238],["▁تفسیر",-13.774945259094238],["▁فدراسیون",-13.774945259094238],["▁ڊيسڪ",-13.774945259094238],["▁ब्राऊजर",-13.774945259094238],["▁জনপ্রিয়",-13.774945259094238],["▁ଅନୁଷ୍ଠିତ",-13.774945259094238],["▁ତଦନ୍ତ",-13.774945259094238],["▁ಕಾಲೇಜು",-13.774945259094238],["▁ಸಮಾಚಾರ",-13.774945259094238],["▁ကျွန်ုပ်တို့",-13.774945259094238],["▁თარიღი",-13.774945259094238],["ေဘး",-13.774946212768556],["▁Fiecare",-13.774946212768556],["▁guarantee",-13.774946212768556],["▁મુદ્દ",-13.774946212768556],["বিরোধী",-13.774947166442873],["▁Buxoro",-13.774947166442873],["▁verskeie",-13.774947166442873],["▁ஏற்படுத்த",-13.774947166442873],["▁வெளியிட்ட",-13.774947166442873],["▁ആരാധക",-13.774947166442873],["forandring",-13.774948120117188],["▁língua",-13.774948120117188],["▁vyzerá",-13.774948120117188],["▁atvērt",
-13.774949073791504],["▁গাড়ি",-13.774949073791504],["▁můj",-13.774951934814451],["လြင္",-13.77495288848877],["▁tecnici",-13.77495288848877],["▁διάθεση",-13.77495288848877],["▁çiçek",-13.774953842163086],["▁Gutschein",-13.77495574951172],["激动",-13.77495574951172],["▁hominem",-13.774956703186035],["▁Þjóð",-13.774956703186035],["▁жағынан",-13.774958610534668],["▁didapati",-13.774959564208984],["▁yeşil",-13.7749605178833],["▁obradoiro",-13.774961471557615],["▁príliš",-13.774961471557615],["▁zdrowie",-13.774961471557615],["યાત્રા",-13.774962425231934],["▁جنبش",-13.774962425231934],["▁समाजवादी",-13.774965286254885],["▁हुँ",-13.774969100952148],["▁зорилгоор",-13.774974822998049],["▁relatório",-13.774981498718262],["▁Lucian",-13.774985313415527],["▁있던",-13.774986267089844],["▁tamaşa",-13.774991035461426],["▁Bihar",-13.774999618530272],["▁ਛੇ",-13.774999618530272],["▁ପାଖରେ",-13.774999618530272],["▁ລັບ",-13.775002479553224],["▁финансовых",-13.775005340576172],["▁chcę",-13.77501106262207],["▁beruflich",-13.775014877319336],["просто",-13.775017738342283],["▁azadlıq",-13.775032043457031],["最后一",-13.775032043457031],["▁módulo",-13.775032997131348],["▁نکنید",-13.775033950805664],["▁seguidores",-13.77503490447998],["清洗",-13.77503490447998],["▁ценности",-13.775036811828612],["տաս",-13.775038719177246],["▁communities",-13.775043487548828],["правда",-13.775050163269045],["▁रहती",-13.77505111694336],["यात",-13.77505588531494],["▁проживання",-13.775059700012209],["▁اہمیت",-13.775069236755373],["▁ದಿನದ",-13.775071144104004],["hasta",-13.77507781982422],["▁பணம்",-13.775079727172852],["▁غزل",-13.775081634521484],["▁qonaq",-13.775083541870115],["майтын",-13.775106430053713],["▁Wagner",-13.775107383728027],["▁आँखा",-13.775121688842772],["تأسيس",-13.775132179260254],["▁അപ്പോള്",-13.775132179260254],["▁opened",-13.77513313293457],["▁сиромаш",-13.77513313293457],["ეხ",-13.775137901306152],["ନାଥ",-13.775140762329102],["▁чаша",-13.775161743164062],["▁petugas",-13.775163650512695],["▁kvelden",-13.775166511535645],["▁лицу",-13.775168418884276],["▁Baum",-13.775179862976074],["▁largest",-13.77518081665039],["리스트",-13.77518367767334],["▁സത്യം",-13.775186538696287],["▁passos",-13.77521514892578],["▁значење",-13.77521514892578],["▁ድል",-13.77522087097168],["rojë",-13.77523422241211],["ЛЕН",-13.77525234222412],["▁காலம்",-13.77525234222412],["整形",-13.775254249572754],["▁отпуска",-13.775262832641602],["▁rejim",-13.775267601013184],["runner",-13.7752685546875],["▁Targ",-13.775282859802246],["▁lawr",-13.77528953552246],["▁ұш",-13.775315284729004],["▁Geral",-13.775318145751951],["МВ",-13.775324821472168],["ән",-13.775330543518066],["ළිය",-13.77534294128418],["▁სიკვდილი",-13.775343894958496],["ಗಳೂ",-13.775348663330078],["▁traditionnel",-13.775373458862305],["MEK",-13.775402069091797],["▁јануари",-13.775402069091797],["▁դեղ",-13.775416374206545],["▁ሴት",-13.775425910949709],["قيام",-13.775426864624023],["▁Ţ",-13.775443077087402],["▁כלפי",-13.77544403076172],["ဓာ",-13.775446891784668],["скими",-13.775457382202148],["חודש",-13.775458335876465],["▁compania",-13.775463104248049],["▁инфраструктура",-13.775482177734377],["၂၀၁၈",-13.77548885345459],["▁מסר",-13.775492668151855],["发起",-13.775517463684082],["▁جنگی",-13.77552318572998],["ורש",-13.77552890777588],["▁Рез",-13.775529861450195],["▁speaking",-13.775556564331056],["▁adopta",-13.775562286376951],["▁قطعه",-13.775567054748535],["▁ढि",-13.775580406188965],["▁Христо",-13.775588989257812],["شاد",-13.775594711303713],["▁أجر",-13.77560329437256],["prinz",-13.775609016418455],["▁നോക്കിയ",-13.77560
9970092772],["ЯТ",-13.775623321533203],["▁concentrat",-13.775632858276367],["▁mooiste",-13.775639533996582],["▁Compre",-13.77564811706543],["ຂອງການ",-13.775656700134276],["場景",-13.775656700134276],["ესა",-13.77565860748291],["ГІ",-13.775665283203123],["▁Pietro",-13.77566909790039],["▁Məsələn",-13.775681495666504],["▁tvinga",-13.775691986083984],["▁Obchodné",-13.775701522827148],["▁sinif",-13.775707244873049],["▁ଲାଭ",-13.77572536468506],["密碼",-13.775736808776855],["ניע",-13.775737762451172],["▁అల్లు",-13.775737762451172],["▁weapon",-13.775747299194336],["297",-13.775751113891602],["▁나서",-13.775753021240234],["▁मुह",-13.77575969696045],["▁සමර",-13.775771141052246],["性別",-13.775778770446776],["▁Shiva",-13.775786399841309],["▁жетки",-13.775816917419434],["189",-13.775838851928713],["치고",-13.775842666625977],["▁వెళ్ళి",-13.77585220336914],["▁υπηρεσία",-13.775870323181152],["▁داۋام",-13.77588939666748],["▁frutas",-13.775918960571287],["▁Irlanda",-13.77592945098877],["▁Možno",-13.775936126708984],["▁قند",-13.775946617126465],["ιστικές",-13.775949478149414],["chette",-13.775952339172363],["ותם",-13.775960922241213],["▁داخلہ",-13.77596664428711],["தில்லை",-13.775986671447754],["▁podobe",-13.776074409484863],["ないこと",-13.776082038879396],["שמע",-13.776083946228027],["မွဳ",-13.776086807250977],["କ୍ରମ",-13.77609157562256],["തയുടെ",-13.776110649108888],["▁komitə",-13.776113510131836],["▁ہند",-13.776121139526367],["▁ביקורת",-13.77612590789795],["▁Articolul",-13.776129722595217],["▁Publicado",-13.776131629943848],["авік",-13.77613925933838],["шить",-13.77617073059082],["▁dzird",-13.77617359161377],["īte",-13.776175498962402],["▁russe",-13.776256561279297],["▁frère",-13.776263236999512],["▁специалисти",-13.776268005371094],["▁працю",-13.776280403137209],["個別",-13.776286125183104],["ديس",-13.776289939880373],["▁بدست",-13.776301383972168],["Grand",-13.776317596435549],["षि",-13.776320457458496],["▁glauben",-13.776322364807127],["ируются",-13.776331901550291],["▁Rush",-13.776337623596191],["条款",-13.776359558105469],["▁ekkor",-13.77639102935791],["▁düşünme",-13.776397705078123],["μπου",-13.77640438079834],["▁Oriente",-13.776409149169922],["▁magnam",-13.776412963867188],["▁Zotit",-13.776423454284668],["▁గుర",-13.776444435119627],["▁domácnosti",-13.776445388793944],["ታሪክ",-13.776459693908691],["▁tabela",-13.776460647583008],["ravan",-13.776464462280272],["▁henki",-13.776469230651855],["▁biləcək",-13.776518821716309],["സൂ",-13.776543617248535],["▁voordeel",-13.77655029296875],["izované",-13.776571273803713],["Anti",-13.776582717895508],["▁સ્ત્રી",-13.776583671569824],["ுகிறேன்",-13.77659034729004],["шер",-13.77659511566162],["▁kunnat",-13.776644706726074],["можна",-13.77665901184082],["▁Aparat",-13.776671409606934],["▁løpe",-13.776700973510742],["zähl",-13.776702880859377],["▁conveniente",-13.776704788208008],["నికి",-13.776705741882324],["▁நடிக்க",-13.776741981506348],["არია",-13.77675724029541],["▁სამსახურის",-13.776762008666992],["хуу",-13.77676773071289],["▁बँक",-13.77676773071289],["λέξ",-13.77678394317627],["▁истражување",-13.776803016662598],["▁മൊ",-13.776836395263672],["▁שאי",-13.776851654052734],["▁chiedere",-13.776873588562012],["▁सीख",-13.77687644958496],["▁tikro",-13.776891708374023],["▁станция",-13.776909828186035],["▁igualdade",-13.776915550231934],["▁sellel",-13.776951789855955],["球員",-13.77696418762207],["▁probleemi",-13.77697467803955],["▁Aplika",-13.777021408081056],["▁convenient",-13.777021408081056],["▁التركي",-13.77702522277832],["рева",-13.777027130126951],["情人",-13.777047157287598],["ttomast
i",-13.777048110961914],["▁கண்டு",-13.777058601379396],["▁أجهزة",-13.777071952819824],["▁Վրաստանի",-13.777091026306152],["യുന്ന",-13.777091979980469],["▁وڻ",-13.777097702026367],["▁каквото",-13.777125358581545],["GUS",-13.777135848999023],["ţelor",-13.777167320251465],["७२",-13.777196884155272],["▁тестов",-13.77720069885254],["▁provar",-13.77723503112793],["▁раскр",-13.777236938476562],["웠",-13.77723789215088],["▁Idő",-13.777250289916992],["▁Maas",-13.777261734008787],["없",-13.777267456054688],["ങ്ങളിലെ",-13.777268409729004],["miljö",-13.77727222442627],["ფილი",-13.77730941772461],["ดูหนัง",-13.77731704711914],["▁kullanmak",-13.77731990814209],["ぜ",-13.77732276916504],["负担",-13.777345657348633],["hờ",-13.777349472045898],["▁jogok",-13.777351379394531],["▁Olympia",-13.777352333068848],["▁tombe",-13.77735996246338],["▁creste",-13.77737045288086],["邱",-13.777371406555176],["陀",-13.77737522125244],["除此之外",-13.77737808227539],["▁শিক্ষার্থী",-13.777379035949709],["បិទ",-13.777395248413086],["捏",-13.777395248413086],["သုိ႔",-13.777396202087402],["▁académico",-13.777396202087402],["▁gegründet",-13.777396202087402],["▁podrobnosti",-13.777396202087402],["▁poszczególnych",-13.777396202087402],["▁résztvevő",-13.777396202087402],["▁saptamani",-13.777396202087402],["▁trần",-13.777396202087402],["▁wiedzieć",-13.777396202087402],["▁αφορμή",-13.777396202087402],["▁историю",-13.777396202087402],["▁отношению",-13.777396202087402],["▁сериозни",-13.777396202087402],["▁өрсөлд",-13.777396202087402],["▁چھوٹے",-13.777396202087402],["▁ਟਰੰਪ",-13.777396202087402],["▁ಒದಗಿಸ",-13.777396202087402],["▁มีนาคม",-13.777396202087402],["▁괜찮",-13.777396202087402],["✅",-13.777396202087402],["աթերթ",-13.77739715576172],["▁Skåne",-13.77739715576172],["▁بىلدۈر",-13.77739715576172],["▁କ୍ଷେତ୍ରରେ",-13.77739715576172],["▁ameliyat",-13.777398109436035],["▁teidän",-13.777398109436035],["▁Симеон",-13.777398109436035],["▁בעזרת",-13.777398109436035],["▁حیران",-13.777398109436035],["▁යුත්තේ",-13.777398109436035],["▁refrigera",-13.777400016784668],["▁вежба",-13.777400970458984],["▁დახმარება",-13.777400970458984],["▁павінен",-13.7774019241333],["▁událost",-13.777406692504885],["▁φίλους",-13.777406692504885],["▁ủng",-13.777406692504885],["▁احسان",-13.777410507202148],["▁Jornada",-13.777411460876465],["▁khúc",-13.777411460876465],["▁அருமை",-13.777413368225098],["▁فرنسا",-13.77741527557373],["▁amerykański",-13.777416229248049],["shabaab",-13.777417182922363],["▁количестве",-13.777419090270996],["▁বৈ",-13.777422904968262],["▁kvenna",-13.777423858642578],["▁Raamatu",-13.777424812316896],["▁dejansko",-13.77743148803711],["▁Tanács",-13.777432441711426],["▁언제나",-13.777432441711426],["ស្ស",-13.777433395385742],["▁בגין",-13.777437210083008],["▁liikaa",-13.77744197845459],["▁uitvoering",-13.77744960784912],["▁Cung",-13.777461051940918],["▁prednosti",-13.777467727661133],["▁QUI",-13.777472496032717],["▁tiê",-13.777472496032717],["▁ವಹಿಸ",-13.77748203277588],["ဆႏၵ",-13.777482986450195],["▁محکمہ",-13.777483940124512],["存款",-13.777483940124512],["▁ايندڙ",-13.77748966217041],["ադիր",-13.777496337890623],["▁Familj",-13.77749729156494],["▁सादर",-13.777501106262209],["▁tundub",-13.777502059936523],["▁ibarətdir",-13.777510643005373],["bī",-13.777511596679688],["▁φάση",-13.777533531188965],["▁sostenible",-13.77756118774414],["ສໍາລັບການ",-13.777573585510254],["јева",-13.777576446533203],["▁uitzicht",-13.777584075927734],["▁ඕනෑම",-13.777586936950684],["▁빨",-13.777586936950684],["онт",-13.777588844299316],["страх",-13.777605056762695],["০৬",-13.77762222290039],["ਮਾਰ",-
13.777623176574709],["▁skønne",-13.777637481689451],["ിക്കേണ്ട",-13.777642250061035],["▁Богом",-13.77765941619873],["▁üzleti",-13.777666091918944],["▁تلخ",-13.777666091918944],["ଯୋଗ",-13.777674674987791],["하우스",-13.777681350708008],["▁качан",-13.77769947052002],["ocupació",-13.777711868286133],["▁ਜੰਗ",-13.77771282196045],["ैर्",-13.777716636657717],["▁hostel",-13.777717590332031],["▁1888",-13.777739524841309],["▁tuttavia",-13.777755737304688],["▁میراث",-13.777761459350586],["▁സ്പ",-13.777767181396484],["भक्ति",-13.777771949768066],["gør",-13.777777671813965],["▁stimula",-13.77779483795166],["▁ajatus",-13.777813911437988],["▁हाई",-13.777824401855469],["▁छिन्",-13.777826309204102],["▁வசதி",-13.77782917022705],["െടുത്ത്",-13.777830123901367],["▁نمک",-13.777840614318848],["əstə",-13.77784252166748],["1978",-13.77786350250244],["▁صفحة",-13.77786636352539],["▁जोर",-13.777880668640137],["artiste",-13.777939796447754],["▁szinten",-13.777946472167969],["оксид",-13.778000831604004],["▁филиал",-13.778000831604004],["▁märkta",-13.778023719787598],["▁જેને",-13.778031349182127],["بيد",-13.778116226196287],["▁túa",-13.778117179870604],["▁values",-13.778120040893556],["▁miliardi",-13.778120994567873],["▁ਜੁ",-13.778173446655272],["sarbete",-13.778189659118652],["تحمل",-13.778190612792969],["אָד",-13.77819538116455],["247",-13.778203964233398],["▁vrijednost",-13.778212547302246],["▁Nawet",-13.778227806091309],["▁دوستی",-13.778233528137209],["▁Celý",-13.778240203857422],["vermogen",-13.778242111206056],["▁hesabına",-13.778242111206056],["有許多",-13.778243064880373],["▁gwamnatin",-13.778257369995115],["▁प्ले",-13.778278350830078],["ثواب",-13.77828311920166],["▁دکھائی",-13.77828598022461],["альныя",-13.778289794921877],["бүз",-13.77829360961914],["療法",-13.77829647064209],["熟悉的",-13.778313636779783],["▁අහන්න",-13.778326034545898],["▁referenc",-13.778338432312012],["▁granica",-13.778347969055176],["▁आवास",-13.778349876403809],["▁diatur",-13.778356552124023],["▁незалежно",-13.778361320495604],["이드",-13.778368949890137],["▁Центра",-13.77837371826172],["ўская",-13.778379440307615],["angsang",-13.778382301330566],["▁ჩემთვის",-13.778419494628906],["legia",-13.778422355651855],["▁temporar",-13.778423309326172],["▁blat",-13.77842617034912],["ayrı",-13.778429985046388],["▁divertir",-13.77847671508789],["▁Range",-13.778483390808104],["ጠቁ",-13.77849292755127],["传承",-13.778499603271484],["రేషన్",-13.778508186340332],["เมน",-13.778509140014648],["ଣୁ",-13.778512001037598],["रिस",-13.778518676757812],["LUX",-13.778522491455078],["ხევ",-13.778534889221191],["率先",-13.778550148010254],["▁првог",-13.778557777404783],["nök",-13.778559684753418],["▁højre",-13.778579711914062],["▁දීමට",-13.778579711914062],["▁الهو",-13.77858066558838],["▁conversion",-13.778581619262695],["▁బ్రహ్మ",-13.778589248657228],["diisa",-13.778594970703123],["ওয়ার",-13.77859592437744],["TCH",-13.778596878051758],["ntañ",-13.778599739074709],["▁2100",-13.778603553771973],["bool",-13.77861785888672],["րատ",-13.778637886047363],["▁ובכל",-13.778668403625488],["▁secretaria",-13.778677940368652],["▁વસ",-13.778687477111816],["▁poida",-13.778693199157717],["▁Karib",-13.778703689575195],["ιστικών",-13.778708457946776],["माला",-13.77871322631836],["▁interni",-13.778730392456056],["profi",-13.778731346130373],["នាង",-13.778748512268066],["所謂",-13.778765678405762],["አቀ",-13.778773307800291],["ёй",-13.778780937194824],["礼物",-13.778791427612305],["พุ",-13.778813362121582],["▁diferit",-13.778813362121582],["▁ನೀರಿನ",-13.778826713562012],["▁blus",-13.778834342956545],["▁teljesí
t",-13.778864860534668],["▁Víz",-13.778865814208984],["▁insekt",-13.778868675231934],["▁мистецтва",-13.778878211975098],["kell",-13.77888298034668],["▁Трен",-13.778894424438477],["က်ပ္",-13.77889633178711],["▁preču",-13.778897285461426],["▁ஆங்கில",-13.778922080993652],["▁визит",-13.778923034667969],["▁Malang",-13.778937339782717],["▁ଦୁର୍",-13.778943061828612],["▁љубави",-13.778944969177246],["全球化",-13.778963088989258],["▁synth",-13.77896499633789],["▁Medica",-13.778970718383787],["▁pelakon",-13.77899169921875],["റ്റാ",-13.779027938842772],["▁слове",-13.77902889251709],["▁Freud",-13.77905559539795],["ผู้ป่วย",-13.779067039489746],["▁jahat",-13.779067993164062],["▁mabuti",-13.77906894683838],["▁yaradır",-13.77907943725586],["ნტროლ",-13.779092788696287],["▁vooruit",-13.779096603393556],["▁paštu",-13.779099464416504],["جدد",-13.779108047485352],["▁شمالي",-13.779118537902832],["klart",-13.779138565063477],["▁politische",-13.779138565063477],["นี้มี",-13.779181480407717],["▁հրապարակում",-13.779197692871094],["аванні",-13.779227256774902],["ಾಧಿಕಾರಿ",-13.779273986816406],["▁Mohan",-13.77928352355957],["▁මැතිවරණය",-13.779285430908203],["▁yenə",-13.779288291931152],["▁Helsing",-13.779294967651367],["▁రకాల",-13.779294967651367],["▁מהו",-13.779296875],["ोक्त",-13.779372215270996],["ເຣ",-13.779378890991213],["的確",-13.779391288757324],["▁apua",-13.779397010803224],["▁dapur",-13.779476165771484],["tasunak",-13.779488563537598],["▁करण्या",-13.779489517211914],["▁Իմ",-13.779513359069824],["▁kelapa",-13.779522895812988],["这个时候",-13.77952480316162],["▁maxi",-13.779525756835938],["不只是",-13.77952766418457],["▁смърт",-13.779565811157228],["▁архів",-13.779570579528809],["▁leher",-13.779585838317873],["いきます",-13.77958869934082],["▁dwi",-13.77961254119873],["▁индустрија",-13.779627799987791],["דלק",-13.779633522033691],["会議",-13.779640197753906],["şîr",-13.779651641845703],["höfn",-13.779659271240234],["forslag",-13.779690742492676],["▁haberi",-13.77969741821289],["pitia",-13.779707908630373],["ຍາກ",-13.779732704162598],["▁текущ",-13.779732704162598],["▁រយៈពេល",-13.77974796295166],["▁mavzu",-13.779751777648926],["▁המצב",-13.779757499694824],["สิบ",-13.779763221740724],["▁skå",-13.779778480529783],["ագիտական",-13.779781341552734],["ອອນໄລນ໌",-13.779784202575684],["▁ئاياللار",-13.779786109924316],["现状",-13.779788970947266],["Player",-13.779797554016112],["戰略",-13.779812812805176],["嘆",-13.779818534851074],["worm",-13.779826164245604],["弊",-13.779826164245604],["תוצאה",-13.779829978942873],["偵",-13.779831886291504],["പ്രസംഗ",-13.779837608337402],["すごい",-13.779850959777832],["ອາຊຽນ",-13.779852867126465],["መንፈሳዊ",-13.779852867126465],["▁Betreuung",-13.779852867126465],["▁entgegen",-13.779852867126465],["▁gällande",-13.779852867126465],["▁každej",-13.779852867126465],["▁pembinaan",-13.779852867126465],["▁poljoprivredn",-13.779852867126465],["▁преступления",-13.779852867126465],["▁продължение",-13.779852867126465],["▁կրկին",-13.779852867126465],["▁المعارضة",-13.779852867126465],["▁عوارض",-13.779852867126465],["▁गूगल",-13.779852867126465],["▁सांस्कृतिक",-13.779852867126465],["▁हिसाब",-13.779852867126465],["▁சகோதர",-13.779852867126465],["▁ఇద్దరు",-13.779852867126465],["▁ಕ್ರಿಕೆಟ್",-13.779852867126465],["▁തെരഞ്ഞെടുപ്പ",-13.779852867126465],["▁මානසික",-13.779852867126465],["▁საუბარი",-13.779852867126465],["▁በቅርቡ",-13.779852867126465],["equilibra",-13.77985382080078],["▁Muuqaal",-13.77985382080078],["▁kerékpár",-13.77985382080078],["▁període",-13.77985382080078],["▁qebûl",-13.77985382080078],["▁паветра",-13.77985382080078],["▁ਪ੍
ਰਕਾਸ਼",-13.77985382080078],["▁ඉතිරි",-13.77985382080078],["▁gilydd",-13.779854774475098],["▁відразу",-13.779854774475098],["▁cercetare",-13.779855728149414],["▁neredeyse",-13.779855728149414],["▁അറസ്റ്റ്",-13.779855728149414],["风景",-13.779855728149414],["▁Infrastruktur",-13.779858589172363],["屯",-13.779858589172363],["▁piisavalt",-13.77985954284668],["蔥",-13.77985954284668],["▁pertanian",-13.779860496520996],["viranomais",-13.779865264892578],["▁teisę",-13.779868125915527],["谷歌",-13.779869079589844],["▁చేస్తుంది",-13.77987003326416],["▁aliyekuwa",-13.779870986938477],["▁viwanda",-13.779871940612791],["▁چربی",-13.77987289428711],["▁baxa",-13.779873847961426],["▁مدريد",-13.779873847961426],["春节",-13.779873847961426],["▁century",-13.779874801635742],["▁turpmāk",-13.779874801635742],["▁шилжүүл",-13.779877662658691],["▁досад",-13.779878616333008],["▁постојано",-13.779878616333008],["▁speciální",-13.77988052368164],["▁önska",-13.77988338470459],["▁سازد",-13.779884338378906],["ປ້ອງກັນ",-13.779885292053224],["▁наукових",-13.77988624572754],["▁popolazione",-13.779897689819336],["▁ਮਈ",-13.779897689819336],["▁Vasar",-13.779902458190918],["ನಾಥ್",-13.779903411865234],["▁aparıcı",-13.77990436553955],["rkən",-13.779906272888184],["▁gharama",-13.7799072265625],["▁මැති",-13.779908180236816],["▁अमृत",-13.779909133911133],["▁அளித்த",-13.779909133911133],["▁reglugerð",-13.77991008758545],["▁१००",-13.779913902282717],["▁радянськ",-13.77992343902588],["▁جاپان",-13.779925346374512],["ULO",-13.779937744140623],["▁tontolo",-13.779937744140623],["▁최신",-13.77994155883789],["▁فندق",-13.779942512512209],["▁վիրավոր",-13.77994441986084],["▁besit",-13.779946327209473],["▁Vau",-13.779953002929688],["ירוש",-13.779956817626951],["▁nordmenn",-13.77995777130127],["▁müdiri",-13.779970169067385],["▁pensiero",-13.779970169067385],["▁нашој",-13.779970169067385],["▁tudományos",-13.779979705810549],["小さい",-13.779986381530762],["▁sfaturi",-13.77999496459961],["▁kurssi",-13.779995918273926],["Deireadh",-13.779998779296877],["ခဲ့ပြီး",-13.779998779296877],["▁alimentação",-13.78000259399414],["▁לישראל",-13.78000259399414],["▁financiamento",-13.780014991760254],["▁uanset",-13.780021667480469],["פיי",-13.780031204223633],["▁sentenza",-13.780034065246582],["▁afgelope",-13.780044555664062],["性质",-13.780046463012695],["ಾಯಿತು",-13.780047416687012],["▁Sola",-13.780049324035645],["▁כעת",-13.780051231384276],["vasara",-13.78005313873291],["▁прије",-13.780075073242188],["ókat",-13.780085563659668],["ացումը",-13.78011417388916],["▁راوړ",-13.78011703491211],["▁маю",-13.780120849609377],["eswara",-13.78012466430664],["▁आफूले",-13.780132293701172],["▁częściej",-13.780139923095703],["▁батыр",-13.780142784118652],["තෝ",-13.780145645141602],["▁праблемы",-13.780169486999512],["▁vositalari",-13.780179977416992],["▁författare",-13.780184745788574],["▁igrexa",-13.780200004577637],["ецът",-13.780200958251951],["▁Livet",-13.78020191192627],["▁miljardi",-13.780211448669434],["▁ogromne",-13.78022003173828],["VITA",-13.78022289276123],["▁مامور",-13.78023910522461],["▁Vejle",-13.780247688293455],["という方",-13.780282020568848],["Тур",-13.780287742614746],["▁banky",-13.78029727935791],["न्दा",-13.780319213867188],["▁Trà",-13.780327796936035],["▁оказва",-13.78034782409668],["▁מלאה",-13.780356407165527],["▁ελληνικής",-13.780364990234377],["▁lietotāji",-13.780449867248535],["▁vanwege",-13.780467987060549],["▁джерел",-13.780468940734863],["prítom",-13.780471801757812],["▁χτυπ",-13.780503273010254],["puru",-13.780516624450684],["▁последния",-13.780517578125],["երգ",-13.78
0518531799316],["▁verksamheten",-13.780525207519531],["▁vzdrž",-13.780543327331545],["лэгч",-13.780553817749023],["▁вашим",-13.780556678771973],["▁tortura",-13.780576705932615],["▁עמוק",-13.780583381652832],["▁basketbal",-13.780584335327148],["▁Atlantic",-13.780588150024414],["▁madri",-13.780590057373049],["▁तदा",-13.780593872070312],["▁Mulla",-13.780596733093262],["იზაცია",-13.780606269836426],["667",-13.780613899230955],["▁vergeet",-13.780619621276855],["puolella",-13.780624389648438],["▁rahmat",-13.780627250671388],["之地",-13.780633926391602],["▁danışma",-13.780638694763184],["цкім",-13.780640602111816],["wiecie",-13.780645370483398],["▁мець",-13.780678749084473],["ΛΕ",-13.780696868896484],["▁quinque",-13.780696868896484],["▁سۈرەت",-13.780750274658203],["▁јули",-13.780756950378418],["משלוח",-13.780760765075684],["▁aveam",-13.78076171875],["dotti",-13.780774116516112],["furahi",-13.78077507019043],["▁이들",-13.780790328979492],["នុ",-13.78080940246582],["▁लागी",-13.780824661254885],["▁ujung",-13.780844688415527],["INGS",-13.78084945678711],["ाणां",-13.780858993530272],["▁velká",-13.780863761901855],["իրական",-13.780878067016602],["▁recimo",-13.78088665008545],["▁pripravili",-13.780903816223145],["▁ಮಾದರಿ",-13.780919075012209],["щенко",-13.780938148498535],["▁положительн",-13.780948638916016],["dagur",-13.780962944030762],["▁സൈറ്റ",-13.780975341796877],["పొ",-13.780996322631836],["MINI",-13.781003952026367],["აჟი",-13.781018257141112],["▁વર્",-13.78103256225586],["▁Børn",-13.781033515930176],["长大",-13.781073570251465],["▁работите",-13.78107452392578],["▁rezultata",-13.781078338623049],["קם",-13.78108024597168],["上来",-13.78111171722412],["▁екологічн",-13.781113624572754],["▁Cassi",-13.781153678894045],["алж",-13.781180381774902],["▁скан",-13.781188011169434],["સાર",-13.781192779541016],["▁நல",-13.781200408935549],["既に",-13.781200408935549],["▁Belgia",-13.781224250793455],["bergen",-13.781238555908203],["▁кыймыл",-13.781238555908203],["▁банд",-13.78124713897705],["▁koge",-13.78130054473877],["▁szervezetek",-13.781301498413086],["ктан",-13.781327247619627],["深受",-13.781332969665527],["的理念",-13.781368255615234],["▁redz",-13.78136920928955],["ี้ยว",-13.7813720703125],["க்கிறேன்",-13.78138542175293],["образов",-13.781397819519045],["владе",-13.781407356262209],["ການພັດທະນາ",-13.78140926361084],["glez",-13.781432151794434],["▁fantastiskt",-13.781441688537598],["▁тауар",-13.781442642211914],["▁դրան",-13.781445503234863],["ສະພາບ",-13.781447410583496],["пеш",-13.78145694732666],["▁snar",-13.781486511230469],["elės",-13.781556129455566],["颗",-13.78158187866211],["▁хуульд",-13.781588554382324],["外科",-13.78159999847412],["▁preparare",-13.781617164611816],["▁призов",-13.78161907196045],["virasto",-13.781620025634766],["▁मधेस",-13.781630516052246],["екват",-13.781633377075195],["▁başkanı",-13.781635284423828],["12.2013",-13.78165340423584],["λαμβάνει",-13.781655311584473],["▁znaju",-13.781661987304688],["grać",-13.781671524047852],["▁Suf",-13.781691551208496],["евдин",-13.781704902648926],["▁fukt",-13.781707763671877],["ARK",-13.781715393066406],["▁deputet",-13.781739234924316],["▁여기에",-13.781753540039062],["▁نڪري",-13.78176212310791],["▁concerned",-13.781777381896973],["տրա",-13.781792640686035],["▁ivez",-13.781795501708984],["ຊາວ",-13.7817964553833],["▁Такі",-13.781797409057615],["суп",-13.781821250915527],["ებისათვის",-13.781826972961426],["▁fíor",-13.781834602355955],["探討",-13.781845092773438],["tinę",-13.781850814819336],["▁yapısı",-13.781853675842283],["▁কাল",-13.781902313232422],["ยี",-13.781934738159
18],["▁живопис",-13.78193473815918],["▁patate",-13.781935691833496],["▁dificultat",-13.781937599182127],["▁gulv",-13.781942367553713],["BIZ",-13.781947135925291],["▁tle",-13.781947135925291],["ទេ។",-13.781959533691406],["数が",-13.781959533691406],["▁metai",-13.781976699829102],["ולוג",-13.78198528289795],["▁නැග",-13.781989097595217],["▁243",-13.78199577331543],["alaikum",-13.782004356384276],["▁dəqiq",-13.782011985778809],["imper",-13.782039642333984],["▁сенс",-13.782054901123049],["σον",-13.782063484191896],["▁תוכנית",-13.782063484191896],["▁каментар",-13.782079696655272],["▁membela",-13.78208827972412],["▁недостатн",-13.782106399536133],["▁ការពារ",-13.782115936279297],["сторо",-13.782127380371094],["▁पत्ता",-13.782129287719728],["გროვ",-13.782154083251951],["▁మార్పు",-13.782159805297852],["ॅप",-13.7821626663208],["▁miradi",-13.78217601776123],["▁يوميا",-13.782194137573242],["▁genero",-13.782197952270508],["▁passeio",-13.78219985961914],["selskabet",-13.782217025756836],["▁Kolek",-13.782218933105469],["цикл",-13.78223705291748],["ponto",-13.782238960266112],["▁eesmärgi",-13.782251358032228],["Uz",-13.782255172729492],["▁uisce",-13.782259941101074],["ىچە",-13.782267570495604],["मद",-13.782268524169922],["インストール",-13.782304763793944],["桜",-13.782304763793944],["ฝรั่งเศส",-13.78231430053711],["ဋ",-13.782315254211426],["ល្ងាច",-13.782315254211426],["CHADEMA",-13.782316207885742],["hőmérséklet",-13.782316207885742],["ٴ",-13.782316207885742],["ມະນຸດ",-13.782316207885742],["កម្សាន្ត",-13.782316207885742],["▁Fréttir",-13.782316207885742],["▁manufacture",-13.782316207885742],["▁społeczeństw",-13.782316207885742],["▁εμάς",-13.782316207885742],["▁Мындан",-13.782316207885742],["▁ніколі",-13.782316207885742],["▁այժմ",-13.782316207885742],["▁بادشاہ",-13.782316207885742],["▁ਅਗਸਤ",-13.782316207885742],["▁તમારો",-13.782316207885742],["▁ଆକ୍ରମଣ",-13.782316207885742],["▁ପହଞ୍ଚି",-13.782316207885742],["▁ପ୍ରସଙ୍ଗ",-13.782316207885742],["▁ಪ್ರತಿಭಟನೆ",-13.782316207885742],["▁พฤศจิกายน",-13.782316207885742],["▁အကူအညီ",-13.782316207885742],["▁졸업",-13.782316207885742],["▁Navoiy",-13.78231716156006],["▁bíróság",-13.78231716156006],["▁অসমীয়া",-13.78231716156006],["▁తెరకెక్క",-13.78231716156006],["▁Τέλος",-13.782318115234377],["▁derrière",-13.782320022583008],["▁malattia",-13.782320022583008],["▁nhuận",-13.782323837280272],["▁Palermo",-13.78232479095459],["▁đuổi",-13.78232479095459],["▁බැවින්",-13.78232479095459],["▁bağımsız",-13.782325744628906],["▁법률",-13.782325744628906],["▁fericire",-13.782326698303224],["▁הודעה",-13.782326698303224],["▁обувки",-13.78232765197754],["sojen",-13.782328605651855],["▁fehlt",-13.782328605651855],["▁yaygın",-13.782328605651855],["▁História",-13.782329559326172],["▁୧୬",-13.782329559326172],["▁খাবার",-13.782330513000488],["▁prosinca",-13.782331466674805],["▁NOMOR",-13.782336235046388],["▁είστε",-13.78233814239502],["▁Сабақ",-13.78233814239502],["▁కామెంట్",-13.782344818115234],["▁соціальної",-13.782357215881348],["▁ببینید",-13.782360076904297],["▁ਆਏ",-13.782362937927246],["▁følte",-13.782371520996094],["▁Գիտ",-13.782373428344728],["▁مارکیٹ",-13.78237533569336],["▁ਅਕਾਲ",-13.78237533569336],["▁Hieronder",-13.782377243041992],["▁തങ്ങളുടെ",-13.78238296508789],["▁पटेल",-13.78238582611084],["▁sestra",-13.782387733459473],["▁mạc",-13.782389640808104],["▁cartão",-13.782393455505373],["▁integrar",-13.782408714294434],["▁Вік",-13.782408714294434],["▁ایمنی",-13.782421112060549],["否則",-13.782421112060549],["▁Saad",-13.782424926757812],["▁सिम",-13.782432556152344],["ຂອບ",-13.782435417175291],["ريز",-13.782
437324523926],["▁шүүхийн",-13.78244686126709],["ከው",-13.78245735168457],["▁passaggio",-13.782462120056152],["házak",-13.782466888427734],["ಂತಿ",-13.782469749450684],["▁абы",-13.78247356414795],["▁koşul",-13.782483100891112],["▁иностранных",-13.782483100891112],["▁жактан",-13.78249740600586],["▁определение",-13.782498359680176],["这场",-13.782500267028809],["▁výstup",-13.782514572143556],["▁pobreza",-13.782515525817873],["バン",-13.78251838684082],["أفلام",-13.78252410888672],["▁сұрақтар",-13.782530784606934],["▁döndü",-13.782540321350098],["자원",-13.782547950744627],["▁الطبيعية",-13.782552719116213],["▁سکیں",-13.782564163208008],["οτήτων",-13.78256607055664],["▁remél",-13.782575607299805],["▁ütleb",-13.782581329345703],["▁dreptate",-13.782583236694336],["▁výrobu",-13.782590866088867],["шити",-13.782601356506348],["▁زیبای",-13.782645225524902],["▁доставки",-13.782678604125977],["▁ఘన",-13.78268051147461],["Drive",-13.782686233520508],["▁Србима",-13.782687187194824],["頂いて",-13.782691955566406],["සක",-13.782732963562012],["▁1/1",-13.782747268676758],["▁hominis",-13.782750129699709],["防守",-13.78275203704834],["rätten",-13.782761573791504],["▁piani",-13.78276824951172],["红色",-13.782769203186035],["værdig",-13.782774925231934],["▁البل",-13.78277587890625],["brew",-13.782776832580566],["▁العمال",-13.782797813415527],["▁ujarnya",-13.78280544281006],["▁допаѓа",-13.782812118530272],["なかったので",-13.782814025878906],["▁שהיו",-13.782822608947754],["▁грчки",-13.782828330993652],["កុង",-13.782831192016602],["▁технологийн",-13.782832145690918],["▁želijo",-13.7828369140625],["▁mică",-13.78283977508545],["▁fakulteta",-13.782846450805664],["タル",-13.78285312652588],["▁маска",-13.78287124633789],["▁నిర్ణయం",-13.782877922058104],["▁наклад",-13.782889366149902],["▁заробітн",-13.7829008102417],["▁저도",-13.782931327819824],["zuka",-13.782962799072266],["ىرىپ",-13.782963752746582],["지로",-13.782965660095217],["▁drone",-13.782966613769531],["▁zuzendu",-13.78297996520996],["设计师",-13.782984733581545],["▁producenta",-13.782991409301758],["▁ತೋರಿಸ",-13.782992362976074],["руулах",-13.783010482788086],["▁tough",-13.783015251159668],["instant",-13.78302764892578],["▁prometa",-13.783038139343262],["▁макул",-13.78304672241211],["تعليم",-13.783066749572754],["▁აზრი",-13.783066749572754],["比起",-13.783135414123535],["្ត",-13.78314208984375],["▁дамы",-13.783143043518066],["出租",-13.783143043518066],["очный",-13.78315258026123],["▁Kereta",-13.783153533935549],["Ord",-13.783156394958496],["ාගාර",-13.783203125],["▁попередньо",-13.783214569091797],["▁نجی",-13.783218383789062],["▁keçirildi",-13.78321933746338],["วอ",-13.783220291137695],["▁fylde",-13.78322696685791],["▁тыңда",-13.783244132995604],["▁menilai",-13.783246994018556],["数据库",-13.783254623413086],["▁hory",-13.783255577087402],["▁маги",-13.783255577087402],["▁tiid",-13.783258438110352],["ကိုင္",-13.783288955688477],["▁minime",-13.783291816711426],["▁Turkiya",-13.78331184387207],["▁besøker",-13.783326148986816],["stich",-13.783329010009766],["▁الزمن",-13.783329963684082],["▁ליב",-13.783340454101562],["ساند",-13.78334140777588],["▁polno",-13.783345222473145],["tavalt",-13.783380508422852],["▁Ruang",-13.783384323120115],["▁Lisä",-13.783398628234863],["දුන්",-13.783400535583496],["▁néző",-13.783401489257812],["▁milión",-13.783409118652344],["▁јавноста",-13.783413887023926],["ូក",-13.783448219299316],["▁gesin",-13.783458709716797],["حصر",-13.783466339111328],["▁ажлаа",-13.783485412597656],["ফোর",-13.783489227294922],["LMA",-13.783495903015137],["▁farba",-13.7835054397583],["▁кыргыздар",-13.78
3517837524414],["▁prodi",-13.783534049987791],["▁Jamie",-13.783546447753906],["лія",-13.783557891845703],["▁Alexandre",-13.78357982635498],["rādi",-13.783584594726562],["artyst",-13.783592224121094],["▁Etats",-13.783594131469728],["РІ",-13.78360080718994],["ziņas",-13.783609390258787],["ሞላ",-13.783610343933104],["▁selling",-13.783615112304688],["▁econòmica",-13.78361701965332],["買い物",-13.783620834350586],["▁klíč",-13.7836332321167],["▁покушава",-13.78367519378662],["ňte",-13.783687591552734],["要想",-13.783717155456545],["سین",-13.78371810913086],["словен",-13.783734321594238],["wuh",-13.78378200531006],["▁došli",-13.78378200531006],["ётся",-13.783791542053224],["estructura",-13.783818244934082],["न्ते",-13.783841133117676],["လင္",-13.783843994140623],["▁rally",-13.78385066986084],["の方々",-13.783857345581056],["▁nişan",-13.783884048461914],["▁figyelmet",-13.783926963806152],["CHU",-13.783933639526367],["ിക്കാം",-13.783943176269531],["▁internasjonal",-13.78395175933838],["▁Dove",-13.78395938873291],["▁Sitä",-13.78396224975586],["▁mijloc",-13.783963203430176],["▁fasa",-13.783977508544922],["▁Robb",-13.783991813659668],["最多的",-13.78400421142578],["lepingu",-13.784017562866213],["▁qapı",-13.784022331237791],["ର୍ସ",-13.784039497375488],["ေါ",-13.784046173095703],["ισμοί",-13.784114837646484],["▁පක්ෂයේ",-13.784114837646484],["▁replica",-13.784116744995115],["▁običaj",-13.784147262573242],["▁Capri",-13.784162521362305],["జూ",-13.784193992614746],["▁osasun",-13.784208297729492],["▁студии",-13.78421115875244],["páj",-13.784221649169922],["になってしまう",-13.784223556518556],["ଖଣ୍ଡ",-13.78423309326172],["▁uztic",-13.784253120422363],["െയ്",-13.784258842468262],["すぎる",-13.78429889678955],["ՆԱ",-13.78435230255127],["▁নানা",-13.784354209899902],["▁herken",-13.78436279296875],["LOW",-13.784369468688965],["▁جستجوی",-13.784377098083496],["ಹೆ",-13.784403800964355],["하시기",-13.784412384033203],["Pub",-13.784415245056152],["▁बाग",-13.784431457519531],["▁ભૂ",-13.78443431854248],["こうした",-13.784438133239746],["оўнік",-13.784439086914062],["▁kilpa",-13.784456253051758],["▁rādīt",-13.784461975097656],["▁кристал",-13.784462928771973],["▁jelenlegi",-13.784468650817873],["정도",-13.78447437286377],["去找",-13.784479141235352],["▁Unterricht",-13.784504890441896],["està",-13.78451919555664],["▁nụ",-13.784520149230955],["▁ዘገባ",-13.784536361694336],["▁okno",-13.784540176391602],["0.9",-13.784552574157717],["šajte",-13.78456687927246],["дзё",-13.784582138061523],["คําสั่ง",-13.784587860107422],["▁lepe",-13.784587860107422],["ենց",-13.78459358215332],["▁interessi",-13.784597396850586],["▁fotel",-13.78464412689209],["▁والعمل",-13.78465747833252],["▁signifikan",-13.784661293029783],["▁Mód",-13.78466510772705],["▁wykonany",-13.784693717956545],["መሆናቸው",-13.784695625305176],["ប្រធាន",-13.784706115722656],["▁mörg",-13.784706115722656],["θείτε",-13.784707069396973],["▁Bagian",-13.78471565246582],["▁sprid",-13.78471565246582],["202",-13.78471851348877],["喘",-13.784738540649414],["妆",-13.784741401672363],["っていました",-13.784748077392578],["ለዋ",-13.784750938415527],["ნამ",-13.784753799438477],["වලා",-13.784756660461426],["▁levanta",-13.784767150878906],["ໂອກາດ",-13.784783363342283],["ဗုဒၶ",-13.784784317016602],["Württemberg",-13.784785270690918],["τίθεται",-13.784785270690918],["▁Desenvolvimento",-13.784785270690918],["▁portuguesa",-13.784785270690918],["▁szakértő",-13.784785270690918],["▁γρήγορα",-13.784785270690918],["▁रिफंड",-13.784785270690918],["▁కోర్టు",-13.784785270690918],["▁రాహుల్",-13.784785270690918],["▁ಇಷ್ಟವಾಯಿತೆ",-13.784785270690918],[
"▁Kooperation",-13.784786224365234],["▁pouvait",-13.784786224365234],["▁štandard",-13.784786224365234],["잇",-13.784786224365234],["▁ahamiyat",-13.78478717803955],["▁પિતા",-13.78478717803955],["▁postanowił",-13.784788131713867],["▁überzeugt",-13.784788131713867],["▁әкімшілік",-13.784788131713867],["▁පහසු",-13.784788131713867],["▁девојка",-13.784789085388184],["▁κλίμα",-13.7847900390625],["ซิตี้",-13.784791946411133],["fordern",-13.784793853759766],["▁Panorama",-13.784793853759766],["▁hodiaŭ",-13.784793853759766],["númer",-13.784794807434082],["▁Ήταν",-13.784795761108398],["ກີບ",-13.784798622131348],["▁гектар",-13.784799575805664],["▁дүрэм",-13.78480052947998],["середині",-13.784802436828612],["ბრძანებ",-13.784802436828612],["▁julkaista",-13.784802436828612],["▁америчко",-13.784802436828612],["โพส",-13.78480339050293],["▁Дзень",-13.784805297851562],["تجنب",-13.784809112548828],["在網路上",-13.784809112548828],["▁részlet",-13.78481101989746],["▁розроблен",-13.784812927246094],["▁അതിന",-13.784812927246094],["▁mamlakatlar",-13.784814834594728],["▁intestinal",-13.784815788269045],["pišite",-13.784817695617676],["▁petunjuk",-13.784817695617676],["▁mokymo",-13.784823417663574],["▁അറിവ",-13.784823417663574],["tegemea",-13.784835815429688],["▁пачалі",-13.784836769104004],["▁городского",-13.78483772277832],["▁прадукт",-13.78483772277832],["▁dienomis",-13.78484344482422],["▁polgár",-13.784844398498535],["បញ្ច",-13.784845352172852],["▁шинж",-13.784847259521484],["▁تۇت",-13.784852981567385],["▁превърна",-13.784855842590332],["▁Informacije",-13.78485870361328],["ナン",-13.784860610961914],["親切",-13.784862518310549],["ຊົມ",-13.784863471984863],["▁242",-13.784863471984863],["▁ΜΑΣ",-13.784868240356444],["▁बसेको",-13.784873008728027],["▁hlavný",-13.784878730773926],["CUL",-13.78488063812256],["▁ستكون",-13.784890174865724],["▁tropical",-13.78489112854004],["难度",-13.784892082214355],["▁szülő",-13.784907341003418],["▁suhteen",-13.784918785095217],["▁വെച്ച്",-13.784919738769531],["▁Aula",-13.784929275512695],["▁opravil",-13.784930229187012],["▁forebygge",-13.78493309020996],["▁ಸೇರ",-13.78493309020996],["▁Oficina",-13.784981727600098],["▁wirkt",-13.78498649597168],["▁करायला",-13.784992218017578],["▁учење",-13.78500270843506],["▁rozhodně",-13.785003662109377],["▁bersedia",-13.785018920898438],["▁hardt",-13.78502368927002],["കുള",-13.78504753112793],["▁વર્ષની",-13.785054206848145],["ksiä",-13.78505516052246],["▁plina",-13.785057067871094],["▁tedavisi",-13.785057067871094],["してもらう",-13.785061836242676],["▁Əsas",-13.78506565093994],["物語",-13.785096168518066],["▁Също",-13.785120010375977],["▁рачун",-13.785125732421877],["▁लगायतका",-13.785126686096191],["▁galerie",-13.78514575958252],["重建",-13.785149574279783],["▁internetowy",-13.785150527954102],["▁ērti",-13.785154342651367],["ЕЛИ",-13.785168647766112],["▁droog",-13.785168647766112],["▁ჩავ",-13.785173416137695],["周邊",-13.785179138183594],["▁voeren",-13.785196304321287],["mès",-13.785197257995604],["▁الكبيرة",-13.785199165344238],["▁әуе",-13.785263061523438],["▁Verdens",-13.78526782989502],["有益",-13.785270690917969],["لىنىدۇ",-13.785316467285156],["энергетическ",-13.785319328308104],["▁sorunlar",-13.785325050354004],["jiri",-13.78532886505127],["▁байгууллагууд",-13.785357475280762],["ക്കാം",-13.785360336303713],["▁annuel",-13.785360336303713],["▁buitenland",-13.785360336303713],["▁სერია",-13.785367965698242],["▁cellule",-13.785374641418455],["▁برسد",-13.785377502441406],["rzecz",-13.785380363464355],["▁parka",-13.785401344299316],["▁રોક",-13.785402297973633],["▁перемен",-13.7
85407066345217],["Ebre",-13.785409927368164],["ಭಾವ",-13.785419464111328],["▁prud",-13.785425186157228],["ленген",-13.785435676574709],["▁plemen",-13.785470962524414],["▁заключа",-13.78548526763916],["▁старши",-13.785497665405272],["▁العراقية",-13.78552532196045],["which",-13.785527229309082],["▁fidem",-13.785537719726562],["▁Lösungen",-13.785540580749512],["darbību",-13.785541534423828],["▁Sää",-13.785541534423828],["שמח",-13.785554885864258],["▁legii",-13.785579681396484],["програм",-13.78558349609375],["ຊິ",-13.785606384277344],["▁høsten",-13.785615921020508],["▁банке",-13.78561782836914],["▁uuesti",-13.785637855529783],["▁Pelajaran",-13.785648345947266],["▁iqtisadiyyat",-13.785662651062012],["▁ancha",-13.785663604736328],["▁svu",-13.785673141479492],["是一件",-13.785673141479492],["zingen",-13.785679817199709],["▁Moli",-13.785684585571287],["斗争",-13.78569507598877],["ਦਿ",-13.785696029663086],["무역",-13.785698890686035],["യാണെന്ന്",-13.785728454589844],["niejszym",-13.785733222961426],["▁sanki",-13.785758018493652],["▁teremt",-13.785758972167969],["добрата",-13.78576374053955],["▁bilion",-13.785770416259766],["лоў",-13.785784721374512],["▁dogter",-13.785786628723145],["ركب",-13.78578758239746],["▁Leyla",-13.785788536071776],["אנגלית",-13.785796165466309],["▁problémák",-13.78580093383789],["nensis",-13.785829544067385],["▁uscat",-13.785844802856444],["słucha",-13.785849571228027],["▁ഗു",-13.785852432250977],["▁1885",-13.78585720062256],["prawy",-13.785877227783203],["ījumā",-13.78588581085205],["▁tajuk",-13.78588581085205],["▁Bunda",-13.785894393920898],["▁1800-",-13.785906791687012],["▁garapen",-13.78591251373291],["▁országos",-13.785924911499023],["ქარი",-13.785943984985352],["वु",-13.78595733642578],["రాం",-13.785981178283691],["▁ಕಣ",-13.78599739074707],["▁எழுத்து",-13.786005973815918],["لری",-13.786006927490234],["▁hönd",-13.786018371582031],["▁причины",-13.786036491394045],["激发",-13.786048889160156],["▁mendje",-13.7860689163208],["троф",-13.786084175109863],["▁begynne",-13.786091804504396],["ચિ",-13.786097526550291],["▁банківськ",-13.786113739013672],["的原则",-13.786137580871582],["ālis",-13.786163330078123],["▁cave",-13.78616428375244],["баяр",-13.786177635192873],["▁visualizza",-13.786184310913086],["▁ўлада",-13.786200523376465],["▁шет",-13.786210060119627],["ినట్లు",-13.786240577697754],["▁Persoan",-13.786240577697754],["▁tju",-13.786242485046388],["▁imprezy",-13.786260604858398],["▁обем",-13.786261558532717],["▁regioni",-13.786277770996094],["ვიტ",-13.78628635406494],["歳の",-13.786301612854004],["GOR",-13.78630828857422],["dávať",-13.786322593688965],["katalog",-13.786346435546877],["▁herum",-13.78636074066162],["▁stilte",-13.786367416381836],["▁жиын",-13.786367416381836],["▁undersökning",-13.786369323730469],["▁nessuna",-13.786384582519531],["▁သူမ",-13.786388397216797],["liike",-13.78642749786377],["ဖော်",-13.78644847869873],["▁ostaja",-13.786471366882324],["实用",-13.786478996276855],["לאנג",-13.786502838134766],["の中には",-13.786529541015623],["▁érkezett",-13.786550521850586],["▁Bê",-13.786554336547852],["▁nataon",-13.786555290222168],["ccup",-13.786561012268066],["▁шарты",-13.786565780639648],["fada",-13.786568641662598],["greina",-13.78658390045166],["▁සාකච්ඡා",-13.786605834960938],["▁jít",-13.786609649658203],["align",-13.786613464355469],["რვა",-13.786638259887695],["205",-13.786646842956545],["Ras",-13.78665542602539],["▁aurreko",-13.786664962768556],["▁развод",-13.786678314208984],["▁જાણે",-13.78675937652588],["fuhr",-13.786778450012209],["مجتمع",-13.786786079406738],["▁21.00",-13.78
6787986755373],["비가",-13.78679656982422],["ਛਾ",-13.786802291870115],["大手",-13.78681755065918],["▁ਕੇਸ",-13.78682804107666],["▁caused",-13.786834716796877],["▁respirator",-13.786835670471191],["▁Ngayon",-13.786853790283203],["▁muži",-13.786865234375],["▁contará",-13.786867141723633],["▁мекеме",-13.786876678466797],["▁ఉచిత",-13.786885261535645],["▁سابقا",-13.78689670562744],["▁apik",-13.786901473999023],["▁ജൂ",-13.78690242767334],["▁otsus",-13.786925315856934],["▁magistrat",-13.786935806274414],["▁dosti",-13.786941528320312],["ฮิต",-13.786946296691896],["▁கட்டி",-13.786981582641602],["izimin",-13.786993026733398],["▁tampilan",-13.787001609802246],["▁eBay",-13.78702449798584],["▁OEM",-13.787055015563965],["ទី២",-13.787063598632812],["▁ილია",-13.787070274353027],["▁viib",-13.78709602355957],["▁verskil",-13.787107467651367],["▁уруксат",-13.787110328674316],["▁svarīgi",-13.787135124206545],["▁respondent",-13.787137031555176],["▁включает",-13.787166595458984],["으로는",-13.787175178527832],["rinish",-13.78718090057373],["maması",-13.787189483642578],["▁Philippe",-13.787200927734377],["▁چای",-13.787202835083008],["盧",-13.787212371826172],["▁माणसा",-13.78722095489502],["口碑",-13.787229537963867],["รวบรวม",-13.78725814819336],["ปัจจัย",-13.787260055541992],["ສະບັບ",-13.787260055541992],["សន្តិសុខ",-13.787260055541992],["▁ਕੰਪਨੀ",-13.787260055541992],["மில்லை",-13.787261009216309],["▁Aontaithe",-13.787261009216309],["▁Campeonato",-13.787261009216309],["▁heutigen",-13.787261009216309],["▁jährlich",-13.787261009216309],["▁náměstí",-13.787261009216309],["▁recoñece",-13.787261009216309],["▁rozwiązań",-13.787261009216309],["▁utekelezaji",-13.787261009216309],["▁Ιταλία",-13.787261009216309],["▁μεγάλα",-13.787261009216309],["▁Маалымат",-13.787261009216309],["▁Соңғы",-13.787261009216309],["▁търговия",-13.787261009216309],["▁Մուտք",-13.787261009216309],["▁להמשיך",-13.787261009216309],["▁تذکر",-13.787261009216309],["▁کنٹرول",-13.787261009216309],["▁कम्युनिष्ट",-13.787261009216309],["▁कात्तिक",-13.787261009216309],["▁পরিকল্পনা",-13.787261009216309],["▁ଡାକ୍ତର",-13.787261009216309],["▁எதிராக",-13.787261009216309],["▁బాలీవుడ్",-13.787261009216309],["▁სასწავლო",-13.787261009216309],["▁ሂደት",-13.787261009216309],["▁Мадрид",-13.787261962890623],["▁продукция",-13.787261962890623],["▁थालेको",-13.787261962890623],["モノ",-13.787261962890623],["▁izstāde",-13.787264823913574],["▁markedsføring",-13.787264823913574],["▁съответни",-13.78726577758789],["▁სახით",-13.78726577758789],["▁международни",-13.787266731262209],["▁କରନ୍ତି",-13.787266731262209],["ظلم",-13.787274360656738],["▁Amedê",-13.787275314331056],["▁népszerű",-13.787275314331056],["▁ਵੱਲ",-13.787276268005373],["▁cəhd",-13.787277221679688],["▁Caddesi",-13.78728199005127],["▁Croatia",-13.78728199005127],["▁английско",-13.787283897399902],["▁Jóhann",-13.78728485107422],["▁konstruktiv",-13.787285804748535],["▁uppskatta",-13.787285804748535],["မွတ္တမ္း",-13.787286758422852],["▁ምድር",-13.787287712097168],["相続",-13.787287712097168],["betingsten",-13.787290573120115],["▁tähista",-13.78729248046875],["▁እንደማይ",-13.78730010986328],["▁verminder",-13.787312507629396],["好み",-13.787312507629396],["▁verklaar",-13.78731632232666],["▁મજા",-13.787322998046877],["▁ஆசை",-13.787327766418455],["▁СРБИ",-13.787341117858888],["सारखे",-13.78734302520752],["1920",-13.787343978881836],["▁Главная",-13.787357330322266],["▁İdarəsinin",-13.78736686706543],["▁Szabad",-13.787391662597656],["▁보통",-13.787397384643556],["ເຈັບ",-13.787408828735352],["▁بوجود",-13.787423133850098],["▁3,8",-13.787424087524414],["▁охра
ны",-13.78742504119873],["əliyev",-13.787437438964844],["▁vətəndaşı",-13.78744888305664],["▁ўмовы",-13.787453651428224],["能力和",-13.787461280822754],["▁स्वच्छ",-13.787469863891602],["вінава",-13.787471771240234],["▁условие",-13.787500381469728],["▁Руб",-13.787501335144045],["▁역할",-13.78750705718994],["ləyib",-13.787524223327637],["▁Consul",-13.787534713745115],["▁Bollywood",-13.7875394821167],["gyldig",-13.787542343139648],["▁درلود",-13.787555694580078],["▁udgave",-13.78756046295166],["▁նշում",-13.787567138671877],["▁презема",-13.787568092346191],["▁stavka",-13.78757095336914],["▁planes",-13.787585258483888],["Dienstleistungen",-13.787586212158203],["च्चा",-13.787598609924316],["чыла",-13.787617683410645],["▁zapisa",-13.787625312805176],["▁tempel",-13.78762912750244],["▁amplio",-13.787639617919922],["Sau",-13.787640571594238],["ひと",-13.787646293640137],["▁Itä",-13.787672996520996],["▁mỡ",-13.78770637512207],["▁nekaterih",-13.787728309631348],["▁оюндар",-13.787734031677246],["▁karš",-13.787760734558104],["▁wyzna",-13.787769317626951],["▁негизинде",-13.78778076171875],["setter",-13.787792205810549],["▁xərclə",-13.787805557250977],["это",-13.78784465789795],["▁attendre",-13.787847518920898],["▁dáta",-13.787848472595217],["▁понимал",-13.787850379943848],["ख्य",-13.787859916687012],["▁vullnet",-13.787860870361328],["iczną",-13.787864685058594],["نادي",-13.787886619567873],["▁etməyi",-13.787893295288086],["▁musimy",-13.787901878356934],["2500",-13.787909507751465],["manî",-13.787930488586426],["▁prijatelja",-13.78793239593506],["▁справжні",-13.787992477416992],["▁ئانا",-13.788000106811523],["Holland",-13.788004875183104],["დონ",-13.788007736206056],["▁частину",-13.78801441192627],["▁қорытынды",-13.788015365600586],["▁udvide",-13.788016319274902],["ກະຊວງ",-13.788047790527344],["▁двери",-13.788071632385254],["▁fyddai",-13.788101196289062],["▁vifaa",-13.788113594055176],["くれた",-13.788113594055176],["▁Ohr",-13.78813648223877],["▁soutěže",-13.788145065307615],["▁thơm",-13.788147926330566],["មកពី",-13.788209915161133],["▁Guill",-13.788211822509766],["اعتماد",-13.78824234008789],["▁deseño",-13.788244247436523],["івська",-13.78824520111084],["ψω",-13.788283348083496],["ನಾಥ",-13.788288116455078],["▁elekto",-13.788324356079102],["▁тобі",-13.788329124450684],["stavení",-13.788339614868164],["▁tarjota",-13.78835391998291],["▁उन्",-13.788355827331545],["▁necesitas",-13.788368225097656],["▁विश्",-13.788369178771973],["▁vallás",-13.788399696350098],["▁børnene",-13.78840160369873],["▁prøvd",-13.788434028625488],["kezelés",-13.78843879699707],["პტ",-13.788460731506348],["தவ",-13.78846549987793],["▁poveća",-13.788474082946776],["▁רוסי",-13.78847599029541],["postavlj",-13.78847885131836],["▁arco",-13.788487434387209],["▁अरू",-13.788501739501951],["▁Венец",-13.788512229919434],["已成为",-13.788518905639648],["際に",-13.788522720336914],["▁заклик",-13.788529396057127],["ധാന",-13.788552284240724],["publica",-13.788565635681152],["ilebilir",-13.788566589355469],["ગુ",-13.788579940795898],["▁familier",-13.788585662841797],["▁mokslini",-13.78859519958496],["▁sobib",-13.78864288330078],["ኞቹ",-13.788658142089844],["ъд",-13.788690567016602],["▁verificat",-13.788702011108398],["▁novem",-13.788707733154297],["öğ",-13.788711547851562],["▁kapitalist",-13.788729667663574],["▁phà",-13.788763999938965],["▁Blanco",-13.788772583007812],["▁Opfer",-13.788774490356444],["ാവശ്യ",-13.788789749145508],["▁модерни",-13.788858413696287],["kodanik",-13.788859367370604],["▁agertu",-13.78886890411377],["რტ",-13.788873672485352],["ิ้ว",-13.78888702392
578],["tajia",-13.788908004760742],["▁આવેલ",-13.788923263549805],["▁idealnie",-13.788945198059082],["▁površine",-13.788957595825195],["LUT",-13.788959503173828],["AUD",-13.78899383544922],["▁Forschung",-13.788996696472168],["grze",-13.789000511169434],["ÜK",-13.789002418518066],["κλη",-13.78904628753662],["ehrt",-13.789048194885254],["intro",-13.789080619812012],["▁toimitus",-13.789084434509276],["▁sarrera",-13.78908634185791],["husu",-13.789095878601074],["ಪಾದ",-13.789101600646973],["nutri",-13.789102554321287],["▁добавил",-13.789112091064451],["žovat",-13.789130210876465],["აძემ",-13.789134979248049],["卒",-13.789135932922363],["法案",-13.789135932922363],["ΟΥΝ",-13.789137840270996],["гове",-13.789137840270996],["turum",-13.789140701293944],["▁پلاس",-13.789145469665527],["▁Grunn",-13.78915786743164],["znamená",-13.789159774780272],["▁parlamenta",-13.789170265197754],["▁klassische",-13.789172172546388],["▁bakoitza",-13.789189338684082],["されており",-13.789209365844728],["▁ayaw",-13.789213180541992],["ക്കൊ",-13.789214134216309],["▁Lieferung",-13.789214134216309],["ረም",-13.789217948913574],["ອົງການ",-13.78925895690918],["▁Hermann",-13.78925895690918],["ఠ",-13.78927993774414],["ЗУ",-13.789280891418455],["▁eskaini",-13.78929042816162],["dhiga",-13.789291381835938],["▁Fiyat",-13.789305686950684],["▁tubuhnya",-13.789313316345217],["▁дорогу",-13.78931713104248],["▁عورتن",-13.789325714111328],["▁મારે",-13.789355278015137],["▁León",-13.789356231689451],["▁आयात",-13.789373397827148],["▁lengva",-13.789376258850098],["▁проба",-13.789380073547363],["▁математик",-13.789382934570312],["žnih",-13.789392471313477],["ಹು",-13.789424896240234],["רוצים",-13.789429664611816],["▁иргэний",-13.789436340332031],["▁hoppa",-13.78945255279541],["▁хэдий",-13.789453506469728],["ხსნა",-13.78945541381836],["▁sõja",-13.789459228515623],["▁Nyky",-13.78947639465332],["▁yerində",-13.789484977722168],["▁ڈیم",-13.789490699768066],["▁عمان",-13.789491653442385],["ರ್ಪ",-13.789499282836914],["▁propietario",-13.78950309753418],["madım",-13.789518356323242],["▁Fyr",-13.789523124694824],["▁explicit",-13.789531707763672],["落后",-13.789533615112305],["▁haraka",-13.789541244506836],["▁पाव",-13.789548873901367],["personen",-13.78955364227295],["是从",-13.789592742919922],["догу",-13.78959846496582],["▁januára",-13.789615631103516],["▁کېدو",-13.789631843566896],["▁italien",-13.789647102355955],["盼",-13.789660453796388],["袁",-13.78966236114502],["طعام",-13.789668083190918],["▁partiet",-13.789669036865234],["▁Cabo",-13.78966999053955],["冯",-13.7896728515625],["resor",-13.789673805236816],["投诉",-13.789680480957031],["▁групу",-13.789682388305664],["▁Slots",-13.789692878723145],["cikkei",-13.789695739746094],["αριστερ",-13.789703369140623],["偶然",-13.78970432281494],["整治",-13.789709091186523],["▁גלו",-13.789731979370115],["nutia",-13.78973388671875],["አፍሪቃ",-13.78974151611328],["▁dotychczas",-13.78974151611328],["▁kényelmes",-13.78974151611328],["▁əsgər",-13.78974151611328],["กําไร",-13.789742469787598],["▁Melbourne",-13.789742469787598],["▁Parabéns",-13.789742469787598],["▁REPUBLIK",-13.789742469787598],["▁episódio",-13.789742469787598],["▁juhatuse",-13.789742469787598],["▁najważniejsze",-13.789742469787598],["▁penampilan",-13.789742469787598],["▁popüler",-13.789742469787598],["▁szkół",-13.789742469787598],["▁sắt",-13.789742469787598],["▁tækifæri",-13.789742469787598],["▁umístěn",-13.789742469787598],["▁zadržana",-13.789742469787598],["▁Янукович",-13.789742469787598],["▁алгоритм",-13.789742469787598],["▁објасни",-13.789742469787598],["▁углавном",-13.7897
42469787598],["▁Գեւորգ",-13.789742469787598],["▁برطانوی",-13.789742469787598],["▁سگهجي",-13.789742469787598],["▁भौतिक",-13.789742469787598],["▁আমেরিকা",-13.789742469787598],["▁ਪੋਸਟ",-13.789742469787598],["▁உறுதி",-13.789742469787598],["▁മൊബൈല",-13.789742469787598],["▁მანქანა",-13.789742469787598],["▁የመገናኛ",-13.789742469787598],["tutkimuksen",-13.789743423461914],["إمكانية",-13.789743423461914],["▁Fähigkeit",-13.789743423461914],["▁искористи",-13.789743423461914],["▁અભિ",-13.78974437713623],["▁současné",-13.789745330810549],["▁Гэвч",-13.789745330810549],["▁જાહેરાત",-13.789745330810549],["hodnocení",-13.789748191833496],["▁epekto",-13.789748191833496],["▁тэлефон",-13.789748191833496],["▁നേരിട",-13.789750099182127],["▁apprendre",-13.789751052856444],["ໂຄ",-13.789752006530762],["▁решила",-13.789752006530762],["أسلحة",-13.789752960205078],["նենք",-13.789753913879396],["▁హత్య",-13.789755821228027],["blue",-13.789758682250977],["▁aadan",-13.789758682250977],["ຈ່າຍ",-13.789759635925291],["▁डाक्टर",-13.78976345062256],["▁آگاہ",-13.78976821899414],["▁ଜିଲା",-13.789774894714355],["▁моќ",-13.789779663085938],["▁huynh",-13.789783477783203],["▁schlimm",-13.789786338806152],["▁ಪ್ಯಾ",-13.789786338806152],["▁ਪ੍ਰਤੀ",-13.789793014526367],["▁التنفيذي",-13.789793968200684],["▁Freizeit",-13.789796829223633],["▁νεα",-13.789800643920898],["▁universidades",-13.789801597595217],["▁tindak",-13.789802551269531],["▁الشمال",-13.78981876373291],["වනවා",-13.789820671081545],["▁ցանց",-13.789820671081545],["κίνηση",-13.789827346801758],["▁kysymys",-13.789827346801758],["▁sammenheng",-13.789828300476074],["စစ်ဆေး",-13.789836883544922],["ின்றன",-13.78984546661377],["ไว้ใน",-13.789849281311035],["▁감소",-13.789854049682615],["હે",-13.789860725402832],["▁salario",-13.789871215820312],["▁گاہ",-13.789874076843262],["▁ಸೈಟ್",-13.789876937866213],["▁dimensiuni",-13.789880752563477],["▁ambeien",-13.789883613586426],["▁variation",-13.789897918701172],["▁organisaatio",-13.789898872375488],["▁întreaga",-13.789898872375488],["▁sävy",-13.78990077972412],["делу",-13.789907455444336],["▁sight",-13.789922714233398],["ונס",-13.789926528930664],["▁പറയാന്",-13.789928436279297],["090",-13.78995418548584],["▁කිහිප",-13.789958953857422],["▁Seguro",-13.789978981018066],["▁նստ",-13.7899808883667],["മാസ",-13.789994239807127],["▁boule",-13.789994239807127],["▁drauf",-13.789999008178713],["▁collective",-13.790002822875977],["▁शरद",-13.790003776550291],["úci",-13.790019989013672],["weeg",-13.790035247802734],["iselta",-13.790037155151367],["▁hùng",-13.79004192352295],["▁बालों",-13.790046691894531],["▁பின்ன",-13.790050506591797],["▁დაიწყე",-13.790050506591797],["▁డైరెక్టర్",-13.790088653564451],["▁Haben",-13.790104866027832],["▁accesibil",-13.790106773376465],["dirib",-13.790116310119627],["▁площадь",-13.790117263793944],["▁vervolg",-13.790118217468262],["套房",-13.790118217468262],["上周",-13.790119171142578],["θυμ",-13.790139198303224],["▁оборон",-13.79014015197754],["▁Bhd",-13.790142059326172],["ነጻ",-13.790153503417969],["perat",-13.790155410766602],["▁güvenliği",-13.790157318115234],["rahman",-13.790173530578612],["▁مجاني",-13.790180206298828],["▁الضغط",-13.790205955505373],["▁одржана",-13.790213584899902],["▁osnovi",-13.790216445922852],["▁faaliyetleri",-13.7902193069458],["▁ստանալու",-13.79022216796875],["▁규모",-13.79022216796875],["▁enzim",-13.790240287780762],["▁Ekip",-13.79026222229004],["▁Ustaz",-13.790278434753418],["都需要",-13.790283203125],["▁ambaŭ",-13.790284156799316],["▁митинг",-13.790318489074709],["520",-13.79032039642334],["▁змогу",-13.790341377
2583],["▁miris",-13.790352821350098],["▁employer",-13.790360450744627],["بولي",-13.790361404418944],["트로",-13.790379524230955],["▁থাকা",-13.79039192199707],["▁Gho",-13.79040241241455],["▁الكرة",-13.7904052734375],["නො",-13.790409088134766],["▁chicken",-13.790409088134766],["▁Aurora",-13.79042625427246],["ինք",-13.790431022644045],["گەر",-13.79043197631836],["wody",-13.790472984313965],["såg",-13.79047679901123],["esport",-13.790492057800291],["▁घुम",-13.790492057800291],["քաղաքականություն",-13.790502548217772],["တာပါ။",-13.790515899658203],["▁Newydd",-13.79052448272705],["XY",-13.790531158447266],["ឧ",-13.790539741516112],["▁bepaald",-13.790562629699709],["កើតឡើង",-13.790568351745604],["▁escucha",-13.790570259094238],["▁ಬೇಕ",-13.790572166442873],["zinga",-13.790583610534668],["▁морето",-13.79059600830078],["▁yard",-13.790606498718262],["Gənc",-13.79061222076416],["muligheder",-13.79061222076416],["▁dinsdag",-13.790616989135742],["되면",-13.790627479553224],["▁Əf",-13.790635108947754],["upaya",-13.790657997131348],["เป็",-13.79067611694336],["▁drogą",-13.790684700012209],["▁paljud",-13.79069995880127],["▁hazırlanmış",-13.790728569030762],["▁puma",-13.790731430053713],["Труд",-13.790741920471191],["▁مهاجرت",-13.79074478149414],["پول",-13.790751457214355],["避け",-13.790761947631836],["▁Presentació",-13.790780067443848],["千年",-13.79081916809082],["stimul",-13.790826797485352],["▁obuv",-13.790827751159668],["లెట్",-13.79085922241211],["▁сделали",-13.790867805480955],["▁modalitat",-13.790870666503906],["▁ഇതുവരെ",-13.790878295898438],["▁ਦੂਰ",-13.79088020324707],["▁Alder",-13.790881156921388],["▁beállítás",-13.790884971618652],["▁Silvi",-13.790892601013184],["▁монах",-13.790895462036133],["▁shtator",-13.79090404510498],["▁жылда",-13.790918350219728],["Asia",-13.790942192077637],["▁rodičov",-13.790945053100586],["факт",-13.790953636169434],["arbeidet",-13.79095458984375],["▁קשור",-13.7909574508667],["funkci",-13.790979385375977],["▁najväčší",-13.79098129272461],["ოდენ",-13.790983200073242],["▁ომის",-13.791006088256836],["▁Покрај",-13.791025161743164],["▁ledende",-13.79103183746338],["▁براہ",-13.79103183746338],["▁پەن",-13.79103946685791],["незалежн",-13.791043281555176],["▁habría",-13.791051864624023],["dioxid",-13.79105281829834],["▁ciwan",-13.791056632995604],["▁алууга",-13.791059494018556],["平面",-13.79106903076172],["▁Photograph",-13.791074752807615],["▁سورة",-13.79108715057373],["ختلف",-13.79110622406006],["▁అమలు",-13.791126251220703],["▁NAB",-13.791138648986816],["dienu",-13.791152000427246],["对比",-13.791162490844728],["▁կհա",-13.791163444519045],["▁vinh",-13.79117202758789],["▁والشر",-13.791183471679688],["Neftçi",-13.79118537902832],["▁લાવ",-13.791196823120115],["৮৮",-13.7912015914917],["▁snadno",-13.791210174560549],["விற்கு",-13.79121208190918],["ਵੀਰ",-13.791215896606444],["сээр",-13.791242599487305],["▁ಪೂರ್ಣ",-13.791247367858888],["شۇ",-13.791252136230469],["зай",-13.79128360748291],["▁planeja",-13.791284561157228],["▁Economia",-13.79129409790039],["▁감정",-13.791319847106934],["חיבור",-13.791321754455566],["▁arter",-13.791326522827148],["▁провеждане",-13.791330337524414],["tumisen",-13.791333198547363],["никът",-13.791393280029297],["lého",-13.791394233703612],["▁좋았",-13.791394233703612],["բեկ",-13.791399002075195],["▁povie",-13.791399002075195],["▁Санал",-13.791403770446776],["לחם",-13.791404724121094],["▁vozilo",-13.791431427001951],["▁Signal",-13.791438102722168],["▁еко",-13.791440963745115],["celli",-13.79145622253418],["ιχν",-13.791457176208496],["բո",-13.791462898254396],["▁රස්ත",-13.791
475296020508],["تكامل",-13.79149055480957],["০৫",-13.791504859924316],["▁Félag",-13.791519165039062],["heel",-13.791528701782228],["▁KARA",-13.79153537750244],["▁верно",-13.791542053222656],["рства",-13.791543960571287],["▁Krav",-13.791543960571287],["ለሰ",-13.791589736938477],["ګړ",-13.791590690612791],["jskich",-13.791593551635742],["레드",-13.791613578796388],["▁Прем",-13.791618347167969],["▁251",-13.791638374328612],["▁класі",-13.791640281677246],["▁אחוז",-13.791641235351562],["জনক",-13.791665077209473],["▁högsta",-13.791667938232422],["嬉しい",-13.791687965393066],["225",-13.791688919067385],["▁կերպ",-13.791708946228027],["▁biztonsági",-13.791711807250977],["▁küçəsi",-13.791711807250977],["▁activităţi",-13.791754722595217],["κατοικ",-13.791792869567873],["▁CARA",-13.79180908203125],["▁ranna",-13.791839599609377],["เลยทีเดียว",-13.791840553283691],["▁metara",-13.79185962677002],["EČ",-13.791906356811523],["forhandling",-13.79190731048584],["▁Button",-13.791912078857422],["வேளை",-13.7919340133667],["เกอร์",-13.791947364807127],["▁ფიქრობ",-13.791949272155762],["ලන්",-13.791974067687988],["到现在",-13.79197883605957],["कुश",-13.791983604431152],["▁owners",-13.792009353637695],["▁liens",-13.79202365875244],["▁নিবন্ধ",-13.792037010192873],["தர",-13.792040824890137],["Национал",-13.792041778564451],["▁پڙهي",-13.792052268981934],["▁Veliki",-13.79206085205078],["▁включени",-13.792067527770996],["▁სიახლე",-13.792068481445312],["▁Success",-13.79208278656006],["▁بالی",-13.792108535766602],["ольф",-13.792109489440918],["▁जस्ता",-13.792110443115234],["standing",-13.792133331298828],["▁specifico",-13.792150497436523],["犹",-13.79217529296875],["趋",-13.792177200317385],["僵",-13.792179107666016],["lícula",-13.792189598083496],["腎",-13.792196273803713],["斌",-13.792200088500977],["▁Predsjednik",-13.792207717895508],["られました",-13.792208671569824],["▁Sən",-13.792221069335938],["訂購",-13.792221069335938],["▁Själv",-13.792228698730469],["Ί",-13.792229652404783],["▁Felhasználó",-13.792229652404783],["▁Kegiatan",-13.792229652404783],["▁eztabaida",-13.792229652404783],["▁gözlənilir",-13.792229652404783],["▁khớp",-13.792229652404783],["▁menceritakan",-13.792229652404783],["▁ndërkohë",-13.792229652404783],["▁psoriazis",-13.792229652404783],["▁θάλασσα",-13.792229652404783],["▁Учурда",-13.792229652404783],["▁комбинация",-13.792229652404783],["▁підтримку",-13.792229652404783],["▁հերոս",-13.792229652404783],["▁تمہیں",-13.792229652404783],["▁پارلیمنٹ",-13.792229652404783],["▁अनिवार्य",-13.792229652404783],["▁પરિણામ",-13.792229652404783],["▁ಹಲವಾರು",-13.792229652404783],["▁ලාංකික",-13.792229652404783],["▁แขวง",-13.792229652404783],["▁አመራር",-13.792229652404783],["▁UNIVERS",-13.792230606079102],["▁binadamu",-13.792230606079102],["▁सञ्जाल",-13.792230606079102],["ႀကိမ္",-13.792231559753418],["▁pêwîst",-13.792231559753418],["▁отбеляза",-13.792231559753418],["▁၂၀၁၇",-13.792232513427734],["▁larunbat",-13.79223346710205],["▁സന്ദര്",-13.79223346710205],["▁слободе",-13.792235374450684],["▁Rani",-13.792240142822266],["▁keindahan",-13.792243957519531],["▁automática",-13.792245864868164],["▁აღწერა",-13.792245864868164],["▁سعادت",-13.792248725891112],["▁इलाज",-13.792248725891112],["▁prilagodi",-13.79224967956543],["▁gestalten",-13.792250633239746],["精華",-13.792250633239746],["▁bildirilir",-13.792251586914062],["חנויות",-13.79225730895996],["لىرىمىز",-13.79225730895996],["▁რაიმე",-13.792261123657228],["▁šķiet",-13.79226303100586],["▁personuppgifter",-13.792266845703123],["▁inclúe",-13.792268753051758],["▁Tính",-13.79227066040039],["▁селото",-13.7
9227066040039],["电视台",-13.792278289794922],["▁남북",-13.792279243469238],["스럽",-13.792285919189451],["▁ћете",-13.792291641235352],["▁1950-",-13.792296409606934],["jeżdża",-13.79230785369873],["▁žurnalist",-13.792308807373049],["▁elektronika",-13.792319297790527],["▁तोड़",-13.792323112487791],["▁फेरी",-13.792323112487791],["▁الحكومية",-13.79233455657959],["▁altul",-13.792338371276855],["▁పూర్తిగా",-13.79234504699707],["▁bakgrund",-13.792346954345703],["▁Nowy",-13.792351722717283],["▁hóf",-13.7923583984375],["▁Summit",-13.792359352111816],["varsel",-13.792362213134766],["പെ",-13.792364120483398],["▁lähteä",-13.792365074157717],["▁tròn",-13.79237461090088],["مىدى",-13.792384147644045],["▁stable",-13.792386054992676],["▁shtuar",-13.792388916015623],["▁χτ",-13.792391777038574],["bukser",-13.792409896850586],["▁bëhen",-13.79245376586914],["▁የሥራ",-13.79246711730957],["ėkite",-13.792472839355469],["▁العقل",-13.792473793029783],["▁conjunta",-13.792492866516112],["▁продолжал",-13.79250144958496],["▁असणार",-13.792510986328123],["▁illam",-13.79253387451172],["રની",-13.792534828186035],["ธุ",-13.792548179626465],["▁juara",-13.792550086975098],["▁görsel",-13.792551040649414],["▁remény",-13.792552947998049],["Bild",-13.792559623718262],["▁дисциплін",-13.792563438415527],["▁Pater",-13.792566299438477],["▁Иска",-13.792569160461426],["▁하나님의",-13.79257106781006],["janski",-13.792572021484377],["▁exercer",-13.792573928833008],["▁löv",-13.792591094970703],["သီခ်င္း",-13.792610168457031],["▁gevallen",-13.792619705200195],["회원",-13.79262351989746],["ేషన్",-13.792632102966309],["▁meša",-13.792632102966309],["▁ስልጣን",-13.792644500732422],["▁vulnerabil",-13.792649269104004],["▁վկայ",-13.79265022277832],["ぶり",-13.792659759521484],["▁zeuden",-13.79266357421875],["▁քննարկ",-13.792670249938965],["民事",-13.792672157287598],["▁samaan",-13.79267692565918],["367",-13.792677879333496],["▁знања",-13.792688369750977],["▁dům",-13.792694091796877],["▁मई",-13.792701721191406],["หุ",-13.79270362854004],["ുമുണ്ട്",-13.792716979980469],["▁πράξη",-13.79272747039795],["▁крыніцу",-13.79272747039795],["члэн",-13.792743682861328],["▁credibil",-13.792744636535645],["▁Spolu",-13.792750358581545],["▁نفع",-13.79275608062744],["draai",-13.792771339416504],["▁bağlıdır",-13.792784690856934],["▁Datu",-13.792787551879885],["lashtirilgan",-13.792795181274414],["▁českých",-13.792800903320312],["▁останат",-13.792814254760742],["зеркал",-13.792816162109377],["▁미리",-13.792829513549805],["piemēro",-13.792839050292969],["▁decidido",-13.79286289215088],["γος",-13.792888641357422],["▁akoma",-13.792889595031738],["▁hagyomány",-13.79289436340332],["▁објављен",-13.79290008544922],["ministeeriumi",-13.792928695678713],["▁Anand",-13.792943000793455],["вство",-13.79294776916504],["મિલ",-13.792960166931152],["▁Советот",-13.792974472045898],["▁posjed",-13.79297924041748],["ಉ",-13.79298210144043],["▁tapti",-13.793006896972656],["ीज",-13.793010711669922],["Хар",-13.793025016784668],["▁αυτοκίνητο",-13.79302978515625],["▁중심으로",-13.793034553527832],["▁kompil",-13.793049812316896],["ච්",-13.79305362701416],["▁түштү",-13.793057441711426],["▁16%",-13.793096542358398],["▁Rust",-13.79310417175293],["מקום",-13.793107986450195],["▁umulig",-13.79312801361084],["strict",-13.793150901794434],["▁Phase",-13.793172836303713],["multi",-13.793206214904783],["▁ilgai",-13.793249130249023],["باغ",-13.793292045593262],["▁simptome",-13.79330062866211],["▁plaque",-13.79330348968506],["参加了",-13.793325424194336],["haq",-13.7933349609375],["▁konteks",-13.793341636657717],["न्यास",-13.79334259033
2031],["ేలా",-13.79335117340088],["▁ولكنه",-13.793377876281738],["▁прошлого",-13.793427467346191],["▁belief",-13.793435096740724],["▁കോള",-13.793435096740724],["▁wyg",-13.793438911437988],["▁sporočila",-13.793448448181152],["▁kanten",-13.793451309204102],["高科技",-13.793465614318848],["helyettes",-13.79347801208496],["▁мәселесі",-13.793478965759276],["සල",-13.793482780456545],["kúša",-13.793485641479492],["ેય",-13.79351806640625],["▁margine",-13.793548583984377],["▁ഷെ",-13.793560028076172],["▁spiral",-13.793588638305664],["をはじめ",-13.793607711791992],["siynau",-13.79361343383789],["▁مختلفی",-13.793641090393066],["▁چیت",-13.793644905090332],["கூ",-13.79365062713623],["क्षक",-13.79367733001709],["▁перегляд",-13.793708801269531],["▁tribun",-13.793743133544922],["▁mantan",-13.793749809265137],["▁օգտ",-13.79375171661377],["rekening",-13.793755531311035],["▁ബ്ല",-13.79379940032959],["事宜",-13.79379940032959],["▁verdura",-13.793803215026855],["▁eaque",-13.793813705444336],["▁изборите",-13.793818473815918],["▁লিখ",-13.793822288513184],["▁धान",-13.79382610321045],["▁Posa",-13.79383945465088],["▁คุณสามารถ",-13.793843269348145],["▁hodinu",-13.793853759765623],["7,000",-13.793856620788574],["▁Varga",-13.793866157531738],["▁მახ",-13.79387664794922],["▁ideální",-13.793883323669434],["▁Woh",-13.793886184692385],["андр",-13.793889045715332],["лею",-13.793893814086914],["▁escape",-13.79391098022461],["▁krigen",-13.793917655944824],["177",-13.793939590454102],["▁saakka",-13.793941497802734],["▁surprised",-13.793943405151367],["▁Μην",-13.793973922729492],["भ्रम",-13.793978691101074],["▁односу",-13.793990135192873],["плод",-13.794004440307615],["▁கிடைக்கும்",-13.794010162353516],["▁swing",-13.794058799743652],["▁رہے۔",-13.79406452178955],["远远",-13.794100761413574],["folia",-13.794111251831056],["ריע",-13.79415512084961],["▁nájde",-13.79416275024414],["▁تارىخى",-13.79416847229004],["▁Primul",-13.794208526611328],["oniston",-13.79421329498291],["しますが",-13.794214248657228],["▁studenog",-13.79423713684082],["▁מכבי",-13.7942533493042],["▁timid",-13.794262886047363],["▁आउट",-13.794273376464844],["してきました",-13.794285774230955],["▁សេដ្ឋកិច្ច",-13.79429531097412],["▁incluido",-13.794306755065918],["▁ediliyor",-13.794307708740234],["OMS",-13.794326782226562],["▁zeci",-13.79433250427246],["ดีขึ้น",-13.794343948364258],["ませんが",-13.794356346130373],["▁баштады",-13.794374465942385],["▁regulator",-13.79438591003418],["▁ಪತ್ರಿಕೆ",-13.79439640045166],["изације",-13.794413566589355],["függő",-13.794416427612305],["▁winkels",-13.794416427612305],["ប្រា",-13.794443130493164],["▁інтерв",-13.794448852539062],["шак",-13.794466018676758],["▁fejleszt",-13.794479370117188],["▁رکيو",-13.794482231140137],["当局",-13.794483184814451],["▁omaan",-13.794487953186035],["▁Bomba",-13.794488906860352],["▁raudon",-13.794498443603516],["્ટી",-13.794529914855955],["▁rendelkez",-13.794541358947754],["▁νοσ",-13.794553756713867],["▁негізде",-13.794564247131348],["êrê",-13.794586181640623],["▁կյանք",-13.79460620880127],["▁Bash",-13.794622421264648],["▁došao",-13.794632911682127],["▁quotidiana",-13.794638633728027],["žný",-13.794639587402344],["▁ರವಿ",-13.794658660888672],["ВЕР",-13.79466724395752],["ОВО",-13.794668197631836],["辖",-13.79468059539795],["▁ATV",-13.794682502746582],["资讯",-13.794689178466797],["เทียม",-13.794694900512695],["帥",-13.794694900512695],["醇",-13.794708251953123],["▁Tampereen",-13.794713973999023],["ទីតាំង",-13.794721603393556],["เติบโต",-13.794722557067873],["အမျိုးသမီး",-13.794722557067873],["▁ишеним",-13.794722557067873],["ئەلەيھىسسالام
",-13.794723510742188],["กระดูก",-13.794723510742188],["សហគមន៍",-13.794723510742188],["▁Ayuntamiento",-13.794723510742188],["▁Flughafen",-13.794723510742188],["▁Suceava",-13.794723510742188],["▁Vergangenheit",-13.794723510742188],["▁Vĩnh",-13.794723510742188],["▁născut",-13.794723510742188],["▁októbra",-13.794723510742188],["▁português",-13.794723510742188],["▁pozycji",-13.794723510742188],["▁starostlivosť",-13.794723510742188],["▁Íslendinga",-13.794723510742188],["▁ансамбл",-13.794723510742188],["▁децембра",-13.794723510742188],["▁становништва",-13.794723510742188],["▁ڳالهيون",-13.794723510742188],["▁ধন্যবাদ",-13.794723510742188],["▁ਭਾਜਪਾ",-13.794723510742188],["▁வரலாறு",-13.794723510742188],["▁വിക്കിപീഡിയ",-13.794723510742188],["▁സർക്കാർ",-13.794723510742188],["▁දැනුවත්",-13.794723510742188],["▁සුපිරි",-13.794723510742188],["▁พฤษภาคม",-13.794723510742188],["▁အဓိက",-13.794723510742188],["▁ساینس",-13.794724464416504],["▁bekymre",-13.79472541809082],["▁ermöglichen",-13.79472541809082],["▁የዚህ",-13.79472541809082],["▁അനുമതി",-13.794726371765137],["正规",-13.794726371765137],["▁penumpang",-13.79472827911377],["▁ușor",-13.79472827911377],["▁недостаточно",-13.794729232788086],["▁każda",-13.794730186462402],["▁tetszik",-13.794730186462402],["▁tsakanin",-13.794730186462402],["▁достоинств",-13.794730186462402],["▁नाशिक",-13.794730186462402],["ՈՂ",-13.79473114013672],["▁optik",-13.79473114013672],["▁දිගටම",-13.79473114013672],["▁տեղակալ",-13.794732093811035],["主管部门",-13.794732093811035],["bahagian",-13.794733047485352],["▁كىتاب",-13.794733047485352],["👇",-13.794734954833984],["▁Kenntnis",-13.794737815856934],["▁підкресл",-13.794737815856934],["▁වුනේ",-13.794737815856934],["护理",-13.794737815856934],["信貸",-13.794739723205566],["▁jumatate",-13.794740676879885],["▁वास्तविक",-13.794740676879885],["▁උපන්",-13.794742584228516],["▁شهادة",-13.794744491577148],["▁Nuestra",-13.794745445251465],["▁odluke",-13.794745445251465],["▁prakticky",-13.794745445251465],["▁ලැබූ",-13.794745445251465],["▁गर्नुभयो",-13.79475212097168],["▁හැටියට",-13.794754028320312],["▁Rabi",-13.794756889343262],["▁այդպես",-13.794759750366213],["▁झोप",-13.794760704040527],["▁defensiv",-13.794767379760742],["▁γκολ",-13.794771194458008],["▁지금까지",-13.79477882385254],["▁Eagle",-13.794781684875488],["ອ່ານ",-13.79478359222412],["جری",-13.794788360595703],["富士",-13.794788360595703],["▁поняла",-13.794793128967283],["▁keşfe",-13.794804573059082],["▁fehlen",-13.794814109802246],["posed",-13.79481601715088],["▁اليڪشن",-13.794819831848145],["▁inredning",-13.79482650756836],["▁Utusan",-13.794828414916992],["▁století",-13.794836044311523],["దేవి",-13.79483699798584],["fundur",-13.794837951660156],["きれい",-13.794838905334473],["▁sambutan",-13.794857025146484],["▁presència",-13.794858932495115],["▁aksesuar",-13.794879913330078],["▁oxunan",-13.794881820678713],["▁varsinkin",-13.794891357421877],["▁decoração",-13.794913291931152],["▁տղա",-13.794913291931152],["を集め",-13.794913291931152],["▁majorité",-13.79491901397705],["▁ದೇವರ",-13.794919967651367],["▁Beton",-13.794937133789062],["▁дизел",-13.794940948486328],["▁країнах",-13.79494285583496],["▁مقتل",-13.79494285583496],["▁Московск",-13.794943809509276],["▁שווה",-13.79494571685791],["▁Čeprav",-13.794954299926758],["的应用",-13.794957160949709],["▁관심을",-13.794979095458984],["ইস",-13.794991493225098],["▁موضع",-13.794997215270996],["納得",-13.795002937316896],["▁الاقتصادي",-13.795014381408691],["▁пачаў",-13.795022010803224],["idah",-13.79502296447754],["▁القضاء",-13.795026779174805],["esquerra",-13.795032501220703],["mala
i",-13.795034408569336],["▁joukko",-13.795044898986816],["乘坐",-13.795045852661133],["▁информацията",-13.795048713684082],["▁världs",-13.795062065124512],["▁गिरी",-13.795063972473145],["▁isiklik",-13.795079231262209],["▁дио",-13.79509162902832],["▁verhalen",-13.795093536376951],["▁λαού",-13.79511260986328],["морф",-13.79511547088623],["助け",-13.795116424560549],["▁내용은",-13.795122146606444],["▁north",-13.795132637023926],["▁uğurla",-13.79513454437256],["▁البيان",-13.79513454437256],["▁novērtē",-13.795151710510254],["σημα",-13.79515266418457],["אבי",-13.79515266418457],["▁dialogue",-13.795161247253418],["تكم",-13.795173645019531],["▁Tätä",-13.795185089111328],["▁Dagen",-13.79518699645996],["▁kokeilla",-13.795196533203123],["فک",-13.795201301574709],["▁основного",-13.795201301574709],["▁توڙي",-13.795201301574709],["▁valabil",-13.795202255249023],["tjenesten",-13.795244216918944],["حرف",-13.795251846313477],["▁veikti",-13.795262336730955],["▁rukáv",-13.795272827148438],["▁Gyven",-13.795276641845703],["ušies",-13.795324325561523],["▁asili",-13.795348167419434],["不喜欢",-13.795354843139648],["▁keçirdiyi",-13.795361518859863],["gelbė",-13.795369148254396],["järjestelmän",-13.795380592346191],["▁Fredag",-13.795380592346191],["αλά",-13.795394897460938],["מסלול",-13.795408248901367],["▁moita",-13.79542064666748],["▁pamo",-13.795439720153809],["▁Deporte",-13.795452117919922],["▁ارض",-13.795470237731934],["▁oefen",-13.795475006103516],["pitan",-13.795479774475098],["حسين",-13.79549789428711],["ကိုယ်",-13.795506477355955],["циони",-13.795507431030272],["▁виконавч",-13.795507431030272],["○",-13.795516967773438],["жень",-13.795520782470703],["▁mõõt",-13.795537948608398],["▁Vị",-13.795555114746094],["▁dispensa",-13.795597076416016],["▁bottle",-13.795604705810549],["▁szaf",-13.79560661315918],["မွား",-13.795610427856444],["▁كرة",-13.795610427856444],["GLI",-13.795614242553713],["▁vermə",-13.795620918273926],["▁sancta",-13.795636177062988],["ផ្សាយ",-13.79563808441162],["▁צבי",-13.79567813873291],["▁13%",-13.795684814453123],["▁გუნდი",-13.795694351196287],["רומ",-13.795699119567873],["述べ",-13.795721054077148],["▁რეჟისორი",-13.795730590820312],["ಟನ್",-13.79574966430664],["أو",-13.795774459838867],["▁antalet",-13.795775413513184],["▁قطعات",-13.795777320861816],["אנחנו",-13.795798301696776],["▁titlu",-13.7958345413208],["සල්",-13.79584789276123],["▁wegge",-13.79585075378418],["預算",-13.795867919921877],["▁باران",-13.795897483825684],["▁doğrudan",-13.795903205871582],["gnant",-13.795910835266112],["▁iddynt",-13.79592227935791],["زاز",-13.795927047729492],["kthim",-13.79592990875244],["▁sjellë",-13.795937538146973],["ρωση",-13.795979499816896],["bibigay",-13.795991897583008],["стві",-13.796015739440918],["▁административно",-13.796022415161133],["▁јави",-13.796040534973145],["▁შესაძლებლობა",-13.79604148864746],["▁басма",-13.796048164367676],["▁kyla",-13.796079635620115],["▁publicidade",-13.796083450317385],["կայան",-13.796086311340332],["انھن",-13.796096801757812],["▁Factory",-13.796103477478027],["▁Дараа",-13.796119689941406],["▁comenzi",-13.796152114868164],["zovat",-13.79617977142334],["実際の",-13.796186447143556],["▁втория",-13.796189308166504],["▁dejlige",-13.796208381652832],["ानन्द",-13.796216011047363],["▁requires",-13.796250343322754],["қай",-13.796257019042969],["jalni",-13.79627799987793],["▁existing",-13.79628086090088],["▁دوو",-13.796314239501951],["▁skulu",-13.796332359313965],["▁vidē",-13.796334266662598],["▁eleccións",-13.796342849731444],["ónico",-13.796344757080078],["▁പുരുഷ",-13.79636001586914],["ቀይ",-1
3.796384811401367],["קלי",-13.796394348144531],["▁akcije",-13.79643440246582],["▁vláda",-13.796451568603516],["▁svež",-13.796453475952148],["▁adidas",-13.796483039855955],["▁Հայկ",-13.79649829864502],["▁לעו",-13.796499252319336],["▁מקומי",-13.796594619750977],["▁opmærksom",-13.796601295471191],["ACIÓ",-13.796602249145508],["▁যেতে",-13.79660415649414],["ಳ್",-13.796610832214355],["foot",-13.796628952026367],["▁seçenekleri",-13.796631813049316],["▁SDS",-13.79664707183838],["dön",-13.796648979187012],["たので",-13.796661376953123],["▁қыс",-13.79667854309082],["miasa",-13.796696662902832],["պո",-13.796701431274414],["ാകുന്നു",-13.796723365783691],["▁galva",-13.796728134155272],["▁Путін",-13.796730041503906],["▁ਪਾਰ",-13.79674243927002],["станції",-13.796812057495115],["▁slutten",-13.796828269958496],["աբանական",-13.79685115814209],["▁lattia",-13.796859741210938],["▁leczenia",-13.796866416931152],["dzim",-13.79688835144043],["▁11%",-13.79688835144043],["۳۱",-13.796893119812012],["▁Congo",-13.79689598083496],["▁Baza",-13.796940803527832],["іздің",-13.796942710876465],["ռա",-13.796947479248049],["▁připraven",-13.796953201293944],["▁přesto",-13.796981811523438],["▁absolvent",-13.796987533569336],["▁produir",-13.796991348266602],["▁mayroon",-13.796995162963867],["ायो",-13.797008514404297],["▁kiekvienas",-13.797016143798828],["ireadh",-13.797057151794434],["▁eres",-13.797057151794434],["ट्रा",-13.797073364257812],["▁americký",-13.79708194732666],["▁снабд",-13.79708766937256],["するように",-13.797091484069824],["ਇਰ",-13.797099113464355],["还将",-13.797103881835938],["▁Ofic",-13.797126770019531],["년도",-13.79714298248291],["টায়",-13.797154426574709],["美人",-13.797162055969238],["▁montri",-13.797163009643556],["秉持",-13.797170639038086],["縫",-13.797194480895996],["aktiviteter",-13.797195434570312],["▁عامة",-13.797197341918944],["ದನ",-13.797208786010742],["▁පාසල",-13.797208786010742],["พรรค",-13.797222137451172],["ไวท์เทนนิ่ง",-13.797223091125488],["ទាញយក",-13.797223091125488],["▁Sąjungos",-13.797223091125488],["▁addirittura",-13.797223091125488],["▁ausdrücklich",-13.797223091125488],["▁berkualitas",-13.797223091125488],["▁dalyvauti",-13.797223091125488],["▁välttämättä",-13.797223091125488],["▁λίγες",-13.797223091125488],["▁ياردەم",-13.797223091125488],["▁जोखिम",-13.797223091125488],["▁ਸਿਹਤ",-13.797223091125488],["▁ಚುನಾವಣಾ",-13.797223091125488],["▁მოსწავლე",-13.797223091125488],["▁ტრადიცი",-13.797223091125488],["▁ሚኒስትሩ",-13.797223091125488],["쯤",-13.797223091125488],["▁közönség",-13.797224044799805],["▁memerintah",-13.797224044799805],["▁évtized",-13.797224044799805],["▁जाईल",-13.797224044799805],["▁ପ୍ରଶାସନ",-13.797224044799805],["橡",-13.797224044799805],["밍",-13.797224044799805],["šené",-13.79722499847412],["▁şehîd",-13.79722499847412],["▁συστήματος",-13.797225952148438],["▁දැමීම",-13.797225952148438],["▁Rijeka",-13.79722785949707],["▁δίπλα",-13.79722785949707],["▁וועלן",-13.79722785949707],["▁распределен",-13.797228813171388],["▁المعارف",-13.797228813171388],["▁PowerPoint",-13.797229766845703],["▁பயணம்",-13.797229766845703],["▁ATƏT",-13.79723072052002],["▁Snapchat",-13.797231674194336],["▁possibly",-13.797231674194336],["▁Defensa",-13.797233581542969],["▁άδεια",-13.797233581542969],["▁하늘",-13.797233581542969],["▁Rwanda",-13.797235488891602],["björn",-13.797236442565918],["▁composé",-13.79724407196045],["▁Згідн",-13.79724407196045],["▁dievčatá",-13.797245025634766],["ພວກເຂົາ",-13.797245979309082],["▁ئابدۇ",-13.797248840332031],["▁मतलब",-13.797250747680664],["▁kuchyň",-13.79725170135498],["シーン",-13.797259330749512],["
▁ಹಿಂದಿನ",-13.79726219177246],["กระจาย",-13.797264099121094],["▁tietenkin",-13.79726505279541],["▁ناټو",-13.797268867492676],["▁şehit",-13.797269821166992],["івки",-13.79727268218994],["▁niewy",-13.797277450561523],["비밀번호",-13.797281265258787],["▁خسته",-13.797286033630373],["▁Qof",-13.797286987304688],["▁moodusta",-13.797289848327637],["▁جلسات",-13.797300338745115],["שירות",-13.79730224609375],["մաս",-13.797316551208496],["开奖",-13.797316551208496],["▁executar",-13.797325134277344],["我可以",-13.79732894897461],["▁þekki",-13.79734992980957],["▁કરનાર",-13.797353744506836],["သို",-13.797367095947266],["▁նախորդ",-13.797368049621582],["▁lumayan",-13.797401428222656],["▁제안",-13.797403335571287],["насил",-13.797409057617188],["登山",-13.797409057617188],["머니",-13.797411918640137],["▁réteg",-13.797417640686035],["▁vezetés",-13.79746437072754],["▁иштеген",-13.797466278076172],["▁открыто",-13.797466278076172],["▁कामदार",-13.79746913909912],["▁veio",-13.797480583190918],["▁моменту",-13.797481536865234],["▁пъ",-13.7974853515625],["conscious",-13.79748821258545],["▁ገብረ",-13.797504425048828],["▁mwenyewe",-13.79750633239746],["▁majority",-13.797515869140623],["▁ապրել",-13.797527313232422],["มาเป็น",-13.7975435256958],["ിരിക്കുകയാണ്",-13.797571182250977],["▁কালো",-13.797574996948242],["▁onlangs",-13.797576904296877],["▁Στον",-13.797576904296877],["▁طے",-13.79758071899414],["▁croire",-13.797601699829102],["▁تیاری",-13.797605514526367],["▁ruas",-13.79762363433838],["▁Mong",-13.79766082763672],["dığında",-13.7976655960083],["▁flak",-13.7976655960083],["ണോ",-13.797667503356934],["▁Maska",-13.797679901123049],["▁thép",-13.797683715820312],["▁mozga",-13.79772663116455],["时代的",-13.797771453857422],["▁Viagra",-13.797806739807127],["ප්ත",-13.797823905944824],["പത്ര",-13.797834396362305],["מיט",-13.797837257385254],["schätze",-13.797840118408203],["กุล",-13.797842025756836],["▁slappe",-13.797844886779783],["ဖတ်",-13.797850608825684],["▁malware",-13.797850608825684],["ကြန္",-13.797857284545898],["▁தேர்",-13.797874450683594],["▁Lễ",-13.797877311706545],["сүнө",-13.79788303375244],["▁zaboravi",-13.797889709472656],["▁Tiền",-13.797895431518556],["▁komisije",-13.797898292541504],["یکا",-13.797905921936035],["▁констат",-13.797931671142578],["▁Hoàn",-13.797938346862791],["▁професионално",-13.79793930053711],["▁prethodn",-13.797945022583008],["▁поставља",-13.797967910766602],["▁muskel",-13.797971725463867],["▁(45)",-13.797981262207031],["ທີ່ໄດ້",-13.797986030578612],["▁demanar",-13.797990798950195],["७१",-13.798004150390623],["ከፍ",-13.798032760620115],["▁Hôm",-13.79804229736328],["▁attesa",-13.798049926757812],["▁kymmen",-13.798077583312988],["гэх",-13.79811668395996],["▁físicas",-13.798174858093262],["wering",-13.798177719116213],["눈",-13.798184394836426],["Download",-13.798213005065918],["▁RAD",-13.798236846923828],["▁Kriteri",-13.798253059387209],["▁ਉਪਰ",-13.798267364501951],["适用",-13.798298835754396],["▁Leid",-13.798304557800291],["▁STAT",-13.798319816589355],["aalselt",-13.798324584960938],["▁pahar",-13.798340797424316],["nızın",-13.798377990722656],["▁okupi",-13.798396110534668],["▁አፈ",-13.79840087890625],["илип",-13.7984037399292],["▁festen",-13.798421859741213],["▁ونقل",-13.798443794250488],["▁mhe",-13.798457145690918],["▁نسخ",-13.798463821411133],["נחל",-13.798480033874512],["▁složen",-13.798481941223145],["μέν",-13.798484802246094],["▁izvr",-13.798521995544434],["▁campionato",-13.7985258102417],["▁serupa",-13.798528671264648],["isempi",-13.798537254333496],["factor",-13.798539161682127],["hlásil",-13.798539161682127],["
最近は",-13.798543930053713],["▁ප්ර",-13.798559188842772],["▁Simula",-13.798563957214355],["▁모르",-13.79857063293457],["бърз",-13.798606872558594],["חליט",-13.79863452911377],["▁omnem",-13.798649787902832],["ାଦ",-13.798650741577148],["غاندا",-13.798660278320312],["▁අතින්",-13.798666954040527],["▁ഇൻ",-13.798688888549805],["ፋይ",-13.79870891571045],["▁بعدما",-13.798717498779297],["ใ",-13.798725128173828],["▁Inspira",-13.798734664916992],["358",-13.798758506774902],["らなかった",-13.798758506774902],["▁ኢትዮጵያን",-13.79876708984375],["▁танысты",-13.798803329467772],["▁հնչ",-13.79880428314209],["LECT",-13.798812866210938],["▁Spir",-13.798824310302734],["▁julge",-13.79883098602295],["தமிழ்",-13.798842430114746],["408",-13.798853874206545],["▁eraldi",-13.798884391784668],["▁மாற",-13.798895835876465],["timme",-13.79890251159668],["▁গান",-13.798914909362791],["தார",-13.798921585083008],["▁magyaráz",-13.798928260803224],["röð",-13.798934936523438],["производ",-13.798941612243652],["▁pacate",-13.798956871032717],["▁leikki",-13.798965454101562],["▁තාත්ත",-13.798978805541992],["▁candida",-13.798994064331056],["▁પદ",-13.798996925354004],["替え",-13.798996925354004],["▁trace",-13.798999786376951],["▁begyndte",-13.799015998840332],["▁limpia",-13.799031257629396],["очной",-13.799036026000977],["кріп",-13.799057960510254],["IBI",-13.79905891418457],["kluz",-13.799063682556152],["▁binası",-13.799068450927734],["ක්ෂණ",-13.799076080322266],["▁βασιλ",-13.799076080322266],["▁diwar",-13.799084663391112],["שׂ",-13.799090385437012],["▁següents",-13.799092292785645],["דוגמא",-13.799100875854492],["▁polskim",-13.799101829528809],["▁precisam",-13.799105644226074],["regula",-13.799115180969238],["▁хэрэглээ",-13.79911994934082],["γών",-13.799120903015137],["▁Бран",-13.799120903015137],["▁galės",-13.79913330078125],["▁чемпион",-13.799134254455566],["ขวาง",-13.799135208129885],["開箱",-13.799147605895996],["대의",-13.799152374267578],["▁sosem",-13.799159049987791],["▁الحسين",-13.79916763305664],["▁түг",-13.799171447753906],["子どもたち",-13.799189567565918],["નાથ",-13.799192428588867],["রত",-13.799199104309082],["møtet",-13.799209594726562],["duzione",-13.79922103881836],["çıları",-13.79922103881836],["säljare",-13.79926872253418],["▁관련된",-13.799269676208496],["▁inteleg",-13.799270629882812],["▁draft",-13.79927921295166],["なんと",-13.799280166625977],["Good",-13.79931926727295],["▁Liik",-13.799321174621582],["▁väder",-13.799327850341797],["ίδων",-13.799335479736328],["▁لاين",-13.799339294433594],["סופר",-13.79934787750244],["▁projektów",-13.799371719360352],["明治",-13.799381256103516],["▁Barber",-13.799383163452148],["▁prazni",-13.79940414428711],["▁puisi",-13.799406051635742],["मांस",-13.799416542053224],["ເຮືອ",-13.799419403076172],["たこと",-13.799434661865234],["ക്കാനുള്ള",-13.799476623535156],["אולי",-13.799525260925291],["▁ગમે",-13.799555778503418],["okuba",-13.799575805664062],["ΔΗ",-13.79958152770996],["專用",-13.799623489379885],["▁невин",-13.79962921142578],["▁фермер",-13.79962921142578],["逊",-13.79962921142578],["▁земља",-13.799653053283691],["債務",-13.79968547821045],["▁14%",-13.799695014953612],["哩",-13.799715995788574],["軟件",-13.79971694946289],["▁Оқушылар",-13.799718856811523],["thuk",-13.799727439880373],["АЊЕ",-13.799727439880373],["สะพาน",-13.799728393554688],["ຍີ່ປຸ່ນ",-13.799728393554688],["นาฬิกา",-13.799729347229004],["፪",-13.799729347229004],["▁Copïo",-13.799729347229004],["▁caràcter",-13.799729347229004],["▁discapacidad",-13.799729347229004],["▁fremragende",-13.799729347229004],["▁legújabb",-13.799729347229004],["▁mengunjungi",-13
.799729347229004],["▁paggamit",-13.799729347229004],["▁terkejut",-13.799729347229004],["▁verbessern",-13.799729347229004],["▁Міністерство",-13.799729347229004],["▁ерекшеліктері",-13.799729347229004],["▁թատրոն",-13.799729347229004],["▁اکاؤنٹ",-13.799729347229004],["▁आहोत",-13.799729347229004],["▁इंडियन",-13.799729347229004],["▁மதுரை",-13.799729347229004],["▁กุมภาพันธ์",-13.799729347229004],["▁გთხოვთ",-13.799729347229004],["▁Spiegel",-13.79973030090332],["▁Tvorba",-13.79973030090332],["▁axşam",-13.79973030090332],["▁ସାହିତ୍ୟ",-13.79973030090332],["▁მონაცემები",-13.79973030090332],["렬",-13.79973030090332],["ផ្ញើ",-13.799731254577637],["▁někde",-13.799731254577637],["▁చదివి",-13.799731254577637],["▁השלישי",-13.79973316192627],["▁atstovai",-13.799734115600586],["▁двајца",-13.799734115600586],["▁menetapkan",-13.799735069274902],["▁ઉપરાંત",-13.799735069274902],["▁கூடாது",-13.799736976623535],["▁reikėtų",-13.799737930297852],["▁쓸",-13.799737930297852],["▁որոշակի",-13.7997407913208],["▁टिप्स",-13.7997407913208],["tății",-13.799742698669434],["▁ਕੁੜੀ",-13.799742698669434],["▁शाळे",-13.79974365234375],["▁ਨਜ਼ਰ",-13.79974365234375],["▁részvétel",-13.799748420715332],["▁شجاع",-13.799749374389648],["▁avhengig",-13.79975128173828],["▁орлого",-13.799755096435549],["▁വിമര്",-13.799756050109863],["▁13:30",-13.79975700378418],["јављује",-13.799762725830078],["▁Lloyd",-13.799766540527344],["▁uzmanību",-13.79976749420166],["▁저렴한",-13.79976749420166],["▁Атанас",-13.79977035522461],["▁አየር",-13.799772262573242],["▁baci",-13.799777030944824],["▁übrigens",-13.799777030944824],["पृ",-13.79979133605957],["▁исследований",-13.79979419708252],["▁avslutte",-13.799795150756836],["▁itsensä",-13.799798011779783],["近年來",-13.799803733825684],["▁kakšne",-13.799805641174316],["▁sigurisë",-13.799805641174316],["▁திருமணம்",-13.799808502197266],["▁indlela",-13.799812316894531],["ອາຍຸ",-13.799818992614746],["▁popola",-13.799822807312012],["▁Тепер",-13.799823760986328],["ขวด",-13.799829483032228],["▁Muxtar",-13.799832344055176],["▁visitors",-13.799835205078123],["▁მდგომარეობა",-13.79983615875244],["▁manner",-13.799839973449709],["flet",-13.799861907958984],["▁боловч",-13.799880981445312],["ਭੂ",-13.79989528656006],["▁કરવાનું",-13.79989528656006],["▁terdengar",-13.799901962280272],["人格",-13.799911499023438],["▁בפייסבוק",-13.799918174743652],["▁selezione",-13.799928665161133],["▁viktigaste",-13.799932479858398],["▁жарат",-13.799935340881348],["bár",-13.79995059967041],["tříd",-13.79995059967041],["▁mahali",-13.799972534179688],["▁MARI",-13.799983024597168],["▁דואר",-13.799999237060549],["▁nivîskar",-13.800004005432127],["▁personagem",-13.800019264221191],["▁edirdi",-13.800020217895508],["▁čip",-13.800026893615724],["▁inutile",-13.800031661987305],["▁मॅ",-13.800037384033203],["▁fermer",-13.800045013427734],["▁inventore",-13.800052642822266],["▁διέ",-13.800057411193848],["▁zwiększ",-13.80005931854248],["▁Tilbud",-13.800068855285645],["▁දවසක්",-13.800068855285645],["▁комби",-13.800076484680176],["▁ырлар",-13.800078392028809],["رۇ",-13.800084114074709],["▁lättare",-13.800097465515137],["▁delante",-13.800113677978516],["▁gráfico",-13.800127029418944],["▁jāat",-13.80019187927246],["▁нині",-13.800198554992676],["▁ಮರು",-13.80020236968994],["但是我",-13.800204277038574],["様子",-13.800225257873535],["తిని",-13.80023193359375],["安全生产",-13.800241470336914],["ιακού",-13.800259590148926],["▁Krish",-13.800260543823242],["▁كلا",-13.80026626586914],["▁అప్పటి",-13.80026626586914],["▁glasova",-13.800267219543455],["▁fellow",-13.800273895263672],["ណ្ឌ",-13.800280
570983888],["▁GTX",-13.800289154052734],["▁impulsar",-13.800289154052734],["▁vašich",-13.800291061401367],["▁Tăng",-13.800296783447266],["▁infants",-13.800311088562012],["▁velko",-13.800314903259276],["▁linguri",-13.800315856933594],["cuta",-13.80031967163086],["usapan",-13.800338745117188],["ምራ",-13.80034065246582],["▁stake",-13.80035400390625],["▁Nika",-13.800373077392578],["▁жолдо",-13.800374031066896],["ాత్మక",-13.80038070678711],["▁Dow",-13.80038070678711],["követ",-13.80038833618164],["INDI",-13.800396919250488],["ളും",-13.800396919250488],["▁Wor",-13.800410270690918],["▁domenii",-13.800411224365234],["▁نۇر",-13.800437927246094],["ੀਅਨ",-13.80044651031494],["▁Septem",-13.80044651031494],["падает",-13.800492286682127],["▁treaba",-13.800493240356444],["▁зуун",-13.800508499145508],["▁principu",-13.800511360168455],["らし",-13.800512313842772],["perioden",-13.800521850585938],["ЛЮ",-13.800529479980469],["▁Huh",-13.80054759979248],["▁тату",-13.800586700439451],["▁Newton",-13.80058765411377],["▁पॅ",-13.80059051513672],["▁restul",-13.800607681274414],["ຮາ",-13.80062484741211],["▁چھپ",-13.800637245178224],["▁1891",-13.80064296722412],["▁progressiv",-13.80066967010498],["טת",-13.80070686340332],["שולחן",-13.80071258544922],["ബുക്ക",-13.800721168518066],["体調",-13.800729751586914],["▁Ստ",-13.800748825073242],["στές",-13.800750732421877],["▁первое",-13.800751686096191],["ebilme",-13.800766944885254],["انتخاب",-13.800766944885254],["ISTI",-13.800769805908203],["pedagog",-13.800769805908203],["إسرائيل",-13.800769805908203],["ສະຫະລັດ",-13.800775527954102],["▁Завод",-13.800787925720217],["ผู้นํา",-13.800792694091797],["nå",-13.800808906555176],["determina",-13.800810813903809],["стям",-13.800817489624023],["▁Banc",-13.800832748413086],["▁accipi",-13.800841331481934],["▁bepaal",-13.8008451461792],["▁companiei",-13.800848007202148],["▁тавих",-13.80085277557373],["▁qoys",-13.800854682922363],["▁મહે",-13.800864219665527],["▁zgjedhur",-13.80086612701416],["▁полета",-13.800875663757324],["551",-13.800877571105955],["Prim",-13.800922393798828],["አሜሪካ",-13.80092430114746],["ենդ",-13.800951957702637],["推出了",-13.800952911376951],["▁צעד",-13.800957679748535],["1%)",-13.8009614944458],["▁bence",-13.800963401794434],["НИТЕ",-13.800969123840332],["▁forklarer",-13.800985336303713],["▁†",-13.800997734069824],["▁Versi",-13.800999641418455],["▁Møre",-13.801002502441406],["ਪਤੀ",-13.801017761230469],["ofobi",-13.801051139831545],["▁Đài",-13.80105209350586],["▁თავდა",-13.801074028015137],["طلع",-13.80111312866211],["▁старше",-13.801114082336426],["عراض",-13.801142692565918],["▁Mā",-13.801153182983398],["ũng",-13.801158905029297],["▁بزرگی",-13.801172256469728],["▁ഓടി",-13.801173210144045],["డ్స్",-13.801179885864258],["బడిన",-13.80119514465332],["▁trọn",-13.801201820373535],["▁tentera",-13.801224708557127],["▁ନେବା",-13.801237106323242],["▁cicle",-13.801246643066406],["لاعب",-13.801254272460938],["▁вперше",-13.801280975341797],["▁bolnav",-13.801300048828123],["▁Солун",-13.801311492919922],["Май",-13.801340103149414],["▁следващия",-13.801356315612791],["מלך",-13.801384925842283],["രണം",-13.80139446258545],["ുമുള്ള",-13.801398277282717],["SIT",-13.801401138305664],["▁ഉത്",-13.80140209197998],["ierul",-13.801417350769045],["▁հանդիպել",-13.801432609558104],["▁şeyin",-13.801465034484863],["ანში",-13.801481246948242],["▁праву",-13.801481246948242],["▁ریخت",-13.801484107971191],["▁lukket",-13.801496505737305],["исты",-13.801530838012695],["dannelse",-13.801533699035645],["▁bəyan",-13.801539421081545],["ילד",-13.80154037475586],["▁ag
radeci",-13.80155086517334],["▁Doç",-13.801559448242188],["▁Bereits",-13.801562309265137],["▁எழுதி",-13.8015775680542],["▁рівно",-13.80158233642578],["նետ",-13.801583290100098],["פאל",-13.801602363586426],["▁hüküm",-13.801607131958008],["▁ingredients",-13.801618576049805],["▁ligga",-13.801652908325195],["▁kryetari",-13.80165958404541],["▁кызматкерлери",-13.801661491394045],["▁διαφορετικ",-13.801664352416992],["▁tingui",-13.80166721343994],["▁chamar",-13.801681518554688],["▁vārdi",-13.801685333251951],["▁Vī",-13.801691055297852],["▁benyttes",-13.80172061920166],["8,9",-13.80172348022461],["ரெ",-13.801730155944824],["▁یورپی",-13.801742553710938],["▁لمس",-13.801765441894531],["▁públics",-13.80178165435791],["▁Alvar",-13.801788330078123],["▁Volta",-13.801795959472656],["loft",-13.801809310913086],["▁játékok",-13.801828384399414],["▁senest",-13.801839828491213],["ეო",-13.801841735839844],["▁kely",-13.801872253417969],["面倒",-13.801891326904297],["▁دانه",-13.801894187927246],["EER",-13.801898002624512],["▁vrarë",-13.801918029785156],["▁시도",-13.801932334899902],["▁játszott",-13.80193328857422],["برمج",-13.801947593688965],["យ៉ា",-13.801962852478027],["▁Joker",-13.801976203918455],["palveluiden",-13.801980018615724],["ύλα",-13.801993370056152],["มอ",-13.80202293395996],["hew",-13.802045822143556],["ంగి",-13.802054405212402],["▁біо",-13.802069664001465],["▁misure",-13.80207061767578],["στούν",-13.80207347869873],["НАТА",-13.80207347869873],["▁siltä",-13.802078247070312],["▁chake",-13.802087783813477],["єш",-13.802088737487791],["▁Филм",-13.80209732055664],["▁muur",-13.802103996276855],["клав",-13.8021240234375],["▁echipe",-13.80214500427246],["▁hrub",-13.802145957946776],["縱",-13.8021821975708],["巾",-13.80218505859375],["▁студэнт",-13.802199363708496],["▁काळात",-13.802200317382812],["ГӨ",-13.802207946777344],["矩",-13.802209854125977],["採取",-13.802231788635254],["診断",-13.802238464355469],["เนียน",-13.802241325378418],["ពាក់ព័ន្ធ",-13.802241325378418],["▁шілде",-13.802241325378418],["स्क्राईब",-13.802242279052734],["เทศบาล",-13.802242279052734],["▁Uingereza",-13.802242279052734],["▁prvenstva",-13.802242279052734],["▁przyjaciół",-13.802242279052734],["▁päikese",-13.802242279052734],["▁tiešsaistē",-13.802242279052734],["▁ystyried",-13.802242279052734],["▁Ελλάδος",-13.802242279052734],["▁εκατομμύρια",-13.802242279052734],["▁денсаулық",-13.802242279052734],["▁ийгилик",-13.802242279052734],["▁աղջիկ",-13.802242279052734],["▁العلاقة",-13.802242279052734],["▁فائنل",-13.802242279052734],["▁ویتامین",-13.802242279052734],["▁खेलकुद",-13.802242279052734],["▁वाणिज्य",-13.802242279052734],["▁श्रेणी",-13.802242279052734],["▁ମୋବାଇଲ",-13.802242279052734],["▁උපදෙස්",-13.802242279052734],["▁ፕሬዚዳንት",-13.802242279052734],["▁детям",-13.80224323272705],["▁అంచనా",-13.80224323272705],["▁Tecnologia",-13.802244186401367],["▁වුනත්",-13.802244186401367],["▁Jupiter",-13.80224609375],["▁dotyczących",-13.80224609375],["▁јединствен",-13.802247047424316],["▁Ciutat",-13.802248001098633],["▁Magazeti",-13.802250862121582],["▁Τώρα",-13.802250862121582],["▁заедница",-13.802252769470217],["▁kísérlet",-13.802255630493164],["▁Затем",-13.80225658416748],["▁знім",-13.80225658416748],["▁televizyon",-13.802260398864746],["▁שיהיה",-13.802260398864746],["出門",-13.802260398864746],["όνων",-13.80226230621338],["▁REPORT",-13.80226230621338],["▁ඔවුන්ට",-13.80226230621338],["▁мүлк",-13.802265167236328],["▁kaedah",-13.802266120910645],["▁обвиня",-13.802268981933594],["▁غړي",-13.802270889282228],["▁मोठा",-13.80227279663086],["▁sammenhæng",-13.802274703979492
],["▁ආරක්",-13.802274703979492],["▁éigin",-13.802276611328123],["▁tölvu",-13.802278518676758],["▁iontach",-13.802281379699709],["▁Белорус",-13.802288055419922],["▁ହୋଇଛନ୍ତି",-13.802288055419922],["债务",-13.80229949951172],["▁gyventi",-13.802308082580566],["▁infanoj",-13.80231761932373],["▁domáci",-13.802319526672363],["▁графік",-13.802323341369627],["▁संधी",-13.802323341369627],["毒品",-13.802329063415527],["▁ਪਹਿਲੀ",-13.802335739135742],["▁शेती",-13.802339553833008],["▁ဘယ္လို",-13.802343368530272],["▁framåt",-13.80234718322754],["▁conforto",-13.802350044250488],["▁lisätä",-13.802356719970703],["▁токму",-13.802356719970703],["▁හිතුව",-13.802356719970703],["▁בשלב",-13.802364349365234],["▁ହେବାକୁ",-13.802367210388184],["▁starý",-13.802376747131348],["plug",-13.80238151550293],["▁Wake",-13.80238151550293],["▁pikkus",-13.802383422851562],["കോം",-13.802388191223145],["▁בסרט",-13.802392959594728],["ираните",-13.802393913269045],["▁przepisów",-13.802393913269045],["mättä",-13.802395820617676],["▁آلاف",-13.802398681640623],["בחינה",-13.802401542663574],["tiivinen",-13.802408218383787],["▁TNI",-13.802413940429688],["▁haluaisi",-13.802417755126951],["▁Akibat",-13.80242156982422],["kolleg",-13.802424430847168],["▁kasutatud",-13.80242919921875],["▁करावी",-13.802430152893066],["LTE",-13.802441596984863],["纳入",-13.802444458007812],["▁sprint",-13.802448272705078],["▁cerebro",-13.802456855773926],["▁incrementa",-13.802456855773926],["▁Installation",-13.802477836608888],["▁Τσίπρας",-13.802483558654783],["▁anticipat",-13.80248737335205],["▁ugyanaz",-13.80248737335205],["ותיו",-13.802505493164062],["▁sekalipun",-13.80250644683838],["▁dollarë",-13.802508354187012],["▁aumentare",-13.80252742767334],["▁phán",-13.8025484085083],["▁හිතෙන",-13.802555084228516],["▁frakci",-13.802566528320312],["najua",-13.80258083343506],["ويض",-13.802587509155272],["▁dodatni",-13.802606582641602],["شكال",-13.802608489990234],["koval",-13.80261516571045],["fejlesztés",-13.802620887756348],["▁Məclis",-13.802641868591309],["現地",-13.802641868591309],["▁desperat",-13.802651405334473],["▁Ами",-13.802654266357422],["Jesteśmy",-13.80266284942627],["▁odjel",-13.802677154541016],["▁magasabb",-13.802680015563965],["▁обслужване",-13.802684783935549],["▁mangiare",-13.802689552307127],["liseks",-13.80270767211914],["ilerek",-13.802722930908203],["▁అనుకున్న",-13.802728652954102],["▁proposito",-13.802740097045898],["יצירת",-13.802742004394531],["境外",-13.802745819091797],["▁qeveria",-13.802748680114746],["▁domaćin",-13.802762985229492],["▁caint",-13.802764892578123],["▁paradox",-13.802777290344238],["▁Empire",-13.80278778076172],["ล่าง",-13.8027925491333],["approccio",-13.802796363830566],["▁stă",-13.802803993225098],["غرافي",-13.802810668945312],["equivoca",-13.802816390991213],["tettävä",-13.802826881408691],["▁duvar",-13.80283546447754],["овского",-13.802851676940918],["bedarf",-13.80285358428955],["irtear",-13.802874565124512],["▁گۈل",-13.802876472473145],["ІЛ",-13.802882194519045],["owcy",-13.802894592285156],["ଷ୍ଟି",-13.802894592285156],["▁යාලය",-13.802912712097168],["wale",-13.802922248840332],["▁bivši",-13.802928924560549],["ပုိ",-13.802935600280762],["бон",-13.80295181274414],["βελ",-13.802977561950684],["نڈر",-13.80299949645996],["▁ನಂ",-13.803017616271973],["▁გვარ",-13.803020477294922],["mmalla",-13.803046226501465],["▁trebali",-13.803074836730955],["क्रिय",-13.803082466125488],["▁яваа",-13.803153038024902],["挙げ",-13.803153038024902],["▁персональн",-13.803160667419434],["▁বহু",-13.80316162109375],["ങ്കി",-13.803166389465332],["▁pjesme",-13.8031
70204162598],["▁מוצרי",-13.803187370300291],["മ്പോ",-13.803189277648926],["φθεί",-13.803211212158203],["▁incerca",-13.803217887878418],["만이",-13.80322265625],["Gro",-13.803229331970217],["▁mõista",-13.803229331970217],["▁Pinoy",-13.803230285644531],["▁بېل",-13.803258895874023],["▁अप्र",-13.803277015686035],["▁జరుగుతున్న",-13.803290367126465],["แอพ",-13.80329418182373],["▁видят",-13.803297996520996],["▁permanecer",-13.803305625915527],["támasz",-13.803337097167969],["=0",-13.803339958190918],["تغي",-13.80334758758545],["▁biały",-13.803353309631348],["▁nahka",-13.80336093902588],["▁перенос",-13.803364753723145],["tutkimus",-13.803372383117676],["▁fördel",-13.80337619781494],["дігін",-13.803380966186523],["szabály",-13.803382873535156],["τικο",-13.803384780883787],["▁интересни",-13.803400039672852],["的經驗",-13.803434371948242],["▁దర్శక",-13.803447723388672],["▁حمید",-13.80345058441162],["▁Marken",-13.80345344543457],["▁Hung",-13.803454399108888],["▁mikill",-13.803470611572266],["▁jimbo",-13.80348777770996],["▁borcu",-13.803494453430176],["ောင်း",-13.80351448059082],["▁nærings",-13.803526878356934],["قىلى",-13.803531646728516],["▁السفر",-13.80354118347168],["eyim",-13.803545951843262],["▁característica",-13.803571701049805],["▁իշխանության",-13.803576469421388],["იე",-13.80357837677002],["▁Universitetet",-13.803585052490234],["ದೋ",-13.803586959838867],["▁सेट",-13.803594589233398],["ନାଲ",-13.80359935760498],["බූ",-13.80360507965088],["▁gebeurt",-13.803635597229004],["▁kosta",-13.803645133972168],["kaap",-13.803659439086914],["צמח",-13.803661346435549],["సిన",-13.80366325378418],["▁prenume",-13.803665161132812],["157",-13.803672790527344],["!:)",-13.803702354431152],["▁parkeer",-13.803705215454102],["ńskie",-13.80370807647705],["▁padės",-13.80372428894043],["▁ანგარიშ",-13.803738594055176],["▁PLN",-13.80374240875244],["люк",-13.803754806518556],["▁தந்தை",-13.803757667541504],["новски",-13.803763389587402],["הסבר",-13.80380630493164],["▁investigation",-13.80381679534912],["▁mbajt",-13.803820610046388],["▁доверие",-13.80383014678955],["▁მოვა",-13.803838729858398],["▁среднего",-13.803857803344728],["▁속에",-13.803862571716309],["▁cica",-13.803903579711914],["▁muude",-13.80391788482666],["▁partecipa",-13.803922653198242],["▁szoba",-13.803922653198242],["поле",-13.803935050964355],["▁coleg",-13.803936004638672],["▁ihned",-13.803940773010254],["ジョ",-13.80394172668457],["▁konkursa",-13.803943634033203],["ंदर",-13.803963661193848],["▁ընտրությունների",-13.80398178100586],["▁említett",-13.803985595703123],["▁наблюдава",-13.803987503051758],["▁Peti",-13.803988456726074],["▁штук",-13.8040189743042],["▁kropps",-13.804040908813477],["وتا",-13.804064750671388],["▁भाग्य",-13.804073333740234],["因應",-13.804081916809082],["sitter",-13.804102897644045],["kelet",-13.80410861968994],["▁особам",-13.804109573364258],["▁Hits",-13.80411434173584],["▁نوی",-13.804128646850586],["▁componenti",-13.804136276245115],["ušas",-13.80413818359375],["▁அழகு",-13.804155349731444],["owskiej",-13.804160118103027],["يعة",-13.804179191589355],["在意",-13.804235458374023],["▁විශේෂයෙන්",-13.804241180419922],["ȚI",-13.804245948791504],["与中国",-13.804251670837402],["▁escolares",-13.804253578186035],["▁વાળ",-13.804265975952148],["ทอ",-13.80427360534668],["▁pokušaj",-13.804276466369627],["▁optimaal",-13.80428695678711],["ፈለገ",-13.804287910461426],["▁Schlag",-13.804308891296388],["▁ఆన్",-13.804309844970703],["▁čime",-13.804315567016602],["स्तरीय",-13.804323196411133],["▁ťažko",-13.804325103759766],["sfrist",-13.804327964782717],["▁παιδικ",-13.8043432235
71776],["સ્તા",-13.804349899291992],["व्हि",-13.804359436035156],["▁тань",-13.804375648498535],["▁ახალგაზრდ",-13.804378509521484],["▁orku",-13.8043794631958],["razí",-13.8043851852417],["だけではなく",-13.80438995361328],["7.3",-13.804396629333496],["▁интересу",-13.804402351379396],["▁предостави",-13.804408073425291],["▁Hoff",-13.804424285888672],["ໃນວັນທີ",-13.804425239562988],["▁conception",-13.804429054260254],["▁prioridade",-13.804429054260254],["▁тиісті",-13.804447174072266],["▁Oral",-13.804450035095217],["timen",-13.804463386535645],["▁larta",-13.80446434020996],["▁sovet",-13.80446434020996],["политическо",-13.804485321044922],["更能",-13.804492950439451],["▁voitto",-13.804498672485352],["▁क्रि",-13.804521560668944],["▁εξηγ",-13.804524421691896],["病情",-13.80453395843506],["ผิวหนัง",-13.80456256866455],["▁האש",-13.804571151733398],["いずれ",-13.804574966430664],["হত",-13.804597854614258],["studi",-13.804610252380373],["ستفيد",-13.804610252380373],["▁Πρόσ",-13.804622650146484],["ことができ",-13.804622650146484],["▁Мени",-13.80463981628418],["เข้าพัก",-13.804658889770508],["坚决",-13.804664611816406],["▁въведе",-13.804667472839355],["ታን",-13.804682731628418],["▁বিষয়ক",-13.804685592651367],["稻",-13.80469036102295],["čiui",-13.804694175720217],["سائر",-13.804699897766112],["టన్",-13.80470371246338],["ছাত্রী",-13.804722785949709],["▁நடைபெற்ற",-13.804733276367188],["▁geweld",-13.804734230041504],["▁poils",-13.804741859436035],["▁мать",-13.80474853515625],["凄",-13.804750442504885],["สังเกต",-13.80475902557373],["▁ಸುಲಭ",-13.804759979248049],["▁cwcis",-13.804760932922363],["▁peaaegu",-13.804760932922363],["▁асуудлаар",-13.804760932922363],["▁предоставления",-13.804760932922363],["▁تصدیق",-13.804760932922363],["▁చరణ్",-13.804760932922363],["▁እንደነበር",-13.804760932922363],["▁përveç",-13.80476188659668],["▁Γαλλία",-13.80476188659668],["▁кошумча",-13.80476188659668],["▁تۈركىستان",-13.80476188659668],["▁ભવિષ્ય",-13.80476188659668],["▁განსაზღვრ",-13.80476188659668],["▁kontzertu",-13.804762840270996],["▁petrecut",-13.804762840270996],["▁Václav",-13.804763793945312],["▁estabelece",-13.804764747619627],["▁teroris",-13.804764747619627],["▁yumurta",-13.804764747619627],["▁கீழ்",-13.804765701293944],["▁wnaeth",-13.80477237701416],["▁گهڻي",-13.804773330688477],["▁Python",-13.804774284362791],["▁Perubahan",-13.80477523803711],["▁правительство",-13.80477523803711],["▁съгласно",-13.804776191711426],["▁නිවාස",-13.804777145385742],["▁кайт",-13.804780960083008],["▁مەنبە",-13.804780960083008],["▁أسباب",-13.804783821105955],["▁Herbergi",-13.80478572845459],["▁Ղարաբաղի",-13.804789543151855],["▁Napište",-13.804790496826172],["▁историје",-13.804797172546388],["▁scelte",-13.804800987243652],["▁tissit",-13.804803848266602],["näyttö",-13.804805755615234],["ალს",-13.804808616638184],["▁karaoke",-13.804814338684082],["▁Слободан",-13.804816246032717],["ถอน",-13.80482292175293],["▁பகுதியில்",-13.80482292175293],["▁خریداری",-13.804824829101562],["ક્ત",-13.80482578277588],["▁soboto",-13.804827690124512],["ਅਤ",-13.80483055114746],["▁Eindruck",-13.80483055114746],["▁абзац",-13.804834365844728],["▁същата",-13.804838180541992],["ksessa",-13.804840087890623],["▁dávno",-13.80484676361084],["kampioen",-13.804848670959473],["▁배경",-13.80485725402832],["▁Cay",-13.80486011505127],["▁итд",-13.804865837097168],["▁رکھی",-13.804869651794434],["▁உள்ளன",-13.804882049560549],["经济学",-13.804891586303713],["ויס",-13.80490779876709],["▁DPR",-13.804926872253418],["▁krant",-13.804930686950684],["▁budowy",-13.804953575134276],["▁ਸਿੱਖਿਆ",-13.804978370666504],["みます",-13.
804991722106934],["▁cairan",-13.80500030517578],["▁מיוחדים",-13.805009841918944],["▁dhowr",-13.805020332336426],["സല",-13.805023193359377],["▁Svjetsko",-13.805027961730955],["▁aufgenommen",-13.805039405822754],["yyar",-13.805042266845703],["Ong",-13.805071830749512],["▁משחקים",-13.80507469177246],["▁tilpasse",-13.805082321166992],["▁omdöme",-13.805094718933104],["য়ো",-13.805103302001951],["▁Europy",-13.80512237548828],["▁ಕೀ",-13.80512237548828],["▁испод",-13.805123329162598],["▁Dosta",-13.805129051208496],["▁повтори",-13.805142402648926],["▁Siempre",-13.805143356323242],["간다",-13.805148124694824],["▁cieszy",-13.805160522460938],["競賽",-13.805161476135254],["iseanna",-13.805168151855469],["▁saged",-13.805185317993164],["▁Mandela",-13.805195808410645],["▁היד",-13.805217742919922],["▁پائی",-13.805222511291504],["တား",-13.80522918701172],["▁речник",-13.8052396774292],["▁Vui",-13.80524730682373],["/12/2018",-13.805259704589844],["រាប់",-13.805282592773438],["▁üsul",-13.80528450012207],["▁слично",-13.805290222167969],["मिक",-13.80530071258545],["ԱՅԻՆ",-13.805330276489258],["տառ",-13.805333137512209],["ವಾಗುತ್ತದೆ",-13.805333137512209],["ٹے",-13.805335998535156],["ismului",-13.80534553527832],["तलब",-13.805347442626951],["境内",-13.805363655090332],["ൻഡ്",-13.805381774902344],["▁avocat",-13.805387496948242],["▁Equi",-13.805405616760254],["søgt",-13.805412292480469],["▁համարում",-13.805435180664062],["▁паркинг",-13.805444717407228],["▁planten",-13.805450439453123],["▁побачити",-13.805475234985352],["▁خودشان",-13.805475234985352],["▁પરથી",-13.805477142333984],["。【",-13.805485725402832],["▁있는지",-13.805493354797363],["▁Pride",-13.805503845214844],["▁ресторант",-13.80550479888916],["▁Musul",-13.80551528930664],["чыны",-13.80553150177002],["دوز",-13.805540084838867],["电信",-13.805571556091309],["▁لكنه",-13.805591583251951],["▁impulsa",-13.80559539794922],["▁mərhələsi",-13.805618286132812],["▁사진을",-13.80562973022461],["thola",-13.805633544921877],["▁посуд",-13.805634498596191],["EAT",-13.805635452270508],["ladda",-13.805656433105469],["დგება",-13.805658340454102],["▁shprehur",-13.805668830871582],["forretning",-13.805706024169922],["▁finestra",-13.805734634399414],["ившись",-13.805739402770996],["ାଧିକ",-13.805746078491213],["▁Pernah",-13.805778503417969],["ເຮົາ",-13.805782318115234],["▁serwer",-13.80579662322998],["▁środowiska",-13.80580234527588],["μην",-13.805821418762209],["anisha",-13.805824279785156],["אפט",-13.805862426757812],["транспорт",-13.805863380432127],["▁Nations",-13.805864334106444],["▁Сапар",-13.80589771270752],["▁hinnat",-13.805901527404783],["ಕೃಷ್ಣ",-13.805909156799316],["▁Ιερ",-13.805925369262695],["กอ",-13.805971145629885],["ความเร็ว",-13.805981636047363],["等人",-13.805984497070312],["▁Իրանի",-13.805994987487791],["วันหยุด",-13.806000709533691],["▁nuoma",-13.80600357055664],["ంభ",-13.806015968322754],["RADA",-13.806023597717283],["▁төс",-13.806038856506348],["▁मेह",-13.80605411529541],["külön",-13.80606746673584],["মুখ",-13.80607795715332],["▁දොර",-13.80608367919922],["шчыны",-13.8060884475708],["łączenia",-13.806095123291016],["▁şəxsə",-13.806106567382812],["▁známe",-13.806111335754396],["놀이",-13.806113243103027],["▁Віктар",-13.806135177612305],["▁الزامات",-13.80613899230957],["▁فرمت",-13.80614185333252],["▁skyr",-13.806145668029783],["▁patin",-13.806148529052734],["▁отказано",-13.80617332458496],["▁Though",-13.806175231933594],["▁tretje",-13.80617904663086],["▁ಹೀಗಾಗಿ",-13.80617904663086],["▁vítima",-13.806184768676758],["vizyon",-13.80618667602539],["▁anunciou",-13.806192398071287],["ovi
ch",-13.806199073791504],["перс",-13.806206703186035],["▁materiāli",-13.806227684020996],["▁klasifik",-13.806229591369627],["bonne",-13.806236267089844],["▁toirt",-13.806251525878906],["गिरी",-13.806282043457031],["കളായ",-13.806289672851562],["▁nebun",-13.80629062652588],["ساق",-13.806302070617676],["▁trimestr",-13.80630588531494],["warm",-13.806312561035156],["ЕКС",-13.806312561035156],["əsinə",-13.806320190429688],["▁جواز",-13.806321144104004],["▁sânge",-13.806325912475586],["▁managed",-13.806333541870115],["▁maray",-13.80634880065918],["schul",-13.80635929107666],["▁düşmən",-13.806360244750977],["▁időt",-13.806422233581545],["▁vastust",-13.806425094604492],["▁Drie",-13.806426048278809],["▁regala",-13.80643367767334],["şul",-13.80649185180664],["▁metodologia",-13.806498527526855],["▁швед",-13.806499481201172],["▁внатре",-13.806517601013184],["godišnja",-13.806532859802246],["▁Avtor",-13.80653953552246],["▁1886",-13.80655002593994],["များအတွက်",-13.806570053100586],["▁parema",-13.806642532348633],["්නෙ",-13.806645393371582],["არსებობ",-13.806648254394531],["ecund",-13.80665683746338],["ห่าง",-13.806671142578123],["▁apg",-13.806696891784668],["ھم",-13.806699752807615],["▁Destina",-13.806707382202148],["▁შენს",-13.806716918945312],["മയ",-13.806734085083008],["▁corporate",-13.806750297546388],["explica",-13.806753158569336],["МТ",-13.806768417358398],["න්ගෙන්",-13.806777000427246],["▁сайдын",-13.806797981262209],["▁Бурхан",-13.806800842285156],["евые",-13.806802749633787],["▁javlja",-13.806822776794434],["▁módosít",-13.806838035583496],["▁turistas",-13.80687427520752],["▁derisa",-13.806876182556152],["스팅",-13.806890487670898],["▁фестиваль",-13.806892395019531],["үңүз",-13.80689811706543],["utusan",-13.806903839111328],["▁lunar",-13.806910514831545],["▁гражданско",-13.806961059570312],["▁আলম",-13.80697536468506],["▁قىزى",-13.806979179382324],["ጠበቀ",-13.806998252868652],["давали",-13.807000160217283],["ોન",-13.80700969696045],["्यांना",-13.807014465332031],["▁посиланням",-13.807014465332031],["▁மனு",-13.807021141052246],["foorum",-13.807039260864258],["▁sprejem",-13.807076454162598],["想要的",-13.807095527648926],["ఫర్",-13.80710220336914],["▁1889",-13.80710792541504],["▁sepatu",-13.80711555480957],["چک",-13.80711841583252],["▁שפּ",-13.80712604522705],["▁Ceny",-13.807168006896973],["εριν",-13.807173728942873],["tanong",-13.807175636291504],["▁తీర",-13.8071870803833],["▁următor",-13.80718994140625],["אדם",-13.807198524475098],["变革",-13.807215690612791],["沾",-13.807228088378906],["십시오",-13.807229042053224],["▁generator",-13.80723476409912],["▁higit",-13.80724811553955],["橘",-13.807262420654297],["実践",-13.807270050048828],["▁priemonė",-13.80727195739746],["お越し",-13.80727481842041],["樂趣",-13.807280540466309],["വിരുദ്ധ",-13.807283401489258],["よろしくお願いします",-13.807284355163574],["ห่วง",-13.80728530883789],["Ĺ",-13.807286262512209],["ចរាចរណ៍",-13.807286262512209],["▁Náisiúnta",-13.807286262512209],["▁Selanjutnya",-13.807286262512209],["▁aptuveni",-13.807286262512209],["▁ingredienser",-13.807286262512209],["▁øyeblikk",-13.807286262512209],["▁Πρόκειται",-13.807286262512209],["▁Сурталчилгаа",-13.807286262512209],["▁икономически",-13.807286262512209],["▁кәсіби",-13.807286262512209],["▁історія",-13.807286262512209],["▁مشاركة",-13.807286262512209],["▁უსაფრთხოების",-13.807286262512209],["▁ኃይለ",-13.807286262512209],["▁វិទ្យាសាស្ត្រ",-13.807286262512209],["▁einzigartig",-13.807287216186523],["▁priežiūra",-13.807287216186523],["▁προγραμματ",-13.807287216186523],["▁сонирхо",-13.807287216186523],["▁বিশাল",-13.80728721
6186523],["▁घोषित",-13.80728816986084],["▁உறுப்பினர்",-13.80728816986084],["▁ଫଳରେ",-13.807289123535156],["▁momčad",-13.807290077209473],["▁Saeimas",-13.807291030883787],["▁áhugaverð",-13.807291984558104],["▁رؤية",-13.807292938232422],["▁տեսակետ",-13.807293891906738],["▁සූදානම්",-13.807293891906738],["▁წინაშე",-13.80730152130127],["▁erschien",-13.807305335998535],["▁український",-13.807305335998535],["▁तस्वीरें",-13.807307243347168],["▁dyrektor",-13.807311058044434],["▁dédié",-13.807317733764648],["หมายถึง",-13.807320594787598],["▁listed",-13.807320594787598],["▁освободи",-13.807320594787598],["▁onneksi",-13.80732250213623],["▁মামলায়",-13.80732536315918],["▁እንኳ",-13.807327270507812],["▁الأخبار",-13.807331085205078],["▁barəsində",-13.80733871459961],["▁hoppe",-13.807350158691406],["有害",-13.807350158691406],["kêş",-13.807353019714355],["▁decizii",-13.807357788085938],["▁ಶುರು",-13.807377815246582],["▁základě",-13.807393074035645],["▁Kelamin",-13.807395935058594],["▁ځوان",-13.807408332824709],["▁የአሜሪካ",-13.807408332824709],["▁banyaknya",-13.80741024017334],["▁učitel",-13.807411193847656],["▁rilassa",-13.807413101196287],["▁Groningen",-13.807415962219238],["друго",-13.807424545288086],["▁ପାଠ",-13.8074369430542],["▁جانتے",-13.807439804077148],["▁ძირითადი",-13.807442665100098],["▁дистанц",-13.807448387145996],["▁সারা",-13.80745792388916],["▁vseeno",-13.807464599609377],["偶像",-13.807486534118652],["働く",-13.807486534118652],["השקעות",-13.80749225616455],["изирован",-13.807493209838867],["▁odpowiednio",-13.80751132965088],["ძლივ",-13.807512283325195],["ומו",-13.807522773742676],["▁здания",-13.807547569274902],["▁سعود",-13.807564735412598],["▁Sophie",-13.807571411132812],["▁seleção",-13.807581901550291],["▁نياز",-13.807592391967772],["▁Väga",-13.807598114013672],["▁prezidanto",-13.807600021362305],["эў",-13.807602882385254],["စင်",-13.807604789733888],["▁දුන්නේ",-13.807618141174316],["▁ურთიერთობა",-13.807634353637695],["▁verimli",-13.807635307312012],["mənəvi",-13.807639122009276],["▁archeologi",-13.80764102935791],["iyeke",-13.807642936706545],["▁tật",-13.807644844055176],["▁powiat",-13.807645797729492],["พัง",-13.807655334472656],["うち",-13.807655334472656],["qey",-13.807658195495604],["▁Államok",-13.807661056518556],["▁दोषी",-13.807662010192873],["一件事",-13.80766773223877],["▁povestea",-13.807668685913086],["čkami",-13.80767059326172],["▁морају",-13.8076810836792],["展出",-13.8076810836792],["▁bezeichnet",-13.807683944702148],["▁هنڌ",-13.80768585205078],["ÉT",-13.807690620422363],["▁ඉහල",-13.807714462280272],["▁የሀ",-13.807743072509766],["▁ορθ",-13.807785987854004],["▁descarga",-13.807796478271484],["▁Barro",-13.807806015014648],["stofa",-13.807808876037598],["▁tervezés",-13.807836532592772],["▁proteção",-13.807844161987305],["▁kutya",-13.80785083770752],["▁الطلب",-13.80785083770752],["KIS",-13.807854652404783],["▁gəlin",-13.807855606079102],["часов",-13.807861328125],["monte",-13.807862281799316],["ЛЕК",-13.807866096496582],["ूं",-13.807876586914062],["这样一个",-13.80788803100586],["▁recovery",-13.807920455932615],["▁בחיי",-13.807921409606934],["lično",-13.807927131652832],["yitir",-13.80793571472168],["▁тонна",-13.807945251464844],["tická",-13.80797290802002],["▁muerto",-13.80798053741455],["ІЙ",-13.808018684387209],["▁것에",-13.808027267456056],["▁ቅር",-13.808061599731444],["▁футболу",-13.808063507080078],["▁враћа",-13.808066368103027],["▁زرد",-13.808067321777344],["പ്പിച്ചു",-13.80806827545166],["▁машын",-13.808069229125977],["▁équipes",-13.808070182800291],["казалі",-13.80809497833252],["▁чланови",-13.80
8099746704102],["▁مشرق",-13.80811882019043],["▁cartel",-13.808151245117188],["ऐ",-13.808152198791504],["ительные",-13.808161735534668],["▁Národn",-13.808161735534668],["▁แต่ถ้า",-13.808168411254885],["▁shafukan",-13.808173179626465],["▁zlom",-13.80819320678711],["કસ",-13.808195114135742],["ІР",-13.808197975158691],["▁Ameerika",-13.80820369720459],["▁המים",-13.808220863342283],["ොස්",-13.808239936828612],["థి",-13.80824375152588],["aastase",-13.808279037475586],["ላችን",-13.808290481567385],["▁colaborar",-13.808295249938965],["せずに",-13.808295249938965],["▁употребу",-13.808326721191406],["чики",-13.808334350585938],["▁fusta",-13.808340072631836],["▁limiti",-13.808341979980469],["▁sunteti",-13.808347702026367],["▁concentración",-13.80835247039795],["▁KUL",-13.808354377746582],["了两",-13.808356285095217],["прием",-13.80838394165039],["▁tränings",-13.80838680267334],["▁أنك",-13.808398246765137],["▁Brusel",-13.80840015411377],["▁Letzte",-13.808404922485352],["▁Kız",-13.808417320251465],["▁prihaja",-13.80846118927002],["▁pogon",-13.808462142944336],["مسح",-13.808464050292969],["ተማሪ",-13.808467864990234],["▁systémy",-13.80848789215088],["▁ikasi",-13.80849266052246],["▁помина",-13.808493614196776],["▁जिल्लाको",-13.808493614196776],["ghar",-13.808494567871094],["▁ରିପୋର୍ଟ",-13.808496475219728],["טוי",-13.808505058288574],["▁ressource",-13.808517456054688],["称为",-13.80852222442627],["sträck",-13.808538436889648],["下方",-13.808551788330078],["▁sparen",-13.808576583862305],["▁sgt",-13.808579444885254],["できるよう",-13.80859088897705],["▁noktada",-13.808594703674316],["ମୁଖ",-13.808613777160645],["▁האמת",-13.808615684509276],["▁größere",-13.808623313903809],["странен",-13.808627128601074],["etmə",-13.808629035949709],["жных",-13.808635711669922],["אביב",-13.808637619018556],["▁arteri",-13.80864429473877],["▁Valenciana",-13.80866527557373],["▁sankt",-13.808671951293944],["מקצוע",-13.808701515197754],["▁frases",-13.808714866638184],["efnum",-13.808716773986816],["▁necessitats",-13.808730125427246],["skjorte",-13.808741569519045],["の良い",-13.808741569519045],["odpoved",-13.808745384216309],["▁nesto",-13.808749198913574],["▁привет",-13.808764457702637],["▁شخصية",-13.8087797164917],["▁presentada",-13.808796882629396],["бад",-13.808813095092772],["čník",-13.808905601501465],["▁хранителни",-13.808923721313477],["的看法",-13.808926582336426],["感觉到",-13.80893898010254],["▁karali",-13.808944702148438],["▁blauw",-13.808963775634766],["做什么",-13.808963775634766],["building",-13.808964729309082],["▁НАС",-13.808971405029297],["775",-13.80898380279541],["▁prietenii",-13.808988571166992],["からも",-13.809000968933104],["ਰਾਜ",-13.809002876281738],["ୁଥିବା",-13.809003829956056],["▁изабран",-13.809009552001951],["▁verantwortlich",-13.809012413024902],["സ്വാമി",-13.809030532836914],["▁ಕೊನೆಯ",-13.80905818939209],["МГ",-13.809059143066406],["▁capaces",-13.80908203125],["▁mutatja",-13.809088706970217],["vuode",-13.809099197387695],["jaligi",-13.809124946594238],["▁norite",-13.809141159057615],["▁anses",-13.809144973754885],["▁ترين",-13.80915069580078],["േല്",-13.809175491333008],["Kaap",-13.809185028076172],["▁בתור",-13.809211730957031],["یوم",-13.809256553649902],["jûn",-13.809261322021484],["▁halvt",-13.80927848815918],["▁lecteur",-13.809287071228027],["▁στεν",-13.809289932250977],["▁ನಡೆಯುತ್ತ",-13.809290885925291],["▁Kitty",-13.80932903289795],["保有",-13.809337615966797],["מול",-13.809369087219238],["▁upay",-13.8093843460083],["▁ફી",-13.80940055847168],["ಹಂ",-13.809406280517578],["▁Koha",-13.809409141540527],["▁жетекчиси",-13.809455871582031],[
"▁बज",-13.809455871582031],["159",-13.80946159362793],["▁szépség",-13.809471130371094],["સ્ટે",-13.809476852416992],["forrás",-13.809477806091309],["▁HDR",-13.809481620788574],["▁കാറ",-13.809483528137209],["▁Italiana",-13.809496879577637],["▁CUP",-13.80950927734375],["▁klassiske",-13.80951690673828],["▁samotné",-13.80951690673828],["▁ئەسلى",-13.809517860412598],["PIL",-13.80952262878418],["▁голове",-13.809525489807127],["անքի",-13.809542655944824],["តី",-13.809563636779783],["你能",-13.80959701538086],["▁tengan",-13.809606552124023],["छे",-13.809613227844238],["▁Results",-13.809619903564451],["ροι",-13.80962371826172],["▁swinger",-13.809640884399414],["ਯਾ",-13.80964183807373],["ිල",-13.809643745422363],["▁ताजा",-13.809643745422363],["ωπ",-13.809661865234377],["185",-13.809699058532717],["šak",-13.809700965881348],["▁Spil",-13.809701919555664],["▁Martín",-13.80970287322998],["ሕይወ",-13.809717178344728],["▁conseller",-13.809727668762209],["لقد",-13.809736251831056],["förändring",-13.809741020202637],["互聯網",-13.809769630432127],["▁ਕਿਸਾਨ",-13.809782028198242],["缺陷",-13.809783935546877],["亨",-13.809784889221191],["mong",-13.809797286987305],["咖",-13.80980110168457],["▁वास्तव",-13.809805870056152],["బడింది",-13.809814453125],["สมาร์ทโฟน",-13.80981731414795],["อ่ะ",-13.80981731414795],["ποιαδήποτε",-13.809818267822266],["चिंचवड",-13.809818267822266],["▁Erinnerung",-13.809818267822266],["▁Iechyd",-13.809818267822266],["▁Langganan",-13.809818267822266],["▁chạm",-13.809818267822266],["▁dalyvavo",-13.809818267822266],["▁egyértelmű",-13.809818267822266],["▁interesguneak",-13.809818267822266],["▁misericordia",-13.809818267822266],["▁približne",-13.809818267822266],["▁камітэт",-13.809818267822266],["▁поверителност",-13.809818267822266],["▁أسبوع",-13.809818267822266],["▁اسيمبلي",-13.809818267822266],["▁ينبغي",-13.809818267822266],["▁মাঝে",-13.809818267822266],["▁თავმჯდომარე",-13.809818267822266],["▁МОНГОЛ",-13.809819221496582],["▁геноцид",-13.809819221496582],["▁সর্বাধিক",-13.809819221496582],["▁ଗୁରୁତର",-13.809819221496582],["▁പുരസ്",-13.809819221496582],["spoločensk",-13.809820175170898],["คัดเลือก",-13.809820175170898],["▁conhecida",-13.809820175170898],["▁mwanzo",-13.809820175170898],["▁ତିନି",-13.809820175170898],["▁yüzyıl",-13.809821128845217],["▁изградба",-13.809823036193848],["▁көбөй",-13.809823036193848],["▁colexio",-13.809823989868164],["▁Excepteur",-13.809825897216797],["▁Ibland",-13.809825897216797],["βάλλει",-13.809829711914062],["▁अक्षर",-13.809829711914062],["部長",-13.809829711914062],["▁아이폰",-13.80983066558838],["▁дәрігер",-13.809832572937012],["▁коюп",-13.809832572937012],["vīzija",-13.809833526611328],["▁óriás",-13.809833526611328],["▁முன்பு",-13.809833526611328],["ėlė",-13.809836387634276],["▁hivatkoz",-13.809837341308594],["▁rezeki",-13.809837341308594],["▁tuleviku",-13.80983829498291],["我公司",-13.80983829498291],["▁მთავრობა",-13.809839248657228],["▁נגישות",-13.80984115600586],["方がいい",-13.809846878051758],["▁vorbi",-13.809847831726074],["▁συγκεκριμένα",-13.80984878540039],["▁Thượng",-13.809849739074709],["▁svarar",-13.809850692749023],["▁arwain",-13.80985164642334],["▁Cold",-13.809852600097656],["▁cestou",-13.809853553771973],["▁iomlán",-13.809853553771973],["▁Masturb",-13.809855461120604],["▁skäl",-13.809855461120604],["▁चिंता",-13.809856414794922],["▁اڳيان",-13.809857368469238],["พอดี",-13.809861183166504],["▁આપ્યો",-13.80986213684082],["▁કૉ",-13.80986785888672],["▁ඇඟ",-13.809870719909668],["▁galėsite",-13.80987548828125],["▁Сектор",-13.80988597869873],["▁שכבר",-13.809886932373049],["പ്രവർത
്തന",-13.809891700744627],["▁తీసుకో",-13.809896469116213],["gizo",-13.809898376464844],["▁акції",-13.809904098510742],["▁críticas",-13.809921264648438],["管理和",-13.809925079345703],["▁criterios",-13.80992603302002],["▁ରେଳ",-13.809928894042969],["▁ushindi",-13.80994701385498],["▁спречи",-13.80994701385498],["入境",-13.809948921203612],["▁situazioni",-13.809954643249512],["▁Zoals",-13.809958457946776],["▁Komite",-13.809964179992676],["rætt",-13.809974670410156],["▁назнача",-13.809988975524902],["▁болгондо",-13.8100004196167],["야구",-13.8100004196167],["ជាប់",-13.810009956359863],["▁mètres",-13.810016632080078],["▁düzenli",-13.810043334960938],["▁هيواد",-13.810056686401367],["ඩො",-13.810062408447266],["▁модернизаци",-13.810065269470217],["▁الروسية",-13.810070037841797],["Vel",-13.81007194519043],["სპექტ",-13.810075759887695],["▁Полтав",-13.810083389282228],["▁Jūr",-13.81009006500244],["president",-13.810091972351074],["▁Вел",-13.810128211975098],["▁Додик",-13.810150146484377],["עזרה",-13.810176849365234],["▁scala",-13.81018352508545],["નીય",-13.81019115447998],["▁asjad",-13.810193061828612],["ගහ",-13.810195922851562],["▁रेखा",-13.810200691223145],["であっても",-13.810215950012209],["røy",-13.8102388381958],["▁Enrique",-13.810243606567385],["▁పుస్తకం",-13.810256958007812],["▁khung",-13.810269355773926],["▁поголема",-13.810276985168455],["▁तैयारी",-13.810277938842772],["▁ტიპის",-13.810279846191406],["аарай",-13.81028175354004],["▁સમાચારો",-13.810300827026367],["이상",-13.81033992767334],["▁Celebra",-13.8103609085083],["▁герман",-13.810376167297363],["▁Доктор",-13.810378074645996],["▁casta",-13.810383796691896],["▁مزا",-13.810384750366213],["dique",-13.810389518737791],["▁पुगे",-13.81039810180664],["▁պարտ",-13.810400009155272],["▁atribut",-13.81040382385254],["ञ्",-13.810416221618652],["▁בײַ",-13.810420036315918],["▁बालिका",-13.810425758361816],["▁đua",-13.810437202453612],["θρο",-13.810441970825195],["έλι",-13.810453414916992],["▁knull",-13.8104829788208],["▁Jauna",-13.810483932495115],["▁ଛାତ୍ର",-13.810484886169434],["▁vidí",-13.810506820678713],["ಥ್",-13.81051254272461],["独特",-13.810523986816406],["▁Základní",-13.810532569885254],["▁techniky",-13.810532569885254],["khoz",-13.81056022644043],["▁выбары",-13.810577392578123],["▁oppdage",-13.81058120727539],["▁johto",-13.810588836669922],["▁prijat",-13.810591697692873],["▁խաղեր",-13.81059455871582],["agatik",-13.810601234436035],["▁പക",-13.810602188110352],["▁kyselin",-13.810619354248049],["▁finalitat",-13.810623168945312],["Англ",-13.810629844665527],["▁سوچي",-13.810638427734377],["▁Rota",-13.810639381408691],["▁sprake",-13.810657501220703],["▁הלא",-13.810667037963867],["▁ducha",-13.810673713684082],["▁nyári",-13.810678482055664],["▁Holmes",-13.81072235107422],["▁hænge",-13.81072235107422],["▁vēsturi",-13.810731887817385],["▁absent",-13.810766220092772],["▁సాయి",-13.810770988464355],["▁dachte",-13.810775756835938],["▁לחבר",-13.810775756835938],["▁revenue",-13.810779571533203],["▁kurva",-13.81078815460205],["▁ನಡೆಸಿದ",-13.810805320739746],["έλε",-13.81080722808838],["spillet",-13.810810089111328],["▁ملڪن",-13.810819625854492],["ÍR",-13.810843467712402],["نویس",-13.8108491897583],["▁ଚନ୍ଦ୍ର",-13.810860633850098],["▁conservar",-13.810883522033691],["▁ռուսական",-13.81090259552002],["МИН",-13.810908317565918],["조건",-13.810925483703612],["▁ലാല",-13.810927391052246],["▁pepper",-13.810949325561523],["なんで",-13.810957908630373],["სტო",-13.810976028442385],["▁జీవితం",-13.810978889465332],["bibliotek",-13.81098747253418],["으로부터",-13.810990333557127],["好評",-13.811027526855
469],["호텔",-13.811052322387695],["avaldus",-13.81105613708496],["Cookie",-13.811059951782228],["торів",-13.811100006103516],["▁toitu",-13.81110668182373],["▁kötött",-13.81114387512207],["קניון",-13.811162948608398],["خۇ",-13.811168670654297],["▁بازیکن",-13.81117057800293],["изатор",-13.8112154006958],["▁ਔ",-13.811222076416016],["▁Istri",-13.811223030090332],["▁formularz",-13.811223030090332],["ሪክ",-13.81123161315918],["▁كبيرا",-13.811240196228027],["метри",-13.811248779296877],["ଂଟ",-13.811248779296877],["全家",-13.811253547668455],["▁Sebe",-13.81126594543457],["▁түсті",-13.811267852783203],["havan",-13.811298370361328],["▁Udalak",-13.811312675476074],["▁политиката",-13.811322212219238],["وأ",-13.811326026916504],["▁네트워크",-13.811327934265137],["给自己",-13.811359405517578],["▁viděl",-13.811378479003906],["▁ciklus",-13.811405181884766],["познат",-13.811420440673828],["yoq",-13.811421394348145],["laithe",-13.811431884765623],["▁Πολύ",-13.81143856048584],["▁reģistrēt",-13.811442375183104],["▁پایدار",-13.811447143554688],["構造",-13.811473846435549],["▁postupne",-13.81149959564209],["्टी",-13.811515808105469],["▁exhaust",-13.811534881591797],["მწვანე",-13.811562538146973],["ເອ",-13.811578750610352],["▁paklaus",-13.811585426330566],["ئت",-13.811593055725098],["▁lîst",-13.811613082885742],["मंडल",-13.811623573303224],["ნებით",-13.811627388000488],["▁Naujausi",-13.811639785766602],["▁ഒളി",-13.811644554138184],["▁lẻ",-13.8116455078125],["TQ",-13.811647415161133],["company",-13.811654090881348],["▁দিনে",-13.811664581298828],["satunya",-13.81168270111084],["▁базі",-13.811687469482422],["▁erdő",-13.811689376831056],["ూరి",-13.811692237854004],["▁нацыянальна",-13.811713218688965],["ထိုင္",-13.811725616455078],["mazgā",-13.811765670776367],["那種",-13.811773300170898],["หว",-13.811782836914062],["նկ",-13.811803817749023],["ئاب",-13.811813354492188],["▁Wifi",-13.811850547790527],["▁партнера",-13.811856269836426],["форд",-13.811857223510742],["▁Dorf",-13.81186580657959],["姐妹",-13.811903953552246],["お仕事",-13.811944007873535],["▁prøv",-13.811945915222168],["പറ",-13.811963081359863],["▁muri",-13.811966896057127],["▁татвар",-13.81203269958496],["▁შინაგან",-13.812040328979492],["заем",-13.812044143676758],["Có",-13.812045097351074],["▁Чита",-13.812060356140137],["▁Зад",-13.812071800231934],["▁Josh",-13.812088012695312],["járól",-13.812089920043944],["▁ռազմա",-13.812091827392578],["▁nätverk",-13.81209945678711],["examen",-13.812105178833008],["▁Драго",-13.812113761901855],["▁leisten",-13.812127113342283],["ကွ",-13.812132835388184],["用户的",-13.812134742736816],["stöðum",-13.812188148498535],["▁Konstitusiya",-13.812196731567385],["ಳಿದ",-13.812200546264648],["▁الخامس",-13.812200546264648],["788",-13.81220531463623],["新鲜",-13.812206268310549],["ителем",-13.812238693237305],["煉",-13.81224250793457],["▁Minor",-13.812250137329102],["▁пацієнт",-13.812256813049316],["詹姆斯",-13.812297821044922],["欣赏",-13.81230926513672],["ယောက်",-13.812310218811035],["▁χέρι",-13.812314987182615],["錦",-13.812321662902832],["关闭",-13.812331199645996],["▁Akademiyası",-13.812337875366213],["营造",-13.812338829040527],["札",-13.812339782714844],["させて頂きます",-13.81235122680664],["ประทับใจ",-13.81235408782959],["ຮຽກຮ້ອງ",-13.812355041503906],["haastattelu",-13.812355995178224],["สถานการณ์",-13.812355995178224],["▁Kıbrıs",-13.812355995178224],["▁gegužės",-13.812355995178224],["▁powyżej",-13.812355995178224],["▁protéger",-13.812355995178224],["▁siyosiy",-13.812355995178224],["▁đựng",-13.812355995178224],["▁δεδομένων",-13.812355995178224],["▁μητέρα",-13.8123559951
78224],["▁παιδιών",-13.812355995178224],["▁Оңтүстік",-13.812355995178224],["▁бодлогын",-13.812355995178224],["▁полягає",-13.812355995178224],["▁яагаад",-13.812355995178224],["▁ऑगस्ट",-13.812355995178224],["▁ਸੁਰੱਖਿਆ",-13.812355995178224],["▁ಭವಿಷ್ಯ",-13.812355995178224],["▁სამეფო",-13.812355995178224],["▁레스토랑",-13.812355995178224],["▁ਜਿਨ੍ਹਾਂ",-13.81235694885254],["▁Addysg",-13.812357902526855],["▁விஜய",-13.812357902526855],["▁İtaliya",-13.812358856201172],["▁нижче",-13.812358856201172],["accéder",-13.812359809875488],["▁uključujući",-13.812359809875488],["▁vuodesta",-13.812359809875488],["▁financiële",-13.812360763549805],["▁Vukovar",-13.812362670898438],["▁remunera",-13.812362670898438],["▁신뢰",-13.81236457824707],["▁pemenang",-13.81236743927002],["▁اعدام",-13.812370300292969],["▁плаќа",-13.812371253967283],["▁juvenil",-13.812374114990234],["▁игровых",-13.81237506866455],["優雅",-13.81237506866455],["▁رویکرد",-13.812376022338867],["▁യുദ്ധ",-13.812382698059082],["▁vključuje",-13.812385559082031],["▁onlayn",-13.81238842010498],["▁신경",-13.812390327453612],["▁gjennomført",-13.812397003173828],["▁남편",-13.812397956848145],["▁असतील",-13.81239891052246],["▁lugeda",-13.812400817871094],["▁أيض",-13.812411308288574],["ขุน",-13.812413215637209],["▁Kuhusu",-13.812414169311523],["▁vastava",-13.812418937683104],["▁sebentar",-13.812426567077637],["▁minsken",-13.812429428100586],["気軽に",-13.812429428100586],["▁vašega",-13.812433242797852],["▁элдик",-13.812433242797852],["▁Català",-13.8124361038208],["▁tentativa",-13.812437057495115],["▁Често",-13.812443733215332],["▁Hoewel",-13.81244945526123],["kaartje",-13.812458038330078],["▁പൈ",-13.812460899353027],["kitės",-13.812470436096191],["▁MAA",-13.81247329711914],["▁virðist",-13.812477111816406],["▁oppnå",-13.812478065490724],["▁pangang",-13.812505722045898],["▁omtale",-13.812512397766112],["▁سیمې",-13.812525749206545],["▁Hingga",-13.812538146972656],["▁poziomu",-13.812539100646973],["▁chauffe",-13.812544822692873],["▁البيئة",-13.81255054473877],["▁касцёл",-13.812551498413086],["这件事",-13.812573432922363],["្រា",-13.812577247619627],["▁कशी",-13.812582015991213],["ខាងលើ",-13.812582969665527],["民间",-13.81259059906006],["▁skirtas",-13.812597274780272],["▁gemak",-13.812603950500488],["▁vettä",-13.812607765197754],["▁propres",-13.812612533569336],["パート",-13.812612533569336],["တည်း",-13.81262493133545],["ベース",-13.812642097473145],["▁recepta",-13.812647819519045],["▁sausio",-13.812649726867676],["▁Neque",-13.812681198120115],["▁hauria",-13.8126859664917],["▁талент",-13.8126859664917],["▁здравето",-13.812688827514648],["▁ഭാഗം",-13.812689781188965],["▁يظهر",-13.812698364257812],["▁Iphone",-13.812714576721191],["▁يعود",-13.812716484069824],["คําแนะนํา",-13.812724113464355],["▁szab",-13.812725067138672],["▁უნივერსიტეტის",-13.812752723693848],["▁cinsel",-13.812756538391112],["seminaari",-13.81276035308838],["▁براى",-13.812774658203123],["▁localitat",-13.812785148620604],["▁එසේම",-13.812795639038086],["▁בדבר",-13.812811851501465],["▁Током",-13.81281280517578],["▁подава",-13.81282901763916],["rossa",-13.812836647033691],["▁sykehus",-13.81285572052002],["1.00",-13.812862396240234],["่อง",-13.812862396240234],["▁എനിക്കു",-13.812868118286133],["▁православни",-13.812881469726562],["വിളി",-13.812891960144045],["▁օրենքով",-13.812906265258787],["▁मुक्ति",-13.812909126281738],["үндөгү",-13.812911987304688],["▁Urteil",-13.812915802001951],["ńskich",-13.812973976135254],["▁tied",-13.813017845153809],["psat",-13.81302261352539],["လြတ္",-13.813033103942873],["íssima",-13.813053131103516],["▁책
을",-13.813093185424805],["▁nádob",-13.81309413909912],["▁جمعہ",-13.813104629516602],["तामा",-13.813106536865234],["▁halkın",-13.813121795654297],["▁Τεχν",-13.813127517700195],["▁viaja",-13.813128471374512],["▁دانند",-13.81313705444336],["ंचं",-13.813138961791992],["▁жарық",-13.813139915466309],["▁dåligt",-13.813150405883787],["▁ಕೊನೆ",-13.813151359558104],["▁დაცვა",-13.813155174255373],["605",-13.81317138671875],["▁skaidro",-13.81318187713623],["对你",-13.81318473815918],["的知识",-13.813188552856444],["▁gule",-13.813190460205078],["▁сабак",-13.813192367553713],["但这",-13.813194274902344],["سوق",-13.813199996948242],["несли",-13.813207626342772],["స్థానం",-13.813234329223633],["▁rozsahu",-13.81324291229248],["▁beslenme",-13.81324863433838],["▁gerust",-13.813261985778809],["ผู้บริหาร",-13.813276290893556],["▁divák",-13.813292503356934],["6.00",-13.813294410705566],["▁عده",-13.813295364379885],["▁એર",-13.813300132751465],["▁Món",-13.813307762145996],["▁Divê",-13.813314437866213],["▁අයියා",-13.813316345214844],["▁avancer",-13.81333827972412],["▁sufficiente",-13.813372611999512],["ကလေး",-13.813379287719728],["▁kuweza",-13.813401222229004],["▁santo",-13.813404083251951],["▁voller",-13.813408851623535],["▁समारोह",-13.813413619995115],["▁Freunden",-13.813433647155762],["ുടേത",-13.813450813293455],["▁Punta",-13.8134765625],["以下是",-13.81350040435791],["▁gummi",-13.813515663146973],["ભો",-13.813535690307615],["▁סב",-13.813554763793944],["▁барган",-13.813596725463867],["▁zatvoren",-13.813645362854004],["▁Fyrst",-13.813657760620115],["אוס",-13.813674926757812],["▁laboratorij",-13.813677787780762],["coach",-13.813680648803713],["ństwa",-13.813700675964355],["▁амар",-13.813705444335938],["▁relaks",-13.81374740600586],["ેક્ટ",-13.813772201538086],["▁nimet",-13.813772201538086],["▁ойр",-13.813779830932615],["▁حتي",-13.813799858093262],["▁vasitə",-13.813801765441896],["▁күнөө",-13.813836097717283],["▁Руси",-13.81383991241455],["押し",-13.81383991241455],["▁berjocht",-13.813841819763184],["▁Theatre",-13.813850402832031],["▁slippe",-13.813851356506348],["vuuden",-13.813857078552246],["▁Actualmente",-13.813907623291016],["▁живет",-13.81391143798828],["▁swingers",-13.813931465148926],["▁airport",-13.813936233520508],["ရယူ",-13.813955307006836],["masdan",-13.813971519470217],["казали",-13.813977241516112],["▁равни",-13.813986778259276],["▁حملات",-13.814009666442873],["handi",-13.814064025878906],["▁Модел",-13.81407642364502],["▁vaiko",-13.81408405303955],["▁رياض",-13.814091682434082],["▁יכולת",-13.814099311828612],["ेम",-13.814101219177246],["▁motto",-13.81412124633789],["੍ਹਾ",-13.814127922058104],["▁svakodnevno",-13.814127922058104],["▁הכולל",-13.814128875732422],["ಿಸುತ್ತದೆ",-13.814169883728027],["▁wajar",-13.81417465209961],["▁zalet",-13.814197540283203],["gull",-13.814213752746582],["▁syytä",-13.81422519683838],["▁citām",-13.814226150512695],["▁herriko",-13.814236640930176],["▁πραγματοποι",-13.814238548278809],["自分を",-13.814242362976074],["▁312",-13.814245223999023],["ирања",-13.814257621765137],["▁2200",-13.814265251159668],["▁Gadis",-13.814266204833984],["▁ذریعہ",-13.814266204833984],["▁ক্ষেত্রে",-13.814266204833984],["新規",-13.814285278320312],["▁blade",-13.814350128173828],["▁shaxs",-13.814358711242676],["▁teploty",-13.814373970031738],["▁житлово",-13.814397811889648],["ějí",-13.814403533935549],["▁Torrent",-13.814403533935549],["▁summu",-13.814411163330078],["utstyr",-13.814431190490724],["っていく",-13.814437866210938],["▁onunla",-13.814441680908203],["▁Հակոբյան",-13.814481735229492],["ផង",-13.814489364624023],["▁Raa
t",-13.81449031829834],["vanên",-13.814504623413086],["gehê",-13.814507484436035],["▁словом",-13.814513206481934],["▁ජනතාවට",-13.814517974853516],["ଫାଇ",-13.81454849243164],["▁ՄԻ",-13.814550399780272],["▁érzék",-13.81456470489502],["جرة",-13.814573287963867],["▁chuva",-13.814579963684082],["▁quartos",-13.814587593078612],["▁faccio",-13.814590454101562],["የሙ",-13.814607620239258],["不少人",-13.814621925354004],["भक्त",-13.814637184143066],["▁الرئيسي",-13.814640045166016],["级别",-13.814661979675291],["前から",-13.814678192138672],["▁вважають",-13.814696311950684],["▁개정",-13.814706802368164],["▁férfiak",-13.814725875854492],["sistema",-13.814745903015137],["गढ़",-13.814746856689451],["▁ilusa",-13.814753532409668],["▁الرياضي",-13.814762115478516],["▁Zeman",-13.814791679382324],["傢",-13.814818382263184],["丑",-13.81484031677246],["赌",-13.814844131469728],["翁",-13.814848899841309],["肠",-13.814855575561523],["トン",-13.81485652923584],["破壞",-13.814858436584473],["▁ಪಂದ್ಯ",-13.814871788024902],["ნიშნა",-13.814878463745115],["刮",-13.814878463745115],["崛起",-13.814879417419434],["▁sosok",-13.81488037109375],["赌场",-13.814884185791016],["ディング",-13.81489086151123],["迪士尼",-13.814891815185549],["πρόεδρο",-13.814896583557127],["▁alvo",-13.814896583557127],["貯",-13.814897537231444],["สํารวจ",-13.814899444580078],["콩",-13.814899444580078],["▁Szövetség",-13.814900398254396],["▁gwahanol",-13.814900398254396],["▁samochodów",-13.814900398254396],["▁serdecznie",-13.814900398254396],["▁umiddelbart",-13.814900398254396],["▁αντιμετώπιση",-13.814900398254396],["▁Ердоган",-13.814900398254396],["▁бекіту",-13.814900398254396],["▁սկզբունք",-13.814900398254396],["▁מיטוואך",-13.814900398254396],["▁ಮನುಷ್ಯ",-13.814900398254396],["▁ნამდვილად",-13.814900398254396],["▁izmaiņas",-13.814901351928713],["▁мотоцикл",-13.814901351928713],["▁առումով",-13.814901351928713],["▁विनोद",-13.814901351928713],["▁વિષય",-13.814901351928713],["▁მქონე",-13.814901351928713],["▁ተጠቃሚ",-13.814901351928713],["▁beschikt",-13.814902305603027],["▁მისამართი",-13.814902305603027],["▁រដ្ឋមន្ត្រី",-13.814902305603027],["▁compatible",-13.814903259277344],["▁măsură",-13.814903259277344],["▁sponzor",-13.814903259277344],["▁تجویز",-13.814903259277344],["▁පන්දු",-13.814903259277344],["救助",-13.814903259277344],["▁Jijini",-13.81490421295166],["▁לגמרי",-13.81490421295166],["▁التعريف",-13.81490421295166],["自家",-13.81490421295166],["▁különbség",-13.814905166625977],["▁peribadi",-13.814905166625977],["▁गतिविधि",-13.814905166625977],["▁ტერიტორიაზე",-13.814905166625977],["ေက်ာက္",-13.814906120300291],["منتديات",-13.81490707397461],["ရယူရန်",-13.814908027648926],["▁icazə",-13.814908027648926],["痴",-13.814908027648926],["▁agertzen",-13.814910888671877],["▁rezistent",-13.814911842346191],["рея",-13.814913749694824],["▁внедрен",-13.814916610717772],["▁వివాహ",-13.814918518066406],["▁կանանց",-13.814919471740724],["▁اعداد",-13.814923286437988],["▁موخه",-13.814924240112305],["▁झालं",-13.814924240112305],["ໄຕ",-13.81492519378662],["▁მგონი",-13.81492805480957],["▁оказался",-13.814929962158203],["▁Yellow",-13.81493091583252],["▁المؤسسة",-13.81493091583252],["▁Gewalt",-13.814932823181152],["▁lítið",-13.814935684204102],["▁उपयुक्त",-13.81493854522705],["▁місць",-13.814940452575684],["▁торгівлі",-13.814940452575684],["▁ఐదు",-13.814942359924316],["▁begränsa",-13.814945220947266],["▁бүсгүй",-13.814946174621582],["▁9:30",-13.814953804016112],["▁Polaków",-13.814955711364746],["▁କ୍ୟା",-13.814956665039062],["▁შიდა",-13.814958572387695],["▁þúsund",-13.814970970153809],["▁પક્ષ",-13.814970970153809],["▁ba
njir",-13.814973831176758],["യൻ",-13.814974784851074],["▁देंगे",-13.814974784851074],["▁Kampuni",-13.81497859954834],["▁서버",-13.81497859954834],["▁(1999)",-13.814982414245604],["겠지만",-13.814992904663086],["▁Oracle",-13.815000534057615],["▁הצליח",-13.815001487731934],["▁Meksika",-13.815003395080566],["▁زائر",-13.8150053024292],["▁chiqib",-13.815028190612791],["具体的",-13.815030097961426],["▁пратеници",-13.81503200531006],["▁პლ",-13.815045356750488],["释放",-13.815046310424805],["Mont",-13.815064430236816],["▁предпоч",-13.81506633758545],["benduaren",-13.815069198608398],["▁Experiment",-13.815083503723145],["▁Свобода",-13.815109252929688],["風味",-13.815120697021484],["▁делом",-13.815122604370115],["▁امروزه",-13.815133094787598],["▁당신은",-13.81513500213623],["▁règlement",-13.815146446228027],["▁partopreni",-13.815153121948242],["píše",-13.815159797668455],["kuptim",-13.815165519714355],["▁szavak",-13.81517219543457],["▁قبيل",-13.815180778503418],["▁በዚህም",-13.815185546875],["▁moza",-13.815187454223633],["▁رواں",-13.815202713012695],["▁этно",-13.815217971801758],["▁ملنے",-13.815220832824709],["▁druhou",-13.815260887145996],["▁Serikat",-13.815263748168944],["▁पार्",-13.815265655517578],["ുമായിരുന്നു",-13.815266609191896],["▁devient",-13.815272331237791],["▁күр",-13.815282821655272],["▁حملوں",-13.815285682678224],["æðum",-13.81529140472412],["▁શીખ",-13.815299987792969],["▁dificultades",-13.815305709838867],["▁odstav",-13.8153076171875],["▁चेक",-13.815308570861816],["▁mladých",-13.815320014953612],["▁aksident",-13.815325736999512],["▁kanaliz",-13.815327644348145],["▁sorgt",-13.815356254577637],["▁erbjudanden",-13.815357208251951],["דד",-13.815369606018066],["ानुसार",-13.81541919708252],["▁நடந்து",-13.815425872802734],["▁Thủy",-13.815478324890137],["пря",-13.81548309326172],["Бар",-13.815522193908691],["dnevni",-13.815539360046388],["▁ಪೂರ್ವ",-13.815552711486816],["illah",-13.815568923950195],["ുകളെ",-13.81560230255127],["▁lesznek",-13.815611839294434],["▁தெரிந்து",-13.815624237060549],["▁Nông",-13.815629005432127],["▁Бри",-13.815633773803713],["▁zeven",-13.81563663482666],["▁Spá",-13.815644264221191],["တွေနဲ့",-13.815645217895508],["더니",-13.815656661987305],["▁localizada",-13.815666198730469],["另一個",-13.815674781799316],["▁Yemek",-13.815678596496582],["▁கொடுத்து",-13.815685272216797],["iesiems",-13.815696716308594],["▁أصبحت",-13.815714836120604],["▁logika",-13.815722465515137],["kakku",-13.815723419189451],["▁міцн",-13.815723419189451],["▁rezulto",-13.815725326538086],["▁dodaj",-13.815730094909668],["nizin",-13.815743446350098],["▁opetus",-13.815752983093262],["▁تەرەققىيات",-13.815762519836426],["ٽري",-13.815768241882324],["▁Denmark",-13.815770149230955],["▁филмови",-13.815800666809082],["▁Катар",-13.815804481506348],["▁эли",-13.815804481506348],["▁axı",-13.815814018249512],["ኢትዮጵያ",-13.815832138061523],["▁avakirin",-13.815837860107422],["▁početku",-13.815855026245115],["्रु",-13.8158597946167],["קטע",-13.815878868103027],["▁Salih",-13.815885543823242],["స్తాయి",-13.815921783447266],["▁feuille",-13.815926551818848],["erfolg",-13.815935134887695],["▁Reisen",-13.815954208374023],["▁kurso",-13.81596565246582],["▁Brně",-13.815994262695312],["▁warstw",-13.816000938415527],["ある程度",-13.816021919250488],["▁खरं",-13.816035270690918],["▁saboda",-13.81604290008545],["▁celej",-13.816051483154297],["เจน",-13.816055297851562],["טין",-13.816060066223145],["▁informó",-13.816062927246094],["▁проектів",-13.81606388092041],["meddelelse",-13.816084861755373],["▁отца",-13.816143035888672],["一阵",-13.816166877746582],["141",-13
.816184043884276],["▁åkte",-13.816186904907228],["▁перелік",-13.816200256347656],["▁teşekkürler",-13.816226959228516],["▁Георгиев",-13.816254615783691],["▁formát",-13.816266059875488],["藝術家",-13.816267013549805],["▁маск",-13.816282272338867],["물질",-13.816316604614258],["▁прэм",-13.816319465637209],["▁Մե",-13.816341400146484],["▁mchang",-13.8163423538208],["▁پژوهشی",-13.81635570526123],["▁karşılaş",-13.816375732421877],["ਤ੍ਰ",-13.816387176513672],["िएर",-13.81639575958252],["▁saglabāt",-13.816404342651367],["▁Alasan",-13.81642246246338],["▁సెల్",-13.816433906555176],["▁Puedes",-13.816452026367188],["▁odredi",-13.81647491455078],["жылдар",-13.816487312316896],["▁گنج",-13.816497802734377],["数十",-13.816503524780272],["ፊል",-13.81651496887207],["世界中",-13.81653118133545],["▁Mária",-13.816551208496094],["▁contul",-13.81655216217041],["ในพื้นที่",-13.816554069519045],["▁buraya",-13.816566467285156],["▁eventuelle",-13.81657886505127],["राज्य",-13.816582679748535],["▁заключения",-13.816594123840332],["▁Үйл",-13.816608428955078],["▁събрание",-13.816620826721191],["▁CAMP",-13.816621780395508],["ហារ",-13.816627502441406],["▁Copy",-13.816627502441406],["▁நடு",-13.81662940979004],["▁encama",-13.816640853881836],["વાય",-13.816646575927734],["kiston",-13.81664752960205],["gången",-13.816656112670898],["▁củ",-13.816658973693848],["▁Буду",-13.81667423248291],["mungkin",-13.816693305969238],["істы",-13.816707611083984],["▁manjši",-13.81673812866211],["▁الحديد",-13.816743850708008],["▁تقع",-13.81674575805664],["▁Provin",-13.81675624847412],["acyjnej",-13.81679916381836],["=23",-13.816802024841309],["မန်",-13.816808700561523],["▁Român",-13.816828727722168],["▁utopi",-13.816832542419434],["▁digna",-13.81684684753418],["ผู้เขียน",-13.81686782836914],["▁новото",-13.816869735717772],["▁вечар",-13.816876411437988],["▁ମୋର",-13.816896438598633],["ವನ್ನ",-13.816900253295898],["zaliwa",-13.81690502166748],["▁байгуулагд",-13.81691074371338],["gegner",-13.81691551208496],["▁bonke",-13.81691551208496],["▁Díaz",-13.81692600250244],["▁מוצא",-13.816948890686035],["▁yurit",-13.816949844360352],["關鍵字",-13.81695556640625],["▁Daarna",-13.816981315612791],["ต่อมา",-13.816993713378906],["layacak",-13.817008972167969],["۷۰",-13.817011833190918],["սով",-13.817014694213867],["▁JW",-13.8170166015625],["ИЙГ",-13.817022323608398],["▁കരുത",-13.817025184631348],["2-1",-13.817031860351562],["▁usato",-13.817039489746094],["房貸",-13.817051887512209],["▁Variante",-13.817073822021484],["เวที",-13.817090034484863],["▁limbi",-13.817132949829102],["▁උදා",-13.817133903503418],["▁길이",-13.817155838012695],["▁tlači",-13.817187309265137],["bekend",-13.81719970703125],["special",-13.81721305847168],["▁berja",-13.81722354888916],["▁yayin",-13.817261695861816],["width",-13.817262649536133],["ຫມາກ",-13.817268371582031],["ทวี",-13.817270278930664],["ອາທິດ",-13.81727695465088],["szent",-13.8173189163208],["γνωρίζ",-13.817319869995115],["қпа",-13.817319869995115],["▁Jaguar",-13.817337036132812],["▁വിശേഷ",-13.817338943481444],["dött",-13.81734848022461],["▁підрозділ",-13.817353248596191],["ႏၵ",-13.817364692687988],["▁לחוק",-13.817364692687988],["▁Liye",-13.81737995147705],["▁சாப்பிட",-13.81738567352295],["のうち",-13.817392349243164],["▁войска",-13.81739902496338],["▁Aviv",-13.817400932312012],["▁solltest",-13.817401885986328],["崔",-13.817410469055176],["偽",-13.81741428375244],["游泳",-13.81741714477539],["stemming",-13.817420959472656],["يمان",-13.817428588867188],["▁consulte",-13.817431449890137],["ပံ",-13.817437171936035],["因为我",-13.817442893981934],["▁Еуропа",-13.8
17445755004885],["Եվրոպա",-13.8174467086792],["ሑ",-13.81745147705078],["▁Sisällön",-13.81745147705078],["▁Spørgsmål",-13.81745147705078],["▁Tidligere",-13.81745147705078],["▁chấm",-13.81745147705078],["▁dëgjo",-13.81745147705078],["▁kiekvieną",-13.81745147705078],["▁nüfuz",-13.81745147705078],["▁Акрамя",-13.81745147705078],["▁Часопіс",-13.81745147705078],["▁нәрсе",-13.81745147705078],["▁сравнение",-13.81745147705078],["▁хубаво",-13.81745147705078],["▁բավարար",-13.81745147705078],["▁סעפטעמבער",-13.81745147705078],["▁विश्लेषण",-13.81745147705078],["▁ବ୍ୟବସାୟ",-13.81745147705078],["▁చూస్తే",-13.81745147705078],["▁ಗ್ಯಾಲರಿ",-13.81745147705078],["▁ಶೆಟ್ಟಿ",-13.81745147705078],["▁რომელშიც",-13.81745147705078],["▁სამედიცინო",-13.81745147705078],["▁플랫폼",-13.81745147705078],["Richtlinie",-13.817452430725098],["▁Manhattan",-13.817452430725098],["▁befinner",-13.817452430725098],["▁následující",-13.817452430725098],["▁КЫРГЫЗ",-13.817452430725098],["▁үнэлгээ",-13.817452430725098],["▁सगळे",-13.817452430725098],["▁gönül",-13.817453384399414],["▁нюанс",-13.817453384399414],["▁Hơn",-13.81745433807373],["▁scoperta",-13.81745433807373],["▁vzdialen",-13.81745433807373],["▁obravnava",-13.817455291748049],["▁työntekijä",-13.817455291748049],["▁çatışma",-13.817455291748049],["▁Медицинск",-13.817455291748049],["▁жатышат",-13.817455291748049],["▁предоставляет",-13.817455291748049],["▁homoseksual",-13.81745719909668],["▁муниципальных",-13.81745719909668],["▁көч",-13.817458152770996],["▁विराटनगर",-13.817458152770996],["▁ಹಕ್ಕು",-13.817458152770996],["▁പ്രവാസി",-13.817460060119627],["▁облусунун",-13.817463874816896],["9000",-13.817464828491213],["▁דארף",-13.817466735839844],["▁ځواکونه",-13.817466735839844],["▁milloin",-13.817472457885742],["וביץ",-13.81747341156006],["瑞典",-13.817476272583008],["fjarðar",-13.817479133605955],["ajuntament",-13.817480087280272],["▁አስታው",-13.817480087280272],["残念",-13.81748104095459],["▁الموافق",-13.817484855651855],["▁म्हणतात",-13.817486763000488],["▁Бусад",-13.81748867034912],["▁dì",-13.817490577697754],["買賣",-13.817490577697754],["λμ",-13.817498207092283],["▁नज़र",-13.817498207092283],["▁excepcional",-13.817499160766602],["▁پیشگیری",-13.817500114440918],["▁Pridať",-13.817512512207031],["▁başlayır",-13.817514419555664],["▁применять",-13.817517280578612],["آداب",-13.817519187927246],["▁avgifter",-13.81752586364746],["▁אצלנו",-13.81752872467041],["▁തേടി",-13.81752872467041],["▁sejenis",-13.817532539367676],["▁ಮಕ್ಕಳು",-13.817534446716309],["eğe",-13.817538261413574],["▁sizler",-13.81755256652832],["▁Росія",-13.81755542755127],["▁주목",-13.817556381225586],["▁பயன்",-13.817566871643066],["▁قطار",-13.8175687789917],["▁Några",-13.817571640014648],["▁terbatas",-13.817594528198242],["▁करणाऱ्या",-13.817594528198242],["▁klicken",-13.817599296569824],["▁заказа",-13.81760025024414],["▁Toivo",-13.817602157592772],["Page",-13.81760311126709],["เซล",-13.817606925964355],["▁entzun",-13.81761074066162],["ДИН",-13.817611694335938],["▁Kaki",-13.817611694335938],["▁לבד",-13.817621231079102],["聲明",-13.817627906799316],["▁spasi",-13.817636489868164],["▁edildiği",-13.81763744354248],["هيئة",-13.817663192749023],["▁ulogu",-13.81767463684082],["▁prototip",-13.817678451538086],["▁arviointi",-13.817688941955566],["▁вуліцы",-13.817693710327148],["▁seriously",-13.817708015441896],["▁umiestnen",-13.81771183013916],["▁samfunns",-13.817724227905272],["▁pragmat",-13.817741394042969],["▁hatari",-13.817744255065918],["▁reyna",-13.817744255065918],["▁inspektor",-13.81774616241455],["▁կես",-13.817755699157717],["▁nyuma",-13.81775
6652832031],["VÁ",-13.81776523590088],["档案",-13.817769050598145],["▁jepun",-13.817787170410156],["▁јавна",-13.817824363708496],["izzate",-13.81784725189209],["▁bllok",-13.817856788635254],["nafanya",-13.81786060333252],["▁वाक्य",-13.817861557006836],["▁कोड",-13.81787109375],["▁તેનું",-13.817885398864746],["▁Nữ",-13.817909240722656],["▁кампања",-13.817911148071287],["▁شراب",-13.817913055419922],["▁දේශපාලනය",-13.817914962768556],["▁allé",-13.81793212890625],["▁మూల",-13.8179349899292],["▁kidogo",-13.817936897277832],["۱۹",-13.817954063415527],["▁ത്താവ",-13.817956924438477],["▁pasakyti",-13.81796169281006],["कथा",-13.818034172058104],["▁আসছে",-13.818042755126951],["▁Cancer",-13.818044662475586],["masse",-13.8180570602417],["締",-13.81809139251709],["тельности",-13.81809902191162],["卻是",-13.818130493164062],["▁ánimo",-13.818135261535645],["usage",-13.818148612976074],["▁ASP",-13.818153381347656],["háló",-13.818157196044922],["▁آگاهی",-13.818177223205566],["▁comportement",-13.8181791305542],["ിലേക്കുള്ള",-13.818195343017578],["▁linguis",-13.818201065063477],["▁ומא",-13.818206787109377],["▁300.000",-13.818207740783691],["Prof",-13.818208694458008],["▁نفسك",-13.81821632385254],["▁अली",-13.818233489990234],["ратура",-13.818248748779297],["σελ",-13.818255424499512],["▁apliko",-13.818284034729004],["▁ଜଣା",-13.8182954788208],["▁direzione",-13.818303108215332],["▁vəziyyətdə",-13.818331718444824],["メント",-13.818337440490724],["षु",-13.81834888458252],["▁antropo",-13.818354606628418],["▁посред",-13.818359375],["తాయి",-13.81836223602295],["▁tavasz",-13.81836223602295],["▁Hakan",-13.81836986541748],["▁pensjon",-13.81838035583496],["▁dengê",-13.818385124206545],["ակներ",-13.818391799926758],["▁পেতে",-13.818392753601074],["▁sexiga",-13.818399429321287],["tična",-13.818401336669922],["മണി",-13.81842041015625],["ŠI",-13.81843090057373],["צרות",-13.818435668945312],["▁согласован",-13.818443298339844],["▁първото",-13.81845760345459],["▁beskrive",-13.818464279174805],["▁zákazníkov",-13.818477630615234],["▁mellék",-13.81847858428955],["മെന്ന്",-13.818479537963867],["trát",-13.818486213684082],["▁معاف",-13.818605422973633],["NJI",-13.818612098693848],["បំ",-13.818625450134276],["▁класів",-13.81863021850586],["Mb",-13.818632125854492],["▁hormonal",-13.818634033203123],["▁независност",-13.818634033203123],["frihet",-13.818636894226074],["▁Сүхбаатар",-13.818647384643556],["▁Avon",-13.81865406036377],["มม",-13.818668365478516],["人には",-13.818670272827148],["▁rolę",-13.818673133850098],["مەي",-13.818676948547363],["▁રોડ",-13.818684577941896],["مراجعة",-13.818742752075195],["▁ඉස්",-13.818753242492676],["▁износи",-13.818781852722168],["▁Sáng",-13.818799018859863],["▁anunciar",-13.818818092346191],["▁होऊन",-13.81882667541504],["ційних",-13.818829536437988],["▁čaká",-13.818832397460938],["▁këta",-13.818845748901367],["ЛГА",-13.818851470947266],["Amazon",-13.818862915039062],["▁новог",-13.818864822387695],["▁روانی",-13.818865776062012],["▁timpului",-13.818876266479492],["▁huomioi",-13.818885803222656],["▁paikalla",-13.818889617919922],["▁মাত্র",-13.81889820098877],["имость",-13.818910598754885],["▁θέλ",-13.818910598754885],["居家",-13.818917274475098],["▁접수",-13.818920135498049],["衡",-13.818931579589844],["▁valorar",-13.818949699401855],["ડવા",-13.818954467773438],["أسلوب",-13.818958282470703],["▁ciljem",-13.81898307800293],["▁algı",-13.818986892700195],["▁aquellas",-13.819002151489258],["▁Повер",-13.819045066833496],["▁Spitze",-13.819049835205078],["fariki",-13.819052696228027],["▁Tuote",-13.819066047668455],["▁شهرهای",-13.819074
630737305],["▁Новог",-13.819129943847656],["▁chcemy",-13.819131851196287],["ARO",-13.819136619567873],["▁suured",-13.819138526916504],["्वि",-13.819154739379885],["▁shida",-13.819167137145996],["▁விவர",-13.819171905517578],["▁تحتوي",-13.819174766540527],["▁scambi",-13.819180488586426],["ላው",-13.819194793701172],["▁губернатор",-13.819194793701172],["▁විස",-13.81921100616455],["pinna",-13.81921672821045],["▁Schwei",-13.819229125976562],["▁ჩარ",-13.819235801696776],["▁കണ്ടെത്തിയ",-13.819243431091309],["综",-13.819252014160156],["▁anpassa",-13.819273948669434],["▁Muy",-13.819295883178713],["૧૭",-13.819314002990724],["▁Önce",-13.819329261779783],["▁reageer",-13.81933307647705],["▁shpërnda",-13.819341659545898],["的地位",-13.819375038146973],["▁obstoj",-13.819381713867188],["▁Rakst",-13.8193941116333],["▁ruwa",-13.819400787353516],["quette",-13.819403648376465],["▁екз",-13.819414138793944],["▁entrou",-13.819415092468262],["▁ממו",-13.819418907165527],["ващо",-13.819452285766602],["iškių",-13.819461822509766],["▁সংস্থা",-13.819479942321776],["sopimuksen",-13.819485664367676],["▁سنگھ",-13.819490432739258],["▁Shahid",-13.819520950317385],["ઇટ",-13.8195219039917],["▁predloži",-13.819522857666016],["也將",-13.81956672668457],["▁brann",-13.819584846496582],["▁министерство",-13.819658279418944],["kandi",-13.81966495513916],["▁lengi",-13.819669723510742],["▁քար",-13.81967067718506],["▁изузетно",-13.819676399230955],["▁реко",-13.819684982299805],["బూ",-13.819709777832031],["Agder",-13.819719314575195],["AAR",-13.819741249084473],["很可能",-13.81977367401123],["น้ําตาล",-13.819781303405762],["▁kvitt",-13.819785118103027],["▁আদালত",-13.819791793823242],["▁बराबर",-13.819808959960938],["ROG",-13.819811820983888],["▁однак",-13.819816589355469],["▁भएन",-13.819817543029783],["دوم",-13.819818496704102],["zole",-13.819830894470217],["▁tietty",-13.819859504699709],["நிகழ்",-13.81986141204834],["럽",-13.819873809814451],["Тоо",-13.819876670837402],["▁Leader",-13.8198881149292],["▁bunch",-13.819899559020996],["▁$5",-13.819900512695312],["▁мільйон",-13.819911003112791],["羞",-13.819924354553224],["无人机",-13.819938659667969],["弯",-13.8199462890625],["böl",-13.819953918457031],["▁शकत",-13.819963455200195],["鑽",-13.819976806640623],["彌",-13.819995880126951],["不思議",-13.819997787475586],["呂",-13.819997787475586],["បញ្ជា",-13.820002555847168],["柔軟",-13.820002555847168],["▁ದೆಹಲಿ",-13.820008277893066],["ທູດ",-13.820009231567385],["កុមារ",-13.820009231567385],["អំពើ",-13.820009231567385],["▁Asimismo",-13.820009231567385],["▁Tervetuloa",-13.820009231567385],["▁Zimbabwe",-13.820009231567385],["▁hochwertige",-13.820009231567385],["▁indivíduo",-13.820009231567385],["▁kiinteistö",-13.820009231567385],["▁obowiązują",-13.820009231567385],["▁Αναρτήθηκε",-13.820009231567385],["▁Египет",-13.820009231567385],["▁Ошондуктан",-13.820009231567385],["▁Център",-13.820009231567385],["▁боротьби",-13.820009231567385],["▁быстрее",-13.820009231567385],["▁керівництво",-13.820009231567385],["▁кожному",-13.820009231567385],["▁корголгон",-13.820009231567385],["▁нарочито",-13.820009231567385],["▁тышкары",-13.820009231567385],["▁جولائی",-13.820009231567385],["▁هدایت",-13.820009231567385],["▁কোম্পানি",-13.820009231567385],["▁সন্তান",-13.820009231567385],["▁ਚਾਹੀਦਾ",-13.820009231567385],["▁ਮਾਮਲਾ",-13.820009231567385],["▁హోదా",-13.820009231567385],["▁gezegd",-13.8200101852417],["▁inwoners",-13.8200101852417],["▁зниження",-13.8200101852417],["▁[0]",-13.820011138916016],["▁dimineata",-13.820011138916016],["▁cumple",-13.820012092590332],["konverents",-13.820013046264648],["
▁Plzeň",-13.820013046264648],["▁Търново",-13.820013046264648],["범죄",-13.820013046264648],["▁kijiji",-13.820015907287598],["▁lontano",-13.820016860961914],["▁ndoshta",-13.820016860961914],["▁spomína",-13.820016860961914],["▁льгот",-13.820016860961914],["▁βρείτε",-13.820018768310549],["ឡុង",-13.82002067565918],["▁samþykkt",-13.820023536682127],["▁प्रमाणपत्र",-13.820024490356444],["▁제공됩니다",-13.820028305053713],["ბერძნ",-13.820030212402344],["たちは",-13.820034980773926],["Оюун",-13.820038795471191],["▁esclusivamente",-13.820038795471191],["▁objasni",-13.82004165649414],["сити",-13.820042610168455],["▁այնտեղ",-13.820042610168455],["▁пријаве",-13.820043563842772],["▁boshqaruv",-13.820046424865724],["▁હૈ",-13.820048332214355],["▁dimenzij",-13.820049285888672],["▁мульти",-13.820049285888672],["▁окружающей",-13.82005214691162],["▁значыць",-13.820053100585938],["▁வடிவ",-13.820054054260254],["▁ಪ್ರೊ",-13.82005500793457],["▁qalıb",-13.820063591003418],["▁извоз",-13.820064544677734],["▁uhuru",-13.820066452026367],["صمم",-13.820068359375],["နေတဲ့",-13.820070266723633],["▁Fransız",-13.82007122039795],["تىم",-13.820072174072266],["▁disponibilità",-13.820079803466797],["清洁",-13.82009983062744],["▁autonomia",-13.820108413696287],["▁návrat",-13.820109367370604],["▁dyshim",-13.820113182067873],["▁кућа",-13.820113182067873],["гине",-13.820114135742188],["بوب",-13.820119857788086],["▁чогул",-13.820120811462402],["▁Εισ",-13.82012176513672],["perioodi",-13.820122718811035],["▁किसानों",-13.820122718811035],["ార్ధ",-13.820124626159668],["▁Kitchen",-13.820125579833984],["頂く",-13.82012939453125],["▁departement",-13.8201322555542],["▁babo",-13.820157051086426],["▁Bremen",-13.820176124572754],["öiden",-13.82017707824707],["▁folosind",-13.820178985595703],["▁словах",-13.820180892944336],["▁fulani",-13.820196151733398],["▁Modal",-13.82020664215088],["帝国",-13.820207595825195],["증권",-13.82021141052246],["▁μπορώ",-13.820218086242676],["▁Bermain",-13.820223808288574],["▁ոստիկան",-13.820225715637209],["ғар",-13.82023811340332],["▁voetbal",-13.820246696472168],["▁дүгнэлт",-13.820249557495115],["Дніпро",-13.820252418518066],["▁Bold",-13.820253372192385],["▁főleg",-13.820256233215332],["▁razones",-13.820257186889648],["rijen",-13.820260047912598],["▁سكان",-13.820298194885254],["▁يقع",-13.820298194885254],["▁ezberdin",-13.820316314697266],["SOS",-13.820319175720217],["▁darbā",-13.82034397125244],["ौर",-13.820350646972656],["▁ගණන",-13.820350646972656],["▁următoarele",-13.820352554321287],["▁путу",-13.820354461669922],["пропаганд",-13.820359230041504],["▁правителството",-13.82036590576172],["/7/",-13.8203706741333],["▁munosabatlar",-13.8203763961792],["▁김정",-13.820377349853516],["▁کچه",-13.820409774780272],["▁kuchukua",-13.820412635803224],["वश",-13.820427894592283],["▁sjukvård",-13.820438385009766],["▁محب",-13.82046127319336],["▁considerazione",-13.820480346679688],["▁יסוד",-13.82048511505127],["▁imputa",-13.820509910583496],["కుంటా",-13.820514678955078],["▁Cục",-13.820520401000977],["▁freedom",-13.82053565979004],["▁naming",-13.82053565979004],["▁mortem",-13.82054042816162],["ציג",-13.820548057556152],["▁Бурханы",-13.82055950164795],["ሌት",-13.820576667785645],["ottaja",-13.82058334350586],["▁комитета",-13.82059097290039],["Vai",-13.820611953735352],["BLA",-13.820619583129885],["នាម",-13.82062816619873],["๊ย",-13.820629119873049],["пій",-13.820640563964844],["在全国",-13.82065200805664],["▁Later",-13.820652961730955],["કારી",-13.820653915405272],["մեռ",-13.820685386657717],["▁freut",-13.82069206237793],["▁situatë",-13.82069969177246],["▁
administração",-13.82071304321289],["‬‬",-13.820730209350586],["ثورة",-13.8207426071167],["▁Cât",-13.82074737548828],["▁icrası",-13.82074737548828],["點擊",-13.820748329162598],["▁цэнтры",-13.820751190185549],["▁Ramai",-13.820756912231444],["သန်း",-13.820772171020508],["▁Weekend",-13.820775032043455],["▁tilfreds",-13.820780754089355],["▁Blau",-13.820791244506836],["▁educatiu",-13.820853233337402],["▁nastavení",-13.82086944580078],["▁Aprili",-13.820878982543944],["ряжен",-13.820880889892578],["▁skand",-13.82089138031006],["archive",-13.820903778076172],["▁oculta",-13.820921897888184],["▁گفتند",-13.820924758911133],["▁থাকলে",-13.820926666259766],["▁vrees",-13.820932388305664],["是我的",-13.820934295654297],["▁espazos",-13.820940971374512],["▁којом",-13.820944786071776],["പാല",-13.820979118347168],["一回",-13.821003913879396],["▁ورزی",-13.821012496948242],["vään",-13.821036338806152],["▁Luego",-13.821040153503418],["▁støtter",-13.82104778289795],["טיש",-13.821067810058594],["▁tūkstoš",-13.821083068847656],["मानी",-13.82109832763672],["▁flip",-13.821115493774414],["ючі",-13.821118354797363],["▁contexte",-13.821131706237791],["▁hversu",-13.82114028930664],["κρεμ",-13.821141242980955],["проф",-13.821141242980955],["▁Sức",-13.821142196655272],["▁நிற",-13.821145057678224],["▁tokie",-13.821157455444336],["▁السو",-13.821160316467283],["▁máquinas",-13.821166038513184],["得了",-13.821175575256348],["چىلار",-13.821179389953612],["পরি",-13.821213722229004],["374",-13.821216583251951],["▁vinger",-13.82121753692627],["පූ",-13.82122039794922],["▁животных",-13.821229934692385],["▁capace",-13.821249008178713],["āņu",-13.82126235961914],["▁əmin",-13.821264266967772],["▁Juta",-13.821267127990724],["▁ನ್ಯಾಯ",-13.82127285003662],["వల్ల",-13.821277618408203],["حكومة",-13.82128620147705],["▁შეხვედრ",-13.821290016174316],["▁Кем",-13.82131576538086],["▁فيصل",-13.821318626403809],["íocha",-13.82135772705078],["Point",-13.821365356445312],["一下子",-13.821386337280272],["opplevelse",-13.82140827178955],["▁Phúc",-13.821428298950195],["اتحاد",-13.821455001831056],["קטיבי",-13.821463584899902],["▁ئاساس",-13.821499824523926],["kovanie",-13.82151985168457],["▁školi",-13.821520805358888],["ສືບ",-13.821535110473633],["▁શબ્દો",-13.821537017822266],["▁አሰ",-13.821551322937012],["▁10.1",-13.82156467437744],["Union",-13.821576118469238],["▁Пријав",-13.821577072143556],["▁ремонту",-13.821581840515137],["ողը",-13.82158660888672],["▁vulkan",-13.821588516235352],["ولوژی",-13.82159423828125],["▁najveć",-13.821599006652832],["গান",-13.82160186767578],["onesha",-13.821614265441896],["maraton",-13.821615219116213],["حمل",-13.821617126464844],["发言",-13.82162380218506],["▁rukou",-13.821633338928224],["క్షి",-13.821670532226562],["▁ነን",-13.82168197631836],["▁monto",-13.821691513061523],["សិទ្ធិ",-13.821697235107422],["▁podróży",-13.82171630859375],["▁videti",-13.821722984313965],["▁krev",-13.82174301147461],["▁երեխայի",-13.82175064086914],["▁shower",-13.821754455566406],["شناسی",-13.82175636291504],["▁nekdo",-13.821775436401367],["▁rugam",-13.82179355621338],["▁Baju",-13.82179832458496],["▁органом",-13.821802139282228],["▁possibilita",-13.821810722351074],["ույս",-13.821836471557615],["ေပးပါ",-13.82185173034668],["▁kapcsol",-13.821868896484377],["▁utviklet",-13.821880340576172],["▁ponovi",-13.821892738342283],["essään",-13.821937561035156],["▁지속",-13.821940422058104],["▁פלי",-13.821948051452637],["▁Partisi",-13.821953773498535],["▁pozri",-13.8219575881958],["変わる",-13.821962356567385],["ところに",-13.821985244750977],["▁прогноза",-13.82201862335205],["▁Svein"
,-13.822025299072266],["▁හීන",-13.82204532623291],["▁nekas",-13.82205867767334],["▁കൂട്ടുകാര",-13.82206916809082],["grab",-13.822083473205566],["▁causam",-13.822084426879885],["▁اومد",-13.82209014892578],["witz",-13.822103500366213],["סיכוי",-13.822117805480955],["▁ලැබුණ",-13.822145462036133],["क्ति",-13.822197914123535],["iklus",-13.822199821472168],["schreibung",-13.822213172912598],["▁Айыл",-13.822248458862305],["τηκαν",-13.82225227355957],["۳۵",-13.822266578674316],["▁физички",-13.822271347045898],["ирају",-13.822272300720217],["▁현지",-13.822278022766112],["▁Fot",-13.822280883789062],["blir",-13.82229232788086],["▁последњи",-13.822299003601074],["ohemi",-13.82230281829834],["▁dhexe",-13.822308540344238],["▁Američk",-13.822309494018556],["在那里",-13.822320938110352],["手里",-13.8223295211792],["▁suspek",-13.822360038757324],["▁Yarış",-13.822376251220703],["cter",-13.822396278381348],["現状",-13.82239818572998],["▁facture",-13.822409629821776],["сик",-13.822416305541992],["ત્ય",-13.822439193725586],["สปา",-13.822457313537598],["▁17%",-13.822477340698242],["bistan",-13.82248306274414],["رسم",-13.82249641418457],["ింపు",-13.822519302368164],["▁ਇਨ",-13.822538375854492],["▁aktuálne",-13.822550773620604],["▁esimest",-13.822552680969238],["battere",-13.822553634643556],["ົ້າ",-13.822561264038086],["θέτει",-13.822562217712402],["▁обстоятельств",-13.822564125061035],["首页",-13.822566986083984],["ພຽງແຕ່",-13.82257080078125],["▁நடைபெற",-13.822571754455566],["ភ្នែក",-13.822572708129885],["▁Dictionary",-13.822572708129885],["▁тоглолт",-13.822572708129885],["អឺរ៉ុប",-13.8225736618042],["▁lehetővé",-13.8225736618042],["▁ljubezen",-13.8225736618042],["▁pleasure",-13.8225736618042],["▁rettigheder",-13.8225736618042],["▁voalohany",-13.8225736618042],["▁çünki",-13.8225736618042],["▁відпочинку",-13.8225736618042],["▁побољша",-13.8225736618042],["▁сасвим",-13.8225736618042],["▁Գագիկ",-13.8225736618042],["▁դեռեւս",-13.8225736618042],["▁التقرير",-13.8225736618042],["▁سکیورٹی",-13.8225736618042],["▁وآله",-13.8225736618042],["▁সাবেক",-13.8225736618042],["▁ਪਤਨੀ",-13.8225736618042],["▁ਹੁਕਮ",-13.8225736618042],["▁පරිගණක",-13.8225736618042],["▁문재인",-13.8225736618042],["▁빠르게",-13.8225736618042],["▁pouvons",-13.822574615478516],["▁învăț",-13.822574615478516],["▁мигрант",-13.822574615478516],["▁ሱዳን",-13.822574615478516],["▁свештеник",-13.822575569152832],["Κοινοποίηση",-13.822577476501465],["▁ತಿಳಿಸಿದರು",-13.822577476501465],["▁اړتیا",-13.82257843017578],["ウィ",-13.82257843017578],["သြယ္",-13.822579383850098],["▁Należy",-13.822579383850098],["▁సాక్షి",-13.822579383850098],["搭載",-13.822579383850098],["▁cztery",-13.82258129119873],["▁бэлэн",-13.822582244873049],["▁රහණය",-13.822583198547363],["▁melayani",-13.82258415222168],["▁Οικονομικών",-13.822585105895996],["▁pengertian",-13.822588920593262],["▁Arrimaha",-13.822589874267578],["▁rozměr",-13.82259464263916],["▁obýva",-13.82259750366211],["▁वर्गणी",-13.82260036468506],["▁бацька",-13.822603225708008],["▁vidím",-13.822604179382324],["▁Претседателот",-13.822609901428224],["▁Natuurlijk",-13.822611808776855],["▁ezazu",-13.822612762451172],["kasser",-13.822613716125488],["▁Đừng",-13.82261562347412],["▁højde",-13.822619438171388],["▁синтез",-13.822624206542969],["▁एकूण",-13.822626113891602],["商品を",-13.822632789611816],["mberg",-13.822637557983398],["▁гурав",-13.822638511657717],["▁Bachelor",-13.822641372680664],["ներս",-13.822643280029297],["▁Чүй",-13.822644233703612],["▁zarządzania",-13.822649955749512],["տո",-13.822651863098145],["▁барабан",-13.822653770446776],["▁сельского",-13.822654
724121094],["▁ආසන්න",-13.82265853881836],["▁पुग्ने",-13.822662353515623],["▁батька",-13.82266330718994],["▁volonté",-13.82266616821289],["▁frymë",-13.822667121887209],["▁kończy",-13.822667121887209],["▁اداکارہ",-13.822676658630373],["▁területek",-13.822677612304688],["עצם",-13.822683334350586],["לעג",-13.82268524169922],["▁majutus",-13.822688102722168],["▁Safety",-13.822697639465332],["▁принципа",-13.82270050048828],["▁течност",-13.822701454162598],["▁praxi",-13.822702407836914],["▁karıştır",-13.822707176208496],["య్యే",-13.822710990905762],["▁ਸਿੱਖਾਂ",-13.822728157043455],["湖人",-13.82273006439209],["সংযোগ",-13.82273292541504],["▁Alliance",-13.82273292541504],["▁کښې",-13.822734832763672],["▁Zobrazit",-13.82274055480957],["▁yazdığı",-13.822741508483888],["eretur",-13.822746276855469],["▁udruga",-13.822747230529783],["▁نوٹس",-13.822748184204102],["tzeak",-13.822762489318848],["▁തീരുമാനം",-13.822769165039062],["列入",-13.822772026062012],["၁၅",-13.82277488708496],["සාර",-13.822781562805176],["▁otvoril",-13.822783470153809],["▁cierpi",-13.822787284851074],["▁Bankası",-13.822800636291504],["▁müşteriler",-13.822821617126465],["▁francuski",-13.82282543182373],["▁Ağır",-13.822850227355955],["▁digwydd",-13.82285499572754],["▁adiante",-13.822857856750488],["सकेको",-13.822872161865234],["▁lună",-13.822872161865234],["حك",-13.822876930236816],["▁wykład",-13.82287883758545],["GHz",-13.822887420654297],["▁nintzen",-13.822912216186523],["▁optreden",-13.822919845581056],["▁bodov",-13.822957992553713],["▁кнігу",-13.822959899902344],["ünden",-13.822967529296877],["▁destacado",-13.82297420501709],["▁покупки",-13.82301139831543],["▁Graham",-13.823028564453123],["ანდა",-13.823038101196287],["204",-13.823049545288086],["▁monopol",-13.82305908203125],["▁tiksli",-13.823060035705566],["▁beserta",-13.823076248168944],["▁producció",-13.823077201843262],["▁prišel",-13.823081016540527],["ذة",-13.823091506958008],["Гор",-13.823094367980955],["صغر",-13.82309913635254],["▁eilen",-13.823107719421388],["න්ට්",-13.823110580444336],["▁titoli",-13.82313060760498],["▁pozicion",-13.82314682006836],["timiza",-13.823163986206056],["မိတ်",-13.8231782913208],["ニック",-13.82319450378418],["sluiting",-13.823195457458496],["მების",-13.823196411132812],["▁sumbangan",-13.82321548461914],["▁магу",-13.823225021362305],["▁erotikk",-13.82325267791748],["მკითხველ",-13.823262214660645],["सीसी",-13.823265075683594],["सूर",-13.823280334472656],["▁reviste",-13.823285102844238],["▁brukte",-13.823290824890137],["NAL",-13.823304176330566],["▁palikt",-13.823309898376465],["▁архитектура",-13.82331085205078],["▁starka",-13.823319435119627],["▁לבני",-13.823325157165527],["▁Traian",-13.82333755493164],["հետ",-13.82334327697754],["▁universitario",-13.823369979858398],["▁وسو",-13.823389053344728],["መጽ",-13.823396682739258],["▁monetar",-13.823400497436523],["stenen",-13.823409080505373],["sungguh",-13.82341194152832],["fox",-13.82342529296875],["dhaaf",-13.823426246643066],["adiisa",-13.823427200317385],["▁différent",-13.823442459106444],["การประชุม",-13.82344913482666],["▁lapisan",-13.82347011566162],["▁അസ",-13.823472023010254],["▁بايان",-13.823482513427734],["sággal",-13.823484420776367],["▁그것",-13.823484420776367],["bháil",-13.82349967956543],["üüri",-13.82350254058838],["ịnh",-13.823504447937012],["▁posibilidade",-13.82356071472168],["▁Mesin",-13.823562622070312],["▁consequatur",-13.823565483093262],["▁Majd",-13.823589324951172],["▁Республикасында",-13.823596000671388],["经济增长",-13.823614120483398],["▁Швед",-13.823667526245115],["భవ",-13.82366943359375],["▁Ci
lj",-13.82369327545166],["▁baseret",-13.82373046875],["komis",-13.823735237121582],["▁बढाउन",-13.823748588562012],["Wiki",-13.823750495910645],["▁работна",-13.823765754699709],["▁nişte",-13.823771476745604],["ाङ्ग",-13.823785781860352],["ោង",-13.823785781860352],["ٺو",-13.823809623718262],["▁engem",-13.823813438415527],["kinti",-13.823816299438477],["▁домаћин",-13.823816299438477],["▁компетент",-13.823817253112791],["▁partenza",-13.823819160461426],["बुल",-13.823834419250488],["consult",-13.82384204864502],["▁artikkeli",-13.823854446411133],["▁vegetarian",-13.823858261108398],["miehen",-13.823864936828612],["ņos",-13.823874473571776],["▁konut",-13.82387924194336],["евро",-13.823911666870115],["ಚರ್",-13.823919296264648],["▁néha",-13.82392120361328],["▁dowlada",-13.823922157287598],["▁താള",-13.823929786682127],["Script",-13.823941230773926],["▁अंग्रेजी",-13.824002265930176],["▁ચાલી",-13.824007034301758],["יוס",-13.824013710021973],["▁එහෙ",-13.82402801513672],["的比例",-13.824064254760742],["それら",-13.824076652526855],["ysis",-13.824087142944336],["experiència",-13.824090003967283],["▁Paint",-13.824090957641602],["헤",-13.824090957641602],["ceiro",-13.82409381866455],["▁електронско",-13.824116706848145],["▁لقب",-13.824118614196776],["▁भनिन्",-13.824175834655762],["ളാ",-13.824193000793455],["▁půjčka",-13.82419490814209],["▁నాన్న",-13.824198722839355],["helyek",-13.824256896972656],["▁lengvai",-13.824256896972656],["ִים",-13.824265480041504],["▁najpo",-13.824265480041504],["に関して",-13.824284553527832],["▁рр",-13.824294090270996],["mmissa",-13.824297904968262],["ვნე",-13.82432460784912],["▁suposto",-13.82432746887207],["▁پڑا",-13.824329376220703],["্স",-13.824332237243652],["▁8-9",-13.82439136505127],["▁묻",-13.824397087097168],["▁مظاهر",-13.824410438537598],["nivel",-13.824423789978027],["▁munkahely",-13.824426651000977],["▁tikinti",-13.824462890625],["▁رومان",-13.824464797973633],["cense",-13.82448673248291],["这种情况",-13.82449722290039],["tokset",-13.82451057434082],["လယ်",-13.8245210647583],["▁മുട്ട",-13.8245210647583],["▁후에",-13.824542045593262],["मेन",-13.824565887451172],["▁retras",-13.824592590332031],["ባባ",-13.824621200561523],["▁এসব",-13.824628829956056],["ITAS",-13.824645042419434],["Динамо",-13.824658393859863],["▁કેટલા",-13.82467555999756],["ือก",-13.824689865112305],["▁vult",-13.82469367980957],["▁seksiä",-13.824702262878418],["МФ",-13.824718475341797],["▁monumento",-13.824728965759276],["ובס",-13.824763298034668],["▁учителя",-13.824774742126465],["▁Karim",-13.824801445007324],["▁vsebine",-13.824801445007324],["▁Foc",-13.82481288909912],["▁aplikacije",-13.824834823608398],["而非",-13.82483959197998],["udhu",-13.824846267700195],["▁joudu",-13.824853897094728],["laicīg",-13.824902534484863],["▁выполнение",-13.824912071228027],["なれば",-13.824941635131836],["▁ଟଙ୍କାର",-13.824970245361328],["hívás",-13.82498550415039],["公分",-13.824991226196287],["▁terminu",-13.824994087219238],["▁படத்தை",-13.824995040893556],["算法",-13.824995040893556],["▁փաստաթղթեր",-13.824997901916504],["ไหว",-13.8250150680542],["是如何",-13.82501983642578],["歌詞",-13.825030326843262],["מפי",-13.82503604888916],["▁ජීවිතේ",-13.825037002563477],["памят",-13.825037956237791],["還沒",-13.825047492980955],["HUS",-13.82504940032959],["▁الصحفي",-13.825054168701172],["воздушн",-13.825058937072754],["譜",-13.825090408325195],["毕",-13.825092315673828],["▁هوس",-13.825093269348145],["మైంది",-13.82509994506836],["困境",-13.825102806091309],["īgiem",-13.82510471343994],["枕",-13.825113296508787],["▁Maali",-13.825115203857422],["▁активист",-13.8251190185546
88],["攻撃",-13.825119972229004],["ম্বা",-13.825128555297852],["อธิบาย",-13.825139999389648],["ለሁ፡፡",-13.82514190673828],["ច្បាស់",-13.825142860412598],["ប៉ូលិស",-13.825142860412598],["▁अवैध",-13.825142860412598],["прежнему",-13.825143814086914],["प्रतिनिधी",-13.825143814086914],["መለከት",-13.825143814086914],["▁Crăciun",-13.825143814086914],["▁Weihnachten",-13.825143814086914],["▁bevolking",-13.825143814086914],["▁prezydent",-13.825143814086914],["▁siječnja",-13.825143814086914],["▁taisyklės",-13.825143814086914],["▁uwzględni",-13.825143814086914],["▁žaidimai",-13.825143814086914],["▁Τούρκο",-13.825143814086914],["▁дағды",-13.825143814086914],["▁شىركىتى",-13.825143814086914],["▁وظائف",-13.825143814086914],["▁সর্বস্বত্ব",-13.825143814086914],["▁பல்வேறு",-13.825143814086914],["▁ಆರ್ಥಿಕ",-13.825143814086914],["▁ഹൈക്കോടതി",-13.825143814086914],["▁გამომდინარე",-13.825143814086914],["تخطيط",-13.82514476776123],["ঢ",-13.82514476776123],["▁zaščito",-13.82514476776123],["▁Целта",-13.82514476776123],["▁қамту",-13.82514476776123],["▁உள்ளிட்ட",-13.82514476776123],["ເດີນທາງ",-13.825145721435549],["▁advogado",-13.825145721435549],["▁lizentzia",-13.825145721435549],["▁małych",-13.825145721435549],["zeichnen",-13.825146675109863],["▁चिनियाँ",-13.825146675109863],["▁நமக்கு",-13.825146675109863],["▁допълнително",-13.82514762878418],["▁Hozzá",-13.825148582458496],["曬",-13.825149536132812],["▁geändert",-13.825150489807127],["飲む",-13.825150489807127],["သြင္း",-13.825151443481444],["▁Økonomi",-13.825151443481444],["▁بطولة",-13.825151443481444],["▁skyriaus",-13.825153350830078],["▁məqam",-13.825154304504396],["▁baxış",-13.825155258178713],["▁Helsingissä",-13.825156211853027],["▁କେବେ",-13.825156211853027],["ધ્ય",-13.825157165527344],["takatifu",-13.825159072875977],["▁wojny",-13.82516384124756],["▁ansiedad",-13.825164794921877],["▁شگفت",-13.825164794921877],["▁запам",-13.825166702270508],["Istituto",-13.82516860961914],["▁suositu",-13.825170516967772],["▁sasvim",-13.825172424316406],["▁тижня",-13.825172424316406],["▁zabava",-13.825173377990724],["旦那",-13.825176239013672],["állomány",-13.825177192687988],["атыш",-13.825180053710938],["testülete",-13.825182914733888],["▁викенд",-13.82518482208252],["povídá",-13.825188636779783],["▁είδα",-13.825188636779783],["▁राखेर",-13.825188636779783],["▁nakoniec",-13.825191497802734],["▁selvästi",-13.825194358825684],["▁সেক্সি",-13.825196266174316],["▁ontwikkel",-13.825217247009276],["▁Vô",-13.825222969055176],["▁حاوی",-13.825222969055176],["▁contratación",-13.825223922729492],["▁испред",-13.825228691101074],["▁चर्चित",-13.825239181518556],["▁კამ",-13.825239181518556],["▁náhodou",-13.825240135192873],["▁బే",-13.82524585723877],["▁Basil",-13.825247764587402],["▁παιχνίδια",-13.825260162353516],["энерго",-13.825276374816896],["▁خزان",-13.825279235839844],["▁previamente",-13.825294494628906],["▁подобных",-13.825297355651855],["▁Study",-13.825303077697754],["▁nyitott",-13.825303077697754],["▁எடுத்த",-13.825305938720703],["器官",-13.82530689239502],["tirgan",-13.825312614440918],["▁jirin",-13.8253173828125],["ΒΑ",-13.825322151184082],["мисля",-13.825325965881348],["▁ainakaan",-13.82532787322998],["▁Prijava",-13.82533359527588],["▁własnych",-13.82533836364746],["▁Stačí",-13.825340270996094],["▁పెట్టి",-13.82535457611084],["不容",-13.825357437133787],["▁գործով",-13.825363159179688],["løk",-13.825395584106444],["PUL",-13.82541561126709],["▁مذهبي",-13.825421333312988],["จั",-13.825422286987305],["▁підставі",-13.825431823730469],["▁বের",-13.825468063354492],["kurssi",-13.825469970703123],["▁spezie
ll",-13.825475692749023],["ारे",-13.825488090515137],["▁Texte",-13.825488090515137],["▁jokin",-13.825490951538086],["▁outlet",-13.825493812561035],["leştiril",-13.825494766235352],["חשיבות",-13.825506210327148],["▁سکا",-13.825521469116213],["БЫ",-13.825554847717283],["▁utbud",-13.825573921203612],["一夜",-13.825599670410156],["brus",-13.825601577758787],["▁ಹೇಳಿಕೆ",-13.825602531433104],["▁örgütü",-13.825608253479004],["▁pausa",-13.82561206817627],["▁diameter",-13.825629234313965],["აციო",-13.82563304901123],["istico",-13.825645446777344],["▁hrdin",-13.82565975189209],["▁моето",-13.825660705566406],["▁Autobus",-13.825666427612305],["▁sakte",-13.825674057006836],["▁batería",-13.82568073272705],["امية",-13.825684547424316],["簡単",-13.825684547424316],["▁bruto",-13.825692176818848],["▁Вече",-13.825692176818848],["▁Förderung",-13.825700759887695],["ίνει",-13.825701713562012],["ონს",-13.825706481933594],["ANKA",-13.82571792602539],["ช่วงเวลา",-13.825723648071287],["▁результатів",-13.825730323791504],["▁haqidagi",-13.825753211975098],["▁پخوا",-13.825756072998049],["▁Άλ",-13.825778007507324],["▁الحديثة",-13.825785636901855],["▁comodidad",-13.825811386108398],["смер",-13.825813293457031],["▁דל",-13.82581901550293],["பை",-13.82583999633789],["▁aşırı",-13.82585906982422],["链接",-13.825861930847168],["Це",-13.825894355773926],["▁організму",-13.82589626312256],["▁drenge",-13.825908660888672],["▁Ξε",-13.825913429260254],["ravin",-13.825916290283203],["▁Nathan",-13.825925827026367],["▁அவருக்கு",-13.82593059539795],["▁kəsi",-13.825932502746582],["▁shoes",-13.82594108581543],["▁ተነ",-13.825949668884276],["搭乘",-13.825953483581545],["ਿਨ",-13.825974464416504],["ഞ്ചി",-13.826008796691896],["▁በኢ",-13.826024055480955],["اؤں",-13.826027870178224],["▁mesiacmi",-13.826031684875488],["的眼睛",-13.82603359222412],["포항",-13.826045989990234],["▁пусна",-13.826072692871094],["▁работника",-13.82607650756836],["▁ఇప్పటి",-13.826111793518066],["▁CBS",-13.82614040374756],["▁දාන",-13.826180458068848],["▁Pino",-13.826181411743164],["▁categorías",-13.826183319091797],["ದಲ್ಲ",-13.826189041137695],["行情",-13.826190948486328],["WEB",-13.826207160949709],["tatni",-13.826223373413086],["▁경우가",-13.826229095458984],["테크",-13.82627010345459],["нүүд",-13.826272010803224],["▁Μουσ",-13.826285362243652],["▁atractiv",-13.826294898986816],["كوين",-13.826330184936523],["▁прве",-13.826361656188965],["국제",-13.82638454437256],["بقي",-13.82638931274414],["▁anlatı",-13.826409339904783],["२२",-13.826412200927734],["deiro",-13.826416015625],["kuntien",-13.826430320739746],["学家",-13.826454162597656],["이라면",-13.826476097106934],["ສາມ",-13.82648754119873],["▁пайдалануу",-13.826488494873049],["▁ખાન",-13.826489448547363],["▁Corri",-13.826495170593262],["▁Vég",-13.826497077941896],["feta",-13.82650375366211],["▁ከወ",-13.82650661468506],["▁பல்",-13.826516151428224],["กํากับ",-13.826539993286133],["াধিক",-13.82655143737793],["▁proposamen",-13.826553344726562],["DZI",-13.82656478881836],["▁falso",-13.826568603515623],["▁hoće",-13.826614379882812],["▁attīstība",-13.82663631439209],["лч",-13.82664394378662],["tetett",-13.826648712158203],["▁лицата",-13.826680183410645],["を考えて",-13.82668113708496],["loppet",-13.826699256896973],["トイレ",-13.826700210571287],["▁Кине",-13.82670783996582],["を使用して",-13.826711654663086],["តំបន់",-13.82671356201172],["轻轻",-13.826719284057615],["▁Плат",-13.826749801635742],["▁montrer",-13.82675838470459],["▁maneh",-13.826765060424805],["മാകുന്ന",-13.826796531677246],["▁ଅଧ",-13.826801300048828],["くらいの",-13.826828956604004],["▁hartuko",-13.82684707641
6016],["▁opphold",-13.826863288879396],["১৩",-13.826864242553713],["▁креира",-13.826874732971191],["yuu",-13.826910972595217],["▁Paco",-13.826929092407228],["▁ئۇچ",-13.82693862915039],["▁vaimo",-13.826944351196287],["▁کاله",-13.826966285705566],["flix",-13.82697296142578],["▁граждански",-13.82699203491211],["▁valtion",-13.827012062072754],["▁მოვლენ",-13.827019691467283],["▁trobada",-13.8270263671875],["▁mēģin",-13.82705020904541],["輸出",-13.827054977416992],["วรรณ",-13.827078819274902],["▁katıl",-13.82712745666504],["keş",-13.827143669128418],["▁igreja",-13.827150344848633],["стара",-13.827155113220217],["▁específicas",-13.82717990875244],["▁අගය",-13.827187538146973],["发布了",-13.827202796936035],["beszél",-13.82720947265625],["۰۹",-13.827226638793944],["ୱେ",-13.82725715637207],["▁mahtava",-13.827288627624512],["▁sizleri",-13.827299118041992],["▁especiales",-13.827301025390623],["ROP",-13.82730770111084],["▁gevaar",-13.8273344039917],["▁puber",-13.8273344039917],["▁excelent",-13.827341079711914],["ลอก",-13.827349662780762],["▁obseg",-13.82735538482666],["▁rõõmu",-13.82736110687256],["רייט",-13.827378273010254],["▁residenti",-13.827378273010254],["▁metodik",-13.827388763427734],["國小",-13.82739543914795],["▁nastupi",-13.827415466308594],["▁İnce",-13.827415466308594],["▁ASUS",-13.827425956726074],["ક્ટર",-13.827428817749023],["ምሩ",-13.827445030212402],["デン",-13.827447891235352],["▁sākot",-13.827486038208008],["最为",-13.827496528625488],["فيه",-13.827499389648438],["tirilgan",-13.827502250671388],["▁senador",-13.827526092529297],["商品の",-13.827536582946776],["▁rhag",-13.82756233215332],["腐败",-13.827567100524902],["דיא",-13.827582359313965],["▁ഭക്ഷണ",-13.827593803405762],["▁фаль",-13.827596664428713],["▁කෙරෙන",-13.827608108520508],["▁roupa",-13.82761001586914],["▁በዓ",-13.827611923217772],["เกรด",-13.827617645263672],["чним",-13.827621459960938],["肤",-13.827628135681152],["节能",-13.82765007019043],["▁اہلکار",-13.82766342163086],["Andre",-13.827672958374023],["貢獻",-13.82768440246582],["▁ಕುರಿತ",-13.827685356140137],["尝",-13.827689170837402],["嬰兒",-13.827695846557615],["ネットワーク",-13.827699661254885],["一人で",-13.827701568603516],["▁imtina",-13.827704429626465],["喪",-13.827707290649414],["seirbhís",-13.827717781066896],["約束",-13.827718734741213],["ซ่อม",-13.827719688415527],["ຂະແຫນງ",-13.827720642089844],["ຮັບຜິດຊອບ",-13.827720642089844],["▁تۈز",-13.827720642089844],["▁పరిస్థితుల",-13.827720642089844],["קיבוץ",-13.82772159576416],["ផ្លាស់ប្តូរ",-13.82772159576416],["▁Ankaŭ",-13.82772159576416],["▁Məhəmməd",-13.82772159576416],["▁Plattform",-13.82772159576416],["▁anderledes",-13.82772159576416],["▁dyfodol",-13.82772159576416],["▁előzetes",-13.82772159576416],["▁pieredzi",-13.82772159576416],["▁επαγγελματικ",-13.82772159576416],["▁Футбол",-13.82772159576416],["▁сыртқы",-13.82772159576416],["▁بڻجي",-13.82772159576416],["▁دانشکده",-13.82772159576416],["▁उमेदवार",-13.82772159576416],["▁বক্তব্য",-13.82772159576416],["▁বরিশাল",-13.82772159576416],["▁પાકિસ્તાન",-13.82772159576416],["▁டாக்டர்",-13.82772159576416],["소프트",-13.82772159576416],["▁Mokslas",-13.827722549438477],["▁superfície",-13.827722549438477],["▁शिकायत",-13.827722549438477],["▁affiliate",-13.827723503112791],["▁fytyrë",-13.827723503112791],["▁poriadku",-13.827723503112791],["▁наурыз",-13.827723503112791],["▁Gobernu",-13.82772445678711],["▁podarilo",-13.82772445678711],["▁KİV",-13.827725410461426],["▁veïns",-13.827725410461426],["húsnæði",-13.827726364135742],["▁polítiques",-13.827726364135742],["▁безплатно",-13.827726364135742],["▁പകരം",-13.827727317810
06],["▁úspěch",-13.827728271484377],["▁внимательно",-13.827728271484377],["▁कारवाई",-13.827728271484377],["▁المتوسط",-13.827729225158691],["▁карьер",-13.827730178833008],["▁Jedan",-13.827731132507324],["▁сигурни",-13.82773208618164],["担保",-13.82773208618164],["▁всякакви",-13.827733039855955],["▁السيارة",-13.82773494720459],["▁kasashen",-13.827735900878906],["▁βλέπει",-13.827735900878906],["▁учесник",-13.827735900878906],["▁Κάθε",-13.827738761901855],["pañ",-13.827739715576172],["ثنائية",-13.827740669250488],["ప్పటి",-13.827740669250488],["▁iliyopita",-13.827740669250488],["▁ڪجھ",-13.827743530273438],["▁ishonch",-13.82774829864502],["▁διαφορά",-13.82774829864502],["▁કરવો",-13.827751159667969],["vinnslu",-13.827752113342283],["▁୨୪",-13.8277587890625],["gadu",-13.827759742736816],["▁orgullo",-13.827762603759766],["▁അതുകൊണ്ട്",-13.827771186828612],["▁vackert",-13.82777214050293],["▁කඳු",-13.827773094177246],["▁Bani",-13.827775955200195],["ಯವರು",-13.827776908874512],["▁entah",-13.827776908874512],["▁kezdő",-13.827776908874512],["委員会",-13.827778816223145],["▁повреди",-13.827781677246094],["[20]",-13.827783584594728],["▁poidan",-13.827802658081056],["ууну",-13.827803611755373],["▁почетн",-13.827805519104004],["▁mbledh",-13.827807426452637],["▁byddwn",-13.827808380126951],["▁jogosult",-13.827811241149902],["▁వస్తుంది",-13.82781219482422],["อัด",-13.827814102172852],["▁señaló",-13.827828407287598],["повіді",-13.827832221984863],["印发",-13.827837944030762],["▁Comissão",-13.827840805053713],["▁אותן",-13.82784366607666],["▁hiburan",-13.82785415649414],["▁Kombination",-13.827856063842772],["▁jättää",-13.82786464691162],["▁אויפן",-13.827865600585938],["▁Wedding",-13.827868461608888],["▁реше",-13.827889442443848],["▁سارے",-13.82790756225586],["▁څنګه",-13.827911376953123],["ოტი",-13.827922821044922],["▁resistencia",-13.827935218811035],["▁අංශ",-13.827945709228516],["▁господ",-13.827951431274414],["▁සූත්",-13.827962875366213],["בּ",-13.827973365783691],["▁compone",-13.827973365783691],["▁дерева",-13.827983856201172],["▁нийлүүл",-13.82800006866455],["▁strank",-13.828001022338867],["▁titull",-13.828001976013184],["▁reflexión",-13.828015327453612],["▁пасад",-13.828025817871094],["המלצות",-13.82802677154541],["▁nahm",-13.828032493591309],["മൊരു",-13.828045845031738],["会う",-13.82805347442627],["▁energija",-13.828100204467772],["▁Aslan",-13.828107833862305],["▁satışı",-13.828110694885254],["وتو",-13.82811164855957],["视为",-13.82811164855957],["јка",-13.828116416931152],["६४",-13.828117370605469],["альнага",-13.82812213897705],["▁revoca",-13.828136444091797],["▁muuttaa",-13.828166961669922],["▁Cisco",-13.828167915344238],["▁traditionell",-13.828170776367188],["ratan",-13.82817268371582],["▁Salo",-13.82819652557373],["▁favoritter",-13.828197479248049],["TÜRK",-13.828200340270996],["▁kuljet",-13.82820987701416],["▁linalo",-13.82820987701416],["▁nganggo",-13.828227043151855],["▁ocasiona",-13.828227996826172],["▁Seega",-13.828255653381348],["▁promozione",-13.82827854156494],["いくら",-13.828299522399902],["▁дәлел",-13.82831859588623],["▁dreta",-13.828327178955078],["▁کھو",-13.828341484069824],["进攻",-13.828346252441406],["▁ἐ",-13.828349113464355],["▁бивши",-13.828351974487305],["нулась",-13.828383445739746],["共通",-13.828386306762695],["વડા",-13.828389167785645],["▁consecutiv",-13.82839012145996],["▁pagitan",-13.828398704528809],["▁සතුටු",-13.828413009643556],["▁വധ",-13.82841682434082],["▁чисти",-13.828423500061035],["▁јануара",-13.828426361083984],["ความเห็น",-13.828428268432615],["▁generis",-13.828428268432615],["인은",-13.8
28431129455566],["っていう",-13.82844066619873],["▁участву",-13.828452110290527],["llekin",-13.828461647033691],["▁Vanha",-13.828473091125488],["▁qaran",-13.828495979309082],["▁מפות",-13.828506469726562],["▁Կան",-13.828511238098145],["▁Секс",-13.828518867492676],["▁snygga",-13.828521728515623],["▁matematic",-13.828537940979004],["▁documenti",-13.828557968139648],["▁lefel",-13.828558921813965],["▁rahim",-13.828560829162598],["yklos",-13.828567504882812],["▁נזק",-13.828570365905762],["多位",-13.828570365905762],["▁τηλε",-13.828575134277344],["▁Constant",-13.82859706878662],["を進め",-13.828601837158203],["▁Fedha",-13.82861328125],["▁یقینی",-13.828628540039062],["▁Općine",-13.82863426208496],["▁පහළ",-13.828640937805176],["▁Äl",-13.828652381896973],["▁बाळ",-13.828662872314451],["▁दहा",-13.828694343566896],["의료",-13.828697204589844],["երս",-13.828701972961426],["gráficos",-13.828718185424805],["▁وحده",-13.82875156402588],["раўн",-13.82876682281494],["▁ريت",-13.828777313232422],["▁broken",-13.828784942626951],["その時",-13.828800201416016],["▁பிடிக்க",-13.828814506530762],["дневн",-13.828815460205078],["▁Umma",-13.828821182250977],["▁legitima",-13.828827857971191],["8%)",-13.828850746154783],["ຂ່າວສານ",-13.828882217407228],["ዋት",-13.82889461517334],["ကီ",-13.828947067260742],["▁detecta",-13.828947067260742],["▁Filipina",-13.82895278930664],["▁നൽകിയ",-13.82895565032959],["▁szybkie",-13.82896327972412],["▁kolmen",-13.828997611999512],["▁ירוק",-13.828998565673828],["rapat",-13.82900333404541],["gression",-13.82902717590332],["ânica",-13.829031944274902],["▁ordusu",-13.829060554504396],["▁лицензия",-13.829076766967772],["▁sieben",-13.829108238220217],["身高",-13.82913303375244],["サービスの",-13.829147338867188],["▁Elisa",-13.829150199890137],["▁ഇള",-13.829195976257324],["▁prosjekter",-13.82920265197754],["▁промените",-13.829225540161133],["贫",-13.829227447509766],["の中から",-13.829243659973145],["▁pripravlja",-13.829249382019045],["5.5",-13.829269409179688],["不見",-13.829285621643066],["▁Warna",-13.82929515838623],["üç",-13.82929801940918],["ላም",-13.82929801940918],["▁üyesi",-13.829323768615724],["▁salário",-13.829338073730469],["▁tisdag",-13.829339027404783],["ಮುಖ",-13.829344749450684],["▁Natok",-13.82935619354248],["fashion",-13.829357147216797],["▁lasciare",-13.82935905456543],["厂商",-13.829360961914062],["▁fillos",-13.82936954498291],["CET",-13.82937240600586],["white",-13.829394340515137],["▁Bangsa",-13.829397201538086],["▁Nachrichten",-13.829401016235352],["িজ",-13.8294038772583],["ιζε",-13.829404830932615],["▁sanoat",-13.829404830932615],["▁ከዚያ",-13.82943344116211],["▁Ростов",-13.829442977905272],["▁Интерес",-13.82944393157959],["▁بوك",-13.829450607299805],["▁menggu",-13.82949161529541],["▁Aşa",-13.829492568969728],["▁izjavi",-13.829492568969728],["wił",-13.829506874084473],["作った",-13.829508781433104],["ക്തി",-13.8295259475708],["▁kultiv",-13.829549789428713],["уел",-13.829591751098633],["▁القب",-13.829622268676758],["עדיף",-13.829634666442873],["▁Marcelo",-13.829662322998049],["▁дуртай",-13.829665184020996],["不一定",-13.829668998718262],["ജോ",-13.829669952392578],["ከና",-13.829676628112791],["▁מרגיש",-13.82968044281006],["▁Terri",-13.829682350158691],["սե",-13.82968807220459],["▁დაბ",-13.82971477508545],["ڙيل",-13.829717636108398],["▁Slow",-13.829726219177246],["とお",-13.829726219177246],["థా",-13.82973575592041],["▁వంద",-13.829763412475586],["▁lubang",-13.82978630065918],["পৰ",-13.829792022705078],["вальні",-13.829819679260254],["▁плажа",-13.82982349395752],["ոլոգ",-13.829829216003418],["▁కుల",-13.829838752746582],["▁
actualidad",-13.829862594604492],["streng",-13.829866409301758],["▁vekst",-13.829874992370604],["▁tekniske",-13.829878807067873],["▁nenes",-13.829882621765137],["▁berkumpul",-13.82988452911377],["▁искаш",-13.829893112182615],["▁Сэ",-13.82993507385254],["▁⇒",-13.82993507385254],["▁kandidát",-13.829959869384766],["▁Joel",-13.829963684082031],["ставляем",-13.82996654510498],["▁Wave",-13.82996940612793],["▁barok",-13.82997989654541],["▁веле",-13.829992294311523],["jevoj",-13.830000877380373],["▁нәтиже",-13.830004692077637],["قاف",-13.83000946044922],["കൻ",-13.830012321472168],["▁पंचायत",-13.830034255981444],["йдуть",-13.830050468444824],["▁tukar",-13.83006763458252],["▁wonen",-13.830077171325684],["կով",-13.830081939697266],["▁අංශය",-13.830081939697266],["▁સાંભળ",-13.830111503601074],["ନେଇ",-13.830114364624023],["改めて",-13.830143928527832],["Rad",-13.830156326293944],["դին",-13.830174446105955],["▁Sveiki",-13.830192565917969],["-2019",-13.830204010009766],["擠",-13.830207824707031],["жені",-13.830214500427246],["▁brzmi",-13.830223083496094],["▁Мара",-13.83022689819336],["夸",-13.830228805541992],["餐饮",-13.830259323120115],["앙",-13.830259323120115],["遵循",-13.830263137817385],["卜",-13.83026885986328],["ônica",-13.830269813537598],["gerichte",-13.830272674560549],["回饋",-13.830283164978027],["▁netiek",-13.830284118652344],["மர",-13.830286979675291],["เท่าไหร่",-13.830304145812988],["កុម្ភៈ",-13.830305099487305],["▁Galmudug",-13.830305099487305],["▁Pengertian",-13.830305099487305],["▁Snapdragon",-13.830305099487305],["▁kütləvi",-13.830305099487305],["▁menjëherë",-13.830305099487305],["▁območju",-13.830305099487305],["▁pravdepodobn",-13.830305099487305],["▁příležitost",-13.830305099487305],["▁électronique",-13.830305099487305],["▁υποστηρίζ",-13.830305099487305],["▁МЕНЕН",-13.830305099487305],["▁Сүүлийн",-13.830305099487305],["▁вынужден",-13.830305099487305],["▁السؤال",-13.830305099487305],["▁رستوران",-13.830305099487305],["▁مملکت",-13.830305099487305],["▁घरेलू",-13.830305099487305],["▁আপনাকে",-13.830305099487305],["▁સમસ્યા",-13.830305099487305],["▁దీన్ని",-13.830305099487305],["▁upinzani",-13.83030605316162],["▁аэропорт",-13.83030605316162],["▁بپرداز",-13.83030605316162],["▁स्पर्धा",-13.83030605316162],["▁Offentlig",-13.830307006835938],["▁Yıldız",-13.830307006835938],["▁extranjero",-13.830307006835938],["▁مسوول",-13.830307006835938],["▁ирээдүй",-13.830307960510254],["▁Speicher",-13.83030891418457],["▁ڪيترن",-13.83030891418457],["痘",-13.83030891418457],["záleží",-13.830309867858888],["▁udruge",-13.830309867858888],["▁obavijesti",-13.830310821533203],["▁voqea",-13.830310821533203],["▁কামাল",-13.830312728881836],["▁gedagte",-13.830313682556152],["▁ബഹു",-13.830314636230469],["▁будущее",-13.830317497253418],["▁واسطه",-13.830317497253418],["▁భూమి",-13.83031940460205],["▁беріледі",-13.830320358276367],["▁жижиг",-13.830320358276367],["▁dibuix",-13.830322265625],["▁تجربة",-13.830323219299316],["▁annoncé",-13.830324172973633],["▁pilnībā",-13.830324172973633],["▁Ocalan",-13.83032512664795],["▁otomatik",-13.83032512664795],["▁आउ",-13.83032512664795],["▁චන්ද්",-13.830326080322266],["产量",-13.830327987670898],["▁උතුරු",-13.830328941345217],["▁silenzio",-13.830330848693848],["▁אינעם",-13.830336570739746],["▁štyri",-13.83033847808838],["▁اوږد",-13.830347061157228],["▁Eduard",-13.830350875854492],["▁અમારી",-13.830357551574709],["精英",-13.83035945892334],["▁viatgers",-13.830360412597656],["▁pensamiento",-13.830361366271973],["▁dhacday",-13.830367088317873],["მიწ",-13.830368041992188],["ፈጸመ",-13.830368041992188],["▁widget"
,-13.830368995666504],["גוף",-13.83036994934082],["▁ಅಧಿಕಾರಿಗಳು",-13.830371856689451],["ročni",-13.830377578735352],["▁нивниот",-13.830378532409668],["▁tjeter",-13.830382347106934],["▁بالفعل",-13.830388069152832],["▁eintlik",-13.830397605895996],["▁Inver",-13.830415725708008],["▁बहि",-13.830415725708008],["▁Related",-13.830416679382324],["▁Kongre",-13.83042335510254],["▁Senator",-13.830429077148438],["▁Hasiera",-13.830435752868652],["▁виправ",-13.830435752868652],["▁dalį",-13.830437660217283],["▁запитання",-13.830446243286133],["▁Hår",-13.830451011657717],["kvėp",-13.830453872680664],["▁хяналтын",-13.830453872680664],["▁Qaydalar",-13.830456733703612],["እነዚህ",-13.830458641052246],["▁Včera",-13.830466270446776],["言語",-13.830469131469728],["▁कसैले",-13.830474853515623],["▁Auditor",-13.83047580718994],["▁товары",-13.83047580718994],["ശേഖര",-13.830484390258787],["▁преминава",-13.830485343933104],["相应",-13.830524444580078],["▁पुगेका",-13.830527305603027],["▁твой",-13.830546379089355],["▁paradigma",-13.830549240112305],["oikeudet",-13.83057689666748],["▁mover",-13.830591201782228],["▁níž",-13.830592155456545],["▁кийим",-13.830594062805176],["▁Primeiro",-13.830605506896973],["עמוד",-13.830617904663086],["▁otrzymał",-13.830623626708984],["▁Txa",-13.830629348754885],["▁තද",-13.830629348754885],["hóp",-13.830639839172363],["▁Murad",-13.830643653869627],["▁anvendelse",-13.830652236938477],["ለኝ",-13.83065414428711],["▁historique",-13.83065414428711],["▁любых",-13.83067226409912],["▁Mājas",-13.830693244934082],["躺在",-13.830694198608398],["ystävä",-13.83071231842041],["pärase",-13.83071517944336],["▁உங்களை",-13.830717086791992],["のかもしれません",-13.830721855163574],["▁الفرد",-13.83073902130127],["▁Borsa",-13.830740928649902],["▁பழைய",-13.830754280090332],["▁munkát",-13.83076000213623],["▁blæ",-13.830768585205078],["▁rodea",-13.83078956604004],["▁الغاز",-13.830798149108888],["▁Вест",-13.83080768585205],["▁dhici",-13.83081340789795],["▁наћи",-13.830836296081545],["ધ્ધ",-13.83083724975586],["▁embo",-13.830849647521973],["▁stia",-13.830851554870604],["ということを",-13.830856323242188],["▁راستای",-13.830876350402832],["ଯା",-13.830881118774414],["▁úsek",-13.830902099609377],["▁igénybe",-13.830906867980955],["▁ರೋಗ",-13.830906867980955],["▁Pesan",-13.830925941467283],["альну",-13.830931663513184],["▁känn",-13.830939292907717],["▁עקס",-13.830968856811523],["isuudessa",-13.830972671508787],["▁брашно",-13.83098030090332],["▁ಬೇಡ",-13.831003189086914],["ówkę",-13.831005096435549],["▁המרכזי",-13.831036567687988],["▁Kirchen",-13.831040382385254],["▁Egal",-13.83105182647705],["▁senge",-13.831059455871582],["▁посланик",-13.831074714660645],["ാടക",-13.831076622009276],["▁šans",-13.83107852935791],["▁Esku",-13.831096649169922],["▁балет",-13.8311128616333],["▁إب",-13.831113815307615],["▁keçirir",-13.831136703491213],["▁vặt",-13.83113956451416],["▁بهترين",-13.831141471862791],["▁उसले",-13.831156730651855],["خب",-13.831170082092283],["ធី",-13.831201553344728],["▁Kohl",-13.831202507019045],["▁teachers",-13.831207275390623],["待ち",-13.831244468688965],["的价值",-13.83126449584961],["oglu",-13.83127212524414],["小組",-13.831273078918455],["▁recta",-13.831291198730469],["▁Session",-13.831307411193848],["▁﴿",-13.831315994262695],["▁sturen",-13.831326484680176],["▁altii",-13.831340789794922],["▁koncentraci",-13.831347465515137],["▁látogató",-13.8313570022583],["бах",-13.831366539001465],["ώσουμε",-13.831391334533691],["osfera",-13.83139705657959],["▁gezicht",-13.831405639648438],["честве",-13.831421852111816],["▁entón",-13.831428527832031],["ੌਲ",-13
.831440925598145],["▁mehreren",-13.831449508666992],["▁judeţ",-13.83147144317627],["▁pochodzą",-13.831480026245115],["▁закончил",-13.831515312194824],["▁zaprasza",-13.83152675628662],["▁krema",-13.831531524658203],["ຍ້ອນ",-13.831534385681152],["▁جرمن",-13.831544876098633],["assis",-13.831558227539062],["▁lisens",-13.831562042236328],["▁kelishi",-13.83157444000244],["▁romantis",-13.831578254699709],["▁τραπεζ",-13.83158016204834],["▁yaralı",-13.831595420837402],["▁pressione",-13.831646919250488],["బడి",-13.831650733947754],["առու",-13.831694602966309],["▁направим",-13.831721305847168],["aastane",-13.831725120544434],["▁የነበሩ",-13.83172607421875],["▁Универзитета",-13.831743240356444],["▁ambienti",-13.83179759979248],["▁kendte",-13.831811904907228],["▁fingre",-13.831815719604492],["افات",-13.831823348999023],["театр",-13.831831932067873],["▁guur",-13.831832885742188],["hetőség",-13.83186149597168],["REE",-13.831894874572754],["žimo",-13.83191204071045],["▁brom",-13.83191204071045],["▁kariér",-13.831929206848145],["▁собор",-13.831942558288574],["жене",-13.831949234008787],["ábamos",-13.831971168518066],["▁لڑ",-13.831974983215332],["▁egymást",-13.831993103027344],["▁Forte",-13.832002639770508],["▁Mesaj",-13.832003593444824],["▁antre",-13.832012176513672],["감이",-13.832025527954102],["виконав",-13.832040786743164],["▁vervangen",-13.832049369812012],["urgence",-13.832056045532228],["▁Cirk",-13.832059860229492],["▁мақала",-13.832098960876465],["ttavaksi",-13.832120895385742],["▁kelionės",-13.832120895385742],["wendung",-13.832140922546388],["▁hullu",-13.83214282989502],["▁predstavnici",-13.83214282989502],["messi",-13.83215045928955],["をご紹介",-13.83217716217041],["▁Batı",-13.832179069519045],["▁аспекти",-13.832181930541992],["▁Ivanov",-13.83221435546875],["▁Лип",-13.832216262817385],["▁наадам",-13.832239151000977],["▁прст",-13.83224105834961],["ący",-13.832242965698242],["▁današnji",-13.832304000854492],["తోనే",-13.83230686187744],["▁Foss",-13.83231258392334],["беля",-13.832332611083984],["▁Dori",-13.832335472106934],["▁Zlín",-13.832359313964844],["▁remote",-13.83236598968506],["▁enbart",-13.83238697052002],["▁kuidagi",-13.832389831542969],["▁dayandır",-13.83240795135498],["▁lourd",-13.832429885864258],["читать",-13.832435607910156],["▁mülk",-13.832443237304688],["▁преци",-13.832473754882812],["వేద",-13.832477569580078],["介绍了",-13.83248233795166],["▁engedélyez",-13.832497596740724],["توف",-13.832515716552734],["▁acela",-13.832531929016112],["сунун",-13.832554817199709],["▁Schulen",-13.832560539245604],["catur",-13.832562446594238],["เดีย",-13.8325834274292],["▁intelege",-13.832601547241213],["と思っていた",-13.83261489868164],["▁pašā",-13.83263874053955],["▁osjeti",-13.832650184631348],["ஹி",-13.832651138305664],["▁жүрө",-13.832663536071776],["▁συγκεκριμένη",-13.832681655883787],["▁ներկայացնել",-13.832703590393066],["▁lengre",-13.832712173461914],["angalia",-13.832741737365724],["▁bakış",-13.832748413085938],["荣誉",-13.832818031311035],["▁Արա",-13.832836151123049],["▁universiteto",-13.83284854888916],["芭",-13.832865715026855],["精緻",-13.83287239074707],["赢得",-13.832873344421388],["▁إست",-13.832874298095703],["ಲಾಗುತ್ತಿದೆ",-13.832889556884766],["ພິເສດ",-13.832893371582031],["မီဒီယာ",-13.832894325256348],["申し上げます",-13.832894325256348],["▁विद्यार्थ्यां",-13.832895278930664],["Стварыць",-13.83289623260498],["▁Committee",-13.83289623260498],["▁Gjykatë",-13.83289623260498],["▁Gogledd",-13.83289623260498],["▁Këtë",-13.83289623260498],["▁Pedagogi",-13.83289623260498],["▁Pokémon",-13.83289623260498],["▁Přihlásit",-13.83
289623260498],["▁Thrones",-13.83289623260498],["▁increíble",-13.83289623260498],["▁kërkesa",-13.83289623260498],["▁lütfen",-13.83289623260498],["▁prehliadač",-13.83289623260498],["▁qarşılıqlı",-13.83289623260498],["▁škôl",-13.83289623260498],["▁Бюджет",-13.83289623260498],["▁айналыс",-13.83289623260498],["▁المحافظة",-13.83289623260498],["▁مائارىپ",-13.83289623260498],["▁उत्तराखंड",-13.83289623260498],["▁আত্ম",-13.83289623260498],["▁রবিবার",-13.83289623260498],["▁વગેરે",-13.83289623260498],["▁ଭିଡିଓ",-13.83289623260498],["▁ശതമാനം",-13.83289623260498],["▁පැවැති",-13.83289623260498],["▁여전히",-13.83289623260498],["▁Ovdje",-13.832897186279297],["▁Мирослав",-13.832897186279297],["▁शहीद",-13.832897186279297],["▁Şêx",-13.832898139953612],["▁छनोट",-13.832898139953612],["▁dderbyn",-13.83289909362793],["▁pernyataan",-13.83289909362793],["▁rrjedh",-13.83289909362793],["▁отслабване",-13.83289909362793],["▁хвілін",-13.83289909362793],["▁omgivelser",-13.832900047302246],["ຈິ",-13.83290195465088],["▁disertai",-13.832903861999512],["▁korlátoz",-13.832903861999512],["▁تمایل",-13.832904815673828],["▁رايونلۇق",-13.832905769348145],["oprócz",-13.832908630371094],["▁اشخاص",-13.832908630371094],["▁કચ્છ",-13.832916259765623],["▁mbliana",-13.832918167114258],["▁pengikut",-13.83292007446289],["▁patiënt",-13.832921028137209],["▁קטנה",-13.832921981811523],["▁ragazza",-13.832926750183104],["▁pagalingin",-13.832927703857422],["▁zonne",-13.832927703857422],["▁CÁC",-13.832942962646484],["▁مسترد",-13.832944869995115],["▁نشریه",-13.832944869995115],["▁hromad",-13.8329496383667],["▁Đọc",-13.832952499389648],["▁תשלום",-13.832955360412598],["▁Karakter",-13.832958221435549],["▁Therapie",-13.832958221435549],["ມະຫາ",-13.832966804504396],["▁czegoś",-13.83297061920166],["▁کہاکہ",-13.832978248596191],["เรียง",-13.832982063293455],["▁destacou",-13.83299160003662],["root",-13.83300495147705],["▁اسکا",-13.833005905151367],["ტყვე",-13.833020210266112],["▁zmienić",-13.833020210266112],["▁cartref",-13.833025932312012],["▁لیتے",-13.833029747009276],["▁Japanese",-13.833033561706545],["▁berharga",-13.83304500579834],["اړ",-13.833049774169922],["▁ليبيا",-13.833060264587402],["▁sügis",-13.833075523376465],["▁előfordul",-13.833118438720703],["▁יעדער",-13.83314037322998],["▁ketin",-13.833148002624512],["analiz",-13.833172798156738],["實力",-13.833200454711914],["▁Wajah",-13.833205223083496],["ប្រពន្ធ",-13.833209991455078],["▁کشورمان",-13.833213806152344],["▁qancha",-13.833219528198242],["Мэ",-13.83322048187256],["▁einzelne",-13.833234786987305],["▁אלף",-13.833236694335938],["▁Мей",-13.833269119262695],["▁लेते",-13.833288192749023],["▁دهان",-13.83330535888672],["ίσω",-13.833306312561035],["ψα",-13.833319664001465],["ከን",-13.833324432373049],["▁Башкы",-13.83334732055664],["▁Већ",-13.83336067199707],["▁γραμμή",-13.833369255065918],["stêd",-13.833388328552246],["▁Σας",-13.833388328552246],["lliseen",-13.833389282226562],["▁tvrde",-13.833394050598145],[".000,00",-13.83340072631836],["▁raman",-13.833404541015623],["▁подробности",-13.833412170410156],["▁meeldib",-13.833416938781738],["▁conflicte",-13.833441734313965],["▁felhasznál",-13.833443641662598],["▁توجد",-13.833447456359863],["▁కావడం",-13.83347225189209],["▁utilizzando",-13.83349323272705],["われた",-13.833494186401367],["σοφ",-13.833514213562012],["不論",-13.833518028259276],["Program",-13.833525657653809],["▁affronta",-13.833553314208984],["▁zakupu",-13.833584785461426],["▁조성",-13.83362102508545],["선거",-13.833622932434082],["▁sebaik",-13.833645820617676],["quran",-13.833659172058104],["ਫੋ",-13.8336744
30847168],["ଇଁ",-13.833681106567385],["▁wichtigen",-13.83368968963623],["unuzu",-13.833717346191406],["▁මල්ලි",-13.833736419677734],["▁Veera",-13.83374309539795],["▁Pö",-13.833747863769531],["▁bryn",-13.833748817443848],["▁sıkı",-13.833748817443848],["▁Hamza",-13.83375644683838],["▁Daudz",-13.833773612976074],["vetenskap",-13.833789825439451],["▁SofaScore",-13.833807945251465],["ீஸ்",-13.833815574645996],["بذل",-13.833817481994627],["ЩЕ",-13.833821296691896],["房产",-13.833824157714844],["▁összesen",-13.83382797241211],["AHAN",-13.833837509155272],["▁встречи",-13.833867073059082],["▁његовог",-13.83388614654541],["правления",-13.833901405334473],["▁есен",-13.83391284942627],["感动",-13.833930015563965],["▁tingin",-13.833941459655762],["▁Talán",-13.833990097045898],["▁готовност",-13.834012985229492],["▁sakta",-13.834013938903809],["▁τεχνολογία",-13.834013938903809],["जिन",-13.83401584625244],["▁សុខា",-13.83401584625244],["別に",-13.834019660949709],["wriaeth",-13.834024429321287],["▁համալսարանի",-13.834026336669922],["▁проще",-13.83403205871582],["ליכע",-13.834073066711426],["▁Kína",-13.83410358428955],["▁Taldea",-13.834110260009766],["ിനോട്",-13.834113121032717],["▁fortsat",-13.834136962890623],["▁keho",-13.83414077758789],["▁wondering",-13.83416748046875],["▁xalqın",-13.83416748046875],["▁njoj",-13.834188461303713],["ക്കാതെ",-13.834196090698242],["▁Адрес",-13.834199905395508],["▁höher",-13.834217071533203],["alimentation",-13.834220886230469],["რჯ",-13.834220886230469],["▁مؤمن",-13.834226608276367],["▁ભારતમાં",-13.834226608276367],["▁momentan",-13.834230422973633],["ଏନ୍",-13.834238052368164],["▁aimé",-13.834243774414062],["▁დამი",-13.83425998687744],["▁සිදුවන",-13.83426284790039],["▁chill",-13.83427619934082],["▁szeretnék",-13.83427619934082],["ကြဲ",-13.834290504455566],["▁frio",-13.834312438964844],["मुद",-13.834315299987791],["▁стартира",-13.83432674407959],["šnjo",-13.834342956542969],["kedett",-13.834370613098145],["▁censu",-13.834370613098145],["▁ამისა",-13.834391593933104],["២៣",-13.834421157836914],["▁डट",-13.834437370300291],["▁skrifa",-13.834440231323242],["▁helay",-13.83444881439209],["ুয়া",-13.834484100341797],["▁femeile",-13.834492683410645],["ઝી",-13.83449363708496],["பத்",-13.834497451782228],["▁apporter",-13.834498405456545],["▁문자",-13.834505081176758],["▁številne",-13.834528923034668],["ებისგან",-13.8345308303833],["질환",-13.834540367126465],["▁športov",-13.834550857543944],["વેલ",-13.834561347961426],["水晶",-13.83456325531006],["çıların",-13.834569931030272],["▁pielea",-13.834575653076172],["▁cortes",-13.834602355957031],["ියානු",-13.83462619781494],["ចំពោះ",-13.834628105163574],["▁Lokasi",-13.83464241027832],["▁нутагт",-13.834653854370115],["▁страт",-13.834659576416016],["▁yılları",-13.834664344787598],["▁birtok",-13.834671020507812],["▁distinctio",-13.83470344543457],["▁medarbejder",-13.834704399108888],["lanacak",-13.834716796875],["प्स",-13.83471965789795],["遭受",-13.834722518920898],["獨特的",-13.83474349975586],["▁Մո",-13.83476448059082],["▁ਲਾਲ",-13.834766387939451],["▁Ακ",-13.834778785705566],["ชิน",-13.834787368774414],["没人",-13.83479118347168],["が高く",-13.834793090820312],["▁بنائے",-13.834803581237791],["ošs",-13.834806442260742],["▁එක්ව",-13.834863662719728],["ώνουμε",-13.834901809692385],["▁activitati",-13.834901809692385],["▁domani",-13.834904670715332],["ाकडे",-13.834915161132812],["女神",-13.834935188293455],["vést",-13.834939002990724],["▁käyttöä",-13.83493995666504],["အနီး",-13.834952354431152],["итись",-13.834953308105469],["▁komento",-13.834973335266112],["мпи",-13.8349
90501403809],["▁fascinant",-13.835006713867188],["ԱԼ",-13.835021018981934],["方法を",-13.835067749023438],["ിപ്പിച്ചു",-13.835070610046388],["全台",-13.835094451904297],["lokhu",-13.835097312927246],["டிக்க",-13.835103034973145],["▁cxi",-13.835108757019045],["UMP",-13.835119247436523],["▁tonga",-13.835123062133787],["▁Sąd",-13.83513069152832],["▁tolerant",-13.83513641357422],["▁ысы",-13.835140228271484],["某个",-13.835144996643066],["▁qardaş",-13.835155487060549],["ეძებ",-13.835175514221191],["▁Cair",-13.835201263427734],["▁pilk",-13.835233688354492],["▁utawala",-13.835238456726074],["▁Bør",-13.83524227142334],["▁үнэгүй",-13.835247039794922],["▁carita",-13.83530330657959],["▁ստեղծ",-13.835304260253906],["យុទ្ធ",-13.835335731506348],["ڈو",-13.835341453552246],["▁circunstancias",-13.835351943969728],["▁golv",-13.835351943969728],["年輕人",-13.835360527038574],["▁gjuhës",-13.835369110107422],["sabiedrība",-13.835373878479004],["▁brett",-13.835382461547852],["▁тууган",-13.83539867401123],["▁çempion",-13.835400581359863],["▁sonora",-13.835403442382812],["▁Zelo",-13.835406303405762],["▁سیما",-13.835431098937988],["▁Riigi",-13.835432052612305],["▁gustado",-13.835433959960938],["ැත්ත",-13.835451126098633],["喚",-13.835454940795898],["▁categori",-13.835461616516112],["▁բյուջե",-13.835468292236328],["告诉记者",-13.835468292236328],["▁встановлено",-13.83547592163086],["▁helo",-13.835485458374023],["ตื่น",-13.835489273071287],["Ծառուկյան",-13.835493087768556],["કૃષ્ણ",-13.835493087768556],["อัตโนมัติ",-13.835493087768556],["▁Economía",-13.835493087768556],["▁Garcinia",-13.835493087768556],["▁Majalisar",-13.835493087768556],["▁ciężko",-13.835493087768556],["▁nüfus",-13.835493087768556],["▁tilbehør",-13.835493087768556],["▁κείμενο",-13.835493087768556],["▁υποψήφι",-13.835493087768556],["▁височина",-13.835493087768556],["▁дозвола",-13.835493087768556],["▁жилищно",-13.835493087768556],["▁некаторыя",-13.835493087768556],["▁пажњу",-13.835493087768556],["▁Қарағанды",-13.835493087768556],["▁շնորհիվ",-13.835493087768556],["▁اپلیکیشن",-13.835493087768556],["▁સંપર્ક",-13.835493087768556],["▁ଆହୁରି",-13.835493087768556],["▁ചലച്ചിത്ര",-13.835493087768556],["▁රසිද්ධ",-13.835493087768556],["▁카메라",-13.835493087768556],["▁Untersuchung",-13.835494041442873],["▁riešenie",-13.835494041442873],["▁дискримин",-13.835494041442873],["ऍ",-13.835494995117188],["▁երեկ",-13.835494995117188],["ផលិតផល",-13.835495948791504],["▁הגל",-13.835495948791504],["▁सप्ताह",-13.835495948791504],["▁வெளியிட",-13.835495948791504],["▁Zürich",-13.835497856140137],["▁afbeelding",-13.835497856140137],["▁სტატია",-13.835497856140137],["▁навколо",-13.835498809814451],["▁добавки",-13.83549976348877],["▁Shaklee",-13.835501670837402],["▁экономической",-13.835501670837402],["▁Avenida",-13.835504531860352],["ເຊີນ",-13.83551025390625],["မွဴး",-13.83551025390625],["▁експорт",-13.835511207580566],["▁maimaim",-13.835514068603516],["▁հերթական",-13.835515975952148],["解決方案",-13.835515975952148],["▁keyboard",-13.835524559020996],["▁önálló",-13.835524559020996],["▁čísla",-13.835524559020996],["▁bostads",-13.835525512695312],["▁کپتان",-13.835526466369627],["▁colaboradores",-13.835531234741213],["▁samochód",-13.835536003112791],["▁Hizmetleri",-13.83553695678711],["bré",-13.83554458618164],["ડ્ર",-13.835546493530272],["权益",-13.83554744720459],["thach",-13.835548400878906],["▁conseiller",-13.835549354553224],["bró",-13.83555030822754],["▁يۈەن",-13.83555793762207],["▁insight",-13.835562705993652],["▁Internetseite",-13.835564613342283],["▁mõnus",-13.835565567016602],["▁مدرن",-13.8355655670
16602],["▁gauna",-13.8355712890625],["▁писмено",-13.835578918457031],["▁Wolfgang",-13.835579872131348],["irnar",-13.835590362548828],["▁ගිනි",-13.83559226989746],["▁elecciones",-13.835599899291992],["קוס",-13.835601806640623],["▁ହେବେ",-13.83560276031494],["台東",-13.835618019104004],["▁धारणा",-13.835626602172852],["▁مدافع",-13.835634231567385],["▁Sapi",-13.835638046264648],["▁sodeluje",-13.835644721984863],["ຈົນ",-13.835648536682127],["▁سقف",-13.835653305053713],["▁направлении",-13.835660934448242],["事實",-13.835680961608888],["▁Kiev",-13.83568286895752],["就知道",-13.83568286895752],["braukt",-13.835684776306152],["▁Staaten",-13.835700035095217],["▁fà",-13.835700035095217],["▁Ahogy",-13.835700988769531],["▁يعطي",-13.83570384979248],["ਕ੍ਰ",-13.83570671081543],["▁Vuoden",-13.835726737976074],["▁behulp",-13.835728645324709],["▁Novemba",-13.835739135742188],["▁specifice",-13.835741996765137],["ىلگەن",-13.835750579833984],["▁zaposlen",-13.835750579833984],["jében",-13.835752487182615],["▁സന്തോഷം",-13.83575439453125],["无线",-13.835759162902832],["supra",-13.835771560668944],["▁Hast",-13.835771560668944],["▁basket",-13.835777282714844],["▁biolog",-13.835777282714844],["BON",-13.835780143737791],["決して",-13.835790634155272],["จัดทํา",-13.835796356201172],["▁populli",-13.835803031921388],["esforç",-13.83581829071045],["aniniwala",-13.835823059082031],["lejší",-13.835844993591309],["камер",-13.83585262298584],["▁régió",-13.835857391357422],["▁Correu",-13.835861206054688],["ZNA",-13.835870742797852],["▁keiner",-13.835885047912598],["▁தட",-13.835886001586914],["▁խնդիրների",-13.835887908935549],["▁Аны",-13.835895538330078],["▁sigurnosti",-13.835904121398926],["▁rart",-13.835925102233888],["ੂੰ",-13.835928916931152],["▁žiada",-13.83594036102295],["▁акыл",-13.835941314697266],["▁proident",-13.83595371246338],["▁gjyq",-13.83597469329834],["235",-13.835977554321287],["איי",-13.835978507995604],["રું",-13.835994720458984],["▁huvitava",-13.836004257202148],["▁Riječ",-13.836016654968262],["▁മോഹ",-13.836016654968262],["▁ലോകം",-13.836024284362791],["့ကို",-13.836030960083008],["чисти",-13.83603572845459],["▁killar",-13.836036682128906],["▁Ευρώπης",-13.836068153381348],["ાલ",-13.836071014404297],["baserad",-13.836078643798828],["േശ",-13.83608055114746],["လူငယ္",-13.83608627319336],["இந்த",-13.836087226867676],["▁başvur",-13.836098670959473],["▁Stage",-13.836105346679688],["▁twarz",-13.836111068725586],["▁прилика",-13.836159706115724],["▁नष्ट",-13.836175918579102],["izaci",-13.836188316345217],["لاج",-13.836189270019531],["తారు",-13.836215019226074],["уванням",-13.836225509643556],["మౌ",-13.836264610290527],["目的是",-13.836268424987791],["▁zadnjem",-13.836281776428224],["▁juuli",-13.83630657196045],["▁önemlidir",-13.836323738098145],["▁Fotografie",-13.83632755279541],["▁കിടന്ന",-13.83633041381836],["vakuutus",-13.836337089538574],["▁жиі",-13.836346626281738],["▁աշխատում",-13.836346626281738],["NAD",-13.836365699768066],["▁പ്രശ്ന",-13.836370468139648],["pause",-13.836376190185549],["ádá",-13.836376190185549],["brød",-13.836379051208496],["ੈਟ",-13.836383819580078],["▁energijo",-13.83640956878662],["▁transition",-13.836419105529783],["▁વધી",-13.836437225341797],["验证",-13.836448669433594],["▁максатында",-13.836464881896973],["סין",-13.836466789245604],["▁moord",-13.83647918701172],["▁Kalyan",-13.836495399475098],["▁среду",-13.836506843566896],["didit",-13.83653163909912],["високо",-13.836548805236816],["ಟ್ಸ್",-13.83655071258545],["▁يجد",-13.836557388305664],["▁Степан",-13.83656406402588],["▁ონლაინში",-13.836566925048828],["▁सिट
ी",-13.836596488952637],["መመ",-13.83660125732422],["▁בלוג",-13.836602210998535],["▁wnętrza",-13.836613655090332],["▁प्रत्या",-13.836614608764648],["▁encerra",-13.836627006530762],["▁Isteri",-13.836629867553713],["եւի",-13.836637496948242],["▁plānot",-13.836641311645508],["писать",-13.836669921875],["▁Κιν",-13.83667278289795],["▁якість",-13.83668041229248],["▁svåra",-13.83669376373291],["▁fará",-13.836702346801758],["▁баарын",-13.83670425415039],["summer",-13.836709976196287],["ఎఫ్",-13.836709976196287],["ورز",-13.836735725402832],["こな",-13.836737632751465],["▁самиот",-13.836745262145996],["▁materialer",-13.836746215820312],["儲存",-13.83675479888916],["ghail",-13.836762428283691],["439",-13.83677577972412],["▁aloittaa",-13.836784362792969],["▁моята",-13.836785316467283],["▁darajasi",-13.836786270141602],["žava",-13.836791038513184],["西北",-13.8367919921875],["▁ਸੁਰ",-13.836804389953612],["▁додао",-13.836836814880373],["ložený",-13.83683967590332],["▁febr",-13.83684539794922],["▁патот",-13.83685302734375],["▁Domina",-13.836854934692385],["▁produkten",-13.836870193481444],["▁sentence",-13.836870193481444],["bhre",-13.836892127990724],["גלים",-13.836894989013672],["ធិ",-13.836935997009276],["▁गौरव",-13.836936950683594],["▁hewa",-13.836950302124023],["وازي",-13.836953163146973],["פּער",-13.8369722366333],["ՏԱ",-13.836984634399414],["виховн",-13.83698844909668],["үүлэгч",-13.837030410766602],["ন্দা",-13.83703327178955],["▁سوله",-13.837039947509766],["▁వ్యా",-13.83705234527588],["▁එනවා",-13.837059020996094],["▁abejo",-13.83706283569336],["ნტა",-13.837064743041992],["▁masaj",-13.837090492248535],["اقتصاد",-13.837092399597168],["ambient",-13.837115287780762],["▁institusi",-13.837130546569824],["▁מתחת",-13.837148666381836],["▁09.00",-13.83718204498291],["liidu",-13.8372163772583],["▁celebre",-13.837239265441896],["▁chegara",-13.83725929260254],["▁diners",-13.8372802734375],["ANDI",-13.837294578552246],["▁dəyişiklik",-13.837309837341309],["▁τετ",-13.83731174468994],["▁benedic",-13.83731746673584],["▁помогнат",-13.837335586547852],["▁povijest",-13.837347030639648],["950",-13.837357521057127],["거래",-13.837371826171877],["▁kissa",-13.837409973144531],["άλε",-13.837441444396973],["▁séð",-13.83745002746582],["▁ఆధార",-13.837467193603516],["ಿಸುವುದು",-13.83747673034668],["▁Antar",-13.837478637695312],["▁Olisi",-13.837506294250488],["зис",-13.837509155273438],["▁الجل",-13.837523460388184],["▁Вяр",-13.83752727508545],["fiks",-13.83755874633789],["▁ziyan",-13.837559700012209],["▁kwamen",-13.837599754333496],["ቀድ",-13.837663650512695],["отворен",-13.837674140930176],["обществ",-13.837684631347656],["▁pozitive",-13.83770751953125],["▁додати",-13.837723731994627],["▁Халық",-13.837750434875488],["▁spezielle",-13.837762832641602],["货物",-13.837774276733398],["▁invité",-13.837791442871094],["▁numar",-13.837808609008787],["▁materiału",-13.83782196044922],["րեն",-13.837841033935549],["นน",-13.837844848632812],["Бра",-13.83785629272461],["▁maddeler",-13.837898254394531],["▁Şü",-13.837902069091797],["ΕΙΣ",-13.83790397644043],["▁zlata",-13.837925910949709],["大师",-13.837928771972656],["▁stavi",-13.837929725646973],["▁utvide",-13.837935447692873],["▁משמעותי",-13.837971687316896],["▁يحت",-13.837971687316896],["栋",-13.838025093078612],["▁przeczyta",-13.838027954101562],["▁Prif",-13.838037490844728],["勇敢",-13.838042259216309],["▁Tác",-13.838043212890623],["內地",-13.83804416656494],["ėlio",-13.838050842285156],["íkov",-13.838059425354004],["▁devoir",-13.838061332702637],["▁चर्च",-13.838062286376951],["頸",-13.83806324005127],["鹏",-13.
838064193725586],["新西兰",-13.838067054748535],["▁никад",-13.838071823120115],["▁노인",-13.838078498840332],["懷孕",-13.838082313537598],["նես",-13.838095664978027],["▁pëlqe",-13.838096618652344],["▁ಲಭ್ಯ",-13.838096618652344],["‒",-13.83809757232666],["▁Engagement",-13.83809757232666],["▁Mtendaji",-13.83809757232666],["▁Symptom",-13.83809757232666],["▁agrícola",-13.83809757232666],["▁ehdottomasti",-13.83809757232666],["▁gekregen",-13.83809757232666],["▁İsveç",-13.83809757232666],["▁σημεία",-13.83809757232666],["▁σημερινή",-13.83809757232666],["▁большинстве",-13.83809757232666],["▁відбувся",-13.83809757232666],["▁република",-13.83809757232666],["▁сапраўды",-13.83809757232666],["▁тренутку",-13.83809757232666],["▁իմաստ",-13.83809757232666],["▁النواب",-13.83809757232666],["▁هشدار",-13.83809757232666],["▁ہتھیار",-13.83809757232666],["▁રાજકોટ",-13.83809757232666],["▁விளம்பர",-13.83809757232666],["▁საგარეო",-13.83809757232666],["▁ქრისტიან",-13.83809757232666],["សិល្បៈ",-13.838098526000977],["▁људски",-13.838098526000977],["▁सहभागिता",-13.838099479675291],["הוצאות",-13.83810043334961],["▁Inbhir",-13.83810043334961],["▁kvaliteedi",-13.83810043334961],["▁melebihi",-13.83810043334961],["▁меѓународни",-13.83810043334961],["▁הצדדים",-13.83810043334961],["▁පරාජය",-13.83810043334961],["▁පෝස්ට්",-13.83810043334961],["▁infraestrutura",-13.838101387023926],["▁biserici",-13.838102340698242],["▁rezultātā",-13.838102340698242],["▁xelkê",-13.838104248046877],["▁arratsalde",-13.838105201721191],["ṣṭ",-13.838106155395508],["▁folosește",-13.838106155395508],["▁jaarlijks",-13.838106155395508],["▁moterų",-13.838106155395508],["▁арналган",-13.838106155395508],["▁περιλαμβάνει",-13.83810806274414],["▁enllà",-13.83811092376709],["▁Seattle",-13.838112831115724],["▁اړوند",-13.83811378479004],["▁நானும்",-13.83811378479004],["▁ಹಾಡು",-13.838117599487305],["▁поздно",-13.838120460510254],["▁Cámara",-13.83812141418457],["▁формирования",-13.838123321533203],["▁stóð",-13.838127136230469],["▁මෙහෙ",-13.838128089904783],["▁өзгөртүү",-13.838129997253418],["▁stesse",-13.83813190460205],["▁Thường",-13.838138580322266],["▁algú",-13.838144302368164],["▁Werbung",-13.838147163391112],["▁Яшчэ",-13.838147163391112],["▁menentang",-13.838149070739746],["▁cruise",-13.838150024414062],["▁երբեք",-13.838150024414062],["圖案",-13.838153839111328],["▁Consulting",-13.838154792785645],["▁portanto",-13.838159561157228],["▁осуществления",-13.838165283203123],["附件",-13.83816909790039],["▁policisë",-13.838172912597656],["▁rawatan",-13.838175773620604],["▁ಬ್ಲ",-13.838179588317873],["▁prijevoz",-13.838180541992188],["▁реестр",-13.838186264038086],["տրանս",-13.838190078735352],["▁वोट",-13.838191032409668],["▁яйца",-13.838191986083984],["▁ymmär",-13.838200569152832],["keskuksen",-13.838213920593262],["▁magaca",-13.83823299407959],["▁típusú",-13.83823299407959],["▁appeal",-13.838236808776855],["▁Güvenlik",-13.838248252868652],["▁նախարարի",-13.8382568359375],["▁sanggol",-13.83825969696045],["▁sellaista",-13.838266372680664],["▁укугу",-13.83826732635498],["하였습니다",-13.838284492492676],["▁fitore",-13.838285446166992],["मैं",-13.838308334350586],["▁temperament",-13.838311195373535],["ړن",-13.838318824768066],["▁навчального",-13.838323593139648],["ティー",-13.83834171295166],["▁recomana",-13.838346481323242],["лээд",-13.838353157043455],["förande",-13.838356018066406],["▁huomioon",-13.838358879089355],["▁빨리",-13.838358879089355],["▁problemele",-13.838360786437988],["ΑΝΑ",-13.838366508483888],["▁innocent",-13.838369369506836],["一分",-13.838369369506836],["▁monda",-13.838374137878
418],["▁западно",-13.838376998901367],["▁novinky",-13.83837890625],["EIRA",-13.83838176727295],["过去的",-13.838404655456545],["हरूमा",-13.838407516479492],["ቀለ",-13.838409423828123],["节日",-13.83841323852539],["▁пожал",-13.838418960571287],["▁Thy",-13.838425636291504],["▁موقت",-13.838434219360352],["កាន់តែ",-13.838438987731934],["ticket",-13.83845043182373],["▁Yaşam",-13.838459968566896],["▁Alegre",-13.838465690612791],["▁sprejel",-13.838489532470703],["jaribu",-13.838505744934082],["▁Mesir",-13.838507652282717],["▁tenkt",-13.838510513305664],["ampiseho",-13.838513374328612],["ក្រម",-13.838513374328612],["▁Chomh",-13.838513374328612],["▁الجنس",-13.83851432800293],["▁ശ്രദ്ധിക്ക",-13.83851432800293],["▁ahoj",-13.838515281677246],["▁specula",-13.838515281677246],["▁галіне",-13.838516235351562],["IQUE",-13.838547706604004],["▁сваімі",-13.838567733764648],["▁главната",-13.83857536315918],["koulutuksen",-13.838594436645508],["ထားပါ",-13.83859920501709],["▁материально",-13.83860969543457],["▁ተመል",-13.838628768920898],["▁informative",-13.838634490966797],["▁серии",-13.838643074035645],["▁спрема",-13.838643074035645],["▁POT",-13.83864402770996],["▁vezér",-13.838666915893556],["▁automobilis",-13.838674545288086],["▁mnoha",-13.838674545288086],["▁близина",-13.8386812210083],["▁vidéos",-13.838683128356934],["रूम",-13.838685035705566],["▁بالصور",-13.838690757751465],["▁Redmi",-13.838717460632324],["▁kmalu",-13.838732719421388],["krad",-13.838736534118652],["▁зголемување",-13.838751792907717],["考量",-13.838756561279297],["▁տիր",-13.83876609802246],["▁входит",-13.838773727416992],["▁שאת",-13.83878231048584],["▁Inova",-13.83879280090332],["iskus",-13.838805198669434],["ପେ",-13.838823318481444],["adoù",-13.838825225830078],["不下",-13.838828086853027],["名為",-13.83884334564209],["▁poroča",-13.838859558105469],["▁existuje",-13.838866233825684],["▁Regal",-13.838871955871582],["▁jalka",-13.838874816894531],["▁stryd",-13.83887767791748],["▁biznesi",-13.83888053894043],["ırıcı",-13.838881492614746],["▁சுற்று",-13.838887214660645],["かわ",-13.838891983032228],["▁учурунда",-13.83893585205078],["สกุล",-13.838971138000488],["▁TOT",-13.838971138000488],["史上",-13.838972091674805],["ატორი",-13.83897304534912],["跟他",-13.838982582092283],["ୁର",-13.838987350463867],["נכס",-13.838991165161133],["▁neljän",-13.838991165161133],["▁hebûn",-13.839011192321776],["ಂತ್ರ",-13.839038848876951],["無論是",-13.839049339294434],["球隊",-13.839062690734863],["ỹ",-13.839069366455078],["୍ୱା",-13.83908748626709],["ឯង",-13.839093208312988],["▁geweldige",-13.839097023010254],["▁1830",-13.839118957519531],["▁euró",-13.83913803100586],["خدا",-13.839150428771973],["▁قوه",-13.839153289794922],["▁سياسية",-13.839165687561035],["▁ինչի",-13.83917236328125],["▁луд",-13.839177131652832],["▁erop",-13.839197158813477],["PLA",-13.839200973510742],["▁nadšen",-13.839229583740234],["▁predstave",-13.839231491088867],["lišta",-13.83925724029541],["▁mjekë",-13.839262962341309],["▁salên",-13.839263916015623],["თხი",-13.839266777038574],["ဖွဲ့",-13.839284896850586],["ETU",-13.839317321777344],["▁topik",-13.83932399749756],["мобил",-13.839353561401367],["▁përket",-13.839383125305176],["▁болни",-13.839396476745604],["ชําระ",-13.839398384094238],["▁Galeria",-13.839399337768556],["मृत",-13.839404106140137],["▁eső",-13.83944034576416],["▁തോന്നിയ",-13.839486122131348],["▁പങ്കെടുക്ക",-13.83949089050293],["चिन्",-13.839512825012209],["▁dường",-13.839520454406738],["▁шкіри",-13.839536666870115],["ความหมาย",-13.8395414352417],["▁remise",-13.839550018310549],["▁призначен",-13.83955574035
6444],["ସ୍ତା",-13.839566230773926],["▁рецепта",-13.839574813842772],["مديرية",-13.839622497558594],["▁செய்வது",-13.839622497558594],["捨て",-13.839624404907228],["▁автомобиля",-13.839628219604492],["▁Panama",-13.839670181274414],["▁spomenut",-13.839674949645996],["▁آوردن",-13.839678764343262],["ให้แก่",-13.839683532714844],["▁2400",-13.839689254760742],["yilda",-13.83969497680664],["ল্ড",-13.83971118927002],["íssim",-13.839729309082031],["ಬಲ್",-13.83973789215088],["▁ਨਿਰ",-13.839741706848145],["សាលា",-13.83974838256836],["kirjoitus",-13.839751243591309],["ώσεων",-13.839771270751951],["állal",-13.839820861816406],["▁දේව",-13.839858055114746],["走路",-13.83985996246338],["гуль",-13.839862823486328],["masina",-13.839865684509276],["ákkal",-13.83987808227539],["joog",-13.839920043945312],["▁одбора",-13.839930534362791],["景觀",-13.839937210083008],["ruv",-13.839947700500488],["▁konar",-13.839951515197754],["▁zece",-13.83996295928955],["をかけて",-13.839971542358398],["▁приобретен",-13.839982986450195],["▁затворен",-13.839998245239258],["єн",-13.840007781982422],["ושם",-13.840057373046877],["kaupunki",-13.840063095092772],["在這個",-13.84006690979004],["想着",-13.840073585510254],["▁consent",-13.840078353881836],["대행",-13.840083122253418],["▁vogel",-13.84008502960205],["▁легло",-13.840087890625],["▁mužov",-13.840093612670898],["▁баспа",-13.840105056762695],["cadh",-13.840132713317873],["▁potřebuje",-13.840189933776855],["▁හින්ද",-13.840192794799805],["地图",-13.840194702148438],["ทําลาย",-13.840198516845703],["trick",-13.840211868286133],["なくなり",-13.840221405029297],["▁Xil",-13.840227127075195],["▁kamēr",-13.84023666381836],["▁Maku",-13.840248107910156],["国际化",-13.840250968933104],["▁बाप",-13.840252876281738],["beleid",-13.840261459350586],["ھان",-13.840289115905762],["▁skaber",-13.840298652648926],["▁Conrad",-13.840306282043455],["397",-13.84031581878662],["▁funció",-13.840319633483888],["▁rugi",-13.840340614318848],["不高",-13.840352058410645],["▁Immobilien",-13.840353965759276],["▁അർ",-13.840354919433594],["ОВИЋ",-13.84035587310791],["ของเวิร์ม",-13.840359687805176],["▁البحرين",-13.840362548828123],["ကမ္း",-13.84036922454834],["先の",-13.840374946594238],["ιτς",-13.84037971496582],["▁ætla",-13.840383529663086],["ಿಟ್ಟ",-13.840392112731934],["▁ਨਵ",-13.84039306640625],["▁кариера",-13.840394973754885],["▁antallet",-13.84040355682373],["▁Politiet",-13.840404510498049],["▁النقد",-13.840413093566896],["เสา",-13.84042263031006],["▁तपास",-13.84042739868164],["ฝ้า",-13.840442657470703],["▁Niger",-13.840461730957031],["▁hanki",-13.840476989746094],["▁mínim",-13.840484619140623],["▁vajag",-13.84048557281494],["▁ይጠ",-13.840516090393066],["dwr",-13.840518951416016],["を読んで",-13.840519905090332],["▁példá",-13.840534210205078],["▁여기서",-13.840585708618164],["පාන",-13.84058666229248],["ણું",-13.840590476989746],["▁ويله",-13.84059715270996],["▁riceve",-13.84060764312744],["▁izby",-13.840616226196287],["სწრაფ",-13.840617179870604],["▁Both",-13.840619087219238],["▁marge",-13.840667724609377],["▁verəcək",-13.840670585632324],["▁زمر",-13.840675354003906],["▁वेळा",-13.840678215026855],["▁modificación",-13.840679168701172],["猴",-13.84068489074707],["▁Terus",-13.840685844421388],["ENDI",-13.840689659118652],["▁Վիքիպեդիա",-13.84069538116455],["ຂ້ອຍ",-13.840704917907717],["▁ძმა",-13.840704917907717],["ภูเก็ต",-13.840706825256348],["趙",-13.840706825256348],["เมล็ด",-13.840707778930664],["ใกล้เคียง",-13.840707778930664],["ကိစ္စ",-13.840707778930664],["ព្រឹត្តិការណ៍",-13.840707778930664],["੩",-13.84070873260498],["ኦርቶዶክስ",-13.84070873260498],["▁
Rajasthan",-13.84070873260498],["▁Verbesserung",-13.84070873260498],["▁ryzyko",-13.84070873260498],["▁têkoşîn",-13.84070873260498],["▁дзяржаўнай",-13.84070873260498],["▁Լեռնայի",-13.84070873260498],["▁नतिजा",-13.84070873260498],["▁సామాజిక",-13.84070873260498],["▁ക്യാമറ",-13.84070873260498],["▁വികസന",-13.84070873260498],["▁მიღებული",-13.84070873260498],["▁ትኩረት",-13.84070873260498],["펀",-13.84070873260498],["▁Aslında",-13.840709686279297],["▁Mehriban",-13.840709686279297],["▁stuðning",-13.840709686279297],["▁Αμερική",-13.840710639953612],["▁직업",-13.840710639953612],["▁geschützt",-13.84071159362793],["▁məktub",-13.84071159362793],["▁ئەسەر",-13.84071159362793],["▁سخنرانی",-13.84071159362793],["鋒",-13.840712547302246],["▁зорилт",-13.840713500976562],["▁ਵਰਤੋਂ",-13.840713500976562],["도서관",-13.840713500976562],["▁provenienti",-13.84071445465088],["▁ভবন",-13.840715408325195],["▁Leabhar",-13.840716361999512],["▁soğuk",-13.840716361999512],["▁μάχη",-13.840717315673828],["▁djeluje",-13.840718269348145],["▁շուտ",-13.840718269348145],["▁స్నేహ",-13.840721130371094],["▁ಬಯಸ",-13.84072208404541],["▁điệu",-13.840723037719728],["自分自身",-13.840723037719728],["▁təkcə",-13.840723991394045],["▁површина",-13.840726852416992],["▁злато",-13.840730667114258],["▁הולך",-13.840730667114258],["เก๋",-13.840731620788574],["ສິ່ງ",-13.840731620788574],["▁जैसी",-13.84073257446289],["สั",-13.84073543548584],["▁ಸದ್ಯ",-13.840738296508787],["▁სპეც",-13.840738296508787],["いたしました",-13.840739250183104],["▁स्वभाव",-13.840740203857422],["جاح",-13.840747833251951],["▁određeni",-13.84074878692627],["▁الصادر",-13.840753555297852],["▁वैसे",-13.840758323669434],["▁Könyv",-13.8407621383667],["▁notranji",-13.840764999389648],["▁indiqué",-13.84076976776123],["▁TRU",-13.840770721435549],["▁предпри",-13.840789794921877],["▁Тийм",-13.840795516967772],["▁навчальн",-13.840795516967772],["▁ప్రీ",-13.840797424316406],["សាក",-13.840805053710938],["ຄັ້ງ",-13.840807914733888],["▁volunteer",-13.840810775756836],["▁shak",-13.840813636779783],["СТРА",-13.840814590454102],["1986",-13.840816497802734],["▁облысында",-13.840826034545898],["▁आफु",-13.840827941894531],["▁Transforma",-13.84085750579834],["▁abdi",-13.840860366821287],["▁lamenta",-13.840862274169922],["ทราย",-13.840864181518556],["▁részét",-13.840867042541504],["▁దానికి",-13.840868949890137],["▁கோயில்",-13.84087371826172],["▁БЕЗ",-13.840877532958984],["▁මහතාගේ",-13.8408784866333],["▁conçu",-13.840895652770996],["LLO",-13.840897560119627],["લ્ટ",-13.840901374816896],["▁Olimpiad",-13.840919494628906],["▁kuule",-13.840923309326172],["▁používať",-13.840925216674805],["▁gojë",-13.840929985046388],["▁современных",-13.840934753417969],["▁zaitez",-13.840940475463867],["▁vecinos",-13.840947151184082],["▁Tô",-13.84095287322998],["ቡን",-13.84095859527588],["UNDA",-13.840960502624512],["▁zawierają",-13.840971946716309],["▁4*",-13.840978622436523],["▁نسخة",-13.84099006652832],["▁අට",-13.84099578857422],["гуна",-13.840997695922852],["úsáid",-13.841002464294434],["▁kés",-13.841015815734863],["AZIONE",-13.841024398803713],["▁በተለይም",-13.841028213500977],["▁منصوبے",-13.84103012084961],["▁analitik",-13.841056823730469],["▁ميزان",-13.841058731079102],["▁աշխատանքների",-13.84106731414795],["дцать",-13.841075897216797],["टु",-13.841089248657228],["▁Страница",-13.841090202331545],["▁stoppa",-13.841099739074709],["次回",-13.841102600097656],["の間に",-13.841104507446287],["▁chargé",-13.841108322143556],["▁Milit",-13.841126441955566],["▁calatori",-13.8411283493042],["▁қалалық",-13.8411283493042],["ქციო",-13.841134071350098],["▁ажи
ллагаанд",-13.841181755065918],["رتفع",-13.841184616088867],["▁அளவு",-13.841190338134766],["▁svetainės",-13.841231346130373],["ကပ္",-13.841236114501951],["▁Einige",-13.841236114501951],["方には",-13.841238975524902],["حاق",-13.841248512268066],["▁القوة",-13.8412504196167],["▁քաղաքականության",-13.841251373291016],["▁odnosa",-13.841256141662598],["▁सद्",-13.841265678405762],["การ์ด",-13.84127712249756],["ಮಾನ್",-13.84129524230957],["▁poderão",-13.841297149658203],["裝飾",-13.841316223144531],["ბორ",-13.84133243560791],["Gasteiz",-13.841334342956545],["▁ತಿಳಿಸ",-13.84133529663086],["▁ოფიციალური",-13.841344833374023],["▁आलेल्या",-13.841350555419922],["ూర్",-13.841351509094238],["食品安全",-13.841351509094238],["▁кінці",-13.841415405273438],["队员",-13.84141731262207],["▁עבודת",-13.84142017364502],["গঞ্জে",-13.84145450592041],["が良い",-13.84146499633789],["▁prizor",-13.841469764709473],["▁کیف",-13.841477394104004],["▁कोर",-13.8414945602417],["▁Bakının",-13.84150218963623],["▁pačiu",-13.841506958007812],["▁Bruker",-13.841511726379396],["▁tööle",-13.841517448425291],["▁inginkan",-13.841522216796877],["不动",-13.841524124145508],["▁çıkart",-13.841562271118164],["▁afectat",-13.841581344604492],["નલ",-13.84158420562744],["bón",-13.841609001159668],["▁Sər",-13.84164047241211],["▁yaparken",-13.84165859222412],["ทดสอบ",-13.841668128967283],["▁serĉ",-13.841678619384766],["▁ורק",-13.841679573059082],["▁piisa",-13.841680526733398],["eivel",-13.841683387756348],["बाटै",-13.841710090637209],["posisi",-13.84171199798584],["▁pierwszych",-13.841718673706056],["▁potrzebne",-13.841755867004396],["криття",-13.841764450073242],["▁celotne",-13.841791152954102],["▁එකයි",-13.841794967651367],["▁dokumentów",-13.841808319091797],["▁Contract",-13.841814041137695],["▁Yama",-13.84182834625244],["▁izvrši",-13.84184455871582],["▁umsebenzi",-13.841858863830566],["本校",-13.841862678527832],["େଟ",-13.84186553955078],["rohen",-13.841883659362791],["ဟိ",-13.84188461303711],["▁Vegeta",-13.841888427734377],["▁அதிகாரி",-13.841890335083008],["▁جمعی",-13.84189510345459],["可愛的",-13.841898918151855],["九州",-13.841912269592283],["▁простой",-13.841926574707031],["jelentkezés",-13.841927528381348],["理事",-13.841940879821776],["▁عسکر",-13.841944694519045],["▁igru",-13.841946601867676],["▁सोने",-13.841947555541992],["▁served",-13.841955184936523],["▁sakinləri",-13.841965675354004],["▁Nepali",-13.841976165771484],["▁ölç",-13.841997146606444],["▁Claudia",-13.841999053955078],["▁хоће",-13.841999053955078],["▁Kerry",-13.842001914978027],["ичката",-13.842018127441406],["innovazione",-13.842031478881836],["▁draugi",-13.842049598693848],["யிட",-13.84206199645996],["▁Bone",-13.84206199645996],["438",-13.842065811157228],["Natur",-13.842065811157228],["たちの",-13.842079162597656],["▁wiedział",-13.842085838317873],["▁ಕರ್ನಾಟಕದ",-13.842092514038086],["▁Watson",-13.8421049118042],["▁Wajib",-13.84211254119873],["▁максимално",-13.842145919799805],["▁rajin",-13.842185974121094],["művek",-13.84219741821289],["▁Neki",-13.842202186584473],["zaman",-13.842204093933104],["politiikka",-13.842215538024902],["▁ingredienti",-13.842232704162598],["▁ಇವೆ",-13.842245101928713],["▁madaxda",-13.842255592346191],["▁prisão",-13.84226894378662],["▁daň",-13.842287063598633],["▁యాత్ర",-13.842293739318848],["▁Puc",-13.84229564666748],["nizsə",-13.842296600341797],["▁obes",-13.842297554016112],["▁ਪਿਆ",-13.842320442199709],["▁updated",-13.842339515686035],["finden",-13.842345237731934],["▁වස",-13.842351913452148],["áris",-13.842358589172363],["▁нүүрс",-13.842362403869627],["▁vjerni",-13.84236812591552
7],["▁nutrition",-13.842384338378906],["ВАНЕ",-13.842398643493652],["▁vím",-13.842406272888184],["פנ",-13.842408180236816],["dodd",-13.842421531677246],["ístiques",-13.842436790466309],["ānija",-13.842463493347168],["▁hakata",-13.84247589111328],["▁tenda",-13.84247875213623],["▁duran",-13.84251880645752],["▁sporočil",-13.842522621154783],["szán",-13.842527389526367],["▁paredz",-13.842538833618164],["йық",-13.84254264831543],["krive",-13.84256076812744],["▁aliyo",-13.842571258544922],["▁Axel",-13.84257698059082],["▁Pend",-13.842577934265137],["▁lekë",-13.842580795288086],["អត្ថបទ",-13.842607498168944],["נטו",-13.842656135559082],["య్యారు",-13.842657089233398],["MIR",-13.84267520904541],["Linux",-13.842676162719728],["নিউজ",-13.842676162719728],["WTO",-13.842680931091309],["ردن",-13.842702865600586],["▁pauta",-13.842705726623535],["▁suola",-13.8427152633667],["სიტყვ",-13.842750549316406],["▁tarefa",-13.84276008605957],["▁plannen",-13.842763900756836],["▁مهما",-13.842775344848633],["▁kobieta",-13.842796325683594],["дних",-13.84279727935791],["▁وڏا",-13.842851638793944],["▁verken",-13.84287452697754],["टिङ",-13.842877388000488],["การดูแล",-13.842910766601562],["derunt",-13.842936515808104],["чыне",-13.842942237854004],["▁bevindt",-13.84299659729004],["▁zgodi",-13.843003273010254],["تكلم",-13.843017578125],["৩৩",-13.843048095703123],["ออฟ",-13.84305477142334],["暮",-13.843060493469238],["គោ",-13.843063354492188],["▁banker",-13.843066215515137],["▁Také",-13.84306812286377],["▁Ayat",-13.843088150024414],["▁ජල",-13.843107223510742],["üldü",-13.843117713928224],["ياتي",-13.843124389648438],["▁manifestar",-13.84312915802002],["ಧಾ",-13.8431396484375],["▁зарегистрирова",-13.843145370483398],["bhan",-13.843152046203612],["▁malum",-13.843158721923828],["ахгүй",-13.84316635131836],["ობდნენ",-13.84316635131836],["▁Konferenci",-13.843172073364258],["▁макро",-13.843180656433104],["▁visiting",-13.843216896057127],["ทั่วประเทศ",-13.843243598937988],["όντων",-13.84325122833252],["라이트",-13.843253135681152],["▁perfetta",-13.843265533447266],["غفر",-13.843271255493164],["èrent",-13.843276023864746],["披",-13.843278884887695],["ทะเบียน",-13.843284606933594],["கொள்ளுங்கள்",-13.84329605102539],["ਗੁਰ",-13.843307495117188],["▁የነ",-13.843311309814451],["始終",-13.843317985534668],["ตั๋ว",-13.843324661254885],["▁läkemedel",-13.8433256149292],["▁उपत्यका",-13.8433256149292],["▁Gaziantep",-13.843326568603516],["▁Vertrauen",-13.843326568603516],["▁ditawarkan",-13.843326568603516],["▁ponownie",-13.843326568603516],["▁tsunami",-13.843326568603516],["▁täynnä",-13.843326568603516],["▁xəsarət",-13.843326568603516],["▁zariadenie",-13.843326568603516],["▁пасажир",-13.843326568603516],["▁уређај",-13.843326568603516],["▁գիրք",-13.843326568603516],["▁אשכול",-13.843326568603516],["▁بلتستان",-13.843326568603516],["▁ژاپن",-13.843326568603516],["▁प्रगति",-13.843326568603516],["▁বৃদ্ধি",-13.843326568603516],["▁ამერიკის",-13.843326568603516],["겸",-13.843326568603516],["▁possuem",-13.843327522277832],["▁բժշկական",-13.843327522277832],["▁Μόνο",-13.843328475952148],["▁bikaranîn",-13.843329429626465],["▁liječnik",-13.843329429626465],["▁миналата",-13.843329429626465],["▁откуда",-13.84333038330078],["クリスマス",-13.84333038330078],["▁účinky",-13.843331336975098],["▁ثروت",-13.843332290649414],["▁ጉዞ",-13.843332290649414],["▁ponavlja",-13.84333324432373],["▁агаарын",-13.84333324432373],["▁მიმდინარეობს",-13.84333324432373],["▁időjárás",-13.843335151672363],["▁تأمین",-13.843337059020996],["▁ইমেইল",-13.843339920043944],["▁şəxsiyyət",-13.843343734741213],["▁بخ
وانید",-13.843343734741213],["▁شایع",-13.84334659576416],["▁یادگیری",-13.84334659576416],["▁remarque",-13.843348503112791],["▁gestaltet",-13.84334945678711],["▁brænde",-13.843354225158691],["▁paljude",-13.843356132507324],["▁trabalhadores",-13.843356132507324],["▁ساڳي",-13.843356132507324],["Mel",-13.84336757659912],["▁panduan",-13.843371391296388],["▁capsule",-13.843376159667969],["RAY",-13.843378067016602],["▁sritys",-13.843385696411133],["▁більшість",-13.843395233154297],["પ્લે",-13.843402862548828],["▁позже",-13.843402862548828],["▁అన్నీ",-13.84341049194336],["▁versterk",-13.843411445617676],["▁funkcji",-13.843424797058104],["▁poucas",-13.843424797058104],["valec",-13.843433380126951],["法庭",-13.843439102172852],["フレ",-13.843445777893066],["▁الإعلان",-13.843448638916016],["▁erdvė",-13.843469619750977],["▁hibrid",-13.84347152709961],["ನಾಡ",-13.84347438812256],["▁ლამაზი",-13.843477249145508],["▁atlygin",-13.843480110168455],["▁внутрішніх",-13.843518257141112],["▁социјална",-13.843518257141112],["▁blakus",-13.84351921081543],["▁Claire",-13.843534469604492],["▁تنهنجي",-13.84353733062744],["ierenden",-13.843538284301758],["▁متفق",-13.843542098999023],["tetni",-13.843544960021973],["гууд",-13.843550682067873],["的脸",-13.84356689453125],["stiden",-13.843567848205566],["▁berezia",-13.843568801879885],["▁милиция",-13.843572616577148],["▁belangrijkste",-13.84359359741211],["▁rispondere",-13.84360122680664],["433",-13.843602180480955],["▁चोर",-13.843602180480955],["มัด",-13.843603134155272],["▁graduate",-13.84360408782959],["▁მაისი",-13.843608856201172],["▁զարգացում",-13.843609809875488],["ေအ",-13.843612670898438],["▁ýmis",-13.84363079071045],["▁deserve",-13.84365177154541],["▁ദൈവം",-13.843653678894045],["▁qaadi",-13.843655586242676],["▁fénykép",-13.843657493591309],["▁spontane",-13.84366226196289],["СЛ",-13.84366512298584],["ווד",-13.843668937683104],["▁Tief",-13.843672752380373],["ցել",-13.843676567077637],["▁qorxu",-13.84369659423828],["▁claramente",-13.843701362609863],["▁Pato",-13.84371566772461],["390",-13.843720436096191],["▁යෝජනාව",-13.843724250793455],["ライト",-13.843730926513672],["▁Bapak",-13.84373950958252],["STY",-13.843762397766112],["广西",-13.843765258789062],["시티",-13.843768119812012],["ቤተክርስቲያን",-13.843770027160645],["▁druhého",-13.843786239624023],["▁стање",-13.843793869018556],["▁Shaw",-13.843827247619627],["▁formában",-13.84384536743164],["▁okrug",-13.843846321105955],["күй",-13.843879699707031],["▁నేతలు",-13.843887329101562],["▁lũ",-13.843889236450195],["andri",-13.843890190124512],["ניצח",-13.843891143798828],["▁മുഖ്യ",-13.843917846679688],["တောင်း",-13.843918800354004],["▁జన్మ",-13.84391975402832],["२१",-13.843928337097168],["▁гражданин",-13.843928337097168],["职能",-13.8439302444458],["▁Krakow",-13.84393310546875],["▁instrumental",-13.8439359664917],["▁முடிந்த",-13.843942642211914],["一堆",-13.84394359588623],["▁Singa",-13.843955993652344],["▁بىرى",-13.843968391418455],["bygget",-13.843987464904783],["▁מסוג",-13.843988418579102],["▁knihu",-13.843998908996582],["552",-13.844001770019531],["▁firmę",-13.844001770019531],["த்திரி",-13.84400749206543],["▁terrestre",-13.844016075134276],["▁qualitativ",-13.844029426574709],["▁putri",-13.844036102294922],["▁Хорошо",-13.844045639038086],["մեքենա",-13.844072341918944],["▁ዋናው",-13.844082832336426],["▁Dokter",-13.844090461730955],["▁adatokat",-13.84409999847412],["habitatge",-13.844100952148438],["tenberg",-13.8441162109375],["▁Марковић",-13.844133377075195],["ònica",-13.844134330749512],["はまだ",-13.84413719177246],["▁Энд",-13.84415054321289],["
▁mamin",-13.844161033630373],["▁vigtig",-13.8441801071167],["▁अह",-13.84422206878662],["▁Medzi",-13.844223022460938],["▁Velika",-13.844225883483888],["▁mahtu",-13.844231605529783],["▁spēle",-13.844249725341797],["ುತ್ತಿದ್ದರು",-13.844258308410645],["▁kampeni",-13.844258308410645],["uiesc",-13.844286918640137],["inėti",-13.844305038452148],["laşdırma",-13.844310760498049],["દેવ",-13.844337463378906],["BJ",-13.844338417053224],["▁ජාතිය",-13.844345092773438],["▁Pécs",-13.84434986114502],["▁lék",-13.844352722167969],["▁రైతు",-13.844359397888184],["▁курсы",-13.844364166259766],["▁déan",-13.844365119934082],["▁rezervacij",-13.844367027282717],["žena",-13.844377517700195],["▁Opprett",-13.844383239746094],["▁بمبار",-13.844388008117676],["פארט",-13.844388961791992],["ສາວ",-13.844395637512209],["▁podstawowe",-13.8444185256958],["▁obvod",-13.844452857971191],["חברות",-13.844478607177734],["▁Informace",-13.844478607177734],["▁Цветан",-13.84447956085205],["▁administratie",-13.844480514526367],["▁생각을",-13.844480514526367],["▁دفن",-13.844491004943848],["▁ಹುಡುಗಿ",-13.844491958618164],["▁Janez",-13.84451675415039],["電気",-13.844536781311035],["gunean",-13.844537734985352],["▁olanak",-13.844539642333984],["▁ძიება",-13.844545364379885],["direktor",-13.84455108642578],["niecība",-13.844562530517578],["адносіны",-13.844563484191896],["▁tercers",-13.844566345214844],["▁ടെക്",-13.844568252563477],["▁இத",-13.844579696655272],["▁werklik",-13.844606399536133],["▁افشا",-13.84462547302246],["▁المدن",-13.844626426696776],["▁чије",-13.844654083251951],["▁सचिन",-13.844654083251951],["tinget",-13.844681739807127],["▁жарык",-13.844694137573242],["▁berkurang",-13.844703674316406],["▁поља",-13.844703674316406],["фэ",-13.84470558166504],["▁emner",-13.844707489013672],["効果を",-13.844727516174316],["ไน",-13.844728469848633],["ktsiooni",-13.84473991394043],["sąd",-13.844749450683594],["▁സമാ",-13.84476375579834],["ligiga",-13.844770431518556],["Електро",-13.84479522705078],["publi",-13.84481430053711],["▁குழ",-13.844820976257324],["2025",-13.84483242034912],["▁пиво",-13.84483814239502],["▁kohvi",-13.844839096069336],["▁bagage",-13.844857215881348],["▁Арсен",-13.844881057739258],["▁Huma",-13.844884872436523],["▁gedig",-13.84489917755127],["喜爱",-13.8449125289917],["▁prossimi",-13.844915390014648],["▁kansallis",-13.844928741455078],["▁opció",-13.844932556152344],["▁खाल",-13.844938278198242],["ઠી",-13.844940185546877],["କର୍ମ",-13.84505558013916],["လုံ",-13.84506893157959],["kulam",-13.845069885253906],["▁estética",-13.845078468322754],["▁Zahlen",-13.845101356506348],["RID",-13.845114707946776],["▁konkretne",-13.845122337341309],["▁måter",-13.845131874084473],["αυτό",-13.845145225524902],["inātu",-13.84514617919922],["LEG",-13.845155715942385],["▁comezo",-13.845157623291016],["▁muammolar",-13.845172882080078],["▁žur",-13.845186233520508],["▁вопросу",-13.845207214355469],["իկը",-13.845218658447266],["draget",-13.84523582458496],["▁stærke",-13.845240592956545],["цами",-13.845252990722656],["▁stukje",-13.845261573791504],["▁היהודי",-13.845270156860352],["ថៃ",-13.845292091369627],["▁Spod",-13.845294952392578],["ספּ",-13.845311164855955],["▁compose",-13.84531307220459],["▁valstu",-13.84533977508545],["ájának",-13.845366477966309],["କରି",-13.845369338989258],["▁varasto",-13.845370292663574],["гру",-13.845377922058104],["СТВ",-13.845379829406738],["பின்",-13.845417022705078],["▁Skog",-13.845419883728027],["Ř",-13.84542179107666],["▁چاند",-13.845455169677734],["στική",-13.845466613769531],["▁Niha",-13.845491409301758],["▁ordentligt",-13.845494270
324709],["спер",-13.84549617767334],["ười",-13.845499992370604],["▁ծրագրերի",-13.845510482788086],["ስልጣን",-13.845514297485352],["▁zkušenost",-13.845520973205566],["Off",-13.84554958343506],["▁scheme",-13.845551490783691],["▁kasvaa",-13.845559120178224],["▁веры",-13.84556770324707],["这条",-13.845571517944336],["▁détail",-13.845599174499512],["▁Kaina",-13.845600128173828],["ÉSI",-13.84560489654541],["জের",-13.845608711242676],["▁dodaje",-13.845617294311523],["拓",-13.84561824798584],["▁attribut",-13.845635414123535],["▁herpes",-13.845641136169434],["▁طراح",-13.84564208984375],["▁құрылымы",-13.845669746398926],["▁మొద",-13.845670700073242],["▁labour",-13.845685005187988],["нууд",-13.84568691253662],["▁Lyric",-13.84571361541748],["hân",-13.845722198486328],["హార్",-13.845727920532228],["▁լրագրողների",-13.845739364624023],["чие",-13.84575653076172],["▁зник",-13.845759391784668],["▁চুল",-13.84578800201416],["건강",-13.845797538757324],["▁গ্র",-13.845809936523438],["▁violet",-13.845826148986816],["纲",-13.845861434936523],["已经有",-13.845865249633787],["侦",-13.845881462097168],["▁хүлээж",-13.845882415771484],["紧密",-13.845890045166016],["▁Дес",-13.845892906188965],["ಬಲ",-13.845897674560549],["杏",-13.845897674560549],["▁나오는",-13.845902442932127],["蚊",-13.845902442932127],["▁pěti",-13.845924377441406],["大哥",-13.845927238464355],["▁közvetlen",-13.845929145812988],["あるいは",-13.845934867858888],["爛",-13.845942497253418],["ሒ",-13.845951080322266],["Ợ",-13.845951080322266],["▁Demokrasi",-13.845951080322266],["▁Shtetit",-13.845951080322266],["▁cependant",-13.845951080322266],["▁dėmesio",-13.845951080322266],["▁entweder",-13.845951080322266],["▁každodenn",-13.845951080322266],["▁lähinnä",-13.845951080322266],["▁mươi",-13.845951080322266],["▁qoşun",-13.845951080322266],["▁vârstă",-13.845951080322266],["▁zvířat",-13.845951080322266],["▁áldozat",-13.845951080322266],["▁випадків",-13.845951080322266],["▁сэдэв",-13.845951080322266],["▁צווישן",-13.845951080322266],["▁اینستاگرام",-13.845951080322266],["▁স্থানীয়",-13.845951080322266],["▁ତଥ୍ୟ",-13.845951080322266],["▁ନିଯୁକ୍ତି",-13.845951080322266],["▁టైటిల్",-13.845951080322266],["▁බටහිර",-13.845951080322266],["▁캐릭터",-13.845951080322266],["▁Catedral",-13.845952033996582],["▁Schrift",-13.845952033996582],["▁жіноч",-13.845952033996582],["▁պայմանավորված",-13.845952033996582],["▁توزيع",-13.845952033996582],["▁قۇرۇلۇشى",-13.845952033996582],["▁जाहीर",-13.845952033996582],["▁přírodní",-13.845952987670898],["▁חיצוני",-13.845953941345217],["▁سامهون",-13.845953941345217],["▁mengenakan",-13.845955848693848],["▁signifas",-13.845955848693848],["▁pavisam",-13.845956802368164],["▁infrastructure",-13.84595775604248],["▁кеңейт",-13.845958709716797],["▁конференции",-13.845958709716797],["▁մատուց",-13.845958709716797],["▁හඳුනා",-13.845958709716797],["数は",-13.845958709716797],["▁americani",-13.845959663391112],["▁наявність",-13.845959663391112],["liikenteen",-13.84596061706543],["ញ៉",-13.84596061706543],["▁влијание",-13.845961570739746],["▁डलर",-13.845961570739746],["▁കൂടാതെ",-13.84596347808838],["▁ყოფილი",-13.84596347808838],["▁svaku",-13.845966339111328],["▁বিশ্বাস",-13.845966339111328],["▁tanë",-13.84597110748291],["▁columna",-13.845973014831545],["▁Spanyol",-13.84597396850586],["▁دریایی",-13.84597396850586],["▁आपूर्ति",-13.845974922180176],["▁Ярослав",-13.845975875854492],["▁cholesterol",-13.845976829528809],["▁національної",-13.845977783203123],["ត្រង់",-13.845979690551758],["▁Durrës",-13.845982551574709],["▁සුළු",-13.845982551574709],["▁Mjini",-13.84598445892334],["▁Pärast",-13.84598827
3620604],["▁ιστορι",-13.845991134643556],["▁ihtimal",-13.845993995666504],["▁ପ୍ରକାର",-13.845998764038086],["▁Mirror",-13.845999717712402],["▁بېرىپ",-13.846004486083984],["▁සමාන",-13.8460054397583],["▁berumur",-13.84600830078125],["▁Erste",-13.846012115478516],["▁faisait",-13.846012115478516],["▁उचित",-13.846014022827148],["teritori",-13.846017837524414],["▁spraoi",-13.846019744873049],["▁그럼",-13.846019744873049],["▁nyilván",-13.846035957336426],["▁избежать",-13.846049308776855],["▁მხ",-13.84605598449707],["▁ଅଛନ୍ତି",-13.846063613891602],["▁berdoa",-13.846065521240234],["▁tulnud",-13.846068382263184],["▁بصورة",-13.846071243286133],["▁ambele",-13.84608554840088],["dák",-13.846092224121094],["▁සිද්ධ",-13.846102714538574],["Band",-13.84610366821289],["▁Güneş",-13.846108436584473],["▁التحالف",-13.846108436584473],["▁لجميع",-13.84611988067627],["▁өтеді",-13.846136093139648],["escent",-13.846145629882812],["버튼",-13.846147537231444],["▁Jump",-13.846153259277344],["Gri",-13.846156120300291],["아시아",-13.846161842346191],["▁Feuer",-13.846165657043455],["▁Situat",-13.846168518066406],["რეალ",-13.84617042541504],["▁کیخلاف",-13.846177101135254],["▁mieru",-13.84617805480957],["▁búið",-13.846179962158203],["▁ритуал",-13.846183776855469],["▁Усё",-13.846185684204102],["▁bədən",-13.84618854522705],["доволни",-13.846189498901367],["▁bevind",-13.846193313598633],["▁hook",-13.846196174621582],["▁Boże",-13.846199989318848],["▁बनाई",-13.846202850341797],["ਾਊ",-13.846203804016112],["kajiem",-13.84622573852539],["▁выполнять",-13.846231460571287],["▁Viktig",-13.846235275268556],["▁راستہ",-13.846237182617188],["▁በሚያ",-13.84623908996582],["▁pomidor",-13.846270561218262],["▁Irene",-13.846285820007324],["▁Weight",-13.846287727355955],["▁преп",-13.84628963470459],["▁logistic",-13.846290588378906],["▁برده",-13.846323013305664],["▁tanish",-13.84632968902588],["ത്താന്",-13.846330642700195],["▁страхування",-13.846338272094728],["▁sestavin",-13.846341133117676],["סיק",-13.84634494781494],["исање",-13.846353530883787],["▁депутатів",-13.846353530883787],["▁využit",-13.846357345581056],["黃色",-13.84636688232422],["▁Heidi",-13.8463716506958],["▁المعلم",-13.84640407562256],["▁Funktionen",-13.84641456604004],["▁preţ",-13.846416473388672],["だったら",-13.846417427062988],["эз",-13.846449851989746],["▁gjërat",-13.846457481384276],["▁समितिले",-13.846460342407228],["gegaan",-13.846463203430176],["▁bối",-13.846475601196287],["▁observation",-13.846482276916504],["▁spacer",-13.84648323059082],["клик",-13.84650421142578],["▁Tusen",-13.846511840820312],["vnim",-13.846531867980955],["▁waziri",-13.8465576171875],["▁явуулах",-13.84656047821045],["gorri",-13.846563339233398],["▁grundig",-13.84660530090332],["čenju",-13.846609115600586],["ที่ตั้ง",-13.846620559692385],["Amb",-13.846623420715332],["▁vremenu",-13.846627235412598],["غور",-13.846638679504396],["▁naturalny",-13.84664821624756],["▁yalnızca",-13.846650123596191],["▁Efek",-13.846665382385254],["▁sỹ",-13.846665382385254],["▁нарушен",-13.846670150756836],["▁mikla",-13.8466796875],["SEM",-13.84668254852295],["แจก",-13.846686363220217],["ლანდი",-13.84670066833496],["▁ֆուտբոլիստ",-13.846715927124023],["▁Lähetä",-13.84673023223877],["acháin",-13.846731185913086],["▁спраў",-13.8467435836792],["▁pabrik",-13.846768379211426],["ล้ํา",-13.84681797027588],["▁Copii",-13.846819877624512],["κον",-13.846824645996094],["▁evaku",-13.846826553344728],["发现了",-13.846842765808104],["▁protokoll",-13.846845626831056],["ത്തെയും",-13.846848487854004],["▁Одеськ",-13.846858024597168],["نجر",-13.846872329711914],["ઓના",-13
.846879959106444],["▁Доста",-13.84688663482666],["เสี่ยง",-13.846943855285645],["▁bwyd",-13.84695053100586],["នៃការ",-13.846969604492188],["bf",-13.84698486328125],["▁بردار",-13.84699249267578],["▁dvorani",-13.847002029418944],["新能源",-13.847017288208008],["วางแผน",-13.847018241882324],["人気の",-13.84702491760254],["łaś",-13.847053527832031],["▁jokainen",-13.847053527832031],["effet",-13.847064018249512],["خول",-13.847065925598145],["参加者",-13.84706687927246],["看不到",-13.847070693969728],["قع",-13.84708309173584],["▁Aroma",-13.847084045410156],["ການຮ່ວມມື",-13.847089767456056],["▁klam",-13.847100257873535],["▁údaj",-13.847103118896484],["▁ՀԱՅ",-13.847124099731444],["▁PDP",-13.84713649749756],["▁Møte",-13.847155570983888],["ွတ်",-13.84717082977295],["කට්",-13.847175598144531],["▁पढ्न",-13.84717845916748],["heir",-13.847187995910645],["▁válik",-13.847189903259276],["▁καλο",-13.847212791442873],["▁methods",-13.847216606140137],["▁Magi",-13.847229957580566],["stanku",-13.847233772277832],["▁Profession",-13.847264289855955],["ထာ",-13.847282409667969],["▁sütő",-13.847293853759766],["▁једино",-13.847296714782717],["▁crash",-13.847302436828612],["flat",-13.84732151031494],["કારો",-13.847322463989258],["▁Bölümü",-13.84732437133789],["лыгы",-13.84734058380127],["mıyorum",-13.847344398498535],["▁Mikko",-13.8473482131958],["▁hæl",-13.847368240356444],["▁önde",-13.84739112854004],["▁දූ",-13.847400665283203],["▁Tenis",-13.847427368164062],["▁پکڑ",-13.847436904907228],["6,4",-13.847437858581545],["▁Verme",-13.847439765930176],["OVO",-13.847452163696287],["▁vertikal",-13.847455024719238],["แตกต่างกัน",-13.847460746765137],["bashi",-13.847477912902832],["iranjem",-13.847489356994627],["▁ажилд",-13.847522735595703],["▁πραγματ",-13.847537994384766],["рези",-13.847538948059082],["▁φίλο",-13.847546577453612],["▁сабор",-13.84755802154541],["をご利用",-13.84758186340332],["播出",-13.847620010375977],["▁pensé",-13.847626686096191],["სმა",-13.84762954711914],["▁ndikim",-13.84762954711914],["▁szüksége",-13.847646713256836],["▁intend",-13.847650527954102],["▁diretti",-13.847657203674316],["▁Puik",-13.847668647766112],["▁спада",-13.847684860229492],["dekstr",-13.847685813903809],["▁innsats",-13.847716331481934],["strašn",-13.847726821899414],["▁glatt",-13.847726821899414],["חנו",-13.847729682922363],["▁moć",-13.847735404968262],["▁käes",-13.847737312316896],["▁කඩා",-13.847738265991213],["stavitev",-13.847755432128906],["▁rodil",-13.847779273986816],["▁considerable",-13.847783088684082],["dobrovol",-13.847786903381348],["▁селян",-13.847810745239258],["ాడని",-13.847811698913574],["▁larger",-13.847819328308104],["▁Parang",-13.847851753234863],["лемент",-13.847859382629396],["▁байнг",-13.84786605834961],["▁jostain",-13.847867012023926],["▁მამაკაც",-13.84787654876709],["सेन",-13.84788990020752],["श्वर",-13.847894668579102],["ຖາ",-13.847925186157228],["▁떠나",-13.847941398620604],["▁eksterne",-13.847955703735352],["тараў",-13.847976684570312],["▁불법",-13.847977638244627],["świat",-13.847982406616213],["▁firmie",-13.84798526763916],["▁bề",-13.848000526428224],["▁Басты",-13.848023414611816],["▁Plast",-13.84805965423584],["▁Kuigi",-13.84807300567627],["▁prozor",-13.84808349609375],["ക്കട",-13.848092079162598],["▁informiert",-13.848098754882812],["▁4500",-13.84811782836914],["▁деңгейін",-13.848127365112305],["anssi",-13.848134994506836],["▁göl",-13.848136901855469],["▁hakları",-13.848167419433594],["▁ഭൂ",-13.84817123413086],["อป",-13.848199844360352],["阿根廷",-13.848219871520996],["▁көбі",-13.848237037658691],["▁znajd",-13.848240852355955],["ం
ధ",-13.848268508911133],["▁قىس",-13.848271369934082],["מבו",-13.848275184631348],["▁কয়েক",-13.848280906677246],["דס",-13.848292350769045],["▁فٹ",-13.848321914672852],["ਇਨ",-13.848323822021484],["▁historike",-13.848325729370115],["▁раста",-13.848332405090332],["URAN",-13.84833526611328],["ผลกระทบ",-13.848341941833496],["▁mamlaka",-13.848346710205078],["▁pomoću",-13.848359107971191],["帖",-13.848386764526367],["ництва",-13.848433494567873],["▁영향",-13.848435401916504],["▁அமெரிக்க",-13.848478317260742],["iteach",-13.848496437072754],["▁tájékoztatás",-13.848496437072754],["▁іноземн",-13.848504066467283],["▁održana",-13.84852695465088],["▁Tian",-13.848530769348145],["鍊",-13.848532676696776],["渔",-13.84853744506836],["厉害",-13.848538398742676],["▁स्वतंत्र",-13.848539352416992],["愚",-13.848543167114258],["▁техническо",-13.848544120788574],["紧急",-13.848555564880373],["交通事故",-13.84856414794922],["っぽ",-13.848576545715332],["クラブ",-13.848576545715332],["逗",-13.848578453063965],["406",-13.84857940673828],["▁Hiina",-13.84857940673828],["หลวงปู่",-13.848581314086914],["δέχεται",-13.84858226776123],["بۈگۈن",-13.84858226776123],["ជិះ",-13.84858226776123],["▁Atmosphäre",-13.84858226776123],["▁Geschmack",-13.84858226776123],["▁Kuvendit",-13.84858226776123],["▁Residence",-13.84858226776123],["▁Vorbereitung",-13.84858226776123],["▁cupidatat",-13.84858226776123],["▁dikwels",-13.84858226776123],["▁nodokļu",-13.84858226776123],["▁Šiaulių",-13.84858226776123],["▁επίπεδα",-13.84858226776123],["▁σίγουρα",-13.84858226776123],["▁Великобритания",-13.84858226776123],["▁знайсці",-13.84858226776123],["▁подтверждаю",-13.84858226776123],["▁געזאגט",-13.84858226776123],["▁ضعیف",-13.84858226776123],["▁बताइएको",-13.84858226776123],["▁व्यावसायिक",-13.84858226776123],["▁লাইফ",-13.84858226776123],["▁ਪਰਮਾਤਮਾ",-13.84858226776123],["▁ಕಳುಹಿಸ",-13.84858226776123],["لتأكيد",-13.848583221435549],["▁luajtur",-13.848583221435549],["▁објавува",-13.848583221435549],["▁ଚିନ୍ତା",-13.848583221435549],["▁gekozen",-13.848584175109863],["▁беларускую",-13.848584175109863],["▁compleet",-13.84858512878418],["▁medarbetare",-13.84858512878418],["▁አካላት",-13.848586082458496],["▁təhlükəsizlik",-13.848587036132812],["▁accessoires",-13.848587989807127],["▁падзеі",-13.848587989807127],["制裁",-13.848587989807127],["▁кітапхана",-13.848588943481444],["▁шығу",-13.848588943481444],["မြင့်",-13.848591804504396],["▁pikërisht",-13.848591804504396],["▁حیدر",-13.848591804504396],["▁କହିଥିଲେ",-13.848591804504396],["▁Menggunakan",-13.848592758178713],["▁očitno",-13.848592758178713],["草案",-13.84859561920166],["▁comportamiento",-13.848597526550291],["カップ",-13.84859848022461],["▁Oifig",-13.848600387573242],["▁ਅਨੁ",-13.848600387573242],["▁Priprav",-13.848602294921877],["tivni",-13.848608016967772],["▁ayrıntı",-13.84860897064209],["▁মাথা",-13.84860897064209],["▁ගණන්",-13.84860897064209],["▁არავინ",-13.84860897064209],["pärane",-13.848612785339355],["▁творчески",-13.848617553710938],["▁Mircea",-13.848618507385254],["▁түмэн",-13.84862232208252],["▁Sponsor",-13.848634719848633],["▁қанша",-13.84864330291748],["▁καθημερινά",-13.848647117614746],["▁сказано",-13.84864902496338],["▁제공합니다",-13.848650932312012],["▁надлежно",-13.848651885986328],["▁연속",-13.848655700683594],["▁snaaks",-13.84865665435791],["▁chcú",-13.848657608032228],["▁Boutique",-13.84865951538086],["▁Walau",-13.848660469055176],["▁јавне",-13.848663330078123],["▁referència",-13.848669052124023],["▁huống",-13.848671913146973],["▁першої",-13.848677635192873],["പൂർ",-13.848692893981934],["▁ամբողջական",-13.848702430725098],["▁ut
ilidad",-13.84870433807373],["▁የጋራ",-13.848710060119627],["сторож",-13.848718643188477],["▁ditzake",-13.84872341156006],["аналитик",-13.848725318908691],["▁automaticky",-13.848736763000488],["▁tudunk",-13.848742485046388],["▁Маш",-13.848745346069336],["▁војници",-13.848751068115234],["影视",-13.848767280578612],["▁запазва",-13.848779678344728],["初中",-13.848779678344728],["▁Ռա",-13.848794937133787],["▁Kysy",-13.84880256652832],["▁먹고",-13.84880542755127],["ערה",-13.848827362060549],["klap",-13.848834037780762],["▁geçiş",-13.848848342895508],["▁СВЕТ",-13.848868370056152],["ించాడు",-13.848891258239746],["▁deteriora",-13.848904609680176],["werten",-13.848913192749023],["▁прилично",-13.84891414642334],["相片",-13.848923683166504],["▁ചൈന",-13.848931312561035],["▁большие",-13.848933219909668],["▁prirodni",-13.8489408493042],["เบล",-13.848946571350098],["فراز",-13.848952293395996],["വാന",-13.848959922790527],["▁въздух",-13.848959922790527],["▁pogoje",-13.848962783813477],["▁godzinie",-13.848963737487791],["▁objavio",-13.848969459533691],["हाँ",-13.848973274230955],["▁Συγ",-13.848987579345703],["▁jāno",-13.848994255065918],["▁носят",-13.849011421203612],["hatják",-13.849020957946776],["▁fool",-13.849020957946776],["▁popullit",-13.849024772644045],["ผู้คน",-13.84902572631836],["▁csodá",-13.849038124084473],["produkte",-13.849059104919434],["▁fjölda",-13.849063873291016],["▁რუსული",-13.849072456359863],["四季",-13.849111557006836],["itheach",-13.849117279052734],["ແຕ່ລະ",-13.849135398864746],["▁personajes",-13.84914207458496],["▁ultim",-13.849148750305176],["▁המז",-13.849148750305176],["▁ఇచ్చి",-13.84915828704834],["▁съсед",-13.849160194396973],["hatan",-13.849177360534668],["▁الكتب",-13.849183082580566],["大多數",-13.849203109741213],["▁podido",-13.849206924438477],["▁kirjutas",-13.849213600158691],["kompani",-13.849220275878906],["▁sikkerheds",-13.849228858947754],["▁JUN",-13.84922981262207],["▁анализира",-13.849252700805664],["▁teosta",-13.849278450012209],["γί",-13.849337577819824],["▁дейността",-13.849340438842772],["▁ขณะที่",-13.84934425354004],["▁зогсоо",-13.849346160888672],["ıştır",-13.849373817443848],["しかない",-13.849410057067873],["EIS",-13.84942626953125],["對話",-13.849427223205566],["život",-13.84943389892578],["▁Melhor",-13.849443435668944],["시아",-13.849447250366213],["▁مفيد",-13.849452018737791],["▁vidio",-13.849453926086426],["▁köper",-13.849456787109377],["▁sprejema",-13.849457740783691],["▁ინდ",-13.84946632385254],["ƏN",-13.849468231201172],["براز",-13.849469184875488],["▁rendes",-13.84947109222412],["ంటుంది",-13.849479675292969],["▁بیشترین",-13.849485397338867],["ക്കുട്ടി",-13.84951400756836],["лэсэн",-13.84952163696289],["なのだ",-13.849522590637209],["▁betyg",-13.849529266357422],["▁arrangementer",-13.849538803100586],["▁pct",-13.849538803100586],["▁декоратив",-13.849601745605469],["പാത",-13.849602699279783],["▁Catalog",-13.84960651397705],["▁razne",-13.849609375],["▁formare",-13.849611282348633],["就没有",-13.849624633789062],["lipokuwa",-13.849627494812012],["▁Operator",-13.849628448486328],["▁fylla",-13.849675178527832],["▁զգաց",-13.849713325500488],["gniti",-13.849736213684082],["▁Lent",-13.849749565124512],["▁огне",-13.849774360656738],["▁olduğunuz",-13.849778175354004],["muuseumi",-13.849782943725586],["sopimuks",-13.849783897399902],["носити",-13.84980297088623],["lənən",-13.849806785583496],["▁النقل",-13.849807739257812],["▁vítimas",-13.849828720092772],["terror",-13.84984016418457],["▁क्षेत्रका",-13.849851608276367],["äkta",-13.849884033203123],["▁türü",-13.849884033203123],["▁черен",-13.8
4988784790039],["▁ይወ",-13.84990406036377],["▁вырас",-13.849905014038086],["▁funkcional",-13.849907875061035],["▁senyor",-13.849910736083984],["sprong",-13.84992218017578],["لۇ",-13.84992790222168],["▁personaj",-13.849935531616213],["ស្ទ",-13.849942207336426],["▁Root",-13.849946975708008],["▁Ման",-13.849985122680664],["ймыз",-13.849995613098145],["ଯୋଗ୍ୟ",-13.85001277923584],["शिया",-13.850018501281738],["▁странски",-13.8500337600708],["사와",-13.850085258483888],["大赛",-13.850089073181152],["▁Rasi",-13.850120544433594],["几十",-13.85013484954834],["第八",-13.850135803222656],["ాము",-13.850140571594238],["Стар",-13.850142478942873],["▁ټولې",-13.850159645080566],["▁práca",-13.8501615524292],["▁thellë",-13.850197792053224],["▁bulunur",-13.850211143493652],["bēr",-13.850225448608398],["▁byrja",-13.850231170654297],["▁kórház",-13.850261688232422],["▁piensa",-13.85031509399414],["▁skulder",-13.850329399108888],["กรอง",-13.850333213806152],["▁ይቀ",-13.850337028503418],["تخلص",-13.850351333618164],["▁rekli",-13.85036563873291],["▁skub",-13.850367546081545],["дром",-13.850377082824709],["▁ostaje",-13.850385665893556],["▁බහු",-13.85038948059082],["▁gråt",-13.850393295288086],["▁vrem",-13.850411415100098],["▁piilo",-13.850415229797363],["▁sistemleri",-13.850420951843262],["▁ოთახ",-13.850430488586426],["tēm",-13.85043716430664],["▁اشار",-13.850438117980955],["φιλ",-13.85044002532959],["▁käis",-13.850449562072754],["▁nastala",-13.850469589233398],["▁медицинска",-13.850492477416992],["SÁG",-13.850493431091309],["自从",-13.850504875183104],["ພົວພັນ",-13.850512504577637],["njakan",-13.85052490234375],["▁liever",-13.850542068481444],["▁ବିମାନ",-13.850557327270508],["▁apareix",-13.85055923461914],["▁vaatii",-13.850579261779783],["▁klem",-13.850586891174316],["loob",-13.850594520568848],["年金",-13.850594520568848],["▁būklės",-13.85061264038086],["后果",-13.850647926330566],["▁pocket",-13.850650787353516],["8,5",-13.850655555725098],["▁skupini",-13.850658416748049],["▁formulár",-13.85070514678955],["ਦੀਆਂ",-13.850711822509766],["▁හොඳින්",-13.850716590881348],["▁Trik",-13.850719451904297],["▁חלב",-13.850723266601562],["▁Español",-13.850735664367676],["▁የተቀ",-13.85074234008789],["▁ελλην",-13.850760459899902],["▁карте",-13.85079574584961],["▁meddai",-13.850829124450684],["itheoir",-13.850833892822266],["▁filmul",-13.850873947143556],["▁припада",-13.850878715515137],["júceho",-13.85090160369873],["▁misal",-13.850923538208008],["▁zmiana",-13.850980758666992],["▁shekara",-13.850996971130373],["programmer",-13.851012229919434],["▁phir",-13.851014137268066],["▁важност",-13.851016998291016],["▁Крај",-13.85104274749756],["▁بيشتر",-13.851067543029783],["▁ಕಳ",-13.851080894470217],["▁частично",-13.851094245910645],["▁şəraiti",-13.851097106933594],["▁cumpara",-13.85109806060791],["▁apna",-13.851102828979492],["КВ",-13.85110855102539],["▁bilete",-13.851197242736816],["▁අනතුර",-13.851203918457031],["鴻",-13.85120964050293],["沖縄",-13.85121250152588],["วิทยาลัย",-13.851219177246094],["ยุทธ",-13.85122013092041],["สิงห์",-13.851221084594728],["ሰብዓዊ",-13.851221084594728],["▁Használ",-13.851221084594728],["▁Técnica",-13.851221084594728],["▁Uutiset",-13.851221084594728],["▁depósito",-13.851221084594728],["▁getroffen",-13.851221084594728],["▁nokkuð",-13.851221084594728],["▁penilaian",-13.851221084594728],["▁piattaforma",-13.851221084594728],["▁pozrieť",-13.851221084594728],["▁préparation",-13.851221084594728],["▁περιεχόμενο",-13.851221084594728],["▁προγράμματος",-13.851221084594728],["▁гульню",-13.851221084594728],["▁нараджэння",-13.851221084594728],
["▁приобрести",-13.851221084594728],["▁төрағасы",-13.851221084594728],["▁Հոդված",-13.851221084594728],["▁اضلاع",-13.851221084594728],["▁রাজনীতি",-13.851221084594728],["▁దెబ్బ",-13.851221084594728],["▁రూపాయల",-13.851221084594728],["▁കൊലപാതക",-13.851221084594728],["▁වෙළෙඳ",-13.851221084594728],["▁ပြောပါတယ်။",-13.851221084594728],["▁აგრეთვე",-13.851221084594728],["▁საინფორმაციო",-13.851221084594728],["▁dinyatakan",-13.851222038269045],["▁liczby",-13.851222038269045],["▁Многу",-13.851222038269045],["▁ترافیک",-13.851222038269045],["▁Kardashian",-13.85122299194336],["▁atgriez",-13.85122299194336],["▁erakusten",-13.85122299194336],["▁țară",-13.85122299194336],["▁аукцион",-13.85122299194336],["▁فیلتر",-13.851224899291992],["▁izdelek",-13.851225852966309],["▁हेर्नुहोस्",-13.851226806640623],["ย่อย",-13.85122776031494],["gyilkos",-13.851228713989258],["▁recorregut",-13.851228713989258],["▁cottura",-13.851231575012209],["▁pagkakataon",-13.851231575012209],["▁puutarha",-13.851231575012209],["ၼ်",-13.851232528686523],["▁darbinieku",-13.85123348236084],["▁vanhempi",-13.85123348236084],["▁succede",-13.851235389709473],["▁дефицит",-13.851235389709473],["▁ہوئے۔",-13.851236343383787],["▁orzamento",-13.851237297058104],["▁ubytovanie",-13.851237297058104],["▁περνά",-13.851237297058104],["▁ಪ್ರದರ್ಶನ",-13.851237297058104],["▁amper",-13.851238250732422],["▁જોઇએ",-13.851239204406738],["ข้อตกลง",-13.851242065429688],["▁жетекші",-13.851243019104004],["▁കുറെ",-13.851243019104004],["ຂ້າງ",-13.851244926452637],["▁kekayaan",-13.851245880126951],["▁tutulub",-13.851248741149902],["▁намеснік",-13.851253509521484],["▁qorunması",-13.851255416870115],["រៀន",-13.851258277893066],["▁ताकि",-13.851258277893066],["▁anywhere",-13.851261138916016],["▁frykt",-13.851265907287598],["▁liecina",-13.851265907287598],["▁අන්ත",-13.851266860961914],["▁Sähköposti",-13.85126781463623],["▁편안",-13.851269721984863],["▁העור",-13.85127067565918],["▁beliebte",-13.851272583007812],["หรือเปล่า",-13.85128116607666],["▁orqanı",-13.85128116607666],["▁Should",-13.851285934448242],["▁образовања",-13.851289749145508],["▁შორ",-13.851306915283203],["испол",-13.85130786895752],["▁Պատ",-13.85132122039795],["▁չեք",-13.851325988769531],["▁съчета",-13.851326942443848],["▁mosoly",-13.85132884979248],["▁cynnal",-13.85133171081543],["▁بتواند",-13.851332664489746],["▁కౌ",-13.851335525512695],["▁आफूलाई",-13.851344108581545],["▁تحسين",-13.851346015930176],["paikkoja",-13.851346969604492],["▁ræ",-13.851360321044922],["▁בעצם",-13.851360321044922],["ړل",-13.851362228393556],["▁changing",-13.851381301879885],["▁mewah",-13.851397514343262],["▁pavasari",-13.851402282714844],["▁Talous",-13.851404190063477],["ifying",-13.851408004760742],["▁зүйлийг",-13.851408004760742],["▁İsmayıl",-13.851436614990234],["dijk",-13.851444244384766],["▁ظفر",-13.85147476196289],["▁marknads",-13.85147762298584],["▁Stát",-13.851479530334473],["▁ضربه",-13.851483345031738],["▁мега",-13.85149383544922],["▁participou",-13.851510047912598],["termelés",-13.851511001586914],["KAH",-13.851522445678713],["▁üblich",-13.85152530670166],["▁vsakem",-13.851527214050291],["4.0",-13.851530075073242],["ສວນ",-13.85154914855957],["▁앞서",-13.851550102233888],["各國",-13.851569175720217],["mérnök",-13.851579666137695],["▁LIFE",-13.851582527160645],["▁изработка",-13.851584434509276],["2022",-13.851614952087402],["▁jocul",-13.851645469665527],["驱",-13.85167407989502],["▁cărui",-13.851675987243652],["▁üksik",-13.851675987243652],["▁tabla",-13.85168170928955],["▁الرياضية",-13.851691246032717],["▁Вода",-13.851694107055664],["▁Se
neste",-13.851701736450195],["Vikipedi",-13.851703643798828],["▁Assembl",-13.851727485656738],["▁çizgi",-13.851740837097168],["ሥር",-13.851741790771484],["▁тобу",-13.851750373840332],["▁عصا",-13.85175323486328],["ipotesi",-13.851754188537598],["Unione",-13.85175609588623],["内容を",-13.851757049560549],["▁växa",-13.85179615020752],["BUD",-13.85182285308838],["▁என்கிற",-13.851842880249023],["korraldus",-13.85184383392334],["▁அவர",-13.851871490478516],["▁مٿ",-13.851874351501465],["๊อก",-13.85187530517578],["▁Zdaj",-13.85187816619873],["特殊的",-13.851884841918944],["っていない",-13.851887702941896],["ционные",-13.851899147033691],["▁քաղաքացիների",-13.851901054382324],["▁издање",-13.851916313171388],["平凡",-13.851922988891602],["વાં",-13.851930618286133],["▁იარაღი",-13.85193157196045],["▁koridor",-13.851941108703612],["sugli",-13.851946830749512],["ฤดู",-13.85196018218994],["▁Clement",-13.851969718933104],["▁Sunny",-13.852021217346191],["▁представление",-13.85202980041504],["ტით",-13.852033615112305],["▁hodie",-13.852041244506836],["▁informacione",-13.85205364227295],["φέ",-13.852083206176758],["▁Liep",-13.852103233337402],["шћа",-13.852107048034668],["▁महिन्या",-13.8521089553833],["▁ERR",-13.85211181640625],["മാരും",-13.85212230682373],["▁genade",-13.852131843566896],["ショー",-13.852133750915527],["1944",-13.85215950012207],["▁مړ",-13.852165222167969],["▁अा",-13.85216999053955],["대전",-13.852171897888184],["stempel",-13.852203369140623],["▁vriendin",-13.85220432281494],["▁yaptık",-13.852210998535156],["▁ingyen",-13.852214813232422],["サイトを",-13.852242469787598],["সম",-13.852274894714355],["вают",-13.852291107177734],["▁vsega",-13.852294921875],["ĥa",-13.852343559265137],["akkam",-13.852347373962402],["दर्शक",-13.852359771728516],["▁solicitudes",-13.85237979888916],["大国",-13.85239601135254],["נעל",-13.852418899536133],["▁Evalua",-13.852426528930664],["▁மகா",-13.852435111999512],["Brabant",-13.852437019348145],["▁morta",-13.852439880371094],["ПД",-13.85244846343994],["▁statul",-13.852458953857422],["わせ",-13.852469444274902],["▁Puig",-13.852479934692385],["▁Cantik",-13.852484703063965],["▁ششم",-13.852490425109863],["▁maman",-13.85251235961914],["▁cetera",-13.852523803710938],["▁Hashim",-13.852540016174316],["ányok",-13.852545738220217],["▁exibi",-13.852561950683594],["teatteri",-13.852563858032228],["ർച്ച",-13.852595329284668],["▁법적",-13.852628707885742],["▁адносін",-13.85262966156006],["▁Considera",-13.852631568908691],["▁Akbar",-13.852633476257324],["ंमध्ये",-13.852642059326172],["▁dentist",-13.852652549743652],["▁ಆಡ",-13.852660179138184],["▁Canta",-13.852664947509766],["цова",-13.852667808532717],["ியுள்ளது",-13.852691650390623],["▁muže",-13.85269260406494],["▁yapıldığı",-13.852706909179688],["心靈",-13.852718353271484],["▁1881",-13.85272216796875],["▁Котор",-13.852726936340332],["quvchilar",-13.852747917175291],["▁সাম",-13.852752685546877],["ПІ",-13.852754592895508],["▁главы",-13.85276222229004],["▁Studenten",-13.85279941558838],["▁војно",-13.85280418395996],["▁graden",-13.852816581726074],["▁Kommentare",-13.852853775024414],["▁paragraph",-13.852866172790527],["▁Magnet",-13.852871894836426],["ANTO",-13.852916717529297],["が表示され",-13.852917671203612],["▁ähnlich",-13.852923393249512],["▁یك",-13.852977752685549],["GPS",-13.852980613708496],["Ну",-13.852984428405762],["▁kamata",-13.852986335754396],["▁हिर",-13.852991104125977],["▁1892",-13.85300064086914],["▁अरुण",-13.853026390075684],["родны",-13.853032112121582],["▁lindur",-13.853099822998049],["麵包",-13.853108406066896],["ण्ट",-13.8531494140625],["▁trazer",-13.85
3155136108398],["ニア",-13.85316276550293],["ಬಂದ",-13.853163719177246],["▁toimivat",-13.85317325592041],["▁കല്ല",-13.853200912475586],["▁রোগ",-13.85321807861328],["▁ආකාරයට",-13.853219032287598],["▁ඇඳ",-13.85323715209961],["ስቱ",-13.853242874145508],["▁அவர்கள",-13.853246688842772],["▁komast",-13.85325050354004],["▁പേജ",-13.853256225585938],["ζω",-13.853266716003418],["▁Giriş",-13.85329818725586],["ಜರ್",-13.853301048278809],["▁âgé",-13.853301048278809],["מוק",-13.853328704833984],["▁Common",-13.853333473205566],["ልስ",-13.853363990783691],["▁Bengali",-13.85336971282959],["▁escapar",-13.853384017944336],["▁જમ",-13.853387832641602],["▁Venäjä",-13.853405952453612],["▁Horse",-13.85341739654541],["▁Schreiben",-13.853428840637209],["9.2",-13.853431701660156],["ливий",-13.853436470031738],["▁kontuan",-13.853438377380373],["▁δέν",-13.8534517288208],["▁Create",-13.8534574508667],["kovic",-13.853461265563965],["▁գործերի",-13.853464126586914],["როთ",-13.853471755981444],["явата",-13.853472709655762],["1600",-13.85350513458252],["▁pamje",-13.853508949279783],["κάρ",-13.853516578674316],["____",-13.853517532348633],["пив",-13.853520393371582],["▁HDD",-13.853559494018556],["这份",-13.853567123413086],["iramo",-13.853570938110352],["सिंग",-13.853572845458984],["법을",-13.853590965270996],["▁попита",-13.853592872619627],["encja",-13.853601455688477],["▁մեկի",-13.853602409362791],["▁уурхай",-13.8536376953125],["▁3-6",-13.853639602661133],["▁skaistu",-13.853643417358398],["AFA",-13.853663444519045],["▁দেবে",-13.853666305541992],["▁permiso",-13.853680610656738],["gunea",-13.85369873046875],["▁Fél",-13.853708267211914],["▁Vella",-13.85372543334961],["百姓",-13.853750228881836],["▁internațional",-13.853752136230469],["▁kosmos",-13.853760719299316],["邓",-13.85379409790039],["ದ್ದೇ",-13.853800773620604],["▁GTA",-13.853814125061035],["歇",-13.853816986083984],["登录",-13.853819847106934],["▁postingan",-13.853822708129885],["чными",-13.853827476501465],["ækket",-13.853838920593262],["▁նվազ",-13.853864669799805],["ฤทธิ์",-13.85386562347412],["ၶ",-13.85386562347412],["ḷ",-13.853866577148438],["▁Amharic",-13.853866577148438],["▁Filosofi",-13.853866577148438],["▁Gyermek",-13.853866577148438],["▁Noticias",-13.853866577148438],["▁Zároveň",-13.853866577148438],["▁campeonato",-13.853866577148438],["▁eventuelt",-13.853866577148438],["▁funzionalità",-13.853866577148438],["▁kryesisht",-13.853866577148438],["▁menampilkan",-13.853866577148438],["▁plötzlich",-13.853866577148438],["▁rzeczywistości",-13.853866577148438],["▁samræmi",-13.853866577148438],["▁síntomas",-13.853866577148438],["▁αγώνες",-13.853866577148438],["▁εμφάνιση",-13.853866577148438],["▁Дър",-13.853866577148438],["▁Някои",-13.853866577148438],["▁ընդգրկ",-13.853866577148438],["▁ייחודי",-13.853866577148438],["▁ابراهیم",-13.853866577148438],["▁جسٽس",-13.853866577148438],["▁نړيوال",-13.853866577148438],["▁ਪਿਛਲੇ",-13.853866577148438],["▁రికార్డు",-13.853866577148438],["▁Advokat",-13.853867530822754],["▁gespannt",-13.853867530822754],["▁oomblik",-13.853867530822754],["▁ເຊຍ",-13.853867530822754],["▁품질",-13.853867530822754],["ჯილდო",-13.85386848449707],["▁оружие",-13.85386848449707],["▁fizeram",-13.853870391845703],["▁gekauft",-13.853870391845703],["▁manajemen",-13.853870391845703],["แอนด์",-13.85387134552002],["▁όποιο",-13.85387134552002],["▁සලක",-13.85387134552002],["▁Dresden",-13.853873252868652],["▁ଖାଇବା",-13.853873252868652],["▁suprafata",-13.853874206542969],["▁ਆਦਿ",-13.853878021240234],["▁susisiek",-13.85387897491455],["▁చైనా",-13.853879928588867],["(!)",-13.853880882263184],["▁הש
אלה",-13.8538818359375],["▁ମିଳିଛି",-13.8538818359375],["▁Hardcore",-13.853882789611816],["▁skończy",-13.853883743286133],["异常",-13.853885650634766],["▁podobný",-13.853891372680664],["▁inclusief",-13.853894233703612],["▁iktidar",-13.853897094726562],["▁Melalui",-13.853900909423828],["▁кирип",-13.853900909423828],["▁другачиј",-13.853904724121094],["▁dramaturg",-13.853906631469728],["▁അദ്ദേഹത്തിന്റെ",-13.853907585144045],["özü",-13.853909492492676],["▁কিভাবে",-13.853909492492676],["▁Fiske",-13.853910446166992],["▁binatang",-13.85391616821289],["▁ଭାଇ",-13.853917121887209],["▁ukurasa",-13.853920936584473],["▁मासिक",-13.853926658630373],["▁шлюб",-13.853931427001951],["திர்",-13.85393238067627],["▁прогресс",-13.85393238067627],["▁perfettamente",-13.853934288024902],["కృత",-13.853938102722168],["視野",-13.8539400100708],["▁لديك",-13.853940963745115],["mieji",-13.853943824768066],["▁oveľa",-13.853949546813965],["ηκε",-13.853952407836914],["▁obeća",-13.853960990905762],["▁Terkini",-13.85396671295166],["行政院",-13.85397720336914],["▁Lukas",-13.85398292541504],["▁κάποιοι",-13.853988647460938],["▁Marzo",-13.853998184204102],["iridh",-13.853999137878418],["▁الأجهزة",-13.85400104522705],["▁Habib",-13.85400390625],["▁qoşul",-13.85400390625],["▁Πρόεδρος",-13.85400390625],["▁вибух",-13.854005813598633],["ोत्सव",-13.854009628295898],["▁задатак",-13.854012489318848],["▁위해서는",-13.854012489318848],["▁δομ",-13.85401439666748],["▁blizini",-13.854015350341797],["▁ritme",-13.85401725769043],["▁политическа",-13.854029655456545],["▁gcás",-13.854034423828123],["▁ଉଚିତ",-13.854034423828123],["▁chuma",-13.854036331176758],["▁የመጀመሪያ",-13.854040145874023],["▁eiusdem",-13.854043960571287],["彩色",-13.854053497314451],["▁വച്ച്",-13.85405731201172],["ക്കുകയും",-13.854058265686035],["▁тяхното",-13.854063034057615],["▁Atlants",-13.854071617126465],["▁Armen",-13.854076385498049],["▁знайде",-13.854077339172363],["ורא",-13.854090690612791],["▁সহকারী",-13.85409164428711],["▁berangkat",-13.854100227355955],["্যান্ড",-13.854101181030272],["्ग",-13.854113578796388],["▁Davies",-13.854121208190918],["▁ایالت",-13.854126930236816],["μενα",-13.854137420654297],["告诉我",-13.854141235351562],["▁rostlin",-13.854171752929688],["▁पूज",-13.8541841506958],["▁oikeu",-13.854192733764648],["라이프",-13.85419464111328],["▁તને",-13.854204177856444],["勞工",-13.854215621948242],["ുന്നവര്",-13.85421657562256],["▁Twój",-13.85421657562256],["▁tretë",-13.85421657562256],["세를",-13.854223251342772],["▁процедуры",-13.854249954223633],["ceanu",-13.854254722595217],["▁willkommen",-13.854263305664062],["▁Lengkap",-13.854275703430176],["▁kucheza",-13.85427951812744],["▁mažai",-13.854280471801758],["TARI",-13.85429859161377],["▁vertreten",-13.854317665100098],["UKAN",-13.854327201843262],["РЫ",-13.85434913635254],["▁Española",-13.85435390472412],["▁büdcəsi",-13.85435390472412],["ികളുടെ",-13.85435676574707],["▁ኮን",-13.854358673095703],["قين",-13.854362487792969],["▁историјата",-13.854366302490234],["▁shaka",-13.85438632965088],["▁prevale",-13.85440444946289],["fusión",-13.854414939880373],["▁essendo",-13.854430198669434],["▁Half",-13.854459762573242],["▁등으로",-13.85446071624756],["▁singură",-13.854467391967772],["▁Scoil",-13.854472160339355],["▁vermell",-13.854482650756836],["▁اكت",-13.854485511779783],["▁Пав",-13.854496002197266],["төрдү",-13.854499816894531],["პორ",-13.85450839996338],["▁észak",-13.85451316833496],["▁ہلاکت",-13.854520797729492],["▁ത്തന",-13.854522705078123],["過了",-13.854524612426758],["impatto",-13.854528427124023],["▁planı",-13.85453987121582],["▁سراغ",-13.854
540824890137],["▁لیکنې",-13.854548454284668],["▁cavi",-13.854551315307615],["អន្តរជាតិ",-13.85456657409668],["ਬਾਰ",-13.854571342468262],["▁inače",-13.854609489440918],["这里的",-13.854620933532717],["▁trompe",-13.85462760925293],["คิว",-13.854631423950195],["▁уважа",-13.854643821716309],["161",-13.85464572906494],["▁установленном",-13.854653358459473],["ハー",-13.854666709899902],["▁싶어",-13.854671478271484],["▁serija",-13.8546781539917],["▁המפ",-13.85470199584961],["▁kultuur",-13.854721069335938],["ДҮ",-13.854738235473633],["achais",-13.85477352142334],["▁llamar",-13.854780197143556],["▁finanzia",-13.85478687286377],["치료",-13.854846954345703],["▁komentáre",-13.85484790802002],["▁կառավար",-13.854866027832031],["▁(44)",-13.854899406433104],["▁účtov",-13.854915618896484],["▁paitsi",-13.854934692382812],["▁creativa",-13.854951858520508],["▁Хер",-13.85496711730957],["ନୋ",-13.854968070983888],["▁fiara",-13.854972839355469],["▁waxaanu",-13.854974746704102],["▁способен",-13.85497760772705],["střed",-13.854987144470217],["▁reliqui",-13.854999542236328],["مانع",-13.85500144958496],["Француск",-13.855008125305176],["▁described",-13.855016708374023],["▁новац",-13.855047225952148],["▁określa",-13.855055809020996],["▁проблеме",-13.855069160461426],["ინს",-13.855070114135742],["เมา",-13.855088233947754],["▁траже",-13.85508918762207],["ընկալ",-13.855097770690918],["kaava",-13.855117797851562],["1976",-13.855128288269045],["▁پراخ",-13.855134963989258],["就开始",-13.855142593383787],["পাত",-13.855146408081056],["કૂ",-13.85515022277832],["раць",-13.855154991149902],["LEC",-13.855157852172852],["▁შემდგომ",-13.85516357421875],["க்குப்",-13.855169296264648],["gallery",-13.855181694030762],["▁പച്ച",-13.855182647705078],["▁düştü",-13.85522174835205],["४६",-13.855262756347656],["성의",-13.855265617370604],["▁evaluat",-13.855298042297363],["शिप",-13.85533332824707],["仍有",-13.855360984802246],["ΑΝΤ",-13.855369567871094],["▁algas",-13.855374336242676],["▁petición",-13.855374336242676],["zzy",-13.855388641357422],["pender",-13.855398178100586],["ವರೆಗೂ",-13.855398178100586],["kumppani",-13.855403900146484],["▁بتن",-13.8554048538208],["▁querem",-13.855406761169434],["▁pokret",-13.855408668518066],["▁දුන්නා",-13.855423927307127],["▁зоне",-13.855424880981444],["תוס",-13.855451583862305],["▁αγαπ",-13.85547924041748],["▁आत्मा",-13.855502128601074],["▁ծանոթ",-13.855508804321287],["▁разговори",-13.85551929473877],["χες",-13.855539321899414],["ТАЙ",-13.855547904968262],["▁Joko",-13.855555534362791],["ඳි",-13.855573654174805],["▁अना",-13.85559368133545],["▁árak",-13.855624198913574],["▁pierre",-13.855639457702637],["племен",-13.855642318725586],["▁جواد",-13.855645179748535],["▁αποτελεσμα",-13.855661392211914],["▁preventiva",-13.85568618774414],["▁Bahia",-13.855705261230469],["▁Protocol",-13.855711936950684],["ुका",-13.855755805969238],["▁приговор",-13.85576057434082],["▁eblas",-13.855775833129885],["▁Κό",-13.855792045593262],["Cycl",-13.85580062866211],["▁епі",-13.85581398010254],["▁האָ",-13.855878829956056],["▁Wann",-13.855917930603027],["398",-13.855918884277344],["▁zahtjeva",-13.85592269897461],["▁relationships",-13.855923652648926],["▁பெரும்",-13.855935096740724],["מניה",-13.855945587158203],["ที่ผม",-13.855945587158203],["▁వేడుక",-13.855949401855469],["ែត",-13.855961799621582],["▁verdient",-13.855974197387695],["▁ගීතය",-13.855975151062012],["▁திருமண",-13.85597801208496],["▁Mersin",-13.85598087310791],["▁beses",-13.856008529663086],["етата",-13.856019020080566],["ставіць",-13.856022834777832],["대상",-13.856022834777832],["klę",-13.8560
24742126465],["▁בריא",-13.856033325195312],["▁paredes",-13.856039047241213],["▁probablement",-13.85604476928711],["Malaysia",-13.85604763031006],["lääkäri",-13.856053352355955],["เหล",-13.856067657470703],["▁tanesi",-13.856070518493652],["▁மக்கள",-13.856075286865234],["Παιδ",-13.85607624053955],["▁oră",-13.856077194213867],["想知道",-13.856090545654297],["▁portata",-13.856093406677246],["found",-13.856149673461914],["دوار",-13.85616970062256],["▁automobila",-13.856178283691406],["▁betapa",-13.856183052062988],["mantas",-13.856191635131836],["▁oyunda",-13.856219291687012],["ճի",-13.85622501373291],["chilari",-13.856245994567873],["▁pomagal",-13.856273651123049],["BUT",-13.856285095214844],["▁puertas",-13.85628604888916],["▁pò",-13.85629653930664],["IGN",-13.85629940032959],["▁portant",-13.856322288513184],["▁säsongen",-13.856330871582031],["▁транспорту",-13.856346130371094],["▁нашли",-13.856351852416992],["挥",-13.856374740600586],["▁tanıma",-13.856379508972168],["6,8",-13.856382369995115],["▁marchio",-13.856395721435549],["ረኛ",-13.856407165527344],["▁بھول",-13.856420516967772],["хард",-13.85642433166504],["遞",-13.856438636779783],["▁többek",-13.85645866394043],["亏",-13.85645866394043],["税务",-13.85646152496338],["▁normes",-13.856462478637695],["▁මාරු",-13.856464385986328],["black",-13.856470108032228],["▁αριθμό",-13.85647201538086],["▁एशिया",-13.856512069702148],["關懷",-13.85651397705078],["izol",-13.85651683807373],["ህወሃት",-13.856518745422363],["▁naleznete",-13.856518745422363],["▁αναφέρεται",-13.856518745422363],["▁μνήμη",-13.856518745422363],["▁тиешелүү",-13.856518745422363],["▁ئىجتىمائىي",-13.856518745422363],["▁فرآیند",-13.856518745422363],["▁ఆందోళన",-13.856518745422363],["▁ఘటన",-13.856518745422363],["▁เมษายน",-13.856518745422363],["▁სურვილი",-13.856518745422363],["▁እስኪ",-13.856518745422363],["▁የንግድ",-13.856518745422363],["▁활성화",-13.856518745422363],["",-13.856518745422363],["▁incredere",-13.85651969909668],["▁korrupsion",-13.85651969909668],["▁memasukkan",-13.85651969909668],["▁menyadari",-13.85651969909668],["▁కార్యక్రమాల",-13.85651969909668],["βεβαίω",-13.856520652770996],["▁ziyarət",-13.856520652770996],["▁സൃഷ്ടിക്ക",-13.856520652770996],["▁маанилүү",-13.856521606445312],["▁mbrojtur",-13.856522560119627],["▁muassasalari",-13.856522560119627],["▁नवंबर",-13.856522560119627],["▁ክፍሎች",-13.856522560119627],["▁πολλούς",-13.856525421142578],["▁skladb",-13.856526374816896],["▁прыватнасці",-13.856528282165527],["▁adatvédelmi",-13.856529235839844],["▁tranquila",-13.856531143188477],["▁uzavřen",-13.856531143188477],["▁dijital",-13.856536865234377],["▁მნიშვნელობა",-13.856538772583008],["▁Susiję",-13.856541633605955],["▁мағына",-13.856541633605955],["▁Phải",-13.856542587280272],["▁Lifestyle",-13.856544494628906],["ארוחת",-13.856549263000488],["▁الأبيض",-13.856557846069336],["▁трафик",-13.856562614440918],["▁प्लान",-13.856562614440918],["▁جنھن",-13.856565475463867],["▁мерзімі",-13.8565673828125],["▁Wagen",-13.856571197509766],["▁zaščite",-13.856572151184082],["vísinda",-13.856574058532717],["▁peněz",-13.856575012207031],["コスト",-13.85657787322998],["▁ଘଟି",-13.856579780578612],["▁დაგვ",-13.856592178344728],["intero",-13.856596946716309],["၂၀၁၆",-13.856602668762209],["▁skorzystać",-13.856603622436523],["▁политици",-13.856608390808104],["▁прибира",-13.856616020202637],["▁Termék",-13.856626510620115],["▁multor",-13.856627464294434],["▁तीव्र",-13.856635093688965],["▁cubrir",-13.856637954711914],["klipp",-13.85664176940918],["▁testvér",-13.856642723083496],["▁waarvoor",-13.856647491455078],["▁определяется",
-13.85665225982666],["▁የመንግስት",-13.856654167175291],["▁associations",-13.856656074523926],["▁შევი",-13.856660842895508],["勝利",-13.856669425964355],["▁obdobju",-13.856674194335938],["▁poinformowa",-13.856688499450684],["ልማ",-13.856698989868164],["▁uspješn",-13.856721878051758],["▁своєю",-13.856724739074709],["▁крупных",-13.856733322143556],["▁rs",-13.856735229492188],["▁nějakou",-13.856741905212402],["▁linija",-13.85675048828125],["▁priveste",-13.856759071350098],["ndë",-13.85676097869873],["▁практични",-13.856761932373049],["▁relazioni",-13.85676383972168],["מסורת",-13.856765747070312],["▁Encontro",-13.856766700744627],["▁óleo",-13.856767654418944],["▁généralement",-13.856768608093262],["▁સવારે",-13.856770515441896],["อย่างรวดเร็ว",-13.856778144836426],["▁покращ",-13.856781959533691],["▁трудового",-13.856783866882324],["▁ενημέρωσης",-13.856796264648438],["▁jogador",-13.856799125671388],["تأخر",-13.856803894042969],["மன்ற",-13.856807708740234],["▁niðurstöðu",-13.85682201385498],["▁yaşayır",-13.856822967529297],["לוח",-13.856825828552246],["чика",-13.85683250427246],["▁perfectamente",-13.85683822631836],["▁წმიდა",-13.856844902038574],["▁இரவு",-13.85684585571289],["▁پهچي",-13.856871604919434],["▁fyller",-13.8568754196167],["▁улут",-13.856884002685549],["proszeni",-13.85688591003418],["▁מהיר",-13.856893539428713],["▁knygos",-13.85690689086914],["μπερι",-13.856913566589355],["တဲ",-13.856918334960938],["▁većina",-13.856926918029783],["▁الإسرائيلية",-13.856929779052734],["▁karşılaşma",-13.856934547424316],["▁предлагат",-13.85693645477295],["schlagen",-13.856945991516112],["▁frutta",-13.856953620910645],["иця",-13.856969833374023],["▁Ramos",-13.856973648071287],["▁smör",-13.856978416442873],["▁világos",-13.856993675231934],["▁დროის",-13.857007026672363],["▁দিনের",-13.857022285461426],["▁руске",-13.857029914855955],["▁좋아하는",-13.85703182220459],["િશ",-13.857091903686523],["hlášení",-13.857094764709473],["▁ampuh",-13.857096672058104],["▁Kū",-13.857123374938965],["▁آخري",-13.857160568237305],["πες",-13.857170104980469],["ልፍ",-13.857179641723633],["▁Čast",-13.857185363769531],["▁öldü",-13.857213020324709],["▁młodzieży",-13.857215881347656],["▁objective",-13.857216835021973],["છા",-13.857234954833984],["éves",-13.857237815856934],["▁Hijer",-13.857237815856934],["▁stało",-13.85723876953125],["▁మారి",-13.857245445251465],["▁SEA",-13.857248306274414],["▁markaas",-13.857248306274414],["▁GAZ",-13.857264518737791],["ຫຼັງຈາກ",-13.85728645324707],["កុ",-13.857287406921388],["▁Üç",-13.85728931427002],["cafe",-13.857304573059082],["▁ଲିଙ୍କ",-13.857314109802246],["▁душата",-13.857359886169434],["▁причинам",-13.8573637008667],["▁galvas",-13.857367515563965],["▁Muista",-13.857426643371582],["▁өзүн",-13.857428550720217],["טיפול",-13.857454299926758],["▁lëviz",-13.857470512390137],["▁Dizi",-13.857471466064451],["สห",-13.85750675201416],["▁Lieb",-13.857518196105955],["▁Воло",-13.857521057128906],["▁Ilustr",-13.857561111450195],["ЕЛЕ",-13.85756492614746],["ڌا",-13.857582092285156],["ວ່າດ້ວຍ",-13.857583999633787],["ขึ้นอยู่กับ",-13.857584953308104],["IDH",-13.857599258422852],["▁ödə",-13.857601165771484],["▁gyűjt",-13.857603073120115],["▁3-2",-13.857616424560549],["▁Ruta",-13.857617378234863],["▁රවී",-13.857626914978027],["▁قبلا",-13.857640266418455],["▁Børne",-13.857641220092772],["▁пропуск",-13.857653617858888],["用地",-13.857674598693848],["aiatzaren",-13.857678413391112],["न्तर",-13.85769271850586],["幸福的",-13.857693672180176],["▁سرڪاري",-13.857725143432615],["になること",-13.85773754119873],["ባረ",-13.857746124267578],["▁tipy"
,-13.857749938964844],["téve",-13.85775661468506],["▁Papp",-13.857773780822754],["▁գիշեր",-13.857796669006348],["plaça",-13.857806205749512], ...
[diff truncated: the remainder of this file is a single JSON array of ["piece", log-probability] pairs, evidently the SentencePiece unigram vocabulary that ships with the bundled sentence-transformer embedding model. The full array runs to many thousands of entries and is tracked with Git LFS, so only the short excerpt above is kept here.]
9944152832],["दम",-13.875311851501465],["▁digit",-13.87531566619873],["▁सक्षम",-13.87531852722168],["▁Implement",-13.875319480895996],["▁pohjois",-13.875321388244627],["▁علائم",-13.875323295593262],["▁utorak",-13.875324249267578],["▁լինում",-13.875327110290527],["▁giocatori",-13.875329971313477],["▁obrazovanja",-13.875330924987791],["▁ಆದೇಶ",-13.87533473968506],["▁изготовлен",-13.875344276428224],["▁جاسوس",-13.875344276428224],["▁здравља",-13.875346183776855],["▁изпрати",-13.875346183776855],["▁করলেন",-13.875347137451172],["▁eelarve",-13.875348091125488],["修复",-13.875348091125488],["▁удаагийн",-13.875354766845703],["moment",-13.875364303588867],["▁የትምህርት",-13.875370979309082],["▁содержания",-13.87538242340088],["▁løbende",-13.875401496887209],["▁beklenti",-13.875410079956056],["▁որքան",-13.875411033630373],["▁978-",-13.875412940979004],["▁Υγείας",-13.875415802001951],["▁الوزير",-13.875420570373535],["▁senarai",-13.875438690185549],["▁żar",-13.875444412231444],["▁відповіді",-13.875473022460938],["▁europski",-13.875476837158203],["парушэнн",-13.875483512878418],["கிட்ட",-13.875484466552734],["▁Galleri",-13.875484466552734],["▁namenom",-13.875484466552734],["▁säsong",-13.87550449371338],["▁kuolema",-13.87551498413086],["▁पोर्टल",-13.87551498413086],["▁sağlayacak",-13.87552547454834],["논술",-13.875555038452148],["▁gjerë",-13.875561714172363],["▁ಫ್ರ",-13.875567436218262],["▁tandem",-13.87557601928711],["▁ايف",-13.875577926635742],["▁зварот",-13.87559413909912],["▁ответчик",-13.875606536865234],["▁dã",-13.87561321258545],["▁WOW",-13.875661849975586],["▁desideri",-13.875666618347168],["▁lásky",-13.875666618347168],["▁ایام",-13.875666618347168],["▁وأنه",-13.875680923461914],["▁superiores",-13.875685691833496],["▁netew",-13.87574577331543],["▁asculta",-13.875754356384276],["រៈ",-13.875755310058594],["ləşdirmə",-13.875760078430176],["▁உங்க",-13.875778198242188],["▁keldi",-13.875792503356934],["▁нива",-13.875804901123049],["Бул",-13.875819206237791],["Міжнародн",-13.875819206237791],["▁קורה",-13.875826835632324],["▁colazione",-13.875834465026855],["ценз",-13.87584400177002],["▁Rata",-13.875852584838867],["Единствен",-13.875862121582031],["▁Nieuws",-13.875866889953612],["请求",-13.875874519348145],["sýni",-13.875892639160156],["▁מילים",-13.87590503692627],["živi",-13.875905990600586],["▁προβ",-13.875906944274902],["▁ուսանող",-13.875906944274902],["hadap",-13.875909805297852],["我一直",-13.875909805297852],["▁शकते",-13.875917434692385],["▁ባለው",-13.87592601776123],["общение",-13.875927925109863],["အသစ္",-13.875931739807127],["164",-13.875936508178713],["▁كنيد",-13.875948905944824],["▁hyödy",-13.875967025756836],["МАН",-13.875974655151367],["▁pinapa",-13.875977516174316],["▁ಜಾಗ",-13.87597942352295],["▁SAV",-13.876002311706545],["beidz",-13.87601089477539],["▁Aureli",-13.87602424621582],["impact",-13.87602710723877],["лерінің",-13.876039505004885],["ปัก",-13.876089096069336],["▁සුන්දර",-13.876100540161133],["▁terjed",-13.876111030578612],["▁tunjuk",-13.876118659973145],["▁ficha",-13.876123428344728],["なければなりません",-13.876136779785156],["▁amaiera",-13.87614631652832],["▁Стој",-13.876155853271484],["שמר",-13.876160621643066],["ผลการ",-13.876160621643066],["▁Cél",-13.8761625289917],["ရႈ",-13.876174926757812],["լով",-13.876249313354492],["453",-13.876269340515137],["unkban",-13.876272201538086],["▁piedāvājumus",-13.876302719116213],["▁objavila",-13.876307487487791],["ഭാഷ",-13.87630844116211],["▁Nü",-13.876314163208008],["ذلك",-13.87632656097412],["нулись",-13.87632942199707],["▁takvim",-13.876351356506348],["▁aktivno",
-13.876355171203612],["miðstöð",-13.876359939575195],["▁grud",-13.876386642456056],["ሏል",-13.876398086547852],["▁трябвало",-13.87641716003418],["▁následne",-13.876418113708496],["▁گیس",-13.876418113708496],["ічнага",-13.876439094543455],["kirju",-13.87644100189209],["▁سماجی",-13.876445770263672],["▁Ұлы",-13.876461029052734],["Pir",-13.876466751098633],["Kompeten",-13.876486778259276],["▁britansk",-13.876486778259276],["stämme",-13.876490592956545],["ຊັ້ນ",-13.876522064208984],["▁avalik",-13.876535415649414],["▁mujh",-13.876535415649414],["▁అర్",-13.876540184020996],["▁ورودی",-13.87655258178711],["▁volon",-13.876561164855955],["▁Champ",-13.876575469970703],["▁bussen",-13.876622200012209],["▁flokë",-13.876626014709473],["കളിലും",-13.876627922058104],["▁сэнс",-13.876627922058104],["▁клубу",-13.8766450881958],["▁поднима",-13.876646995544434],["ισμένα",-13.876663208007812],["▁sirk",-13.876690864562988],["▁Svēt",-13.876691818237305],["bho",-13.876701354980469],["несете",-13.876736640930176],["spēju",-13.876742362976074],["▁rutier",-13.876749038696287],["▁Bertan",-13.876773834228516],["▁გამომ",-13.87677764892578],["▁mulki",-13.87678050994873],["▁περάσει",-13.87679958343506],["сийн",-13.876800537109377],["խա",-13.876809120178224],["റ്റോ",-13.876811027526855],["გვიან",-13.876819610595703],["▁dæmis",-13.876819610595703],["优质的",-13.876829147338867],["▁рачунар",-13.876863479614258],["pô",-13.876875877380373],["▁haberleri",-13.876903533935549],["所以在",-13.876925468444824],["ნქ",-13.876958847045898],["നൈ",-13.876968383789062],["▁lykkes",-13.876982688903809],["可能性があります",-13.877009391784668],["▁leşker",-13.87701416015625],["▁cumva",-13.877035140991213],["▁adding",-13.877036094665527],["дэнт",-13.877037048339844],["▁delete",-13.877037048339844],["ановић",-13.877052307128906],["▁Olvas",-13.877053260803224],["▁Svět",-13.877056121826172],["▁ਪਾਸ",-13.877058029174805],["▁Violet",-13.877069473266602],["▁atractivo",-13.877070426940918],["▁suivante",-13.877079963684082],["ライフ",-13.877083778381348],["ቤቱ",-13.877092361450195],["新娘",-13.877092361450195],["ىلدى",-13.877108573913574],["▁قىلىدىغان",-13.877115249633787],["▁китайски",-13.877121925354004],["▁فرس",-13.877134323120115],["▁Мет",-13.877138137817385],["άτων",-13.877145767211914],["їзду",-13.877161026000977],["▁करावा",-13.877161026000977],["iscrizione",-13.877178192138672],["▁sveitarfélag",-13.87718391418457],["▁kunnia",-13.877184867858888],["ต่อการ",-13.877189636230469],["▁breite",-13.877198219299316],["▁Igår",-13.877202987670898],["の人が",-13.877203941345217],["▁barbat",-13.877208709716797],["maatti",-13.877209663391112],["▁міндеттері",-13.877237319946287],["▁కవి",-13.87724494934082],["▁ocorrer",-13.877253532409668],["▁1893",-13.877256393432615],["соціал",-13.877259254455566],["花费",-13.8772611618042],["送信",-13.877267837524414],["стил",-13.877302169799805],["гендер",-13.87730884552002],["нинг",-13.877321243286133],["▁inchis",-13.877326965332031],["▁kulutus",-13.87735652923584],["▁رحمه",-13.877361297607422],["rillo",-13.877373695373535],["ційної",-13.87738037109375],["▁послужи",-13.877387046813965],["▁Azərbaycanlı",-13.877401351928713],["จักรยาน",-13.87740707397461],["otreb",-13.877449989318848],["मत्",-13.877520561218262],["▁skabt",-13.87752628326416],["▁suyun",-13.877533912658691],["▁අලි",-13.877535820007324],["६८",-13.87753677368164],["ብሩ",-13.87757682800293],["▁Course",-13.877593994140623],["▁Programı",-13.877593994140623],["స్క్",-13.877610206604004],["هاجم",-13.87761688232422],["這就是",-13.877630233764648],["Egy",-13.877663612365724],["▁खड",-13.877663612365724],
["▁insiste",-13.877673149108888],["▁tornou",-13.877686500549316],["▁Մաս",-13.877702713012695],["▁ενημέρωση",-13.877714157104492],["ázó",-13.877715110778809],["兩種",-13.877737045288086],["▁એમાં",-13.877739906311035],["your",-13.877740859985352],["Мобил",-13.877740859985352],["▁nhị",-13.87775993347168],["▁piedra",-13.877768516540527],["ការប្រកួត",-13.877779006958008],["▁당신이",-13.877787590026855],["መቅ",-13.877790451049805],["▁narys",-13.87779426574707],["ဂီ",-13.87780475616455],["▁орієнт",-13.877836227416992],["▁compared",-13.877838134765623],["▁കാട്ട",-13.87784481048584],["vnitř",-13.877884864807127],["XH",-13.877896308898926],["NUS",-13.877906799316406],["▁tokį",-13.877937316894531],["塘",-13.877939224243164],["亩",-13.877949714660645],["▁sankcij",-13.877952575683594],["Tie",-13.877955436706545],["մեր",-13.87795639038086],["וזי",-13.877960205078123],["gusto",-13.87796688079834],["▁холбогд",-13.877975463867188],["▁отбора",-13.877981185913086],["ลิ้ง",-13.877985954284668],["撃",-13.877986907958984],["สงขลา",-13.8779935836792],["လြယ္",-13.877994537353516],["😘",-13.877994537353516],["ຂື້ນ",-13.877995491027832],["জামায়াত",-13.877996444702148],["▁Hüseyin",-13.877996444702148],["▁HỌC",-13.877996444702148],["▁sąlygos",-13.877996444702148],["▁uvjeren",-13.877996444702148],["▁væsentlig",-13.877996444702148],["▁фрагмент",-13.877996444702148],["▁կնոջ",-13.877996444702148],["▁हरियाणा",-13.877996444702148],["▁রংপুর",-13.877996444702148],["▁ପାକିସ୍ତାନ",-13.877996444702148],["▁ଲେଖି",-13.877996444702148],["▁వైరల్",-13.877996444702148],["▁අවබෝධ",-13.877996444702148],["▁შეფასება",-13.877996444702148],["▁leheküljelt",-13.877997398376465],["▁αξιολόγηση",-13.877997398376465],["▁ഒരുപാട്",-13.877997398376465],["▁heimasíðu",-13.87799835205078],["▁Αιγαίο",-13.87799835205078],["▁Weiterlesen",-13.877999305725098],["▁comenzó",-13.878000259399414],["▁polgármester",-13.87800121307373],["▁निर्मला",-13.87800121307373],["▁పోస్టు",-13.87800121307373],["▁լույս",-13.878002166748049],["▁mišljenje",-13.878003120422363],["▁csúcs",-13.87800407409668],["▁التجارة",-13.87800407409668],["▁samazināt",-13.878005027770996],["▁Nhập",-13.878005981445312],["▁среќа",-13.878005981445312],["▁Cynllun",-13.878006935119627],["▁Konstantin",-13.878006935119627],["▁καφέ",-13.878006935119627],["শক্তি",-13.878007888793944],["▁जानेवारी",-13.878010749816896],["▁BDP",-13.878012657165527],["sýslu",-13.87801456451416],["▁राष्ट्रवादी",-13.87801456451416],["▁ପଣ୍ଡା",-13.87802028656006],["▁ارزښت",-13.878021240234377],["▁Gestão",-13.878040313720703],["تاريخ",-13.878046035766602],["▁வகையில்",-13.878056526184082],["▁selvsagt",-13.878059387207031],["▁Kryetari",-13.87806510925293],["▁райондук",-13.87806510925293],["▁списку",-13.87806510925293],["▁അമിത",-13.878070831298828],["▁абсолют",-13.87807273864746],["▁ଦେବାକୁ",-13.87807273864746],["只会",-13.878073692321776],["▁sübut",-13.878076553344728],["▁vermoed",-13.87807846069336],["▁תנאים",-13.87807846069336],["▁хормон",-13.87808609008789],["▁редица",-13.878090858459473],["也就是说",-13.878093719482422],["ហ៊",-13.87809944152832],["▁المستوى",-13.878101348876951],["▁binêre",-13.87810230255127],["▁eleccions",-13.878107070922852],["▁ئاۋ",-13.878110885620115],["▁coverage",-13.878113746643066],["▁ustida",-13.878113746643066],["gearbeitet",-13.878114700317385],["▁tiềm",-13.878138542175291],["▁mataifa",-13.878145217895508],["▁השופט",-13.878148078918455],["▁vanligvis",-13.878155708312988],["▁לשלם",-13.878156661987305],["▁instalacji",-13.878159523010254],["▁pinggang",-13.878162384033203],["โดยการ",-13.878171920776367],["▁vybrat",-13.8781881
33239746],["▁الموجودة",-13.878193855285645],["▁senzill",-13.878213882446287],["▁زماني",-13.878214836120604],["ítva",-13.878217697143556],["چے",-13.878220558166504],["▁adequada",-13.87822151184082],["ניב",-13.878228187561035],["▁الثقافة",-13.878230094909668],["▁ढंग",-13.878240585327148],["▁Anschluss",-13.878246307373049],["▁Sitesi",-13.87827968597412],["▁Böylece",-13.878280639648438],["▁التركية",-13.87829303741455],["▁הפנים",-13.878293991088867],["▁держава",-13.878294944763184],["▁وڌي",-13.8782958984375],["ועים",-13.87829875946045],["waxa",-13.878300666809082],["измот",-13.878303527832031],["களுக்கான",-13.878314018249512],["▁gebou",-13.87833023071289],["▁sallam",-13.878337860107422],["installer",-13.8783540725708],["▁کارگاه",-13.8783597946167],["▁грађе",-13.878378868103027],["▁gedib",-13.878417015075684],["▁ಸಾಗ",-13.878419876098633],["▁bacon",-13.878429412841797],["förbund",-13.878439903259276],["▁Martí",-13.878439903259276],["▁논의",-13.878443717956545],["▁podobnie",-13.878457069396973],["▁papildu",-13.878469467163086],["▁tokios",-13.87847137451172],["▁Första",-13.87847900390625],["ଜର",-13.8784818649292],["▁Hela",-13.87849235534668],["මද",-13.87850570678711],["▁размери",-13.878517150878906],["▁ujedno",-13.87851905822754],["muzik",-13.878520011901855],["▁certame",-13.87852668762207],["▁있을까",-13.878527641296388],["ಕರಣ",-13.878531455993652],["▁законодател",-13.878538131713867],["▁Penggunaan",-13.8785400390625],["▁compromisso",-13.878555297851562],["▁الدينية",-13.87859344482422],["▁Марс",-13.878617286682127],["ଏଲ",-13.878618240356444],["▁brunette",-13.878633499145508],["发放",-13.87863826751709],["ເນື້ອ",-13.87865161895752],["spirit",-13.87865924835205],["logisch",-13.878673553466797],["▁съвети",-13.878674507141112],["МЕР",-13.878684997558594],["ődik",-13.87869644165039],["▁զար",-13.878700256347656],["▁가을",-13.878713607788086],["▁Jugendliche",-13.878725051879885],["▁Corpo",-13.87874984741211],["icità",-13.87875747680664],["▁mendalam",-13.878768920898438],["▁Basel",-13.878775596618652],["LŐ",-13.878776550292969],["▁Distan",-13.87879467010498],["▁Slovak",-13.878798484802246],["▁greš",-13.878808975219728],["izmir",-13.878816604614258],["▁gacanta",-13.8788423538208],["▁condiziona",-13.878844261169434],["▁Rekord",-13.878863334655762],["▁पोल",-13.878884315490724],["immobile",-13.878902435302734],["แทบ",-13.878929138183594],["kurangnya",-13.878946304321287],["huquqiy",-13.87895679473877],["▁telefonic",-13.87899398803711],["▁știi",-13.879000663757324],["▁лесен",-13.879003524780272],["увальник",-13.879015922546388],["tegning",-13.879055976867676],["▁facilidade",-13.879055976867676],["▁palsu",-13.879056930541992],["▁podrían",-13.879066467285156],["▁sklop",-13.879084587097168],["פשוט",-13.879103660583496],["▁അഞ്ചു",-13.879110336303713],["ታወቅ",-13.879115104675291],["баш",-13.879117965698242],["▁osnovno",-13.879121780395508],["bestand",-13.879127502441406],["▁klasické",-13.879128456115724],["▁وأم",-13.879143714904783],["▁सकिए",-13.879152297973633],["žná",-13.87916088104248],["▁միության",-13.879172325134276],["▁народного",-13.879180908203123],["▁dibanding",-13.879192352294922],["▁Drau",-13.879194259643556],["▁օդ",-13.879212379455566],["▁салым",-13.87923526763916],["▁освоји",-13.879297256469728],["自由行",-13.87929916381836],["▁escapa",-13.879302024841309],["▁لگی",-13.879327774047852],["▁كلها",-13.879334449768066],["▁Subha",-13.879369735717772],["▁اختصاصی",-13.879416465759276],["യാവ",-13.879417419433594],["джер",-13.879432678222656],["▁даярда",-13.879433631896973],["▁piešķirt",-13.879435539245604],["шылары",-13.8794
43168640137],["▁있으",-13.879444122314451],["▁Kalla",-13.879449844360352],["▁циклус",-13.879476547241213],["▁Қар",-13.87948226928711],["سافر",-13.879498481750488],["▁માગ",-13.87954044342041],["▁Máme",-13.879582405090332],["▁резултата",-13.879597663879396],["起き",-13.879636764526367],["milyen",-13.879656791687012],["popol",-13.87966537475586],["kötés",-13.879667282104492],["▁grut",-13.879681587219238],["▁videoklip",-13.87970733642578],["уваше",-13.879717826843262],["赛事",-13.879719734191896],["खेल",-13.879724502563477],["▁narave",-13.87972640991211],["▁Chanel",-13.879728317260742],["▁dolaze",-13.87972927093506],["▁επα",-13.879732131958008],["▁периодот",-13.87974739074707],["▁pārstāvji",-13.87975025177002],["အတိုင်း",-13.879755020141602],["▁κοινή",-13.879762649536133],["스테",-13.879762649536133],["▁hız",-13.879806518554688],["▁خريد",-13.879819869995115],["▁slank",-13.87982177734375],["rādīju",-13.879826545715332],["က်င့္",-13.87983226776123],["日まで",-13.87983512878418],["ບໍ່ໄດ້",-13.879855155944824],["လာတဲ့",-13.87986183166504],["단계",-13.87986946105957],["▁ಅಷ್ಟ",-13.879887580871582],["▁मिर",-13.87989330291748],["ต้องใช้",-13.879900932312012],["▁bebidas",-13.879901885986328],["ોનો",-13.879932403564451],["▁පිටි",-13.879952430725098],["▁איתו",-13.879960060119627],["▁locis",-13.879989624023438],["▁пазна",-13.879995346069336],["RADI",-13.880003929138184],["▁утвержда",-13.880008697509766],["▁دلچسپ",-13.880016326904297],["OSO",-13.880032539367676],["▁komision",-13.88004207611084],["▁octa",-13.880064964294434],["▁ჟურნალისტ",-13.880077362060549],["三次",-13.880081176757812],["▁naturligt",-13.880087852478027],["իկայի",-13.88009548187256],["▁nobili",-13.880098342895508],["▁theory",-13.88011646270752],["▁uitstekende",-13.88012409210205],["рство",-13.88016414642334],["ТРА",-13.880178451538086],["▁Territori",-13.880196571350098],["▁показват",-13.880196571350098],["diğinde",-13.88020133972168],["עניין",-13.880208969116213],["되었",-13.880212783813477],["筆者",-13.88025951385498],["ระเบียบ",-13.880268096923828],["▁khích",-13.880281448364258],["पुराण",-13.880319595336914],["▁etdiyini",-13.88034439086914],["توانند",-13.880390167236328],["科目",-13.880390167236328],["ທີ່ເປັນ",-13.880406379699709],["▁ஆரம்பித்த",-13.880438804626465],["▁názory",-13.880444526672363],["ምንድን",-13.880460739135742],["▁હસ",-13.880485534667969],["▁GIÁ",-13.880487442016602],["▁reconnu",-13.880511283874512],["дэмакраты",-13.880513191223145],["▁നേതൃത്വ",-13.880517959594728],["▁chtěla",-13.880523681640623],["▁неопходно",-13.880525588989258],["DNI",-13.880534172058104],["եցինք",-13.880552291870115],["▁NATUR",-13.88059902191162],["лаар",-13.880630493164062],["輔",-13.88063144683838],["▁কমিটি",-13.88063621520996],["鋁",-13.88063907623291],["ក្រោម",-13.88064670562744],["阿里巴巴",-13.88064670562744],["鬧",-13.88064670562744],["▁последно",-13.880650520324709],["▁그가",-13.88065242767334],["वाडी",-13.880659103393556],["rinha",-13.880695343017578],["ສໍາເລັດ",-13.880696296691896],["▁xüsusilə",-13.880699157714844],["利亞",-13.88071060180664],["พลาสติก",-13.880712509155272],["బాహుబలి",-13.88071346282959],["ഡ്രൈവ",-13.88071346282959],["▁Bovendien",-13.88071346282959],["▁Gemeinschaft",-13.88071346282959],["▁Tottenham",-13.88071346282959],["▁Vancouver",-13.88071346282959],["▁hvenær",-13.88071346282959],["▁odstotkov",-13.88071346282959],["▁príspevky",-13.88071346282959],["▁οποίων",-13.88071346282959],["▁Сёння",-13.88071346282959],["▁Хэрэв",-13.88071346282959],["▁истраживања",-13.88071346282959],["▁помещений",-13.88071346282959],["▁काठमाण्डौ",-13.88071346282959],["▁कान्तिपुर",-13.
88071346282959],["▁স্টাইল",-13.88071346282959],["▁ਜਲੰਧਰ",-13.88071346282959],["▁અભ્યાસ",-13.88071346282959],["▁ಎಂಬುದನ್ನು",-13.88071346282959],["▁ಪರಿಚಯ",-13.88071346282959],["▁ರಕ್ತ",-13.88071346282959],["▁ሰላማዊ",-13.88071346282959],["▁커뮤니티",-13.88071346282959],["‘",-13.880714416503906],["▁Gebrauch",-13.880714416503906],["▁bērns",-13.880714416503906],["▁dimasukkan",-13.880714416503906],["▁sbaglia",-13.880714416503906],["▁taktiež",-13.880714416503906],["▁μερικές",-13.880714416503906],["▁хиљада",-13.880714416503906],["▁ўніверсітэт",-13.880714416503906],["▁ଉଇକିପିଡ଼ିଆ",-13.880714416503906],["▁ଦେଉ",-13.880714416503906],["▁அதனால்",-13.880714416503906],["▁వేదిక",-13.880714416503906],["▁කොමිසම",-13.880714416503906],["▁เข้าสู่ระบบ",-13.880714416503906],["▁anbieten",-13.880715370178224],["▁છોકરી",-13.880715370178224],["leverancier",-13.88071632385254],["▁σύνολο",-13.88071632385254],["▁төрагасы",-13.880717277526855],["▁ներքին",-13.880718231201172],["сінде",-13.880720138549805],["කාලීන",-13.880720138549805],["▁Terve",-13.880720138549805],["狗狗",-13.880720138549805],["▁निर्यात",-13.880722999572754],["▁пратэст",-13.880725860595703],["▁വെറും",-13.88072681427002],["สะสม",-13.880728721618652],["▁Zombie",-13.880731582641602],["สู",-13.880732536315918],["▁קבוע",-13.880733489990234],["▁मिळाले",-13.88073444366455],["▁дәрі",-13.880736351013184],["▁முதலில்",-13.8807373046875],["▁අමතරව",-13.880741119384766],["▁बाध्य",-13.880743980407717],["▁Would",-13.880744934082031],["▁đốt",-13.880751609802246],["▁soạn",-13.880757331848145],["ຈິດ",-13.880759239196776],["▁Postboks",-13.880763053894045],["▁Captain",-13.88076400756836],["▁영역",-13.880764961242676],["תוספת",-13.880765914916992],["▁Vlorë",-13.880769729614258],["សម័យ",-13.880770683288574],["▁удирдлага",-13.880773544311523],["▁kaŭz",-13.880784034729004],["meistri",-13.880789756774902],["▁사망",-13.880792617797852],["▁מעניין",-13.880802154541016],["▁النتائج",-13.880805015563965],["▁rezultatai",-13.88080596923828],["罗马",-13.880809783935549],["▁regna",-13.880816459655762],["athlon",-13.88082504272461],["▁برطرف",-13.880826950073242],["▁prijava",-13.880829811096191],["▁Nabad",-13.88083267211914],["▁odrasli",-13.880834579467772],["▁szkoła",-13.880850791931152],["▁صحفي",-13.880858421325684],["▁masakan",-13.880860328674316],["▁סתם",-13.880884170532228],["עסוק",-13.880888938903809],["ščini",-13.88089084625244],["▁tænkt",-13.880895614624023],["等问题",-13.880906105041504],["多樣",-13.880915641784668],["▁gdzieś",-13.880916595458984],["▁írja",-13.880918502807615],["жують",-13.88093376159668],["▁sisaldab",-13.880948066711426],["jenek",-13.880966186523438],["▁الشرقية",-13.880979537963867],["Marc",-13.880990028381348],["ाधिक",-13.88099765777588],["▁характеризу",-13.88100242614746],["ೋಪ",-13.881006240844728],["▁451",-13.881007194519045],["▁пісні",-13.881010055541992],["▁կանխ",-13.881016731262209],["▁ગરમ",-13.881044387817385],["▁юридических",-13.881056785583496],["▁त्रास",-13.881068229675291],["ESO",-13.88106918334961],["我會",-13.88106918334961],["INK",-13.881075859069824],["වීමෙන්",-13.88108730316162],["▁posljednjih",-13.881109237670898],["情報が",-13.88111972808838],["▁xâm",-13.881125450134276],["ျဖစ္သည္။",-13.881148338317873],["öffentlich",-13.881152153015137],["ສັກ",-13.88117218017578],["▁мъжете",-13.88117504119873],["มีประสิทธิภาพ",-13.881179809570312],["richting",-13.88118839263916],["များ၏",-13.881203651428224],["▁delegado",-13.881220817565918],["到位",-13.881233215332031],["자들은",-13.881244659423828],["▁Aboneaz",-13.88124942779541],["▁spalio",-13.881251335144045],["▁výške",-13.881280899047852],["
▁silenci",-13.88129425048828],["Uradni",-13.881321907043455],["▁elevato",-13.88132667541504],["xeta",-13.881336212158203],["▁долга",-13.88135051727295],["▁연기",-13.881353378295898],["▁hilbijartin",-13.881362915039062],["▁Теги",-13.88136386871338],["▁площі",-13.88138198852539],["过于",-13.881383895874023],["▁prosim",-13.881394386291504],["▁krátko",-13.881412506103516],["▁ισο",-13.881417274475098],["ılmaktadır",-13.881418228149414],["эхэд",-13.881418228149414],["▁peter",-13.881429672241213],["▁дето",-13.881444931030272],["▁Luxus",-13.881468772888184],["▁ତୁମ",-13.8814697265625],["▁Verona",-13.881470680236816],["▁gerak",-13.881471633911133],["▁Faust",-13.881473541259766],["▁aplikace",-13.881475448608398],["▁earum",-13.881502151489258],["ینہ",-13.881512641906738],["sbehandling",-13.881519317626951],["高于",-13.881519317626951],["▁czasach",-13.88152027130127],["▁dobrih",-13.881549835205078],["▁улогу",-13.881555557250977],["▁nyhets",-13.88157081604004],["ገት",-13.881590843200684],["vkou",-13.881593704223633],["▁ಅಸ",-13.881625175476074],["▁borç",-13.881657600402832],["List",-13.881667137145996],["▁Logistik",-13.881677627563477],["ଚ୍ଚ",-13.88168716430664],["▁pesmi",-13.881693840026855],["نیوز",-13.88170051574707],["▁любові",-13.881705284118652],["▁ответа",-13.881706237792969],["ဆုံ",-13.8817138671875],["▁Dapatkan",-13.88171672821045],["▁Kalb",-13.881721496582031],["▁nganti",-13.881726264953612],["▁aldım",-13.88174057006836],["▁Пошто",-13.881741523742676],["▁физик",-13.881741523742676],["你看",-13.88175106048584],["▁tarefas",-13.881786346435549],["▁Brom",-13.881810188293455],["工会",-13.881816864013672],["▁luovut",-13.88181972503662],["▁oftest",-13.881821632385254],["▁הינה",-13.881824493408203],["▁fóru",-13.881828308105469],["ในชีวิต",-13.881829261779783],["▁fournir",-13.881848335266112],["blanc",-13.88185977935791],["▁заявіў",-13.88187026977539],["▁supón",-13.881940841674805],["ครอง",-13.881967544555664],["ђена",-13.881970405578612],["олгон",-13.88197135925293],["▁счета",-13.881972312927246],["hessen",-13.881983757019045],["▁xətti",-13.881987571716309],["▁koži",-13.882001876831056],["▁comprado",-13.882003784179688],["▁газарт",-13.882006645202637],["ліць",-13.882010459899902],["▁veebilehel",-13.88201141357422],["▁buôn",-13.882019996643066],["▁Першы",-13.88203239440918],["出した",-13.882037162780762],["kvöld",-13.882038116455078],["ghal",-13.88204574584961],["рения",-13.88206386566162],["حساس",-13.882094383239746],["▁skrifað",-13.882097244262695],["▁Сабор",-13.88210105895996],["▁серйозн",-13.882107734680176],["清新",-13.882109642028809],["णार्",-13.882121086120604],["▁வீட்டு",-13.882124900817873],["ડું",-13.882143020629885],["▁Англия",-13.882159233093262],["가족",-13.882179260253906],["强制",-13.882216453552246],["јско",-13.882221221923828],["▁DKK",-13.882238388061523],["ótica",-13.882245063781738],["▁мовах",-13.8822603225708],["▁biryara",-13.882281303405762],["▁አልነበረ",-13.88230037689209],["▁специални",-13.882304191589355],["tuksella",-13.882306098937988],["क्ता",-13.882329940795898],["່ອນ",-13.882333755493164],["▁muset",-13.882341384887695],["▁vegye",-13.882352828979492],["sommer",-13.882360458374023],["gångar",-13.88236141204834],["▁toward",-13.882364273071287],["▁trovo",-13.882368087768556],["▁كتبت",-13.88237476348877],["ກົງ",-13.882394790649414],["▁zakoni",-13.882438659667969],["▁фигура",-13.882447242736816],["▁પહોંચી",-13.882463455200195],["גיל",-13.882475852966309],["מיום",-13.882475852966309],["లేరు",-13.882497787475586],["▁ලැබී",-13.882501602172852],["ਪੁਰਾ",-13.882506370544434],["န္႕",-13.882511138916016],["▁dhu
l",-13.882511138916016],["▁받았",-13.88253688812256],["▁അച്ഛന്",-13.882537841796877],["▁العز",-13.882546424865724],["を通して",-13.882548332214355],["ଃ",-13.88255500793457],["irsiniz",-13.882558822631836],["ξω",-13.882561683654783],["▁простора",-13.882574081420898],["lanmıştır",-13.882585525512695],["▁жуу",-13.882585525512695],["значається",-13.882587432861328],["▁cadw",-13.882587432861328],["පිටිය",-13.88259983062744],["dømt",-13.882603645324709],["▁savunma",-13.882610321044922],["▁სცენ",-13.882615089416504],["avtal",-13.882636070251465],["▁лёс",-13.88263702392578],["▁(50)",-13.882650375366213],["▁hotelov",-13.88265895843506],["▁Που",-13.882667541503906],["▁feidhm",-13.882678985595703],["中部",-13.8826904296875],["お問い合わせください",-13.88270378112793],["▁лицом",-13.88271141052246],["▁казак",-13.882742881774902],["▁കയ്യ",-13.882750511169434],["▁காம",-13.882752418518066],["▁ویزا",-13.88276481628418],["बेर",-13.882772445678713],["▁ጦር",-13.882776260375977],["抱着",-13.882776260375977],["▁მთელ",-13.88280963897705],["Pero",-13.882810592651367],["▁lichid",-13.882816314697266],["▁βαθμό",-13.88282299041748],["208",-13.882852554321287],["▁ზღვა",-13.882879257202148],["2.6",-13.882885932922363],["▁bestilt",-13.882894515991213],["kladá",-13.88291835784912],["kalni",-13.882930755615234],["Ням",-13.882936477661133],["▁kifejezés",-13.882939338684082],["▁krevet",-13.882959365844728],["CRA",-13.88300323486328],["▁bekrefte",-13.88300609588623],["เห็นว่า",-13.883008003234863],["▁നീണ്ട",-13.883014678955078],["▁Знам",-13.883026123046877],["▁telinga",-13.883034706115724],["▁Alexandru",-13.883068084716797],["▁оборони",-13.883074760437012],["▁calça",-13.883075714111328],["pót",-13.883084297180176],["▁стручни",-13.883084297180176],["▁dažādas",-13.883085250854492],["▁molest",-13.883085250854492],["▁renka",-13.883087158203123],["▁Slobo",-13.88310718536377],["vægt",-13.883115768432615],["▁Recon",-13.883115768432615],["▁първа",-13.883131980895996],["▁galw",-13.883138656616213],["▁патрэб",-13.883138656616213],["▁സീറ്റ",-13.88315200805664],["▁Kick",-13.883171081542969],["feminism",-13.8831787109375],["чувати",-13.88318157196045],["▁Taas",-13.883186340332031],["վոր",-13.88320255279541],["▁svemu",-13.883240699768066],["▁తిన",-13.883244514465332],["ት፡፡",-13.883249282836914],["谱",-13.883256912231444],["▁sram",-13.883257865905762],["▁Hänen",-13.883259773254396],["нскім",-13.883261680603027],["▁jāsa",-13.883262634277344],["filo",-13.883283615112305],["పడుతున్న",-13.88328456878662],["▁szakember",-13.883298873901367],["ա՞",-13.88330078125],["▁الروسي",-13.88330078125],["ılmasına",-13.883328437805176],["▁রাতে",-13.88333511352539],["審核",-13.883359909057615],["▁25.000",-13.883368492126465],["ላላ",-13.88336944580078],["懒",-13.883379936218262],["▁दिव्य",-13.883382797241213],["▁құрал",-13.883384704589844],["ภิ",-13.88338565826416],["speel",-13.88339138031006],["▁Голем",-13.883397102355955],["▁różni",-13.883410453796388],["华盛顿",-13.883415222167969],["έφερε",-13.883418083190918],["耍",-13.883418083190918],["济南",-13.883420944213867],["▁потребности",-13.883421897888184],["溫馨",-13.883424758911133],["넓",-13.88343620300293],["▁оприлюдн",-13.883437156677246],["ល្បី",-13.883438110351562],["▁انفجار",-13.883438110351562],["▁नियंत्रण",-13.883438110351562],["پنهنجن",-13.88343906402588],["സംരക്ഷണ",-13.88343906402588],["▁Giorgio",-13.88343906402588],["▁Qüvvələri",-13.88343906402588],["▁Trzeba",-13.88343906402588],["▁beszámoló",-13.88343906402588],["▁birželio",-13.88343906402588],["▁ciężki",-13.88343906402588],["▁comerciais",-13.88343906402588],["▁destnîşan",-13.883439
06402588],["▁kogoś",-13.88343906402588],["▁kuboresha",-13.88343906402588],["▁müğənni",-13.88343906402588],["▁vücut",-13.88343906402588],["▁ΠΑΟΚ",-13.88343906402588],["▁відновлення",-13.88343906402588],["▁някакъв",-13.88343906402588],["▁подоцна",-13.88343906402588],["▁приблизно",-13.88343906402588],["▁сақтандыру",-13.88343906402588],["▁съжаление",-13.88343906402588],["▁удоволствие",-13.88343906402588],["▁Աշոտ",-13.88343906402588],["▁ئېنىق",-13.88343906402588],["▁فرودگاه",-13.88343906402588],["▁ورسره",-13.88343906402588],["▁कार्रवाई",-13.88343906402588],["▁বিনামূল্যে",-13.88343906402588],["▁சர்வதேச",-13.88343906402588],["▁ಗೂಗಲ್",-13.88343906402588],["▁အေမရိကန္",-13.88343906402588],["▁훨씬",-13.88343906402588],["▁Aldundia",-13.883440017700195],["▁ուրախ",-13.883440017700195],["▁befolyásol",-13.883440971374512],["▁Дмитрий",-13.883440971374512],["▁अतिशय",-13.883440971374512],["▁మ్యాచ్",-13.883440971374512],["▁δέρμα",-13.883442878723145],["▁строителство",-13.883442878723145],["▁وردپرس",-13.88344383239746],["▁Миколаїв",-13.883444786071776],["ضت",-13.883445739746094],["▁رہائش",-13.883445739746094],["▁alegría",-13.883448600769045],["▁nettleser",-13.883448600769045],["▁specjalny",-13.883448600769045],["▁vaizdo",-13.88344955444336],["▁fysiske",-13.883450508117676],["▁jadwal",-13.883452415466309],["▁२०७३",-13.883456230163574],["▁சொந்த",-13.883459091186523],["▁vakillari",-13.883460998535156],["▁Oktoba",-13.883462905883787],["咳",-13.883463859558104],["▁ବିଜୁ",-13.883472442626951],["▁mainstream",-13.883474349975586],["▁літак",-13.88347625732422],["▁welchen",-13.883478164672852],["▁رسیدگی",-13.88348388671875],["▁енергії",-13.883484840393066],["▁Qas",-13.8834867477417],["▁godkendt",-13.883487701416016],["▁තෙල්",-13.883489608764648],["▁doping",-13.883501052856444],["▁актілер",-13.883506774902344],["▁Још",-13.88352108001709],["▁Kembali",-13.883524894714355],["▁നിലയില്",-13.883525848388672],["▁companiilor",-13.883540153503418],["▁ਮਾਰਚ",-13.883546829223633],["▁الفساد",-13.883551597595217],["▁ubicación",-13.883567810058594],["▁utilisateurs",-13.883575439453123],["נקודת",-13.883584976196287],["▁Patricia",-13.883591651916504],["▁എന്നതാണ്",-13.88359546661377],["▁להשיג",-13.88361358642578],["▁izazov",-13.883614540100098],["▁लोगो",-13.883614540100098],["▁işıq",-13.88364028930664],["▁शेवट",-13.88365077972412],["▁සේනා",-13.88366985321045],["▁izračun",-13.883686065673828],["▁agencia",-13.883689880371094],["▁येतो",-13.88371467590332],["▁pirtûk",-13.883727073669434],["▁сайжруулах",-13.883736610412598],["▁прогул",-13.883739471435549],["▁usuaris",-13.883749008178713],["▁faktu",-13.883760452270508],["▁காரணமாக",-13.883769035339355],["▁Comisia",-13.883792877197266],["▁साँझ",-13.88381004333496],["▁широм",-13.883819580078123],["▁ulët",-13.88382625579834],["▁Промени",-13.883828163146973],["ائك",-13.883832931518556],["▁dużych",-13.88383674621582],["kiwemo",-13.883846282958984],["▁मिलता",-13.883848190307615],["▁Hiri",-13.8838529586792],["▁пеша",-13.883855819702148],["▁druhá",-13.883862495422363],["erija",-13.883864402770996],["▁ღამე",-13.883871078491213],["ವಾಯಿತು",-13.883893013000488],["קוו",-13.88389492034912],["▁mecanismos",-13.883898735046388],["ặng",-13.883906364440918],["▁የጠ",-13.883909225463867],["▁meenu",-13.883910179138184],["ബൂ",-13.883931159973145],["▁Individual",-13.883936882019045],["▁Vendi",-13.883951187133787],["▁jujur",-13.883955001831056],["▁familii",-13.883957862854004],["▁comezar",-13.883974075317385],["▁żyw",-13.883976936340332],["的时代",-13.884017944335938],["Matt",-13.884021759033203],["▁თვე",-13.88402271270752],["▁о
тличи",-13.884023666381836],["pakkumis",-13.884035110473633],["▁Образ",-13.884035110473633],["▁świec",-13.884047508239746],["МР",-13.884048461914062],["ណ៍",-13.884048461914062],["▁mapenzi",-13.884082794189451],["ekspert",-13.88410186767578],["Martin",-13.88410472869873],["stavila",-13.88413143157959],["HEL",-13.884160995483398],["продукт",-13.884162902832031],["uổi",-13.884174346923828],["DDD",-13.88417911529541],["▁ದೂರು",-13.884187698364258],["▁hatırlat",-13.884194374084473],["▁Могу",-13.884206771850586],["▁КОР",-13.884231567382812],["▁calculator",-13.884237289428713],["▁numerose",-13.884239196777344],["▁lidah",-13.884248733520508],["등급",-13.884270668029783],["שיפור",-13.88428783416748],["▁හෙළි",-13.884288787841797],["▁Candi",-13.884312629699709],["▁अत",-13.884313583374023],["▁ויו",-13.884342193603516],["一個月",-13.884353637695312],["▁Sabab",-13.884361267089844],["γραμμα",-13.884368896484377],["ನಾಗಿ",-13.88437557220459],["▁должности",-13.884401321411133],["▁товарищ",-13.884403228759766],["的價格",-13.884404182434082],["▁Elə",-13.884408950805664],["▁vaheta",-13.884410858154297],["▁senatu",-13.884428977966309],["மையான",-13.884432792663574],["▁tå",-13.884432792663574],["已经是",-13.88443660736084],["σιμη",-13.884437561035156],["svärd",-13.884441375732422],["аттык",-13.884449005126951],["が出る",-13.884449005126951],["▁péri",-13.884469032287598],["▁Antti",-13.884479522705078],["▁медии",-13.884491920471191],["▁comerciales",-13.88450813293457],["tellaan",-13.884531021118164],["▁மறை",-13.88453483581543],["▁доц",-13.884538650512695],["注意力",-13.884539604187012],["▁Նյու",-13.88454246520996],["▁Foar",-13.884549140930176],["▁ناپ",-13.884552001953123],["▁сахар",-13.884566307067873],["ИРА",-13.88457489013672],["▁mintea",-13.884577751159668],["▁Ню",-13.884577751159668],["▁dramatic",-13.884598731994627],["neuvo",-13.88462257385254],["▁Intra",-13.88464069366455],["fragt",-13.8846435546875],["擬",-13.884674072265623],["▁недель",-13.88467502593994],["وشی",-13.884682655334473],["▁koren",-13.884683609008787],["▁uzata",-13.884692192077637],["▁galeria",-13.884695053100586],["▁великог",-13.884710311889648],["רמי",-13.884733200073242],["니스",-13.884737968444824],["▁വാങ്ങി",-13.88473892211914],["▁կտա",-13.884740829467772],["▁публично",-13.884743690490724],["ಸೋ",-13.884751319885254],["vajanje",-13.884754180908203],["०३",-13.884872436523438],["দল",-13.88488483428955],["▁ĉiujn",-13.88489055633545],["▁የጎ",-13.88489818572998],["▁Petrov",-13.884900093078612],["▁Essay",-13.884902954101562],["▁Budapesten",-13.884906768798828],["仍是",-13.884906768798828],["▁baseado",-13.884928703308104],["▁vnútorn",-13.884936332702637],["▁turgan",-13.884958267211914],["▁бодол",-13.884971618652344],["▁kiirus",-13.884974479675291],["োপ",-13.88498592376709],["▁Преглед",-13.884995460510254],["▁vaatama",-13.884998321533203],["▁12,5",-13.885008811950684],["▁ചെന്ന",-13.885017395019531],["▁fortsetter",-13.885055541992188],["▁əvvəlki",-13.885069847106934],["άτε",-13.885077476501465],["בורג",-13.885077476501465],["cuje",-13.885087013244627],["▁vyššie",-13.885088920593262],["reklam",-13.885107040405272],["UKO",-13.88510799407959],["ของพวกเขา",-13.88515567779541],["▁ഇന്ന",-13.885211944580078],["ثبات",-13.885224342346191],["시의",-13.885224342346191],["修復",-13.885233879089355],["UNTA",-13.88524055480957],["હિત",-13.88524341583252],["▁nagrada",-13.885265350341797],["▁obiteljsk",-13.88528060913086],["▁equivalente",-13.885282516479492],["▁kohaliku",-13.8853120803833],["내용",-13.885321617126465],["терорист",-13.885355949401855],["ው።",-13.885355949401855],["▁televisie",-13.8
85383605957031],["▁trevligt",-13.885393142700195],["▁gerektiği",-13.885397911071776],["▁vrtić",-13.885400772094728],["▁activități",-13.885438919067385],["队长",-13.885442733764648],["дух",-13.885454177856444],["▁بازیکنان",-13.885467529296877],["▁Iată",-13.88547706604004],["することも",-13.88548469543457],["୧୨",-13.885520935058594],["449",-13.885546684265137],["▁vindeca",-13.885547637939451],["▁холбооны",-13.88554859161377],["തെല്ലാം",-13.885563850402832],["▁Rubrik",-13.88556671142578],["byś",-13.885581016540527],["ಭಾಷ",-13.88559341430664],["क्षु",-13.88560962677002],["лагын",-13.885612487792969],["ിക്ക്",-13.88561725616455],["▁gestiona",-13.88565444946289],["▁Begleit",-13.885672569274902],["▁Nawaz",-13.885672569274902],["magnet",-13.885697364807127],["ებოდნენ",-13.885705947875977],["▁addas",-13.885717391967772],["նջ",-13.885740280151367],["▁іншим",-13.885741233825684],["もらい",-13.885751724243164],["▁явуул",-13.885759353637695],["ફ્ર",-13.885765075683594],["▁비해",-13.885794639587402],["свобод",-13.88579559326172],["άτα",-13.885796546936035],["▁hulka",-13.8858060836792],["561",-13.885823249816896],["ivité",-13.885831832885742],["็กซ์",-13.885833740234377],["ķa",-13.885860443115234],["更快",-13.885893821716309],["当たり前",-13.885910987854004],["▁ідзе",-13.88592529296875],["▁debatten",-13.885931015014648],["▁museu",-13.885940551757812],["പരമായ",-13.88595485687256],["▁أين",-13.886011123657228],["▁ποσό",-13.88602066040039],["ేమో",-13.886022567749023],["▁настаняване",-13.886032104492188],["▁Куба",-13.886054039001465],["▁પુત્ર",-13.886056900024414],["▁약간",-13.886061668395996],["▁დადგ",-13.886063575744627],["▁pindah",-13.886068344116213],["▁Ось",-13.886069297790527],["一千",-13.886069297790527],["漠",-13.88608741760254],["▁básicas",-13.886091232299805],["▁تفر",-13.88609790802002],["反馈",-13.886098861694336],["聪明",-13.886104583740234],["▁страха",-13.886107444763184],["更重要的是",-13.8861083984375],["▁mərkəz",-13.88612174987793],["为你",-13.886130332946776],["賜",-13.886136054992676],["equilibri",-13.88616180419922],["慶祝",-13.88616180419922],["รบกวน",-13.88616943359375],["",-13.886170387268066],["ഢ",-13.886170387268066],["႒",-13.886170387268066],["ፏ",-13.886170387268066],["▁Gestaltung",-13.886170387268066],["▁Notícies",-13.886170387268066],["▁trấn",-13.886170387268066],["▁αναζήτηση",-13.886170387268066],["▁властивості",-13.886170387268066],["▁поддержку",-13.886170387268066],["▁رپوټ",-13.886170387268066],["▁अंतर्गत",-13.886170387268066],["▁గౌరవ",-13.886170387268066],["▁బ్రేకింగ్",-13.886170387268066],["▁స్టైల్",-13.886170387268066],["▁ആത്മഹത്യ",-13.886170387268066],["▁කොච්චර",-13.886170387268066],["쌍",-13.886170387268066],["небудзь",-13.886171340942385],["▁Wystarczy",-13.886171340942385],["▁anstataŭ",-13.886171340942385],["▁kvazaŭ",-13.886171340942385],["▁zwembad",-13.886171340942385],["▁омогући",-13.886171340942385],["▁ներգրավ",-13.886171340942385],["▁تصادف",-13.886171340942385],["▁روایات",-13.886171340942385],["▁দায়িত্ব",-13.886171340942385],["▁দীর্ঘ",-13.886171340942385],["▁පිස්සු",-13.886171340942385],["▁กรกฎาคม",-13.886171340942385],["勤務",-13.886171340942385],["▁gyntaf",-13.8861722946167],["▁предупреди",-13.8861722946167],["▁جارہا",-13.8861722946167],["▁زیارت",-13.8861722946167],["▁قهوه",-13.8861722946167],["▁हस्ताक्षर",-13.8861722946167],["വർഗ്ഗ",-13.886173248291016],["▁نافذ",-13.886173248291016],["▁టీఆర్ఎస్",-13.886173248291016],["▁አይችልም",-13.886173248291016],["▁yogurt",-13.886174201965332],["▁ሚዲያ",-13.886174201965332],["เดิมพัน",-13.886175155639648],["▁सलाह",-13.886175155639648],["▁প্রশাসন",-13.886176109313965],["▁vy
skúša",-13.88617706298828],["▁აკეთებ",-13.88617706298828],["▁Մայր",-13.886178016662598],["▁رفاه",-13.886178016662598],["▁виникнення",-13.886178970336914],["▁გაერთიანებ",-13.886180877685549],["▁ezaugarri",-13.886181831359863],["▁παραμένει",-13.886181831359863],["▁зберігання",-13.886181831359863],["▁атрымала",-13.886183738708496],["▁огромни",-13.886184692382812],["▁doirasida",-13.886186599731444],["▁высокі",-13.886190414428713],["▁acelaşi",-13.886192321777344],["▁पर्याय",-13.886194229125977],["▁qanunsuz",-13.886195182800291],["beharrezkoa",-13.88619613647461],["▁հանրային",-13.886197090148926],["▁Hardware",-13.886199951171877],["▁σημαντικά",-13.886199951171877],["▁ବାବୁ",-13.886199951171877],["▁ലോഡ്",-13.886199951171877],["▁עוסק",-13.886204719543455],["頃から",-13.886205673217772],["▁Oliveira",-13.886208534240724],["▁henkilöstö",-13.886223793029783],["▁студэнтаў",-13.886231422424316],["▁iuxta",-13.886237144470217],["▁мировой",-13.886242866516112],["дадзена",-13.88624382019043],["▁назвать",-13.88624668121338],["▁mengalir",-13.886249542236328],["▁perpetua",-13.88625717163086],["▁renkli",-13.886260986328123],["▁изложени",-13.886269569396973],["Беларус",-13.886275291442873],["▁applicazioni",-13.88627815246582],["דני",-13.886282920837402],["software",-13.886290550231934],["▁шкіл",-13.8862943649292],["▁sporočilo",-13.886298179626465],["eerib",-13.88630199432373],["▁باشم",-13.886303901672363],["▁patung",-13.886322975158691],["▁Ikea",-13.88632869720459],["▁навигаци",-13.886334419250488],["▁reader",-13.886347770690918],["▁Szkole",-13.886351585388184],["▁dubbio",-13.886364936828612],["▁укупно",-13.886388778686523],["ការពារ",-13.886392593383787],["φάλ",-13.886397361755373],["▁אלעס",-13.88641357421875],["▁Jeszcze",-13.886414527893066],["▁kontrollere",-13.886427879333496],["▁ಚಿತ್ರಗಳು",-13.886444091796877],["▁კახ",-13.886445999145508],["▁Xoán",-13.886448860168455],["▁सोही",-13.886467933654783],["湖北",-13.886469841003418],["▁artifici",-13.886472702026367],["▁Falk",-13.886484146118164],["▁tətbiqi",-13.88648796081543],["▁ברמה",-13.886492729187012],["▁układu",-13.886496543884276],["▁منحصر",-13.886497497558594],["▁баланың",-13.88650131225586],["▁اميد",-13.886513710021973],["వైపు",-13.88652515411377],["▁greke",-13.88652515411377],["▁diskutere",-13.886539459228516],["wens",-13.886542320251465],["▁לידי",-13.886547088623049],["කරණ",-13.88654899597168],["fluent",-13.886560440063477],["▁الجوية",-13.886574745178224],["▁മതം",-13.886577606201172],["記事を",-13.886581420898438],["▁ističe",-13.886588096618652],["▁көшесі",-13.886590957641602],["▁Crne",-13.88659381866455],["θου",-13.886609077453612],["▁Primaria",-13.886626243591309],["мыл",-13.886638641357422],["್ರೆ",-13.886646270751951],["▁solucion",-13.886651992797852],["▁Кредит",-13.886659622192385],["MEI",-13.886662483215332],["これら",-13.88670539855957],["कुठ",-13.886712074279783],["▁15:30",-13.886735916137695],["▁ricco",-13.886736869812012],["▁Brasileira",-13.886754989624023],["stätte",-13.886761665344238],["▁VPorn",-13.886762619018556],["▁дайындық",-13.8867769241333],["日記",-13.886795997619627],["ជាមួយនឹង",-13.88680934906006],["▁చేతుల",-13.88681411743164],["▁എടുത്തു",-13.886836051940918],["▁വിട്ട",-13.88685703277588],["▁humbje",-13.886863708496094],["wyłącz",-13.886890411376951],["一つの",-13.886903762817385],["性質",-13.886903762817385],["▁каждому",-13.886910438537598],["ക്യ",-13.886956214904783],["▁dicha",-13.886957168579102],["ସ୍ଥିତ",-13.88698673248291],["用來",-13.887011528015137],["▁Zahid",-13.887027740478516],["ដេ",-13.887059211730955],["काश",-13.88706111907959],["▁Officer",-13.8
8707160949707],["uudella",-13.88707447052002],["إبداع",-13.887094497680664],["▁BBM",-13.88711166381836],["▁õpeta",-13.887113571166992],["መሳ",-13.88711643218994],["ਚਰ",-13.88711929321289],["крытие",-13.887121200561523],["ుల్లో",-13.88713836669922],["有一种",-13.887155532836914],["дици",-13.887157440185549],["▁тысячи",-13.887157440185549],["thile",-13.887174606323242],["pentru",-13.887184143066406],["ოვან",-13.887200355529783],["看來",-13.887212753295898],["▁berilgan",-13.887224197387695],["▁dibayar",-13.887226104736328],["ベスト",-13.887246131896973],["נגר",-13.887252807617188],["▁Schwa",-13.8872652053833],["نضمام",-13.887276649475098],["▁Lucah",-13.887276649475098],["ীয়া",-13.887292861938477],["▁بیدار",-13.88730812072754],["torna",-13.887310028076172],["▁uporabnišk",-13.887321472167969],["▁23:00",-13.887324333190918],["кок",-13.887347221374512],["▁kualifik",-13.887367248535156],["terus",-13.887380599975586],["▁мења",-13.88738250732422],["ንበት",-13.887398719787598],["▁ოთხ",-13.887406349182127],["แปร",-13.887429237365724],["▁Griff",-13.88743019104004],["▁Convention",-13.887499809265137],["קומה",-13.88750457763672],["時から",-13.887506484985352],["▁mədəni",-13.887520790100098],["▁σημαντική",-13.887523651123049],["わかって",-13.887529373168944],["▁akhirat",-13.887535095214844],["发言人",-13.88753604888916],["▁kurām",-13.887543678283691],["功效",-13.88755989074707],["មួយចំនួន",-13.887568473815918],["▁aeroporto",-13.887585639953612],["▁Genera",-13.88760757446289],["informació",-13.887618064880373],["การตลาด",-13.88762664794922],["▁Qaar",-13.887629508972168],["ונן",-13.887645721435549],["▁solare",-13.887653350830078],["और",-13.88766384124756],["ілуі",-13.887673377990724],["▁nødvendigt",-13.887721061706545],["▁chô",-13.887737274169922],["നാഥ",-13.887772560119627],["▁որում",-13.887787818908691],["▁korporat",-13.887797355651855],["貼心",-13.887824058532717],["▁кыйын",-13.887826919555664],["1900",-13.887828826904297],["▁западни",-13.88783359527588],["▁Sancto",-13.887847900390623],["▁peuple",-13.887847900390623],["ÁNY",-13.887866973876951],["▁manifestation",-13.887871742248535],["▁Camar",-13.887884140014648],["▁tränar",-13.88792896270752],["▁Susu",-13.887941360473633],["▁megnéz",-13.887946128845217],["प्रकाश",-13.887965202331545],["▁koox",-13.887969017028809],["ೋದು",-13.887991905212402],["▁vepra",-13.8879976272583],["▁ಕಾಡ",-13.888021469116213],["▁Scot",-13.888023376464844],["▁pruži",-13.888031005859377],["Pak",-13.888039588928224],["▁написать",-13.888059616088867],["▁తీసుకొ",-13.888079643249512],["પ્ત",-13.888084411621094],["得出",-13.888087272644045],["▁ಕಾಮ",-13.888096809387209],["klubi",-13.888100624084473],["▁javít",-13.88810920715332],["▁faigh",-13.888115882873535],["▁Klienta",-13.8881196975708],["▁Aloe",-13.888127326965332],["さない",-13.888128280639648],["▁Zadar",-13.888131141662598],["▁dirigida",-13.88815212249756],["▁шер",-13.888161659240724],["드로",-13.888187408447266],["▁अट",-13.888200759887695],["▁животно",-13.888205528259276],["391",-13.88820743560791],["自分は",-13.88820743560791],["▁tədbirləri",-13.888213157653809],["Алтан",-13.888263702392578],["▁Ρο",-13.888325691223145],["▁беларуска",-13.888327598571776],["▁commentaire",-13.88832950592041],["高手",-13.888335227966309],["▁idején",-13.888336181640623],["▁השכר",-13.888338088989258],["सेवक",-13.888339042663574],["▁Avem",-13.888361930847168],["▁സെന്",-13.888368606567385],["▁dhawaa",-13.888384819030762],["▁sheet",-13.888395309448242],["▁фудбалер",-13.888400077819824],["අපි",-13.88840103149414],["▁tittar",-13.888407707214355],["▁Gray",-13.888419151306152],["▁praktiske",-13.88
8442993164062],["典型",-13.88845920562744],["▁Huo",-13.888463973999023],["▁استرس",-13.888479232788086],["이라며",-13.888486862182615],["▁síður",-13.888489723205566],["▁athugasemd",-13.888511657714844],["▁anunciado",-13.888543128967283],["▁Analiz",-13.888544082641602],["▁додаде",-13.888558387756348],["gën",-13.888562202453612],["ໂປ",-13.88857650756836],["годишно",-13.888583183288574],["▁Dinam",-13.888585090637209],["▁amarga",-13.888587951660156],["ksista",-13.888595581054688],["▁Definition",-13.888607025146484],["▁хуралд",-13.888630867004396],["▁кредити",-13.88863468170166],["▁جمہوری",-13.888714790344238],["▁Ceann",-13.888721466064451],["▁rezultatele",-13.888723373413086],["luniau",-13.888737678527832],["sætter",-13.888751029968262],["ുകൊണ്ട്",-13.888766288757324],["▁دیتی",-13.888775825500488],["暂",-13.888781547546388],["中国科学院",-13.888798713684082],["ให้เห็น",-13.888809204101562],["pensi",-13.888824462890623],["דרכה",-13.88882827758789],["繁荣",-13.888835906982422],["▁маси",-13.888839721679688],["ZIE",-13.888845443725586],["mailadresse",-13.888860702514648],["挣",-13.888867378234863],["厳",-13.888879776000977],["▁ibunya",-13.888887405395508],["▁аюул",-13.88888931274414],["▁personalizado",-13.888891220092772],["お互い",-13.888893127441406],["フランス",-13.888895988464355],["ಳಿಗೆ",-13.888901710510254],["動態",-13.888908386230469],["▁Facultad",-13.888910293579102],["▁Lawrence",-13.888910293579102],["▁Sembilan",-13.888910293579102],["▁erlaubt",-13.888910293579102],["▁membersihkan",-13.888910293579102],["▁Σχόλια",-13.888910293579102],["▁διοίκηση",-13.888910293579102],["▁ξεκίνησε",-13.888910293579102],["▁Порядок",-13.888910293579102],["▁воспользоваться",-13.888910293579102],["▁дъщеря",-13.888910293579102],["▁лауреат",-13.888910293579102],["▁моладзі",-13.888910293579102],["▁нуклеарн",-13.888910293579102],["▁сәуір",-13.888910293579102],["▁тәртіп",-13.888910293579102],["▁կապակցությամբ",-13.888910293579102],["▁القيادة",-13.888910293579102],["▁جایگزین",-13.888910293579102],["▁වර්තමාන",-13.888910293579102],["▁የአማርኛ",-13.888910293579102],["▁រង្ស៊ី",-13.888910293579102],["੪",-13.888911247253418],["▁Intézet",-13.888911247253418],["▁Magdalena",-13.888911247253418],["▁erakusketa",-13.888911247253418],["▁цілому",-13.888911247253418],["▁ئالدىنقى",-13.888911247253418],["▁ग्रंथ",-13.888911247253418],["ฑ",-13.888912200927734],["▁certifikat",-13.888912200927734],["▁Дизайн",-13.888912200927734],["▁хувцас",-13.888912200927734],["▁majetku",-13.888914108276367],["אלבום",-13.888915061950684],["▁אמנם",-13.888915061950684],["▁mobiili",-13.888916015625],["▁належить",-13.888916015625],["▁ຕົນ",-13.888916969299316],["▁أمريكا",-13.888917922973633],["ยอดนิยม",-13.88891887664795],["▁θέλετε",-13.888921737670898],["▁இதனை",-13.888922691345217],["▁kehendak",-13.888924598693848],["▁dâu",-13.888925552368164],["นั่นเอง",-13.88892650604248],["▁farko",-13.88892936706543],["▁ömür",-13.88892936706543],["▁нийтлэл",-13.888930320739746],["▁reģionā",-13.88893222808838],["▁çıxıb",-13.88893222808838],["▁Exerci",-13.888935089111328],["▁المسجد",-13.88893699645996],["▁Departemen",-13.888938903808594],["▁igapäeva",-13.888938903808594],["▁स्वर्ग",-13.888940811157228],["[22]",-13.888941764831545],["ΑΙ",-13.888941764831545],["▁хареса",-13.888941764831545],["▁근무",-13.888941764831545],["▁martxan",-13.888943672180176],["▁patrocina",-13.888944625854492],["Без",-13.888946533203123],["▁காந்தி",-13.888948440551758],["▁uchunguzi",-13.88895320892334],["▁scientia",-13.888955116271973],["▁sorri",-13.888955116271973],["▁Xabier",-13.888958930969238],["မှူး",-13.88896369934082],["グル
",-13.888965606689451],["▁nimetatud",-13.88896942138672],["▁समेट",-13.888973236083984],["▁amizade",-13.8889741897583],["▁స్థాన",-13.88897705078125],["▁신규",-13.888978004455566],["yxati",-13.888995170593262],["ъгъл",-13.889007568359377],["▁wersja",-13.889009475708008],["帶來的",-13.889012336730955],["하려면",-13.889022827148438],["▁fjar",-13.88902473449707],["▁vantaggio",-13.889041900634766],["下半年",-13.889047622680664],["▁Prešov",-13.88905143737793],["världen",-13.88905906677246],["Barcelona",-13.889063835144045],["▁centrál",-13.889081001281738],["gip",-13.889086723327637],["▁Alber",-13.889087677001951],["▁Vapaa",-13.889087677001951],["строения",-13.889092445373535],["▁यामुळे",-13.889093399047852],["深夜",-13.889093399047852],["▁Бидний",-13.889098167419434],["▁አቅም",-13.889098167419434],["▁అటు",-13.88910675048828],["▁आराम",-13.889107704162598],["▁lleno",-13.889110565185549],["Enter",-13.889116287231444],["▁వీడియోలు",-13.889119148254396],["▁Öster",-13.889121055603027],["▁притвор",-13.889142990112305],["▁konzol",-13.88914394378662],["▁благодарност",-13.889161109924316],["▁оформлення",-13.889161109924316],["▁natija",-13.889169692993164],["ሸት",-13.889187812805176],["▁šef",-13.889187812805176],["▁bokstav",-13.889190673828123],["使其",-13.889190673828123],["▁bedoel",-13.889202117919922],["▁propad",-13.889216423034668],["▁Həsən",-13.889220237731934],["धारी",-13.889254570007324],["▁zarząd",-13.889256477355955],["▁단어",-13.889259338378906],["érő",-13.88926124572754],["▁Hodi",-13.889272689819336],["розуміння",-13.889281272888184],["▁Barisan",-13.889284133911133],["▁näitab",-13.88928508758545],["άνο",-13.889287948608398],["▁eventually",-13.88929557800293],["▁մանկ",-13.889297485351562],["▁банкны",-13.88930606842041],["రాజ",-13.889318466186523],["▁wusste",-13.88933277130127],["ย่อ",-13.889344215393066],["▁ವೀರ",-13.889348983764648],["▁гімн",-13.889360427856444],["▁otorga",-13.889363288879396],["эгдсэн",-13.88936996459961],["▁Rien",-13.889370918273926],["▁AAA",-13.88937282562256],["▁qayğı",-13.889373779296877],["▁punuar",-13.889376640319824],["▁ကိုယ့္",-13.88938045501709],["▁מחירים",-13.889404296875],["▁distrito",-13.889416694641112],["אמי",-13.889423370361328],["bungkus",-13.88944149017334],["▁viitorul",-13.889443397521973],["owałam",-13.889461517333984],["პოლიტ",-13.889463424682615],["▁Mundu",-13.88947582244873],["부는",-13.889491081237791],["▁educat",-13.88949489593506],["слыша",-13.889498710632324],["reyn",-13.88951587677002],["▁Вами",-13.889525413513184],["▁menekül",-13.889531135559082],["▁ανθ",-13.88953971862793],["▁odottaa",-13.889544486999512],["▁خەت",-13.889561653137209],["▁financement",-13.88957405090332],["ዝግ",-13.889588356018066],["▁негативни",-13.889596939086914],["▁navedeno",-13.889606475830078],["▁проходил",-13.889629364013672],["▁PILI",-13.88963508605957],["▁يعلم",-13.889636993408203],["පිය",-13.889657020568848],["юль",-13.889667510986328],["mekko",-13.88969612121582],["▁miezi",-13.889699935913086],["close",-13.8897123336792],["▁beseda",-13.889715194702148],["▁кээ",-13.889719009399414],["Nord",-13.889720916748049],["▁מבית",-13.889724731445312],["▁meelde",-13.889728546142578],["▁sovint",-13.889734268188477],["▁monumental",-13.889735221862791],["জ্ঞ",-13.889738082885742],["▁সাল",-13.889748573303224],["▁določa",-13.88975429534912],["▁skrin",-13.889756202697754],["iềm",-13.889764785766602],["▁balita",-13.88976764678955],["▁arribat",-13.889769554138184],["▁қарау",-13.8897705078125],["▁Øster",-13.889776229858398],["▁creada",-13.889801025390623],["▁kacang",-13.88980197906494],["זות",-13.889811515808104],["▁உரு",
-13.889829635620115],["ରୋଧ",-13.889838218688965],["546",-13.88983917236328],["临时",-13.88984489440918],["▁Siyah",-13.889849662780762],["рф",-13.889874458312988],["▁Tyto",-13.889892578125],["कम्प",-13.88992691040039],["▁pör",-13.889941215515137],["▁сцени",-13.88994312286377],["파일",-13.889949798583984],["▁chả",-13.889955520629885],["০৪",-13.889959335327148],["▁accede",-13.88996696472168],["ետադարձ",-13.889988899230955],["▁अनुप",-13.889988899230955],["▁promova",-13.890003204345703],["▁privée",-13.890021324157717],["▁എന്താ",-13.890021324157717],["▁synny",-13.890032768249512],["▁almenning",-13.890043258666992],["ଧର",-13.89004611968994],["▁uspostav",-13.890048027038574],["ทําอะไร",-13.890069961547852],["▁அங்க",-13.890093803405762],["▁hekim",-13.890124320983888],["▁lojal",-13.890128135681152],["skuld",-13.890151023864746],["▁lihas",-13.890178680419922],["שעה",-13.890179634094238],["セル",-13.890188217163086],["▁Abid",-13.89019012451172],["ьба",-13.890198707580566],["▁Rész",-13.890215873718262],["ευτική",-13.890217781066896],["▁DVB",-13.89022731781006],["▁gauzak",-13.890233993530272],["kkonen",-13.890238761901855],["ጥራት",-13.890246391296388],["的优势",-13.890280723571776],["▁ଦିଗ",-13.890281677246094],["▁գլխ",-13.890284538269045],["▁ফিরে",-13.890303611755373],["▁पक्क",-13.890304565429688],["なのに",-13.890314102172852],["ОНА",-13.89033317565918],["แข็งแรง",-13.89033317565918],["भग",-13.890345573425291],["ใหม่ๆ",-13.890360832214355],["hnúť",-13.890361785888672],["▁favorita",-13.890379905700684],["▁لعل",-13.890385627746582],["008",-13.890388488769531],["▁učiteľ",-13.89040184020996],["▁ცენტრის",-13.890411376953123],["▁Със",-13.890421867370604],["▁آه",-13.890421867370604],["▁valja",-13.890475273132324],["▁karşılaştı",-13.89051342010498],["ക്കെട്ട",-13.890521049499512],["▁պատերազմի",-13.89052677154541],["तीन",-13.890544891357422],["▁Украин",-13.890545845031738],["▁colectivos",-13.890564918518066],["ДЗ",-13.890573501586914],["帰って",-13.890583992004396],["▁ನುಡಿ",-13.890584945678713],["▁Avto",-13.890585899353027],["▁aanval",-13.890592575073242],["เล็กๆ",-13.890595436096191],["▁closed",-13.890604972839355],["ತೋ",-13.890630722045898],["156",-13.89064121246338],["▁sultan",-13.890643119812012],["▁tjene",-13.890647888183594],["стратег",-13.89066219329834],["▁эрхийн",-13.890673637390137],["▁הגדולה",-13.890674591064451],["▁parhau",-13.890681266784668],["▁направо",-13.8906831741333],["▁यावर",-13.890745162963867],["塑造",-13.890746116638184],["▁വലിച്ച",-13.890806198120115],["вядома",-13.890807151794434],["гддаг",-13.890811920166016],["▁запази",-13.890841484069824],["ríem",-13.890850067138672],["今晚",-13.890876770019531],["sistencia",-13.89088249206543],["不断的",-13.890899658203123],["▁Správ",-13.890904426574709],["▁Guia",-13.89093017578125],["▁Данил",-13.89093017578125],["2,3",-13.8909330368042],["▁xatırla",-13.890965461730955],["▁ათას",-13.890979766845703],["Љуб",-13.89098072052002],["▁amesteca",-13.890993118286133],["▁tiendas",-13.89099407196045],["▁Riga",-13.890995025634766],["▁amaç",-13.891008377075195],["▁bezpośredni",-13.89101219177246],["아야",-13.89102554321289],["ிக்கும்",-13.891029357910156],["▁kolmas",-13.89104175567627],["▁கற்ப",-13.891050338745115],["▁ਐਨ",-13.8910551071167],["SPEC",-13.891064643859863],["に必要な",-13.891105651855469],["нцев",-13.891121864318848],["▁курал",-13.891139030456545],["వరి",-13.89118480682373],["▁teikiam",-13.891196250915527],["▁alanları",-13.891201972961426],["▁bewys",-13.89121150970459],["ጥሮ",-13.89121437072754],["受到了",-13.891233444213867],["czona",-13.891243934631348],["წეს",-13.891281127929688
],["▁איד",-13.891283988952637],["▁törəd",-13.891292572021484],["▁güzellik",-13.891298294067385],["▁določene",-13.8912992477417],["▁Hapa",-13.891301155090332],["▁നന്നായ",-13.891312599182127],["▁bv",-13.891313552856444],["▁výrobce",-13.891321182250977],["▁gəlmiş",-13.891361236572266],["▁ریاض",-13.891387939453123],["色々",-13.891403198242188],["▁minimaal",-13.891404151916504],["族群",-13.89141082763672],["▁לבר",-13.891438484191896],["▁аўтара",-13.891451835632324],["bilmente",-13.891457557678224],["شکل",-13.891491889953612],["▁ejecut",-13.891498565673828],["前进",-13.89150333404541],["▁pracovník",-13.8915433883667],["▁conquistar",-13.891544342041016],["▁سلف",-13.891551971435549],["▁കവി",-13.891596794128418],["▁যাওয়া",-13.891597747802734],["誼",-13.89161491394043],["jenga",-13.89161777496338],["របួស",-13.891618728637695],["億美元",-13.891618728637695],["苑",-13.891629219055176],["▁FAC",-13.891630172729492],["▁nástroje",-13.89163303375244],["▁Σο",-13.891640663146973],["▁അശ്ലീല",-13.891656875610352],["трымліваць",-13.891657829284668],["Հրապարակ",-13.891657829284668],["లక్ష్మి",-13.891657829284668],["መስከረም",-13.891657829284668],["ማኅበረ",-13.891657829284668],["▁atbrīvo",-13.891657829284668],["▁dërguar",-13.891657829284668],["▁samhälls",-13.891657829284668],["▁Þannig",-13.891657829284668],["▁някоя",-13.891657829284668],["▁եղանակ",-13.891657829284668],["▁ունեցավ",-13.891657829284668],["▁आधिकारिक",-13.891657829284668],["▁त्यस्तो",-13.891657829284668],["▁पर्यावरण",-13.891657829284668],["▁सधै",-13.891657829284668],["▁ନମ୍ବର",-13.891657829284668],["▁இலக்கிய",-13.891657829284668],["▁தமிழ்நாடு",-13.891657829284668],["▁საუკუნის",-13.891657829284668],["hlásiť",-13.891658782958984],["▁καρκίνο",-13.891658782958984],["▁मुसलमान",-13.891658782958984],["▁সচিব",-13.891658782958984],["▁Jawatankuasa",-13.8916597366333],["▁αργότερα",-13.8916597366333],["▁لټون",-13.8916597366333],["▁جہاز",-13.891660690307615],["▁объяснить",-13.891661643981934],["▁ਸਰੀਰ",-13.89166259765625],["▁պլան",-13.891664505004885],["ብርሃን",-13.8916654586792],["▁जेव्हा",-13.891666412353516],["▁defensor",-13.891667366027832],["▁ciąży",-13.891668319702148],["▁ತಿಳಿಸಿದ್ದಾರೆ",-13.891669273376465],["▁güvenilir",-13.89167022705078],["ဇာတ္",-13.891671180725098],["ючыся",-13.89167308807373],["▁zerbait",-13.891674041748049],["▁поръчки",-13.891674041748049],["▁tarkoitettu",-13.891676902770996],["▁תגיות",-13.891676902770996],["▁Қы",-13.891678810119627],["▁ανέφερε",-13.891680717468262],["▁চীন",-13.891681671142578],["เกล้า",-13.891682624816896],["▁Pardubic",-13.891685485839844],["▁lãi",-13.891688346862791],["▁teikia",-13.891690254211426],["vergoeding",-13.891697883605955],["▁përkrah",-13.891698837280272],["▁цікаво",-13.891701698303224],["▁Kultury",-13.891703605651855],["▁okazję",-13.891708374023438],["▁ستمبر",-13.891708374023438],["▁muove",-13.891712188720703],["▁ወራት",-13.891714096069336],["发明",-13.891714096069336],["▁ningunha",-13.891716003417969],["▁एसिया",-13.891718864440918],["▁Τύπου",-13.891719818115234],["▁သူ႕",-13.891738891601562],["цвяр",-13.89174461364746],["▁lançado",-13.891752243041992],["▁esitys",-13.891767501831056],["▁guste",-13.891769409179688],["▁nevű",-13.891779899597168],["▁Australien",-13.8917818069458],["▁motivasi",-13.89178466796875],["▁কাদের",-13.89178466796875],["▁iuris",-13.891796112060549],["dash",-13.891802787780762],["▁colesterol",-13.891809463500977],["▁בחודש",-13.89181137084961],["▁барање",-13.891813278198242],["▁difficultés",-13.89181423187256],["ffurf",-13.891818046569824],["▁સરકારે",-13.89181900024414],["▁ცხოველ",-13.891828536987305],["MCA",-13.
891833305358888],["ਰੀਆ",-13.891846656799316],["pozíció",-13.891847610473633],["þjóð",-13.891847610473633],["▁esplora",-13.89184856414795],["רצח",-13.891855239868164],["先行",-13.891862869262695],["ផ្នែក",-13.891863822937012],["等級",-13.891902923583984],["▁стању",-13.891908645629885],["انګ",-13.891911506652832],["▁беріп",-13.891911506652832],["▁Пут",-13.891924858093262],["▁κοπ",-13.891928672790527],["▁ADN",-13.89194107055664],["お電話",-13.89194393157959],["ទៅកាន់",-13.891948699951172],["▁Ugyan",-13.891965866088867],["▁Edin",-13.891972541809082],["▁previše",-13.89197826385498],["▁heimild",-13.891979217529297],["▁общения",-13.891985893249512],["▁mrz",-13.891995429992676],["▁registreeri",-13.89199924468994],["▁Главни",-13.892005920410156],["ವರ್ತ",-13.892007827758787],["०००",-13.892008781433104],["▁jongeren",-13.892008781433104],["▁traccia",-13.892016410827637],["▁sentencia",-13.89202880859375],["▁verby",-13.892029762268066],["▁பாதி",-13.892041206359863],["▁არჩევ",-13.892045974731444],["▁تولۇق",-13.892053604125977],["▁ولسوال",-13.89205837249756],["ėčiau",-13.892059326171877],["КБ",-13.892060279846191],["argument",-13.89206886291504],["▁nuværende",-13.892099380493164],["▁ڪير",-13.892099380493164],["БХ",-13.89210033416748],["▁Meget",-13.89210033416748],["▁فكان",-13.89210319519043],["▁חולים",-13.892117500305176],["▁dichiarato",-13.892118453979492],["▁Italija",-13.892120361328123],["rizo",-13.892127990722656],["▁खाद्य",-13.892129898071287],["拿着",-13.892135620117188],["▁дошао",-13.892157554626465],["▁dünyası",-13.89217472076416],["▁snagu",-13.89219093322754],["▁sprawę",-13.89219093322754],["▁garām",-13.892200469970703],["▁характеру",-13.892203330993652],["▁height",-13.892215728759766],["▁categorii",-13.892240524291992],["▁виробництво",-13.892245292663574],["▁Pretoria",-13.89226531982422],["zungumzia",-13.892282485961914],["▁traduce",-13.892282485961914],["▁الطاف",-13.892301559448242],["электрон",-13.892324447631836],["្ស",-13.892333030700684],["ֆի",-13.892362594604492],["▁properly",-13.892363548278809],["▁vəkili",-13.892378807067873],["知名的",-13.892407417297363],["сады",-13.892425537109377],["▁takšne",-13.892430305480955],["რომა",-13.892462730407717],["▁szembe",-13.89246654510498],["▁Constitución",-13.892485618591309],["▁Спец",-13.892495155334473],["▁сочета",-13.892518043518066],["聚焦",-13.8925199508667],["▁የኮ",-13.892538070678713],["▁관리자",-13.892539024353027],["▁получават",-13.892542839050291],["▁ngom",-13.89255428314209],["▁полном",-13.892555236816406],["OPA",-13.892578125],["▁grutte",-13.892593383789062],["▁إح",-13.892602920532228],["▁fizikai",-13.892607688903809],["▁শুভ",-13.892607688903809],["▁naučil",-13.89260959625244],["▁Ρό",-13.89262580871582],["▁nazist",-13.892627716064451],["behov",-13.892642974853516],["▁инвестира",-13.892643928527832],["ത്തന്നെ",-13.892664909362791],["▁बं",-13.89267635345459],["公交",-13.892695426940918],["彼は",-13.892704963684082],["▁Memper",-13.892712593078612],["بته",-13.892718315124512],["אהבת",-13.892742156982422],["▁tomuto",-13.892778396606444],["▁korralik",-13.89279079437256],["▁градуса",-13.892792701721191],["жку",-13.892797470092772],["▁empiri",-13.892823219299316],["▁trebala",-13.892844200134276],["▁budžet",-13.892848014831545],["▁Søren",-13.892857551574709],["▁Patrimonio",-13.89288330078125],["▁عسل",-13.892910957336426],["yacaq",-13.892919540405272],["▁미국의",-13.89292049407959],["αφή",-13.892921447753906],["ዲት",-13.892926216125488],["▁ضعي",-13.892943382263184],["▁Ghana",-13.892990112304688],["▁ЦК",-13.893000602722168],["▁vỡ",-13.893009185791016],["▁tenåring",-13.89302921
295166],["▁aanpak",-13.89305019378662],["と呼ばれる",-13.893052101135254],["yenler",-13.893078804016112],["▁عربية",-13.893078804016112],["паднал",-13.893096923828123],["另有",-13.893101692199709],["▁художн",-13.893111228942873],["ਗਾਰ",-13.89311981201172],["▁сургалтын",-13.893120765686035],["▁ігра",-13.893128395080566],["itària",-13.893146514892578],["▁tukea",-13.893179893493652],["ðað",-13.893203735351562],["▁помнік",-13.893206596374512],["حى",-13.893207550048828],["▁ספי",-13.89321231842041],["▁mauvais",-13.893227577209473],["▁báb",-13.893243789672852],["▁دانشگاهی",-13.893260955810549],["ושבים",-13.893268585205078],["護理",-13.89327907562256],["▁polja",-13.89328956604004],["▁kusoo",-13.893290519714355],["tahay",-13.893308639526367],["▁فکری",-13.89331340789795],["▁likte",-13.893343925476074],["▁izobraževanje",-13.893365859985352],["يڊ",-13.893383979797363],["నున్నారు",-13.893400192260742],["SOL",-13.893406867980955],["έστη",-13.893413543701172],["士兵",-13.893418312072754],["▁cihaz",-13.893438339233398],["λεύ",-13.89346408843994],["miesto",-13.89346694946289],["▁medalje",-13.893472671508787],["▁சொல்",-13.893481254577637],["▁Chili",-13.893513679504396],["社会保障",-13.893529891967772],["▁గాయ",-13.893577575683594],["▁krisis",-13.893599510192873],["▁данните",-13.893625259399414],["▁chaos",-13.893674850463867],["▁Positiv",-13.893702507019045],["▁Стари",-13.893706321716309],["對我",-13.89372730255127],["▁ತಿಳಿಸಿ",-13.89373779296875],["全面的",-13.893742561340332],["▁மனம்",-13.893749237060549],["ביטול",-13.89375114440918],["▁කොයි",-13.893771171569824],["դար",-13.893794059753418],["мога",-13.893800735473633],["ርነት",-13.89380168914795],["▁ພາຍ",-13.893807411193848],["▁praktično",-13.893815994262695],["▁sepeda",-13.893818855285645],["ڪش",-13.893839836120604],["▁Жаш",-13.893841743469238],["မှုများ",-13.893850326538086],["ević",-13.893865585327148],["▁түсіндір",-13.893877029418944],["ယို",-13.89390754699707],["instituut",-13.893916130065918],["opati",-13.89391803741455],["▁кезеңде",-13.89392375946045],["woning",-13.893949508666992],["のだろう",-13.893959999084473],["▁പാടില്ല",-13.893972396850586],["▁sahihi",-13.89398193359375],["▁Среди",-13.893994331359863],["Alle",-13.894002914428713],["工場",-13.89400577545166],["▁затвора",-13.894012451171877],["▁подходит",-13.89402961730957],["ሁት",-13.894072532653809],["▁technin",-13.89407730102539],["▁सारा",-13.894081115722656],["Stop",-13.894083023071287],["▁regjeringen",-13.894092559814451],["량을",-13.894102096557615],["řízení",-13.894103050231934],["РОВ",-13.894128799438477],["▁smooth",-13.894132614135742],["▁poruch",-13.89415454864502],["saimniecības",-13.894176483154297],["ปั",-13.894180297851562],["▁דיני",-13.894182205200195],["▁Toll",-13.89419651031494],["يوې",-13.894222259521484],["▁Metode",-13.89422607421875],["▁şəxsin",-13.894227027893066],["ਟਾਂ",-13.894245147705078],["رشاد",-13.894251823425291],["▁ნებისმიერ",-13.89426040649414],["▁творча",-13.894265174865724],["▁Dievo",-13.894271850585938],["льскага",-13.894279479980469],["ડુ",-13.894309043884276],["forza",-13.89431858062744],["ثور",-13.894335746765137],["漏洞",-13.894356727600098],["Card",-13.894379615783691],["茅",-13.894386291503906],["▁posat",-13.89439582824707],["▁Trabzon",-13.894403457641602],["▁tıklayın",-13.894410133361816],["หญ้า",-13.89441204071045],["ពិភាក្សា",-13.89441204071045],["▁Múzeum",-13.894412994384766],["▁Mətbuat",-13.894412994384766],["▁beantwoord",-13.894412994384766],["▁involucra",-13.894412994384766],["▁mezinárodní",-13.894412994384766],["▁myfyrwyr",-13.894412994384766],["▁övrigt",-13.894412994384766],["▁θεραπεί
α",-13.894412994384766],["▁հասանելի",-13.894412994384766],["▁تبریک",-13.894412994384766],["▁تصريحات",-13.894412994384766],["▁گوناگون",-13.894412994384766],["▁प्रेरणा",-13.894412994384766],["▁स्पर्धे",-13.894412994384766],["▁কক্সবাজার",-13.894412994384766],["▁ರೋಚಕ",-13.894412994384766],["▁മുംബൈ",-13.894412994384766],["▁හැමෝම",-13.894412994384766],["▁საიდუმლო",-13.894412994384766],["▁სამოქალაქო",-13.894412994384766],["▁አጋጣሚ",-13.894412994384766],["▁오히려",-13.894412994384766],["",-13.894412994384766],["▁Poslední",-13.894413948059082],["▁ସଭାପତି",-13.894413948059082],["▁nowoczesne",-13.894415855407717],["▁ಅಂಶ",-13.894415855407717],["▁ამბავი",-13.894415855407717],["▁क्रमांक",-13.894416809082031],["▁addımlar",-13.894417762756348],["▁хамрагд",-13.894418716430664],["▁ಆತ್ಮ",-13.894418716430664],["▁భారతీయ",-13.894420623779297],["▁reforça",-13.894421577453612],["Громадськ",-13.89442253112793],["▁Penyebab",-13.894424438476562],["▁शिखर",-13.894424438476562],["▁același",-13.89442539215088],["▁plateforme",-13.894426345825195],["▁twoje",-13.894426345825195],["▁zapoznani",-13.894428253173828],["vědomí",-13.894429206848145],["▁entstanden",-13.89443016052246],["▁Ordnung",-13.894432067871094],["▁beharreko",-13.894433975219728],["損失",-13.894433975219728],["▁kažko",-13.894434928894045],["▁ریشه",-13.894436836242676],["պաշտ",-13.894438743591309],["▁ముఖ్యంగా",-13.89444065093994],["▁domáce",-13.89444351196289],["▁управител",-13.894449234008787],["kamatwa",-13.894452095031738],["▁izeneko",-13.894452095031738],["▁गेम",-13.894453048706056],["▁Košice",-13.89446258544922],["▁गज",-13.89446258544922],["▁የተዘጋጀ",-13.89446258544922],["▁menemui",-13.894465446472168],["▁köşe",-13.89447021484375],["məsini",-13.894472122192385],["▁ЗОШ",-13.894474029541016],["мачка",-13.89448356628418],["ခင္ဗ်ာ",-13.89448356628418],["考えた",-13.89448356628418],["▁Bagaimanapun",-13.894489288330078],["▁השרון",-13.894497871398926],["ФИ",-13.894498825073242],["iacich",-13.894501686096191],["▁Vraag",-13.894503593444824],["▁හිටියේ",-13.894505500793455],["▁معرفت",-13.894519805908203],["biljett",-13.894521713256836],["jechać",-13.894522666931152],["▁לשימוש",-13.894523620605469],["▁Craig",-13.89453411102295],["Java",-13.894536018371582],["▁майрам",-13.894536018371582],["854",-13.89454460144043],["ത്തുന്ന",-13.894548416137695],["▁youth",-13.894551277160645],["▁chối",-13.894553184509276],["▁siasatan",-13.894556045532228],["▁එකෙක්",-13.894556999206545],["പ്പെടുന്നത്",-13.89455795288086],["јих",-13.894559860229492],["▁الصغيرة",-13.894561767578123],["▁додатни",-13.894583702087402],["▁световна",-13.8945894241333],["166",-13.894591331481934],["عترف",-13.894601821899414],["▁cruel",-13.89460277557373],["▁неопходн",-13.894603729248049],["▁మార్పులు",-13.894606590270996],["架構",-13.894608497619627],["▁Γιάννης",-13.894610404968262],["▁ನಡೆಯ",-13.894613265991213],["▁പുസ്തകം",-13.894620895385742],["rekord",-13.894622802734377],["一層",-13.894638061523438],["▁rinkos",-13.894652366638184],["▁Maklumat",-13.894660949707031],["▁smidig",-13.894660949707031],["ありませんでした",-13.894660949707031],["കുറ",-13.89466953277588],["▁União",-13.894670486450195],["03.2017",-13.894671440124512],["frågor",-13.894673347473145],["និយាយ",-13.89467430114746],["HIL",-13.894678115844728],["ဖုန္း",-13.894679069519045],["ίδι",-13.894689559936523],["▁jolas",-13.894692420959473],["▁budućnosti",-13.894704818725586],["mmän",-13.894707679748535],["▁Inspired",-13.894710540771484],["▁сѐ",-13.894712448120115],["▁okolja",-13.89471435546875],["▁നാല",-13.894715309143066],["▁gebrek",-13.894728660583496],["님이",-13.8947305
67932127],["▁Brü",-13.894736289978027],["▁negru",-13.894746780395508],["▁Khair",-13.894749641418455],["үүлэн",-13.894757270812988],["drev",-13.89476490020752],["▁verzoek",-13.89477252960205],["heat",-13.894775390625],["જ્",-13.894786834716797],["▁Frieden",-13.894816398620604],["▁Colorado",-13.894824981689451],["▁overblik",-13.894827842712402],["▁ugen",-13.894840240478516],["тров",-13.894842147827148],["▁ਖੋ",-13.894843101501465],["▁personām",-13.894851684570312],["▁sufrir",-13.894857406616213],["▁tehdit",-13.89488124847412],["▁alegeri",-13.894886016845703],["▁sogno",-13.894911766052246],["▁didžiul",-13.894920349121094],["▁Evident",-13.894947052001951],["▁යාපාරය",-13.894948959350586],["▁столе",-13.894949913024902],["וקר",-13.8949556350708],["▁müdür",-13.8949556350708],["६३",-13.894964218139648],["▁القوى",-13.894983291625977],["spruch",-13.894993782043455],["▁Vrh",-13.895018577575684],["ascens",-13.895044326782228],["▁ezagun",-13.895051956176758],["▁репрезентација",-13.895081520080566],["राह",-13.895089149475098],["▁árinu",-13.895094871520996],["نگاهی",-13.895098686218262],["დს",-13.895105361938477],["ATM",-13.895133018493652],["▁tunggal",-13.895136833190918],["指摘",-13.895151138305664],["▁nasil",-13.895170211791992],["▁Kanisa",-13.895184516906738],["ontzi",-13.895204544067385],["▁بودیم",-13.895207405090332],["▁kulanka",-13.895208358764648],["▁روھى",-13.89521026611328],["▁doutro",-13.895212173461914],["▁жасай",-13.895224571228027],["▁جائزہ",-13.89525032043457],["▁imperio",-13.895251274108888],["గ్గ",-13.895264625549316],["▁nở",-13.89527416229248],["читайте",-13.89527702331543],["▁وٺڻ",-13.895286560058594],["▁mikoa",-13.895320892333984],["▁fortsætter",-13.895357131958008],["usį",-13.895358085632324],["差し",-13.895358085632324],["▁missed",-13.895370483398438],["记住",-13.895374298095703],["▁משחקי",-13.895398139953612],["ल्याचे",-13.895404815673828],["ረቱ",-13.895408630371094],["繰り返し",-13.895425796508787],["βό",-13.895435333251951],["▁totusi",-13.895444869995115],["▁demektir",-13.8954496383667],["ಪರ",-13.895458221435549],["▁lehibe",-13.895465850830078],["šťas",-13.895466804504396],["▁دی۔",-13.89549160003662],["▁yolcu",-13.895500183105469],["▁Corpora",-13.8955078125],["▁մեղադր",-13.89553165435791],["யிர",-13.895569801330566],["▁egyetemi",-13.8955717086792],["řád",-13.895586013793944],["كما",-13.89559555053711],["kumā",-13.895604133605955],["פין",-13.895614624023438],["▁sipër",-13.895634651184082],["▁Eure",-13.895652770996094],["▁gogor",-13.89566421508789],["▁بجا",-13.89566421508789],["fyld",-13.895687103271484],["▁выглядит",-13.89569854736328],["▁շուկա",-13.895699501037598],["▁зрел",-13.895703315734863],["फ्रि",-13.895709037780762],["▁selaku",-13.895721435546877],["മീറ്റ",-13.895722389221191],["beton",-13.895742416381836],["▁buux",-13.895743370056152],["kaudella",-13.895764350891112],["▁సబ్",-13.895771026611328],["▁Butik",-13.895803451538086],["▁거부",-13.895804405212402],["上司",-13.895830154418944],["▁borgere",-13.895838737487791],["▁fugle",-13.895846366882324],["▁maži",-13.89585304260254],["▁decorativ",-13.89590835571289],["ುತ್ತಿವೆ",-13.895919799804688],["▁καί",-13.8959379196167],["▁Capitol",-13.895939826965332],["▁Nilai",-13.895957946777344],["▁annons",-13.895957946777344],["▁zwarte",-13.895959854125977],["LICA",-13.895987510681152],["线路",-13.896007537841797],["corri",-13.896014213562012],["▁өгүүл",-13.89601993560791],["▁pastikan",-13.896020889282228],["▁රේම",-13.896026611328123],["прив",-13.896028518676758],["▁pasou",-13.896028518676758],["▁විශ්",-13.89612102508545],["قبول",-13.896125793457031],["ুদ্ধ
",-13.89614963531494],["ნეტ",-13.896153450012209],["ണമെന്നും",-13.89615535736084],["▁бөлүгү",-13.896177291870115],["જય",-13.896188735961914],["▁susţin",-13.896218299865724],["▁замина",-13.896233558654783],["▁Дай",-13.896245956420898],["▁smeri",-13.896254539489746],["▁সত্য",-13.896273612976074],["▁annoncen",-13.896279335021973],["िर्",-13.896296501159668],["▁Roda",-13.896323204040527],["▁hoping",-13.896331787109377],["Watch",-13.896373748779297],["▁درصدی",-13.896397590637209],["▁informativa",-13.89641284942627],["koihin",-13.896448135375977],["γού",-13.896451950073242],["ల్లె",-13.89645767211914],["▁gurb",-13.89647388458252],["▁Acord",-13.896489143371582],["▁ಮನವಿ",-13.89650058746338],["ίσκο",-13.896503448486328],["▁beber",-13.89650535583496],["āžu",-13.896524429321287],["тілді",-13.896531105041504],["ेति",-13.896535873413086],["gerçekleştirilen",-13.8965425491333],["▁ruhu",-13.896543502807615],["▁busty",-13.896546363830566],["▁ಆಪ್",-13.896563529968262],["▁Būt",-13.896564483642578],["▁хэрэгжүүл",-13.896571159362791],["мај",-13.89658546447754],["РОД",-13.896589279174805],["▁personligt",-13.89659023284912],["▁Nerv",-13.896592140197754],["▁замисли",-13.89663791656494],["▁utgör",-13.896644592285156],["▁rivojlanish",-13.896655082702637],["▁repli",-13.896722793579102],["▁କରା",-13.896723747253418],["看好",-13.896724700927734],["▁Asif",-13.896726608276367],["ительный",-13.896729469299316],["võimalus",-13.896746635437012],["cão",-13.89675235748291],["ួត",-13.89675521850586],["▁Gastro",-13.896768569946287],["▁ಮಾತನಾಡಿ",-13.896768569946287],["▁සිහින",-13.896769523620604],["WT",-13.896770477294922],["外觀",-13.89681625366211],["kraan",-13.896830558776855],["girtî",-13.89684772491455],["▁التش",-13.896856307983398],["▁marcada",-13.896859169006348],["τικούς",-13.896864891052246],["▁Чл",-13.896888732910156],["▁εκπαίδευση",-13.896892547607422],["▁kilometro",-13.896896362304688],["▁postupak",-13.896896362304688],["Frank",-13.89690399169922],["▁παραγωγή",-13.896906852722168],["▁ሚስ",-13.89695930480957],["مستشفى",-13.89697551727295],["▁hatırla",-13.89697551727295],["rayı",-13.896981239318848],["▁Regler",-13.896981239318848],["નક",-13.896995544433594],["uteen",-13.897007942199709],["▁żywi",-13.897008895874023],["▁úsáide",-13.897015571594238],["反對",-13.897015571594238],["一遍",-13.89704704284668],["▁місяці",-13.897058486938477],["ுவதற்கு",-13.897061347961426],["ย้อน",-13.897065162658691],["מחה",-13.897092819213867],["▁environmental",-13.897095680236816],["紧紧",-13.897096633911133],["叙利亚",-13.897099494934082],["砸",-13.897103309631348],["▁kjære",-13.89710807800293],["▁දන්නෙ",-13.897124290466309],["國民黨",-13.897128105163574],["锡",-13.897130966186523],["▁минава",-13.897136688232422],["▁konvenci",-13.897139549255373],["▁Frid",-13.897150039672852],["阿拉伯",-13.897157669067385],["▁Abşeron",-13.897174835205078],["▁Discovery",-13.897174835205078],["▁Whether",-13.897174835205078],["▁ausführlich",-13.897174835205078],["▁bắn",-13.897174835205078],["▁instituições",-13.897174835205078],["▁susipažin",-13.897174835205078],["▁αφορούν",-13.897174835205078],["▁διεθνή",-13.897174835205078],["▁υποστήριξη",-13.897174835205078],["▁Прежде",-13.897174835205078],["▁Тръмп",-13.897174835205078],["▁העובדה",-13.897174835205078],["▁تخفیف",-13.897174835205078],["▁फीसदी",-13.897174835205078],["▁मलेसिया",-13.897174835205078],["▁ମୂଲ୍ୟ",-13.897174835205078],["▁ଶିକ୍ଷକ",-13.897174835205078],["▁කැබිනට්",-13.897174835205078],["hreyfing",-13.897175788879396],["▁cannabis",-13.897175788879396],["▁mengerjakan",-13.897175788879396],["▁stāvokli",-13.897175788879396],["▁о
фициално",-13.897175788879396],["▁નંબર",-13.897175788879396],["цэцэг",-13.897176742553713],["▁besplatno",-13.897176742553713],["▁ninguén",-13.897176742553713],["▁λιγότερο",-13.897176742553713],["▁Тэгвэл",-13.897176742553713],["▁शराब",-13.897176742553713],["▁પ્લાન",-13.897176742553713],["投标",-13.897176742553713],["▁varumärke",-13.897177696228027],["▁Москви",-13.897177696228027],["▁صوبہ",-13.897177696228027],["▁perioade",-13.897178649902344],["▁подчерта",-13.897178649902344],["▁giardia",-13.89717960357666],["▁umfangreiche",-13.89717960357666],["▁Gemeente",-13.897180557250977],["▁Європі",-13.89718246459961],["▁წითელ",-13.89718532562256],["פגיעה",-13.897186279296877],["႐ိုက္",-13.897187232971191],["▁स्मृति",-13.897187232971191],["转换",-13.897188186645508],["képes",-13.897192001342772],["▁bëjmë",-13.897193908691406],["▁разработан",-13.897198677062988],["▁పక్క",-13.897199630737305],["▁주시기",-13.897199630737305],["▁Ngunit",-13.89720344543457],["▁сайлау",-13.897204399108888],["▁गोल्ड",-13.897204399108888],["▁આપણી",-13.897209167480469],["▁Artikkel",-13.897211074829102],["▁იმაზე",-13.897217750549316],["▁העיקרי",-13.897218704223633],["▁radšej",-13.897224426269531],["的表情",-13.897226333618164],["▁більшості",-13.897231101989746],["▁לאחרונה",-13.897233963012695],["▁통하여",-13.897239685058594],["▁искуства",-13.897247314453123],["▁Tuttavia",-13.897249221801758],["▁ಸಾವು",-13.897253036499023],["▁Indhold",-13.897259712219238],["▁zrealizowa",-13.897266387939451],["τυπο",-13.8972749710083],["اهد",-13.897300720214844],["▁leyendo",-13.897302627563477],["▁nekon",-13.897303581237791],["▁juoda",-13.89730453491211],["を購入",-13.897340774536133],["▁predomina",-13.897343635559082],["▁dijmin",-13.897345542907717],["▁sølv",-13.897346496582031],["注入",-13.897356033325195],["oldeb",-13.89736270904541],["koder",-13.897368431091309],["▁cự",-13.89737319946289],["▁القدرة",-13.897387504577637],["▁футболист",-13.89739227294922],["ภั",-13.897394180297852],["▁országok",-13.89739990234375],["▁دلم",-13.897420883178713],["▁rizgar",-13.897421836853027],["▁dakle",-13.897427558898926],["▁izvajal",-13.897432327270508],["▁menjamin",-13.897455215454102],["▁asosan",-13.8974609375],["▁dijeli",-13.897461891174316],["▁מקרה",-13.897467613220217],["▁מכשיר",-13.89748191833496],["▁overleden",-13.897497177124023],["▁berulang",-13.897500038146973],["召開",-13.897521018981934],["▁красива",-13.897547721862791],["▁prévue",-13.897564888000488],["▁maximaal",-13.897570610046388],["▁էինք",-13.897577285766602],["▁nemaz",-13.897603034973145],["▁Certifi",-13.89760398864746],["▁Forside",-13.89760398864746],["▁татаж",-13.89760971069336],["עקב",-13.897621154785156],["社長",-13.897627830505373],["▁Kikuu",-13.89763641357422],["▁verkauft",-13.897642135620115],["global",-13.897652626037598],["▁flaen",-13.89765739440918],["▁silence",-13.897662162780762],["პის",-13.897671699523926],["водиться",-13.897674560546877],["▁조치",-13.897709846496582],["在美國",-13.897719383239746],["ЕННЯ",-13.897729873657228],["▁terrà",-13.897734642028809],["▁Helga",-13.89773941040039],["▁ideju",-13.8977632522583],["▁αναπτ",-13.897787094116213],["▁adventure",-13.89780044555664],["ष्टि",-13.897815704345703],["נכון",-13.89781665802002],["річного",-13.897821426391602],["ाद्",-13.897821426391602],["▁അറ",-13.89783000946045],["複",-13.897869110107422],["▁piknik",-13.897875785827637],["▁قىسىم",-13.897883415222168],["付いて",-13.897904396057127],["ுகிறார்",-13.897908210754396],["▁sadarbības",-13.89791202545166],["年级",-13.897913932800291],["teater",-13.897939682006836],["▁kvalitné",-13.897939682006836],["▁bilancio",-13.8
97953987121582],["▁naudojami",-13.89796543121338],["▁sütik",-13.897968292236328],["▁guardare",-13.898046493530272],["ಟೊ",-13.89806079864502],["ערכים",-13.898070335388184],["▁poslovnih",-13.8980712890625],["▁گردش",-13.89807415008545],["▁پارت",-13.898075103759766],["▁takav",-13.898076057434082],["▁ugodno",-13.898076057434082],["ỉnh",-13.898089408874512],["грамма",-13.89809513092041],["凌晨",-13.898104667663574],["Like",-13.898112297058104],["▁глазах",-13.898118019104004],["تحكم",-13.898122787475586],["▁mësim",-13.898128509521484],["3.9",-13.898158073425291],["▁Источн",-13.89816951751709],["живання",-13.898186683654783],["jusies",-13.898200988769531],["▁Five",-13.898202896118164],["društven",-13.898211479187012],["▁prepoved",-13.898221015930176],["бирати",-13.898258209228516],["ຕໍາ",-13.898289680480955],["▁પ્રા",-13.898306846618652],["പേര",-13.898310661315918],["ŠKO",-13.89833164215088],["egiak",-13.898344039916992],["▁Comunitat",-13.898358345031738],["ទី១",-13.898398399353027],["▁reprezentativ",-13.89840602874756],["5.1",-13.898407936096191],["ವರಿ",-13.898414611816406],["▁oferite",-13.898418426513672],["▁Yunus",-13.898460388183594],["▁भारतात",-13.89846897125244],["▁neznám",-13.898512840270996],["ტეტ",-13.898544311523438],["horizon",-13.89854907989502],["▁распоред",-13.89855670928955],["వుల",-13.898576736450195],["▁Colon",-13.898578643798828],["▁valvo",-13.898582458496094],["小学生",-13.89858341217041],["▁bandy",-13.898584365844728],["KRI",-13.898585319519045],["▁инсталира",-13.898602485656738],["▁tinham",-13.898619651794434],["▁ਖੁ",-13.89862060546875],["만으로",-13.898664474487305],["▁matrac",-13.898706436157228],["▁spart",-13.898706436157228],["▁ಹತ್ತಿರ",-13.898711204528809],["hayag",-13.898762702941896],["▁poboljša",-13.898763656616213],["▁Stift",-13.898777961730955],["▁කිරීමෙන්",-13.89878749847412],["자치",-13.898788452148438],["▁나를",-13.89879322052002],["▁seguiment",-13.898811340332031],["SJON",-13.898825645446776],["▁vagon",-13.898829460144045],["▁dignitat",-13.898836135864258],["▁бројот",-13.898855209350586],["▁închis",-13.898859024047852],["▁přidá",-13.898877143859863],["VAC",-13.898879051208496],["▁libroj",-13.898882865905762],["▁нарық",-13.898882865905762],["不只",-13.898882865905762],["ようになります",-13.898890495300291],["▁biomas",-13.898897171020508],["▁attesta",-13.898930549621582],["▁төгс",-13.89893627166748],["▁límite",-13.89894199371338],["▁суть",-13.89894199371338],["▁noticed",-13.898959159851074],["Over",-13.898980140686035],["▁explicación",-13.89901351928711],["▁tahaks",-13.89902114868164],["correndo",-13.899032592773438],["▁yatır",-13.899033546447754],["SMS",-13.899035453796388],["▁hampa",-13.899062156677246],["这对",-13.899067878723145],["▁posebnim",-13.899081230163574],["▁Руд",-13.899099349975586],["ሀብ",-13.899107933044434],["▁ферм",-13.899117469787598],["ीसाठी",-13.89915657043457],["sóknar",-13.899169921875],["▁službeno",-13.899175643920898],["െയും",-13.89918327331543],["▁අතට",-13.899189949035645],["▁emléke",-13.89920139312744],["▁bakin",-13.899206161499023],["perhe",-13.89920711517334],["nyelv",-13.899211883544922],["ûnê",-13.899224281311035],["verkko",-13.8992280960083],["מאר",-13.89926528930664],["istanê",-13.899273872375488],["ضطر",-13.899283409118652],["▁каго",-13.899300575256348],["分手",-13.899311065673828],["▁күніне",-13.89931869506836],["কৈ",-13.899372100830078],["יונות",-13.899384498596191],["▁Esca",-13.899384498596191],["▁عرفان",-13.899438858032228],["▁Babi",-13.899449348449709],["ૂલ",-13.899450302124023],["▁онај",-13.899452209472656],["多年来",-13.899476051330566],["ธร",-13.899485588
07373],["षित",-13.899490356445312],["▁породица",-13.899517059326172],["୧୯",-13.89955234527588],["ໍ້",-13.89955234527588],["SPORT",-13.899564743041992],["▁деңгей",-13.899566650390623],["▁sticker",-13.899580001831056],["▁বাংলাদেশি",-13.899643898010254],["▁სისტემის",-13.899649620056152],["ђено",-13.89968490600586],["▁темно",-13.899697303771973],["▁omasta",-13.899701118469238],["▁thjeshtë",-13.899703979492188],["ങ്കര",-13.899737358093262],["▁betekenis",-13.899744987487791],["▁fuqaro",-13.899767875671388],["ობი",-13.899827003479004],["渐渐",-13.899845123291016],["چونڊن",-13.899856567382812],["▁гэтая",-13.89987087249756],["将继续",-13.89987850189209],["模拟",-13.899880409240724],["漸",-13.899883270263672],["കുറി",-13.89988613128662],["▁reel",-13.89988899230957],["视觉",-13.89988899230957],["躁",-13.899898529052734],["▁ସଂଖ୍ୟା",-13.899911880493164],["▁xususiy",-13.899928092956545],["セックス",-13.89993381500244],["チケット",-13.899935722351074],["茲",-13.89993667602539],["▁Рэспубл",-13.899941444396973],["ඡ",-13.899944305419922],["อักษร",-13.899944305419922],["þjónusta",-13.899945259094238],["عقوبات",-13.899945259094238],["ዮሐንስ",-13.899945259094238],["ចោទ",-13.899945259094238],["▁budapesti",-13.899945259094238],["▁huấn",-13.899945259094238],["▁pembentukan",-13.899945259094238],["▁phấn",-13.899945259094238],["▁vrijblijvend",-13.899945259094238],["▁ЖШС",-13.899945259094238],["▁внимателно",-13.899945259094238],["▁забяспеч",-13.899945259094238],["▁многе",-13.899945259094238],["▁обличчя",-13.899945259094238],["▁партнёр",-13.899945259094238],["▁съгласие",-13.899945259094238],["▁уралдаан",-13.899945259094238],["▁فاطمه",-13.899945259094238],["▁مذاکره",-13.899945259094238],["▁ඒකාබද්ධ",-13.899945259094238],["▁හැමදාම",-13.899945259094238],["▁ፈቃድ",-13.899945259094238],["▁intenzív",-13.899946212768556],["▁storočia",-13.899946212768556],["▁исхрана",-13.899946212768556],["▁көзқарас",-13.899946212768556],["▁страните",-13.899946212768556],["▁दीर्घ",-13.899946212768556],["▁antibiotic",-13.899947166442873],["▁nghìn",-13.899947166442873],["βάθμι",-13.899948120117188],["▁cümle",-13.899948120117188],["▁noqday",-13.899948120117188],["▁Autónoma",-13.899950981140137],["▁Pierwszy",-13.899950981140137],["▁déanamh",-13.899950981140137],["▁Եվրոպայի",-13.899950981140137],["▁eingerichtet",-13.899951934814451],["▁Комисс",-13.899951934814451],["▁существуют",-13.899951934814451],["▁Агенция",-13.89995288848877],["▁Mengatasi",-13.899953842163086],["▁poškoden",-13.899953842163086],["▁szezon",-13.899953842163086],["▁zhduk",-13.899956703186035],["▁رسنیو",-13.899956703186035],["举报",-13.899957656860352],["डम",-13.8999605178833],["▁मोठी",-13.8999605178833],["▁opplevd",-13.899962425231934],["▁эфир",-13.899962425231934],["▁uvedom",-13.89996337890625],["▁arrazoi",-13.8999662399292],["▁ograničen",-13.8999662399292],["▁szívesen",-13.8999662399292],["▁возможностей",-13.899967193603516],["▁hjarta",-13.899971961975098],["▁అవుతుంది",-13.899975776672363],["▁акций",-13.899979591369627],["▁Tomislav",-13.899980545043944],["▁intercambio",-13.899981498718262],["▁ఉంటారు",-13.899981498718262],["▁රැගෙන",-13.899981498718262],["▁javasol",-13.899983406066896],["▁aparılır",-13.899985313415527],["▁hatrany",-13.899985313415527],["▁көмектес",-13.89998722076416],["▁הצבא",-13.89998722076416],["▁жұмса",-13.89999008178711],["▁انتہا",-13.89999294281006],["ரிடம்",-13.899994850158691],["▁مساله",-13.89999771118164],["▁халқының",-13.90000820159912],["▁ಜಾತಿ",-13.900012969970703],["▁शक्यता",-13.90001392364502],["ccato",-13.900017738342283],["לום",-13.900035858154297],["Opera",-13.90003776550293]
,["▁myšlen",-13.900039672851562],["ગ્રહ",-13.900043487548828],["▁сторони",-13.900054931640623],["စိုက္",-13.90005588531494],["▁የወያኔ",-13.90005588531494],["▁socday",-13.900069236755373],["▁Jaroslav",-13.900074005126951],["▁qolish",-13.900076866149902],["โครง",-13.900078773498535],["▁Europë",-13.900078773498535],["步伐",-13.900087356567385],["▁måder",-13.900099754333496],["▁драм",-13.900103569030762],["▁mostrando",-13.900105476379396],["▁médiá",-13.90011978149414],["▁telefonnummer",-13.900120735168455],["▁Michelin",-13.90013313293457],["▁Дуже",-13.900138854980469],["▁tarjolla",-13.900145530700684],["▁Бишкекте",-13.900147438049316],["นั",-13.900154113769531],["▁ikerketa",-13.900157928466797],["lánc",-13.900176048278809],["▁يحصل",-13.900185585021973],["▁seinä",-13.900195121765137],["▁એડ",-13.9002046585083],["▁перевірки",-13.900212287902832],["▁ఇవి",-13.900217056274414],["റുകള്",-13.900253295898438],["▁הסכם",-13.90025520324707],["▁kever",-13.90025806427002],["▁zemër",-13.900264739990234],["▁enhance",-13.900280952453612],["නාව",-13.900286674499512],["▁aftësi",-13.900286674499512],["▁dermatolog",-13.900287628173828],["▁타고",-13.900294303894045],["▁privatiz",-13.90029525756836],["▁neviens",-13.90030002593994],["▁γνώση",-13.900306701660156],["virksomhed",-13.900312423706056],["▁جماعتوں",-13.9003267288208],["ဝင်း",-13.900339126586914],["▁сјај",-13.900349617004396],["▁Значит",-13.900352478027344],["▁celana",-13.900358200073242],["▁ሕገ",-13.900361061096191],["▁दिनु",-13.900370597839355],["▁Preberi",-13.900375366210938],["▁Päivi",-13.900382995605469],["1800",-13.900407791137695],["▁серця",-13.900419235229492],["bøker",-13.900435447692873],["ferðir",-13.900450706481934],["▁publicidad",-13.900465965270996],["▁እነሱ",-13.900465965270996],["പ്രകാരം",-13.900472640991213],["disse",-13.90047836303711],["bevaring",-13.900482177734377],["▁reuse",-13.900497436523438],["▁Pilar",-13.900501251220703],["зіць",-13.90053367614746],["▁ordentlig",-13.900535583496094],["ബര്",-13.900542259216309],["वानी",-13.90057373046875],["▁തോന്നുന്നു",-13.900575637817385],["▁కోల్",-13.900577545166016],["▁završn",-13.900585174560549],["▁caught",-13.900589942932127],["▁ректор",-13.900589942932127],["▁kazni",-13.900592803955078],["▁jätti",-13.900596618652344],["▁næg",-13.900620460510254],["аваных",-13.90062141418457],["▁የወጣ",-13.90062427520752],["▁hozott",-13.900646209716797],["▁slnečn",-13.900647163391112],["▁разкри",-13.900679588317873],["grafik",-13.90068244934082],["▁Lips",-13.900683403015137],["▁направят",-13.900686264038086],["▁കഴിച്ച",-13.900691986083984],["DING",-13.90070629119873],["▁Adalah",-13.900712966918944],["▁mengingatkan",-13.900715827941896],["bevidst",-13.900748252868652],["▁Uttar",-13.900748252868652],["▁znakom",-13.900769233703612],["1969",-13.900776863098145],["päivää",-13.900784492492676],["▁hľadá",-13.900792121887209],["既有",-13.900795936584473],["xerunt",-13.900809288024902],["▁Αργ",-13.90081024169922],["179",-13.9008150100708],["▁ryby",-13.9008207321167],["▁voimassa",-13.900835037231444],["fər",-13.900843620300291],["ခင္း",-13.900850296020508],["▁స్థాయి",-13.900871276855469],["▁dezvolta",-13.900874137878418],["▁цари",-13.900891304016112],["▁сэрца",-13.90089225769043],["אזרח",-13.90090560913086],["▁Kombëtar",-13.900911331176758],["ბინა",-13.90093994140625],["▁බැංකුව",-13.900941848754885],["加密",-13.900951385498049],["▁voodi",-13.900960922241213],["افع",-13.90096664428711],["▁التك",-13.90096664428711],["▁obdob",-13.90098476409912],["çağı",-13.900997161865234],["avfall",-13.901004791259766],["sairaala",-13.90101146697998],["
▁kellele",-13.901025772094728],["▁Реш",-13.90103244781494],["▁komentarą",-13.901062965393066],["പുല",-13.901092529296877],["▁Павлодар",-13.901097297668455],["メイン",-13.90110683441162],["ዎችና",-13.901119232177734],["▁Nancy",-13.901131629943848],["▁Soros",-13.90114402770996],["вршен",-13.901145935058594],["▁господарств",-13.901165008544922],["▁প্রতিবাদ",-13.901175498962402],["ნატ",-13.901179313659668],["▁Keine",-13.901211738586426],["▁Oulu",-13.901222229003906],["▁передан",-13.901225090026855],["▁funkce",-13.901226997375488],["穿越",-13.901280403137209],["▁flaske",-13.901288986206056],["▁vrsto",-13.9013090133667],["▁Kommunen",-13.901309967041016],["6.6",-13.90133285522461],["▁Kalk",-13.901339530944824],["חזה",-13.90135383605957],["డింగ్",-13.901372909545898],["organisasjon",-13.90138339996338],["▁cîh",-13.901386260986328],["▁шут",-13.901397705078123],["▁gesig",-13.901412963867188],["תמיד",-13.9014310836792],["мън",-13.90146255493164],["▁लामा",-13.901476860046388],["▁كۆرۈش",-13.901480674743652],["ڳو",-13.901491165161133],["اءات",-13.901497840881348],["▁răspunde",-13.90150260925293],["▁нээлт",-13.901514053344728],["▁Masar",-13.90152359008789],["ოპერ",-13.901548385620115],["▁تتم",-13.901571273803713],["▁bombo",-13.901575088500977],["▁ഗവ",-13.901575088500977],["үмдү",-13.90157699584961],["4.9",-13.901594161987305],["▁Saksamaa",-13.901638984680176],["▁پوچھا",-13.90168571472168],["▁obou",-13.901716232299805],["tyksiä",-13.901718139648438],["▁Кот",-13.901727676391602],["▁olaylar",-13.90173625946045],["▁paysage",-13.90174388885498],["▁wiatr",-13.901753425598145],["טקסט",-13.901773452758787],["tagna",-13.901774406433104],["ച്ചാ",-13.901823043823242],["▁underhåll",-13.901824951171877],["▁profundidad",-13.901826858520508],["▁iştirakçıları",-13.901835441589355],["▁nxit",-13.90184497833252],["λαμβάνουν",-13.901856422424316],["▁1882",-13.901867866516112],["vance",-13.901869773864746],["▁cù",-13.90187931060791],["▁زب",-13.901881217956545],["▁tuua",-13.901885986328123],["▁bármi",-13.901891708374023],["ಲ್ಲು",-13.901914596557615],["▁dirigido",-13.901917457580566],["▁локално",-13.901944160461426],["▁legis",-13.901947975158691],["▁komunikat",-13.901968002319336],["が無い",-13.9019775390625],["ได้ยิน",-13.901983261108398],["DİR",-13.901989936828612],["كۈنى",-13.901994705200195],["ίτες",-13.90199851989746],["▁dibagi",-13.90200424194336],["提高了",-13.902032852172852],["▁моє",-13.90204906463623],["▁розгляду",-13.902054786682127],["▁tæp",-13.90206813812256],["յին",-13.90208339691162],["▁provocat",-13.902135848999023],["အဆို",-13.902140617370604],["▁kupuje",-13.902156829833984],["似的",-13.902174949645996],["▁Билим",-13.902189254760742],["чую",-13.90220069885254],["Bad",-13.902203559875488],["▁එල",-13.902203559875488],["的具体",-13.902244567871094],["என்ன",-13.902273178100586],["አንድ",-13.902287483215332],["▁provedení",-13.902338027954102],["ຜ່ານມາ",-13.90234375],["expo",-13.90234661102295],["ស៊ូ",-13.902348518371582],["ítható",-13.902352333068848],["▁Νό",-13.9024019241333],["даган",-13.902409553527832],["自觉",-13.902410507202148],["歩いて",-13.902411460876465],["468",-13.90241527557373],["▁Γιώργο",-13.902441024780272],["▁Карта",-13.902460098266602],["▁초기",-13.902466773986816],["生活方式",-13.90249729156494],["DIY",-13.902501106262209],["▁kamieni",-13.902512550354004],["IDEN",-13.90252685546875],["▁darajada",-13.9025297164917],["▁leagan",-13.902533531188965],["▁sinus",-13.902533531188965],["▁دیدم",-13.902547836303713],["▁Usko",-13.902551651000977],["784",-13.902563095092772],["▁lyser",-13.902581214904783],["ნური",-13.90259075164795],["වුණු"
,-13.902607917785645],["▁tədbirlərin",-13.90260887145996],["jević",-13.902628898620604],["不变",-13.9026460647583],["▁निवास",-13.90264892578125],["姚",-13.90266227722168],["荷兰",-13.902681350708008],["▁adayı",-13.902687072753906],["敘",-13.902687072753906],["▁βήμα",-13.902688026428224],["遗憾",-13.902692794799805],["▁nouvel",-13.902703285217283],["弦",-13.90270709991455],["▁అద్భుత",-13.902713775634766],["เคลื่อน",-13.902718544006348],["ត្បូង",-13.902721405029297],["ຖະຫນົນ",-13.902722358703612],["▁DAAWO",-13.902722358703612],["▁Intelligence",-13.902722358703612],["▁چوڭقۇر",-13.902722358703612],["갤러리",-13.902722358703612],["ঢ়",-13.90272331237793],["▁Akershus",-13.90272331237793],["▁Brifysgol",-13.90272331237793],["▁Grâce",-13.90272331237793],["▁Massachusetts",-13.90272331237793],["▁dzisiejszy",-13.90272331237793],["▁entièrement",-13.90272331237793],["▁indépendant",-13.90272331237793],["▁nedjelju",-13.90272331237793],["▁τόνισε",-13.90272331237793],["▁Републици",-13.90272331237793],["▁Церкви",-13.90272331237793],["▁Человек",-13.90272331237793],["▁енциклопедија",-13.90272331237793],["▁сарадње",-13.90272331237793],["▁съоръжения",-13.90272331237793],["▁ընդամենը",-13.90272331237793],["▁اړیکي",-13.90272331237793],["▁کانگریس",-13.90272331237793],["▁मुद्दे",-13.90272331237793],["▁ਹਮਲਾ",-13.90272331237793],["▁అర్జున్",-13.90272331237793],["▁ലൈംഗിക",-13.90272331237793],["▁მიმართულებით",-13.90272331237793],["▁ግልጽ",-13.90272331237793],["▁Kiekviena",-13.902724266052246],["▁поколение",-13.902724266052246],["▁વિરોધ",-13.902724266052246],["▁እስካሁን",-13.902724266052246],["優良",-13.902724266052246],["▁Možda",-13.902725219726562],["▁afternoon",-13.902725219726562],["▁paziente",-13.902725219726562],["▁صوبے",-13.902725219726562],["▁उत्सव",-13.902725219726562],["▁विकसित",-13.902725219726562],["ေရာဂါ",-13.90272617340088],["▁Yaponiya",-13.90272617340088],["▁izbjegl",-13.90272617340088],["لەنگەن",-13.902727127075195],["▁Toscana",-13.902727127075195],["▁lämplig",-13.902727127075195],["▁التسجيل",-13.902727127075195],["▁అధ్యక్షుడు",-13.902728080749512],["▁выйти",-13.902729034423828],["▁କିପରି",-13.902729034423828],["κολούθ",-13.902729988098145],["▁elemzés",-13.902729988098145],["▁спецыяліст",-13.90273094177246],["▁ئەگەر",-13.902731895446776],["▁نداره",-13.902732849121094],["▁চোখ",-13.902732849121094],["▁ਚਿੱ",-13.902732849121094],["▁ಟೈ",-13.902735710144045],["▁география",-13.902737617492676],["▁جهڙو",-13.902737617492676],["▁નાના",-13.902738571166992],["ohjelmisto",-13.902739524841309],["▁moat",-13.90274143218994],["▁mendakwa",-13.902743339538574],["แห่งประเทศไทย",-13.90274429321289],["▁uitvoeren",-13.902745246887209],["▁ରକ୍ତ",-13.902745246887209],["▁szempont",-13.902749061584473],["▁ymateb",-13.902750015258787],["▁conegut",-13.902751922607422],["ロード",-13.902751922607422],["▁دشوار",-13.902753829956056],["▁MySQL",-13.902754783630373],["▁очекива",-13.902758598327637],["▁почетка",-13.902758598327637],["▁menderita",-13.90276050567627],["▁vahvista",-13.90276050567627],["פסטיבל",-13.902765274047852],["▁بسیج",-13.902765274047852],["▁voulais",-13.902766227722168],["▁የተሻለ",-13.902766227722168],["▁വിവരങ്ങള്",-13.902767181396484],["▁férias",-13.9027681350708],["▁könyvtár",-13.90277099609375],["▁përshtat",-13.902772903442385],["▁першому",-13.9027738571167],["ສະຫນອງ",-13.902780532836914],["▁predpisov",-13.902783393859863],["▁दिखाई",-13.902788162231444],["▁લોક",-13.902789115905762],["▁baaqay",-13.902791976928713],["lokasi",-13.902793884277344],["▁تقدير",-13.902793884277344],["▁distingui",-13.90280818939209],["▁Torsdag",-13.902814865112305],["▁ಮಾಡ
ಬೇಕು",-13.902814865112305],["▁tjäna",-13.90281867980957],["російсько",-13.902822494506836],["▁Юрій",-13.902825355529783],["▁psov",-13.902827262878418],["▁جنوری",-13.902827262878418],["▁סיבה",-13.902841567993164],["▁reside",-13.902856826782228],["▁втрат",-13.90285873413086],["▁порівнян",-13.902859687805176],["ühingu",-13.90286350250244],["cțiune",-13.902881622314451],["▁obdobie",-13.902891159057615],["▁жаша",-13.902894020080566],["▁Melihat",-13.902905464172363],["ຍຸ",-13.90290641784668],["▁לראש",-13.90290641784668],["▁utkání",-13.902912139892578],["▁הביטחון",-13.902913093566896],["目前已",-13.90292263031006],["આર",-13.902923583984377],["ವೈ",-13.902932167053224],["▁ඉතාම",-13.902938842773438],["▁batik",-13.902948379516602],["▁recorrido",-13.90295124053955],["▁þessara",-13.90295124053955],["通販",-13.902956008911133],["▁그들은",-13.902957916259766],["▁Grön",-13.902958869934082],["უბნებ",-13.902960777282717],["▁բովանդակության",-13.902963638305664],["▁ಹಾಗಾಗಿ",-13.90296745300293],["▁nuklear",-13.902971267700195],["▁parkov",-13.90297508239746],["점검",-13.902976989746094],["▁potrete",-13.90298080444336],["▁ενέργειας",-13.902996063232422],["▁deadline",-13.902997016906738],["قاعدہ",-13.903003692626951],["vieta",-13.903005599975586],["▁забележа",-13.903006553649902],["ष्क",-13.903019905090332],["ေဆာ",-13.90302276611328],["▁prill",-13.903023719787598],["▁безліч",-13.903029441833496],["▁Lời",-13.903037071228027],["exploitation",-13.903042793273926],["▁موسى",-13.90304470062256],["▁غالبا",-13.903051376342772],["krav",-13.90305995941162],["Тернопіль",-13.90306568145752],["▁الإلكترونية",-13.90306568145752],["方々",-13.903070449829102],["▁مدة",-13.903077125549316],["▁tamquam",-13.90308666229248],["▁sweat",-13.90309238433838],["especial",-13.903101921081545],["▁Navíc",-13.903105735778809],["▁տեղեկ",-13.903106689453123],["плей",-13.90310764312744],["▁teruk",-13.90310764312744],["érico",-13.903118133544922],["홈",-13.90314769744873],["єкти",-13.903154373168944],["▁dostępny",-13.90317726135254],["▁ඇන්",-13.903178215026855],["▁Berlín",-13.903183937072754],["kommunikatsiya",-13.90321445465088],["▁wynik",-13.903228759765623],["▁அப்ப",-13.90323257446289],["▁మాటలు",-13.903244018554688],["▁Meddela",-13.903249740600586],["ଯାଇଥିବା",-13.903254508972168],["▁vidaus",-13.903254508972168],["писано",-13.903267860412598],["ご了承ください",-13.903294563293455],["ינר",-13.90330410003662],["വൃത്തി",-13.903306007385254],["تدريب",-13.903311729431152],["▁súng",-13.90332317352295],["足夠",-13.903339385986328],["▁spremni",-13.903355598449709],["第二个",-13.903356552124023],["▁подій",-13.903363227844238],["▁reabilit",-13.903366088867188],["ால",-13.90339183807373],["462",-13.903399467468262],["מגוון",-13.903403282165527],["ألم",-13.903412818908691],["▁зависност",-13.903417587280272],["▁магазини",-13.903440475463867],["▁effekter",-13.903441429138184],["▁kopalni",-13.903459548950195],["โฮม",-13.903483390808104],["▁tashrif",-13.903491973876951],["インド",-13.903499603271484],["增值",-13.903509140014648],["▁पाठव",-13.903517723083496],["▁තිර",-13.903556823730469],["▁Looking",-13.903569221496582],["▁ფონდი",-13.903571128845217],["▁ලබාදීම",-13.903573989868164],["▁kroku",-13.903575897216797],["owned",-13.903608322143556],["▁проценти",-13.903609275817873],["▁თამარ",-13.903615951538086],["▁Miha",-13.903635025024414],["▁புலி",-13.903658866882324],["▁गुल",-13.903664588928224],["▁издаден",-13.903666496276855],["▁anjing",-13.903668403625488],["▁મેચ",-13.90367031097412],["▁bouton",-13.903679847717283],["direktør",-13.903691291809082],["▁krajev",-13.903691291809082],["▁mielenkiinto"
,-13.903700828552246],["▁кафедри",-13.903716087341309],["layabilirsiniz",-13.903732299804688],["▁etməklə",-13.903743743896484],["مىنى",-13.90378189086914],["incl",-13.903789520263672],["tutkinto",-13.903800964355469],["växt",-13.903817176818848],["▁DEI",-13.903826713562012],["შვა",-13.903838157653809],["ກູ້",-13.903843879699709],["▁ആദ്യത്തെ",-13.903844833374023],["不仅是",-13.90384578704834],["▁steek",-13.903864860534668],["▁Regeln",-13.90388011932373],["စည်း",-13.903885841369627],["▁praksi",-13.903887748718262],["▁सरकारलाई",-13.903889656066896],["▁navodil",-13.903905868530272],["▁بهداشتی",-13.9039306640625],["크림",-13.903934478759766],["ЕЧ",-13.903940200805664],["обласного",-13.90394115447998],["िब",-13.903945922851562],["▁പന്ത",-13.903950691223145],["синин",-13.903963088989258],["شید",-13.903971672058104],["一起来",-13.903998374938965],["IDAD",-13.90401554107666],["▁1876",-13.90401554107666],["fähigkeit",-13.904019355773926],["ാണല്ലോ",-13.904023170471191],["පාර",-13.904024124145508],["рича",-13.90404987335205],["▁searching",-13.904067993164062],["▁faydalan",-13.904069900512695],["▁botên",-13.904094696044922],["▁rêya",-13.904095649719238],["▁moka",-13.904104232788086],["▁dodáva",-13.9041109085083],["爱心",-13.904111862182615],["▁ప్రయత్నం",-13.904117584228516],["做出了",-13.904122352600098],["▁депутата",-13.90412712097168],["dorp",-13.904129028320312],["▁наступа",-13.90413761138916],["▁ачаа",-13.904142379760742],["ерів",-13.90415096282959],["▁нээлттэй",-13.904163360595703],["▁മകന്",-13.904186248779297],["▁Тел",-13.904197692871094],["kowo",-13.904200553894045],["定め",-13.904216766357422],["loženie",-13.904217720031738],["▁улан",-13.904221534729004],["ທະເລ",-13.90423583984375],["▁turma",-13.904254913330078],["▁nevet",-13.904266357421877],["▁energijos",-13.904288291931152],["ચુ",-13.904356956481934],["▁virtuvė",-13.904366493225098],["७६",-13.904380798339844],["▁унија",-13.904409408569336],["▁анықтама",-13.904410362243652],["▁የሚታ",-13.9044189453125],["ประกอบด้วย",-13.904433250427246],["▁вниманието",-13.90444564819336],["▁వేసి",-13.904452323913574],["▁نشو",-13.904473304748535],["▁integration",-13.904478073120115],["▁edebilir",-13.904504776000977],["နစ်",-13.904542922973633],["▁،‬",-13.904555320739746],["▁duhej",-13.904569625854492],["▁ବହୁତ",-13.904595375061035],["▁Znam",-13.904622077941896],["▁ribuan",-13.904632568359377],["τία",-13.904637336730955],["▁raconte",-13.904638290405272],["▁Dacia",-13.90464210510254],["▁captiv",-13.90464973449707],["적이고",-13.9046630859375],["▁मोहन",-13.904672622680664],["▁fjalla",-13.904678344726562],["त्रे",-13.904680252075195],["▁amire",-13.90468692779541],["▁René",-13.90468978881836],["▁adatto",-13.904698371887209],["▁zakonom",-13.904747009277344],["விட்டார்",-13.904773712158203],["▁stałe",-13.904779434204102],["▁opozici",-13.904790878295898],["478",-13.904802322387695],["▁əmr",-13.90480899810791],["学期",-13.904841423034668],["▁काठमाडौंमा",-13.904868125915527],["▁Анатол",-13.904870986938477],["▁pamant",-13.904881477355955],["▁BRU",-13.904895782470703],["▁ถ้าคุณ",-13.904930114746094],["▁tuossa",-13.904939651489258],["▁شفا",-13.904949188232422],["▁ଶୁଣି",-13.904951095581056],["▁oferim",-13.904984474182127],["роскоп",-13.904986381530762],["ЕТЕ",-13.904990196228027],["▁Յու",-13.90499210357666],["furi",-13.90499496459961],["▁barbati",-13.905030250549316],["▁admis",-13.905041694641112],["▁মানব",-13.905065536499023],["▁cuplu",-13.905104637145996],["▁kakav",-13.905121803283691],["▁arbejdsplads",-13.90513038635254],["gerðir",-13.90514850616455],["▁συνδέ",-13.905181884765623],["브리",-13.905
244827270508],["▁דמ",-13.905261993408203],["▁planeet",-13.90528392791748],["▁재미있",-13.905284881591797],["Гар",-13.905285835266112],["▁повезан",-13.905295372009276],["3-7",-13.905296325683594],["ንዴ",-13.905308723449709],["ဗာ",-13.90532112121582],["▁твои",-13.905343055725098],["▁eniten",-13.905364990234377],["▁ملکي",-13.905376434326172],["ξουμε",-13.905378341674805],["▁күл",-13.90538215637207],["▁biztonságos",-13.905394554138184],["的空間",-13.905415534973145],["متحان",-13.905437469482422],["练习",-13.905437469482422],["卧",-13.905440330505373],["▁reunió",-13.905445098876951],["▁රටවල",-13.90544605255127],["وافق",-13.905461311340332],["ריקה",-13.905472755432127],["趴",-13.905478477478027],["氛圍",-13.905479431152344],["ttömyys",-13.90548324584961],["ाचार्य",-13.905485153198242],["▁регионе",-13.905488967895508],["疯狂",-13.90549087524414],["协商",-13.90549373626709],["宿泊",-13.905506134033203],["ຂະຫນາດ",-13.90550708770752],["বাহিনী",-13.905508041381836],["ຕ້ອນຮັບ",-13.905508041381836],["Ồ",-13.905508041381836],["▁Europejskiej",-13.905508041381836],["▁Luxemburg",-13.905508041381836],["▁háirithe",-13.905508041381836],["▁kebahagiaan",-13.905508041381836],["▁nGaeilge",-13.905508041381836],["▁najważniejszy",-13.905508041381836],["▁perspiciatis",-13.905508041381836],["▁vëmendje",-13.905508041381836],["▁Маркетинг",-13.905508041381836],["▁ашыруу",-13.905508041381836],["▁декілька",-13.905508041381836],["▁стосується",-13.905508041381836],["▁সপ্তাহ",-13.905508041381836],["▁ਸਾਹਮਣੇ",-13.905508041381836],["▁ନିମନ୍ତେ",-13.905508041381836],["▁ఖర్చు",-13.905508041381836],["▁ಮುಂಬೈ",-13.905508041381836],["▁angezeigt",-13.905508995056152],["▁nakalipas",-13.905508995056152],["▁sutinkate",-13.905508995056152],["▁zukünftig",-13.905508995056152],["▁Ольга",-13.905508995056152],["▁актриса",-13.905508995056152],["▁алгачкы",-13.905508995056152],["▁ନୂଆଦିଲ୍ଲୀ",-13.905508995056152],["▁ସାମିଲ",-13.905508995056152],["잊",-13.905508995056152],["▁Hercegovina",-13.905509948730469],["▁názov",-13.905509948730469],["▁Почетна",-13.905509948730469],["▁прынцып",-13.905509948730469],["▁परिस्थिती",-13.905509948730469],["▁స్పష్టం",-13.905509948730469],["▁숙소",-13.905509948730469],["▁esforzo",-13.905510902404783],["▁beigās",-13.905511856079102],["гуманітар",-13.905512809753418],["▁liczba",-13.905512809753418],["▁مصنوعی",-13.905512809753418],["▁välkomna",-13.905513763427734],["▁ахвяр",-13.905513763427734],["່ຽງ",-13.905519485473633],["▁ինչպիսի",-13.905519485473633],["▁хөдөө",-13.905521392822266],["▁ללכת",-13.905521392822266],["▁ಮಾನವ",-13.905522346496582],["▁හඬ",-13.905523300170898],["▁Fidesz",-13.905525207519531],["▁Paggamot",-13.905525207519531],["▁українська",-13.905525207519531],["თანხმებ",-13.905526161193848],["呼び",-13.905526161193848],["▁Arizona",-13.905527114868164],["▁분쟁",-13.905527114868164],["展望",-13.90552806854248],["▁prirode",-13.905529975891112],["INFO",-13.90553379058838],["▁Remember",-13.90553379058838],["▁ਕਰਦਿਆਂ",-13.905536651611328],["▁Դատ",-13.905537605285645],["▁ఉదయం",-13.905537605285645],["ျပည္သူ",-13.90553855895996],["▁решења",-13.90555191040039],["▁tentokrát",-13.905552864074709],["▁گویا",-13.90555477142334],["▁borra",-13.905564308166504],["▁పేరుతో",-13.90556526184082],["▁दायर",-13.905571937561035],["▁riduzione",-13.9055757522583],["▁kenderaan",-13.905577659606934],["▁відкриття",-13.905579566955566],["▁ಬಂದಿದೆ",-13.905584335327148],["▁napokon",-13.905588150024414],["預約",-13.90558910369873],["INGER",-13.905604362487791],["▁έτος",-13.905604362487791],["▁זכות",-13.905606269836426],["▁Johannesburg",-13.905609130859377],["▁বাবে",-13.9056129455
5664],["公示",-13.90561294555664],["▁הציבורי",-13.905614852905272],["κλει",-13.905617713928224],["▁ब्राह्मण",-13.905620574951172],["▁Umsatz",-13.905628204345703],["▁дээрээ",-13.905632972717283],["▁veoma",-13.90563678741455],["▁Mufti",-13.9056396484375],["▁አቅ",-13.90564250946045],["強大的",-13.905649185180664],["▁особисто",-13.905661582946776],["▁הנושא",-13.905662536621094],["▁ਆਸ",-13.905675888061523],["▁የእግዚአብሔር",-13.90567684173584],["मनु",-13.905679702758787],["ม่วง",-13.905679702758787],["▁Marshall",-13.905681610107422],["▁Cikgu",-13.905682563781738],["▁ترجمو",-13.90568733215332],["індивідуальн",-13.905698776245115],["▁beatae",-13.905699729919434],["▁그것을",-13.90570068359375],["▁ஆறு",-13.9057035446167],["▁kompaniya",-13.905706405639648],["▁relações",-13.90574073791504],["LÜ",-13.905742645263672],["▁खून",-13.905742645263672],["СКЕ",-13.905743598937988],["▁त्यसलाई",-13.905749320983888],["▁specielt",-13.90575122833252],["▁dificuldades",-13.905774116516112],["▁suplemento",-13.905784606933594],["▁ребята",-13.905787467956545],["▁zaměstna",-13.90579319000244],["▁матер",-13.90579319000244],["▁плач",-13.90585708618164],["അത",-13.90586757659912],["癫痫病",-13.905888557434082],["▁вестник",-13.905890464782717],["راپ",-13.905892372131348],["റിൽ",-13.90589427947998],["යෝජන",-13.905924797058104],["▁كش",-13.90593147277832],["టె",-13.905942916870115],["▁Động",-13.905965805053713],["▁හානි",-13.90597152709961],["iqtisadi",-13.90598964691162],["北方",-13.905999183654783],["연구원",-13.90601921081543],["见面",-13.90605354309082],["▁главным",-13.906061172485352],["▁Klage",-13.90606689453125],["▁тіркеу",-13.906067848205566],["▁фокусира",-13.906068801879885],["▁الفنية",-13.90607738494873],["▁대체",-13.906084060668944],["сайхан",-13.906107902526855],["▁славе",-13.906113624572754],["▁estime",-13.906123161315918],["▁تارىخ",-13.906129837036133],["▁zvýši",-13.906150817871094],["▁ужасно",-13.90615177154541],["▁ماقال",-13.906157493591309],["وڪي",-13.906183242797852],["▁reached",-13.9061861038208],["امات",-13.90618896484375],["▁McG",-13.9061918258667],["موضوع",-13.906194686889648],["▁Brz",-13.906200408935549],["ጌታ",-13.90620231628418],["マル",-13.906203269958496],["eszköz",-13.906208992004396],["ができます",-13.906211853027344],["találkozó",-13.906219482421877],["بيان",-13.906285285949709],["▁جتن",-13.906285285949709],["्यः",-13.90628719329834],["sõidu",-13.906288146972656],["▁председателя",-13.90629768371582],["▁futbolcu",-13.906312942504885],["hängen",-13.90632152557373],["发生在",-13.906333923339844],["▁веће",-13.906338691711426],["trukket",-13.90634059906006],["▁quantité",-13.906347274780272],["▁прошли",-13.906359672546388],["พูดถึง",-13.9063720703125],["▁folyamatos",-13.906375885009766],["زول",-13.906379699707031],["мых",-13.906384468078612],["▁الفقر",-13.90638542175293],["▁медија",-13.906389236450195],["▁Сак",-13.906391143798828],["▁Estoy",-13.90639877319336],["▁gimana",-13.90640926361084],["warszaw",-13.906416893005373],["▁алушы",-13.906424522399902],["▁Duty",-13.906431198120115],["▁antiqui",-13.906471252441406],["▁يرى",-13.90648078918457],["סול",-13.906497955322266],["▁Служба",-13.906513214111328],["prawdziw",-13.90653133392334],["▁Wiele",-13.906548500061035],["φίλ",-13.90658473968506],["登上",-13.906593322753906],["▁kümme",-13.906599044799805],["▁parlato",-13.906607627868652],["▁włos",-13.9066162109375],["邻",-13.90662956237793],["▁ገቢ",-13.906636238098145],["▁এসে",-13.906667709350586],["▁tõsta",-13.906676292419434],["▁budd",-13.906692504882812],["▁fiyatlar",-13.906694412231444],["▁noemen",-13.90670108795166],["▁czarny",-13.906715393066406],[
"▁მიწა",-13.906725883483888],["▁qaydada",-13.906737327575684],["ਸ਼ਰ",-13.90673828125],["▁ಗೆದ್ದ",-13.906753540039062],["▁čakal",-13.906770706176758],["▁نداد",-13.906790733337402],["shak",-13.906822204589844],["▁పైన",-13.906827926635742],["▁geração",-13.906835556030272],["การค้นหา",-13.906861305236816],["▁baigi",-13.906878471374512],["▁bilioni",-13.906882286071776],["မက",-13.906883239746094],["▁Možnost",-13.906893730163574],["▁अन्त",-13.906904220581056],["▁Læg",-13.906907081604004],["▁మనకు",-13.906923294067385],["▁OLED",-13.906925201416016],["▁வேக",-13.906936645507812],["のなら",-13.906944274902344],["▁դեպքերում",-13.90694522857666],["▁அவை",-13.906970024108888],["కపోవడం",-13.906973838806152],["普通的",-13.906982421875],["ukia",-13.906991004943848],["арне",-13.907010078430176],["▁Meneja",-13.907012939453123],["美景",-13.90701961517334],["▁bellach",-13.9070405960083],["məsində",-13.907048225402832],["▁продукты",-13.907061576843262],["▁rekod",-13.907072067260742],["▁пътища",-13.90707778930664],["izării",-13.907098770141602],["時報",-13.90710163116455],["▁suggestions",-13.907130241394045],["держав",-13.907143592834473],["energie",-13.907176971435549],["▁पार्टीका",-13.907180786132812],["まったく",-13.90718936920166],["▁текстове",-13.907203674316406],["kunan",-13.907207489013672],["▁કહેવા",-13.907217025756836],["▁dzīve",-13.907233238220217],["▁राति",-13.907236099243164],["▁стена",-13.90724277496338],["上記",-13.907255172729492],["голяма",-13.907276153564451],["▁trygghet",-13.907281875610352],["▁çekim",-13.907282829284668],["σαι",-13.9072847366333],["▁თამ",-13.907286643981934],["▁бога",-13.907296180725098],["NIT",-13.907299995422363],["وجد",-13.907320022583008],["▁střel",-13.90732479095459],["的歷史",-13.907349586486816],["ופו",-13.907392501831056],["químic",-13.907403945922852],["▁gefur",-13.90741729736328],["nõuete",-13.907423973083496],["▁gacan",-13.907440185546877],["▁525",-13.907441139221191],["Հայաստան",-13.907447814941406],["sphere",-13.90747356414795],["డిగా",-13.90748691558838],["▁жері",-13.90750503540039],["▁hoogste",-13.907522201538086],["なりません",-13.907526969909668],["▁mieszkań",-13.907548904418944],["уулдаг",-13.90756130218506],["▁granice",-13.90756893157959],["▁чтоб",-13.90757656097412],["▁walki",-13.907588958740234],["人もいる",-13.907620429992676],["▁hodet",-13.907674789428713],["▁500.000",-13.907676696777344],["▁смислу",-13.907697677612305],["تدخل",-13.907705307006836],["▁الاج",-13.907707214355469],["▁айылында",-13.907710075378418],["تقن",-13.907716751098633],["▁ਵਸ",-13.90773105621338],["бързо",-13.90775203704834],["▁Ajax",-13.907764434814451],["▁брут",-13.907776832580566],["▁Србин",-13.907792091369627],["στί",-13.907816886901855],["▁razgled",-13.907893180847168],["▁organiseren",-13.907937049865724],["▁mexi",-13.907964706420898],["esnį",-13.907987594604492],["▁tryb",-13.908024787902832],[";;;",-13.908029556274414],["ֵי",-13.908040046691896],["▁jmen",-13.908111572265623],["ขวัญ",-13.90813159942627],["▁piper",-13.908169746398926],["的压力",-13.908185958862305],["▁λιγ",-13.908193588256836],["▁Chancen",-13.90821361541748],["▁покупка",-13.908214569091797],["▁σχετική",-13.90822696685791],["▁նախագահը",-13.908252716064451],["брудн",-13.908282279968262],["横浜",-13.908285140991213],["紛紛",-13.908287048339844],["▁بچی",-13.908289909362791],["愁",-13.908289909362791],["ـــــ",-13.908295631408691],["အိပ္",-13.908300399780272],["نۇ",-13.90830135345459],["රජාතන්ත්",-13.90830135345459],["▁baarlamaanka",-13.90830135345459],["▁kolačića",-13.90830135345459],["▁manutenzione",-13.90830135345459],["▁mércores",-13.90830135345459],["▁re
ikalų",-13.90830135345459],["▁réservé",-13.90830135345459],["▁verkrijgbaar",-13.90830135345459],["▁συνήθως",-13.90830135345459],["▁Чернігів",-13.90830135345459],["▁залишається",-13.90830135345459],["▁тікелей",-13.90830135345459],["▁اڳوڻي",-13.90830135345459],["▁نتيجي",-13.90830135345459],["▁پنځه",-13.90830135345459],["▁अत्यंत",-13.90830135345459],["▁डाऊनलोड",-13.90830135345459],["▁पूर्वाधार",-13.90830135345459],["▁प्रतिकृया",-13.90830135345459],["▁গ্রেপ্তার",-13.90830135345459],["▁ਆਨਲਾਈਨ",-13.90830135345459],["▁સ્થિતિ",-13.90830135345459],["▁ఎందుకంటే",-13.90830135345459],["▁ಖಾಸಗಿ",-13.90830135345459],["▁ეკლესიის",-13.90830135345459],["▁მოსახლეობის",-13.90830135345459],["▁მუშაობს",-13.90830135345459],["▁즐길",-13.90830135345459],["ഃ",-13.908302307128906],["▁Məktəb",-13.908302307128906],["▁civakî",-13.908302307128906],["▁έναρξη",-13.908302307128906],["▁варыянт",-13.908302307128906],["▁компаній",-13.908302307128906],["▁көзөмөл",-13.908302307128906],["▁размишља",-13.908302307128906],["▁съобщава",-13.908302307128906],["▁قوماندان",-13.908302307128906],["▁फाइदा",-13.908302307128906],["▁ట్రైలర్",-13.908302307128906],["▁කැමැත්ත",-13.908302307128906],["▁አድርገው",-13.908302307128906],["▁voedsel",-13.908303260803224],["▁ansökan",-13.90830421447754],["▁գրառում",-13.90830421447754],["ត្រឹម",-13.908305168151855],["▁pozemku",-13.908305168151855],["▁riconosce",-13.908305168151855],["▁शेरबहादुर",-13.908305168151855],["▁лучших",-13.908306121826172],["▁หางาน",-13.908306121826172],["▁тармагы",-13.908308029174805],["大きさ",-13.908308029174805],["▁bathroom",-13.90830898284912],["▁αληθ",-13.90830898284912],["▁എന്തെങ്കിലും",-13.908309936523438],["▁perfection",-13.908312797546388],["▁döntött",-13.908313751220703],["▁Автомат",-13.90831470489502],["▁цркви",-13.90831470489502],["▁લાગી",-13.908315658569336],["▁Еңбек",-13.908318519592283],["▁мәтін",-13.908318519592283],["冷静",-13.908319473266602],["gebühr",-13.908320426940918],["纖維",-13.908320426940918],["▁ismerős",-13.908324241638184],["▁обвинува",-13.9083251953125],["تكلفة",-13.908327102661133],["▁αναφορά",-13.908327102661133],["▁Kriminal",-13.90832805633545],["▁αποκλειστικά",-13.90832805633545],["▁Найбільш",-13.90832805633545],["كىن",-13.908329963684082],["▁विशेषज्ञ",-13.908330917358398],["▁عاشقانه",-13.908333778381348],["[21]",-13.908337593078612],["▁తెలుగులో",-13.90833854675293],["▁réduire",-13.908339500427246],["▁violência",-13.90834140777588],["▁deportivo",-13.908349990844728],["▁cinematograf",-13.908352851867676],["▁नसकेको",-13.908357620239258],["▁реттеу",-13.90836238861084],["▁tregojnë",-13.908365249633787],["▁Jaamac",-13.908367156982422],["▁heldigvis",-13.908370018005373],["▁chục",-13.908370971679688],["▁מאכן",-13.908371925354004],["之家",-13.908374786376951],["▁ಕಾಯ",-13.9083833694458],["▁ಮಾಡುವುದು",-13.908385276794434],["▁ისიც",-13.908400535583496],["▁decizie",-13.908401489257812],["▁Segons",-13.908404350280762],["▁збереження",-13.908409118652344],["▁Interessant",-13.908411979675291],["▁بسا",-13.90842342376709],["▁прекрати",-13.908427238464355],["▁треће",-13.908432960510254],["▁проводится",-13.908443450927734],["▁성경",-13.908443450927734],["▁ಮತ್ತ",-13.908445358276367],["▁explicó",-13.908459663391112],["▁चूत",-13.908480644226074],["▁njegovu",-13.908491134643556],["▁щастя",-13.908492088317873],["७७",-13.90849781036377],["▁Sofa",-13.90852165222168],["▁сокращен",-13.908528327941896],["ೇಷನ್",-13.90853500366211],["不止",-13.908535957336426],["▁prvky",-13.908554077148438],["▁මංගල",-13.90856647491455],["▁легче",-13.908567428588867],["▁mồ",-13.908570289611816],["▁ציון",-13.90858268
737793],["▁ministrijas",-13.908604621887209],["▁właściwie",-13.908613204956056],["spieler",-13.908615112304688],["幼儿",-13.908629417419434],["结束后",-13.908656120300291],["▁hidung",-13.908685684204102],["▁التعليمية",-13.908693313598633],["▁হাসপাতালে",-13.908699989318848],["ഫൈ",-13.90872573852539],["▁комітету",-13.90874481201172],["▁Airbus",-13.908753395080566],["▁першай",-13.908758163452148],["▁TƯ",-13.908769607543944],["▁گفتن",-13.908774375915527],["க்கொள்ள",-13.90878200531006],["▁biçimde",-13.908815383911133],["的各种",-13.908821105957031],["पुरुष",-13.908839225769045],["▁발행",-13.908860206604004],["ฮั",-13.908873558044434],["▁tillräckligt",-13.9088773727417],["搭建",-13.9088773727417],["ضبط",-13.908890724182127],["▁глубин",-13.908896446228027],["grožen",-13.90891170501709],["▁تفکر",-13.90892219543457],["▁forestal",-13.90894889831543],["Cİ",-13.908955574035645],["▁Žiad",-13.908957481384276],["▁העד",-13.908964157104492],["▁Popula",-13.908968925476074],["ണര്",-13.908977508544922],["▁Nagusia",-13.908985137939451],["भूमी",-13.908987998962402],["▁силни",-13.909003257751465],["▁эрхтэй",-13.909037590026855],["▁pieniä",-13.909049034118652],["штер",-13.909050941467283],["ವೃತ್ತ",-13.909088134765623],["▁जातात",-13.909123420715332],["조선",-13.909141540527344],["▁Chinna",-13.909149169921877],["▁pristupa",-13.90916919708252],["▁방법을",-13.909184455871582],["ያደርገ",-13.909187316894531],["саңыз",-13.909192085266112],["▁shafin",-13.909192085266112],["neitä",-13.909193992614746],["▁Birgit",-13.90919589996338],["▁دیکھی",-13.909220695495604],["▁definido",-13.909224510192873],["նիկ",-13.909244537353516],["च्छा",-13.90924835205078],["▁efektu",-13.909257888793944],["όφ",-13.909259796142578],["▁Weiß",-13.90928077697754],["פרד",-13.909317016601562],["ιώτες",-13.909319877624512],["▁жыве",-13.90932846069336],["▁Abad",-13.90933895111084],["▁Апа",-13.90933895111084],["▁starkt",-13.909348487854004],["在未来",-13.909360885620115],["шүү",-13.90937042236328],["дерди",-13.90937614440918],["▁agored",-13.909379005432127],["ਰਥ",-13.909391403198242],["قامت",-13.90939235687256],["sugár",-13.90941333770752],["беди",-13.909424781799316],["▁tímto",-13.909436225891112],["▁заняття",-13.909443855285645],["▁ігри",-13.909453392028809],["ПИС",-13.909470558166504],["▁potrebujem",-13.9094820022583],["▁gluco",-13.90949821472168],["黄色",-13.909502983093262],["▁daugiausia",-13.909507751464844],["大脑",-13.90951442718506],["ခတ္",-13.909516334533691],["▁panggilan",-13.909557342529297],["▁அலை",-13.909560203552246],["チン",-13.90957260131836],["▁седми",-13.909579277038574],["▁ngón",-13.90961456298828],["båd",-13.90961742401123],["▁tisztelet",-13.909626007080078],["▁ଖେଳ",-13.909626007080078],["仍在",-13.909638404846191],["▁pád",-13.909646034240724],["▁еднаш",-13.909652709960938],["ционно",-13.909658432006836],["▁криз",-13.909687995910645],["▁жоо",-13.909697532653809],["▁finansowe",-13.90974235534668],["dimensional",-13.909745216369627],["▁obično",-13.909753799438477],["▁tiveram",-13.90977668762207],["አላህ",-13.90977954864502],["▁ઘરમાં",-13.909782409667969],["айық",-13.909783363342283],["▁limpa",-13.909801483154297],["materiaali",-13.909854888916016],["▁comentariile",-13.90985870361328],["▁Charm",-13.909867286682127],["▁куди",-13.90987491607666],["Агро",-13.909886360168455],["▁toilette",-13.909893035888672],["తుంది",-13.909897804260254],["ธง",-13.909908294677734],["▁omfatte",-13.909927368164062],["▁pisang",-13.909930229187012],["οδότη",-13.909945487976074],["▁кодексу",-13.909966468811035],["ίτσα",-13.909978866577148],["િમ",-13.909984588623049],["happy",-13.9099884033203
12],["▁Новий",-13.90999984741211],["thom",-13.910026550292969],["<3",-13.910030364990234],["ุ่น",-13.910036087036133],["λερ",-13.910039901733398],["▁Δικ",-13.910039901733398],["▁voulu",-13.910048484802246],["шчэ",-13.910057067871094],["▁prettig",-13.910066604614258],["▁докази",-13.910073280334473],["ාවේ",-13.910113334655762],["▁citado",-13.910117149353027],["gtay",-13.910123825073242],["▁carinho",-13.910137176513672],["▁инфекци",-13.910157203674316],["1962",-13.910162925720217],["▁Garage",-13.910173416137695],["ָר",-13.910176277160645],["▁ketgan",-13.910181999206545],["▁humili",-13.910191535949709],["▁Ohio",-13.910195350646973],["▁silný",-13.910201072692873],["▁фирме",-13.91020679473877],["場合がございます",-13.910236358642578],["▁cutie",-13.910240173339844],["▁Jsem",-13.910272598266602],["▁pardon",-13.910298347473145],["的特点",-13.910308837890623],["▁የተሰጠ",-13.91030979156494],["去世",-13.910335540771484],["▁හිතුන",-13.91034698486328],["ഗു",-13.910350799560549],["Граждан",-13.910351753234863],["▁стручно",-13.910354614257812],["лууда",-13.910372734069824],["▁placed",-13.910422325134276],["สาวๆ",-13.910426139831545],["▁Pereira",-13.910435676574709],["▁historiske",-13.910435676574709],["▁Петровић",-13.910441398620604],["▁Giv",-13.910457611083984],["пса",-13.910463333129885],["tisíc",-13.910468101501465],["▁gig",-13.910482406616213],["▁gjennomføre",-13.91050910949707],["▁წვე",-13.910517692565918],["▁stjerner",-13.91055679321289],["▁Bundestag",-13.910558700561523],["▁Вит",-13.910562515258787],["HID",-13.910581588745115],["מידע",-13.910588264465332],["▁खालील",-13.91062831878662],["▁politiek",-13.91063117980957],["▁маалыматты",-13.91064167022705],["▁guardia",-13.910674095153809],["不论",-13.910690307617188],["▁izvērtē",-13.91069507598877],["▁hjemmet",-13.910711288452148],["▁басары",-13.910719871520996],["▁greatest",-13.910721778869627],["▁Μάρ",-13.910722732543944],["▁İndir",-13.910727500915527],["▁κυβερν",-13.910730361938477],["հավ",-13.910743713378906],["▁תלוי",-13.910747528076172],["▁ගැට",-13.910772323608398],["▁કાવ્ય",-13.910778999328612],["▁golden",-13.910783767700195],["675",-13.910799026489258],["▁regalar",-13.910804748535156],["▁kulta",-13.910809516906738],["▁uğraş",-13.910832405090332],["▁الطبيعي",-13.910840034484863],["▁избега",-13.910850524902344],["əsən",-13.910883903503418],["тические",-13.910889625549316],["▁lungi",-13.910910606384276],["ISTO",-13.910911560058594],["გრაფი",-13.910918235778809],["▁solicitude",-13.910943031311035],["െപ്പോലെ",-13.91095733642578],["▁Цэ",-13.91102695465088],["▁herkese",-13.911036491394045],["9.00",-13.91103744506836],["絡",-13.911052703857422],["▁რამდენი",-13.911057472229004],["烦",-13.911059379577637],["瓷",-13.911063194274902],["震撼",-13.91107940673828],["ศิลป",-13.911080360412598],["גישות",-13.911083221435549],["मूर्त",-13.91109561920166],["zwart",-13.911096572875977],["겐",-13.911096572875977],["···",-13.911099433898926],["บุรีรัมย์",-13.911100387573242],["ត្រៀម",-13.911102294921877],["ម៉ឺន",-13.911102294921877],["▁സുപ്രീം",-13.911102294921877],["評判",-13.911102294921877],["Korisničko",-13.911103248596191],["הזדמנות",-13.911103248596191],["▁AZƏRTAC",-13.911103248596191],["▁Birthday",-13.911103248596191],["▁Shavkat",-13.911103248596191],["▁Thầy",-13.911103248596191],["▁cidadanía",-13.911103248596191],["▁láithreán",-13.911103248596191],["▁podmienok",-13.911103248596191],["▁πρώτες",-13.911103248596191],["▁дополнительные",-13.911103248596191],["▁коопсуздук",-13.911103248596191],["▁անվճար",-13.911103248596191],["▁ստուգ",-13.911103248596191],["▁ئەيدىز",-13.911103248596191],["▁ت
شریح",-13.911103248596191],["▁जिम्मेदार",-13.911103248596191],["▁बौद्ध",-13.911103248596191],["▁रणनीति",-13.911103248596191],["▁আমাকে",-13.911103248596191],["▁ଶ୍ରେଣୀ",-13.911103248596191],["▁ସ୍ୱାମୀ",-13.911103248596191],["▁വെറുതെ",-13.911103248596191],["▁විජේ",-13.911103248596191],["▁ฯลฯ",-13.911103248596191],["▁ນະຄອນຫຼວງວຽງຈັນ",-13.911103248596191],["▁bivše",-13.911104202270508],["▁հաշվարկ",-13.911104202270508],["▁coinvolge",-13.911105155944824],["▁navyše",-13.911105155944824],["▁Далее",-13.911105155944824],["▁денежных",-13.91110610961914],["▁तारीख",-13.91110610961914],["▁барилгын",-13.911107063293455],["▁ሰነድ",-13.911107063293455],["▁Keskustelu",-13.91110897064209],["▁ਹੋਵੇਗੀ",-13.91110897064209],["▁అడిగి",-13.91110897064209],["▁Michigan",-13.911109924316406],["▁iegādāties",-13.911109924316406],["▁infraestructura",-13.911109924316406],["▁Тэгээд",-13.911109924316406],["▁вагітності",-13.911109924316406],["▁podijeli",-13.911110877990724],["▁웹사이트",-13.911110877990724],["શાસ્ત્ર",-13.911113739013672],["▁tilmaamay",-13.911113739013672],["▁veľkosť",-13.911113739013672],["ไซส์",-13.911114692687988],["▁volontà",-13.911114692687988],["▁припрема",-13.911115646362305],["▁notizie",-13.91111660003662],["▁انداخت",-13.911117553710938],["无奈",-13.91111946105957],["▁hosszabb",-13.911121368408203],["▁ilginç",-13.911121368408203],["ländische",-13.91112232208252],["▁اليابان",-13.911123275756836],["▁düşüb",-13.911124229431152],["▁kalbėjo",-13.911128997802734],["▁වනවිට",-13.911130905151367],["▁Hussein",-13.911131858825684],["▁czytelnik",-13.911131858825684],["▁संकल्प",-13.911131858825684],["รัด",-13.911142349243164],["董事長",-13.911142349243164],["▁iyagoo",-13.91114616394043],["▁giày",-13.911148071289062],["▁erfolgen",-13.911151885986328],["▁настройки",-13.911151885986328],["▁Slovenijo",-13.911152839660645],["▁samospráv",-13.911152839660645],["▁progetta",-13.91115951538086],["▁SISTEM",-13.911161422729492],["▁실시간",-13.91116714477539],["▁верзија",-13.911171913146973],["▁stručnjak",-13.911174774169922],["▁klachten",-13.911177635192873],["ถัง",-13.91118049621582],["▁షేర్",-13.91118049621582],["▁pomiar",-13.911187171936035],["▁반영",-13.911188125610352],["▁വരിക",-13.911203384399414],["▁השונים",-13.911211967468262],["▁ميمبر",-13.911216735839844],["▁collezione",-13.91122055053711],["▁draugu",-13.911224365234377],["▁niedrig",-13.911229133605955],["▁Poslan",-13.911230087280272],["▁აგვისტოს",-13.911235809326172],["▁Fjern",-13.911240577697754],["▁omistaja",-13.911245346069336],["▁हक्क",-13.911245346069336],["▁campionat",-13.911246299743652],["▁እኮ",-13.911248207092283],["▁موزه",-13.911261558532717],["ЛИЙН",-13.911266326904297],["▁semmilyen",-13.911267280578612],["▁jartzen",-13.911274909973145],["▁Tanjung",-13.911284446716309],["oggend",-13.911295890808104],["▁стабільн",-13.911297798156738],["Naj",-13.91130256652832],["▁अंध",-13.91130256652832],["▁प्रसार",-13.911312103271484],["▁לעבור",-13.911321640014648],["▁sessão",-13.911343574523926],["更容易",-13.911344528198242],["▁მოძრაობის",-13.91134548187256],["▁όσοι",-13.911358833312988],["▁goedkope",-13.911364555358888],["luettelo",-13.91136646270752],["▁Ruby",-13.911369323730469],["árás",-13.91137409210205],["最初は",-13.911375999450684],["▁Uzņēm",-13.911383628845217],["▁агентства",-13.911386489868164],["▁Mulher",-13.91141414642334],["▁Klimat",-13.9114408493042],["▁ബാങ്ക",-13.911452293395996],["▁הרע",-13.91146183013916],["▁Функц",-13.911471366882324],["RIKA",-13.911473274230955],["▁afraid",-13.911476135253906],["▁вреди",-13.911480903625488],["BAND",-13.91148853302002],["raamatu",-13.91150188
446045],["OOM",-13.91151237487793],["▁مهمی",-13.91151237487793],["▁በጎ",-13.911514282226562],["يىل",-13.91151523590088],["▁dimensions",-13.911531448364258],["სწავლა",-13.911540985107422],["▁izliv",-13.911544799804688],["▁Cậu",-13.911548614501951],["▁పనిచేస",-13.911551475524902],["▁احوال",-13.911569595336914],["մատ",-13.911575317382812],["▁wadanka",-13.911581993103027],["▁skutočn",-13.911587715148926],["هائي",-13.911590576171877],["wymi",-13.911599159240724],["وعية",-13.91161060333252],["▁Kuinka",-13.911629676818848],["▁Ямар",-13.911641120910645],["471",-13.911649703979492],["▁ανάπτυξης",-13.911672592163086],["▁tidigt",-13.911678314208984],["앞",-13.911690711975098],["▁Pocket",-13.911699295043944],["▁компанијата",-13.91171932220459],["관련",-13.91172981262207],["ניסיון",-13.911763191223145],["Авто",-13.91176414489746],["▁Dawn",-13.91176414489746],["UNDI",-13.911771774291992],["հեր",-13.91177749633789],["▁армии",-13.911805152893066],["betalning",-13.91183853149414],["▁통일",-13.91185188293457],["ερό",-13.911894798278809],["▁național",-13.911903381347656],["▁dixital",-13.911920547485352],["Sor",-13.911924362182615],["▁velem",-13.911944389343262],["հակ",-13.911952018737791],["بقية",-13.911970138549805],["βέ",-13.911986351013184],["▁Ekonomika",-13.911996841430664],["▁maaş",-13.912006378173828],["ปีนี้",-13.912012100219728],["有名な",-13.912019729614258],["тяга",-13.912032127380373],["▁tacaíocht",-13.912044525146484],["▁المنتج",-13.912056922912598],["▁새로",-13.91206169128418],["▁ڏنا",-13.912064552307127],["▁modifier",-13.912076950073242],["おいて",-13.912086486816406],["▁Patrik",-13.91209602355957],["Music",-13.912110328674316],["νομο",-13.912123680114746],["მირ",-13.912123680114746],["▁aferoj",-13.912139892578123],["leacht",-13.912149429321287],["ക്കൂട്ട",-13.912166595458984],["▁вторник",-13.912184715270996],["ിനുള്ള",-13.912189483642578],["▁Braga",-13.912196159362791],["▁вышел",-13.91221046447754],["▁Раш",-13.912222862243652],["▁paprasta",-13.912237167358398],["여자",-13.912246704101562],["▁zhvillimin",-13.91225814819336],["Roman",-13.912267684936523],["▁suaminya",-13.912271499633787],["▁yerə",-13.9122896194458],["▁Θέ",-13.91229248046875],["▁Tıp",-13.912299156188965],["▁visiter",-13.912299156188965],["▁staðir",-13.912318229675291],["▁целата",-13.912334442138672],["свещен",-13.91235065460205],["kkö",-13.912354469299316],["▁შედეგები",-13.912364959716797],["▁forhandler",-13.912365913391112],["▁perceber",-13.91236972808838],["piger",-13.912378311157228],["▁steel",-13.912378311157228],["ક્રો",-13.912384033203123],["489",-13.912389755249023],["interpret",-13.912397384643556],["infarkt",-13.912399291992188],["ાયેલા",-13.912433624267578],["אלט",-13.912445068359377],["เปล",-13.91245937347412],["▁минуты",-13.912506103515623],["▁siyahısı",-13.912525177001951],["ίθ",-13.9125337600708],["5.2",-13.912553787231444],["່ວ",-13.912557601928713],["▁bebê",-13.912564277648926],["▁ascult",-13.91257667541504],["Электр",-13.912596702575684],["ngumpul",-13.91259765625],["▁Прием",-13.912599563598633],["▁Дума",-13.912601470947266],["1.9",-13.912604331970217],["▁plimba",-13.912605285644531],["▁müddəti",-13.912614822387695],["▁ocenia",-13.912622451782228],["▁Suami",-13.912631034851074],["▁کلب",-13.91263484954834],["nnuksen",-13.9126558303833],["▁పట్టు",-13.912687301635742],["폴리",-13.912693977355955],["▁bendi",-13.91272258758545],["▁olmaması",-13.912748336791992],["▁pornire",-13.912750244140623],["гүйгээр",-13.912755966186523],["ውጣ",-13.912757873535156],["▁rozhodla",-13.912763595581056],["වුම්",-13.91277027130127],["旅行社",-13.9127960205078
12],["႐ွိ",-13.912798881530762],["östä",-13.912813186645508],["▁reprise",-13.91281509399414],["▁hatinya",-13.912826538085938],["▁vertrek",-13.912834167480469],["▁сурагч",-13.912846565246582],["pikir",-13.912848472595217],["▁Toinen",-13.912880897521973],["gjatë",-13.912884712219238],["Қаз",-13.912886619567873],["gjør",-13.912898063659668],["460",-13.9128999710083],["▁awalnya",-13.912914276123049],["▁pudiera",-13.912922859191896],["▁انگل",-13.912925720214844],["▁Așa",-13.912928581237791],["▁opremo",-13.912936210632324],["gatta",-13.912946701049805],["▁යුත්",-13.912961959838867],["▁խաղում",-13.912967681884766],["▁сөздөр",-13.912993431091309],["▁miliónov",-13.91301727294922],["▁greinar",-13.913022994995115],["▁faktorer",-13.91302490234375],["წყე",-13.913029670715332],["▁komandi",-13.913043975830078],["分散",-13.913047790527344],["bhadh",-13.913049697875977],["നൊ",-13.91305446624756],["▁тулга",-13.91306495666504],["▁Miš",-13.913095474243164],["▁Riya",-13.913100242614746],["ZH",-13.91310691833496],["企业家",-13.913114547729492],["νδυ",-13.91313934326172],["identité",-13.913178443908691],["▁blogue",-13.913203239440918],["джэн",-13.913227081298828],["자금",-13.913249015808104],["نسي",-13.913286209106444],["▁погреш",-13.913331031799316],["▁groene",-13.913339614868164],["▁prezentate",-13.913339614868164],["AVO",-13.913347244262695],["ίσματα",-13.913348197937012],["产能",-13.913363456726074],["707",-13.9133882522583],["▁انگلیس",-13.913434028625488],["▁осуществлять",-13.913448333740234],["▁त्यावर",-13.913454055786133],["▁जुड़",-13.913461685180664],["การท่องเที่ยว",-13.913482666015623],["▁okolicy",-13.913494110107422],["luca",-13.91349983215332],["maðurinn",-13.913540840148926],["▁beyin",-13.913557052612305],["dinho",-13.913567543029783],["▁liittyvät",-13.913576126098633],["▁Савез",-13.91359806060791],["▁ମ୍ୟା",-13.913599014282228],["▁słów",-13.91362190246582],["ິ່ງ",-13.91363525390625],["▁Επιτροπή",-13.913637161254885],["▁وصلت",-13.913640975952148],["▁магистрал",-13.913657188415527],["கிறோம்",-13.913667678833008],["平和",-13.913677215576172],["▁makinë",-13.91370677947998],["आइ",-13.91371726989746],["ในเมือง",-13.9137544631958],["▁tādā",-13.913766860961914],["তেই",-13.913778305053713],["彰",-13.913793563842772],["▁πρά",-13.913800239562988],["ιστικού",-13.91382122039795],["▁நீதிமன்ற",-13.91382884979248],["娃",-13.913854598999023],["שואה",-13.913861274719238],["ബ്ബ",-13.91386890411377],["вички",-13.913872718811035],["vuoro",-13.913873672485352],["铜",-13.913878440856934],["▁Cố",-13.913881301879885],["HON",-13.913894653320312],["욱",-13.913895606994627],["4.3",-13.913896560668944],["▁આપના",-13.913896560668944],["▁ਅਧਿਕਾਰ",-13.913902282714844],["磚",-13.913904190063477],["סטודנט",-13.913908004760742],["อ้วน",-13.913910865783691],["പരിശോധന",-13.913911819458008],["▁Artikulli",-13.913911819458008],["▁boodskap",-13.913911819458008],["▁eléctrico",-13.913911819458008],["▁mirovan",-13.913911819458008],["▁qualunque",-13.913911819458008],["▁tafadhali",-13.913911819458008],["▁zdravstveno",-13.913911819458008],["▁zverejnen",-13.913911819458008],["▁ďakujem",-13.913911819458008],["▁δύσκολο",-13.913911819458008],["▁κόστος",-13.913911819458008],["▁τηλεόραση",-13.913911819458008],["▁вероватно",-13.913911819458008],["▁вёсцы",-13.913911819458008],["▁кырдаал",-13.913911819458008],["▁сякаш",-13.913911819458008],["▁միասին",-13.913911819458008],["▁קילומטר",-13.913911819458008],["▁اختتام",-13.913911819458008],["▁انٹرویو",-13.913911819458008],["▁ٿرپارڪر",-13.913911819458008],["▁आन्तरिक",-13.913911819458008],["▁फॉलो",-13.913911819458008],["▁କେମିତି",
-13.913911819458008],["▁ಮಹಿಳಾ",-13.913911819458008],["▁გამოყენების",-13.913911819458008],["▁cidadão",-13.913912773132324],["▁conținut",-13.913912773132324],["▁oktoobri",-13.913912773132324],["▁Димитри",-13.913912773132324],["▁канцэрт",-13.913912773132324],["▁остальные",-13.913912773132324],["▁универс",-13.913912773132324],["▁щастие",-13.913912773132324],["▁રાહુલ",-13.913912773132324],["▁კულტურის",-13.913912773132324],["▁Beschwerde",-13.91391372680664],["▁Liepājas",-13.91391372680664],["▁ውጪ",-13.91391372680664],["טיח",-13.913914680480955],["▁କୋର୍ଟ",-13.913914680480955],["▁vīrieti",-13.913915634155272],["▁ივნისი",-13.91391658782959],["▁روانشناسی",-13.913917541503906],["▁ಮಹತ್ವ",-13.913917541503906],["▁રજૂ",-13.913918495178224],["▁παράδοση",-13.91391944885254],["▁амьдрах",-13.91391944885254],["▁ügyfél",-13.913920402526855],["▁Miskolc",-13.913921356201172],["▁pumunta",-13.913922309875488],["sprøjt",-13.913923263549805],["▁Heinrich",-13.913923263549805],["▁Stefano",-13.913925170898438],["▁ಪ್ರಚಾರ",-13.913925170898438],["▁προσφορά",-13.91392707824707],["▁размислува",-13.91392707824707],["▁зглоб",-13.91392993927002],["thuajse",-13.913930892944336],["▁жөнөт",-13.913930892944336],["▁هجڻ",-13.913930892944336],["▁പ്രമുഖ",-13.913930892944336],["▁kurban",-13.913932800292969],["▁нејзината",-13.913933753967283],["▁నిద్ర",-13.913938522338867],["▁بقیه",-13.91394329071045],["▁Spoločnosť",-13.913952827453612],["増加",-13.913959503173828],["ຈັບ",-13.913965225219728],["增強",-13.913966178894045],["▁esperança",-13.91396713256836],["▁tuduhan",-13.913973808288574],["▁зөвлөлийн",-13.91398811340332],["▁उन्ह",-13.913989067077637],["▁즉시",-13.913997650146484],["▁נולד",-13.914000511169434],["▁완성",-13.91400909423828],["ເຈົ້າຫນ້າທີ່",-13.914013862609863],["▁ներկայացված",-13.914013862609863],["พอใจ",-13.91401481628418],["វិភាគ",-13.914015769958496],["ຍອດ",-13.914016723632812],["▁хадгала",-13.914024353027344],["▁ආපු",-13.914029121398926],["▁paskelbt",-13.914031982421877],["یلا",-13.914036750793455],["▁لگانے",-13.91403865814209],["安静",-13.914045333862305],["▁ഭക്ഷണം",-13.914056777954102],["▁супольна",-13.914063453674316],["▁സ്ഥലം",-13.914069175720217],["▁כבוד",-13.914084434509276],["▁hozta",-13.914091110229492],["တာဝန်",-13.914092063903809],["▁خنده",-13.914098739624023],["网页",-13.914098739624023],["▁lägre",-13.914102554321287],["ravno",-13.914114952087402],["▁متداول",-13.9141206741333],["▁ddinas",-13.914121627807615],["▁bàsic",-13.91413116455078],["▁Obraz",-13.914154052734377],["▁بیماران",-13.914158821105955],["▁gesunde",-13.914162635803224],["▁cuireadh",-13.914165496826172],["3-1",-13.91418743133545],["യാണു",-13.91422176361084],["▁06:00",-13.91423511505127],["sūtīju",-13.9142427444458],["പ്പൂ",-13.914249420166016],["▁posiziona",-13.914254188537598],["▁күз",-13.914257049560549],["ګې",-13.914263725280762],["▁Zatím",-13.914263725280762],["日報",-13.914270401000977],["ፈልግ",-13.914283752441406],["▁quaedam",-13.914284706115724],["▁strikt",-13.914286613464355],["▁اتار",-13.914286613464355],["iúna",-13.91430377960205],["جنگ",-13.914304733276367],["▁доступны",-13.914307594299316],["אנשים",-13.914310455322266],["▁avšak",-13.91431999206543],["▁kalibr",-13.914324760437012],["▁preprost",-13.914326667785645],["▁përgjigj",-13.91432762145996],["▁rəhbərlik",-13.914331436157228],["笔者",-13.914331436157228],["▁Outlet",-13.914362907409668],["▁evitando",-13.9143705368042],["▁Σαν",-13.914372444152832],["在过去",-13.914374351501465],["▁lukija",-13.914385795593262],["▁akrab",-13.914398193359377],["דיג",-13.914401054382324],["▁руководителя",-13.9144287109375],[
"దర్శన",-13.914441108703612],["ավորված",-13.914450645446776],["▁داند",-13.914453506469728],["ବାଇ",-13.91445541381836],["▁gavo",-13.914459228515623],["▁հղում",-13.914461135864258],["▁Hùng",-13.914472579956056],["▁ծրագիրը",-13.914475440979004],["▁Carra",-13.914478302001951],["privat",-13.91450309753418],["▁جراح",-13.914510726928713],["▁Америк",-13.914520263671877],["▁verkopen",-13.914521217346191],["役割",-13.914521217346191],["zketa",-13.914538383483888],["▁भन",-13.914538383483888],["▁possess",-13.91454029083252],["ସାଇ",-13.914542198181152],["личные",-13.914546966552734],["방침",-13.914558410644531],["வீடியோ",-13.91456127166748],["หนังโป๊",-13.914565086364746],["▁સંત",-13.914567947387695],["ല്ലാതെ",-13.914579391479492],["▁bezahlen",-13.914589881896973],["▁مردمی",-13.914612770080566],["▁coltiva",-13.914616584777832],["▁સારા",-13.914661407470703],["▁მამაკაცი",-13.914669036865234],["▁byłoby",-13.91467571258545],["平方",-13.914680480957031],["▁прават",-13.914687156677246],["▁wyświetla",-13.91468906402588],["▁molde",-13.914690017700195],["▁lăsat",-13.914695739746094],["tetut",-13.91470718383789],["▁independiente",-13.9147310256958],["itātē",-13.914751052856444],["奥运会",-13.91478443145752],["▁قهرمان",-13.914809226989746],["▁Pós",-13.91481113433838],["▁Flower",-13.91481590270996],["▁deputetë",-13.914817810058594],["▁paglala",-13.914849281311035],["▁republike",-13.914860725402832],["لەردە",-13.914864540100098],["▁Говори",-13.914864540100098],["suvremen",-13.914872169494627],["úgy",-13.914872169494627],["▁doğur",-13.914889335632324],["विद",-13.914897918701172],["ttaisi",-13.914920806884766],["своє",-13.914920806884766],["▁Анна",-13.914958000183104],["▁شاعري",-13.914960861206056],["▁графика",-13.914968490600586],["相手に",-13.914973258972168],["▁ماست",-13.914998054504396],["KTU",-13.91501522064209],["▁tørre",-13.915017127990724],["▁krásné",-13.915064811706545],["▁merawat",-13.915088653564451],["oorlog",-13.915099143981934],["▁ድረ",-13.915121078491213],["▁denunciar",-13.915132522583008],["етки",-13.915141105651855],["▁Çelik",-13.915151596069336],["BOOK",-13.91516399383545],["▁shine",-13.91516399383545],["מטופל",-13.915206909179688],["▁Μη",-13.915207862854004],["ēlu",-13.915224075317385],["▁Komponen",-13.915225982666016],["▁Αυτός",-13.915243148803713],["▁өзіне",-13.915275573730469],["ираше",-13.915283203125],["▁Ömer",-13.91530418395996],["▁venez",-13.915326118469238],["▁לעצט",-13.91533088684082],["▁skaper",-13.91537094116211],["▁ekstremt",-13.91538143157959],["▁луч",-13.91542911529541],["▁tänapäeva",-13.915451049804688],["▁ridica",-13.91545295715332],["▁väikse",-13.915467262268066],["▁Nên",-13.915492057800291],["ルール",-13.915515899658203],["▁undrar",-13.915532112121582],["တယ္လို႔",-13.91554069519043],["▁યુવા",-13.915546417236328],["ॅक",-13.915587425231934],["▁felicidade",-13.915603637695312],["▁Ratu",-13.915609359741213],["プラス",-13.915627479553224],["▁podzim",-13.915630340576172],["▁обеспечен",-13.915721893310549],["▁rozlicz",-13.915729522705078],["▁الحس",-13.915735244750977],["පෝ",-13.915748596191406],["港口",-13.915751457214355],["▁darrera",-13.915754318237305],["▁izvoz",-13.915762901306152],["טק",-13.915764808654783],["▁souhlasu",-13.915780067443848],["▁ұрпақ",-13.915803909301758],["▁Şam",-13.91581916809082],["▁Picture",-13.91584014892578],["477",-13.91584587097168],["▁umrl",-13.915852546691896],["▁договоре",-13.915882110595703],["palvelujen",-13.915897369384766],["increment",-13.91592025756836],["feltétel",-13.91592502593994],["▁나타",-13.915926933288574],["▁سماجي",-13.915937423706056],["даваць",-13.91594123840332]
,["eysaa",-13.915964126586914],["▁praktiko",-13.915977478027344],["teľné",-13.916001319885254],["ग्ने",-13.916024208068848],["经历了",-13.916061401367188],["▁പതിവ",-13.916068077087402],["فط",-13.91606903076172],["ברית",-13.916083335876465],["심사",-13.916094779968262],["YEN",-13.916097640991213],["ハン",-13.916112899780272],["രചന",-13.91612434387207],["▁Марија",-13.916125297546388],["lærer",-13.916126251220703],["ერთიან",-13.916133880615234],["obligatoriu",-13.916136741638184],["klänning",-13.916152000427246],["▁realizan",-13.916152000427246],["▁постигнат",-13.91616153717041],["▁melon",-13.916163444519045],["▁podkreśla",-13.916178703308104],["▁aparent",-13.916180610656738],["▁Suntem",-13.916187286376951],["sikring",-13.916192054748535],["サイトで",-13.916193008422852],["▁begrep",-13.91620635986328],["เมืองไทย",-13.916213989257812],["ಧರ",-13.91624355316162],["Благодар",-13.91628646850586],["ของเล่น",-13.916297912597656],["174",-13.91632080078125],["▁Druh",-13.916332244873049],["ίνου",-13.91634464263916],["גרים",-13.916376113891602],["речен",-13.916385650634766],["▁fixé",-13.916388511657717],["品味",-13.916393280029297],["▁dikim",-13.91639518737793],["켓",-13.91641616821289],["百萬",-13.916417121887209],["▁کانون",-13.916436195373535],["▁personoj",-13.916438102722168],["無人",-13.916451454162598],["▁rodina",-13.916457176208496],["▁ప్రయత్న",-13.91649055480957],["▁desarrollado",-13.91650390625],["▁തിരിച്ചറിയ",-13.916507720947266],["ல்கள்",-13.916540145874023],["▁vybavení",-13.916543006896973],["▁իրականացման",-13.916543960571287],["ようにしましょう",-13.916549682617188],["频道",-13.916584014892578],["▁qetë",-13.916592597961426],["▁Håpe",-13.91659927368164],["▁Geir",-13.916622161865234],["부를",-13.916622161865234],["▁seçə",-13.916638374328612],["枯",-13.916662216186523],["猎",-13.916680335998535],["▁الثلاث",-13.916693687438965],["▁manzil",-13.916696548461914],["▁üheks",-13.916702270507812],["ಪದ",-13.916706085205078],["纠纷",-13.916707038879396],["シーズン",-13.916707992553713],["中小企業",-13.916709899902344],["盜",-13.916719436645508],["▁lounge",-13.91672420501709],["kaké",-13.91672706604004],["ตะวันออก",-13.916728019714355],["บัตรเครดิต",-13.916728019714355],["ေျပး",-13.916728019714355],["សរសេរ",-13.916728019714355],["әдістемелік",-13.916728973388672],["ቿ",-13.916728973388672],["▁Rodriguez",-13.916728973388672],["▁benieuwd",-13.916728973388672],["▁künftig",-13.916728973388672],["▁mengungkapkan",-13.916728973388672],["▁mesyuarat",-13.916728973388672],["▁muodostu",-13.916728973388672],["▁podjetij",-13.916728973388672],["▁représentant",-13.916728973388672],["▁strinjate",-13.916728973388672],["▁târziu",-13.916728973388672],["▁uzinduzi",-13.916728973388672],["▁wniosku",-13.916728973388672],["▁încredere",-13.916728973388672],["▁Ərdoğan",-13.916728973388672],["▁έγιναν",-13.916728973388672],["▁επικεφαλής",-13.916728973388672],["▁ιστότοπο",-13.916728973388672],["▁Истанбул",-13.916728973388672],["▁Політика",-13.916728973388672],["▁дальнейшем",-13.916728973388672],["▁рашэнне",-13.916728973388672],["▁эцэг",-13.916728973388672],["▁փետրվարի",-13.916728973388672],["▁خشونت",-13.916728973388672],["▁پوئتي",-13.916728973388672],["▁खेळाडू",-13.916728973388672],["▁बिजली",-13.916728973388672],["▁ਜ਼ਿਆਦਾ",-13.916728973388672],["▁ପୂଜା",-13.916728973388672],["▁இளைஞர்",-13.916728973388672],["▁ಅನಂತ",-13.916728973388672],["បញ្ចូល",-13.916729927062988],["▁okolnosti",-13.916729927062988],["▁réxime",-13.916729927062988],["▁éjszaka",-13.916729927062988],["▁жөнінде",-13.916729927062988],["▁традиция",-13.916729927062988],["▁կասկած",-13.916729927062988],["▁କ୍ଷତି",-13.9167
29927062988],["หลอด",-13.916730880737305],["▁prelucra",-13.916730880737305],["▁suknelė",-13.916730880737305],["▁സ്വതന്ത്ര",-13.916730880737305],["▁chiqqan",-13.91673183441162],["▁ਹਜ਼ਾਰ",-13.91673183441162],["▁செலுத்த",-13.91673183441162],["▁saugumo",-13.916732788085938],["▁superficial",-13.916732788085938],["▁δικαιώματα",-13.916732788085938],["▁вятър",-13.916732788085938],["▁የጦር",-13.916733741760254],["▁ಪೋಸ್ಟ್",-13.91673469543457],["나눔",-13.916735649108888],["耳朵",-13.91673755645752],["▁кабінет",-13.916739463806152],["▁ਜੇਕਰ",-13.916740417480469],["▁یادآور",-13.916741371154783],["▁batzuek",-13.916742324829102],["▁କମିଟି",-13.916742324829102],["▁නියෝග",-13.916742324829102],["tuotteiden",-13.916743278503418],["▁неправильно",-13.916743278503418],["▁రక్త",-13.91674518585205],["▁inmediato",-13.916747093200684],["▁kcal",-13.916747093200684],["公眾",-13.916748046875],["▁शेअर",-13.916749000549316],["▁నడి",-13.91675090789795],["▁Ботев",-13.916756629943848],["▁بدلون",-13.916757583618164],["ตลอดเวลา",-13.916759490966797],["▁sodeloval",-13.916760444641112],["▁лъжи",-13.91676139831543],["▁зөрч",-13.916780471801758],["▁Hír",-13.916781425476074],["▁ārpus",-13.91678237915039],["▁நேரத்தில்",-13.916784286499023],["▁дріб",-13.91679573059082],["ååå",-13.916800498962402],["▁افسران",-13.916800498962402],["▁cáncer",-13.916807174682615],["二胎",-13.916807174682615],["▁xeyli",-13.916821479797363],["▁Einfach",-13.916826248168944],["▁비판",-13.916830062866213],["▁თემაზე",-13.91683578491211],["สว่าง",-13.91683864593506],["రియల్",-13.916841506958008],["▁الغذائية",-13.916842460632324],["▁primă",-13.916845321655272],["▁diskusjon",-13.916855812072754],["ကြိုး",-13.91685962677002],["▁fixeron",-13.916862487792969],["▁вулкан",-13.916863441467283],["▁Саюз",-13.916874885559082],["▁onderwys",-13.91688346862793],["▁общественных",-13.916884422302246],["▁тужи",-13.916895866394045],["▁resolution",-13.916898727416992],["▁atuação",-13.916903495788574],["▁вклучен",-13.916910171508787],["▁przegląda",-13.916912078857422],["▁орындар",-13.916913032531738],["تقان",-13.916919708251951],["▁ehrlich",-13.91692352294922],["▁kvinnen",-13.916924476623535],["rassen",-13.916929244995115],["▁عطر",-13.916934967041016],["▁хармон",-13.916948318481444],["▁ocorreu",-13.916953086853027],["登入",-13.916953086853027],["▁pārbau",-13.916967391967772],["פלו",-13.916975021362305],["▁тізімі",-13.916975021362305],["ിലേക്ക",-13.916979789733888],["▁stimola",-13.916993141174316],["inscrire",-13.917003631591797],["▁پکې",-13.917003631591797],["▁тренутно",-13.91701602935791],["▁Хятадын",-13.91701889038086],["ољуб",-13.917025566101074],["▁ефективност",-13.917047500610352],["▁confident",-13.917055130004885],["▁оплату",-13.91706657409668],["▁участков",-13.917078018188477],["▁rahasia",-13.917108535766602],["จะสามารถ",-13.917120933532717],["▁sealt",-13.917129516601562],["rrupció",-13.91714572906494],["▁funções",-13.91714572906494],["▁opgenomen",-13.917146682739258],["▁ברחוב",-13.9171781539917],["▁torstai",-13.917181968688965],["▁Russell",-13.917183876037598],["▁Sepak",-13.917213439941406],["▁facultate",-13.91722297668457],["一直都",-13.917224884033203],["tilladelse",-13.917234420776367],["▁specifiche",-13.917236328125],["hání",-13.917240142822266],["▁adran",-13.917254447937012],["▁обществото",-13.917282104492188],["ここでは",-13.917304039001465],["▁Tôn",-13.917305946350098],["▁സെക്",-13.917335510253906],["Ș",-13.917339324951172],[",000,000",-13.917340278625488],["▁lepszy",-13.917354583740234],["▁आयोगले",-13.917354583740234],["комерційн",-13.917372703552246],["▁kişiye",-13.917376518249512],["
携帯",-13.917380332946776],["▁Česku",-13.917384147644045],["עיד",-13.917386054992676],["▁Vladi",-13.917407989501951],["乡村",-13.917413711547852],["SQL",-13.917425155639648],["▁elevada",-13.917434692382812],["ជីវិត",-13.917435646057127],["رسول",-13.917438507080078],["我现在",-13.91744327545166],["▁ਸਵਾਲ",-13.917448043823242],["▁수입",-13.917458534240724],["ვდნენ",-13.917473793029783],["ערכה",-13.917518615722656],["▁tanuló",-13.91752815246582],["친구",-13.917532920837402],["ባቸውን",-13.917536735534668],["သိန်း",-13.917539596557615],["▁kəndində",-13.917545318603516],["لیف",-13.917547225952148],["働いて",-13.917564392089844],["ଗର",-13.917566299438477],["▁iptal",-13.917577743530272],["학기",-13.917577743530272],["▁säilita",-13.91758155822754],["న్నది",-13.917585372924805],["▁improviz",-13.917596817016602],["stöd",-13.917600631713867],["▁хранения",-13.917614936828612],["▁રજ",-13.917616844177246],["▁утвр",-13.917617797851562],["▁خواص",-13.91762924194336],["▁mandra",-13.917635917663574],["してくれた",-13.917656898498535],["▁šoli",-13.917681694030762],["▁blink",-13.917686462402344],["excl",-13.91768741607666],["واری",-13.917712211608888],["▁lived",-13.917715072631836],["998",-13.917716026306152],["▁gistingu",-13.917730331420898],["▁Praegu",-13.917746543884276],["▁felismer",-13.917757034301758],["▁Такім",-13.917774200439451],["bənd",-13.917794227600098],["хаар",-13.917797088623049],["▁Bildiri",-13.917819023132324],["սկի",-13.917824745178224],["捨",-13.917828559875488],["Oct",-13.917853355407717],["▁lugemis",-13.917876243591309],["▁kendilerini",-13.91788101196289],["▁факторов",-13.917916297912598],["▁prázdn",-13.917924880981444],["▁Ayah",-13.917969703674316],["▁niezbędne",-13.917972564697266],["ကလည္း",-13.917980194091797],["▁שאינ",-13.918008804321287],["▁giáp",-13.918014526367188],["▁colaborare",-13.918023109436035],["నట",-13.918108940124512],["▁štart",-13.91812515258789],["၀၀၀",-13.918126106262209],["មហា",-13.918126106262209],["美洲",-13.918146133422852],["далеку",-13.918158531188965],["писки",-13.918168067932127],["▁ভয়",-13.918169021606444],["▁Deux",-13.918170928955078],["시험",-13.91817855834961],["▁կարգավորման",-13.91818141937256],["ловић",-13.918193817138672],["▁टूट",-13.91819667816162],["զում",-13.918197631835938],["▁действи",-13.918205261230469],["▁agência",-13.91821575164795],["▁variera",-13.918242454528809],["גוש",-13.91826343536377],["ക്കാരന്",-13.918286323547363],["qala",-13.918290138244627],["▁1887",-13.918290138244627],["រត់",-13.918295860290527],["พร้อมส่ง",-13.918296813964844],["жня",-13.918301582336426],["553",-13.91830825805664],["北美",-13.91831874847412],["▁primary",-13.918328285217283],["ルーム",-13.918330192565918],["▁ئۈز",-13.918338775634766],["▁droši",-13.918341636657717],["▁Yuri",-13.91834545135498],["माझ",-13.918362617492676],["▁যুবক",-13.918376922607422],["一一",-13.918378829956056],["▁nepār",-13.91838550567627],["▁Televizi",-13.918417930603027],["▁Anteil",-13.918461799621582],["løg",-13.918475151062012],["באנגלית",-13.91847801208496],["▁ovogodišnj",-13.918479919433594],["▁بیگ",-13.918498992919922],["viol",-13.91850471496582],["▁skutecznie",-13.91852569580078],["interessa",-13.918532371520996],["ویز",-13.918548583984377],["▁Lubi",-13.918553352355955],["долларын",-13.918561935424805],["▁Francio",-13.91856288909912],["משרד",-13.91856575012207],["▁מפר",-13.91856575012207],["वार्ता",-13.918570518493652],["ROV",-13.91857624053955],["▁vokse",-13.91857624053955],["▁eskubide",-13.91858196258545],["▁hvít",-13.918590545654297],["gratis",-13.918593406677246],["사고",-13.918594360351562],["impia",-13.918596267700195],["ise
erde",-13.918608665466309],["▁Gilbert",-13.918612480163574],["Park",-13.918618202209473],["дзень",-13.91862678527832],["▁ఇచ్చే",-13.918645858764648],["▁Harmoni",-13.918646812438965],["льская",-13.918658256530762],["பாய்",-13.918660163879396],["▁Europi",-13.918672561645508],["1111",-13.918679237365724],["▁Foot",-13.918684005737305],["▁beaux",-13.91869068145752],["ପତ୍ର",-13.918701171875],["▁porrfilm",-13.918729782104492],["лювання",-13.918744087219238],["▁କରିବାର",-13.918747901916504],["▁नारा",-13.918766975402832],["1982",-13.918774604797363],["▁Онда",-13.918777465820312],["▁calling",-13.918783187866213],["▁አላቸው",-13.91878890991211],["▁sageli",-13.918807983398438],["سطح",-13.918808937072754],["▁pjeva",-13.918816566467283],["▁ხალხის",-13.918816566467283],["▁atrakci",-13.918827056884766],["варот",-13.918838500976562],["▁аав",-13.918858528137209],["ூர",-13.918926239013672],["izálás",-13.918933868408203],["▁දුටු",-13.918937683105469],["basert",-13.918943405151367],["▁ikinä",-13.91896915435791],["▁sasaran",-13.918972969055176],["▁impor",-13.918990135192873],["ادت",-13.918994903564451],["лля",-13.919000625610352],["▁бүлийн",-13.919010162353516],["▁rewş",-13.919013023376465],["การขาย",-13.919037818908691],["▁súbor",-13.919037818908691],["駅から",-13.919045448303224],["ഭ്",-13.919075012207031],["riel",-13.919079780578612],["▁kaçır",-13.91908073425293],["leiden",-13.91908359527588],["▁логика",-13.919093132019045],["våning",-13.919107437133787],["PPA",-13.91916561126709],["Академ",-13.919178009033203],["سيس",-13.91917896270752],["▁dráma",-13.919180870056152],["ಸ್ಟರ್",-13.919184684753418],["我々",-13.919190406799316],["Angel",-13.919196128845217],["Temp",-13.91921615600586],["▁naročila",-13.91922092437744],["▁Lirik",-13.919245719909668],["▁Phar",-13.919252395629885],["▁инициатива",-13.919264793395996],["คุม",-13.919291496276855],["▁کڻ",-13.91929817199707],["ของประเทศ",-13.919337272644045],["▁ალექსანდრე",-13.919343948364258],["▁громади",-13.919371604919434],["ztatás",-13.919398307800291],["▁valstybė",-13.919415473937988],["▁грађани",-13.919463157653809],["经济社会发展",-13.91947078704834],["▁gospodarski",-13.919486045837402],["▁قىلىشقا",-13.91948699951172],["泳",-13.919500350952148],["撕",-13.919504165649414],["坛",-13.919510841369627],["▁mainos",-13.91952133178711],["▁хоног",-13.919549942016602],["こういった",-13.919549942016602],["ప్రదేశ్",-13.919551849365234],["นวัตกรรม",-13.919551849365234],["ជំនួយ",-13.91955280303955],["នាយករដ្ឋមន្ត្រី",-13.919553756713867],["▁Amnesty",-13.919553756713867],["▁entdecken",-13.919553756713867],["▁entdeckt",-13.919553756713867],["▁kisheria",-13.919553756713867],["▁llywodraeth",-13.919553756713867],["▁réfléchi",-13.919553756713867],["▁tecnológica",-13.919553756713867],["▁võrreldes",-13.919553756713867],["▁Тәрбие",-13.919553756713867],["▁папярэдн",-13.919553756713867],["▁политичке",-13.919553756713867],["▁середовища",-13.919553756713867],["▁филозоф",-13.919553756713867],["▁қауіпсіздік",-13.919553756713867],["▁الصهيوني",-13.919553756713867],["▁المنظمة",-13.919553756713867],["▁शानदार",-13.919553756713867],["▁সম্ভব",-13.919553756713867],["▁කමෙන්ට්",-13.919553756713867],["▁සහභාගී",-13.919553756713867],["▁အမေရိကန်",-13.919553756713867],["화장품",-13.919553756713867],["Калифорни",-13.919554710388184],["▁iseloomu",-13.919554710388184],["▁činjenica",-13.919554710388184],["▁көрініс",-13.919554710388184],["▁притисок",-13.919554710388184],["▁ದರ್ಶನ್",-13.919554710388184],["กองทัพ",-13.9195556640625],["▁bụi",-13.9195556640625],["▁dhjetor",-13.9195556640625],["▁બેન્ક",-13.9195556640625],["▁ਸੜਕ",-13.919556617
736816],["โครงสร้าง",-13.919557571411133],["▁attualmente",-13.919557571411133],["▁Жаңалықтар",-13.919557571411133],["▁వైసీపీ",-13.919557571411133],["▁కలిపి",-13.919559478759766],["เยี่ยมชม",-13.919560432434082],["▁χωριό",-13.919560432434082],["▁dixwaze",-13.919561386108398],["▁hatramin",-13.919561386108398],["▁дэмжлэг",-13.919562339782717],["კვირვე",-13.919564247131348],["▁kedvező",-13.919564247131348],["▁तत्कालीन",-13.919565200805664],["▁Canadá",-13.919567108154297],["▁полувреме",-13.919569969177246],["যাত্রা",-13.91957187652588],["▁datorită",-13.919574737548828],["▁ಒಟ್ಟು",-13.91957664489746],["▁파악",-13.919577598571776],["▁հայերեն",-13.919580459594728],["[23]",-13.919581413269045],["▁parasysh",-13.919583320617676],["▁ترمیم",-13.919589042663574],["▁우승",-13.919591903686523],["▁Trị",-13.91959285736084],["▁Ndër",-13.919594764709473],["▁ciągle",-13.919594764709473],["▁sivîl",-13.919594764709473],["ကြေး",-13.919595718383787],["▁flashback",-13.919596672058104],["▁raznih",-13.919596672058104],["▁فهذا",-13.919596672058104],["▁가득",-13.919598579406738],["▁ekziston",-13.919599533081056],["▁наказание",-13.919599533081056],["▁سیریز",-13.919600486755373],["▁krašto",-13.919602394104004],["станак",-13.919610023498535],["▁kopumā",-13.919610023498535],["▁ዘርፍ",-13.919611930847168],["ୀତ",-13.9196138381958],["▁Střed",-13.919618606567385],["หายไป",-13.919625282287598],["▁zakres",-13.919628143310549],["▁manuel",-13.91963005065918],["▁bercakap",-13.919631004333496],["▁musela",-13.919631004333496],["כרת",-13.919636726379396],["▁därefter",-13.919637680053713],["▁Übung",-13.919645309448242],["fjall",-13.919647216796877],["▁empleados",-13.919651985168455],["▁fejezet",-13.919651985168455],["▁φυσικό",-13.91965389251709],["ዳሴ",-13.91965675354004],["▁kênh",-13.919663429260254],["▁tóm",-13.919670104980469],["▁דפים",-13.919673919677734],["▁לציין",-13.919676780700684],["▁Ministar",-13.91970920562744],["▁למקום",-13.919715881347656],["分割",-13.919722557067873],["▁intreaga",-13.919723510742188],["านุ",-13.91972541809082],["▁białe",-13.919733047485352],["▁ئىچىدە",-13.919740676879885],["▁ארום",-13.919742584228516],["လဲ။",-13.919744491577148],["▁łyż",-13.919756889343262],["▁игроков",-13.919759750366213],["▁elektrische",-13.919763565063477],["▁cyhoeddus",-13.919764518737791],["彈性",-13.91976547241211],["▁Максім",-13.919785499572754],["▁pasako",-13.91978645324707],["▁контр",-13.91978645324707],["だな",-13.91979694366455],["zzare",-13.919804573059082],["▁commemora",-13.91981315612793],["કલ્પ",-13.919821739196776],["▁giành",-13.91982364654541],["▁dříve",-13.919842720031738],["▁للاست",-13.919849395751951],["▁מועד",-13.919857025146484],["▁kellemes",-13.9198579788208],["▁Kyk",-13.919858932495115],["▁seguindo",-13.919864654541016],["▁കൊല",-13.919877052307127],["פֿט",-13.919880867004396],["▁konkrétní",-13.919893264770508],["▁IKT",-13.91989803314209],["СТВА",-13.919901847839355],["▁urmatoarele",-13.919906616210938],["桌上",-13.919913291931152],["เหลือง",-13.919918060302734],["634",-13.91993808746338],["▁gerus",-13.919947624206545],["▁bestemme",-13.91995906829834],["кажуваат",-13.919984817504885],["odás",-13.919995307922363],["ង្ក",-13.919999122619627],["▁retraite",-13.919999122619627],["▁құны",-13.920018196105955],["▁vogla",-13.920042991638184],["▁전용",-13.9200439453125],["▁Eskort",-13.920049667358398],["لتنسيق",-13.92006778717041],["その他の",-13.92007064819336],["▁ներքո",-13.920074462890623],["▁Annars",-13.92008113861084],["यौ",-13.920093536376951],["천안",-13.9201021194458],["bazi",-13.920105934143066],["▁жағдайы",-13.920106887817385],["▁Један",-13.9
2011260986328],["▁podatku",-13.920113563537598],["poort",-13.920136451721191],["從而",-13.920141220092772],["▁harakati",-13.920145988464355],["ວິທີ",-13.920164108276367],["▁wykorzystanie",-13.920171737670898],["▁fijne",-13.92017936706543],["størrelse",-13.920180320739746],["spielen",-13.9202241897583],["leiro",-13.920246124267578],["▁Jesper",-13.920249938964844],["▁ուղարկ",-13.920251846313477],["▁온도",-13.920268058776855],["arbeider",-13.920269966125488],["▁외부",-13.920269966125488],["▁Դրան",-13.920280456542969],["شرق",-13.920283317565918],["▁շրջանի",-13.92028522491455],["стандарт",-13.920286178588867],["evalua",-13.920289039611816],["▁coram",-13.920297622680664],["leyecek",-13.920299530029297],["▁Pine",-13.920316696166992],["▁montaj",-13.92033576965332],["▁welatê",-13.920340538024902],["▁prvá",-13.920352935791016],["vjetori",-13.920370101928713],["▁собствени",-13.920380592346191],["▁саюз",-13.920382499694824],["க்கொண்டு",-13.92038345336914],["▁zahrada",-13.92041015625],["льну",-13.920412063598633],["രിപ്പ",-13.920413970947266],["▁ספרי",-13.920421600341797],["▁modificat",-13.92043399810791],["ਭੁ",-13.92043685913086],["▁Bapa",-13.920451164245604],["▁பிக்",-13.920454025268556],["▁ஆட்ட",-13.9204683303833],["小吃",-13.9204740524292],["▁erősít",-13.920490264892578],["▁받았다",-13.920490264892578],["ಿರಬೇಕು",-13.920506477355955],["പ്പില്",-13.920516967773438],["گیا",-13.920533180236816],["prés",-13.920541763305664],["ították",-13.920570373535156],["ഗീ",-13.920571327209473],["▁እያሉ",-13.920589447021484],["▁поведения",-13.920598030090332],["▁걸어",-13.920598983764648],["▁Referat",-13.920607566833496],["▁svečano",-13.920612335205078],["▁cruth",-13.92062282562256],["▁openbaar",-13.920623779296877],["▁নেয়া",-13.920628547668455],["▁древни",-13.920629501342772],["▁kasutatav",-13.920632362365724],["▁правоў",-13.92064380645752],["▁berriro",-13.920649528503418],["latina",-13.92065715789795],["ukkaa",-13.92067813873291],["▁pudr",-13.920681953430176],["साव",-13.920695304870604],["ΝΗ",-13.920696258544922],["▁спортивно",-13.920701026916504],["ంక్",-13.920719146728516],["▁ditanya",-13.920719146728516],["▁comporte",-13.920726776123049],["▁מיטן",-13.920729637145996],["密集",-13.920746803283691],["Indi",-13.92076587677002],["aikata",-13.920780181884766],["▁colocado",-13.920791625976562],["تولى",-13.920798301696776],["intézet",-13.92081069946289],["▁назвал",-13.920814514160156],["żdż",-13.92085075378418],["▁īpašuma",-13.920865058898926],["ACTIV",-13.920869827270508],["paikat",-13.920886039733888],["▁ավելին",-13.920899391174316],["แนะนําให้",-13.920921325683594],["nějších",-13.920924186706545],["▁അപ",-13.9209623336792],["▁Funny",-13.92097282409668],["տին",-13.920973777770996],["άρχη",-13.921028137207031],["▁Багато",-13.921055793762209],["▁اتصالات",-13.921069145202637],["▁pipar",-13.92107391357422],["ไปกับ",-13.9210844039917],["▁yasak",-13.921092987060549],["▁Jimbo",-13.921102523803713],["▁разделя",-13.921138763427734],["ADOS",-13.921149253845217],["▁مومن",-13.921154022216797],["ستند",-13.921157836914062],["服飾",-13.921160697937012],["▁stemmen",-13.92116928100586],["▁denaro",-13.921173095703123],["arjev",-13.921175956726074],["タン",-13.92121410369873],["General",-13.921216011047363],["006",-13.921218872070312],["▁identidad",-13.921246528625488],["▁kurulan",-13.921255111694336],["▁litoral",-13.921277046203612],["▁удален",-13.92128849029541],["ունը",-13.921330451965332],["▁Järjest",-13.921344757080078],["ಗಳೇ",-13.921360969543455],["ციო",-13.921364784240724],["▁problémov",-13.921372413635254],["755",-13.92137336730957],["▁executive",-1
3.921393394470217],["▁عباد",-13.921412467956545],["▁শেষে",-13.92141819000244],["▁достави",-13.921425819396973],["▁работат",-13.921426773071287],["▁sevdiği",-13.921463012695312],["Cel",-13.92147159576416],["▁duši",-13.921476364135742],["▁फ्री",-13.921488761901855],["uokite",-13.921499252319336],["▁अंश",-13.921507835388184],["▁culto",-13.921515464782717],["▁Listen",-13.92156219482422],["▁lotura",-13.921565055847168],["▁tanulás",-13.9215669631958],["▁Lounge",-13.9215726852417],["▁asiasta",-13.921606063842772],["ケー",-13.92162799835205],["▁genocid",-13.921628952026367],["ಬಹುದ",-13.921638488769531],["▁anlayış",-13.921642303466797],["экономических",-13.921645164489746],["▁حجر",-13.921645164489746],["ביק",-13.921652793884276],["▁तेन",-13.921660423278809],["▁ganun",-13.921669006347656],["▁beskou",-13.921671867370604],["szerek",-13.921673774719238],["▁şahid",-13.921685218811035],["▁acompanhar",-13.921697616577148],["փոփոխ",-13.92174243927002],["▁арышт",-13.921749114990234],["あい",-13.92175579071045],["▁న్యాయ",-13.921758651733398],["▁вечери",-13.921793937683104],["浙江省",-13.921796798706056],["מנים",-13.921798706054688],["झर",-13.921799659729004],["▁scam",-13.9218111038208],["気がする",-13.921833992004396],["лазе",-13.921839714050291],["▁никој",-13.921845436096191],["Medic",-13.921869277954102],["▁gabim",-13.921874046325684],["▁rhestr",-13.921892166137695],["▁thre",-13.921904563903809],["▁guanyar",-13.921913146972656],["συρ",-13.921914100646973],["▁disebutkan",-13.921920776367188],["▁pikema",-13.92193603515625],["킬",-13.9219388961792],["▁skratt",-13.921990394592283],["親自",-13.922000885009766],["ဆူ",-13.922002792358398],["ізацыі",-13.922019958496094],["美白",-13.922030448913574],["ymui",-13.922048568725586],["▁уступ",-13.922064781188965],["ചിത്ര",-13.922080039978027],["መተ",-13.922086715698242],["бачы",-13.922093391418455],["▁Comput",-13.922112464904783],["有時候",-13.922113418579102],["▁تسم",-13.922121047973633],["我市",-13.922121047973633],["▁дохода",-13.92213535308838],["พรรณ",-13.922136306762695],["laringiz",-13.922144889831545],["سئل",-13.922147750854492],["klärung",-13.922148704528809],["▁GST",-13.922151565551758],["ицама",-13.92215347290039],["чений",-13.922170639038086],["▁मान्य",-13.922173500061035],["კმა",-13.922197341918944],["قلم",-13.922253608703612],["▁ციხე",-13.922271728515623],["▁سأ",-13.922273635864258],["nejših",-13.922281265258787],["արշավ",-13.922281265258787],["法律法规",-13.922290802001951],["▁węd",-13.922300338745115],["▁காட்டு",-13.922304153442385],["していますが",-13.922307014465332],["吸引力",-13.922307014465332],["▁Miele",-13.922313690185549],["忽视",-13.922313690185549],["▁технички",-13.922345161437988],["获悉",-13.922346115112305],["▁független",-13.922367095947266],["▁புகைப்பட",-13.922370910644531],["構築",-13.922370910644531],["▁végét",-13.922380447387695],["แข้ง",-13.92238426208496],["ትክክለኛ",-13.922386169433594],["▁Feijóo",-13.922386169433594],["▁Gubernur",-13.922386169433594],["▁Lingüística",-13.922386169433594],["▁Schlüssel",-13.922386169433594],["▁Yalnız",-13.922386169433594],["▁confidence",-13.922386169433594],["▁dobbiamo",-13.922386169433594],["▁hulladék",-13.922386169433594],["▁szoftver",-13.922386169433594],["▁İnkişaf",-13.922386169433594],["▁αστυνομία",-13.922386169433594],["▁μοναδικό",-13.922386169433594],["▁најдобар",-13.922386169433594],["▁разнообразие",-13.922386169433594],["▁آنکھوں",-13.922386169433594],["▁پەقەت",-13.922386169433594],["▁अधिवेशन",-13.922386169433594],["▁मंजूर",-13.922386169433594],["▁ಪ್ರಾಥಮಿಕ",-13.922386169433594],["▁ഏതെങ്കിലും",-13.922386169433594],["▁Arbejds",-13.92238712310
791],["▁Forfatter",-13.92238712310791],["▁berkongsi",-13.92238712310791],["▁beruházás",-13.92238712310791],["▁gyakori",-13.92238712310791],["▁приготвя",-13.92238712310791],["▁الفيلم",-13.92238712310791],["▁ಭಾಗವಹಿಸ",-13.92238712310791],["▁მეშვეობით",-13.92238712310791],["부스타빗",-13.92238712310791],["Գյումրի",-13.922388076782228],["▁Schlafzimmer",-13.922388076782228],["▁Sonuç",-13.922388076782228],["▁madaxweynaha",-13.922388076782228],["▁Úsáid",-13.922388076782228],["▁زمستان",-13.922388076782228],["▁બજાર",-13.922388076782228],["▁انڈیا",-13.922389030456545],["▁คลิปโป๊",-13.92238998413086],["▁otwiera",-13.922390937805176],["▁സൂര്യ",-13.922390937805176],["▁игнор",-13.922391891479492],["▁آیفون",-13.922391891479492],["႐ႈ",-13.92239475250244],["▁szuper",-13.92239475250244],["▁Премиер",-13.922395706176758],["▁партыі",-13.922395706176758],["▁مىليون",-13.922395706176758],["▁जुलै",-13.922395706176758],["▁Συρία",-13.922396659851074],["▁이뤄",-13.92239761352539],["▁دانشجویی",-13.922399520874023],["လိပ်",-13.92240047454834],["▁ڪميشن",-13.92240047454834],["▁హక్కు",-13.922402381896973],["संपादन",-13.922403335571287],["▁دسترس",-13.922405242919922],["▁nesmí",-13.922407150268556],["▁sorumluluk",-13.922407150268556],["▁heureux",-13.922410011291504],["▁vizsga",-13.922410011291504],["▁seetõttu",-13.922412872314451],["graff",-13.92241668701172],["▁zayıf",-13.922419548034668],["▁සටන්",-13.922422409057615],["▁السياسة",-13.9224271774292],["ပုဒ္",-13.922429084777832],["▁Usług",-13.922432899475098],["ությունում",-13.92243480682373],["▁ਅਜੀਤ",-13.922438621520996],["▁specifieke",-13.922442436218262],["▁жълт",-13.922444343566896],["▁awayekî",-13.922447204589844],["▁bjó",-13.92245864868164],["▁것처럼",-13.922459602355955],["▁özünə",-13.922460556030272],["KETA",-13.922463417053224],["ಪಾಟೀಲ",-13.922465324401855],["▁transcend",-13.922471046447754],["▁родителям",-13.922473907470703],["ателем",-13.922477722167969],["▁бисмо",-13.922484397888184],["▁metabolism",-13.9224853515625],["▁Kooli",-13.922489166259766],["matult",-13.922497749328612],["รัส",-13.922507286071776],["התפתחות",-13.92250919342041],["▁நாங்கள்",-13.922511100769045],["▁практику",-13.92251682281494],["▁iesaka",-13.922533988952637],["년부터",-13.922533988952637],["лянд",-13.922537803649902],["ដែន",-13.922537803649902],["▁добиться",-13.92253875732422],["工夫",-13.922539710998535],["▁Tomar",-13.922540664672852],["технолошк",-13.922545433044434],["▁dydžio",-13.922577857971191],["▁Təşkilatı",-13.922585487365724],["▁mistet",-13.922595024108888],["▁inteles",-13.922603607177734],["▁consumir",-13.922605514526367],["▁փող",-13.922640800476074],["▁aplicada",-13.92264461517334],["▁చెప్పు",-13.922648429870604],["▁पावसा",-13.922651290893556],["▁مشيرا",-13.922677993774414],["▁tomando",-13.922680854797363],["▁Одна",-13.922689437866213],["eithwyr",-13.92269229888916],["▁tulajdonság",-13.922694206237791],["▁무료로",-13.922698974609377],["▁pulis",-13.922718048095703],["参赛",-13.922718048095703],["▁ഹര്",-13.922721862792969],["차를",-13.922727584838867],["▁خمسة",-13.922743797302246],["▁98%",-13.92275619506836],["やす",-13.922765731811523],["▁ölüb",-13.922788619995115],["▁پلي",-13.922789573669434],["▁ömrü",-13.922794342041016],["▁upplýsinga",-13.922805786132812],["ിറങ്ങി",-13.922811508178713],["▁szabályok",-13.922833442687988],["▁مريم",-13.92284870147705],["▁چىقىش",-13.922852516174316],["БД",-13.922858238220217],["▁диван",-13.922869682312012],["▁கூறி",-13.922869682312012],["▁məlumatları",-13.922874450683594],["▁عموم",-13.922891616821287],["mişler",-13.922893524169922],["▁Sted",-13.922905921936035],["▁poderos
o",-13.922916412353516],["ြာ",-13.922924995422363],["▁Soovi",-13.922928810119627],["▁turniej",-13.922935485839844],["medal",-13.922943115234377],["這段",-13.922947883605955],["▁окремо",-13.92294979095459],["▁adabiyot",-13.922978401184082],["▁tient",-13.922985076904297],["▁пренесува",-13.922993659973145],["lizar",-13.923011779785156],["▁Associa",-13.923015594482422],["557",-13.923020362854004],["▁gyfle",-13.923025131225586],["▁Ravn",-13.92302703857422],["▁facelo",-13.923033714294434],["▁chudai",-13.923043251037598],["▁הלאומי",-13.923048973083496],["prüfung",-13.923056602478027],["▁oddych",-13.92309284210205],["▁الحالية",-13.92309284210205],["ఎందుక",-13.923101425170898],["▁Vester",-13.923101425170898],["വിശ്വാസ",-13.92310905456543],["▁सल्लाह",-13.923112869262695],["▁доводи",-13.923114776611328],["вац",-13.923117637634276],["▁специалисты",-13.923123359680176],["できれば",-13.923150062561035],["▁prijateljima",-13.92317008972168],["provo",-13.923210144042969],["▁parhaat",-13.923212051391602],["▁jednotlivé",-13.923213005065918],["住民",-13.9232177734375],["▁विना",-13.923260688781738],["▁identificado",-13.923274993896484],["给出",-13.923282623291016],["優化",-13.923287391662598],["ΣΑ",-13.92328929901123],["水泥",-13.923294067382812],["▁Bauch",-13.923311233520508],["▁tuvāk",-13.92331886291504],["是一位",-13.923319816589355],["▁emekli",-13.92333698272705],["sztett",-13.923345565795898],["▁belleza",-13.923347473144531],["▁sorular",-13.923358917236328],["▁բազմա",-13.92336654663086],["vizor",-13.923405647277832],["glut",-13.923423767089844],["ڑوں",-13.923433303833008],["▁infection",-13.923434257507324],["▁propiedades",-13.923452377319336],["▁hotar",-13.923458099365234],["▁oranž",-13.923474311828612],["經理",-13.923501968383787],["luokan",-13.923507690429688],["▁fogyasztó",-13.92353057861328],["IFF",-13.92353630065918],["szyć",-13.923537254333496],["▁Kaise",-13.923542022705078],["ฝา",-13.923542976379396],["486",-13.923544883728027],["▁araçları",-13.923574447631836],["▁čitav",-13.923589706420898],["เผย",-13.92361831665039],["únta",-13.923636436462402],["▁revue",-13.923661231994627],["ກວດ",-13.923662185668944],["చ్చిన",-13.92368221282959],["▁Koki",-13.923693656921388],["పులు",-13.923694610595703],["▁achegar",-13.923697471618652],["▁predstavuje",-13.923724174499512],["டைந்த",-13.923744201660156],["▁prijatelje",-13.92379093170166],["kindlus",-13.92379665374756],["értékű",-13.92380142211914],["▁aporte",-13.923803329467772],["▁Galerie",-13.923830032348633],["▁hvar",-13.923851013183594],["ETİ",-13.923873901367188],["▁눈에",-13.923877716064451],["▁pięknie",-13.92387866973877],["▁späť",-13.92388916015625],["▁ඉන්දියාව",-13.92388916015625],["▁законов",-13.92390251159668],["▁Unidas",-13.923916816711426],["▁iusto",-13.92392635345459],["доступ",-13.923943519592283],["NÄ",-13.923945426940918],["▁Nende",-13.923958778381348],["▁organiziran",-13.923965454101562],["▁ესე",-13.923967361450195],["karên",-13.923991203308104],["▁સાં",-13.924025535583496],["ຮ່ວມມື",-13.924039840698242],["▁saavutta",-13.924073219299316],["Віцебск",-13.92408561706543],["ליווי",-13.92410373687744],["的聲音",-13.924123764038086],["іміздің",-13.92414665222168],["▁gemeinsame",-13.924155235290527],["▁vencer",-13.924163818359377],["▁жума",-13.92417812347412],["▁thesis",-13.924189567565918],["▁किम",-13.92422103881836],["▁winner",-13.92423152923584],["▁shirkad",-13.924235343933104],["▁Լո",-13.924254417419434],["▁Корол",-13.924263954162598],["ပိုး",-13.924264907836914],["▁previste",-13.924314498901367],["װע",-13.924324035644531],["▁tranquilidad",-13.92432975769043],["▁jarayon",-
13.924339294433594],["▁օրեր",-13.924352645874023],["▁ჯონ",-13.924372673034668],["水中",-13.924373626708984],["飲んで",-13.924376487731934],["▁Stamp",-13.924388885498049],["▁చూసిన",-13.924402236938477],["▁성적",-13.924402236938477],["▁ଓଡ଼ିଶାର",-13.924412727355955],["▁napsal",-13.924424171447754],["ર્ગ",-13.924428939819336],["STAV",-13.924436569213867],["▁Dermed",-13.924446105957031],["暫時",-13.924455642700195],["▁կտր",-13.924463272094728],["▁Nordisk",-13.92447280883789],["▁KAMI",-13.924488067626951],["েক্ট",-13.924498558044434],["รบ",-13.924503326416016],["инструкци",-13.92450714111328],["▁бараг",-13.924511909484863],["ирањето",-13.92452907562256],["件事",-13.92456340789795],["▁партиясы",-13.924579620361328],["▁الإعلامي",-13.924589157104492],["داعش",-13.924604415893556],["быць",-13.924609184265137],["▁ಆನ್",-13.9246187210083],["કત",-13.92466163635254],["ılmalıdır",-13.924687385559082],["दिवस",-13.924711227416992],["lusió",-13.924748420715332],["▁ինքնա",-13.92475414276123],["▁solicitud",-13.924763679504396],["▁ಮಹಿಳೆ",-13.92477035522461],["▁Savjet",-13.92478084564209],["▁складзе",-13.924785614013672],["▁танилц",-13.924785614013672],["▁атам",-13.92479419708252],["▁hvite",-13.924814224243164],["▁කරමු",-13.924826622009276],["หมายเลข",-13.924837112426758],["▁ustez",-13.924847602844238],["मुद्",-13.924878120422363],["Pil",-13.924921035766602],["寄り",-13.924930572509766],["▁hvide",-13.924933433532717],["▁प्रतिभा",-13.924934387207031],["더라도",-13.924970626831056],["工作人員",-13.924972534179688],["未成年",-13.92497730255127],["▁madaniyati",-13.924983024597168],["▁fallo",-13.924992561340332],["▁dokumentum",-13.925012588500977],["▁žanr",-13.925012588500977],["▁profiad",-13.925026893615724],["肚",-13.925043106079102],["ଫର",-13.925053596496582],["▁തുക",-13.92506217956543],["ratti",-13.925093650817873],["党的十九大",-13.925126075744627],["下班",-13.925158500671388],["▁fû",-13.925159454345703],["▁ඇයට",-13.925169944763184],["伪",-13.92518138885498],["θρησκ",-13.925193786621094],["烹",-13.92519474029541],["磅",-13.92519760131836],["éirigh",-13.925209045410156],["▁تەشكىل",-13.925211906433104],["▁කාන්තා",-13.92522144317627],["屁",-13.92522144317627],["เกณฑ์",-13.92522430419922],["സമ്മേളന",-13.925225257873535],["กระดาษ",-13.925226211547852],["កូរ៉េ",-13.925226211547852],["Ժամանակ",-13.925227165222168],["ພື້ນຖານ",-13.925227165222168],["▁Aberystwyth",-13.925227165222168],["▁Alapítvány",-13.925227165222168],["▁ciutadans",-13.925227165222168],["▁niektóre",-13.925227165222168],["▁použít",-13.925227165222168],["▁priemysel",-13.925227165222168],["▁ricevuto",-13.925227165222168],["▁uwielbia",-13.925227165222168],["▁Πρέπει",-13.925227165222168],["▁διοργανώ",-13.925227165222168],["▁Зохиогч",-13.925227165222168],["▁изилдөө",-13.925227165222168],["▁октобар",-13.925227165222168],["▁пользоваться",-13.925227165222168],["▁приготування",-13.925227165222168],["▁различитих",-13.925227165222168],["▁улмаас",-13.925227165222168],["▁өглөө",-13.925227165222168],["▁ধর্ষণ",-13.925227165222168],["▁প্রেসিডেন্ট",-13.925227165222168],["▁ଗୃହ",-13.925227165222168],["▁හදිසි",-13.925227165222168],["▁თანახმად",-13.925227165222168],["▁მობილური",-13.925227165222168],["▁reikalinga",-13.925228118896484],["▁Вялікі",-13.925228118896484],["▁ավտոմեքենա",-13.925228118896484],["▁möjliga",-13.9252290725708],["▁terdekat",-13.9252290725708],["▁Младен",-13.9252290725708],["▁танхим",-13.9252290725708],["▁ausgewählt",-13.925230026245115],["▁separuh",-13.925230026245115],["▁подготви",-13.925230026245115],["▁действовать",-13.925230979919434],["▁үнэхээр",-13.925230979919434],["尊敬",-13.9252309799
19434],["▁hirugarren",-13.92523193359375],["▁Украйна",-13.92523193359375],["▁പലപ്പോഴും",-13.92523193359375],["▁čvrst",-13.925232887268066],["▁Інститут",-13.925232887268066],["▁MasterCard",-13.925233840942385],["▁örugg",-13.925233840942385],["▁лікарськ",-13.925233840942385],["▁следующим",-13.925233840942385],["▁הולדת",-13.925233840942385],["▁iubesc",-13.925235748291016],["▁kevésbé",-13.925235748291016],["▁належать",-13.925235748291016],["▁reciproc",-13.925236701965332],["▁Değer",-13.92523956298828],["▁tehnološk",-13.92523956298828],["▁Drejtori",-13.925240516662598],["▁أربعة",-13.925241470336914],["▁Karriere",-13.925243377685549],["▁köteles",-13.925244331359863],["▁Читайте",-13.92524528503418],["▁ສຫລ",-13.925246238708496],["▁заболяване",-13.925247192382812],["▁psicholog",-13.925249099731444],["▁замежных",-13.925249099731444],["▁Interpreta",-13.925254821777344],["▁toestemming",-13.925254821777344],["▁ପାଳନ",-13.925259590148926],["▁nykyään",-13.925260543823242],["▁აქცია",-13.92526149749756],["▁uporaba",-13.925263404846191],["▁оорукана",-13.925268173217772],["▁shekaru",-13.925281524658203],["▁зарцуул",-13.92529010772705],["▁fokozat",-13.925294876098633],["▁etdikdə",-13.925300598144531],["▁연예",-13.925302505493164],["▁측정",-13.925305366516112],["arrêter",-13.92530918121338],["▁प्रवाह",-13.925312042236328],["业主",-13.92531681060791],["▁possesso",-13.92532444000244],["▁പേടി",-13.925325393676758],["▁jubilej",-13.925328254699709],["▁Desuden",-13.925333023071287],["▁సంవత్సరం",-13.925336837768556],["▁그때",-13.925337791442873],["▁Bekannt",-13.925339698791504],["▁सप्त",-13.92534065246582],["▁ответить",-13.925344467163086],["▁dispuesto",-13.925350189208984],["许可",-13.925350189208984],["▁ذڪر",-13.925354957580566],["▁znów",-13.9253568649292],["▁meistar",-13.925363540649414],["AMBI",-13.92537784576416],["▁gezonde",-13.925379753112791],["ארד",-13.92538070678711],["▁Турк",-13.925384521484377],["ಮೆಂಟ್",-13.925387382507324],["▁يجوز",-13.925389289855955],["765",-13.925395011901855],["▁központi",-13.925403594970703],["▁ohranja",-13.925410270690918],["▁жылдық",-13.925424575805664],["▁қараша",-13.92542552947998],["▁Kabul",-13.92542839050293],["▁dialogu",-13.925430297851562],["▁šesti",-13.925432205200195],["っこ",-13.925440788269045],["▁arrisku",-13.925450325012209],["▁هماهنگی",-13.925461769104004],["▁gedachten",-13.925463676452637],["▁shirika",-13.92546844482422],["▁gelungen",-13.925488471984863],["ဓိ",-13.925495147705078],["▁בקשה",-13.925496101379396],["▁skjut",-13.92549991607666],["▁വിവരം",-13.92549991607666],["▁vaadi",-13.925508499145508],["▁امنيتي",-13.925531387329102],["▁معادل",-13.925533294677734],["▁لديها",-13.92553997039795],["▁بۇل",-13.925541877746582],["▁вклучува",-13.925554275512695],["▁दर्जन",-13.925577163696287],["▁Школа",-13.92558765411377],["▁Kursus",-13.92559051513672],["▁bekræfte",-13.92563819885254],["ለች፡፡",-13.925639152526855],["६९",-13.925641059875488],["廣大",-13.92564582824707],["կետ",-13.925670623779297],["▁palata",-13.92567253112793],["▁doboz",-13.925689697265623],["▁väz",-13.92569065093994],["▁നിയമം",-13.92569351196289],["▁hacía",-13.925697326660156],["▁Postime",-13.925711631774902],["ติน",-13.925719261169434],["不含",-13.9257230758667],["غيب",-13.92573356628418],["▁liepos",-13.925734519958496],["iyordu",-13.925743103027344],["▁Pitt",-13.92574691772461],["▁معلوماتو",-13.925763130187988],["чнае",-13.925777435302734],["婚活",-13.92578411102295],["▁chùa",-13.925789833068848],["▁зустр",-13.925792694091797],["ങ്ക്",-13.92580795288086],["▁کالم",-13.92580795288086],["▁افتاده",-13.925829887390137],["掃除",-13.9258
35609436035],["▁любые",-13.925836563110352],["▁vertina",-13.92585277557373],["▁Simpson",-13.925858497619627],["strahl",-13.925861358642578],["▁poses",-13.92587184906006],["▁tillögu",-13.925884246826172],["ິດ",-13.925885200500488],["▁Марина",-13.925895690917969],["▁పనులు",-13.92591953277588],["▁koupel",-13.925943374633787],["044",-13.925944328308104],["▁voelt",-13.925976753234863],["▁Pix",-13.925992012023926],["▁uyku",-13.92599391937256],["▁legata",-13.925999641418455],["américa",-13.926008224487305],["▁bawak",-13.926011085510254],["піз",-13.926023483276367],["pandi",-13.926024436950684],["▁لرز",-13.92602825164795],["▁চায়",-13.926045417785645],["▁invito",-13.92604637145996],["▁Bebek",-13.92605972290039],["▁postali",-13.926084518432615],["rasyon",-13.926091194152832],["▁ڪام",-13.92609691619873],["▁माइ",-13.92609977722168],["▁Patch",-13.926106452941896],["山市",-13.926109313964844],["▁xalqı",-13.92613697052002],["▁трябваше",-13.926141738891602],["architecture",-13.926166534423828],["VIA",-13.926176071166992],["▁Goth",-13.92617893218994],["▁nastavlja",-13.926196098327637],["րու",-13.926203727722168],["mänsklig",-13.92622184753418],["▁tiujn",-13.926234245300291],["▁Hebr",-13.926237106323242],["λαϊκ",-13.926249504089355],["▁कुर",-13.926255226135254],["▁8.30",-13.926258087158203],["▁ועו",-13.92625904083252],["Esto",-13.926280975341797],["▁(46)",-13.92628574371338],["lelwa",-13.926294326782228],["▁различен",-13.92630100250244],["▁बन्दै",-13.926340103149414],["specific",-13.926351547241213],["вшиеся",-13.926355361938477],["ABO",-13.926374435424805],["▁declare",-13.926389694213867],["سلطة",-13.926392555236816],["▁exempla",-13.92640495300293],["स्मि",-13.926408767700195],["▁kultúra",-13.926408767700195],["▁1871",-13.92641830444336],["ுவதாக",-13.926427841186523],["گذشت",-13.92644214630127],["villkor",-13.926454544067385],["ૂન",-13.92647647857666],["▁ಗಂಡ",-13.92650318145752],["▁የነበሩት",-13.926513671875],["▁نجوم",-13.926526069641112],["αστικά",-13.926532745361328],["▁1864",-13.926535606384276],["ילן",-13.926543235778809],["▁elmond",-13.92660140991211],["▁roupas",-13.92660140991211],["Združen",-13.926616668701172],["mbria",-13.92661952972412],["เนื่อง",-13.926620483398438],["ketina",-13.926621437072754],["▁Бит",-13.926630973815918],["▁것과",-13.926631927490234],["▁ტექსტი",-13.926645278930664],["ろうと",-13.926652908325195],["▁miesiąca",-13.926655769348145],["שפת",-13.926661491394045],["▁맞아",-13.926675796508787],["gjafa",-13.926678657531738],["作战",-13.92668342590332],["▁katılma",-13.926705360412598],["▁вільно",-13.926725387573242],["▁vadīt",-13.926733016967772],["40,000",-13.926740646362305],["▁ජාල",-13.926782608032228],["可以使用",-13.92678451538086],["▁Технологи",-13.926790237426758],["щиеся",-13.926800727844238],["▁Target",-13.92682647705078],["four",-13.92683219909668],["tycznego",-13.926850318908691],["▁nurse",-13.92686367034912],["▁Jaurlaritza",-13.926874160766602],["ולט",-13.926875114440918],["▁образу",-13.92687702178955],["▁ilivyo",-13.926902770996094],["▁stopper",-13.926913261413574],["▁mateixos",-13.926921844482422],["▁Šim",-13.926934242248535],["ພໍ່",-13.926957130432127],["개혁",-13.926958084106444],["▁бутон",-13.926963806152344],["▁успели",-13.92698860168457],["eceğiniz",-13.92700481414795],["622",-13.92703914642334],["altezza",-13.9270658493042],["▁sensul",-13.927067756652832],["▁جگ",-13.927077293395996],["176",-13.927084922790527],["pojení",-13.927112579345703],["ફાઈ",-13.927116394042969],["▁vlogo",-13.927123069763184],["▁enheter",-13.9271240234375],["▁ndryshime",-13.92714500427246],["ակտ",-13.92714977264
4045],["द्धि",-13.927152633666992],["pakken",-13.927180290222168],["▁proef",-13.927181243896484],["ઇલ",-13.927186965942385],["▁наследник",-13.927191734313965],["▁քննարկել",-13.927199363708496],["▁škoda",-13.927221298217772],["▁родов",-13.927228927612305],["▁Хил",-13.927231788635254],["▁الجزائري",-13.92725658416748],["Person",-13.927258491516112],["koppling",-13.92727279663086],["መልከት",-13.927275657653809],["▁Tarp",-13.927278518676758],["这一次",-13.92728042602539],["מיני",-13.92729663848877],["▁школах",-13.927303314208984],["уусун",-13.927321434020996],["▁Baat",-13.927323341369627],["▁అన్నా",-13.927339553833008],["▁alud",-13.927342414855955],["წარ",-13.927355766296388],["dıqları",-13.927372932434082],["▁պատվ",-13.927372932434082],["▁தரும்",-13.927388191223145],["▁hoë",-13.92739200592041],["▁Τρι",-13.92741584777832],["চু",-13.927443504333496],["▁Koop",-13.927444458007812],["▁observer",-13.927460670471191],["אוט",-13.927486419677734],["ερά",-13.927510261535645],["▁használni",-13.927518844604492],["▁პასუხ",-13.927539825439451],["が可能",-13.927539825439451],["დრა",-13.927556037902832],["▁llai",-13.927557945251465],["gazda",-13.927570343017578],["Каз",-13.927593231201172],["שנה",-13.927597999572754],["▁свободу",-13.927617073059082],["有机",-13.92763614654541],["▁корак",-13.927672386169434],["▁அமைச்ச",-13.9276762008667],["ശോ",-13.927681922912598],["geschenk",-13.927685737609863],["വാര",-13.927685737609863],["▁ergeben",-13.927696228027344],["ავენ",-13.927735328674316],["▁spell",-13.927756309509276],["ىكى",-13.92778491973877],["əddin",-13.927788734436035],["▁Εθνική",-13.927793502807615],["▁제일",-13.92780590057373],["▁законодателство",-13.92785358428955],["▁orðin",-13.927854537963867],["▁معين",-13.927854537963867],["▁مرو",-13.927871704101562],["▁şaş",-13.927878379821776],["সভা",-13.927879333496094],["▁모르겠",-13.92788028717041],["इन्",-13.927884101867676],["根本就",-13.927884101867676],["万吨",-13.927885055541992],["▁והן",-13.927886962890623],["▁ljetn",-13.92789077758789],["ਬੀਰ",-13.927912712097168],["▁कही",-13.927935600280762],["▁preklad",-13.927949905395508],["尸",-13.927963256835938],["纽",-13.92796516418457],["图书馆",-13.92796802520752],["၂၂",-13.928003311157228],["僚",-13.928007125854492],["Occident",-13.92801284790039],["丧",-13.928020477294922],["▁мектепте",-13.92802906036377],["麻煩",-13.928030014038086],["▁Ет",-13.92803192138672],["箭",-13.928034782409668],["湧",-13.9280366897583],["กัด",-13.928037643432615],["▁qəsəbə",-13.9280424118042],["冻",-13.9280424118042],["寶貝",-13.928050994873049],["▁прекрасни",-13.92805290222168],["▁факултета",-13.928057670593262],["ຣັດເຊຍ",-13.928075790405272],["▁اڪثر",-13.928075790405272],["▁परम्परा",-13.928075790405272],["멋",-13.928075790405272],["ադրբեջանական",-13.92807674407959],["ପାରନ୍ତି",-13.92807674407959],["სტრუქტურ",-13.92807674407959],["▁atsauksmes",-13.92807674407959],["▁bütçe",-13.92807674407959],["▁ciutadania",-13.92807674407959],["▁enerxía",-13.92807674407959],["▁inscrições",-13.92807674407959],["▁kapcsán",-13.92807674407959],["▁petróleo",-13.92807674407959],["▁pròxim",-13.92807674407959],["▁qytetarëve",-13.92807674407959],["▁stiahnutie",-13.92807674407959],["▁zhvillohe",-13.92807674407959],["▁üzərinə",-13.92807674407959],["▁Λάρισα",-13.92807674407959],["▁αποφάσεις",-13.92807674407959],["▁Расеі",-13.92807674407959],["▁тогтолцоо",-13.92807674407959],["▁դատապարտ",-13.92807674407959],["▁آژانس",-13.92807674407959],["▁جنہوں",-13.92807674407959],["▁لارښود",-13.92807674407959],["▁نتیجہ",-13.92807674407959],["▁ويجهو",-13.92807674407959],["▁आम्हाला",-13.92807674407959],["▁खिलाड़ी",-13
.92807674407959],["▁शिवाजी",-13.92807674407959],["▁ଆନ୍ଦୋଳନ",-13.92807674407959],["▁சுதந்திர",-13.92807674407959],["▁ಕೊಪ್ಪಳ",-13.92807674407959],["▁අපහසු",-13.92807674407959],["▁පිණිස",-13.92807674407959],["▁პროგრამის",-13.92807674407959],["▁페이스북",-13.92807674407959],["हिल्यै",-13.928077697753906],["▁Rosalía",-13.928077697753906],["▁bedrifter",-13.928077697753906],["▁entrenamiento",-13.928077697753906],["▁rilascia",-13.928077697753906],["▁rulla",-13.928077697753906],["▁ātrāk",-13.928077697753906],["▁ଧର୍ମେନ୍ଦ୍ର",-13.928077697753906],["▁knjigo",-13.928078651428224],["▁lãng",-13.928078651428224],["▁астроном",-13.928078651428224],["▁внутренних",-13.928078651428224],["▁перейти",-13.928078651428224],["▁verrattuna",-13.92807960510254],["▁tvrtka",-13.928080558776855],["▁കണ്ണികൾ",-13.928081512451172],["▁ક્યારેય",-13.928082466125488],["▁Sinop",-13.928083419799805],["▁пристигна",-13.928083419799805],["▁ausgezeichnet",-13.92808437347412],["เชียร์",-13.928086280822754],["▁ગુરુ",-13.928086280822754],["▁ፈተና",-13.92808723449707],["▁Sometimes",-13.928088188171388],["▁slapukai",-13.928088188171388],["▁rivolge",-13.928089141845703],["▁Austausch",-13.92809009552002],["▁lufthavn",-13.928091049194336],["中國大陸",-13.928091049194336],["▁þörf",-13.928092956542969],["▁abbandona",-13.928095817565918],["طائرات",-13.92809772491455],["▁pravidelne",-13.92809772491455],["▁хийдэг",-13.92809772491455],["▁טאָן",-13.92809772491455],["▁मिलेगी",-13.928101539611816],["▁tajriba",-13.92810344696045],["▁Schwerpunkt",-13.928110122680664],["▁nekatere",-13.92811107635498],["ԵՂ",-13.928112030029297],["▁hiperlink",-13.928112030029297],["▁література",-13.928112983703612],["▁откровен",-13.928114891052246],["▁прекрасна",-13.92811679840088],["いつでも",-13.92811679840088],["ክራ",-13.928119659423828],["▁ډله",-13.928120613098145],["orllewin",-13.92812156677246],["▁سليمان",-13.92812156677246],["▁त्वचा",-13.928123474121094],["▁నేడు",-13.928123474121094],["▁trámite",-13.92812728881836],["▁áirithe",-13.928133964538574],["▁કરતી",-13.92813491821289],["▁stærkt",-13.928140640258787],["▁هیوادونو",-13.928140640258787],["លាក់",-13.928145408630373],["▁подраздел",-13.928152084350586],["ผูก",-13.9281587600708],["మతి",-13.928166389465332],["▁ဝေမျှ",-13.928167343139648],["▁주말",-13.928171157836914],["▁प्रतिष्ठान",-13.928177833557127],["līni",-13.928182601928713],["▁wybrany",-13.92818832397461],["▁Rasmussen",-13.928192138671877],["했어요",-13.92819595336914],["▁hikoya",-13.928200721740724],["▁פאָר",-13.928204536437988],["▁взнос",-13.928210258483888],["▁Lesbian",-13.928215026855469],["▁complètement",-13.92821979522705],["▁उद्या",-13.928220748901367],["▁onderdelen",-13.928223609924316],["▁լուս",-13.928228378295898],["▁тисячі",-13.928231239318848],["▁babak",-13.928234100341797],["▁එකඟ",-13.928236961364746],["173",-13.928237915039062],["ƏT",-13.928248405456545],["▁തെളിവ",-13.928250312805176],["▁نئون",-13.928255081176758],["▁durere",-13.928256034851074],["▁поставить",-13.928267478942873],["ரல்",-13.928274154663086],["▁पुराने",-13.9282808303833],["▁напрямку",-13.928284645080566],["▁فرص",-13.928284645080566],["▁tuntutan",-13.928288459777832],["▁равен",-13.928296089172363],["▁ٿيندا",-13.928296089172363],["▁szkolny",-13.928298950195312],["▁liiklus",-13.928301811218262],["▁Νίκος",-13.928304672241213],["▁Gençlik",-13.928308486938477],["▁harganya",-13.928312301635742],["▁Bianca",-13.92832374572754],["kabili",-13.928336143493652],["ለስ",-13.92834186553955],["▁regno",-13.928345680236816],["▁KOMP",-13.928351402282717],["ebatur",-13.92836093902588],["▁Uniunea",-13.92836093902588],["▁akit
oa",-13.928364753723145],["▁Ruoka",-13.92836570739746],["▁castra",-13.92836570739746],["ຄອງ",-13.928372383117676],["anlagen",-13.928380966186523],["▁അതെ",-13.928418159484863],["ходження",-13.92841911315918],["▁цікавы",-13.928430557250977],["▁Giulia",-13.928474426269531],["▁daljnj",-13.92847728729248],["▁ricordi",-13.928479194641112],["▁hukumar",-13.92849063873291],["▁verbinding",-13.928500175476074],["إش",-13.928509712219238],["ుతున్నాయి",-13.92851734161377],["▁ushirikiano",-13.928523063659668],["▁جميلة",-13.9285249710083],["▁întrebări",-13.928529739379885],["hesi",-13.928534507751465],["ຂໍ້",-13.928536415100098],["четата",-13.928556442260742],["▁oricum",-13.92856502532959],["▁tayin",-13.928580284118652],["考虑到",-13.928585052490234],["▁aumentando",-13.928592681884766],["▁wheel",-13.928607940673828],["▁Phá",-13.92862606048584],["▁română",-13.92864990234375],["▁торговли",-13.92864990234375],["▁vaker",-13.928653717041016],["▁ಮಾಡಿದ್ದ",-13.928656578063965],["▁echipei",-13.928691864013672],["bonați",-13.928705215454102],["liquant",-13.928706169128418],["▁कडा",-13.928706169128418],["▁autonomi",-13.928709030151367],["▁рукой",-13.928714752197266],["▁salvare",-13.928717613220217],["▁откаже",-13.92873191833496],["▁гранат",-13.928732872009276],["ուղի",-13.928736686706545],["REZ",-13.928741455078123],["▁projektą",-13.928757667541504],["රං",-13.92875862121582],["▁Barca",-13.928763389587402],["▁încet",-13.928765296936035],["карт",-13.928766250610352],["kunstnik",-13.9287748336792],["▁rectifica",-13.928776741027832],["बृ",-13.928788185119627],["▁Νο",-13.928791999816896],["លើកទី",-13.928800582885742],["▁képzel",-13.928804397583008],["▁gördü",-13.92880630493164],["▁Chú",-13.928827285766602],["frame",-13.928831100463867],["▁rodiče",-13.928872108459473],["▁bidaia",-13.928876876831056],["▁Mgr",-13.928877830505373],["গো",-13.92888355255127],["▁predhodn",-13.928888320922852],["گراف",-13.9288911819458],["पुरी",-13.928895950317385],["▁הבאה",-13.928949356079102],["75%",-13.928959846496582],["▁dettaglio",-13.928959846496582],["▁corsa",-13.928994178771973],["▁bucurie",-13.92902946472168],["▁членство",-13.929041862487791],["▁согласност",-13.929044723510742],["▁решать",-13.92904567718506],["▁Rann",-13.929076194763184],["बन्दी",-13.929085731506348],["▁venituri",-13.929101943969728],["▁ئىتتىپاق",-13.929120063781738],["▁resepti",-13.929139137268066],["Nav",-13.929163932800291],["▁saturu",-13.929163932800291],["today",-13.929165840148926],["Забав",-13.929165840148926],["▁Investition",-13.929171562194824],["ેન્ટ",-13.929176330566406],["5.7",-13.92917823791504],["▁Telecom",-13.929180145263672],["▁atletas",-13.92918300628662],["ڳڻ",-13.929197311401367],["顔を",-13.929198265075684],["enbach",-13.929200172424316],["ရံ",-13.929201126098633],["▁نیازی",-13.929207801818848],["노동",-13.929208755493164],["▁Ekonomik",-13.929213523864746],["▁Stell",-13.929213523864746],["▁capri",-13.929226875305176],["▁camina",-13.929239273071287],["▁Savaşı",-13.929242134094238],["▁abstrakt",-13.929265022277832],["▁adequat",-13.929266929626465],["▁العرض",-13.929276466369627],["работки",-13.92928695678711],["▁ملکوں",-13.92928695678711],["▁Շատ",-13.92928981781006],["▁Μαν",-13.929301261901855],["dimento",-13.929302215576172],["ติก",-13.929327964782717],["▁Falcon",-13.929332733154297],["▁okresu",-13.929339408874512],["▁възможността",-13.929368019104004],["▁Bupati",-13.929394721984863],["▁позитивни",-13.929401397705078],["▁Şəki",-13.929404258728027],["▁справе",-13.929408073425291],["▁պատմությունը",-13.929420471191406],["▁55%",-13.929430961608888],["କାଶ",-13.9294
35729980469],["▁վարձ",-13.929439544677734],["▁Engelsk",-13.929461479187012],["▁доступу",-13.929471969604492],["▁завода",-13.929473876953123],["France",-13.929489135742188],["†",-13.929490089416504],["jímá",-13.9295015335083],["▁लेखा",-13.929502487182615],["▁ஆன்",-13.929519653320312],["▁babası",-13.929532051086426],["▁dėlto",-13.92953872680664],["teksti",-13.92954158782959],["mniej",-13.929545402526855],["బడ్డ",-13.929567337036133],["lygu",-13.929584503173828],["نصر",-13.929607391357422],["▁operate",-13.929625511169434],["完全に",-13.929628372192385],["▁كەنت",-13.9296293258667],["▁અમાર",-13.929638862609863],["▁špeciálne",-13.929643630981444],["gangur",-13.929644584655762],["的過程",-13.929678916931152],["ಬಾರ",-13.929680824279783],["▁सांगत",-13.929695129394531],["เหมาะกับ",-13.929702758789062],["▁přece",-13.929709434509276],["қор",-13.929710388183594],["がありません",-13.929754257202148],["▁אמור",-13.929757118225098],["ご参加",-13.929762840270996],["▁квартира",-13.92977237701416],["▁והם",-13.92978572845459],["▁pretul",-13.929808616638184],["▁muistaa",-13.929818153381348],["▁bellum",-13.929847717285156],["좋아요",-13.92986011505127],["njwa",-13.929875373840332],["を感じる",-13.929887771606444],["▁foglalt",-13.92990779876709],["ٽل",-13.92991828918457],["▁можеше",-13.929944038391112],["▁Doha",-13.929946899414062],["olewa",-13.92995834350586],["▁PAP",-13.929966926574709],["▁неког",-13.929966926574709],["КЛ",-13.930008888244627],["ധാര",-13.930017471313477],["bäck",-13.930023193359377],["▁ಗುರಿ",-13.930024147033691],["輛",-13.930049896240234],["▁lanean",-13.930068969726562],["шува",-13.930093765258787],["▁வழ",-13.930097579956056],["▁billigste",-13.930129051208496],["559",-13.930147171020508],["▁mladost",-13.930157661437988],["▁հոդվածներ",-13.930169105529783],["אפן",-13.930171012878418],["▁ventet",-13.930171966552734],["شحن",-13.930177688598633],["алиева",-13.93017864227295],["に登録",-13.930195808410645],["▁ಅಪ್",-13.930214881896973],["ვნება",-13.930225372314451],["此時",-13.930240631103516],["▁aylarında",-13.930243492126465],["ಸಿಸಿ",-13.930248260498049],["▁emigrant",-13.93025016784668],["着他",-13.930270195007324],["▁குண",-13.93027114868164],["▁fraud",-13.930302619934082],["▁വാർത്ത",-13.930306434631348],["ивание",-13.93031883239746],["خلاف",-13.930323600769045],["▁indeed",-13.930331230163574],["▁അവിട",-13.93033218383789],["képesség",-13.93034839630127],["▁поздрави",-13.930371284484863],["züge",-13.930395126342772],["בנית",-13.930435180664062],["▁prototyp",-13.9304838180542],["راڻي",-13.93048858642578],["යුක්ත",-13.93051815032959],["▁férj",-13.93052577972412],["▁هویت",-13.930545806884766],["στών",-13.930566787719728],["▁toiseen",-13.930577278137209],["▁estaría",-13.930590629577637],["▁examine",-13.9306058883667],["laşmış",-13.930649757385254],["လေ့",-13.930692672729492],["▁Цо",-13.93070125579834],["пела",-13.930705070495604],["保管",-13.930707931518556],["▁sədr",-13.93071174621582],["ကုသ",-13.9307222366333],["▁Serena",-13.930728912353516],["▁SMK",-13.93073844909668],["并将",-13.930744171142578],["▁belgeler",-13.930770874023438],["▁xist",-13.930822372436523],["▁Сей",-13.930825233459473],["▁کھلاڑی",-13.93084716796875],["selskaber",-13.93086051940918],["▁న్యూ",-13.930864334106444],["袭击",-13.930878639221191],["裝潢",-13.930882453918455],["阶级",-13.930882453918455],["δοση",-13.93088436126709],["结论",-13.93089485168457],["録",-13.930896759033203],["ിറങ്ങിയ",-13.93089771270752],["ငံ",-13.93089771270752],["獵",-13.93089771270752],["ట్టే",-13.930907249450684],["▁брака",-13.930908203125],["ülesande",-13.93091106414795],["▁ಸಾಧನೆ",-13.930915832519531]
,["窩",-13.930915832519531],["蔵",-13.930923461914062],["အနေနဲ့",-13.93092441558838],["▁қылмыс",-13.93092441558838],["▁ترهګر",-13.93092441558838],["▁Азамат",-13.930926322937012],["▁հանձնաժողով",-13.930926322937012],["QT",-13.930928230285645],["▁auctoritate",-13.93092918395996],["▁לפר",-13.93092918395996],["お世話",-13.930930137634276],["始まり",-13.930930137634276],["▁тийиш",-13.930931091308594],["▁adăuga",-13.930932998657228],["▁personnalisé",-13.930932998657228],["▁життєв",-13.930932998657228],["▁подкрепя",-13.930932998657228],["▁पदाधिकारी",-13.930932998657228],["▁விற்பனை",-13.930932998657228],["박물관",-13.930932998657228],["Ử",-13.930933952331545],["▁Mahasiswa",-13.930933952331545],["▁econômica",-13.930933952331545],["▁oktobro",-13.930933952331545],["▁prêmio",-13.930933952331545],["▁sfârşit",-13.930933952331545],["▁vajadzētu",-13.930933952331545],["▁Πανεπιστήμιο",-13.930933952331545],["▁απώλεια",-13.930933952331545],["▁πάντως",-13.930933952331545],["▁Поскольку",-13.930933952331545],["▁диагноз",-13.930933952331545],["▁дополнительных",-13.930933952331545],["▁жетілдіру",-13.930933952331545],["▁وزیراعلی",-13.930933952331545],["▁কার্যক্রম",-13.930933952331545],["▁ਤੁਹਾਡਾ",-13.930933952331545],["▁టాలీవుడ్",-13.930933952331545],["▁నోటిఫికేషన్",-13.930933952331545],["▁სივრცე",-13.930933952331545],["▁ცუდი",-13.930933952331545],["ˈ",-13.93093490600586],["▁Comercio",-13.93093490600586],["▁blommor",-13.93093490600586],["▁расказ",-13.93093490600586],["▁ہوجائے",-13.93093490600586],["▁izdelke",-13.930935859680176],["▁puissance",-13.930935859680176],["▁இதனால்",-13.930935859680176],["▁Srinivas",-13.930936813354492],["▁zelfstandig",-13.930936813354492],["▁મીડિયા",-13.930936813354492],["复杂的",-13.930936813354492],["▁Eskişehir",-13.930937767028809],["▁száraz",-13.930937767028809],["▁اعزاز",-13.930937767028809],["▁FETÖ",-13.93093967437744],["മണ്ഡല",-13.930941581726074],["ይቅርታ",-13.930941581726074],["apstrāde",-13.93094253540039],["▁යුගය",-13.93094253540039],["視聴",-13.930943489074709],["内蒙古",-13.930944442749023],["▁associé",-13.93094539642334],["▁지켜",-13.93094539642334],["頭髮",-13.93094539642334],["▁სჭირდება",-13.930949211120604],["▁ফ্রি",-13.930950164794922],["▁žiadny",-13.930952072143556],["▁comerciant",-13.930953025817873],["▁පිළිතුරු",-13.930953025817873],["▁கமல்",-13.930956840515137],["▁обърна",-13.930957794189451],["▁veulent",-13.93095874786377],["▁πόνο",-13.930967330932615],["▁ڈیسک",-13.930971145629885],["▁דרכים",-13.9309720993042],["▁14:30",-13.930973052978516],["▁나타나",-13.930987358093262],["තුංග",-13.930991172790527],["разработчик",-13.930994033813477],["▁자녀",-13.930994987487791],["▁үлдээ",-13.931002616882324],["保安",-13.931002616882324],["▁কমিটির",-13.931015968322754],["ಲಾಗಿತ್ತು",-13.931020736694336],["▁tareas",-13.931024551391602],["▁Bhr",-13.93102741241455],["ปั่น",-13.931034088134766],["▁सानो",-13.931036949157717],["▁Chuyện",-13.93105125427246],["ಹೊ",-13.931055068969728],["▁apetit",-13.931055068969728],["▁Greek",-13.93105697631836],["▁rodinné",-13.931057929992676],["▁Stå",-13.931097984313965],["فارق",-13.93111801147461],["▁көчөсү",-13.93112564086914],["▁봉사",-13.931137084960938],["▁детайл",-13.931139945983888],["▁súlade",-13.931160926818848],["▁obscur",-13.93116855621338],["▁φθ",-13.93116855621338],["▁ಕಟ್ಟ",-13.931170463562012],["▁الأحداث",-13.931172370910645],["▁ansvaret",-13.931198120117188],["▁ಬೆಳೆ",-13.931207656860352],["ცოცხ",-13.931225776672363],["▁άνω",-13.931244850158691],["▁Deputat",-13.93124771118164],["▁bilateral",-13.931256294250488],["▁gật",-13.93126392364502],["▁Úvodní",-13.931268692016602],["▁ٻار
ن",-13.931273460388184],["▁החומר",-13.931279182434082],["▁Ülikooli",-13.931285858154297],["▁EES",-13.93129062652588],["▁loyal",-13.931296348571776],["ίνοντας",-13.931303977966309],["▁360°",-13.931323051452637],["▁mdogo",-13.931323051452637],["기구",-13.931329727172852],["גרסה",-13.931349754333496],["▁271",-13.931352615356444],["бежал",-13.931353569030762],["▁pasuri",-13.931368827819824],["ЙН",-13.93138027191162],["XP",-13.931384086608888],["▁социјални",-13.931394577026367],["▁Dobra",-13.931402206420898],["▁किनार",-13.931404113769531],["▁Quá",-13.931416511535645],["▁цену",-13.93142318725586],["▁podpira",-13.931437492370604],["▁residents",-13.931439399719238],["在線",-13.931485176086426],["▁Правилник",-13.931490898132324],["▁logro",-13.931511878967283],["▁රයේ",-13.931537628173828],["▁tratamentul",-13.931541442871094],["プリ",-13.931562423706056],["በዛ",-13.93156623840332],["▁exercicio",-13.93157196044922],["▁امیدوارم",-13.931591987609863],["▁ιδέα",-13.931593894958496],["▁боса",-13.931596755981444],["▁پيء",-13.93160629272461],["▁Biztos",-13.931641578674316],["▁dodati",-13.931641578674316],["tamil",-13.931668281555176],["qualität",-13.931676864624023],["▁biura",-13.931676864624023],["▁hazırlıq",-13.931680679321287],["ಮರ್",-13.931700706481934],["ኤርትራ",-13.931707382202148],["▁domāt",-13.931711196899414],["▁წერილ",-13.931724548339844],["ضحك",-13.931735038757324],["▁shahri",-13.93174934387207],["▁perdeu",-13.931764602661133],["шының",-13.931791305541992],["▁0-2",-13.931802749633787],["主导",-13.931807518005373],["▁Wide",-13.931815147399902],["ECI",-13.931818962097168],["▁amestec",-13.931828498840332],["▁amaitu",-13.931836128234863],["िफ",-13.931845664978027],["▁attendant",-13.93185043334961],["▁готелі",-13.93185043334961],["Наш",-13.931879997253418],["▁цалин",-13.931891441345217],["▁ئاچ",-13.931893348693848],["stunud",-13.931903839111328],["口腔",-13.931907653808594],["▁Regelung",-13.931917190551758],["เหรียญ",-13.931926727294922],["ប្រចាំ",-13.931934356689451],["1/10",-13.931941032409668],["प्रमुख",-13.931963920593262],["פטי",-13.931982040405272],["▁गुम",-13.931984901428224],["▁Guud",-13.931986808776855],["การบริหาร",-13.932024002075195],["ก้น",-13.93202781677246],["▁fuel",-13.932053565979004],["brojne",-13.932082176208496],["ितम्",-13.93213176727295],["▁certific",-13.932146072387695],["▁الاخر",-13.932147026062012],["▁инструкции",-13.932153701782228],["ものだ",-13.932162284851074],["▁olmayı",-13.93217658996582],["نقاذ",-13.932181358337402],["озбиљн",-13.9321928024292],["▁భాగం",-13.932196617126465],["χειρο",-13.932208061218262],["заборав",-13.93222427368164],["▁Ilir",-13.932232856750488],["移植",-13.932238578796388],["▁inedit",-13.932251930236816],["कस",-13.932294845581056],["▁dialogo",-13.93231201171875],["▁gesetzlich",-13.932316780090332],["▁пусты",-13.932316780090332],["▁şiş",-13.93232250213623],["▁лукс",-13.932324409484863],["▁بەلگى",-13.932324409484863],["▁esquecer",-13.93233585357666],["▁조회",-13.932340621948242],["ČA",-13.932357788085938],["කූ",-13.93235969543457],["▁išli",-13.93237018585205],["ໂຣ",-13.932384490966797],["dím",-13.932390213012695],["▁izveden",-13.932409286499023],["▁후원",-13.93244743347168],["▁հայերի",-13.932456970214844],["▁Slide",-13.932465553283691],["▁قىلىن",-13.932466506958008],["▁चोट",-13.93247890472412],["▁miejscach",-13.93249225616455],["▁сочи",-13.932501792907717],["▁آمن",-13.93252944946289],["▁Blas",-13.932538986206056],["▁великом",-13.932550430297852],["1961",-13.932557106018066],["▁hoče",-13.932572364807127],["▁instalado",-13.932584762573242],["▁ruokaa",-13.932602882385254],["▁Fica
",-13.932612419128418],["tiboa",-13.932613372802734],["ύλου",-13.932616233825684],["간호",-13.932631492614746],["▁нацист",-13.932639122009276],["מבקר",-13.93265438079834],["▁+7",-13.932661056518556],["心理学",-13.932677268981934],["出门",-13.932686805725098],["▁creier",-13.93269157409668],["▁busz",-13.932698249816896],["起床",-13.932710647583008],["▁qilishga",-13.932731628417969],["ustega",-13.932733535766602],["▁געב",-13.932733535766602],["▁úver",-13.932734489440918],["பார்ப்ப",-13.932745933532717],["说到",-13.932759284973145],["▁tilfelle",-13.93278694152832],["안내",-13.932788848876951],["亲自",-13.932801246643066],["▁доктора",-13.932823181152344],["▁בבי",-13.932830810546877],["એફ",-13.93285846710205],["NGI",-13.932876586914062],["OMI",-13.932896614074709],["▁jatkuu",-13.932927131652832],["யின",-13.932931900024414],["घन",-13.932936668395996],["ଗୋ",-13.932941436767578],["いただけ",-13.932985305786133],["▁accesa",-13.932989120483398],["▁رسوم",-13.93299674987793],["▁udviklet",-13.933006286621094],["▁utilitzar",-13.93303108215332],["▁კომპანიის",-13.933038711547852],["▁ställer",-13.933043479919434],["廣州",-13.933064460754396],["464",-13.933066368103027],["گې",-13.93307399749756],["▁Mkoani",-13.933100700378418],["▁rimark",-13.933137893676758],["▁দেব",-13.93313980102539],["ுவதை",-13.933165550231934],["▁središč",-13.933170318603516],["▁посебни",-13.933172225952148],["▁vəfat",-13.93320083618164],["▁feminino",-13.933210372924805],["zând",-13.933232307434082],["▁Ää",-13.933235168457031],["ٹھا",-13.933247566223145],["ነበረው",-13.933255195617676],["▁வைர",-13.93328094482422],["ให้เรา",-13.933297157287598],["▁यज्ञ",-13.933321952819824],["wskiego",-13.933353424072266],["▁زنگ",-13.93336582183838],["▁Barry",-13.93337059020996],["▁színes",-13.933395385742188],["दोष",-13.93339729309082],["личност",-13.93340301513672],["КАН",-13.933414459228516],["▁ennå",-13.933420181274414],["ვლილი",-13.93342399597168],["▁Perkara",-13.933464050292969],["ಂಕ್",-13.933473587036133],["▁عادي",-13.933485984802246],["▁kjøper",-13.933496475219728],["ნევ",-13.933500289916992],["▁úspor",-13.933507919311523],["ቀቅ",-13.933520317077637],["▁Flip",-13.933521270751951],["▁الإيراني",-13.933531761169434],["▁İngiliz",-13.93354034423828],["▁liikumis",-13.93354320526123],["reddy",-13.933547019958496],["之上",-13.933553695678713],["କଙ୍କ",-13.93360710144043],["ለቅ",-13.933618545532228],["ക്കര്",-13.933624267578123],["టరీ",-13.933653831481934],["▁حکومتی",-13.933676719665527],["▁važi",-13.933706283569336],["▁qilin",-13.933724403381348],["▁Corso",-13.93372631072998],["侵害",-13.933754920959473],["砍",-13.933765411376951],["닌",-13.93378448486328],["中華民國",-13.933794975280762],["ѕ",-13.933795928955078],["ယာဉ်",-13.933795928955078],["ระเบิด",-13.933797836303713],["▁Chất",-13.933798789978027],["▁evidenzia",-13.933798789978027],["▁reklāmas",-13.933798789978027],["▁riwayat",-13.933798789978027],["▁äntligen",-13.933798789978027],["▁İletişim",-13.933798789978027],["▁Просмотров",-13.933798789978027],["▁допоможе",-13.933798789978027],["▁коэффициент",-13.933798789978027],["▁төмөр",-13.933798789978027],["▁բաժնի",-13.933798789978027],["▁اضافی",-13.933798789978027],["▁وېبپاڼ",-13.933798789978027],["▁پەيغەمبەر",-13.933798789978027],["▁वगैरे",-13.933798789978027],["▁सावधान",-13.933798789978027],["▁অনুমতি",-13.933798789978027],["▁এছাড়া",-13.933798789978027],["▁শহীদ",-13.933798789978027],["▁ਮਦਦ",-13.933798789978027],["▁વડાપ્રધાન",-13.933798789978027],["▁ಸಿಬ್ಬಂದಿ",-13.933798789978027],["▁መስጠት",-13.933798789978027],["잃",-13.933798789978027],["▁bestätigt",-13.933799743652344],["▁eSakal",-13.9337997
43652344],["▁funcţie",-13.933799743652344],["▁kompjuter",-13.933799743652344],["▁məğlub",-13.933799743652344],["▁ξεκινά",-13.933799743652344],["▁સંસ્થા",-13.933799743652344],["▁подржава",-13.933801651000977],["▁Będzie",-13.933802604675291],["▁इंसान",-13.933804512023926],["▁ਥਾਂ",-13.933804512023926],["▁vinsæl",-13.933805465698242],["▁გაიმართა",-13.93380641937256],["▁ιδανικ",-13.933807373046877],["▁Όχι",-13.933810234069824],["▁בזכות",-13.933810234069824],["ജീവി",-13.933815002441406],["编制",-13.93381690979004],["▁vaziyat",-13.933819770812988],["▁ਹੱਕ",-13.93382167816162],["▁нэмэгдүүлэх",-13.933822631835938],["▁ବିଚାର",-13.933822631835938],["▁doseže",-13.933823585510254],["▁आयोजक",-13.933823585510254],["주간",-13.933832168579102],["▁деңгейде",-13.93384838104248],["▁વૈ",-13.933849334716797],["▁asyik",-13.93385124206543],["背包",-13.933855056762695],["觀點",-13.933855056762695],["▁Kuopio",-13.933856010437012],["▁رایانه",-13.933856010437012],["▁árvore",-13.933859825134276],["▁дзеяч",-13.933862686157228],["▁Ústí",-13.93386459350586],["▁Thống",-13.933870315551758],["神経",-13.933876037597656],["▁नेताओं",-13.933878898620604],["▁convierte",-13.933881759643556],["▁svarbiausia",-13.933883666992188],["▁በታች",-13.933883666992188],["▁hosil",-13.93388557434082],["▁етикет",-13.93389129638672],["▁амбиц",-13.933892250061035],["▁خیز",-13.933892250061035],["▁certainement",-13.933895111083984],["▁wiedzę",-13.93389892578125],["ολογικό",-13.933899879455566],["▁spotyka",-13.933902740478516],["▁groupsex",-13.933903694152832],["▁Versch",-13.933904647827148],["▁የመንግሥት",-13.933907508850098],["▁төгрөгийн",-13.933920860290527],["▁võimalust",-13.933924674987791],["▁Uudised",-13.933930397033691],["ٻو",-13.933931350708008],["▁Gled",-13.933937072753906],["hormon",-13.93394660949707],["▁politikus",-13.933948516845703],["▁yağmur",-13.933951377868652],["▁dopravní",-13.933952331542969],["▁HINDI",-13.933961868286133],["▁силата",-13.933969497680664],["▁pomembne",-13.93397331237793],["ીટ",-13.933975219726562],["ค่ํา",-13.93397617340088],["▁melhoria",-13.933977127075195],["Megosztás",-13.933979988098145],["▁folla",-13.933984756469728],["သောက်",-13.933990478515623],["▁teoretic",-13.933995246887209],["punktet",-13.933998107910156],["▁वेतन",-13.934001922607422],["▁Podrobn",-13.934004783630373],["▁ତାକୁ",-13.934008598327637],["▁নিবন্ধন",-13.93402862548828],["cristian",-13.93403148651123],["군요",-13.934045791625977],["▁окончания",-13.93405055999756],["▁Chăm",-13.934052467346191],["έτη",-13.93406581878662],["▁ಸಂಸ್ಥೆಯ",-13.934083938598633],["城区",-13.934086799621582],["▁milijonov",-13.934093475341797],["▁maşın",-13.934104919433594],["▁позади",-13.93410873413086],["▁zwiększa",-13.93411350250244],["▁נובע",-13.934117317199709],["▁পৰি",-13.934125900268556],["▁პროექტ",-13.934144020080566],["▁بجے",-13.934149742126465],["▁고용",-13.934151649475098],["honneur",-13.934176445007324],["▁ראוי",-13.934181213378906],["▁الحقوق",-13.934189796447754],["▁डाटा",-13.934195518493652],["▁Idol",-13.934210777282717],["▁ніби",-13.934211730957031],["▁installere",-13.934215545654297],["▁internationella",-13.934221267700195],["▁البناء",-13.934221267700195],["▁региону",-13.934269905090332],["▁liburan",-13.93428897857666],["▁salariés",-13.93429470062256],["每一次",-13.934295654296877],["▁матэрыялы",-13.934298515319824],["▁corda",-13.934317588806152],["▁Father",-13.934318542480469],["▁masker",-13.934329986572266],["▁benze",-13.93434238433838],["▁hänelle",-13.934367179870604],["▁Diari",-13.934372901916504],["▁Словен",-13.934391021728516],["▁tây",-13.934417724609377],["చిత్ర",-13.934418678283
691],["▁усеща",-13.934431076049805],["▁psikologi",-13.934440612792969],["▁kasvata",-13.934441566467283],["▁strijd",-13.934444427490234],["▁Lois",-13.934453964233398],["▁ഉട",-13.934473991394045],["ированию",-13.934494972229004],["▁ഇല",-13.934494972229004],["▁Puma",-13.934499740600586],["▁આવેલા",-13.934528350830078],["立场",-13.934531211853027],["▁ավագանու",-13.934536933898926],["로우",-13.934560775756836],["▁franchise",-13.934561729431152],["▁Предмет",-13.934576034545898],["måling",-13.934588432312012],["遅れ",-13.934593200683594],["▁அறிவ",-13.934602737426758],["izácie",-13.934609413146973],["▁Aadan",-13.934662818908691],["▁ہوئی۔",-13.934662818908691],["▁samler",-13.934677124023438],["▁věku",-13.934678077697754],["▁вечеря",-13.934687614440918],["▁detyrë",-13.934703826904297],["▁bhfad",-13.934704780578612],["高潮",-13.934710502624512],["▁Αθήνας",-13.93471622467041],["▁vlastiti",-13.93472671508789],["小学校",-13.934727668762209],["ସୀ",-13.934754371643066],["kakve",-13.9347562789917],["identità",-13.934775352478027],["▁gereed",-13.93480396270752],["tugas",-13.934810638427734],["▁Allar",-13.934810638427734],["▁مراجع",-13.934840202331545],["redaktə",-13.934852600097656],["्का",-13.934855461120604],["Address",-13.934860229492188],["мындан",-13.93486785888672],["megtekint",-13.934874534606934],["ЕРА",-13.934879302978516],["▁ubicado",-13.934914588928224],["▁ანგარიში",-13.93492317199707],["▁standartlar",-13.934942245483398],["▁istnieją",-13.934950828552246],["ರ್ಸ್",-13.93495273590088],["▁membroj",-13.934964179992676],["▁suurta",-13.934965133666992],["▁mājā",-13.93498706817627],["▁Silvia",-13.935025215148926],["▁colectiva",-13.935029029846191],["▁urząd",-13.935060501098633],["використовується",-13.93506145477295],["▁வகுப்பு",-13.93507957458496],["λεξ",-13.935084342956545],["606",-13.935091018676758],["▁Scopri",-13.935091972351074],["▁ආණ්ඩුවේ",-13.935103416442873],["კლას",-13.935117721557615],["▁сидел",-13.93512725830078],["▁Ihmis",-13.935147285461426],["ырып",-13.935169219970703],["▁معهم",-13.935193061828612],["▁Майдан",-13.935194969177246],["▁이것",-13.935218811035156],["▁cantitate",-13.935221672058104],["húsið",-13.935256004333496],["▁hånden",-13.935277938842772],["▁بكر",-13.935279846191406],["снаар",-13.935283660888672],["▁Ostatni",-13.935306549072266],["442",-13.935309410095217],["จากปรสิต",-13.935335159301758],["ՈՎ",-13.93535614013672],["клімат",-13.935359001159668],["▁meddela",-13.935361862182615],["▁kasvava",-13.935444831848145],["▁stopniu",-13.93547821044922],["▁עג",-13.935482025146484],["座位",-13.935489654541016],["▁بیچ",-13.935491561889648],["zielone",-13.93549346923828],["▁voľný",-13.935501098632812],["▁айтқан",-13.935503005981444],["มันก็",-13.935511589050291],["▁Christine",-13.935527801513672],["▁drugem",-13.935527801513672],["▁районах",-13.935528755187988],["tänd",-13.935529708862305],["▁olti",-13.935532569885254],["▁cirkul",-13.935582160949709],["▁solist",-13.935591697692873],["▁kooperat",-13.93561840057373],["from",-13.93562126159668],["▁estrena",-13.93564510345459],["varky",-13.935663223266602],["ଏମ",-13.935664176940918],["ısını",-13.93566608428955],["▁לספר",-13.935675621032717],["မှုကို",-13.93568992614746],["സുന്ദര",-13.935694694519045],["▁سکھ",-13.93570327758789],["▁osobom",-13.935741424560549],["طين",-13.935748100280762],["mély",-13.935775756835938],["▁টেক",-13.935799598693848],["▁Часто",-13.935835838317873],["ென்று",-13.9358491897583],["პუტ",-13.935869216918944],["▁kestää",-13.935876846313477],["czysz",-13.935887336730955],["ుకోవడానికి",-13.935887336730955],["▁nömrə",-13.93589687347412],["сунд
а",-13.935900688171388],["изировать",-13.935905456542969],["・・・・",-13.935917854309082],["▁iesaistīt",-13.935931205749512],["▁viesu",-13.93598461151123],["▁коммуника",-13.935986518859863],["บ่",-13.935996055603027],["▁стріл",-13.936001777648926],["解决问题",-13.936010360717772],["▁laurea",-13.93602180480957],["▁таста",-13.936083793640137],["▁എത്തുന്ന",-13.9360933303833],["▁oldukları",-13.936101913452148],["▁орхи",-13.93610382080078],["ัช",-13.936104774475098],["▁Сыр",-13.936113357543944],["▁Швейцар",-13.936115264892578],["▁играет",-13.936138153076172],["ဒဏ္",-13.93617057800293],["されていない",-13.936182022094728],["▁tihti",-13.936193466186523],["▁Armi",-13.936206817626951],["▁vương",-13.936220169067385],["▁pleti",-13.93623161315918],["▁පිහිටි",-13.936271667480469],["▁ولاړ",-13.936285972595217],["▁immagina",-13.936291694641112],["intaj",-13.936299324035645],["▁Clic",-13.93630313873291],["▁dĩ",-13.93631362915039],["▁siivo",-13.936322212219238],["▁anvende",-13.936330795288086],["▁møtte",-13.936341285705566],["▁Рај",-13.9363431930542],["▁رجب",-13.936345100402832],["▁metód",-13.936363220214844],["▁шарттар",-13.93636417388916],["夜市",-13.936375617980955],["▁faida",-13.93638038635254],["▁ενέργεια",-13.936389923095703],["朋友圈",-13.93640422821045],["的战略",-13.93642234802246],["265",-13.936444282531738],["▁võimalda",-13.936445236206056],["▁bunyi",-13.936450958251951],["▁nadaje",-13.936453819274902],["fiets",-13.936455726623535],["▁gaudi",-13.936461448669434],["ვრი",-13.936503410339355],["▁הנתבע",-13.936546325683594],["▁nationell",-13.93655490875244],["5:30",-13.936559677124023],["крыть",-13.936559677124023],["ကၽြန္",-13.936561584472656],["▁സംബന്ധിച്ച",-13.936577796936035],["▁referans",-13.936591148376465],["▁ലഭിക്കുന്ന",-13.936614990234377],["担忧",-13.93661880493164],["доста",-13.93662452697754],["▁velho",-13.936625480651855],["糟糕",-13.93662929534912],["霊",-13.936640739440918],["TIVO",-13.936652183532717],["防范",-13.936652183532717],["픈",-13.93665885925293],["μακρ",-13.936662673950195],["対処",-13.93666934967041],["ေလွ်ာက္",-13.936671257019045],["តាមរយៈ",-13.936671257019045],["ភ្លើង",-13.936671257019045],["framleiðslu",-13.93667221069336],["österreich",-13.93667221069336],["ОЛОГ",-13.93667221069336],["እንግዲህ",-13.93667221069336],["អំណាច",-13.93667221069336],["ῦ",-13.93667221069336],["▁Budějovic",-13.93667221069336],["▁Insgesamt",-13.93667221069336],["▁Vấn",-13.93667221069336],["▁chiusura",-13.93667221069336],["▁condividere",-13.93667221069336],["▁matemática",-13.93667221069336],["▁təzyiq",-13.93667221069336],["▁ychydig",-13.93667221069336],["▁Εκπαίδευση",-13.93667221069336],["▁σκέψη",-13.93667221069336],["▁Ажлын",-13.93667221069336],["▁Септември",-13.93667221069336],["▁Скупштине",-13.93667221069336],["▁тээвэр",-13.93667221069336],["▁Վարչապետ",-13.93667221069336],["▁המערכת",-13.93667221069336],["▁آسٹریلیا",-13.93667221069336],["▁ഒരിക്കലും",-13.93667221069336],["▁ოცნება",-13.93667221069336],["▁ደብዳቤ",-13.93667221069336],["▁ឯកឧត្តម",-13.93667221069336],["▁멋진",-13.93667221069336],["Ź",-13.936673164367676],["▁Đơn",-13.936673164367676],["▁εταιρία",-13.936673164367676],["▁натыйжа",-13.936673164367676],["▁انگشت",-13.936673164367676],["▁निवेदन",-13.936673164367676],["▁বাতিল",-13.936673164367676],["▁аралығында",-13.936674118041992],["▁мамыр",-13.936674118041992],["▁પાર્ટી",-13.936674118041992],["▁skolēni",-13.936675071716309],["▁ബുക്ക്",-13.936675071716309],["▁surpreende",-13.936676025390623],["▁yemeği",-13.936676025390623],["▁спілкування",-13.93667697906494],["▁ohrani",-13.936677932739258],["▁სწრაფად",-13.936677932739258],["▁ok
oliczności",-13.936678886413574],["▁isterseniz",-13.93667984008789],["▁Учир",-13.93667984008789],["▁Војводине",-13.936680793762209],["▁Dimension",-13.93668270111084],["▁emergency",-13.93668270111084],["▁vesmír",-13.93668270111084],["▁duomenys",-13.936685562133787],["▁רשימת",-13.936685562133787],["▁ሰልፍ",-13.936685562133787],["挑戦",-13.936685562133787],["▁stoletja",-13.936686515808104],["▁autoturism",-13.936687469482422],["▁homosexual",-13.936689376831056],["▁గతంలో",-13.936694145202637],["čokolád",-13.93669605255127],["īrs",-13.93669605255127],["ភ័យ",-13.93669605255127],["TÍ",-13.936697959899902],["▁είχαμε",-13.936697959899902],["▁העליון",-13.936697959899902],["▁oikeastaan",-13.93669891357422],["▁êriş",-13.93669891357422],["▁członków",-13.936700820922852],["▁실패",-13.936700820922852],["▁vahoaka",-13.936701774597168],["▁مشکلی",-13.936704635620115],["▁αέρα",-13.93670654296875],["▁смерті",-13.93670654296875],["▁Permanent",-13.9367094039917],["▁krievu",-13.936712265014648],["▁violenza",-13.93671703338623],["વીર",-13.936718940734863],["▁Elegant",-13.936721801757812],["▁одобри",-13.936721801757812],["▁سياسة",-13.936721801757812],["▁kterého",-13.936723709106444],["októl",-13.936728477478027],["▁qədim",-13.93673324584961],["▁Gerald",-13.936738014221191],["▁tartibi",-13.936739921569824],["▁Hətta",-13.936749458312988],["▁Erklärung",-13.936752319335938],["▁Петър",-13.936752319335938],["▁खबरें",-13.936753273010254],["▁дапамогай",-13.93675422668457],["▁ඔහොම",-13.936758995056152],["impôt",-13.936760902404783],["▁ზღვის",-13.936767578125],["โดยสาร",-13.936784744262695],["▁મૃત્યુ",-13.936786651611328],["▁impiega",-13.936796188354492],["▁Ponúkame",-13.936798095703123],["▁पुराना",-13.93679904937744],["▁адамдардың",-13.93680477142334],["▁могући",-13.936805725097656],["टॅ",-13.936806678771973],["▁verfügen",-13.936807632446287],["▁практик",-13.936817169189451],["▁неговия",-13.936829566955566],["▁tēr",-13.93683910369873],["▁ceramic",-13.936850547790527],["▁सहमत",-13.936851501464844],["ազար",-13.936856269836426],["▁odloča",-13.936857223510742],["▁actores",-13.936863899230955],["ідомий",-13.936867713928224],["▁rango",-13.93687915802002],["▁hujung",-13.93688678741455],["▁მნიშვნელოვანია",-13.936895370483398],["▁hoggaa",-13.93690013885498],["▁frigo",-13.936903953552246],["ידים",-13.93691635131836],["▁զենք",-13.93692684173584],["▁ژب",-13.936929702758787],["▁الصيف",-13.936931610107422],["▁Yılı",-13.93693733215332],["дэд",-13.93694305419922],["▁указанных",-13.936946868896484],["▁দেন",-13.936960220336914],["▁දිනා",-13.936962127685549],["▁obiecte",-13.936963081359863],["▁tartalmazza",-13.936971664428713],["▁Koca",-13.936976432800291],["योग्य",-13.936978340148926],["▁направени",-13.936978340148926],["▁cărți",-13.93699073791504],["▁Concili",-13.936994552612305],["▁Schnee",-13.93699836730957],["▁fariĝis",-13.93702220916748],["CEN",-13.937028884887695],["ódó",-13.937028884887695],["▁bertso",-13.93704319000244],["▁эсеп",-13.937052726745604],["▁Олардың",-13.937056541442873],["▁Vrei",-13.93706512451172],["គណ",-13.937071800231934],["▁stadionu",-13.937078475952148],["▁мною",-13.93708324432373],["▁níveis",-13.937088966369627],["▁Oulun",-13.93709659576416],["トー",-13.937105178833008],["schmerz",-13.937108039855955],["▁قىي",-13.937110900878906],["▁남아",-13.937121391296388],["धाम",-13.937124252319336],["ได้แล้ว",-13.937128067016602],["▁kapak",-13.937162399291992],["▁nahráv",-13.937163352966309],["▁ಮಾಡಿದೆ",-13.937166213989258],["ట్రో",-13.937174797058104],["▁الشاب",-13.937175750732422],["ouez",-13.937176704406738],["▁Техни",-13.93718719482422
],["▁Evolution",-13.9371919631958],["poca",-13.937203407287598],["自拍",-13.937209129333496],["▁مسول",-13.937235832214355],["▁flinke",-13.937247276306152],["▁जाकर",-13.937256813049316],["▁ಕೇಳಿದ",-13.937294006347656],["事先",-13.937301635742188],["заавал",-13.937304496765137],["ספו",-13.93730640411377],["ທີ່ທ່ານ",-13.937325477600098],["▁كود",-13.937328338623049],["STÄ",-13.937329292297363],["▁istiqamətdə",-13.937329292297363],["▁Çift",-13.93733024597168],["▁radnika",-13.937349319458008],["▁viktima",-13.937399864196776],["aalsus",-13.937402725219728],["▁истиот",-13.937403678894045],["▁Nasza",-13.93741512298584],["▁zaposlenih",-13.93741512298584],["▁хипо",-13.937418937683104],["èng",-13.93742561340332],["linja",-13.937427520751951],["כיכר",-13.937429428100586],["CERT",-13.937430381774902],["କ୍ଷା",-13.93743896484375],["▁lafiya",-13.937440872192385],["買了",-13.937454223632812],["▁bông",-13.937481880187988],["poñer",-13.937485694885254],["ërisht",-13.937493324279783],["▁кутия",-13.937507629394531],["រ៉ូ",-13.937519073486328],["▁cineál",-13.937543869018556],["ঝ",-13.937548637390137],["beruf",-13.937579154968262],["▁sistemą",-13.937596321105955],["▁공정",-13.937617301940918],["برای",-13.937620162963867],["મારી",-13.937676429748535],["進去",-13.937697410583496],["▁уақыты",-13.937702178955078],["おう",-13.937705993652344],["▁mně",-13.937714576721191],["WIE",-13.937719345092772],["▁Yong",-13.93773078918457],["▁Glan",-13.937735557556152],["▁արժե",-13.937736511230469],["யைக்",-13.93775749206543],["過來",-13.937777519226074],["▁záver",-13.937782287597656],["미디어",-13.937786102294922],["が好き",-13.937790870666504],["▁kultūra",-13.937796592712402],["▁NAG",-13.937800407409668],["פרסום",-13.937801361083984],["Երեւան",-13.937807083129885],["ਵੱਖ",-13.937810897827148],["▁נייע",-13.937814712524414],["ඩෝ",-13.937821388244627],["▁Grubu",-13.937822341918944],["▁աշխատանքներ",-13.937824249267578],["▁груди",-13.937830924987791],["זמין",-13.93783187866211],["ធ្",-13.937862396240234],["ମନ୍ତ",-13.937877655029297],["଼",-13.937878608703612],["▁mijlocul",-13.937878608703612],["نكر",-13.937893867492676],["483",-13.937899589538574],["▁идеята",-13.937902450561523],["▁estampa",-13.937928199768066],["høy",-13.937932968139648],["ກຸ",-13.937959671020508],["tyyli",-13.937968254089355],["▁Warner",-13.937987327575684],["質感",-13.938024520874023],["▁poszukiwani",-13.938026428222656],["ປະທານ",-13.938060760498049],["TASR",-13.93807601928711],["échange",-13.938087463378906],["▁tādi",-13.938095092773438],["Оскар",-13.938100814819336],["▁Такое",-13.938100814819336],["Kara",-13.93812084197998],["▁kamarát",-13.938127517700195],["▁dönüştü",-13.93813419342041],["્હ",-13.938138961791992],["იძ",-13.938150405883787],["先前",-13.938188552856444],["wikkel",-13.938204765319824],["▁چراغ",-13.938214302062988],["▁tulema",-13.938225746154783],["AMMA",-13.938228607177734],["▁meteorologi",-13.938241958618164],["▁የማያ",-13.93824291229248],["▁paqe",-13.93825340270996],["▁identitate",-13.93825912475586],["▁najboljih",-13.93825912475586],["767",-13.938260078430176],["▁производа",-13.938278198242188],["488",-13.938284873962402],["▁ਖੁਸ਼",-13.938307762145996],["ლიტ",-13.938332557678224],["talent",-13.93833351135254],["▁Παναγ",-13.938343048095703],["ẦN",-13.938353538513184],["ຸ່ນ",-13.938360214233398],["▁službo",-13.938385009765623],["itzak",-13.93839168548584],["▁tudod",-13.938393592834473],["ುತ್ತಿತ್ತು",-13.938400268554688],["▁ஜோ",-13.938410758972168],["პლა",-13.938416481018066],["▁höhere",-13.938438415527344],["▁sasniedz",-13.938454627990724],["▁dagarna",-13.938472747802734],["بلا
",-13.938494682312012],["കാശ",-13.938494682312012],["▁거리에",-13.93850040435791],["▁16.30",-13.938560485839844],["▁ବୟସ",-13.93857192993164],["▁مالڪ",-13.938572883605955],["▁Konserva",-13.93861198425293],["▁weydii",-13.938653945922852],["▁خونه",-13.938654899597168],["感冒",-13.9386625289917],["▁ይፈ",-13.938666343688965],["ներ՝",-13.93866729736328],["۔۔۔۔",-13.93867015838623],["▁κακο",-13.93869400024414],["پک",-13.938714027404783],["▁போராட்டம்",-13.938714981079102],["Ռո",-13.938725471496582],["▁планов",-13.938725471496582],["▁spomenik",-13.938730239868164],["stød",-13.93877410888672],["▁αρχι",-13.938777923583984],["▁substance",-13.9387788772583],["▁новому",-13.93878173828125],["भद्र",-13.93880558013916],["▁Gujarat",-13.938817977905272],["▁uboj",-13.938830375671388],["듣",-13.938835144042969],["▁Buena",-13.938837051391602],["▁बॉस",-13.938849449157717],["יגער",-13.93886375427246],["▁Franklin",-13.938875198364258],["看待",-13.938882827758787],["▁varēja",-13.938907623291016],["▁SUP",-13.93891716003418],["▁क्वा",-13.938922882080078],["▁transf",-13.938923835754396],["luck",-13.938962936401367],["▁Neces",-13.938963890075684],["▁ravimi",-13.93899154663086],["சீ",-13.939008712768556],["ెక్క",-13.93901252746582],["▁flutur",-13.939017295837402],["▁அளிக்க",-13.9390230178833],["▁nimeta",-13.93903636932373],["▁paying",-13.939047813415527],["▁pealt",-13.939099311828612],["▁Mhic",-13.939129829406738],["▁sabun",-13.939129829406738],["▁шарттары",-13.939181327819824],["ိဳင္",-13.939188957214355],["ນຸ",-13.939190864562988],["▁tirdzniecības",-13.93919849395752],["იზაციის",-13.939212799072266],["▁kreta",-13.93921947479248],["भास",-13.939233779907228],["парад",-13.93924045562744],["▁ikiwemo",-13.939247131347656],["גבעת",-13.939254760742188],["मित",-13.93925666809082],["▁sexuelle",-13.939273834228516],["ยังเป็น",-13.939281463623049],["▁väljend",-13.93929958343506],["▁Nám",-13.93930435180664],["oleva",-13.939311981201172],["の様子",-13.939336776733398],["даецца",-13.939346313476562],["▁adaugat",-13.939350128173828],["ගැනීමට",-13.93936252593994],["▁إنسان",-13.93936252593994],["▁الجمهور",-13.939435958862305],["▁통해서",-13.939441680908203],["화재",-13.939446449279783],["▁Jesteś",-13.93946647644043],["մայի",-13.939480781555176],["使っている",-13.939499855041504],["▁directament",-13.939504623413086],["▁ойындар",-13.939510345458984],["▁წევრები",-13.93951416015625],["鉄",-13.939518928527832],["▁басқармасы",-13.939520835876465],["▁nelze",-13.939525604248049],["參賽",-13.939531326293944],["セキュリティ",-13.939532279968262],["ゾ",-13.939536094665527],["稽",-13.939542770385742],["▁velkou",-13.939549446105955],["ยี่ห้อ",-13.939552307128906],["▁týždňa",-13.939553260803224],["ປັດຈຸບັນ",-13.93955421447754],["▁Gelukkig",-13.93955421447754],["▁Piemēram",-13.93955421447754],["▁UPDATE",-13.93955421447754],["▁Wachstum",-13.93955421447754],["▁galvenais",-13.93955421447754],["▁gjenopprett",-13.93955421447754],["▁lapkričio",-13.93955421447754],["▁nauðsynleg",-13.93955421447754],["▁prevăzut",-13.93955421447754],["▁prvenstvu",-13.93955421447754],["▁təxminən",-13.93955421447754],["▁vakuut",-13.93955421447754],["▁zazwyczaj",-13.93955421447754],["▁εξωτερικό",-13.93955421447754],["▁στήριξη",-13.93955421447754],["▁закінчення",-13.93955421447754],["▁пољопривредн",-13.93955421447754],["▁արձագանք",-13.93955421447754],["▁ابراهيم",-13.93955421447754],["▁دارالحکومت",-13.93955421447754],["▁مظاہرہ",-13.93955421447754],["▁پاورپوینت",-13.93955421447754],["▁हाम्रा",-13.93955421447754],["▁ਗਿਣਤੀ",-13.93955421447754],["▁క్రితం",-13.93955421447754],["▁소프트웨어",-13.93955421447754],["랭",-13.939
55421447754],["😎",-13.93955421447754],["οπούλου",-13.939555168151855],["▁Ansvarlig",-13.939555168151855],["▁slightly",-13.939555168151855],["▁treballadors",-13.939555168151855],["▁Овие",-13.939555168151855],["▁میکنه",-13.939555168151855],["▁ustrezno",-13.939556121826172],["ເດັກນ້ອຍ",-13.939557075500488],["▁uamuzi",-13.939557075500488],["▁الدراسات",-13.939557075500488],["▁адзначыў",-13.939558029174805],["▁۲۰۱۷",-13.939558029174805],["▁സന്ദേശ",-13.939558029174805],["▁බැලීම",-13.93955898284912],["▁아이템",-13.93955898284912],["εχθρ",-13.939559936523438],["▁Sprawdź",-13.939560890197754],["▁menyiapkan",-13.939560890197754],["ነጋገር",-13.93956184387207],["▁вимагає",-13.93956184387207],["▁تکنیک",-13.93956184387207],["▁rehiyon",-13.939565658569336],["▁съвременни",-13.939565658569336],["เทียน",-13.939566612243652],["▁παίξ",-13.939566612243652],["▁адекватно",-13.939566612243652],["▁ենթակա",-13.939569473266602],["▁వుంది",-13.939570426940918],["▁đạp",-13.939573287963867],["▁زرداري",-13.939577102661133],["יטן",-13.93957805633545],["▁Qaramada",-13.93957805633545],["▁ವಿದೇಶ",-13.939579010009766],["മാണെന്ന്",-13.939580917358398],["▁появилась",-13.939581871032717],["▁Ursache",-13.939582824707031],["▁निवेश",-13.939582824707031],["▁uključen",-13.93958854675293],["▁Ryanair",-13.939589500427246],["▁позволяют",-13.939594268798828],["നുസരിച്ച്",-13.93959617614746],["▁ఎక్స్",-13.939598083496094],["ਸੋਧੋ",-13.939599990844728],["整備",-13.939600944519045],["▁ነዉ።",-13.939602851867676],["▁pričakuje",-13.939603805541992],["▁දැනුම්",-13.939604759216309],["▁ಸಮಯದಲ್ಲಿ",-13.93960666656494],["▁görünür",-13.939615249633787],["▁åtgärder",-13.939620971679688],["▁Raymond",-13.939623832702637],["▁часопис",-13.939626693725586],["▁bertanding",-13.939627647399902],["▁ਕਤਲ",-13.939635276794434],["centro",-13.939638137817385],["▁tworzą",-13.9396390914917],["▁Hoạt",-13.93964958190918],["ଟାଇ",-13.939653396606444],["▁ሰባት",-13.939661026000977],["▁ನೈ",-13.939664840698242],["票房",-13.939666748046877],["▁시절",-13.939668655395508],["▁모양",-13.939677238464355],["▁שיכול",-13.939680099487305],["▁Putra",-13.939682006835938],["▁ಮೇಲ್",-13.939682960510254],["យោ",-13.939689636230469],["▁suuruse",-13.939693450927734],["kaappi",-13.939697265625],["▁고급",-13.93970012664795],["▁улаз",-13.939704895019531],["הוא",-13.939705848693848],["▁каржы",-13.939717292785645],["▁stomac",-13.93971824645996],["▁아내",-13.93971824645996],["▁نبوده",-13.939726829528809],["▁Impact",-13.93973445892334],["ចរ",-13.939748764038086],["▁себебі",-13.939756393432615],["▁របៀប",-13.939764976501465],["▁Impuls",-13.939767837524414],["秘书长",-13.939770698547363],["στολή",-13.939773559570312],["эндээ",-13.939776420593262],["▁Ebene",-13.939785957336426],["teraka",-13.93979835510254],["verðlaun",-13.939799308776855],["▁бизге",-13.93982982635498],["▁Brak",-13.939831733703612],["▁creador",-13.939837455749512],["Categori",-13.939845085144045],["▁झाड",-13.939846992492676],["▁головного",-13.939861297607422],["ິ່ນ",-13.93986988067627],["▁ڪال",-13.939878463745115],["▁მინისტრის",-13.9398832321167],["出色",-13.939884185791016],["▁Keller",-13.93989086151123],["▁Moet",-13.939894676208496],["▁raíz",-13.939905166625977],["▁العنف",-13.939908027648926],["▁хэлний",-13.939908981323242],["▁ٻڌي",-13.939922332763672],["▁dostatočn",-13.939967155456545],["gugu",-13.939970970153809],["ာ္",-13.940022468566896],["▁ایوان",-13.940024375915527],["דבק",-13.94002628326416],["▁jelez",-13.94003200531006],["▁apoiar",-13.94004249572754],["ssica",-13.940067291259766],["▁rendszerek",-13.94007682800293],["▁osnovnih",-13.940078735351562],["▁Àir",
-13.940078735351562],["വുമായ",-13.940080642700195],["▁확인할",-13.940080642700195],["▁раствор",-13.940089225769045],["▁رهڻ",-13.940092086791992],["▁minimálne",-13.940115928649902],["ාංශ",-13.940130233764648],["屋さん",-13.940142631530762],["▁hrvatskom",-13.940155982971191],["၂၃",-13.940156936645508],["▁dehors",-13.940157890319824],["Tab",-13.940166473388672],["▁स्टेट",-13.940170288085938],["데요",-13.940173149108888],["▁engelli",-13.940189361572266],["▁WLAN",-13.940191268920898],["▁főz",-13.94019603729248],["▁hängt",-13.940197944641112],["▁поема",-13.940202713012695],["▁Банка",-13.940215110778809],["ලන්ත",-13.940230369567873],["▁timur",-13.940235137939451],["▁ვითარ",-13.94023895263672],["ពិតជា",-13.940239906311035],["കൃതി",-13.9402437210083],["วาย",-13.940253257751465],["לייגן",-13.940255165100098],["ឆ",-13.940274238586426],["藍色",-13.940327644348145],["655",-13.940332412719728],["iaethau",-13.940337181091309],["član",-13.940348625183104],["▁नव्या",-13.940352439880373],["kunnassa",-13.94035530090332],["▁национална",-13.940356254577637],["▁političar",-13.940359115600586],["▁Měl",-13.94036865234375],["gehaald",-13.94038200378418],["▁wkład",-13.940382957458496],["▁الاش",-13.940383911132812],["▁kovasti",-13.940402030944824],["寄せ",-13.940402030944824],["զբ",-13.940411567687988],["тэк",-13.94041347503662],["▁повика",-13.940414428710938],["▁дүнг",-13.940428733825684],["▁גודל",-13.940452575683594],["диагност",-13.94046115875244],["▁vorrei",-13.940463066101074],["tsotra",-13.940474510192873],["sangiz",-13.940512657165527],["အပြင်",-13.94053077697754],["▁prilog",-13.94054126739502],["中學",-13.94056224822998],["▁подготвен",-13.940566062927246],["▁Tanrı",-13.94056797027588],["▁والمع",-13.940571784973145],["▁tartja",-13.940574645996094],["▁diktator",-13.940584182739258],["آب",-13.940590858459473],["▁Granada",-13.9406099319458],["▁ogłosz",-13.940614700317385],["▁பேசி",-13.940614700317385],["▁yerlərdə",-13.940637588500977],["▁hoekom",-13.94065761566162],["▁conformitate",-13.940661430358888],["خانا",-13.940673828125],["▁mnogim",-13.940682411193848],["▁SÅ",-13.940689086914062],["▁webbplatser",-13.940692901611328],["▁materijala",-13.94070816040039],["ుతాయి",-13.940732955932615],["▁인물",-13.940735816955566],["▁Тодор",-13.9407377243042],["larımızda",-13.940744400024414],["▁sentimental",-13.940751075744627],["▁banyo",-13.940763473510742],["사용",-13.940773963928224],["▁պարտավոր",-13.940817832946776],["▁Ráð",-13.940831184387209],["▁substitut",-13.940834999084473],["ዓት",-13.940836906433104],["teistä",-13.940842628479004],["پزشک",-13.940853118896484],["্যাক",-13.940874099731444],["▁skål",-13.940882682800291],["ગુજરાત",-13.940885543823242],["▁naslovom",-13.940889358520508],["▁recunoscut",-13.940911293029783],["Maria",-13.940913200378418],["կառ",-13.94093132019043],["▁Lepo",-13.94093894958496],["▁Philosophi",-13.94094467163086],["विद्य",-13.94095230102539],["ຣະ",-13.940958023071287],["▁քայլեր",-13.940960884094238],["▁hamısı",-13.94096565246582],["▁Tutte",-13.940998077392578],["ศิริ",-13.941007614135742],["сцяў",-13.941031455993652],["▁треб",-13.941033363342283],["소리",-13.94105052947998],["▁dnevi",-13.941068649291992],["▁Amateur",-13.941080093383787],["▁composición",-13.941091537475586],["سیا",-13.941102027893066],["фей",-13.941162109375],["▁органах",-13.941166877746582],["▁etenkin",-13.941170692443848],["▁Choć",-13.941181182861328],["▁রাখতে",-13.941182136535645],["▁հայրեն",-13.941195487976074],["पाक",-13.941203117370604],["publ",-13.941235542297363],["eystay",-13.941246032714844],["▁중간",-13.941250801086426],["ējusi",-13.94126415
2526855],["▁પ્રશ્ન",-13.941290855407717],["▁هندو",-13.941299438476562],["▁გარეთ",-13.94131088256836],["づけ",-13.94131088256836],["▁Ryg",-13.941326141357422],["▁budovy",-13.94133472442627],["▁garda",-13.941349983215332],["ወጥ",-13.941380500793455],["爭取",-13.94138526916504],["భాగ",-13.941393852233888],["394",-13.941396713256836],["oitetaan",-13.941396713256836],["quality",-13.941397666931152],["२४",-13.941425323486328],["▁bouche",-13.941425323486328],["ficar",-13.941428184509276],["▁ideya",-13.94144344329834],["▁verlies",-13.941485404968262],["▁ülkelerin",-13.941520690917969],["चुर",-13.941522598266602],["▁Juru",-13.941526412963867],["ისკენ",-13.941539764404297],["▁стороне",-13.941540718078612],["єктивн",-13.94158172607422],["▁przepisy",-13.94162368774414],["▁Reuni",-13.941631317138672],["▁uhka",-13.941636085510254],["Bio",-13.941668510437012],["▁Gitar",-13.941678047180176],["▁fattori",-13.941678047180176],["ҮҮЛ",-13.941683769226074],["ópolis",-13.941737174987791],["UMU",-13.94176959991455],["起動",-13.941774368286133],["▁časom",-13.941777229309082],["▁දක්ෂ",-13.941778182983398],["▁Camere",-13.941784858703612],["▁чуулган",-13.941805839538574],["بدع",-13.941808700561523],["tērē",-13.94184398651123],["▁ditugun",-13.94184684753418],["official",-13.941852569580078],["ូស",-13.94185733795166],["▁Aðal",-13.941865921020508],["▁prender",-13.941882133483888],["▁ühenda",-13.941882133483888],["▁frivillige",-13.941902160644531],["▁Dock",-13.94191074371338],["▁DELLA",-13.941922187805176],["▁αλλάξει",-13.941923141479492],["▁Engle",-13.94194221496582],["▁მოჰ",-13.941974639892578],["עניש",-13.941987037658691],["営",-13.941999435424805],["hartze",-13.942009925842283],["्यांच्या",-13.94205379486084],["ndamiseks",-13.942073822021484],["animal",-13.942076683044434],["TOA",-13.94209098815918],["▁ajattele",-13.942106246948242],["닥",-13.942111015319824],["▁progressive",-13.94212245941162],["ącego",-13.942139625549316],["වීමක්",-13.942142486572266],["达到了",-13.942143440246582],["▁সমা",-13.94216251373291],["▁نابود",-13.942172050476074],["▁перво",-13.942185401916504],["мень",-13.94218921661377],["▁haust",-13.94219207763672],["▁patinka",-13.94222640991211],["രേഖ",-13.94226360321045],["▁efectiv",-13.942279815673828],["ാലയ",-13.94228458404541],["ІК",-13.942299842834473],["▁Troch",-13.942304611206056],["▁kuulla",-13.942305564880373],["▁ətraf",-13.942322731018066],["વેદ",-13.942330360412598],["傳統的",-13.942330360412598],["▁نیت",-13.942336082458496],["▁halkaasi",-13.942358016967772],["▁அனுபவ",-13.942360877990724],["▁আল্লাহ",-13.942381858825684],["诊断",-13.942383766174316],["▁Koke",-13.94239902496338],["▁добије",-13.94239902496338],["绕",-13.942399978637695],["▁acreditar",-13.942410469055176],["▁තීරණ",-13.942411422729492],["シンプル",-13.942426681518556],["ികളെ",-13.942431449890137],["▁Egyéb",-13.942432403564451],["▁சொல்லு",-13.942435264587402],["ในฐานะ",-13.942441940307615],["លិខិត",-13.942442893981934],["싼",-13.942442893981934],["▁пассажир",-13.94244384765625],["ទូរទស្សន៍",-13.942444801330566],["▁Copenhagen",-13.942444801330566],["▁Ferienwohnung",-13.942444801330566],["▁Szolgáltatás",-13.942444801330566],["▁Zerbitzu",-13.942444801330566],["▁birbirinden",-13.942444801330566],["▁doplnky",-13.942444801330566],["▁fikambanana",-13.942444801330566],["▁génération",-13.942444801330566],["▁nedrīkst",-13.942444801330566],["▁protsenti",-13.942444801330566],["▁sobotę",-13.942444801330566],["▁évidemment",-13.942444801330566],["▁Иногда",-13.942444801330566],["▁нощувки",-13.942444801330566],["▁қасиеттер",-13.942444801330566],["▁نەتىجى",-13.94244480133
0566],["▁ଚର୍ଚ୍ଚା",-13.942444801330566],["▁പരാമര്",-13.942444801330566],["▁სალომე",-13.942444801330566],["데일리",-13.942444801330566],["됨",-13.942444801330566],["▁конструктив",-13.942445755004885],["▁tøff",-13.9424467086792],["▁yeyote",-13.9424467086792],["▁түшүндүр",-13.9424467086792],["▁інтегр",-13.9424467086792],["▁அறிவியல்",-13.9424467086792],["továbbiakban",-13.942448616027832],["ระบาย",-13.942448616027832],["▁quajtur",-13.942449569702148],["▁regisztrál",-13.942449569702148],["▁malattie",-13.942450523376465],["▁pakkuda",-13.942450523376465],["▁chystá",-13.94245147705078],["▁ਜਿਵੇਂ",-13.94245147705078],["▁комбинира",-13.94245433807373],["▁Championship",-13.942456245422363],["▁친절",-13.942456245422363],["▁کیجیے",-13.942458152770996],["ໂທດ",-13.942459106445312],["▁Turklāt",-13.942460060119627],["priz",-13.942461013793944],["▁legătură",-13.942461013793944],["▁долю",-13.942461013793944],["▁మోదీ",-13.942461013793944],["▁საბჭოთა",-13.942461967468262],["1963",-13.942463874816896],["▁військових",-13.942463874816896],["▁Fekete",-13.942464828491213],["▁problemų",-13.942476272583008],["▁వస్తే",-13.942477226257324],["▁nogensinde",-13.942479133605955],["▁બેંક",-13.942479133605955],["กทม",-13.942480087280272],["▁अग्नि",-13.942480087280272],["▁cidadáns",-13.942482948303224],["▁speciális",-13.942485809326172],["▁Mahendra",-13.942486763000488],["ბდა",-13.942487716674805],["▁antiguo",-13.942487716674805],["▁намайг",-13.94248867034912],["▁қазір",-13.942493438720703],["▁almacena",-13.94249439239502],["▁statybos",-13.942499160766602],["▁ADHD",-13.942502975463867],["▁mednarodni",-13.942505836486816],["▁prinaša",-13.942508697509766],["▁విషయాలు",-13.942509651184082],["▁Asisten",-13.942512512207031],["▁mahkeme",-13.94251537322998],["▁تاخیر",-13.94251537322998],["▁இடத்தில்",-13.942520141601562],["▁നിലപാട്",-13.942523002624512],["▁TRANS",-13.942523956298828],["▁Silicon",-13.942524909973145],["▁এক্স",-13.94253158569336],["▁illető",-13.942537307739258],["▁waaronder",-13.942541122436523],["▁príomh",-13.942543029785156],["▁पालक",-13.942543983459473],["ရှိနေ",-13.942547798156738],["▁قاضی",-13.942551612854004],["▁önemi",-13.94255542755127],["▁наступныя",-13.942560195922852],["räumen",-13.942564964294434],["▁endavant",-13.942564964294434],["▁edəcəyi",-13.942567825317385],["▁сучасні",-13.942571640014648],["▁lösningar",-13.94257926940918],["▁क्षेत्रीय",-13.94259548187256],["校長",-13.942609786987305],["▁Бъ",-13.942614555358888],["▁aşağıda",-13.942625999450684],["ermittlung",-13.942649841308594],["ຫນ້ອຍ",-13.942657470703123],["▁galėjo",-13.942662239074709],["▁персонала",-13.942662239074709],["▁ටෙලි",-13.942662239074709],["Arabi",-13.94266414642334],["▁вибрати",-13.942667007446287],["▁oznámil",-13.94267463684082],["კად",-13.942700386047363],["▁مٺي",-13.942709922790527],["telija",-13.942710876464844],["▁қаржылық",-13.94271755218506],["▁төвийн",-13.94272232055664],["ತೇ",-13.942728996276855],["▁inimigo",-13.942730903625488],["但如果",-13.94273281097412],["пруж",-13.942736625671388],["▁felirat",-13.942739486694336],["klimat",-13.942753791809082],["zemē",-13.942755699157717],["▁يزال",-13.942757606506348],["▁Кабінету",-13.942766189575195],["▁studijų",-13.942777633666992],["▁кватэр",-13.942793846130373],["▁کردستان",-13.94279956817627],["Email",-13.942824363708496],["▁pihaknya",-13.942824363708496],["イヤ",-13.942824363708496],["õppe",-13.942831039428713],["▁сезоне",-13.942832946777344],["▁आहार",-13.94283390045166],["▁تأتي",-13.942835807800291],["มันจะ",-13.94283962249756],["▁Enjoy",-13.94283962249756],["▁privatnosti",-13.94284725189209],["▁yr
ittää",-13.942849159240724],["▁යවස්ථාව",-13.942852020263672],["လူထု",-13.94285488128662],["▁विद्युत्",-13.942856788635254],["esistenza",-13.94286060333252],["▁keisti",-13.94287109375],["ચિત",-13.942909240722656],["▁zorla",-13.942914962768556],["Улаан",-13.942922592163086],["▁Suisse",-13.942930221557615],["▁õhtul",-13.942943572998049],["▁аймактар",-13.94295883178711],["▁heyvan",-13.942975997924805],["▁გარკვეული",-13.94297695159912],["판매",-13.942977905273438],["▁жакында",-13.943015098571776],["▁operazioni",-13.943028450012209],["▁партий",-13.943044662475586],["нијих",-13.9430513381958],["stationen",-13.94308376312256],["▁siyaasadda",-13.943087577819824],["นพ",-13.943090438842772],["PTI",-13.943105697631836],["▁2016/2017",-13.943119049072266],["▁وأنا",-13.94314193725586],["▁Қаржы",-13.943154335021973],["▁ಸರ್ವ",-13.943188667297363],["▁పిల్లలు",-13.943195343017578],["▁కావాల",-13.943202018737791],["▁నీకు",-13.94320297241211],["తల",-13.943217277526855],["▁leikur",-13.943217277526855],["▁თავისუფლების",-13.943243980407717],["izáció",-13.943254470825195],["▁azonos",-13.943266868591309],["▁mərasimi",-13.94326877593994],["લ્લા",-13.943293571472168],["▁Escol",-13.9432954788208],["选择了",-13.94329833984375],["ոստ",-13.943307876586914],["461",-13.94333839416504],["▁radosť",-13.943346977233888],["▁რეიტინგი",-13.943347930908203],["▁vietų",-13.94336223602295],["▁grain",-13.94338321685791],["▁shooting",-13.943391799926758],["▁Прилеп",-13.943395614624023],["енце",-13.943408966064451],["▁iliaj",-13.943408966064451],["▁આગ",-13.94341278076172],["▁Important",-13.943419456481934],["▁estetic",-13.943426132202148],["మాల",-13.94344425201416],["▁fillon",-13.94344997406006],["วก",-13.943482398986816],["▁oprit",-13.943488121032717],["▁Ленін",-13.943490028381348],["▁बंग",-13.943490982055664],["▁ligjit",-13.943506240844728],["▁бөлу",-13.94351577758789],["▁buvusi",-13.943524360656738],["▁combinación",-13.943533897399902],["▁etkinlik",-13.943535804748535],["เเ",-13.943546295166016],["aihe",-13.943554878234863],["ていて",-13.943567276000977],["simies",-13.943577766418455],["תרבות",-13.943583488464355],["▁ఆహార",-13.943584442138672],["▁nastaviti",-13.943595886230469],["確実に",-13.943604469299316],["Ψ",-13.943617820739746],["illian",-13.943623542785645],["▁користувача",-13.943633079528809],["▁garantiza",-13.94363784790039],["举行的",-13.943669319152832],["Global",-13.94367504119873],["▁Bhaile",-13.943679809570312],["567",-13.943689346313477],["▁rischi",-13.943695068359377],["جے",-13.943703651428224],["금융",-13.943710327148438],["▁የክ",-13.943714141845703],["▁Hash",-13.943726539611816],["▁කාන්තාවන්",-13.943742752075195],["▁uitleg",-13.94374942779541],["▁процесот",-13.943751335144045],["▁ΕΠΙ",-13.943769454956056],["изиране",-13.943774223327637],["▁першим",-13.943777084350586],["보면",-13.943777084350586],["▁dezvoltat",-13.943785667419434],["ლილი",-13.943792343139648],["▁Globe",-13.943798065185549],["ມັດ",-13.943800926208496],["▁конкурсе",-13.94381046295166],["▁නිසි",-13.943815231323242],["이었",-13.943854331970217],["▁considerando",-13.943856239318848],["ізацію",-13.943869590759276],["부산",-13.943883895874023],["▁უმაღლესი",-13.943917274475098],["प्रीत",-13.94393539428711],["ויה",-13.943937301635742],["也可能",-13.94394588470459],["äisiä",-13.943967819213867],["702",-13.94397258758545],["ાક",-13.943986892700195],["plasma",-13.943994522094728],["▁zadky",-13.943998336791992],["▁المنت",-13.944012641906738],["▁elementer",-13.94406509399414],["此刻",-13.944080352783203],["▁نماد",-13.94410800933838],["▁американськ",-13.944134712219238],["▁11.30",-13.944146156
311035],["ภาพถ่าย",-13.944161415100098],["Nuk",-13.944165229797363],["▁nyert",-13.944168090820312],["isiú",-13.944173812866213],["▁సంఘ",-13.944194793701172],["ŢI",-13.944204330444336],["▁Gibt",-13.944209098815918],["▁cauze",-13.944225311279297],["БРА",-13.94423007965088],["▁نوشتن",-13.944250106811523],["▁Nekem",-13.944256782531738],["腔",-13.944280624389648],["▁deyilir",-13.944284439086914],["Эрдэнэт",-13.944310188293455],["▁Kadir",-13.94433307647705],["▁мото",-13.944339752197266],["▁ప్రయాణ",-13.944385528564451],["▁KAY",-13.94440460205078],["▁bevæge",-13.94443416595459],["▁Καρα",-13.944435119628906],["▁prach",-13.944451332092283],["▁Baron",-13.944457054138184],["▁покушај",-13.94447135925293],["▁ਗੀਤ",-13.944475173950195],["▁لیا۔",-13.944504737854004],["▁ecologic",-13.94454574584961],["▁बनाएर",-13.944552421569824],["genoten",-13.94459056854248],["მეტრი",-13.94459056854248],["▁मधु",-13.944591522216797],["▁Zip",-13.944592475891112],["стоп",-13.944599151611328],["▁kullanan",-13.944631576538086],["చన",-13.944676399230955],["▁ਮੁੜ",-13.944706916809082],["Стат",-13.944722175598145],["імнің",-13.94472599029541],["▁Հարց",-13.944738388061523],["ڑنے",-13.944750785827637],["▁تعلیمی",-13.944756507873535],["baldintzak",-13.944791793823242],["▁informou",-13.944794654846191],["öző",-13.94479751586914],["▁Deck",-13.944812774658203],["それだけ",-13.944815635681152],["▁kthyer",-13.94483470916748],["▁golaha",-13.94486141204834],["▁hoose",-13.944896697998049],["▁любым",-13.944910049438477],["ពាន",-13.944944381713867],["ረፍ",-13.944945335388184],["▁SOCIAL",-13.944953918457031],["नेस",-13.944973945617676],["▁αμερικανικ",-13.945013999938965],["Съ",-13.945025444030762],["берете",-13.945048332214355],["女主角",-13.945117950439451],["६२",-13.945123672485352],["▁recuperare",-13.945138931274414],["GOV",-13.945157051086426],["значено",-13.94517707824707],["ဝန်ကြီး",-13.945178985595703],["මං",-13.945183753967283],["ድህ",-13.945204734802246],["▁радость",-13.945232391357422],["ടിച്ച",-13.945242881774902],["ότητά",-13.945274353027344],["▁блага",-13.945282936096191],["瞧",-13.945302963256836],["имся",-13.94532871246338],["▁приказна",-13.94532871246338],["リリース",-13.945330619812012],["オリジナル",-13.94533348083496],["ພະຍາຍາມ",-13.94533634185791],["ภายนอก",-13.945341110229492],["虛擬",-13.945341110229492],["ໂຮງຮຽນ",-13.945342063903809],["ជំរុញ",-13.945342063903809],["ស្នេហ៍",-13.945342063903809],["▁ଧର୍ମ",-13.945342063903809],["▁ფეხბურთი",-13.945342063903809],["yxatdan",-13.945343017578123],["ប៉ុន្មាន",-13.945343017578123],["▁Chứng",-13.945343017578123],["▁Viongozi",-13.945343017578123],["▁berpendapat",-13.945343017578123],["▁důležité",-13.945343017578123],["▁erwähnt",-13.945343017578123],["▁grąžin",-13.945343017578123],["▁kerusakan",-13.945343017578123],["▁kikubwa",-13.945343017578123],["▁pengiriman",-13.945343017578123],["▁uavhengig",-13.945343017578123],["▁Συμβούλιο",-13.945343017578123],["▁диабет",-13.945343017578123],["▁дітям",-13.945343017578123],["▁катэгорыі",-13.945343017578123],["▁Հովհաննես",-13.945343017578123],["▁עפעס",-13.945343017578123],["▁المتعلقة",-13.945343017578123],["▁تخریب",-13.945343017578123],["▁केजरीवाल",-13.945343017578123],["▁रमेश",-13.945343017578123],["▁सुनिश्चित",-13.945343017578123],["▁ਅਗਵਾਈ",-13.945343017578123],["▁ਸਥਿਤ",-13.945343017578123],["▁ઉપલબ્ધ",-13.945343017578123],["▁ସମେତ",-13.945343017578123],["▁சந்தேக",-13.945343017578123],["▁பிரச்சனை",-13.945343017578123],["▁ಸಮುದಾಯ",-13.945343017578123],["▁සැබෑ",-13.945343017578123],["▁ქირავდება",-13.945343017578123],["▁khidmat",-13.94534397125244],["▁සකස්",-13.94534397125
244],["▁déclaré",-13.945344924926758],["▁gyvenime",-13.945344924926758],["▁तर्फबाट",-13.945344924926758],["▁აღმოჩნდა",-13.945344924926758],["▁commercio",-13.94534683227539],["▁enfermidade",-13.94534683227539],["▁prírodné",-13.94534683227539],["▁korisničko",-13.945347785949709],["を探す",-13.945347785949709],["▁Stortinget",-13.945348739624023],["▁prostredia",-13.945348739624023],["▁gjithnjë",-13.945350646972656],["▁фирмы",-13.945350646972656],["▁ভারতীয়",-13.945350646972656],["▁الأحمر",-13.945354461669922],["▁ഇപ്പോഴും",-13.945358276367188],["▁අන්තර්",-13.945358276367188],["▁pencarian",-13.945361137390137],["滋味",-13.94536304473877],["▁լայն",-13.945363998413086],["▁ostvaren",-13.945368766784668],["▁ዕለት",-13.945368766784668],["▁կրում",-13.945371627807615],["▁كەڭ",-13.945375442504885],["▁પહેલી",-13.945375442504885],["▁מורכב",-13.9453763961792],["▁החברתי",-13.945377349853516],["▁చిత్రానికి",-13.945380210876465],["▁conseguenza",-13.94538116455078],["▁octo",-13.945383071899414],["▁sinyal",-13.945385932922363],["▁ଓଡିଆ",-13.945385932922363],["szervezet",-13.945387840270996],["▁tâche",-13.945390701293944],["▁ٻوليء",-13.945396423339844],["▁justicia",-13.94539737701416],["▁розслідування",-13.94540023803711],["▁ललितपुर",-13.945402145385742],["▁берсе",-13.945405960083008],["▁Vilanova",-13.94541072845459],["▁təkrar",-13.945412635803224],["▁называется",-13.945417404174805],["▁مطبوعات",-13.945422172546388],["▁ದಿನಗಳಲ್ಲಿ",-13.945430755615234],["▁medikament",-13.945436477661133],["▁dokładn",-13.945446968078612],["▁koszul",-13.945451736450195],["▁έργου",-13.945459365844728],["▁байжээ",-13.945465087890623],["▁очекује",-13.945467948913574],["▁विजेता",-13.945467948913574],["▁paruoš",-13.945483207702637],["▁Peale",-13.9454927444458],["▁forventer",-13.945494651794434],["▁کربلا",-13.945508003234863],["▁calore",-13.945510864257812],["▁tashqi",-13.94551944732666],["▁حصے",-13.945523262023926],["▁svetovni",-13.945531845092772],["dibile",-13.945544242858888],["▁ಚಿತ್ರದಲ್ಲಿ",-13.945548057556152],["լավ",-13.94555377960205],["ҚА",-13.945556640625],["▁večina",-13.94555950164795],["▁Smer",-13.94557762145996],["▁ehf",-13.945578575134276],["▁روڊ",-13.945597648620604],["வரா",-13.945601463317873],["LIVE",-13.9456148147583],["▁ठेवा",-13.9456148147583],["▁chiesto",-13.945615768432615],["국가",-13.94562530517578],["▁приложи",-13.945673942565918],["▁povodom",-13.945688247680664],["更高的",-13.945701599121094],["▁nombor",-13.945712089538574],["▁Aelod",-13.945721626281738],["▁ढो",-13.945721626281738],["▁chanson",-13.945735931396484],["δότηση",-13.945738792419434],["▁پہن",-13.945738792419434],["قاض",-13.9457426071167],["▁આવું",-13.94575309753418],["▁sisalda",-13.945761680603027],["▁권리",-13.945768356323242],["சோ",-13.945780754089355],["kerék",-13.945783615112305],["ాలన్న",-13.945792198181152],["▁zemljama",-13.945792198181152],["▁własnej",-13.945795059204102],["▁považuje",-13.945802688598633],["ความต้องการ",-13.945805549621582],["▁Bölgesi",-13.945809364318848],["▁수익",-13.945809364318848],["▁ወረ",-13.945817947387695],["▁poznato",-13.945831298828123],["ତନ",-13.94583225250244],["스키",-13.945833206176758],["▁offensiv",-13.9458589553833],["▁nevnt",-13.945863723754885],["▁produktů",-13.94586944580078],["ENDO",-13.94587230682373],["▁ਜੈ",-13.945881843566896],["▁اسباب",-13.945893287658691],["ክም",-13.945904731750488],["▁власні",-13.945915222167969],["▁예방",-13.945918083190918],["보건",-13.945921897888184],["▁घु",-13.9459228515625],["▁tulossa",-13.945927619934082],["▁enormt",-13.945966720581056],["▁канцы",-13.945974349975586],["ոցի",-13.945982933044434],["▁Лос",-13
.945989608764648],["▁menningar",-13.946002006530762],["▁ජේ",-13.946009635925291],["하이",-13.94601345062256],["ésében",-13.946016311645508],["▁углед",-13.946037292480469],["▁የሞ",-13.946050643920898],["ในเกม",-13.946085929870604],["▁Semester",-13.94609260559082],["有过",-13.946099281311035],["▁கொண்டிருந்த",-13.94612979888916],["▁Храм",-13.946138381958008],["▁dépense",-13.946147918701172],["比價",-13.946157455444336],["NICI",-13.946161270141602],["ийнхээ",-13.946184158325195],["▁mnogih",-13.946197509765623],["▁simplific",-13.946197509765623],["▁बोलत",-13.946236610412598],["▁turul",-13.946239471435549],["▁normalno",-13.946261405944824],["▁zbatim",-13.946267127990724],["▁nebūtu",-13.946276664733888],["հայ",-13.946288108825684],["▁amelyben",-13.946313858032228],["▁kötet",-13.946319580078123],["▁постаје",-13.946348190307615],["▁Wayne",-13.946355819702148],["▁çekdar",-13.946368217468262],["חיים",-13.94637393951416],["▁исходя",-13.946375846862791],["▁FTP",-13.946377754211426],["▁herrialde",-13.946383476257324],["▁siège",-13.94639492034912],["▁šķē",-13.94639778137207],["可持续发展",-13.946404457092283],["▁резултате",-13.946410179138184],["見到",-13.946413040161133],["sprung",-13.94642162322998],["▁ohjaa",-13.94642734527588],["յթ",-13.946436882019045],["▁aktsia",-13.946443557739258],["▁rekor",-13.946453094482422],["වො",-13.946467399597168],["ătorul",-13.946468353271484],["▁focused",-13.94647216796875],["▁ಲಾಭ",-13.946473121643066],["ΥΣ",-13.946491241455078],["▁قربان",-13.946511268615724],["▁modificări",-13.946516036987305],["▁sağlıyor",-13.946592330932615],["▁Jelly",-13.94660758972168],["▁תס",-13.94662380218506],["可能性がある",-13.94663429260254],["وټ",-13.946638107299805],["▁кошул",-13.946640968322754],["▁chceli",-13.946654319763184],["▁निम्ति",-13.946654319763184],["▁მილიონი",-13.946656227111816],["拿下",-13.946660995483398],["ELT",-13.94667625427246],["ravit",-13.946683883666992],["▁suprasti",-13.946687698364258],["▁одговорности",-13.946697235107422],["вшей",-13.946700096130373],["▁Sinon",-13.946721076965332],["▁meitä",-13.946728706359863],["▁apsaugo",-13.946736335754396],["▁sänk",-13.946741104125977],["▁Bangor",-13.94679069519043],["面临的",-13.94680404663086],["▁бербей",-13.94681453704834],["▁المرض",-13.946819305419922],["▁средствами",-13.946836471557615],["對此",-13.946861267089844],["ździ",-13.946868896484377],["ព័ត៌មានអន្តរជាតិ",-13.946928024291992],["▁դասարան",-13.946928977966309],["▁junija",-13.94693088531494],["ńca",-13.946932792663574],["تصوير",-13.946935653686523],["Key",-13.946999549865724],["urbo",-13.94700050354004],["▁suuntaan",-13.947003364562988],["brengen",-13.94701099395752],["▁नित्य",-13.947012901306152],["▁fungi",-13.94703483581543],["▁пружи",-13.947047233581545],["にあります",-13.947052001953123],["Уч",-13.94706916809082],["租金",-13.947070121765137],["▁własnym",-13.947114944458008],["▁slová",-13.947125434875488],["▁સાવ",-13.947139739990234],["авым",-13.9471435546875],["eolaíocht",-13.947163581848145],["▁pacar",-13.947190284729004],["ร่วมงาน",-13.947214126586914],["ෙන්නට",-13.947274208068848],["▁láska",-13.94729995727539],["tzearen",-13.947300910949709],["αστή",-13.947307586669922],["▁pensione",-13.947314262390137],["▁Основно",-13.947349548339844],["▁aprovecha",-13.94735622406006],["▁jöv",-13.947358131408691],["▁승리",-13.94736099243164],["▁utgave",-13.947362899780272],["不满",-13.947373390197754],["▁dagana",-13.947393417358398],["▁ході",-13.947393417358398],["וֹת",-13.947395324707031],["UJE",-13.947402000427246],["алогія",-13.947404861450195],["▁SBS",-13.947443962097168],["▁cibi",-13.947444915771484],["တာက",-13.
947447776794434],["▁duktig",-13.947450637817385],["▁contribute",-13.947474479675291],["lenir",-13.947487831115724],["▁наскоро",-13.947540283203123],["▁yapabilir",-13.94754695892334],["▁почнува",-13.947556495666504],["▁exploit",-13.947565078735352],["youtu",-13.947585105895996],["▁господарськ",-13.947613716125488],["▁काममा",-13.947619438171388],["نئ",-13.947626113891602],["▁govore",-13.947630882263184],["ెస్ట్",-13.947653770446776],["▁аут",-13.94766330718994],["▁احساسات",-13.947677612304688],["ącym",-13.947729110717772],["▁hêla",-13.947729110717772],["▁errores",-13.947736740112305],["បាយ",-13.947751998901367],["râm",-13.947755813598633],["pakket",-13.947769165039062],["▁Leita",-13.947781562805176],["ብሮ",-13.947786331176758],["▁klimaat",-13.947787284851074],["eçmiş",-13.947794914245604],["▁ժաման",-13.947796821594238],["▁Ֆր",-13.94780731201172],["▁Inspiration",-13.947813034057615],["לסי",-13.947833061218262],["▁Контрол",-13.94784450531006],["▁останал",-13.947855949401855],["▁Европейски",-13.947870254516602],["▁incerc",-13.9478759765625],["Tip",-13.947885513305664],["▁İzle",-13.947946548461914],["▁lieve",-13.947949409484863],["▁шиш",-13.947953224182127],["บก",-13.947978019714355],["▁સર્જ",-13.94798469543457],["▁אייער",-13.947992324829102],["ในระดับ",-13.94799518585205],["מופע",-13.947996139526367],["▁cuptor",-13.948002815246582],["▁bidali",-13.94800853729248],["กันได้",-13.948025703430176],["▁hverjum",-13.94802951812744],["မလို",-13.948033332824709],["▁desenvolupa",-13.948038101196287],["▁yaşat",-13.94804859161377],["▁purta",-13.94806957244873],["▁combination",-13.948078155517578],["សូម",-13.94809341430664],["ყავ",-13.94809627532959],["476",-13.94809913635254],["▁підприємство",-13.948127746582031],["▁obvious",-13.94813346862793],["কাৰ",-13.948147773742676],["ন্দি",-13.948149681091309],["▁پاور",-13.9481782913208],["dzības",-13.9481840133667],["侨",-13.948187828063965],["kostnader",-13.948192596435549],["▁сайтове",-13.948196411132812],["▁शेतकऱ्यां",-13.948203086853027],["夹",-13.94820499420166],["姑",-13.948222160339355],["▁дистриб",-13.948224067687988],["妄",-13.948225021362305],["maß",-13.948238372802734],["ກ້າວ",-13.948247909545898],["▁arkeolog",-13.948247909545898],["maatschappij",-13.948249816894531],["▁Christelike",-13.948249816894531],["▁Gjithashtu",-13.948249816894531],["▁Natomiast",-13.948249816894531],["▁Nemzetközi",-13.948249816894531],["▁cysylltu",-13.948249816894531],["▁dhatêng",-13.948249816894531],["▁diperhatikan",-13.948249816894531],["▁pengambilan",-13.948249816894531],["▁předchozí",-13.948249816894531],["▁Эмгек",-13.948249816894531],["▁творчості",-13.948249816894531],["▁Өйткені",-13.948249816894531],["▁עצמם",-13.948249816894531],["▁حداکثر",-13.948249816894531],["▁आकर्षक",-13.948249816894531],["▁ट्रेलर",-13.948249816894531],["▁प्राविधिक",-13.948249816894531],["▁ଆସନ୍ତା",-13.948249816894531],["▁தயாரிப்ப",-13.948249816894531],["▁ശേഖരിച്ചത്",-13.948249816894531],["▁აპრილი",-13.948249816894531],["▁შედარებით",-13.948249816894531],["▁수많은",-13.948249816894531],["させていただきました",-13.948249816894531],["롯",-13.948249816894531],["▁rifiuti",-13.948250770568848],["▁svømme",-13.948250770568848],["▁одлуку",-13.948250770568848],["▁اگست",-13.948250770568848],["▁بۇرۇن",-13.948250770568848],["▁තිබිය",-13.948250770568848],["▁menjelang",-13.948251724243164],["▁մասնագիտական",-13.948251724243164],["ድርጅቱ",-13.94825267791748],["▁메시지",-13.94825267791748],["bürger",-13.948253631591797],["ဖြတ်",-13.948254585266112],["▁Breizh",-13.94825553894043],["▁ಮುಕ್ತ",-13.948256492614746],["▁ហៅ",-13.948261260986328],["▁අර්ථ",-13
.948262214660645],["ធិការ",-13.948264122009276],["▁daşıyır",-13.948265075683594],["▁kierowa",-13.94826602935791],["老百姓",-13.94826602935791],["▁qëndron",-13.948266983032228],["▁തടയ",-13.948266983032228],["ផងដែរ",-13.94826889038086],["▁widzę",-13.94826889038086],["艰难",-13.948269844055176],["▁mitandao",-13.948270797729492],["forschung",-13.94827651977539],["▁przeznaczony",-13.948278427124023],["▁vključen",-13.94827938079834],["variant",-13.948280334472656],["▁Ĉar",-13.948284149169922],["▁کتابخانه",-13.94828987121582],["▁dèidh",-13.94829273223877],["▁sustancia",-13.948294639587402],["▁aadressil",-13.948297500610352],["▁Белград",-13.948301315307615],["▁вернуться",-13.94830322265625],["ซับ",-13.9483060836792],["versammlung",-13.948309898376465],["▁ciepła",-13.948312759399414],["続く",-13.948314666748049],["▁Ericsson",-13.94831657409668],["▁Montreal",-13.948317527770996],["▁Shumë",-13.948319435119627],["▁హ్యా",-13.948321342468262],["▁MART",-13.948322296142578],["▁ನೀಡಿದ್ದಾರೆ",-13.948324203491213],["▁ətrafında",-13.948325157165527],["▁menghalang",-13.94832706451416],["▁ಹಾಗು",-13.94832706451416],["▁მყოფ",-13.948333740234377],["▁կնք",-13.948338508605955],["▁kendilerine",-13.948346138000488],["▁инж",-13.948352813720703],["防水",-13.948356628417969],["▁uvrsti",-13.94836711883545],["▁لتحقيق",-13.948371887207031],["बाल",-13.948372840881348],["▁dâm",-13.948372840881348],["▁बढ्ने",-13.948372840881348],["▁numerosas",-13.94837474822998],["▁ආදී",-13.94837760925293],["▁Дачић",-13.948378562927246],["▁kukka",-13.948382377624512],["▁കേരളത്തില്",-13.94838809967041],["▁kịch",-13.94841480255127],["▁მინი",-13.948418617248535],["高新技术",-13.948436737060549],["▁المنتجات",-13.94843864440918],["дөрдү",-13.948439598083496],["▁lutem",-13.948446273803713],["▁Lumia",-13.948447227478027],["σύμ",-13.948451042175291],["▁prąd",-13.94846534729004],["เทน",-13.948473930358888],["▁exposição",-13.948480606079102],["▁משתמשים",-13.948482513427734],["▁atualmente",-13.948498725891112],["▁የመጨረሻ",-13.948501586914062],["olesterol",-13.948518753051758],["ריז",-13.94852352142334],["▁odgoj",-13.948525428771973],["▁לדי",-13.948537826538086],["▁alalan",-13.948538780212402],["▁հայտնվել",-13.948538780212402],["▁vinho",-13.948545455932615],["▁krónu",-13.94854736328125],["▁absolutamente",-13.948552131652832],["▁düşünül",-13.948553085327148],["彼得",-13.948578834533691],["କ୍ଷଣ",-13.948593139648438],["ССР",-13.948607444763184],["▁képet",-13.948610305786133],["▁необходимых",-13.94862174987793],["มีอะไร",-13.94862461090088],["▁pełny",-13.948663711547852],["▁ഇതിന്റെ",-13.948668479919434],["▁ТОП",-13.948670387268066],["▁diskon",-13.948678016662598],["▁בשי",-13.948687553405762],["▁منفرد",-13.948690414428713],["▁jautājumiem",-13.948700904846191],["▁BTS",-13.948729515075684],["▁ചെയ്",-13.94873046875],["在我的",-13.948731422424316],["▁उम्मीदवार",-13.948736190795898],["▁Netti",-13.94874382019043],["▁εταιρείας",-13.94874668121338],["ဘိ",-13.94878387451172],["▁nascita",-13.94880199432373],["▁qiziq",-13.948808670043944],["자들의",-13.948822021484377],["▁Minulla",-13.948826789855955],["▁кожни",-13.948830604553224],["▁Pedra",-13.948838233947754],["▁sabon",-13.94884204864502],["▁absen",-13.94884967803955],["حمي",-13.94886302947998],["▁Публикувано",-13.948870658874512],["▁gereği",-13.948896408081056],["▁pourront",-13.948896408081056],["分解",-13.948904037475586],["▁sepertinya",-13.94892120361328],["▁зголеми",-13.94892406463623],["▁autorización",-13.948975563049316],["使い方",-13.949024200439451],["▁Indah",-13.94902515411377],["565",-13.949031829833984],["▁steden",-13.949052810668944],["
නයට",-13.949053764343262],["▁Hulu",-13.949063301086426],["▁recomand",-13.94906520843506],["isivat",-13.949069023132324],["▁säljer",-13.94906997680664],["국민",-13.949081420898438],["香水",-13.949090957641602],["▁wathi",-13.949102401733398],["▁soveel",-13.949109077453612],["ਸ਼ੋ",-13.94912338256836],["했다는",-13.949124336242676],["▁llegada",-13.949126243591309],["▁трудности",-13.949132919311523],["▁даволі",-13.949135780334473],["IGO",-13.949137687683104],["▁ఆరు",-13.949138641357422],["▁Europeană",-13.949153900146484],["▁cities",-13.94915771484375],["άδ",-13.949177742004396],["▁მისთვის",-13.94919776916504],["▁connected",-13.949240684509276],["々に",-13.949261665344238],["▁keskel",-13.949262619018556],["สระ",-13.949263572692873],["▁Buck",-13.94926643371582],["ருவ",-13.94926929473877],["▁станции",-13.949283599853516],["的衣服",-13.949283599853516],["▁borsa",-13.949317932128906],["▁Fug",-13.949325561523438],["▁мәселелер",-13.949325561523438],["သူတွေ",-13.94935703277588],["▁ادبی",-13.949360847473145],["▁възрастни",-13.94937801361084],["▁Grote",-13.949390411376951],["▁פּל",-13.94941234588623],["OOT",-13.94942569732666],["▁Septemba",-13.94943618774414],["▁täyttä",-13.949461936950684],["▁çözümler",-13.949461936950684],["TIG",-13.949464797973633],["▁айтқанда",-13.94946575164795],["▁पक्षमा",-13.949470520019531],["▁bharr",-13.949522018432615],["▁religione",-13.949525833129885],["▁mekanik",-13.949549674987791],["▁Peak",-13.949556350708008],["▁triple",-13.949556350708008],["▁قوية",-13.94956874847412],["▁mjera",-13.949583053588867],["▁öğret",-13.949585914611816],["▁pertanto",-13.949607849121094],["▁térség",-13.949626922607422],["forsyning",-13.94963836669922],["vitra",-13.9496431350708],["▁nową",-13.949645042419434],["▁rörelse",-13.9496488571167],["kordse",-13.949650764465332],["không",-13.949660301208496],["▁levantar",-13.94967269897461],["ልብ",-13.94970989227295],["▁향상",-13.949711799621582],["▁abusi",-13.949719429016112],["地域の",-13.94972038269043],["αρχία",-13.949725151062012],["שאלות",-13.94973087310791],["▁tíre",-13.949732780456545],["▁Idara",-13.949735641479492],["▁Protección",-13.949735641479492],["▁ამბ",-13.949756622314451],["▁החליט",-13.949759483337402],["エル",-13.9497709274292],["▁ეროვნულ",-13.949785232543944],["ແກ",-13.949820518493652],["上课",-13.949823379516602],["పోయ",-13.949830055236816],["▁письме",-13.949847221374512],["페이",-13.94985008239746],["persoon",-13.94986057281494],["▁detox",-13.94986057281494],["▁seviyesin",-13.949868202209473],["menetelmä",-13.94987964630127],["▁კურსი",-13.949880599975586],["▁Transit",-13.9498929977417],["▁söyleyen",-13.949899673461914],["▁ట్రా",-13.949911117553713],["▁glavi",-13.94994068145752],["▁vedeti",-13.949995040893556],["▁چندان",-13.950027465820312],["▁stiamo",-13.950040817260742],["▁колеба",-13.950060844421388],["▁talebi",-13.950064659118652],["▁Ему",-13.950064659118652],["▁prodaju",-13.95008659362793],["אקס",-13.95009422302246],["▁özünün",-13.950098991394045],["ШЫ",-13.950103759765623],["アナ",-13.950109481811523],["▁sørger",-13.950119972229004],["▁மரண",-13.9501371383667],["▁آیات",-13.950154304504396],["双手",-13.950172424316406],["▁ይታ",-13.950186729431152],["▁Venez",-13.950201988220217],["▁kallar",-13.95020580291748],["ራሽ",-13.95021152496338],["▁vyrobené",-13.95024299621582],["▁अटक",-13.95024299621582],["▁అక్కడి",-13.95024871826172],["rollen",-13.950261116027832],["▁lokalni",-13.950262069702148],["ఎన్",-13.950274467468262],["eddau",-13.950288772583008],["▁Medya",-13.950288772583008],["mbwa",-13.950307846069336],["सत्ता",-13.9503173828125],["▁educational",-13.9503173828125
],["▁GPU",-13.95035457611084],["ttiä",-13.9503755569458],["▁feminista",-13.9503812789917],["▁প্রেম",-13.95038604736328],["▁rezon",-13.950398445129396],["▁весник",-13.950414657592772],["▁talleres",-13.950435638427734],["▁նախագծեր",-13.95044231414795],["fény",-13.950450897216797],["lösung",-13.950506210327148],["▁جاچ",-13.95054054260254],["ливість",-13.950554847717283],["áistí",-13.950560569763184],["▁FRE",-13.950566291809082],["▁Meteor",-13.950567245483398],["494",-13.950571060180664],["▁plače",-13.950572967529297],["РОП",-13.950608253479004],["૧૦",-13.950636863708496],["▁suprug",-13.950639724731444],["казав",-13.950658798217772],["時代の",-13.95068073272705],["kaubandus",-13.950682640075684],["▁Britse",-13.950691223144531],["løft",-13.950693130493164],["▁Pekka",-13.95071792602539],["یستی",-13.950722694396973],["▁rense",-13.950743675231934],["▁සීමා",-13.950743675231934],["▁Concurso",-13.950761795043944],["▁Krij",-13.950772285461426],["▁ұр",-13.950782775878906],["艺术家",-13.950791358947754],["senteret",-13.95080852508545],["ശേഷം",-13.950818061828612],["მიდ",-13.950819969177246],["▁Суда",-13.95086669921875],["▁قصور",-13.950870513916016],["ሊን",-13.950947761535645],["▁glasov",-13.950950622558594],["ቀጠል",-13.95095157623291],["▁Ամերիկ",-13.95095157623291],["▁praž",-13.950968742370604],["աղի",-13.950970649719238],["овото",-13.950979232788086],["▁frétta",-13.950987815856934],["▁среднем",-13.951022148132324],["▁hygge",-13.951035499572754],["▁Round",-13.951037406921388],["Ű",-13.951054573059082],["▁miło",-13.951065063476562],["▁കമന്റ",-13.951075553894045],["攀",-13.951095581054688],["дъх",-13.95110034942627],["盃",-13.951123237609863],["▁knižn",-13.95113754272461],["멀",-13.95114040374756],["dinha",-13.951151847839355],["▁тэмдэг",-13.951151847839355],["副主席",-13.951154708862305],["▁şansı",-13.95115852355957],["3⁄4",-13.951165199279783],["ကျောက်",-13.951165199279783],["ሼ",-13.951165199279783],["እስራኤል",-13.951165199279783],["እግዚአብሄር",-13.951165199279783],["ỵ",-13.951165199279783],["▁Cəfər",-13.951165199279783],["▁Skaityti",-13.951165199279783],["▁Toisaalta",-13.951165199279783],["▁curriculum",-13.951165199279783],["▁duminică",-13.951165199279783],["▁ezagutzen",-13.951165199279783],["▁fafanje",-13.951165199279783],["▁murder",-13.951165199279783],["▁speisialta",-13.951165199279783],["▁wspomina",-13.951165199279783],["▁Περιφέρεια",-13.951165199279783],["▁домаќин",-13.951165199279783],["▁эрсдэл",-13.951165199279783],["▁існування",-13.951165199279783],["▁حیرت",-13.951165199279783],["▁ଯୋଗୁଁ",-13.951165199279783],["▁సంబంధిత",-13.951165199279783],["▁අහිමි",-13.951165199279783],["▁ඉන්දීය",-13.951165199279783],["▁Společnost",-13.951166152954102],["▁beschrieben",-13.951166152954102],["▁mijloace",-13.951166152954102],["▁nokkrum",-13.951166152954102],["▁одлуке",-13.951166152954102],["▁ייתכן",-13.951166152954102],["▁ଜଙ୍ଗଲ",-13.951166152954102],["▁accusantium",-13.951167106628418],["▁köögi",-13.951167106628418],["▁utnyttja",-13.951167106628418],["▁zdieľa",-13.951167106628418],["▁πολιτών",-13.951167106628418],["▁մասնավորապես",-13.951167106628418],["▁અનુસાર",-13.951167106628418],["다이어트",-13.951167106628418],["ศิลปิน",-13.951168060302734],["▁јавља",-13.951168060302734],["▁ਖੋਜ",-13.951168060302734],["ความเป็นส่วนตัว",-13.95116901397705],["▁angielski",-13.95116901397705],["▁грамадскі",-13.95116901397705],["▁জানতে",-13.95116901397705],["判斷",-13.95116901397705],["▁болашақ",-13.951169967651367],["လႊတ္ေတာ္",-13.951170921325684],["▁turmush",-13.951170921325684],["▁цэргийн",-13.951170921325684],["豹",-13.951170921325684],["▁прадстаўнікоў"
[... model/sentence-transformer/unigram.json (continued): machine-generated SentencePiece unigram vocabulary. Each entry is a ["piece", log-probability] pair, e.g. ["▁hviezd", -13.951173782348633]; a leading "▁" marks a word-initial piece, and the scores in this span run from roughly -13.951 to -13.969. The remaining thousands of multilingual entries are elided. ...]
13.96884822845459],["無駄",-13.96884822845459],["▁महासचिव",-13.968849182128906],["▁urmeaza",-13.968850135803224],["▁уважение",-13.968850135803224],["▁fərdi",-13.96885108947754],["▁ተገቢ",-13.96885108947754],["廣東",-13.968852043151855],["▁shartnoma",-13.968856811523438],["fræðingur",-13.96885871887207],["▁asiakkaan",-13.968859672546388],["▁guberna",-13.968860626220703],["▁Surya",-13.96886157989502],["▁хліб",-13.968863487243652],["▁elettronica",-13.968868255615234],["▁concesión",-13.96886920928955],["▁Ciekaw",-13.968871116638184],["▁szegény",-13.968871116638184],["▁Louisiana",-13.9688720703125],["▁למנוע",-13.9688720703125],["▁পর্ব",-13.968873023986816],["सिस",-13.968873977661133],["▁කල්පනා",-13.96887493133545],["▁מלחמת",-13.968875885009766],["обов",-13.968877792358398],["▁xizmatlari",-13.968879699707031],["▁передачи",-13.968883514404297],["ຫວ",-13.968886375427246],["▁naturligtvis",-13.96888828277588],["▁Пусть",-13.968892097473145],["ขัน",-13.968894004821776],["▁ישנם",-13.96889591217041],["▁ሆይ",-13.968902587890623],["▁vuxen",-13.968904495239258],["▁сподіва",-13.96890640258789],["▁pagiging",-13.968907356262209],["▁архитект",-13.968908309936523],["▁본격",-13.968912124633787],["▁우려",-13.968914985656738],["▁jaučia",-13.968920707702637],["ագրական",-13.968927383422852],["▁dumating",-13.9689359664917],["▁ይኖር",-13.968936920166016],["einheit",-13.968939781188965],["▁разследва",-13.968939781188965],["▁کډوالو",-13.96894359588623],["▁jucător",-13.968945503234863],["▁postopek",-13.968950271606444],["▁mensaxe",-13.968952178955078],["▁chemical",-13.968963623046877],["▁Dadurch",-13.968969345092772],["▁skutečnost",-13.968969345092772],["ապար",-13.968981742858888],["▁patrimonial",-13.968985557556152],["אמין",-13.968986511230469],["▁անդամների",-13.96899127960205],["▁dizaina",-13.968996047973633],["▁Länge",-13.96900749206543],["להק",-13.969017028808594],["சில்",-13.969018936157228],["▁vidimo",-13.969026565551758],["▁ഇതിന്",-13.969027519226074],["▁تسليم",-13.969030380249023],["▁၄။",-13.969030380249023],["▁مجلة",-13.969046592712402],["▁못하고",-13.96906852722168],["ဖြစ်ကြောင်း",-13.969076156616213],["06.2017",-13.969077110290527],["▁eksempelvis",-13.96907901763916],["▁spēja",-13.96907901763916],["167",-13.969082832336426],["▁située",-13.969086647033691],["▁වර්ණ",-13.969088554382324],["Κοιν",-13.969094276428224],["▁삶을",-13.969109535217283],["▁патрон",-13.96911334991455],["▁یافتہ",-13.96911907196045],["▁වෙතත්",-13.96912670135498],["▁රදේශය",-13.969135284423828],["▁איפה",-13.969139099121094],["konvention",-13.969141006469728],["高度重视",-13.969143867492676],["▁נושאים",-13.969144821166992],["▁Veteran",-13.969149589538574],["▁convins",-13.96916675567627],["Hlavná",-13.969188690185549],["▁fossil",-13.969188690185549],["গুলোর",-13.969192504882812],["теоретичн",-13.969197273254396],["પં",-13.969208717346191],["▁lernejo",-13.969220161437988],["▁darunter",-13.969257354736328],["▁જેથી",-13.96925926208496],["▁xisbiga",-13.96928882598877],["▁הרוח",-13.9693021774292],["▁معيار",-13.969305992126465],["▁അസ്",-13.969305992126465],["▁literario",-13.969316482543944],["ဆင်း",-13.969317436218262],["▁सैन्य",-13.969318389892578],["开车",-13.969321250915527],["▁ואל",-13.969337463378906],["▁Գա",-13.969342231750488],["▁utlandet",-13.969343185424805],["有序",-13.96934413909912],["เชื่อมต่อ",-13.969345092773438],["полу",-13.969346046447754],["▁nastane",-13.969347953796388],["تعب",-13.969364166259766],["▁נהר",-13.969367027282717],["ခြင်း၊",-13.96938705444336],["mbet",-13.969389915466309],["▁пунктов",-13.969401359558104],["▁собственной",-13.969404220581056],["▁ରଖ
ିବା",-13.969414710998535],["▁имун",-13.969423294067385],["▁стимулира",-13.969425201416016],["外汇",-13.96943187713623],["▁ämnen",-13.969435691833496],["▁تورم",-13.969443321228027],["448",-13.96944808959961],["TIONS",-13.969454765319824],["させた",-13.969454765319824],["▁שירותים",-13.969478607177734],["ිත්",-13.96947956085205],["申し込み",-13.969486236572266],["▁וחצי",-13.96949577331543],["ិល",-13.969496726989746],["▁خودروهای",-13.969518661499023],["UVA",-13.969531059265137],["ອັດ",-13.969531059265137],["ป้า",-13.9695405960083],["eerders",-13.96954345703125],["▁तरीका",-13.96954345703125],["positum",-13.96955394744873],["479",-13.969569206237791],["▁GNU",-13.969590187072754],["▁davantage",-13.969602584838867],["ลาว",-13.9696044921875],["▁Agnes",-13.969605445861816],["▁Teacher",-13.96960735321045],["▁yaklaşım",-13.96963119506836],["▁۹۵",-13.969643592834473],["▁රහාරය",-13.969654083251951],["▁ملڻ",-13.969664573669434],["▁forklaring",-13.969738960266112],["▁uğurlar",-13.96976089477539],["西南",-13.969773292541504],["▁தேடி",-13.96979808807373],["▁nümayəndəsi",-13.969831466674805],["▁Produse",-13.969837188720703],["ૃતિ",-13.96983814239502],["buruk",-13.969858169555664],["мээр",-13.969862937927246],["ोदय",-13.969863891601562],["▁Nedir",-13.969881057739258],["▁penjual",-13.96988582611084],["ঘর",-13.969893455505373],["▁zenekar",-13.969915390014648],["▁şöbəsi",-13.969924926757812],["▁ക്രിസ്",-13.969937324523926],["छाड",-13.969942092895508],["kvæði",-13.969953536987305],["ಮೂಲ",-13.969963073730469],["6.9",-13.96998119354248],["▁Miel",-13.96999454498291],["▁tavoite",-13.970001220703123],["บัง",-13.97002410888672],["▁implicit",-13.97002410888672],["▁zlasti",-13.9700345993042],["▁évad",-13.97003936767578],["▁सेल",-13.97004508972168],["Orient",-13.970048904418944],["ברר",-13.970060348510742],["सिद्ध",-13.970075607299805],["▁zárt",-13.970097541809082],["表格",-13.970109939575195],["▁методолог",-13.970137596130373],["▁куды",-13.97016143798828],["▁23%",-13.970163345336914],["ጠነ",-13.970170974731444],["gewinn",-13.970190048217772],["▁Prasad",-13.97019863128662],["ิ้ง",-13.970199584960938],["▁jednoj",-13.970207214355469],["できました",-13.970224380493164],["▁Бой",-13.970239639282228],["▁واسه",-13.970247268676758],["▁సహాయ",-13.970261573791504],["▁pimp",-13.970264434814451],["▁حملة",-13.97028636932373],["▁ತಪ್ಪು",-13.970294952392578],["昨晚",-13.970303535461426],["▁settori",-13.970304489135742],["▁lokalnih",-13.970321655273438],["נוסח",-13.970333099365234],["▁þarft",-13.97035026550293],["▁kongre",-13.970389366149902],["มอน",-13.970391273498535],["共产党",-13.970394134521484],["korban",-13.97041130065918],["▁personally",-13.970444679260254],["▁ಹೊತ್ತ",-13.970447540283203],["▁berezi",-13.970453262329102],["▁осуществлен",-13.970453262329102],["aðferð",-13.970466613769531],["hossz",-13.970483779907228],["მინა",-13.970547676086426],["▁asıl",-13.97054958343506],["▁udfordringer",-13.970561981201172],["▁fazia",-13.97060775756836],["sound",-13.970609664916992],["▁mazu",-13.97061252593994],["▁سرڪار",-13.970616340637209],["ເລື່ອງ",-13.970619201660156],["schrijven",-13.970623016357422],["▁onlarla",-13.97062873840332],["Вер",-13.970640182495115],["instagram",-13.970641136169434],["▁dzied",-13.970660209655762],["▁сенім",-13.97072696685791],["199",-13.970736503601074],["▁ସାମ",-13.970748901367188],["是真的",-13.970748901367188],["▁ငါ့",-13.970776557922363],["▁խնդիրը",-13.97077751159668],["ตําบล",-13.970815658569336],["▁miega",-13.970845222473145],["ദർ",-13.970849990844728],["告诉你",-13.970850944519045],["▁смысла",-13.970858573913574],["löndum",-13.970862388610
84],["▁convive",-13.970894813537598],["走上",-13.97092342376709],["▁پدیده",-13.970938682556152],["▁gebeuren",-13.970942497253418],["везе",-13.970972061157228],["▁Рож",-13.970980644226074],["▁betaald",-13.970999717712402],["▁udvar",-13.971003532409668],["сілі",-13.971013069152832],["حمام",-13.971063613891602],["▁redaksi",-13.971075057983398],["ագրով",-13.971077919006348],["▁Provo",-13.97108268737793],["▁hilbijartinê",-13.97110366821289],["辦公",-13.971116065979004],["ในปัจจุบัน",-13.971124649047852],["▁вниз",-13.971136093139648],["▁متصل",-13.971137046813965],["▁legato",-13.971166610717772],["▁sarf",-13.97117519378662],["Store",-13.971181869506836],["▁raised",-13.971192359924316],["ことだ",-13.971192359924316],["▁अलि",-13.971198081970217],["▁pensée",-13.97122573852539],["▁Bóg",-13.971236228942873],["▁считал",-13.971261024475098],["躺",-13.971281051635742],["geladen",-13.971283912658691],["İNİ",-13.97129249572754],["的比赛",-13.971293449401855],["ੈਂਟ",-13.971306800842283],["បរ",-13.971322059631348],["kenttä",-13.971331596374512],["▁දො",-13.971349716186523],["▁көчө",-13.971360206604004],["institution",-13.971380233764648],["▁პრინცი",-13.971384048461914],["ਨਕ",-13.97139835357666],["ાતા",-13.971410751342772],["साय",-13.971417427062988],["κυκλ",-13.971418380737305],["▁వేయ",-13.971450805664062],["▁крепост",-13.971461296081545],["▁medlemskap",-13.97152042388916],["▁SARA",-13.971529006958008],["▁dificuldade",-13.97153091430664],["▁Ένωση",-13.971532821655272],["▁κοινωνική",-13.97153377532959],["▁դպրոցի",-13.971548080444336],["ЧУ",-13.97155475616455],["ที่ว่า",-13.971555709838867],["၁၈",-13.971564292907717],["▁dipercayai",-13.971564292907717],["হিত",-13.97157096862793],["▁Seif",-13.97158432006836],["▁שטו",-13.971590042114258],["▁shirin",-13.971593856811523],["ayıp",-13.971598625183104],["חברת",-13.971609115600586],["▁האלו",-13.971611976623535],["ερή",-13.971619606018066],["▁топто",-13.971652030944824],["▁vedeli",-13.971678733825684],["▁twist",-13.971714973449709],["וניות",-13.971724510192873],["▁정기",-13.971731185913086],["prys",-13.97174072265625],["▁ngươ",-13.971744537353516],["▁형성",-13.971755981445312],["▁gebed",-13.971759796142578],["仔細",-13.971776008605955],["▁követel",-13.971792221069336],["ガイド",-13.971795082092283],["初心者",-13.9718017578125],["專注",-13.971803665161133],["∀",-13.971809387207031],["ဆွဲ",-13.971813201904297],["ထည့်",-13.971814155578612],["ទំហំ",-13.97181510925293],["▁Espírito",-13.97181510925293],["▁Ndërkohë",-13.97181510925293],["▁georganiseerd",-13.97181510925293],["▁izgatavo",-13.97181510925293],["▁madrugada",-13.97181510925293],["▁pubbliche",-13.97181510925293],["▁spracúva",-13.97181510925293],["▁súvislosti",-13.97181510925293],["▁większą",-13.97181510925293],["▁Παιδείας",-13.97181510925293],["▁μπορέσ",-13.97181510925293],["▁Москву",-13.97181510925293],["▁будаўніцтва",-13.97181510925293],["▁воопшто",-13.97181510925293],["▁кіраўніцтва",-13.97181510925293],["▁освобожден",-13.97181510925293],["▁сообщения",-13.97181510925293],["▁հետաքրքիր",-13.97181510925293],["▁সহায়তা",-13.97181510925293],["▁ରାଜନୀତି",-13.97181510925293],["▁அல்லாஹ்",-13.97181510925293],["▁ඔස්සේ",-13.97181510925293],["▁နောက်ဆုံး",-13.97181510925293],["▁არასდროს",-13.97181510925293],["▁უმეტეს",-13.97181510925293],["▁롯데",-13.97181510925293],["뒷",-13.97181510925293],["▁langfristig",-13.971816062927246],["▁maadaama",-13.971816062927246],["▁rancangan",-13.971816062927246],["▁градоначалник",-13.971816062927246],["▁قيادة",-13.971816062927246],["▁खूब",-13.971816062927246],["▁заўваж",-13.971817016601562],["▁المباشر",-13.971817016601562],
["គណៈ",-13.97181797027588],["ਪੋਰਟ",-13.971818923950195],["ଗାମୀ",-13.971818923950195],["▁ahaayeen",-13.971818923950195],["▁banheiro",-13.971818923950195],["▁setmanes",-13.971818923950195],["убуз",-13.971819877624512],["▁مجهز",-13.971819877624512],["ေခ်ာင္း",-13.971820831298828],["▁доступа",-13.971820831298828],["▁kahvaltı",-13.971821784973145],["קען",-13.97182273864746],["診療",-13.971823692321776],["▁Njihov",-13.971824645996094],["▁loĝantoj",-13.971824645996094],["そういった",-13.971824645996094],["▁어머니",-13.97182559967041],["▁Принцип",-13.971826553344728],["▁капитан",-13.97182846069336],["▁kongreso",-13.971829414367676],["▁ቀላል",-13.971829414367676],["kecil",-13.971832275390623],["▁ନାଁ",-13.97183322906494],["▁assegna",-13.971835136413574],["▁شیعه",-13.97183609008789],["▁yedi",-13.97183895111084],["▁بيروت",-13.97183895111084],["▁qaarkood",-13.971842765808104],["▁партизан",-13.971844673156738],["▁المشاريع",-13.971844673156738],["▁žádost",-13.971845626831056],["結論",-13.971853256225586],["▁أمريكي",-13.971857070922852],["▁හෙවත්",-13.971860885620115],["▁නිරත",-13.971861839294434],["▁بدانید",-13.97186279296875],["▁Київської",-13.971863746643066],["▁அணு",-13.971863746643066],["▁nadaljuje",-13.9718656539917],["▁доўгі",-13.9718656539917],["▁fenêtre",-13.971866607666016],["▁ცენტრალური",-13.97187042236328],["▁zemlju",-13.971871376037598],["เลอร์",-13.971875190734863],["▁Wielki",-13.971877098083496],["▁najpierw",-13.971885681152344],["提名",-13.97188663482666],["エステ",-13.97188949584961],["แบน",-13.971891403198242],["énnyel",-13.97189712524414],["▁پوندو",-13.971899032592772],["▁БНР",-13.971903800964355],["▁evaluare",-13.971906661987305],["▁MWAKA",-13.97191047668457],["肯定是",-13.97191333770752],["▁asioista",-13.971915245056152],["▁ostida",-13.971915245056152],["▁prestación",-13.971918106079102],["ULUI",-13.971921920776367],["▁ангийн",-13.971924781799316],["▁ਰੇਲ",-13.971927642822266],["儀式",-13.971932411193848],["▁ആവശ്യപ്പെട്ട",-13.97194004058838],["▁לבטל",-13.97195529937744],["▁transporter",-13.971961975097656],["▁услугите",-13.971970558166504],["schutzerklärung",-13.971985816955566],["▁partnership",-13.971988677978516],["▁ළමා",-13.971988677978516],["▁пайдалануға",-13.971990585327148],["▁कमाई",-13.971991539001465],["▁arrestuar",-13.972001075744627],["▁potpunosti",-13.972002983093262],["▁المتحدث",-13.972016334533691],["පොල",-13.972020149230955],["手間",-13.972023010253906],["▁analogi",-13.972023963928224],["▁saddex",-13.972028732299805],["▁unggul",-13.972041130065918],["▁Tourist",-13.97204303741455],["TIF",-13.972064018249512],["▁Fahrrad",-13.972067832946776],["▁الديمقراطية",-13.972074508666992],["itaanka",-13.972086906433104],["幸運",-13.97209358215332],["▁Recu",-13.97210693359375],["▁weiss",-13.972107887268066],["▁soggetto",-13.972113609313965],["▁παραγωγής",-13.97212028503418],["▁Seng",-13.972145080566406],["▁Mascus",-13.972155570983888],["▁książek",-13.97215747833252],["▁משק",-13.97216796875],["סיה",-13.972168922424316],["▁Avís",-13.972175598144531],["剧情",-13.972177505493164],["economic",-13.972179412841797],["▁تروریستی",-13.972179412841797],["▁cardiac",-13.972182273864746],["▁aluminio",-13.972187042236328],["▁22%",-13.97219944000244],["▁obvezni",-13.972204208374023],["▁ورکړل",-13.972220420837402],["▁Chyba",-13.972225189208984],["ევს",-13.972230911254885],["▁ਕੌ",-13.972233772277832],["▁participaron",-13.972237586975098],["▁materiály",-13.972238540649414],["▁ennakko",-13.972248077392578],["▁заяву",-13.972248077392578],["▁tillstånd",-13.972249984741213],["▁सोम",-13.972253799438477],["▁Вашият",-13.972256660461426],["▁పు
న",-13.972262382507324],["▁асобы",-13.97227954864502],["▁Phillip",-13.972286224365234],["حسین",-13.972304344177246],["pigwa",-13.972318649291992],["▁razlika",-13.972319602966309],["bhava",-13.97233772277832],["▁قهرمانی",-13.972346305847168],["блуд",-13.972356796264648],["▁szybki",-13.972378730773926],["▁მაგარი",-13.972386360168455],["577",-13.972387313842772],["avtale",-13.972392082214355],["▁продаже",-13.972394943237305],["zuführen",-13.972400665283203],["TURE",-13.972402572631836],["▁използвани",-13.97243595123291],["▁subur",-13.972448348999023],["gaande",-13.972455024719238],["っぱ",-13.972472190856934],["▁излаз",-13.972494125366213],["ിറങ്ങ",-13.972512245178224],["▁protecció",-13.972516059875488],["▁Alert",-13.972526550292969],["獨家",-13.972528457641602],["▁vaksin",-13.97253704071045],["▁sykkel",-13.97255802154541],["ធ្វើឲ្យ",-13.97256851196289],["५७",-13.972593307495115],["▁لامل",-13.972610473632812],["ोस्",-13.972613334655762],["ٹھی",-13.972614288330078],["▁vennskap",-13.972614288330078],["▁pravne",-13.972622871398926],["▁Addis",-13.97264289855957],["▁Féin",-13.972664833068848],["▁Skil",-13.972668647766112],["协作",-13.97266960144043],["▁panasz",-13.972671508789062],["organisatie",-13.97267723083496],["▁дін",-13.97268295288086],["存储",-13.972718238830566],["▁ಕರೆಯ",-13.972725868225098],["จะให้",-13.972734451293944],["Kristu",-13.972761154174805],["▁Vesti",-13.97276782989502],["国家安全",-13.972780227661133],["▁клиентите",-13.972796440124512],["▁Terms",-13.972800254821776],["▁الأمة",-13.972807884216309],["▁fhad",-13.972818374633787],["▁Lead",-13.972823143005373],["хаб",-13.97282886505127],["▁верује",-13.972832679748535],["▁tortu",-13.972838401794434],["▁menggantikan",-13.97284698486328],["▁समितिका",-13.972867965698242],["▁שס",-13.97287940979004],["досконалення",-13.972891807556152],["▁tutulmuş",-13.972898483276367],["váltó",-13.972929000854492],["▁esquema",-13.972938537597656],["▁hänet",-13.972946166992188],["ଜାତ",-13.973012924194336],["ILAN",-13.973014831542969],["ضرار",-13.973017692565918],["ສືບຕໍ່",-13.973018646240234],["▁failed",-13.973026275634766],["طرد",-13.973030090332031],["▁Між",-13.973033905029297],["▁pezzi",-13.973040580749512],["▁competitivo",-13.973057746887209],["▁Paesi",-13.973061561584473],["ແກ້",-13.973099708557127],["ຍາວ",-13.973103523254396],["▁ootab",-13.973126411437988],["დილი",-13.973130226135254],["▁дисциплина",-13.973172187805176],["▁Nanging",-13.973185539245604],["klių",-13.97319221496582],["▁נדרש",-13.973204612731934],["效应",-13.973204612731934],["▁коюу",-13.9732084274292],["LIE",-13.973248481750488],["▁memakan",-13.973259925842283],["▁Carme",-13.973267555236816],["▁Române",-13.97330093383789],["▁प्रहरीको",-13.973313331604004],["гыз",-13.973329544067385],["வளை",-13.973369598388672],["люється",-13.973388671875],["REP",-13.97340202331543],["▁Alpin",-13.973416328430176],["▁בהן",-13.97342586517334],["▁этими",-13.973431587219238],["чує",-13.973465919494627],["▁ostatné",-13.973474502563477],["▁Quba",-13.973483085632324],["▁tertawa",-13.97349739074707],["▁joaca",-13.973506927490234],["▁परवल",-13.973515510559082],["▁osuus",-13.973518371582031],["华为",-13.973535537719728],["effektiv",-13.97354793548584],["ี่ยว",-13.973550796508787],["▁vélo",-13.973553657531738],["АРГА",-13.973554611206056],["▁Badi",-13.973581314086914],["▁сыйла",-13.973586082458496],["▁выкарыста",-13.973589897155762],["▁شخصا",-13.973589897155762],["вании",-13.97361660003662],["人材",-13.97361660003662],["▁મેળવી",-13.973625183105469],["Най",-13.973633766174316],["▁ნიკო",-13.97364902496338],["▁Lance",-13.9736614227294
92],["歐美",-13.973695755004885],["▁koppel",-13.973697662353516],["▁ماهه",-13.973702430725098],["intervento",-13.97372817993164],["องค์การ",-13.973729133605955],["▁поэт",-13.973732948303224],["ቻሉ",-13.973743438720703],["平時",-13.973810195922852],["台上",-13.973830223083496],["▁vairākas",-13.973848342895508],["اثر",-13.973852157592772],["▁දෙවන",-13.973871231079102],["▁хаос",-13.973875045776367],["▁viisa",-13.973909378051758],["sniegt",-13.9739408493042],["▁ಹೇಗ",-13.973943710327148],["▁Truck",-13.973955154418944],["▁teniu",-13.973958015441896],["▁общински",-13.97397518157959],["אַז",-13.97397804260254],["زایی",-13.97397804260254],["ränd",-13.97404956817627],["ھڑ",-13.974059104919434],["深刻的",-13.9740629196167],["▁الأسد",-13.974129676818848],["weight",-13.974146842956545],["▁bewaar",-13.974184036254885],["▁алдына",-13.974209785461426],["▁κουρ",-13.97421932220459],["▁палац",-13.974237442016602],["▁ממנה",-13.974242210388184],["▁कार्यक्रमको",-13.974245071411133],["▁Medal",-13.974308967590332],["Soft",-13.974312782287598],["ַל",-13.974316596984863],["▁acceptat",-13.974316596984863],["▁očuva",-13.974335670471191],["▁видом",-13.97434425354004],["498",-13.97435474395752],["▁കുട്ടികളുടെ",-13.97435474395752],["▁върне",-13.974373817443848],["үүдэд",-13.974397659301758],["缺少",-13.974401473999023],["▁верну",-13.974411964416504],["ಕೊಟ್ಟ",-13.97443675994873],["летний",-13.974446296691896],["▁publicitar",-13.974448204040527],["▁döda",-13.974457740783691],["▁გოგ",-13.974459648132324],["ҮХ",-13.9744873046875],["▁ауа",-13.974495887756348],["一般人",-13.974507331848145],["யிலும்",-13.974534034729004],["▁największy",-13.97454833984375],["▁ከኢትዮጵያ",-13.974564552307127],["inntekter",-13.974568367004396],["ЯН",-13.974607467651367],["acaba",-13.974609375],["▁асыра",-13.974631309509276],["▁Тебе",-13.974660873413086],["ಮಾಡಿ",-13.974671363830566],["▁lexo",-13.97468090057373],["鍛",-13.974727630615234],["均衡",-13.974740028381348],["灵魂",-13.974742889404297],["鑑",-13.974743843078612],["▁tawag",-13.974756240844728],["▁Iaith",-13.974760055541992],["्थ्यो",-13.974766731262209],["謎",-13.974775314331056],["綁",-13.97477912902832],["钢铁",-13.97477912902832],["veiði",-13.974780082702637],["İSİ",-13.974786758422852],["公立",-13.97479248046875],["隙",-13.97479248046875],["アジア",-13.974794387817385],["လျှောက်",-13.974799156188965],["ជ្រើសរើស",-13.974799156188965],["▁सर्वसाधारण",-13.974799156188965],["♫",-13.974799156188965],["ягледзячы",-13.97480010986328],["เลื่อน",-13.97480010986328],["▁Lørdag",-13.97480010986328],["▁Nabízí",-13.97480010986328],["▁Njemačkoj",-13.97480010986328],["▁caffè",-13.97480010986328],["▁cyflwyno",-13.97480010986328],["▁eficiencia",-13.97480010986328],["▁incrível",-13.97480010986328],["▁priebehu",-13.97480010986328],["▁rozpoczyna",-13.97480010986328],["▁résidence",-13.97480010986328],["▁wziąć",-13.97480010986328],["▁βιβλία",-13.97480010986328],["▁παγκόσμια",-13.97480010986328],["▁Нацыянальны",-13.97480010986328],["▁Погледајте",-13.97480010986328],["▁сучасних",-13.97480010986328],["▁съответно",-13.97480010986328],["▁վերաբերում",-13.97480010986328],["▁טעקסאַס",-13.97480010986328],["▁امرأة",-13.97480010986328],["▁دھماکے",-13.97480010986328],["▁ناحیه",-13.97480010986328],["▁ਆਦਮੀ",-13.97480010986328],["▁ਪਹਿਲੇ",-13.97480010986328],["▁ଜବତ",-13.97480010986328],["▁பிரச்சினை",-13.97480010986328],["▁వరంగల్",-13.97480010986328],["▁ಅಧಿಸೂಚನೆ",-13.97480010986328],["▁ಉತ್ಪನ್ನ",-13.97480010986328],["▁ಸಮಾರಂಭ",-13.97480010986328],["▁පොලීසිය",-13.97480010986328],["▁გამოცდილება",-13.97480010986328],["▁განცხადებით",-13.97480010986328],["▁მომხდარ",-
13.97480010986328],["▁შპს",-13.97480010986328],["▁ግለሰቦች",-13.97480010986328],["ဠ",-13.974801063537598],["អក្សរ",-13.974801063537598],["▁Gedagte",-13.974801063537598],["▁Maschinen",-13.974801063537598],["▁forcément",-13.974801063537598],["▁старшыня",-13.974801063537598],["หมุน",-13.974802017211914],["▁заседания",-13.974802017211914],["▁ننڍي",-13.974802017211914],["▁siyosati",-13.97480297088623],["▁Συμβουλίου",-13.97480297088623],["▁मेहनत",-13.97480297088623],["▁izredno",-13.974803924560549],["▁извърши",-13.974803924560549],["▁اچھے",-13.974803924560549],["ત્યા",-13.974804878234863],["สะดวกสบาย",-13.974804878234863],["▁Gwynedd",-13.97480583190918],["▁препятств",-13.974806785583496],["▁хороскоп",-13.974806785583496],["▁Physic",-13.974807739257812],["▁Trátase",-13.974807739257812],["▁දෙවැනි",-13.974807739257812],["▁เด็กหญิง",-13.974807739257812],["▁memnuniyet",-13.974809646606444],["▁почетку",-13.974810600280762],["నాథ్",-13.974811553955078],["λωση",-13.974812507629396],["▁mzuri",-13.974812507629396],["သို႕",-13.974813461303713],["▁novērtēt",-13.974813461303713],["▁പലരും",-13.974813461303713],["占据",-13.974813461303713],["▁ameaça",-13.974815368652344],["▁önmagá",-13.97481632232666],["▁Tàu",-13.974821090698242],["▁появява",-13.974821090698242],["▁valószínűleg",-13.97482204437256],["▁అందరికీ",-13.97482204437256],["▁khasiat",-13.974822998046877],["▁체크",-13.974827766418455],["▁교환",-13.974831581115724],["▁Triều",-13.974836349487305],["▁اروپایی",-13.974839210510254],["▁Đạo",-13.974846839904783],["▁kaydetti",-13.974849700927734],["▁ulinzi",-13.97485065460205],["▁Outdoor",-13.974851608276367],["▁pelancong",-13.974854469299316],["▁вещей",-13.974857330322266],["▁nỗ",-13.974858283996582],["▁Zentral",-13.974860191345217],["▁чаргу",-13.974860191345217],["▁impoten",-13.974863052368164],["機器人",-13.974865913391112],["▁gudanar",-13.974868774414062],["▁पुरानी",-13.974870681762695],["▁званични",-13.974875450134276],["▁Πως",-13.974889755249023],["学び",-13.974889755249023],["▁nereikia",-13.974895477294922],["kanalen",-13.97490692138672],["▁أبرز",-13.97490692138672],["がありますので",-13.974916458129885],["▁شۇنىڭ",-13.974920272827148],["▁турнира",-13.974921226501465],["▁litri",-13.974924087524414],["▁ලියා",-13.97492504119873],["▁успява",-13.974925994873049],["▁छात्रों",-13.974925994873049],["หรอก",-13.974932670593262],["▁талд",-13.974937438964844],["▁진단",-13.974943161010742],["▁посочени",-13.974945068359377],["▁हमसे",-13.974945068359377],["▁kredītu",-13.974946975708008],["▁ሀብት",-13.97494888305664],["▁приблизи",-13.974950790405272],["▁اندام",-13.974953651428224],["▁kambing",-13.974956512451172],["▁совршен",-13.974956512451172],["▁ഓണ്",-13.974970817565918],["▁palygin",-13.974974632263184],["▁vencedor",-13.97498893737793],["▁λέξη",-13.974990844726562],["▁статусу",-13.974990844726562],["միր",-13.97499942779541],["まれて",-13.97500705718994],["▁extremamente",-13.975024223327637],["▁መዋ",-13.975028038024902],["▁limitada",-13.975032806396484],["生長",-13.975043296813965],["▁معطل",-13.97504425048828],["▁Molin",-13.975056648254396],["▁მასთან",-13.975064277648926],["▁мектепке",-13.975069999694824],["▁gerektir",-13.975072860717772],["▁المهم",-13.975075721740724],["정치",-13.975089073181152],["▁digelar",-13.975092887878418],["▁ukumbi",-13.975098609924316],["▁силе",-13.975099563598633],["sargā",-13.97511863708496],["૧૫",-13.975120544433594],["▁informatique",-13.975125312805176],["ОФ",-13.97513484954834],["▁Akku",-13.975144386291504],["Història",-13.975146293640137],["▁impresión",-13.97515106201172],["▁segredo",-13.975152969360352],["yilgan",-13.9
75154876708984],["▁zgjedhjeve",-13.975162506103516],["ないといけない",-13.975168228149414],["▁üzeri",-13.975171089172363],["▁aranĝ",-13.975173950195312],["▁szerepet",-13.975178718566896],["▁araşdırmalar",-13.97519588470459],["▁անունը",-13.97519588470459],["▁lokacija",-13.975199699401855],["ಚಂದ್ರ",-13.975204467773438],["▁imigrant",-13.975208282470703],["▁القانونية",-13.975221633911133],["▁genomföra",-13.97524356842041],["▁defense",-13.975252151489258],["ោក",-13.975253105163574],["▁valdo",-13.975255012512209],["▁խոսում",-13.975260734558104],["▁문제는",-13.975261688232422],["funksjon",-13.97527313232422],["▁психология",-13.975287437438965],["▁регистрирани",-13.975303649902344],["ளவு",-13.97530746459961],["▁vër",-13.975311279296877],["zindua",-13.975329399108888],["1918",-13.975369453430176],["自动化",-13.975380897521973],["▁представляют",-13.97538948059082],["▁palë",-13.975401878356934],["7.1",-13.975403785705566],["▁KBS",-13.975414276123049],["▁природних",-13.975419044494627],["▁בארה",-13.97543239593506],["▁એક્સ",-13.975434303283691],["пресс",-13.97544002532959],["平台上",-13.975441932678224],["▁Historic",-13.97544765472412],["▁intreg",-13.975449562072754],["blank",-13.975469589233398],["เรื่องการ",-13.975499153137209],["occupazione",-13.97551727294922],["студен",-13.975518226623535],["也不會",-13.97552490234375],["▁баруун",-13.975543975830078],["delà",-13.975574493408203],["▁raó",-13.97557544708252],["▁humid",-13.97559642791748],["ారావు",-13.975597381591797],["▁पाण्या",-13.975601196289062],["వరం",-13.975603103637695],["因为他",-13.97561264038086],["väestö",-13.975616455078123],["▁Seele",-13.975628852844238],["▁Clan",-13.975652694702148],["АЊА",-13.975656509399414],["▁போராட்ட",-13.97567081451416],["ችግሩ",-13.97567367553711],["aptar",-13.975707054138184],["▁ghu",-13.975711822509766],["बीचको",-13.975714683532717],["▁فعلی",-13.975717544555664],["▁процедуру",-13.975722312927246],["▁aprovechar",-13.975729942321776],["▁लागेल",-13.975746154785156],["▁boljši",-13.975781440734863],["scere",-13.97578239440918],["▁Libra",-13.975796699523926],["▁రాసిన",-13.975807189941406],["▁Voraussetzungen",-13.97581386566162],["▁ortadan",-13.975828170776367],["▁rândul",-13.97583293914795],["▁Lieblings",-13.975837707519531],["ดั",-13.975850105285645],["▁1395",-13.975850105285645],["網購",-13.975889205932615],["▁അത്തര",-13.97590446472168],["▁Расс",-13.975933074951172],["fhág",-13.975940704345703],["▁təkliflər",-13.975940704345703],["▁이번에",-13.975950241088867],["普通に",-13.975954055786133],["▁ochrane",-13.975971221923828],["▁بارہ",-13.975984573364258],["▁చెప్పి",-13.975985527038574],["▁dísz",-13.976018905639648],["▁Valent",-13.976035118103027],["▁حبيب",-13.976044654846191],["▁Schie",-13.976070404052734],["RTI",-13.97607421875],["▁kožu",-13.976086616516112],["▁Halep",-13.976106643676758],["▁szczegółow",-13.976106643676758],["ત્રિ",-13.976119041442873],["を見せ",-13.976139068603516],["▁podvod",-13.97614288330078],["लॉ",-13.976156234741213],["ciclo",-13.97616958618164],["ચક",-13.976174354553224],["▁lögð",-13.976177215576172],["▁järele",-13.976183891296388],["piko",-13.976186752319336],["▁Уряд",-13.976198196411133],["▁katil",-13.976207733154297],["▁بدم",-13.976207733154297],["ωδία",-13.976211547851562],["మీద",-13.976224899291992],["bhra",-13.976247787475586],["▁итали",-13.976255416870115],["▁különös",-13.976274490356444],["▁Xəbərlər",-13.976293563842772],["르고",-13.976320266723633],["东京",-13.97633457183838],["▁doktrin",-13.976359367370604],["ጋግ",-13.976372718811035],["送料",-13.976373672485352],["▁hentet",-13.976381301879885],["▁Hilde",-13.976393699645996],
["▁причината",-13.976394653320312],["helg",-13.976402282714844],["▁Jahan",-13.976407051086426],["▁postare",-13.976421356201172],["ってしまった",-13.976422309875488],["រីក",-13.976423263549805],["ակցի",-13.97642707824707],["▁निभा",-13.976430892944336],["▁proposto",-13.976433753967283],["▁skarb",-13.9764404296875],["▁бэлтгэл",-13.976459503173828],["▁stoppen",-13.976470947265623],["stijl",-13.976475715637209],["▁Academi",-13.976487159729004],["فوض",-13.97649097442627],["▁አድማ",-13.976491928100586],["的感受",-13.976497650146484],["▁гэв",-13.9764986038208],["▁أننا",-13.9764986038208],["▁zunaj",-13.97650909423828],["מגדל",-13.976524353027344],["▁фут",-13.976530075073242],["▁nesil",-13.976554870605469],["基督教",-13.976555824279783],["▁अल्प",-13.976567268371582],["▁Marino",-13.976571083068848],["▁contamos",-13.976598739624023],["ძიებ",-13.976612091064451],["▁bekam",-13.976617813110352],["▁sampe",-13.976624488830566],["▁teisel",-13.976639747619627],["▁реализация",-13.97665309906006],["▁масив",-13.976676940917969],["▁paprastai",-13.976689338684082],["▁должность",-13.976691246032717],["స్వామి",-13.976709365844728],["▁Ambiental",-13.97671604156494],["▁funkcije",-13.976757049560549],["▁Perry",-13.976776123046877],["alueella",-13.976777076721191],["ruldu",-13.97678279876709],["といわれ",-13.97683334350586],["▁штамп",-13.976877212524414],["▁ਲਾਈ",-13.976882934570312],["▁Gund",-13.976895332336426],["▁puisqu",-13.976896286010742],["▁botol",-13.976903915405272],["شرح",-13.97694206237793],["이라도",-13.976943016052246],["lägga",-13.976957321166992],["квартир",-13.976967811584473],["▁සිසු",-13.97697639465332],["చ్చే",-13.977005958557127],["karten",-13.977015495300291],["▁әңгіме",-13.977023124694824],["▁Garan",-13.977025985717772],["東方",-13.977036476135254],["▁udhëtim",-13.977058410644531],["▁nízko",-13.97706413269043],["▁прозорец",-13.977067947387695],["▁investisiya",-13.977090835571287],["▁жакты",-13.9771146774292],["▁որին",-13.97712230682373],["に興味",-13.977128028869627],["▁Registro",-13.97714138031006],["▁Учител",-13.977145195007324],["▁interessati",-13.977148056030272],["έργ",-13.97715187072754],["▁banche",-13.977192878723145],["▁തീരുമാനിച്ച",-13.97722053527832],["វីដេអូ",-13.97723388671875],["kkoja",-13.977248191833496],["▁baraza",-13.977255821228027],["▁számlá",-13.977256774902344],["signal",-13.977267265319824],["▁앞에",-13.977310180664062],["ボード",-13.97731113433838],["▁lustr",-13.977320671081545],["leyebilirsiniz",-13.977339744567873],["▁надежда",-13.977349281311035],["ร่ม",-13.977359771728516],["▁etiladi",-13.977365493774414],["635",-13.977367401123049],["北京时间",-13.977372169494627],["▁qurilish",-13.977386474609377],["mølle",-13.97741413116455],["▁faks",-13.977429389953612],["▁groeien",-13.977445602416992],["▁narejen",-13.97745418548584],["ARRA",-13.97746467590332],["▁Dạ",-13.977519989013672],["▁ומס",-13.977523803710938],["▁اللبناني",-13.977524757385254],["▁الأميركي",-13.97753620147705],["klasser",-13.97756004333496],["▁diaria",-13.977572441101074],["はっきり",-13.97757339477539],["▁రేప",-13.977579116821287],["▁قىلىشى",-13.97758960723877],["bilden",-13.977622032165527],["मेक",-13.977631568908691],["נדון",-13.977638244628906],["DAP",-13.97765064239502],["▁vacation",-13.977657318115234],["▁zaude",-13.977657318115234],["▁drugu",-13.977659225463867],["forbruk",-13.977683067321776],["திரு",-13.977703094482422],["▁hedh",-13.977704048156738],["▁associati",-13.977728843688965],["蹟",-13.977737426757812],["vrede",-13.977738380432127],["▁ରାତି",-13.977741241455078],["芯",-13.977744102478027],["▁Respublikasında",-13.97774600982666],["▁corff",
-13.977753639221191],["炫",-13.97775650024414],["▁übrig",-13.977760314941406],["呼吁",-13.977760314941406],["脊",-13.977764129638672],["臓",-13.97776699066162],["百分之",-13.97777271270752],["淀",-13.977779388427734],["クリニック",-13.97778034210205],["報酬",-13.97778034210205],["▁бъдеще",-13.977789878845217],["ကမၻာ",-13.97779369354248],["ព្រំដែន",-13.97779369354248],["▁Verantwortlich",-13.97779369354248],["▁προτάσεις",-13.97779369354248],["▁স্বপ্ন",-13.97779369354248],["▁ଇଣ୍ଡିଆ",-13.97779369354248],["注射",-13.97779369354248],["อ่าง",-13.977794647216797],["ធម្មជាតិ",-13.977794647216797],["▁giovedì",-13.977794647216797],["▁každom",-13.977794647216797],["▁kifejezetten",-13.977794647216797],["▁partenariat",-13.977794647216797],["▁pokožku",-13.977794647216797],["▁vajadzībām",-13.977794647216797],["▁wniosek",-13.977794647216797],["▁zaledwie",-13.977794647216797],["▁Örneğin",-13.977794647216797],["▁Αλέξη",-13.977794647216797],["▁απόγευμα",-13.977794647216797],["▁забезпечити",-13.977794647216797],["▁мільярд",-13.977794647216797],["▁освобожда",-13.977794647216797],["▁отбелязва",-13.977794647216797],["▁կրթական",-13.977794647216797],["▁համեմատ",-13.977794647216797],["▁شرائط",-13.977794647216797],["▁قوبۇل",-13.977794647216797],["▁مقارنة",-13.977794647216797],["▁ټویټر",-13.977794647216797],["▁आलोचना",-13.977794647216797],["▁इन्टरनेट",-13.977794647216797],["▁धोनी",-13.977794647216797],["▁संभावना",-13.977794647216797],["▁स्थिर",-13.977794647216797],["▁ଦୁଷ୍କର୍ମ",-13.977794647216797],["▁சூப்பர்",-13.977794647216797],["▁ಸೂಕ್ತ",-13.977794647216797],["▁වෙනුවට",-13.977794647216797],["▁መሳሪያ",-13.977794647216797],["▁겁니다",-13.977794647216797],["▁충분히",-13.977794647216797],["빵",-13.977794647216797],["pośród",-13.977795600891112],["கிழக்கு",-13.977795600891112],["▁poškodb",-13.977795600891112],["▁נהדר",-13.97779655456543],["▁हाउस",-13.97779655456543],["▁ትንሽ",-13.97779655456543],["▁ambavyo",-13.977797508239746],["▁চ্যাট",-13.977798461914062],["▁ንብረት",-13.977798461914062],["戰鬥",-13.977798461914062],["▁Cumartesi",-13.97779941558838],["▁ansiosta",-13.97779941558838],["▁રાશિ",-13.97779941558838],["▁ጭምር",-13.97779941558838],["▁ঘুম",-13.977800369262695],["▁එහෙනම්",-13.977800369262695],["▁ugdymo",-13.977801322937012],["▁professeur",-13.977802276611328],["▁දේශීය",-13.977803230285645],["ໂມງ",-13.977805137634276],["▁motocikl",-13.977805137634276],["▁ସମ୍ମାନ",-13.977805137634276],["▁forhindre",-13.977806091308594],["▁εορτ",-13.977806091308594],["▁राजकारण",-13.977808952331545],["▁ಮುಖಂಡ",-13.97780990600586],["質疑",-13.977811813354492],["ទូទៅ",-13.977812767028809],["▁opmerking",-13.977812767028809],["▁illumina",-13.97781467437744],["▁keyword",-13.97781467437744],["反复",-13.977816581726074],["▁szórakoz",-13.97781753540039],["▁тұратын",-13.97782039642334],["▁పుట్టిన",-13.977823257446287],["ελαφρ",-13.977825164794922],["▁المدارس",-13.977825164794922],["▁yapabilirsiniz",-13.977827072143556],["▁Novembre",-13.977828979492188],["▁الجودة",-13.977829933166504],["▁παλ",-13.97783374786377],["▁ਪੈਦਾ",-13.97783374786377],["▁반복",-13.977834701538086],["▁bajenn",-13.977835655212402],["扶持",-13.977835655212402],["▁Colin",-13.977840423583984],["▁እስር",-13.977840423583984],["▁Seçim",-13.977842330932615],["▁buluştu",-13.977842330932615],["▁سپريم",-13.977843284606934],["▁hutang",-13.97784423828125],["เคร",-13.977850914001465],["▁daraus",-13.97785186767578],["▁święta",-13.977853775024414],["6.8",-13.977864265441896],["▁유명한",-13.977867126464844],["▁присутни",-13.97787094116211],["▁zasadzie",-13.977871894836426],["▁înscrie",-13.977877616882324],["▁jõudnud",-13.97789192199707],
["στροφή",-13.977897644042969],["▁chave",-13.977903366088867],["ଢ଼ି",-13.977907180786133],["▁lurt",-13.977907180786133],["▁lētāk",-13.977907180786133],["▁tehtiin",-13.977910041809082],["▁перечень",-13.977910041809082],["Saint",-13.97792911529541],["▁वर्षी",-13.97793960571289],["▁виготовлення",-13.977943420410156],["▁Пошук",-13.977961540222168],["▁collo",-13.977967262268066],["▁обећа",-13.977972984313965],["ანგ",-13.977978706359863],["▁ಕನ್ನಡದ",-13.977984428405762],["▁jumladan",-13.977987289428713],["安倍",-13.977992057800291],["▁مبارك",-13.977997779846191],["▁treinamento",-13.977999687194824],["▁आंख",-13.977999687194824],["▁Villar",-13.97800636291504],["安置",-13.97801113128662],["▁Coral",-13.978017807006836],["▁Akses",-13.97803020477295],["▁oblikovanje",-13.978041648864746],["▁причиной",-13.978041648864746],["▁zorunlu",-13.978057861328123],["▁שקלים",-13.978066444396973],["▁lasciato",-13.978070259094238],["▁виборів",-13.97807502746582],["▁Taal",-13.978076934814451],["▁socialdemokrat",-13.978079795837402],["Map",-13.97808074951172],["▁головой",-13.978081703186035],["▁precipe",-13.978083610534668],["▁Николов",-13.978094100952148],["▁изискванията",-13.978100776672363],["ેન્ડ",-13.978102684020996],["▁בשוק",-13.978106498718262],["物业",-13.978116989135742],["▁pastra",-13.97812557220459],["▁어린",-13.978135108947754],["▁المناسبة",-13.978137969970703],["▁фанат",-13.978144645690918],["Block",-13.9781494140625],["▁источников",-13.978157043457031],["▁ochrana",-13.978158950805664],["▁דריי",-13.978163719177246],["▁доходів",-13.978177070617676],["dzielić",-13.97819995880127],["▁استخدم",-13.97820281982422],["ריט",-13.978221893310549],["▁спеціальні",-13.978229522705078],["▁Terrassa",-13.978257179260254],["▁Ambasad",-13.978271484375],["▁Ještě",-13.978279113769531],["▁Plej",-13.97830295562744],["▁avatar",-13.978315353393556],["▁сууц",-13.978316307067873],["Auf",-13.978330612182615],["▁Clare",-13.978337287902832],["▁någonting",-13.978353500366213],["なっています",-13.97838020324707],["vrouw",-13.978389739990234],["▁regem",-13.978413581848145],["▁открыть",-13.978466987609863],["5,9",-13.978475570678713],["▁firmalar",-13.97848129272461],["▁jedyny",-13.978486061096191],["▁Sambad",-13.978504180908203],["▁მუხლ",-13.97850513458252],["▁sharoit",-13.978508949279783],["▁შარ",-13.978516578674316],["▁criterio",-13.978529930114746],["▁situacijo",-13.97854995727539],["▁ମହିଳାଙ୍କ",-13.978560447692873],["▁методов",-13.97857666015625],["▁qəzet",-13.978585243225098],["▁impian",-13.978588104248049],["▁включає",-13.97860336303711],["▁trafił",-13.978610038757324],["▁ambulans",-13.97862434387207],["也非常",-13.978626251220703],["▁universiteit",-13.978643417358398],["ịch",-13.97864818572998],["▁কাপ",-13.978673934936523],["ٹین",-13.978680610656738],["演講",-13.978710174560549],["▁rendőrség",-13.978715896606444],["SAP",-13.97872543334961],["▁여부",-13.97875690460205],["▁Frost",-13.978761672973633],["▁Наші",-13.978764533996582],["▁హీరోగా",-13.978768348693848],["▁projev",-13.978777885437012],["▁смотрит",-13.97878074645996],["ராய்",-13.978781700134276],["▁آئیں",-13.978797912597656],["▁eğlenceli",-13.978805541992188],["▁điên",-13.978822708129885],["၈၈",-13.97883415222168],["▁povod",-13.97883415222168],["▁افغانان",-13.978853225708008],["СД",-13.978859901428224],["▁орнына",-13.978875160217283],["ləyən",-13.978878021240234],["μαζ",-13.97887897491455],["▁спортын",-13.97887897491455],["▁raames",-13.978900909423828],["▁rozhodli",-13.978907585144045],["▁indossa",-13.97893524169922],["īzes",-13.978939056396484],["▁Pyr",-13.978965759277344],["▁арман",-13.978972434997
56],["▁erinevad",-13.978978157043455],["▁askea",-13.978999137878418],["wekwa",-13.979000091552734],["ંડા",-13.979007720947266],["▁سگھ",-13.97903537750244],["▁Zaidi",-13.979081153869627],["▁пластмас",-13.979084014892578],["▁Broj",-13.97911548614502],["▁отправля",-13.97911548614502],["▁نجد",-13.97915744781494],["ရရွိ",-13.979159355163574],["▁upokoj",-13.979168891906738],["▁yuva",-13.979182243347168],["▁toimet",-13.979196548461914],["دعى",-13.979229927062988],["คนหนึ่ง",-13.979238510131836],["melidir",-13.979247093200684],["പ്രതി",-13.979249954223633],["▁ଭୟ",-13.97926139831543],["▁važni",-13.97927951812744],["mluví",-13.979284286499023],["▁dhënat",-13.979289054870604],["yttävä",-13.979327201843262],["▁pakiet",-13.979347229003906],["ማችን",-13.979377746582031],["tinggal",-13.97938346862793],["աքար",-13.979418754577637],["วะ",-13.97943878173828],["▁Шер",-13.97946071624756],["▁Tesis",-13.979514122009276],["▁Olive",-13.9795503616333],["تصميم",-13.979574203491213],["њења",-13.97960090637207],["▁rationem",-13.979615211486816],["▁зүйлийн",-13.979618072509766],["гуч",-13.97964859008789],["新区",-13.979657173156738],["වීම්",-13.979662895202637],["▁عموما",-13.97966766357422],["▁innholdet",-13.979676246643066],["▁демократичн",-13.979676246643066],["▁moją",-13.979683876037598],["▁Saab",-13.97968864440918],["▁1868",-13.979697227478027],["▁ആദ്യമായി",-13.979710578918455],["xaad",-13.979743003845217],["▁почита",-13.97976303100586],["▁дайындау",-13.979764938354492],["▁звезди",-13.979764938354492],["‚",-13.979768753051758],["kelompok",-13.979771614074709],["▁քանակ",-13.979795455932615],["oplevelse",-13.979840278625488],["▁bloemen",-13.979840278625488],["റൈ",-13.979850769042969],["主辦",-13.979856491088867],["▁Слово",-13.979867935180664],["tenzij",-13.979909896850586],["loissa",-13.979913711547852],["bøger",-13.979921340942385],["▁stomatolog",-13.979942321777344],["▁አቋም",-13.979945182800291],["▁رکھیں",-13.97995376586914],["▁cualifica",-13.979955673217772],["▁vtip",-13.979990005493164],["おき",-13.979991912841797],["▁snovi",-13.980030059814451],["▁Yurt",-13.98003387451172],["չու",-13.98004150390625],["▁Bilag",-13.9800443649292],["ଯାଉଛି",-13.98005485534668],["▁içeren",-13.980073928833008],["▁selectie",-13.980077743530272],["Adam",-13.980090141296388],["▁Доро",-13.98012638092041],["ハウス",-13.980145454406738],["▁навео",-13.98017120361328],["▁بنے",-13.980185508728027],["▁مىللەتلەر",-13.980189323425291],["ຊຸ",-13.980202674865724],["▁գալու",-13.98020362854004],["▁consecu",-13.98020839691162],["כוב",-13.980210304260254],["▁търг",-13.980216979980469],["▁පොල්",-13.980216979980469],["giorno",-13.980223655700684],["ՈՒՆ",-13.980263710021973],["▁patra",-13.980291366577148],["▁নেওয়া",-13.980313301086426],["が多いです",-13.980334281921388],["▁emision",-13.98035717010498],["▁látsz",-13.980377197265623],["▁Hoga",-13.980386734008787],["▁плац",-13.980399131774902],["jegyzés",-13.98040008544922],["▁seakan",-13.980411529541016],["▁বৃ",-13.98042106628418],["▁desenho",-13.980440139770508],["▁sympati",-13.98044776916504],["▁samotný",-13.98045539855957],["▁Valentino",-13.980472564697266],["adevar",-13.980478286743164],["▁Kahani",-13.980480194091797],["technologie",-13.980494499206545],["▁добили",-13.98051643371582],["တို့သည်",-13.980518341064451],["▁dağıt",-13.980521202087402],["▁സഹായിക്ക",-13.9805326461792],["▁cioc",-13.980535507202148],["稳定的",-13.98054027557373],["▁Axborot",-13.980552673339844],["▁mangia",-13.98055362701416],["전략",-13.980557441711426],["ಶೆ",-13.98055934906006],["▁хтось",-13.980561256408691],["masıyla",-13.980562210083008],["▁இருக்க
ா",-13.980589866638184],["▁ലഭ്യ",-13.980598449707031],["യാണോ",-13.980603218078612],["allika",-13.980609893798828],["1971",-13.98062515258789],["เป็นสมาชิก",-13.980627059936523],["▁युरोप",-13.980632781982422],["vansa",-13.980650901794434],["інскі",-13.980650901794434],["▁Deposit",-13.98065948486328],["▁काठमाडौ",-13.980663299560549],["▁הקשור",-13.980680465698242],["▁çanta",-13.980690956115724],["६१",-13.980695724487305],["▁tomado",-13.98070240020752],["ুৱা",-13.980706214904783],["▁ٹرین",-13.980709075927734],["섹",-13.980737686157228],["唤",-13.980746269226074],["鬱",-13.980759620666504],["▁urush",-13.980761528015137],["労",-13.980772972106934],["▁दिनुहोस",-13.980775833129885],["吾",-13.980775833129885],["揉",-13.9807767868042],["വത്",-13.980778694152832],["獎勵",-13.980778694152832],["ষণ",-13.980779647827148],["狙",-13.98078155517578],["∼",-13.980783462524414],["gemeinschaft",-13.980796813964844],["ሲኖዶስ",-13.980796813964844],["ቭ",-13.980796813964844],["▁Ansprüche",-13.980796813964844],["▁Warszawy",-13.980796813964844],["▁entziklopedia",-13.980796813964844],["▁gadījumos",-13.980796813964844],["▁patrimoine",-13.980796813964844],["▁različite",-13.980796813964844],["▁rešitve",-13.980796813964844],["▁troisième",-13.980796813964844],["▁áherslu",-13.980796813964844],["▁υπάρξει",-13.980796813964844],["▁Патријарх",-13.980796813964844],["▁звычайна",-13.980796813964844],["▁някакви",-13.980796813964844],["▁ощущение",-13.980796813964844],["▁уламжлал",-13.980796813964844],["▁ښايي",-13.980796813964844],["▁ਕਰਵਾਇਆ",-13.980796813964844],["▁ਕਹਾਣੀ",-13.980796813964844],["▁ಪೂಜೆ",-13.980796813964844],["▁ദുരന്ത",-13.980796813964844],["▁සහභාගි",-13.980796813964844],["▁กิโลเมตร",-13.980796813964844],["▁ነጥብ",-13.980796813964844],["ανησυχ",-13.98079776763916],["ાયેલ",-13.98079776763916],["▁mexanizm",-13.98079776763916],["▁valamennyi",-13.98079776763916],["▁Çanakkale",-13.98079776763916],["▁ösztön",-13.98079776763916],["▁İÇİN",-13.98079776763916],["▁Дональд",-13.98079776763916],["▁كوماندىسى",-13.98079776763916],["▁अखिलेश",-13.98079776763916],["▁వయసు",-13.98079776763916],["▁വിശേഷങ്ങളുമായി",-13.98079776763916],["▁Gebühr",-13.980798721313477],["▁kiekvieno",-13.980798721313477],["▁съгласява",-13.980798721313477],["▁хэлтэс",-13.980798721313477],["▁ארגון",-13.980798721313477],["▁Οργαν",-13.980799674987791],["▁nacionales",-13.98080062866211],["▁ujumla",-13.980801582336426],["▁ପକାଇ",-13.980801582336426],["▁செயற்",-13.980801582336426],["▁ሃሳብ",-13.980801582336426],["▁fondamentali",-13.980802536010742],["პატრიარქ",-13.980804443359377],["▁Hoffnung",-13.980805397033691],["▁Газета",-13.980807304382324],["▁बहुमत",-13.980807304382324],["▁ചർച്ച",-13.980807304382324],["▁akadály",-13.98080825805664],["▁količina",-13.980810165405272],["▁טאָג",-13.98081111907959],["▁නායකත්වය",-13.98081111907959],["▁tamamilə",-13.980812072753906],["▁التونسي",-13.980813026428224],["▁టీవీ",-13.980817794799805],["▁කැඳ",-13.980819702148438],["▁Ապրիլ",-13.980820655822754],["▁இதுவரை",-13.980823516845703],["營造",-13.980823516845703],["▁pengusaha",-13.98082447052002],["▁όρους",-13.980825424194336],["▁razliku",-13.980826377868652],["▁появились",-13.980826377868652],["▁ერთადერთი",-13.980827331542969],["▁Flickr",-13.980830192565918],["▁păcate",-13.980834007263184],["▁следващата",-13.9808349609375],["▁bihurtu",-13.980838775634766],["▁خامنه",-13.980843544006348],["▁එහා",-13.980844497680664],["מחלקה",-13.98084831237793],["▁tatlong",-13.980849266052246],["▁ابتدائی",-13.98085117340088],["យ៉",-13.980853080749512],["ઓએ",-13.980854034423828],["▁Московской",-13.980854034423828],["▁hliník
",-13.98085594177246],["▁pitänyt",-13.980864524841309],["▁ქუჩაზე",-13.980870246887209],["▁valkoinen",-13.980874061584473],["vyhľadáva",-13.980876922607422],["▁oublier",-13.980880737304688],["▁змагання",-13.98088264465332],["▁punggung",-13.980884552001951],["▁असल",-13.980884552001951],["▁evacua",-13.980886459350586],["附加",-13.980889320373535],["▁minučių",-13.980892181396484],["学ぶ",-13.980899810791016],["▁gospodarstva",-13.980901718139648],["▁Detroit",-13.980910301208496],["▁Horrela",-13.980910301208496],["▁Британија",-13.980916023254396],["▁בהמשך",-13.98093032836914],["మెయిల్",-13.980932235717772],["▁nižší",-13.980938911437988],["▁хэрэгцээ",-13.980948448181152],["▁afacere",-13.980950355529783],["▁իրենք",-13.980955123901367],["▁teenager",-13.980956077575684],["▁États",-13.98095989227295],["روت",-13.980965614318848],["sihteeri",-13.980975151062012],["▁емоции",-13.980981826782228],["▁അധികം",-13.980981826782228],["คลับ",-13.980985641479492],["▁družba",-13.980985641479492],["(^^)",-13.98099136352539],["▁것이라고",-13.98099136352539],["▁Enkelt",-13.980992317199709],["▁təsis",-13.98100471496582],["▁Municipi",-13.981032371520996],["▁emozioni",-13.981040000915527],["▁память",-13.98104190826416],["vaara",-13.98105239868164],["ในโรคสะเก็ดเงิน",-13.981057167053224],["总会",-13.98106288909912],["▁Hukuk",-13.981073379516602],["▁wymagają",-13.98107624053955],["▁būdas",-13.9810791015625],["▁teisių",-13.981080055236816],["ຂອງພວກເຮົາ",-13.981086730957031],["နာမည္",-13.981091499328612],["▁farkında",-13.98110294342041],["▁alguno",-13.98111057281494],["9001",-13.981117248535156],["▁Dydis",-13.98112678527832],["▁Λου",-13.981128692626951],["▁министерот",-13.981142044067385],["▁الحالات",-13.981178283691406],["▁MKOA",-13.981210708618164],["▁रेडियो",-13.98123264312744],["▁artikulua",-13.981233596801758],["تعداد",-13.98123550415039],["▁përkatës",-13.981239318847656],["▁لعام",-13.981261253356934],["▁нүд",-13.981264114379885],["▁خطة",-13.9812650680542],["▁Друж",-13.981273651123049],["▁vulnerable",-13.981274604797363],["▁розмірі",-13.981287002563477],["▁1884",-13.981294631958008],["▁Üye",-13.98130226135254],["▁karaan",-13.981306076049805],["▁المقبلة",-13.981318473815918],["賽事",-13.9813232421875],["ពោះ",-13.98135757446289],["▁ההג",-13.98137378692627],["▁Lift",-13.981403350830078],["▁ফলে",-13.981428146362305],["▁Spray",-13.98142910003662],["борд",-13.981462478637695],["CAF",-13.981477737426758],["runu",-13.981513023376465],["▁Vö",-13.981513023376465],["底下",-13.981518745422363],["ക്സി",-13.981521606445312],["▁قاضي",-13.981522560119627],["▁mengirimkan",-13.981557846069336],["праз",-13.981581687927246],["▁wamekuwa",-13.981581687927246],["▁jardunaldi",-13.981586456298828],["▁rádió",-13.981586456298828],["▁composition",-13.981597900390623],["▁socker",-13.981597900390623],["പിടി",-13.981603622436523],["▁chwilio",-13.98161792755127],["▁Tinh",-13.981624603271484],["▁СНС",-13.98162841796875],["▁өкүлү",-13.98165225982666],["▁kirjutab",-13.981698036193848],["▁Semasa",-13.98172092437744],["υμάτων",-13.981724739074709],["▁perdana",-13.981739044189451],["พ้น",-13.9817476272583],["solution",-13.981757164001465],["7,9",-13.981760025024414],["પાન",-13.981766700744627],["escenari",-13.981799125671388],["▁Лис",-13.98180866241455],["מוג",-13.981813430786133],["▁أحدث",-13.98182487487793],["ผู้จัดการ",-13.981844902038574],["yyttä",-13.981849670410156],["మాన్",-13.981849670410156],["▁Bulg",-13.981852531433104],["škog",-13.981884956359863],["几次",-13.981900215148926],["▁მუნიციპალიტეტის",-13.981925010681152],["▁Priser",-13.981973648071287],["▁εννο",-13.9
81975555419922],["ايدۇ",-13.981980323791504],["▁pokut",-13.981985092163086],["▁បង្",-13.981985092163086],["▁jatku",-13.9819974899292],["▁سینٹر",-13.981999397277832],["▁Саду",-13.982010841369627],["▁სურს",-13.98201847076416],["istettu",-13.982019424438477],["ោល",-13.98202419281006],["▁ଗଛ",-13.982056617736816],["▁converti",-13.982069969177246],["၀န္ႀကီး",-13.982097625732422],["עול",-13.982104301452637],["▁ಹಿಂದಿ",-13.98210906982422],["▁urejen",-13.982110977172852],["ವಾಗಿತ್ತು",-13.982121467590332],["▁נחשב",-13.982129096984863],["вероятно",-13.982139587402344],["גדול",-13.982141494750977],["മതി",-13.98215389251709],["රාජ්",-13.982154846191406],["▁невро",-13.982155799865724],["▁breá",-13.982187271118164],["10,000",-13.98219108581543],["▁Politiker",-13.982206344604492],["▁გადამ",-13.98220920562744],["▁военни",-13.982239723205566],["zugeben",-13.9822416305542],["▁Шап",-13.98225212097168],["▁سیٹ",-13.982254981994627],["▁creation",-13.98226547241211],["▁okazało",-13.982295989990234],["초등",-13.982309341430664],["թուրքական",-13.982311248779297],["▁ananas",-13.982341766357422],["λαμπ",-13.982354164123535],["ARBE",-13.982364654541016],["▁ಹೀಗ",-13.982366561889648],["secuti",-13.98237133026123],["▁sreča",-13.98237133026123],["ાલય",-13.982392311096191],["ുന്നതാണ്",-13.982452392578123],["doğan",-13.982457160949709],["▁rydy",-13.982462882995604],["▁생각해",-13.982501983642578],["▁ଆଶା",-13.982538223266602],["▁kestä",-13.9825439453125],["▁používat",-13.982577323913574],["▁Gerais",-13.982584953308104],["imizə",-13.98259162902832],["▁teret",-13.9826078414917],["發電",-13.982620239257812],["ೇಶನ್",-13.982641220092772],["▁площади",-13.982645988464355],["▁راهنمایی",-13.982653617858888],["बिन",-13.982691764831545],["Mbps",-13.982699394226074],["תחנת",-13.98270320892334],["▁Kadhal",-13.982709884643556],["כנע",-13.982749938964844],["▁zdravstven",-13.982775688171388],["▁미니",-13.982807159423828],["തിനുള്ള",-13.982817649841309],["ពិភពលោក",-13.982834815979004],["▁crezi",-13.982840538024902],["모델",-13.982847213745115],["▁rischia",-13.9828519821167],["▁aprire",-13.982876777648926],["▁omfang",-13.982877731323242],["▁रोम",-13.98289394378662],["▁ընտանիքի",-13.98289680480957],["▁cadou",-13.982900619506836],["▁augmenta",-13.982903480529783],["▁literaria",-13.982908248901367],["▁джерела",-13.98293113708496],["▁tahanan",-13.982937812805176],["▁Loma",-13.982953071594238],["▁miałem",-13.982954025268556],["光伏",-13.982965469360352],["rinho",-13.98297882080078],["khaya",-13.982999801635742],["មន្ត្រី",-13.983016967773438],["▁날씨",-13.983024597167969],["nettsteder",-13.983036994934082],["▁تط",-13.983050346374512],["▁هنگامی",-13.983059883117676],["തായ",-13.983080863952637],["▁කිව",-13.98308277130127],["▁Rolling",-13.98308563232422],["uojami",-13.983119010925291],["ಲ್ಡ್",-13.983119010925291],["▁мұғалім",-13.983135223388672],["Commun",-13.98314380645752],["▁značaja",-13.983184814453123],["▁हिमाल",-13.98324489593506],["▁Сайн",-13.983257293701172],["▁душевн",-13.983280181884766],["ellenes",-13.983284950256348],["ڈز",-13.98328685760498],["▁удовлетво",-13.983341217041016],["▁Bengal",-13.983353614807127],["▁Verkko",-13.983358383178713],["▁šire",-13.983358383178713],["ነፍ",-13.98336410522461],["INU",-13.983369827270508],["२३",-13.983407020568848],["▁Lekker",-13.983418464660645],["경주",-13.983428001403809],["malısınız",-13.983433723449709],["案内",-13.983445167541504],["ക്കൊപ്പം",-13.983447074890137],["▁kurzen",-13.983450889587402],["ЕНА",-13.9834566116333],["tadqiqot",-13.983490943908691],["▁فلسطینی",-13.983495712280272],["▁Kranken",-13.983501434326172],["▁
artístico",-13.983515739440918],["zulu",-13.983527183532717],["▁богата",-13.98353385925293],["▁нечто",-13.983570098876951],["▁Dünyada",-13.983622550964355],["ţională",-13.983634948730469],["▁glave",-13.983640670776367],["▁desať",-13.983644485473633],["▁vroeger",-13.983644485473633],["毛泽东",-13.983664512634276],["ാകുന്ന",-13.983689308166504],["чиков",-13.98369026184082],["▁pupil",-13.983692169189451],["որդի",-13.983704566955566],["▁froh",-13.983710289001465],["▁قلبی",-13.98371124267578],["胀",-13.98371124267578],["▁şəkil",-13.983712196350098],["▁הלקוח",-13.983716011047363],["纪律",-13.983725547790527],["亚马逊",-13.983742713928224],["撰",-13.983747482299805],["מכירות",-13.98375415802002],["专题",-13.9837646484375],["სახე",-13.98376750946045],["▁먹는",-13.983770370483398],["элдэг",-13.983779907226562],["宛",-13.983782768249512],["あくまで",-13.983784675598145],["▁totalitar",-13.983786582946776],["仕組み",-13.983798027038574],["▁ոլորտի",-13.98379898071289],["早めに",-13.983802795410156],["ລະອຽດ",-13.983805656433104],["▁jualan",-13.983805656433104],["◇",-13.983806610107422],["̄",-13.983808517456056],["▁αδελφ",-13.983808517456056],["▁դերասան",-13.983808517456056],["església",-13.983809471130373],["technológiá",-13.983809471130373],["কিশোর",-13.983809471130373],["ဗိုလ္",-13.983809471130373],["ጳ",-13.983809471130373],["▁Xatırlada",-13.983809471130373],["▁matsayin",-13.983809471130373],["▁nabubuhay",-13.983809471130373],["▁pašvaldību",-13.983809471130373],["▁rozmawia",-13.983809471130373],["▁ursprünglich",-13.983809471130373],["▁įstatymo",-13.983809471130373],["▁Тобто",-13.983809471130373],["▁киселина",-13.983809471130373],["▁обръща",-13.983809471130373],["▁ръководител",-13.983809471130373],["▁דיגיטלי",-13.983809471130373],["▁לחלוטין",-13.983809471130373],["▁آمادگی",-13.983809471130373],["▁اسماعیل",-13.983809471130373],["▁आउँछ",-13.983809471130373],["▁क्यान्सर",-13.983809471130373],["▁ਵਾਪਸ",-13.983809471130373],["▁હેઠળ",-13.983809471130373],["▁ಅಭಿನಯ",-13.983809471130373],["▁ಕಾನೂನು",-13.983809471130373],["▁ತೀವ್ರ",-13.983809471130373],["캡",-13.983809471130373],["ធម៌",-13.983810424804688],["▁múzeum",-13.983810424804688],["▁naissance",-13.983810424804688],["▁saatavilla",-13.983810424804688],["뱅",-13.983810424804688],["▁Celsius",-13.983811378479004],["▁commitment",-13.983811378479004],["▁izmaksas",-13.983811378479004],["▁sudjeloval",-13.983811378479004],["▁அரசாங்க",-13.983811378479004],["▁വകുപ്പ്",-13.983811378479004],["▁Възможно",-13.98381233215332],["▁ترجمہ",-13.983813285827637],["▁nhãn",-13.983814239501951],["▁ನಿರ್ಧಾರ",-13.983814239501951],["ဂၢ",-13.98381519317627],["▁մնալ",-13.98381519317627],["▁nustatyta",-13.983816146850586],["▁фізичних",-13.983816146850586],["Газпром",-13.983817100524902],["▁сопруг",-13.983819007873535],["ذاكرة",-13.983819961547852],["▁Lämna",-13.983819961547852],["▁cuprinde",-13.983821868896484],["▁كېلىپ",-13.983821868896484],["▁പ്രകൃതി",-13.983823776245115],["▁Joomla",-13.983824729919434],["▁sydäme",-13.983824729919434],["▁көрсетеді",-13.983824729919434],["▁Oferujemy",-13.983826637268066],["▁neraka",-13.983827590942385],["▁matukio",-13.9838285446167],["▁барьж",-13.983831405639648],["▁합격",-13.983832359313965],["▁varstvo",-13.98383331298828],["▁Innehåll",-13.983834266662598],["▁zadovoljni",-13.98383617401123],["juusto",-13.983839988708496],["▁xóa",-13.983842849731444],["▁сторінці",-13.983846664428713],["▁Ως",-13.983847618103027],["▁ματς",-13.983847618103027],["▁šalyje",-13.98384952545166],["▁phiền",-13.983851432800291],["▁ՄԱԿ",-13.983851432800291],["تويتر",-13.983854293823242],["最愛",-13.983854293823242
],["▁ордуна",-13.983856201171877],["▁Κίνα",-13.983857154846191],["▁منافس",-13.983857154846191],["▁մինչ",-13.983861923217772],["▁manglende",-13.983863830566406],["▁запрещен",-13.983863830566406],["▁staðfest",-13.98387050628662],["டில்",-13.983877182006836],["▁podnikatel",-13.983881950378418],["kontrakt",-13.983885765075684],["농업",-13.98388671875],["権利",-13.983888626098633],["käytäntö",-13.98388957977295],["▁drygt",-13.983890533447266],["▁Karp",-13.98389720916748],["▁भर्ना",-13.98390007019043],["▁ഉപയോഗിച്ച്",-13.983901977539062],["▁Huwag",-13.983903884887695],["▁traslado",-13.98391342163086],["▁държи",-13.98391342163086],["▁navegación",-13.983914375305176],["個人資料",-13.983930587768556],["▁Qirg",-13.983938217163086],["▁българското",-13.983942031860352],["▁istisna",-13.983943939208984],["طير",-13.983945846557615],["მაგი",-13.9839506149292],["▁Úvodná",-13.983952522277832],["▁ከላይ",-13.983952522277832],["局長",-13.98396110534668],["▁douleur",-13.983969688415527],["▁Hendrik",-13.983972549438477],["▁heilbrigðis",-13.983975410461426],["▁батько",-13.983976364135742],["กําลังใจ",-13.983981132507324],["▁ጉድ",-13.983983993530272],["cycling",-13.983986854553224],["▁البريطانية",-13.98399543762207],["▁sukurta",-13.984013557434082],["▁مربی",-13.984015464782717],["▁моделей",-13.984021186828612],["▁occorre",-13.984024047851562],["▁gabeko",-13.984026908874512],["▁prvak",-13.98403549194336],["▁מחוץ",-13.984036445617676],["မွန္း",-13.984041213989258],["▁Queste",-13.98404312133789],["▁دیروز",-13.984047889709473],["สูบ",-13.984064102172852],["▁Graeci",-13.984071731567385],["sztrál",-13.98408031463623],["▁īsti",-13.984087944030762],["▁ወረዳ",-13.984088897705078],["▁Höf",-13.984101295471191],["▁duša",-13.984101295471191],["ដំណើរការ",-13.984122276306152],["▁Слава",-13.984139442443848],["ได้ที่นี่",-13.984140396118164],["▁Investigación",-13.984140396118164],["▁területen",-13.984149932861328],["▁dialami",-13.98416233062744],["▁lortzeko",-13.984164237976074],["▁nazivom",-13.984167098999023],["することができ",-13.984172821044922],["▁pecado",-13.98418140411377],["监察",-13.984196662902832],["өсөн",-13.984197616577148],["払い",-13.984197616577148],["▁vieux",-13.984198570251465],["▁rough",-13.98420238494873],["▁maître",-13.984210968017578],["hraničn",-13.984232902526855],["▁zakłada",-13.984235763549805],["▁வாழ்வ",-13.984256744384766],["ταξη",-13.984272956848145],["▁chiqadi",-13.98427677154541],["▁inspiratie",-13.984313011169434],["tunne",-13.984315872192385],["▁холодно",-13.984323501586914],["плаща",-13.984329223632812],["▁испита",-13.984331130981444],["▁العهد",-13.984333038330078],["▁доноси",-13.984371185302734],["raamatukogu",-13.984376907348633],["▁Фри",-13.984405517578123],["▁თვალი",-13.9844331741333],["stigi",-13.984460830688477],["▁लेना",-13.984475135803224],["ตาก",-13.98447608947754],["▁morebitn",-13.984477996826172],["▁השאר",-13.984478950500488],["▁pilar",-13.984495162963867],["▁شارع",-13.984502792358398],["RAMA",-13.984519004821776],["▁chciała",-13.984525680541992],["▁Musim",-13.984530448913574],["▁слез",-13.984532356262209],["ของแท้",-13.984557151794434],["良いでしょう",-13.984559059143066],["▁pince",-13.984566688537598],["خيل",-13.984570503234863],["▁የሚያሳ",-13.984582901000977],["▁עלינו",-13.984588623046877],["sköpun",-13.98459529876709],["lėmis",-13.984597206115724],["▁השיר",-13.984631538391112],["店面",-13.984646797180176],["▁Legii",-13.98465347290039],["▁багийн",-13.984657287597656],["▁slang",-13.984688758850098],["▁Instant",-13.984707832336426],["брид",-13.984713554382324],["ографија",-13.98472785949707],["▁subiectul",-13.9847431182861
33],["▁melder",-13.984752655029297],["pood",-13.984777450561523],["කැ",-13.984787940979004],["dawał",-13.98482608795166],["▁frontera",-13.98482894897461],["stigning",-13.984837532043455],["▁поставки",-13.98484230041504],["境界",-13.984846115112305],["correre",-13.984850883483888],["кость",-13.984862327575684],["▁mapy",-13.984891891479492],["都非常",-13.984930038452148],["▁Продава",-13.984935760498049],["▁गर्नेछ",-13.984939575195312],["立足",-13.984943389892578],["Profes",-13.984973907470703],["▁cewar",-13.984977722167969],["gemäß",-13.985007286071776],["▁ट्वीट",-13.985007286071776],["נדה",-13.985008239746094],["▁движење",-13.985018730163574],["ttäminen",-13.985054969787598],["▁நிறை",-13.985062599182127],["▁kreativitet",-13.985074996948242],["ološke",-13.98509407043457],["सम्मको",-13.985097885131836],["▁خلافت",-13.98511791229248],["▁Zoek",-13.98512077331543],["érkezik",-13.985121726989746],["దారుల",-13.98514175415039],["ECO",-13.985156059265137],["▁Bring",-13.985173225402832],["▁законы",-13.98517608642578],["▁Poble",-13.985177040100098],["angebote",-13.985183715820312],["▁ზედ",-13.985187530517578],["▁Punë",-13.985194206237791],["▁vratiti",-13.985198974609377],["▁деталь",-13.985201835632324],["▁Camilla",-13.985220909118652],["NX",-13.985222816467283],["▁אורח",-13.985235214233398],["▁celebrat",-13.985238075256348],["тпай",-13.985239028930664],["▁بأنها",-13.985271453857422],["と言っても",-13.985279083251951],["▁piešķir",-13.985281944274902],["▁resolvi",-13.9852876663208],["▁apei",-13.98529052734375],["▁Lotus",-13.98530101776123],["▁Först",-13.985313415527344],["发达",-13.985318183898926],["▁лежи",-13.985322952270508],["▁Tiden",-13.985350608825684],["▁못하는",-13.98540210723877],["ነሣ",-13.98541259765625],["▁scherp",-13.985478401184082],["事例",-13.985496520996094],["がよく",-13.985501289367676],["始めて",-13.985527992248535],["æfing",-13.985540390014648],["▁prázdnin",-13.985557556152344],["する方法",-13.985559463500977],["▁średnio",-13.985563278198242],["կում",-13.985580444335938],["▁следната",-13.985580444335938],["の皆様",-13.985593795776367],["一向",-13.985597610473633],["▁شروعات",-13.985604286193848],["سىڭىز",-13.985623359680176],["▁Komitet",-13.985628128051758],["AGU",-13.98563003540039],["დებულ",-13.985630989074709],["▁Töl",-13.985644340515137],["▁követő",-13.985651969909668],["▁qarori",-13.985668182373049],["▁Madde",-13.985699653625488],["被称为",-13.985699653625488],["▁ആന",-13.985711097717283],["▁refiere",-13.985724449157717],["ஓ",-13.985727310180664],["▁گیم",-13.985745429992676],["ТЕХ",-13.985754013061523],["▁жағдайын",-13.985773086547852],["▁Statistika",-13.985774040222168],["ಯಂತೆ",-13.985780715942385],["上半年",-13.985830307006836],["▁киргизүү",-13.985840797424316],["效能",-13.985858917236328],["ಮಾಡ",-13.98586368560791],["桃園市",-13.98589038848877],["▁සැල",-13.985894203186035],["২০১৮",-13.985916137695312],["일본",-13.985932350158691],["സിനെ",-13.985939979553224],["팬",-13.98594093322754],["▁үйлчилгээг",-13.985958099365234],["شمار",-13.9859619140625],["▁ґрунт",-13.9859619140625],["▁שטר",-13.985967636108398],["▁nyawa",-13.98597240447998],["ذوق",-13.985980033874512],["ുന്നതു",-13.985981941223145],["գեն",-13.986000061035156],["▁втором",-13.98600959777832],["▁নব",-13.9860200881958],["ြား",-13.986087799072266],["▁açan",-13.986088752746582],["បាក់",-13.986092567443848],["ბია",-13.98609733581543],["▁funkcijo",-13.986102104187012],["stufe",-13.98611068725586],["场上",-13.986113548278809],["diskut",-13.986117362976074],["▁финансова",-13.986157417297363],["▁Gamle",-13.986217498779297],["▁aceptar",-13.986226081848145],["овський",-13.98622798919
6776],["ുവെന്ന്",-13.986231803894045],["▁cómodo",-13.986257553100586],["tubuh",-13.986279487609863],["▁Thala",-13.98629093170166],["▁tecken",-13.986298561096191],["ிருந்து",-13.986299514770508],["▁भीम",-13.98631763458252],["شبك",-13.98632526397705],["فکر",-13.98633098602295],["алното",-13.986357688903809],["ahtaa",-13.986367225646973],["を開始",-13.986371994018556],["▁evropski",-13.98637580871582],["込まれ",-13.98638916015625],["চক",-13.98640251159668],["ได้ใน",-13.986444473266602],["▁പരിചയ",-13.986448287963867],["▁целите",-13.98647403717041],["▁Tần",-13.986475944519045],["▁помогна",-13.986479759216309],["▁Hradec",-13.98649787902832],["▁investigar",-13.9865083694458],["))))))",-13.986536026000977],["важните",-13.986539840698242],["▁Михайло",-13.986560821533203],["▁گلی",-13.986566543579102],["期刊",-13.98657512664795],["ക്ഷന്",-13.98660945892334],["▁میگی",-13.986617088317873],["Кри",-13.986628532409668],["▁ඕනෑ",-13.98663330078125],["pakke",-13.986635208129885],["ബാധ",-13.986641883850098],["ittäin",-13.986650466918944],["దొ",-13.986659049987791],["▁collage",-13.98666000366211],["▁அவள",-13.986674308776855],["ნულ",-13.986701011657717],["▁باکس",-13.986736297607422],["▁święto",-13.986741065979004],["▁sneller",-13.986762046813965],["▁destinati",-13.986775398254396],["增添",-13.986776351928713],["▁бібліотека",-13.986777305603027],["▁оруул",-13.986777305603027],["沸",-13.986777305603027],["宰",-13.98677921295166],["▁이용할",-13.986787796020508],["ናችን",-13.986794471740724],["ぴ",-13.986823081970217],["▁populum",-13.986824035644531],["ຊອກຫາ",-13.98682689666748],["สระว่ายน้ํา",-13.986828804016112],["พาณิชย์",-13.98682975769043],["လြမ္း",-13.98682975769043],["▁маселеси",-13.98682975769043],["στροφ",-13.986830711364746],["▁Lloegr",-13.986830711364746],["▁défense",-13.986830711364746],["▁kategória",-13.986830711364746],["▁kemajuan",-13.986830711364746],["▁noskaidro",-13.986830711364746],["▁působí",-13.986830711364746],["▁tetszett",-13.986830711364746],["▁μακριά",-13.986830711364746],["▁будівництво",-13.986830711364746],["▁зеленчуци",-13.986830711364746],["▁местоположение",-13.986830711364746],["▁освіта",-13.986830711364746],["▁отвечает",-13.986830711364746],["▁промышленности",-13.986830711364746],["▁הבעיה",-13.986830711364746],["▁مقاصد",-13.986830711364746],["▁শতাংশ",-13.986830711364746],["▁નક્કી",-13.986830711364746],["▁ජනවාරි",-13.986830711364746],["▁တရုတ်",-13.986830711364746],["▁დღევანდელ",-13.986830711364746],["▁მიყვარს",-13.986830711364746],["믹",-13.986830711364746],["쌓",-13.986830711364746],["▁njëjtën",-13.986831665039062],["▁reușit",-13.986831665039062],["▁Ötən",-13.986831665039062],["▁غزني",-13.986831665039062],["▁मौके",-13.986831665039062],["▁טלפון",-13.98683261871338],["▁ಸೂಚಿಸ",-13.98683261871338],["ຝຶກ",-13.986834526062012],["▁mawazo",-13.986834526062012],["▁tiyatro",-13.986834526062012],["▁Segurança",-13.986836433410645],["▁φόβο",-13.98683738708496],["▁வளர்ச்சி",-13.986839294433594],["轨道",-13.986839294433594],["နည္းပညာ",-13.986841201782228],["▁Tamam",-13.986841201782228],["▁μεταφορά",-13.986842155456545],["▁dispositivi",-13.98684310913086],["▁실험",-13.98684310913086],["▁Pingback",-13.986844062805176],["▁२०७२",-13.986844062805176],["▁misstänk",-13.986845016479492],["▁азаматтық",-13.98684787750244],["▁الصراع",-13.986848831176758],["▁Muusika",-13.986849784851074],["▁улюблен",-13.98685073852539],["▁рассказать",-13.986852645874023],["▁morreu",-13.986860275268556],["▁Đặt",-13.986863136291504],["RIG",-13.98686408996582],["▁młode",-13.986865043640137],["▁recuerda",-13.986865043640137],["▁رشید",-13.986865997314451],["
▁videregående",-13.98686695098877],["▁allons",-13.986868858337402],["▁Király",-13.986871719360352],["▁dětské",-13.986871719360352],["▁techniczny",-13.986879348754885],["▁обявен",-13.986879348754885],["條例",-13.986879348754885],["▁נעשה",-13.9868803024292],["▁životinja",-13.986882209777832],["▁Circuit",-13.986883163452148],["schläge",-13.986899375915527],["▁بلکې",-13.986899375915527],["▁செய்யவும்",-13.986903190612791],["▁dispozitie",-13.98690414428711],["▁କରିଥିଲା",-13.986908912658691],["▁используются",-13.986910820007324],["▁súkromn",-13.98691177368164],["▁qatnash",-13.986912727355955],["▁Toiminta",-13.986915588378906],["▁ରାତିରେ",-13.986920356750488],["▁transformation",-13.986921310424805],["▁Istor",-13.98692798614502],["skärm",-13.986931800842283],["▁సరే",-13.986932754516602],["▁ഉറക്ക",-13.986932754516602],["테이블",-13.986944198608398],["这句话",-13.986946105957031],["▁gətirib",-13.986953735351562],["▁Bundesliga",-13.986971855163574],["▁πάρα",-13.986974716186523],["▁کھانا",-13.986980438232422],["▁Lakukan",-13.986984252929688],["▁oglaševa",-13.98698616027832],["κόκκ",-13.986987113952637],["कारक",-13.98699188232422],["▁λειτουργεί",-13.987003326416016],["▁հղումը",-13.987005233764648],["%2",-13.98701000213623],["chodzić",-13.987010955810549],["▁покрену",-13.987011909484863],["▁עשרות",-13.987015724182127],["▁Baser",-13.98702335357666],["▁renkler",-13.98703670501709],["▁valuable",-13.987037658691406],["▁підвищен",-13.987037658691406],["ບອກ",-13.987043380737305],["bedömning",-13.987058639526367],["▁Intens",-13.98706340789795],["▁насилие",-13.98706340789795],["գոր",-13.987065315246582],["▁الوسط",-13.987077713012695],["▁Portanto",-13.98708438873291],["▁મૂળ",-13.98709487915039],["▁mazkur",-13.987102508544922],["▁намера",-13.987107276916504],["▁puerto",-13.987115859985352],["取り組み",-13.987127304077148],["берене",-13.98713493347168],["liða",-13.987142562866213],["▁Boom",-13.987142562866213],["▁میکرد",-13.987142562866213],["連れて",-13.987143516540527],["▁пријава",-13.987154006958008],["▁sanction",-13.987154960632324],["▁CONTRA",-13.98715591430664],["лиште",-13.98716926574707],["inām",-13.987171173095703],["▁verbringen",-13.987210273742676],["ბები",-13.987220764160156],["ajuda",-13.98723030090332],["ségben",-13.987231254577637],["▁fatte",-13.98725128173828],["▁معينة",-13.98725414276123],["▁необхідні",-13.987256050109863],["795",-13.987262725830078],["▁APEC",-13.987263679504396],["▁athugasemdir",-13.98727035522461],["▁विकासका",-13.987271308898926],["当今",-13.987284660339355],["▁klassieke",-13.987300872802734],["▁sneg",-13.98730182647705],["▁bilərlər",-13.987302780151367],["▁webside",-13.987310409545898],["▁siente",-13.98733139038086],["sikkerhet",-13.987337112426758],["▁abogado",-13.987342834472656],["ይህ",-13.98735809326172],["忘記",-13.987374305725098],["▁الفوز",-13.98737621307373],["を高め",-13.98738956451416],["▁سالگی",-13.987418174743652],["▁NOK",-13.987425804138184],["▁સમા",-13.987471580505373],["▁ನೀತಿ",-13.987482070922852],["▁hrať",-13.987489700317385],["മാല",-13.98749828338623],["monitor",-13.98750114440918],["▁уряду",-13.987508773803713],["▁kaybetme",-13.98751449584961],["▁Teď",-13.987531661987305],["üdür",-13.98753261566162],["▁odluku",-13.987534523010254],["ファー",-13.987541198730469],["▁המאה",-13.98754596710205],["▁приемат",-13.987547874450684],["▁mlado",-13.987570762634276],["▁رواج",-13.987570762634276],["▁Parco",-13.987577438354492],["▁ödənil",-13.987578392028809],["த்திரை",-13.987587928771973],["ថ្ម",-13.987601280212402],["▁палітычна",-13.987616539001465],["▁Tanggal",-13.987641334533691],["属性",-13.98764801025
3906],["▁желба",-13.987651824951172],["▁njalo",-13.987652778625488],["ведам",-13.987653732299805],["près",-13.987655639648438],["हास",-13.987669944763184],["▁اهميت",-13.987701416015623],["تیس",-13.987752914428713],["▁искусство",-13.98775863647461],["शाली",-13.987768173217772],["førelse",-13.987770080566406],["יאה",-13.987778663635254],["▁отпор",-13.98777961730957],["▁одразу",-13.98778247833252],["▁номын",-13.987787246704102],["▁ಮೂಡ",-13.987820625305176],["▁элементы",-13.98782730102539],["▁दया",-13.98784065246582],["граждан",-13.987847328186035],["▁Юго",-13.98785400390625],["▁ieşi",-13.987869262695312],["▁بفر",-13.987871170043944],["543",-13.987873077392578],["्याची",-13.987878799438477],["言葉を",-13.987890243530272],["▁ähnliche",-13.987908363342283],["▁Civic",-13.987909317016602],["насьці",-13.98791790008545],["▁تیزی",-13.987918853759766],["▁کارشناس",-13.987955093383787],["▁transformación",-13.987970352172852],["▁minema",-13.987974166870115],["▁දකින",-13.98798942565918],["хідні",-13.987992286682127],["մպ",-13.98803997039795],["▁угоди",-13.988043785095217],["▁várja",-13.988049507141112],["▁olikin",-13.98805809020996],["▁থানা",-13.98806095123291],["ESZ",-13.988066673278809],["常规",-13.98807430267334],["▁Membe",-13.988101959228516],["もらいました",-13.98812770843506],["fâ",-13.988164901733398],["▁mafê",-13.988165855407717],["▁манифестација",-13.988194465637209],["▁ሊቀ",-13.98819637298584],["▁długie",-13.988201141357422],["對手",-13.988204002380373],["▁ସର୍",-13.98822021484375],["HAD",-13.988224029541016],["▁కాగా",-13.98823070526123],["▁verbum",-13.988238334655762],["▁говорите",-13.988239288330078],["kání",-13.988245010375977],["▁Deixe",-13.988251686096191],["hverfi",-13.988258361816406],["ָא",-13.988262176513672],["ОДИ",-13.98827838897705],["АНИ",-13.988306999206545],["▁migliora",-13.988308906555176],["▁ಟ್ಯಾ",-13.988314628601074],["▁trekant",-13.988333702087402],["▁мирно",-13.988350868225098],["ประจําวัน",-13.988375663757324],["▁Лей",-13.98838233947754],["▁պայմանագիր",-13.988383293151855],["▁Meghan",-13.988389015197754],["ttivat",-13.988422393798828],["▁Jumalan",-13.988425254821776],["▁हंस",-13.988428115844728],["▁Juris",-13.988457679748535],["▁ਮਿਲੀ",-13.988457679748535],["▁dokonale",-13.98846435546875],["▁nəticəsi",-13.98847198486328],["▁ማንም",-13.988519668579102],["ເອີ",-13.988521575927734],["síðu",-13.988551139831545],["▁kirjoitti",-13.988554000854492],["▁adeiladu",-13.98855972290039],["וריה",-13.988571166992188],["▁highest",-13.988580703735352],["側の",-13.98859405517578],["649",-13.988597869873049],["▁مسیح",-13.988598823547363],["▁Тож",-13.988621711730955],["ятати",-13.988656997680664],["▁молитва",-13.988677024841309],["▁Castelló",-13.988683700561523],["GRAFI",-13.988706588745115],["સાય",-13.988707542419434],["▁gorde",-13.988710403442385],["▁Атлант",-13.988712310791016],["▁ఆసక్తి",-13.988714218139648],["▁mugi",-13.988719940185549],["▁അഞ്ച",-13.988720893859863],["▁تعديل",-13.988725662231444],["▁гама",-13.988754272460938],["▁Pull",-13.98875617980957],["▁מסע",-13.988775253295898],["▁двора",-13.98878288269043],["▁voelen",-13.988789558410645],["▁įdomi",-13.988804817199709],["▁আছেন",-13.9888334274292],["▁hytte",-13.988835334777832],["▁התנ",-13.988847732543944],["▁kabhi",-13.988851547241213],["ствует",-13.988862991333008],["▁ರಚಿಸ",-13.988873481750488],["▁SABA",-13.98888111114502],["▁Предлог",-13.98888111114502],["▁sceni",-13.988887786865234],["யையும்",-13.988893508911133],["▁короб",-13.988893508911133],["▁okazas",-13.988905906677246],["▁рукі",-13.988921165466309],["▁غلطی",-13.988924980163574],["ကင္း",-13.988950
729370115],["▁хэрэглэ",-13.988954544067385],["카이",-13.988966941833496],["▁유리",-13.988969802856444],["▁Fleisch",-13.988984107971191],["rinko",-13.989001274108888],["czytać",-13.989006996154783],["▁клиника",-13.989012718200684],["tapahtuma",-13.989015579223633],["▁وجدت",-13.989026069641112],["fitri",-13.989027976989746],["▁skrit",-13.989033699035645],["▁Rink",-13.989091873168944],["▁њихова",-13.98910140991211],["▁एखाद",-13.98911190032959],["▁neprav",-13.989155769348145],["주기",-13.989157676696776],["空気",-13.989168167114258],["▁Singapur",-13.989169120788574],["ஒரு",-13.98918628692627],["▁լինեն",-13.989190101623535],["▁පින්",-13.989191055297852],["▁лажи",-13.989203453063965],["▁आधा",-13.98920726776123],["▁atrapa",-13.989208221435549],["▁informare",-13.989232063293455],["ଗ୍ରହ",-13.989290237426758],["▁chư",-13.989317893981934],["▁комиссиясы",-13.989319801330566],["▁detectar",-13.989339828491213],["▁کلان",-13.989370346069336],["려면",-13.989391326904297],["▁отац",-13.989441871643066],["▁ymmärtä",-13.989508628845217],["라며",-13.989510536193848],["▁култури",-13.9895601272583],["▁qiyin",-13.989604949951172],["▁باريس",-13.98962116241455],["Интер",-13.989624977111816],["기준",-13.989625930786133],["KÁ",-13.989641189575195],["▁fantastik",-13.989651679992676],["▁છુ",-13.989663124084473],["และไม่",-13.989670753479004],["medic",-13.989676475524902],["▁Makro",-13.989699363708496],["ounded",-13.98971462249756],["ดีที่สุด",-13.98971462249756],["▁نائ",-13.989731788635254],["励",-13.98977279663086],["擋",-13.989791870117188],["飘",-13.989810943603516],["細菌",-13.98981761932373],["孤独",-13.989823341369627],["阁",-13.989823341369627],["▁govoriti",-13.98983097076416],["▁նվիր",-13.98983097076416],["ໃຫ້ການ",-13.989838600158691],["実績",-13.989842414855955],["กระชับ",-13.989855766296388],["ຫມູ່",-13.989856719970703],["▁vítěz",-13.98985767364502],["ផ្សព្វផ្សាយ",-13.989858627319336],["✨",-13.989859580993652],["ВІДЕО",-13.989860534667969],["สิ่งแวดล้อม",-13.989860534667969],["ᅮ",-13.989860534667969],["▁Chọn",-13.989860534667969],["▁Następnie",-13.989860534667969],["▁Pádraig",-13.989860534667969],["▁elkarrizketa",-13.989860534667969],["▁inicjatyw",-13.989860534667969],["▁najwyższy",-13.989860534667969],["▁sarežģī",-13.989860534667969],["▁tampouco",-13.989860534667969],["▁επιλογές",-13.989860534667969],["▁πατρίδα",-13.989860534667969],["▁महसूस",-13.989860534667969],["▁এগিয়ে",-13.989860534667969],["▁ખરેખર",-13.989860534667969],["▁కొందరు",-13.989860534667969],["▁దక్షిణ",-13.989860534667969],["▁బ్యాంకు",-13.989860534667969],["▁තවදුරටත්",-13.989860534667969],["▁መነሻ",-13.989860534667969],["▁가까운",-13.989860534667969],["ରାଷ୍ଟ୍ର",-13.989861488342283],["▁Akdeniz",-13.989861488342283],["▁Chocolate",-13.989861488342283],["▁beskæftige",-13.989861488342283],["▁srijedu",-13.989861488342283],["▁омогућава",-13.989861488342283],["▁осъзна",-13.989861488342283],["▁جغرافی",-13.989861488342283],["▁භාෂාව",-13.989861488342283],["▁carriera",-13.989862442016602],["▁permalink",-13.989862442016602],["▁բարեկամ",-13.989862442016602],["▁၂၀၁၅",-13.989862442016602],["▁आसपास",-13.989863395690918],["▁kupambana",-13.98986530303955],["▁προτιμ",-13.98986530303955],["▁مرڪز",-13.989866256713867],["▁तक्रार",-13.989866256713867],["▁зүтгэ",-13.989867210388184],["▁усяго",-13.989867210388184],["▁vědět",-13.9898681640625],["▁առիթ",-13.9898681640625],["▁balandžio",-13.989870071411133],["▁ناراحت",-13.98987102508545],["▁Здравейте",-13.989871978759766],["▁змены",-13.989871978759766],["▁кампанія",-13.989871978759766],["▁ಸಾಧಿಸ",-13.989871978759766],["▁Nivîs",-13.989872932434082],["▁ru
piah",-13.989874839782717],["▁infancia",-13.989875793457031],["파크",-13.989877700805664],["▁envoyé",-13.989880561828612],["▁αύριο",-13.98988151550293],["▁Hossain",-13.989882469177246],["▁befejez",-13.989887237548828],["▁संशय",-13.98989486694336],["▁mindkét",-13.989895820617676],["▁ھەمدە",-13.989896774291992],["ۈلگەن",-13.989903450012209],["ಷಾ",-13.989904403686523],["နိုင်ပါတယ်။",-13.989907264709473],["fyldt",-13.989908218383787],["▁Murphy",-13.989909172058104],["▁יורק",-13.989909172058104],["▁pomoże",-13.989910125732422],["▁అంటారు",-13.989911079406738],["▁ഉടന",-13.989912033081056],["▁selger",-13.989912986755373],["▁Huduma",-13.989913940429688],["▁rugăm",-13.989913940429688],["▁szoci",-13.989913940429688],["▁românesc",-13.989914894104004],["▁සිදුවූ",-13.98991870880127],["▁Antaŭ",-13.989919662475586],["▁ଦୁଇଟି",-13.989920616149902],["▁gertatzen",-13.98992156982422],["▁захвор",-13.98992156982422],["304",-13.989933013916016],["▁акцыі",-13.989933967590332],["▁condomini",-13.989934921264648],["▁확장",-13.989937782287598],["許多人",-13.989937782287598],["riedade",-13.98993968963623],["哇",-13.98993968963623],["▁dogodki",-13.989947319030762],["▁zieht",-13.989961624145508],["发射",-13.989965438842772],["quart",-13.989967346191406],["▁Χαρ",-13.98996925354004],["▁برپا",-13.989970207214355],["▁dicendum",-13.98997688293457],["▁jantar",-13.98997688293457],["▁Diskuse",-13.989980697631836],["激光",-13.989983558654783],["▁لأي",-13.989984512329102],["▁ዞን",-13.989984512329102],["▁ministerijos",-13.990004539489746],["▁culmina",-13.990009307861328],["▁Senegal",-13.990012168884276],["▁Filistin",-13.990013122558594],["▁Watoto",-13.990015029907228],["itzacions",-13.990019798278809],["▁ставлення",-13.990022659301758],["▁እዩ",-13.990025520324709],["εμα",-13.990028381347656],["ቆጣጠር",-13.990030288696287],["▁فواد",-13.990030288696287],["▁שאפשר",-13.990035057067873],["▁novidade",-13.99006462097168],["在他的",-13.990072250366213],["▁öffentliche",-13.990084648132324],["▁سريع",-13.99009609222412],["▁Hiệu",-13.99010181427002],["قراط",-13.990107536315918],["▁minusta",-13.99011516571045],["▁تېرو",-13.990116119384766],["▁változtat",-13.990126609802246],["▁තහනම්",-13.990129470825195],["▁유형",-13.990135192871094],["▁راضی",-13.990140914916992],["▁میلی",-13.990157127380373],["▁Pascal",-13.990169525146484],["▁دفاعی",-13.990174293518066],["想去",-13.990178108215332],["▁končí",-13.99018096923828],["▁escoles",-13.990196228027344],["▁ରଥ",-13.990200996398926],["mjöl",-13.990204811096191],["▁mogły",-13.990206718444824],["▁መቆ",-13.990215301513672],["▁Никак",-13.990217208862305],["▁الجلد",-13.99021816253662],["čiausi",-13.990220069885254],["▁efikas",-13.990227699279783],["▁Gazetesi",-13.990240097045898],["▁gidan",-13.990248680114746],["▁teknologji",-13.990285873413086],["avtalen",-13.990288734436035],["വുമ",-13.990318298339844],["▁शब्दों",-13.990328788757324],["occupation",-13.990337371826172],["hiana",-13.990338325500488],["▁Район",-13.990344047546388],["종합",-13.990374565124512],["אנק",-13.99037742614746],["yviai",-13.990415573120115],["▁durfte",-13.990423202514648],["▁стаць",-13.990433692932127],["▁Poor",-13.990450859069824],["ၾကြ",-13.99045181274414],["會計",-13.99045753479004],["▁situationer",-13.990469932556152],["yhdistys",-13.990483283996582],["▁उद्धार",-13.99048900604248],["KOT",-13.990490913391112],["istické",-13.99049472808838],["KLI",-13.990503311157228],["राय",-13.990509033203123],["ყურ",-13.990511894226074],["▁Fung",-13.990532875061035],["▁kryetar",-13.990541458129885],["▁regnet",-13.990555763244627],["▁شغلی",-13.99057674407959],["▁ഗുരു",-13.99059
4863891602],["▁Grá",-13.990609169006348],["▁Paš",-13.990618705749512],["仕事の",-13.990625381469728],["անքներ",-13.990641593933104],["▁සාකච්ඡාව",-13.990662574768066],["▁Кле",-13.990665435791016],["1968",-13.99067497253418],["▁വേണ",-13.99067497253418],["▁ministrija",-13.990677833557127],["tillatelse",-13.99068546295166],["▁Hru",-13.990721702575684],["▁Писа",-13.990726470947266],["▁plačila",-13.990736961364746],["▁kuleta",-13.990741729736328],["▁jogszabály",-13.990748405456545],["▁Matematika",-13.990752220153809],["▁potenza",-13.990753173828123],["▁imekuwa",-13.99075698852539],["▁Miloš",-13.990760803222656],["סיקה",-13.990772247314451],["▁நபி",-13.990782737731934],["▁rientra",-13.990796089172363],["▁Erityis",-13.990803718566896],["контрол",-13.990812301635742],["▁پذیری",-13.990819931030272],["▁PSP",-13.990825653076172],["▁நிர",-13.990825653076172],["▁likod",-13.990826606750488],["▁visām",-13.990827560424805],["▁kombinasi",-13.990849494934082],["▁નાની",-13.990893363952637],["לוס",-13.990901947021484],["▁makuu",-13.990942001342772],["OBRA",-13.990948677062988],["یا۔",-13.990959167480469],["ครั้งที่",-13.990965843200684],["မျ",-13.990979194641112],["ナイ",-13.991002082824709],["лбай",-13.99103832244873],["дигин",-13.991074562072754],["σαλ",-13.99107837677002],["▁अटल",-13.991100311279297],["▁otsustas",-13.991103172302246],["▁τρώ",-13.991121292114258],["klein",-13.991148948669434],["▁күбө",-13.9911527633667],["tokia",-13.99117374420166],["टिन",-13.991189002990724],["▁ሊግ",-13.991203308105469],["3-5",-13.991211891174316],["▁qurumları",-13.991214752197266],["高清",-13.99122428894043],["▁تقريب",-13.991226196289062],["ฉลอง",-13.991230010986328],["գնաց",-13.99123191833496],["▁sirri",-13.991233825683594],["▁ప్రొ",-13.991235733032228],["▁тврде",-13.99123764038086],["▁በተጨማሪ",-13.991238594055176],["típus",-13.99124240875244],["▁قیامت",-13.99125862121582],["▁ontvangt",-13.9912748336792],["上网",-13.991275787353516],["ացու",-13.991277694702148],["▁младих",-13.991277694702148],["▁meciul",-13.99128532409668],["အသစ်",-13.991289138793944],["मानस",-13.991291999816896],["холод",-13.991299629211426],["වෙනවා",-13.99132251739502],["▁новага",-13.991323471069336],["009",-13.991337776184082],["▁chocolat",-13.991348266601562],["▁Hinta",-13.991409301757812],["ቀርቡ",-13.99142360687256],["▁Kiva",-13.991429328918455],["▁vejr",-13.991436958312988],["模擬",-13.99143886566162],["▁bì",-13.991446495056152],["▁بلاد",-13.991463661193848],["▁зургаа",-13.991493225097656],["ነገሩ",-13.991504669189451],["▁2-5",-13.9915189743042],["lashgan",-13.991522789001465],["▁_____",-13.99155616760254],["▁открыва",-13.991564750671388],["ುವುದಾಗಿ",-13.9915771484375],["▁motivat",-13.99159336090088],["▁акумул",-13.99160861968994],["แม้",-13.991634368896484],["▁එකත්",-13.991652488708496],["werfen",-13.991658210754396],["▁tydligt",-13.99166202545166],["koosolek",-13.99167251586914],["▁የተባለው",-13.991674423217772],["ことができました",-13.991681098937988],["やめ",-13.991683959960938],["ымыздың",-13.991686820983888],["τούμε",-13.991690635681152],["ถอด",-13.991690635681152],["▁திருப்ப",-13.99169921875],["▁ljubi",-13.991711616516112],["▁djelatnosti",-13.991722106933594],["▁nýjum",-13.991737365722656],["▁불가",-13.991747856140137],["▁opisuje",-13.991777420043944],["▁fsheh",-13.991802215576172],["hojen",-13.991840362548828],["▁combinar",-13.991850852966309],["かというと",-13.99185848236084],["▁PCI",-13.991863250732422],["புர",-13.99187183380127],["stemning",-13.991887092590332],["▁afisa",-13.991888046264648],["▁çalışmalarını",-13.991894721984863],["▁gjelde",-13.99189567565918],["▁подання",-13.
991924285888672],["▁त्यसमा",-13.991929054260254],["▁thảm",-13.99198055267334],["avion",-13.991984367370604],["▁জল",-13.991985321044922],["▁ਕ੍ਰਿ",-13.991987228393556],["ကြိမ်",-13.991997718811035],["▁FULL",-13.99203109741211],["▁Afro",-13.99205207824707],["SOP",-13.992059707641602],["teltiin",-13.9920654296875],["София",-13.992094993591309],["ករណី",-13.992095947265623],["▁հղումներ",-13.992130279541016],["▁redakcija",-13.99213695526123],["▁drevo",-13.992151260375977],["شویی",-13.992164611816406],["▁keluarganya",-13.992172241210938],["女性が",-13.99219799041748],["▁1840",-13.992202758789062],["បុរស",-13.99220371246338],["శాల",-13.992205619812012],["▁mafên",-13.992218017578123],["▁варианты",-13.99221897125244],["▁Glavni",-13.992243766784668],["▁ਦੇਖੋ",-13.992255210876465],["תוך",-13.992257118225098],["▁bigger",-13.99228286743164],["ຂອງຕົນ",-13.992290496826172],["belastning",-13.992327690124512],["▁proporcion",-13.992338180541992],["ლებული",-13.992352485656738],["ignor",-13.992359161376951],["ಡಿಸಿ",-13.992395401000977],["عاصمة",-13.992396354675291],["▁каала",-13.992470741271973],["▁शतक",-13.992484092712402],["▁PROJE",-13.992508888244627],["kriterier",-13.992530822753906],["خطوط",-13.992535591125488],["▁totdat",-13.992557525634766],["▁збору",-13.992565155029297],["▁یورو",-13.99256992340088],["▁cursul",-13.992573738098145],["▁rook",-13.992576599121094],["ერების",-13.99258518218994],["▁rojan",-13.992586135864258],["▁വായിച്ചു",-13.992599487304688],["წია",-13.99260139465332],["▁magnez",-13.992602348327637],["▁clare",-13.992618560791016],["▁takhle",-13.99262237548828],["제로",-13.992646217346191],["▁мөөнөт",-13.992650985717772],["IJI",-13.992656707763672],["▁gezellige",-13.992658615112305],["▁Konsep",-13.992668151855469],["▁abajo",-13.992668151855469],["2,00",-13.992669105529783],["▁indicato",-13.992679595947266],["ସ୍ୱ",-13.992688179016112],["441",-13.99268913269043],["▁mahsulotlari",-13.992706298828123],["▁religious",-13.992716789245604],["▁kendu",-13.992775917053224],["▁tät",-13.99278736114502],["▁кирген",-13.992792129516602],["مته",-13.99282455444336],["充分发挥",-13.992828369140623],["▁κομμάτι",-13.992844581604004],["矢",-13.992847442626951],["542",-13.992849349975586],["շխ",-13.992850303649902],["▁вызывает",-13.992865562438965],["休闲",-13.992870330810549],["販售",-13.992873191833496],["难题",-13.992874145507812],["特斯拉",-13.992876052856444],["tafuta",-13.992877960205078],["貰",-13.992881774902344],["倫敦",-13.992883682250977],["厳しい",-13.992886543273926],["ကမ္ဘာ",-13.992897987365724],["อักเสบ",-13.99289894104004],["מוזיאון",-13.992899894714355],["ጽሑፍ",-13.992899894714355],["▁Ecuador",-13.992899894714355],["▁Koonfur",-13.992899894714355],["▁Varaždin",-13.992899894714355],["▁annorlunda",-13.992899894714355],["▁corrupción",-13.992899894714355],["▁efnahags",-13.992899894714355],["▁girêdayî",-13.992899894714355],["▁jellegű",-13.992899894714355],["▁mangayayat",-13.992899894714355],["▁ugonjwa",-13.992899894714355],["▁viðkomandi",-13.992899894714355],["▁Υπάρχουν",-13.992899894714355],["▁βρέθηκε",-13.992899894714355],["▁Всъщност",-13.992899894714355],["▁дөрвөн",-13.992899894714355],["▁нэмэлт",-13.992899894714355],["▁урахуванням",-13.992899894714355],["▁інфармацыя",-13.992899894714355],["▁مصطفی",-13.992899894714355],["▁هېڅ",-13.992899894714355],["▁डिजाइन",-13.992899894714355],["▁শান্তি",-13.992899894714355],["▁ବିଧାନସଭା",-13.992899894714355],["▁ಸಂಘಟನೆ",-13.992899894714355],["▁උළෙල",-13.992899894714355],["▁පිලිබඳ",-13.992899894714355],["▁အဲဒါ",-13.992899894714355],["숲",-13.992899894714355],["ፔ",-13.992900848388672],["▁Europoje",
-13.992900848388672],["▁xidhiidh",-13.992900848388672],["▁zamestnancov",-13.992900848388672],["▁Χρησιμοποι",-13.992900848388672],["▁Статья",-13.992900848388672],["▁апсолут",-13.992900848388672],["▁خواننده",-13.992900848388672],["▁टाइप",-13.992900848388672],["▁ನರೇಂದ್ರ",-13.992900848388672],["▁อย่างไรก็ตาม",-13.992900848388672],["▁protocolo",-13.992901802062988],["▁ଏଥିପାଇଁ",-13.992901802062988],["നാരായണ",-13.992902755737305],["▁sahələri",-13.99290370941162],["▁simpelthen",-13.99290370941162],["▁одночасно",-13.99290370941162],["▁Август",-13.992904663085938],["Vorsitzende",-13.992905616760254],["▁Навіны",-13.992905616760254],["▁viwanja",-13.99290657043457],["▁Internasional",-13.992907524108888],["▁Miklós",-13.992907524108888],["▁konferencji",-13.992907524108888],["▁පන්තිය",-13.992908477783203],["▁importància",-13.992911338806152],["▁списание",-13.992912292480469],["▁կարճ",-13.992912292480469],["อิสระ",-13.992913246154783],["▁zerrenda",-13.992916107177734],["▁Vakfı",-13.99292278289795],["צלם",-13.992924690246582],["▁seachad",-13.992925643920898],["▁Drink",-13.992926597595217],["▁vroeë",-13.992926597595217],["▁Programul",-13.99293041229248],["▁Trưởng",-13.99293041229248],["▁একটু",-13.992931365966797],["പ്പോഴും",-13.992932319641112],["▁ලඟ",-13.992934226989746],["▁그동안",-13.99294090270996],["▁እግር",-13.992942810058594],["בטיחות",-13.99294376373291],["▁부탁",-13.99294376373291],["▁Театр",-13.992945671081545],["▁تعالي",-13.992950439453123],["▁kérjük",-13.99295139312744],["▁الهواء",-13.99295139312744],["▁appunto",-13.992952346801758],["▁Supremo",-13.992959022521973],["演唱會",-13.992959976196287],["▁נכתב",-13.992960929870604],["两会",-13.992964744567873],["▁последици",-13.992969512939451],["▁ღვინო",-13.992976188659668],["សំរាប់",-13.992979049682615],["▁കഴിയും",-13.99298095703125],["▁المحيط",-13.992982864379885],["即使是",-13.992984771728516],["▁sesudah",-13.992990493774414],["▁ഒരാള്",-13.992997169494627],["カット",-13.993000984191896],["▁instructions",-13.993001937866213],["▁размещения",-13.993005752563477],["▁चाहती",-13.993009567260742],["▁zdravlja",-13.993019104003906],["▁মামলার",-13.993020057678224],["米兰",-13.993023872375488],["▁পথে",-13.993026733398438],["▁колькі",-13.993034362792969],["kår",-13.993040084838867],["မွေး",-13.993046760559082],["▁Kailangan",-13.993056297302246],["特價",-13.993060111999512],["▁Pyaar",-13.993067741394045],["ominaisuuksi",-13.99306869506836],["▁שאינו",-13.99306869506836],["▁keessa",-13.993070602416992],["▁حياتي",-13.993081092834473],["▁trägt",-13.993083953857422],["▁kromě",-13.993097305297852],["▁kaldt",-13.993122100830078],["▁celkové",-13.99312686920166],["▁بٹ",-13.993162155151367],["▁dispute",-13.993167877197266],["राख",-13.993173599243164],["▁Madhya",-13.993178367614746],["πραγματοποιήθηκε",-13.993186950683594],["▁ogromny",-13.99320125579834],["▁Motorola",-13.993208885192873],["▁elää",-13.99321746826172],["▁أساسي",-13.99322509765625],["▁Gestion",-13.993231773376465],["▁fogalmaz",-13.993239402770996],["▁מגיעים",-13.993247985839844],["▁tarım",-13.99326515197754],["▁էջը",-13.993279457092283],["▁entirely",-13.993282318115234],["▁úkol",-13.993282318115234],["资助",-13.993284225463867],["▁நினைக்க",-13.99328899383545],["하려고",-13.99328899383545],["▁Чаму",-13.993292808532717],["ที่เขา",-13.993295669555664],["লায়",-13.993311882019045],["meldung",-13.993321418762209],["riješ",-13.993334770202637],["ቁም",-13.993345260620115],["▁جبران",-13.993378639221191],["】【",-13.993380546569824],["▁ցուցադր",-13.99338722229004],["▁Zeland",-13.993388175964355],["的指导",-13.993410110473633],["▁সকালে",-13.99342060089
1112],["▁niczym",-13.993432998657228],["▁izteikt",-13.99344253540039],["▁ଅନ",-13.993444442749023],["▁Torne",-13.993448257446287],["▁طاهر",-13.993452072143556],["▁Miljö",-13.993457794189451],["▁Walking",-13.99347686767578],["▁νησί",-13.993494033813477],["▁glut",-13.993507385253906],["வதி",-13.99351406097412],["مسرح",-13.993517875671388],["▁tasvir",-13.993518829345703],["▁sinergi",-13.99351978302002],["▁προκαλεί",-13.993526458740234],["▁ഏതു",-13.9935302734375],["参考にして",-13.993541717529297],["schütz",-13.99356460571289],["▁правду",-13.993571281433104],["හාර",-13.993579864501951],["્લા",-13.993609428405762],["▁मध्यम",-13.993622779846191],["精密",-13.993630409240724],["▁οικο",-13.993645668029783],["жната",-13.993680953979492],["▁бүтээгдэхүүний",-13.993681907653809],["sjónvarp",-13.993685722351074],["▁paprik",-13.993685722351074],["โดยไม่ต้อง",-13.993690490722656],["▁בבני",-13.993701934814451],["事儿",-13.993706703186035],["ಲಯ",-13.99373722076416],["▁deneme",-13.993754386901855],["这一切",-13.993770599365234],["▁қуат",-13.993799209594728],["低い",-13.993809700012209],["▁Поповић",-13.993814468383787],["▁любы",-13.993864059448242],["▁Енді",-13.99386978149414],["这个世界",-13.99387264251709],["ଡୁ",-13.993873596191406],["▁geef",-13.993888854980469],["▁restauranger",-13.993921279907228],["letí",-13.993925094604492],["▁निज",-13.993968963623049],["▁trdi",-13.993993759155272],["說話",-13.994016647338867],["використовуються",-13.994023323059082],["▁করছি",-13.994030952453612],["▁Hypo",-13.994044303894045],["▁සති",-13.99405574798584],["▁Път",-13.994059562683104],["▁Trygg",-13.994065284729004],["▁መቀ",-13.99407196044922],["▁Rasmus",-13.9940767288208],["▁divisa",-13.994077682495115],["必需",-13.994078636169434],["බර්",-13.99412441253662],["色情",-13.99412441253662],["▁Abram",-13.994144439697266],["▁Բացի",-13.994153022766112],["ไม่ดี",-13.99415397644043],["ਕਰਮ",-13.994155883789062],["▁Änderungen",-13.994163513183594],["▁нәтижесі",-13.994179725646973],["▁igande",-13.994203567504885],["▁[#",-13.994205474853516],["▁aloe",-13.994205474853516],["គ្មាន",-13.994234085083008],["▁Mondo",-13.99423885345459],["▁პროდუქტი",-13.9942626953125],["▁అడ",-13.994318962097168],["cīgi",-13.994352340698242],["દન",-13.994352340698242],["▁కాం",-13.994427680969238],["▁manji",-13.994449615478516],["▁बाब",-13.994449615478516],["▁ארה",-13.99445915222168],["▁omavahel",-13.99446964263916],["▁Alaska",-13.994500160217283],["▁broses",-13.994536399841309],["▁financer",-13.9945650100708],["ສິ່ງທີ່",-13.994568824768066],["服裝",-13.99457836151123],["▁urme",-13.99458122253418],["▁Βου",-13.99459171295166],["▁Score",-13.994600296020508],["ოდნენ",-13.994604110717772],["▁אלפי",-13.994620323181152],["ETER",-13.994638442993164],["Afrique",-13.99464225769043],["▁үх",-13.99465560913086],["шева",-13.994678497314451],["▁повторя",-13.99467945098877],["እን",-13.9946870803833],["govern",-13.994709968566896],["▁permission",-13.99471664428711],["လိုက္ပါ",-13.994720458984377],["▁conectar",-13.994731903076172],["▁dinasti",-13.99473476409912],["▁વિસ્તાર",-13.99475383758545],["▁විදිය",-13.994757652282717],["▁strategisk",-13.994758605957031],["КОЛ",-13.994769096374512],["ượt",-13.994771003723145],["ริน",-13.994771003723145],["法令",-13.994773864746094],["անյան",-13.994806289672852],["▁ചടങ്ങ",-13.994810104370115],["▁Horoskop",-13.99481201171875],["▁барат",-13.994816780090332],["▁haluan",-13.994824409484863],["▁надає",-13.994836807250977],["▁Hosting",-13.994874954223633],["च्य",-13.994895935058594],["vietė",-13.99489688873291],["▁Cinc",-13.99489974975586],["तौ",-13.994925498962402],["มาก่อน",-
13.99496364593506],["ציב",-13.994975090026855],["▁جوړه",-13.994985580444336],["कप",-13.994990348815918],["▁Klan",-13.995018005371094],["瞬",-13.99502182006836],["▁habitant",-13.995031356811523],["isyyden",-13.99504280090332],["▁пән",-13.995052337646484],["ወያኔ",-13.995078086853027],["тычны",-13.99510383605957],["▁رحمان",-13.99510383605957],["ήτρια",-13.995107650756836],["▁فراوان",-13.995118141174316],["▁gã",-13.99512004852295],["stecken",-13.995129585266112],["▁dziedzin",-13.995132446289062],["Кө",-13.995187759399414],["▁почео",-13.995192527770996],["▁කිහිපයක",-13.995216369628906],["کنندگان",-13.99522590637207],["ధీ",-13.995262145996094],["עבוד",-13.99526596069336],["▁drejtësi",-13.995269775390623],["▁mikäli",-13.995295524597168],["▁rywal",-13.995338439941406],["ब्रह्म",-13.995353698730469],["▁دکتری",-13.995354652404783],["▁давайте",-13.995357513427734],["økonomi",-13.995360374450684],["▁ΠΟΥ",-13.995361328125],["▁каштоўна",-13.99545669555664],["läst",-13.995463371276855],["▁vzhľad",-13.995469093322754],["我又",-13.995471000671388],["continuare",-13.9954833984375],["២៤",-13.995524406433104],["▁syvä",-13.995572090148926],["▁trajno",-13.995582580566406],["kasvu",-13.99558925628662],["szió",-13.995617866516112],["进球",-13.995641708374023],["ಂದ್ರ",-13.995656967163086],["▁Груп",-13.995670318603516],["лайды",-13.995684623718262],["▁występują",-13.995701789855955],["▁underbara",-13.99570369720459],["▁umferð",-13.995716094970703],["▁полос",-13.995722770690918],["▁जरुर",-13.995725631713867],["адзілі",-13.995733261108398],["තොට",-13.995744705200195],["זער",-13.995768547058104],["▁автоматически",-13.99577522277832],["▁පවුල",-13.995779991149902],["շի",-13.99579906463623],["▁големо",-13.995813369750977],["▁текстил",-13.99582290649414],["▁spremenil",-13.995826721191406],["▁burra",-13.99583339691162],["öldum",-13.995834350585938],["▁спожива",-13.995866775512695],["奉献",-13.995869636535645],["▁ajude",-13.995879173278809],["厦门",-13.99589729309082],["▁невероятн",-13.99590015411377],["▁szülők",-13.99590301513672],["淘宝",-13.99590301513672],["标签",-13.9959077835083],["熙",-13.995911598205566],["▁júna",-13.995912551879885],["냥",-13.995917320251465],["湛",-13.995924949645996],["لەن",-13.995941162109377],["▁цялост",-13.99594497680664],["กําแพง",-13.995945930480955],["ยาเสพติด",-13.995946884155272],["▁важным",-13.995946884155272],["វិនិយោគ",-13.99594783782959],["▁устанавливает",-13.99594783782959],["ចាប់ផ្តើម",-13.995948791503906],["▁PERATURAN",-13.995948791503906],["▁Verständnis",-13.995948791503906],["▁artêşa",-13.995948791503906],["▁dłużej",-13.995948791503906],["▁menyambut",-13.995948791503906],["▁mygtuką",-13.995948791503906],["▁názvom",-13.995948791503906],["▁pośrednictwem",-13.995948791503906],["▁převzetí",-13.995948791503906],["▁skelbimai",-13.995948791503906],["▁susceptible",-13.995948791503906],["▁veřejné",-13.995948791503906],["▁žrtve",-13.995948791503906],["▁εταιρείες",-13.995948791503906],["▁мишљење",-13.995948791503906],["▁мусульман",-13.995948791503906],["▁небольшой",-13.995948791503906],["▁тръгна",-13.995948791503906],["▁эхэллээ",-13.995948791503906],["▁հիշատակ",-13.995948791503906],["▁مخابرات",-13.995948791503906],["▁ਗੁਣਵੱਤਾ",-13.995948791503906],["▁అనంతరం",-13.995948791503906],["▁ಒಮ್ಮೆ",-13.995948791503906],["▁කර්මාන්ත",-13.995948791503906],["▁სამზარეულო",-13.995948791503906],["▁უფლებები",-13.995948791503906],["▁የህዝብ",-13.995948791503906],["ብሄር",-13.995949745178224],["▁geboorte",-13.995949745178224],["▁trại",-13.995949745178224],["▁өнүгүү",-13.995949745178224],["▁ئائىلى",-13.995949745178224],["▁இணையதள",-13.
995949745178224],["▁တ႐ုတ္",-13.995949745178224],["▁პოზიცია",-13.995949745178224],["▁ምርጥ",-13.995949745178224],["研討會",-13.995949745178224],["▁Labem",-13.99595069885254],["▁juhudi",-13.99595069885254],["皱",-13.99595069885254],["▁hayırlı",-13.995951652526855],["▁הגבוה",-13.995951652526855],["▁Rewards",-13.995952606201172],["▁merveille",-13.995952606201172],["▁valamilyen",-13.995952606201172],["▁έστω",-13.995952606201172],["▁जिल्ह्यातील",-13.995952606201172],["▁व्याख्या",-13.995954513549805],["▁کھڑے",-13.99595546722412],["▁vigoare",-13.995956420898438],["▁жайгаштыр",-13.995957374572754],["▁цагаан",-13.99595832824707],["▁развитии",-13.995959281921388],["لىدۇ",-13.99596118927002],["▁digitális",-13.99596118927002],["▁احداث",-13.995964050292969],["▁இருந்தாலும்",-13.995964050292969],["▁предприема",-13.995965003967283],["▁wykończ",-13.99596881866455],["▁ट्रेड",-13.99596881866455],["▁ምግብ",-13.99596881866455],["▁చెప్త",-13.995969772338867],["▁våt",-13.995973587036133],["▁особистості",-13.995977401733398],["▁خدش",-13.995978355407717],["▁әкімінің",-13.99598789215088],["▁אהבתי",-13.995994567871094],["▁Хоёр",-13.995997428894045],["▁فرماتے",-13.995997428894045],["ізацыя",-13.995999336242676],["▁terrorçu",-13.996000289916992],["せない",-13.996000289916992],["фикации",-13.99600315093994],["▁хувийг",-13.99600601196289],["▁ఇంటికి",-13.996009826660156],["▁Dantza",-13.996012687683104],["▁ತಿಳಿದ",-13.996012687683104],["▁Trẻ",-13.996016502380373],["▁Услуг",-13.996017456054688],["ጥቁር",-13.99601936340332],["▁divorce",-13.996023178100586],["外表",-13.996026992797852],["▁लोकांना",-13.9960298538208],["零件",-13.9960298538208],["▁ଆପଣଙ୍କୁ",-13.996033668518066],["▁ټولنه",-13.996039390563965],["▁തുടങ്ങിയവ",-13.99604320526123],["▁exagera",-13.996045112609863],["▁uuringu",-13.99604606628418],["▁salarii",-13.996048927307127],["▁singurul",-13.996053695678713],["▁najwyższej",-13.996054649353027],["▁العصر",-13.99605655670166],["▁ھالدا",-13.996058464050291],["▁konyha",-13.996061325073242],["ห้า",-13.99606704711914],["▁Sosiaali",-13.99606704711914],["▁decizia",-13.996068000793455],["▁الشمالية",-13.996094703674316],["▁intrebare",-13.996098518371582],["▁любую",-13.99610424041748],["▁texture",-13.996108055114746],["qiniso",-13.996139526367188],["▁cărţi",-13.996145248413086],["▁ошибки",-13.99614715576172],["▁tipologia",-13.996155738830566],["▁godzinach",-13.996170997619627],["▁აწ",-13.996175765991213],["▁зможе",-13.99617862701416],["▁خبردار",-13.996186256408691],["bên",-13.996198654174805],["▁competitive",-13.99620246887207],["лёг",-13.996204376220703],["▁лікарні",-13.99620532989502],["บางส่วน",-13.996206283569336],["▁lugeja",-13.996217727661133],["▁átlag",-13.996217727661133],["▁regulamin",-13.996219635009766],["▁viking",-13.996219635009766],["▁የደረሰ",-13.996222496032717],["▁tivesse",-13.996260643005373],["▁afirmar",-13.996264457702637],["▁nyújtott",-13.996268272399902],["▁تبدأ",-13.996273040771484],["▁Bezirk",-13.996275901794434],["arribada",-13.996281623840332],["▁pomôže",-13.996283531188965],["▁گروہ",-13.99630641937256],["的房子",-13.99630641937256],["▁galerii",-13.996308326721191],["▁هلته",-13.99631118774414],["▁vỏ",-13.996329307556152],["მოყვ",-13.996344566345217],["▁Complete",-13.996345520019531],["▁ποιος",-13.996356010437012],["لەي",-13.996357917785645],["▁technický",-13.996368408203123],["▁можешь",-13.99637508392334],["▁kekasih",-13.996382713317873],["▁končno",-13.99638843536377],["ৰু",-13.996402740478516],["▁одлично",-13.996416091918944],["KUL",-13.996418952941896],["ডার",-13.996423721313477],["чкой",-13.996427536010742],["ရံု",-13.99643
1350708008],["ριών",-13.99648094177246],["ிருக்கிற",-13.99648666381836],["▁memesan",-13.996503829956056],["合わせて",-13.996508598327637],["▁ترین۔",-13.996529579162598],["▁استقرار",-13.996545791625977],["▁tuottaa",-13.996567726135254],["iếc",-13.99657917022705],["▁javnog",-13.996580123901367],["▁біреу",-13.996582984924316],["gunstig",-13.996607780456545],["мобиль",-13.996610641479492],["▁прадмет",-13.996618270874023],["ದೊ",-13.996627807617188],["▁урт",-13.99663543701172],["一副",-13.996642112731934],["အသုံး",-13.996644020080566],["▁hukm",-13.996660232543944],["▁გულის",-13.996661186218262],["▁rechter",-13.996665954589844],["▁شخصیات",-13.996676445007324],["genoemde",-13.996678352355955],["งง",-13.99670696258545],["▁2600",-13.996712684631348],["▁léi",-13.996713638305664],["▁ათასი",-13.996719360351562],["kandidat",-13.996721267700195],["激烈的",-13.996729850769045],["▁преимуществ",-13.996732711791992],["▁showed",-13.996745109558104],["▁Lån",-13.996749877929688],["▁henvendelse",-13.996750831604004],["▁gyvyb",-13.996771812438965],["▁찾는",-13.996800422668455],["▁మార్గ",-13.996840476989746],["▁Križ",-13.996843338012695],["▁STOR",-13.996844291687012],["▁vətəndaşların",-13.996853828430176],["▁Harrison",-13.996867179870604],["ıyorlar",-13.99689483642578],["▁лежит",-13.996896743774414],["ቀርብ",-13.996908187866213],["▁hovedet",-13.99691104888916],["▁smuča",-13.9969482421875],["街上",-13.996960639953612],["ισμών",-13.996963500976562],["▁решење",-13.99698257446289],["▁plantilla",-13.996994972229004],["▁Stoga",-13.997023582458496],["▁сачува",-13.997027397155762],["ელებს",-13.997029304504396],["▁создаде",-13.99704360961914],["Zİ",-13.997065544128418],["▁Бет",-13.997065544128418],["ເທີ",-13.997081756591797],["▁ფილმ",-13.997081756591797],["▁lumala",-13.997085571289062],["▁Siswa",-13.997096061706545],["▁відносини",-13.997096061706545],["▁tomato",-13.997099876403809],["konkurenčn",-13.997105598449709],["MINA",-13.997110366821287],["ผง",-13.9971342086792],["mondás",-13.99713897705078],["▁πάνε",-13.99715518951416],["▁يقت",-13.997162818908691],["▁способны",-13.997174263000488],["▁reset",-13.997198104858398],["▁ਸਹਿ",-13.99720287322998],["▁respeta",-13.997220039367676],["▁միակ",-13.997222900390623],["▁soubory",-13.997224807739258],["▁проекты",-13.997246742248535],["▁$10",-13.997262001037598],["798",-13.997269630432127],["▁tavoin",-13.997282028198242],["From",-13.997294425964355],["რეზ",-13.997304916381836],["▁atzīmē",-13.997306823730469],["ที่นั่ง",-13.997310638427734],["Azadlıq",-13.997313499450684],["場地",-13.997336387634276],["анасы",-13.997339248657228],["▁imenom",-13.997386932373049],["▁പങ്ക",-13.99739933013916],["▁புரிய",-13.99741554260254],["SKOLA",-13.99742317199707],["▁තිබුනා",-13.997429847717283],["▁బ్రా",-13.997442245483398],["▁meriv",-13.99747371673584],["isipan",-13.997488021850586],["pisali",-13.997488975524902],["▁passager",-13.997516632080078],["▁imirt",-13.997538566589355],["▁گیاهان",-13.997540473937988],["▁ავტორ",-13.997550964355469],["▁waliba",-13.997576713562012],["ဖြဲ႔",-13.997596740722656],["afgift",-13.997621536254885],["▁некое",-13.997638702392578],["▁Može",-13.99764633178711],["▁আক",-13.997684478759766],["▁впровадження",-13.997699737548828],["szállítás",-13.997714042663574],["▁limitado",-13.997724533081056],["가스",-13.997735023498535],["WF",-13.997762680053713],["rensning",-13.997764587402344],["▁tranzit",-13.99777603149414],["ग्री",-13.997783660888672],["▁τρίτο",-13.997787475585938],["▁богати",-13.997794151306152],["▁դեպք",-13.997809410095217],["ργη",-13.997817039489746],["▁հասկանալ",-13.997824668884276],
["ုန်",-13.99783420562744],["▁ਲੱਗੇ",-13.997841835021973],["发布会",-13.997859954833984],["1973",-13.997861862182615],["▁besterik",-13.997868537902832],["▁akıl",-13.99787425994873],["▁marito",-13.997876167297363],["▁seçme",-13.997883796691896],["▁կյանքը",-13.997884750366213],["არჩენ",-13.99789810180664],["▁ترور",-13.997974395751951],["▁Թա",-13.997991561889648],["▁bangga",-13.997997283935549],["▁ministrul",-13.998014450073242],["▁کشیده",-13.99803638458252],["▁طلاب",-13.998043060302734],["▁látok",-13.998059272766112],["を始め",-13.998101234436035],["ουτ",-13.99813747406006],["▁Yhteis",-13.99813747406006],["ШТА",-13.998176574707031],["Ά",-13.99818992614746],["نڊي",-13.99819564819336],["▁বাড়া",-13.998205184936523],["ገነባ",-13.998208999633787],["▁rendit",-13.998212814331056],["▁младе",-13.998239517211914],["יקות",-13.99825668334961],["ειά",-13.99827480316162],["ketaren",-13.998284339904783],["izbový",-13.998286247253418],["экономикалық",-13.998315811157228],["인터넷",-13.998315811157228],["▁galā",-13.998326301574709],["放大",-13.998326301574709],["▁fyw",-13.998346328735352],["KTOR",-13.998347282409668],["▁ularga",-13.99835205078125],["▁ülkemizde",-13.998357772827148],["լիս",-13.998358726501465],["միտ",-13.998374938964844],["▁rational",-13.99838924407959],["▁любить",-13.998393058776855],["▁войни",-13.998405456542969],["utmaning",-13.998409271240234],["今後の",-13.998416900634766],["▁planteja",-13.998431205749512],["▁ക്യാമ്പ",-13.998433113098145],["▁informatik",-13.998441696166992],["种植",-13.998453140258787],["国土",-13.998491287231444],["▁poprawia",-13.99850368499756],["▁מרכזי",-13.998512268066406],["▁ralli",-13.998529434204102],["しまって",-13.998531341552734],["dengan",-13.998558044433594],["höll",-13.998604774475098],["כוונת",-13.998611450195312],["▁obrt",-13.998653411865234],["▁எரி",-13.998664855957031],["явиться",-13.998684883117676],["▁risponde",-13.998687744140623],["▁Spanje",-13.998709678649902],["ກາຍ",-13.998729705810549],["▁kilometrov",-13.998738288879396],["צוות",-13.998744010925291],["▁Skot",-13.998745918273926],["▁dəstə",-13.998750686645508],["dejting",-13.998756408691406],["ਰਾਮ",-13.998772621154783],["▁híres",-13.998783111572266],["제가",-13.998807907104492],["▁skjedde",-13.998848915100098],["bourg",-13.998897552490234],["▁Çocuklar",-13.998908996582031],["ठो",-13.998910903930664],["▁Sollten",-13.998915672302246],["ኋላ",-13.998924255371094],["899",-13.998931884765623],["▁polskie",-13.998940467834473],["陆续",-13.998943328857422],["▁гени",-13.998955726623535],["缓解",-13.998969078063965],["▁İnstitutu",-13.998985290527344],["септ",-13.998990058898926],["ADOR",-13.999001502990724],["밝",-13.999005317687988],["▁soyqırımı",-13.999006271362305],["▁ସପ୍ତାହ",-13.999006271362305],["▁ေဒၚေအာင္ဆန္းစု",-13.999006271362305],["ความอ่อนแอ",-13.99900722503662],["ຕື່ມ",-13.99900722503662],["ዣ",-13.99900722503662],["Ừ",-13.99900722503662],["▁Comisiei",-13.99900722503662],["▁Kategória",-13.99900722503662],["▁Szolgáltató",-13.99900722503662],["▁Tuairisc",-13.99900722503662],["▁attīstību",-13.99900722503662],["▁berkunjung",-13.99900722503662],["▁bələdiyyə",-13.99900722503662],["▁costruire",-13.99900722503662],["▁dhimbje",-13.99900722503662],["▁embarazo",-13.99900722503662],["▁forelsket",-13.99900722503662],["▁gjashtë",-13.99900722503662],["▁krepšinio",-13.99900722503662],["▁przechowywan",-13.99900722503662],["▁sgeulachd",-13.99900722503662],["▁επιπλέον",-13.99900722503662],["▁παροχή",-13.99900722503662],["▁Бясплатны",-13.99900722503662],["▁контейнер",-13.99900722503662],["▁обслуживания",-13.99900722503662],["▁ռեժիմ",-13.99900722503662
],["▁בחינם",-13.99900722503662],["▁ביחד",-13.99900722503662],["▁سرانجام",-13.99900722503662],["▁فیسبوک",-13.99900722503662],["▁پارلیمان",-13.99900722503662],["▁નોકરી",-13.99900722503662],["▁විදෙස්",-13.99900722503662],["▁lahaayeen",-13.999008178710938],["▁oprindelig",-13.999008178710938],["▁tråkig",-13.999008178710938],["넥",-13.999008178710938],["ิ้ล",-13.999009132385254],["▁tanggungjawab",-13.999009132385254],["▁ισχύει",-13.999009132385254],["▁ұжым",-13.999009132385254],["▁Semarang",-13.99901008605957],["▁seirbhísí",-13.999011039733888],["▁ашиглалт",-13.999011039733888],["▁قوانين",-13.999011039733888],["▁وفاقي",-13.999011039733888],["▁২০১৪",-13.999011039733888],["▁paigalda",-13.999011993408203],["▁Рэспубліка",-13.999011993408203],["▁kunjungan",-13.99901294708252],["▁операций",-13.999013900756836],["▁ಹಿನ್ನೆಲೆಯಲ್ಲಿ",-13.999014854431152],["ሰማያዊ",-13.999015808105469],["▁شیمیایی",-13.999015808105469],["▁ವೆಬ್",-13.999015808105469],["▁итогам",-13.999017715454102],["bahoaka",-13.999019622802734],["▁Ježiš",-13.999019622802734],["▁مستويات",-13.99902057647705],["▁కాపాడ",-13.99902057647705],["▁urdhër",-13.999022483825684],["▁Zuckerberg",-13.999024391174316],["▁uşor",-13.99902629852295],["▁мынадай",-13.999028205871582],["Փաստ",-13.999031066894531],["เดรส",-13.999031066894531],["▁típica",-13.999031066894531],["punto",-13.999032020568848],["▁Потім",-13.999032020568848],["▁həcmi",-13.99903392791748],["▁πήρα",-13.99903392791748],["マーク",-13.99903392791748],["▁ایستاد",-13.999035835266112],["▁rujukan",-13.99903678894043],["▁Gesù",-13.999042510986328],["දිවයින",-13.999043464660645],["单独",-13.99904441833496],["▁efterfølgende",-13.999046325683594],["▁Васіль",-13.99904727935791],["▁offiziell",-13.99905014038086],["▁Вообще",-13.999052047729492],["ergebnis",-13.999053001403809],["▁stāsta",-13.999053001403809],["▁очигледно",-13.999075889587402],["▁řadu",-13.999082565307615],["ಲ್ಲದೆ",-13.99908447265625],["▁עבודות",-13.999088287353516],["▁ගරු",-13.999088287353516],["▁Ulusal",-13.99909210205078],["▁ወይስ",-13.99909496307373],["รถไฟฟ้า",-13.999099731445312],["▁तभी",-13.999099731445312],["▁portable",-13.999103546142578],["7,6",-13.999109268188477],["▁کانادا",-13.999116897583008],["1080",-13.99911880493164],["ർക്ക്",-13.999125480651855],["人民网",-13.999125480651855],["ಕೊಳ್ಳಿ",-13.999126434326172],["▁سازمانی",-13.999136924743652],["▁laissé",-13.999143600463867],["kwam",-13.999144554138184],["▁թանկ",-13.999146461486816],["発信",-13.999147415161133],["▁рамка",-13.999152183532717],["zweck",-13.999164581298828],["▁Джек",-13.999175071716309],["▁ھەي",-13.999176025390623],["▁عظمت",-13.999177932739258],["▁spelade",-13.999185562133787],["▁пяць",-13.99919319152832],["▁aanvraag",-13.999197959899902],["▁majhne",-13.9992094039917],["▁навучання",-13.999213218688965],["▁duża",-13.99921417236328],["▁remedyong",-13.99921417236328],["อุ่น",-13.999215126037598],["▁ברמת",-13.999222755432127],["▁أصدر",-13.999222755432127],["problemer",-13.999225616455078],["▁Catalan",-13.999234199523926],["▁Дакле",-13.999235153198242],["▁مناظر",-13.999239921569824],["▁ефикасност",-13.999242782592772],["▁ቅዱሳ",-13.99924659729004],["Show",-13.99925136566162],["▁tired",-13.999265670776367],["▁гдје",-13.999266624450684],["▁Behandling",-13.999284744262695],["▁nướng",-13.999297142028809],["▁GRUP",-13.999306678771973],["▁απόσταση",-13.999307632446287],["დად",-13.999309539794922],["▁zadeve",-13.999311447143556],["▁cột",-13.999329566955566],["▁dərman",-13.999333381652832],["▁Λά",-13.999342918395996],["▁Jeesus",-13.999345779418944],["▁מופ",-13.99936294555664],["являє",-13.9993
78204345703],["డంలో",-13.999399185180664],["inneach",-13.999406814575195],["▁מזל",-13.99942398071289],["Demo",-13.999429702758787],["▁hoffen",-13.999430656433104],["▁биік",-13.999431610107422],["значений",-13.999433517456056],["青海",-13.999434471130373],["อาหารเสริม",-13.999435424804688],["▁Navidad",-13.999449729919434],["▁priekšā",-13.999449729919434],["दैन",-13.99948024749756],["ោន",-13.999491691589355],["iziston",-13.999493598937988],["▁নির্বাচনের",-13.999510765075684],["৩৭",-13.999526977539062],["▁Čist",-13.999542236328123],["▁удара",-13.999563217163086],["▁zmeniť",-13.999576568603516],["ოთახიანი",-13.999601364135742],["▁geslo",-13.999618530273438],["/03/2018",-13.99963092803955],["▁Stiri",-13.99964714050293],["▁developing",-13.999649047851562],["▁predstavili",-13.999655723571776],["▁ölkəmizin",-13.999656677246094],["እነ",-13.999663352966309],["hjól",-13.99966526031494],["▁Jardin",-13.99967098236084],["aufnahme",-13.9996919631958],["▁tréner",-13.999693870544434],["▁ayollar",-13.999716758728027],["▁1875",-13.999746322631836],["punkten",-13.999750137329102],["տեն",-13.999762535095217],["▁zvlášt",-13.99976921081543],["▁Bemærk",-13.999774932861328],["ടോ",-13.999780654907228],["▁крену",-13.999780654907228],["▁лифт",-13.999786376953123],["此事",-13.99978733062744],["▁ලිපියක්",-13.999794006347656],["很强",-13.9998140335083],["ньняў",-13.99981689453125],["finder",-13.999850273132324],["▁Cohen",-13.99985122680664],["▁Erkenntnis",-13.999853134155272],["▁سینه",-13.999860763549805],["毫",-13.99986743927002],["▁අවම",-13.999869346618652],["KHA",-13.999872207641602],["▁водой",-13.99987506866455],["מקובל",-13.999881744384766],["▁vlasov",-13.999882698059082],["▁kawalan",-13.999889373779297],["▁огромное",-13.999893188476562],["район",-13.999900817871094],["ได้ทุก",-13.999933242797852],["ረዱ",-13.999939918518066],["യാക്കി",-13.999963760375977],["▁sigurnost",-14.000064849853516],["▁גרויסע",-14.00007152557373],["▁පක්",-14.000082969665527],["▁കുട്ടികളെ",-14.00008487701416],["मते",-14.000086784362791],["▁دورے",-14.00009059906006],["批發",-14.000096321105955],["▁resiko",-14.00010871887207],["Кі",-14.000120162963867],["▁부정",-14.000123023986816],["▁motív",-14.000133514404297],["heilig",-14.000134468078612],["▁цитат",-14.000146865844728],["BBC",-14.00014877319336],["▁Začni",-14.000149726867676],["▁destinacion",-14.000151634216309],["ıyorsunuz",-14.000161170959473],["ใช้ได้",-14.000166893005373],["▁suruh",-14.000194549560549],["Why",-14.000208854675291],["得分",-14.000218391418455],["▁savaitė",-14.000221252441406],["種類の",-14.000221252441406],["▁professionel",-14.000226020812988],["▁namesto",-14.00024700164795],["▁pauper",-14.000247955322266],["▁XXL",-14.000255584716797],["▁səfiri",-14.00026798248291],["▁Plot",-14.000271797180176],["▁ракета",-14.000280380249023],["λάμ",-14.00028133392334],["772",-14.000300407409668],["ತೆಯನ್ನು",-14.00033473968506],["evoluzione",-14.000346183776855],["دەر",-14.000359535217283],["▁remate",-14.000375747680664],["▁लगाउन",-14.00038242340088],["▁настоящим",-14.000393867492676],["名人",-14.000393867492676],["▁아는",-14.000401496887209],["φί",-14.000407218933104],["язана",-14.00042724609375],["▁vidū",-14.00046443939209],["▁храната",-14.000475883483888],["▁Игор",-14.000496864318848],["ילדים",-14.000499725341797],["▁гориво",-14.00052261352539],["偷偷",-14.000523567199709],["中秋",-14.00053596496582],["▁ಕಟ್ಟಿ",-14.00054931640625],["▁ნარკო",-14.000563621520996],["ؤكد",-14.000570297241213],["▁ਜਦ",-14.00058364868164],["レビュー",-14.0006103515625],["▁Адзін",-14.000619888305664],["ָּ",-14.00062656402588],["ደንብ",-14.00063
5147094728],["▁kylling",-14.000635147094728],["Main",-14.000638008117676],["▁кісі",-14.000657081604004],["ىندا",-14.000689506530762],["sortiment",-14.000702857971191],["кнути",-14.00070571899414],["▁ajaks",-14.000706672668455],["▁досі",-14.000709533691406],["ሳካ",-14.000712394714355],["ศิ",-14.00072956085205],["rennen",-14.000733375549316],["füllen",-14.000747680664062],["นําเข้า",-14.000799179077148],["Çık",-14.000800132751465],["▁називають",-14.000815391540527],["▁Madaxda",-14.000821113586426],["▁compost",-14.000828742980955],["▁materiały",-14.000836372375488],["posizio",-14.000837326049805],["▁vietni",-14.000864028930664],["ಡೀ",-14.000876426696776],["▁macera",-14.000886917114258],["xonasi",-14.000897407531738],["стату",-14.000905990600586],["०२",-14.000911712646484],["▁yönetmen",-14.000916481018066],["▁bestående",-14.00092601776123],["ณี",-14.000938415527344],["▁antoi",-14.000941276550291],["६६",-14.00094223022461],["▁використовують",-14.000943183898926],["ตกลง",-14.000953674316406],["▁sigara",-14.000953674316406],["▁аялдар",-14.000957489013672],["lumba",-14.000971794128418],["▁харуул",-14.000990867614746],["▁gisting",-14.001018524169922],["▁qoray",-14.001021385192873],["زعم",-14.001070022583008],["▁Коста",-14.001096725463867],["发送",-14.001096725463867],["▁Ramoso",-14.001115798950195],["stuen",-14.001140594482422],["െണ്ണ",-14.001141548156738],["धम",-14.001144409179688],["plika",-14.001164436340332],["▁ყოფილა",-14.001164436340332],["▁formā",-14.001169204711914],["相结合",-14.001171112060549],["▁бъдете",-14.00122356414795],["盲",-14.001248359680176],["▁ባለቤት",-14.00125503540039],["เล็บ",-14.001260757446287],["▁egunean",-14.001267433166504],["ADU",-14.001331329345703],["CBN",-14.001338005065918],["▁átlagos",-14.001358032226562],["بقى",-14.00136375427246],["▁материјали",-14.001378059387209],["たく",-14.001389503479004],["üüd",-14.001392364501951],["обикновен",-14.001392364501951],["शृ",-14.001399040222168],["wnętrz",-14.001407623291016],["▁Kolme",-14.00141143798828],["▁Priorit",-14.00141143798828],["▁začeli",-14.001435279846191],["अन्तर्गत",-14.00144100189209],["▁Pitkä",-14.001446723937988],["▁forced",-14.00145149230957],["▁फैल",-14.001465797424316],["▁ମଦ",-14.001495361328123],["▁لانے",-14.001510620117188],["436",-14.001514434814451],["▁Trykk",-14.001517295837402],["depth",-14.001538276672363],["▁копира",-14.00155544281006],["▁עשיר",-14.001583099365234],["kovski",-14.0015869140625],["ကြက္",-14.001587867736816],["ቸዋል።",-14.001595497131348],["業績",-14.001596450805664],["▁моди",-14.001620292663574],["บะ",-14.001668930053713],["ાતી",-14.001670837402344],["▁ઝડપ",-14.001672744750977],["▁Busca",-14.00169563293457],["فاج",-14.001720428466797],["▁mussten",-14.00172519683838],["▁sportif",-14.001748085021973],["句话",-14.001757621765137],["▁cujus",-14.001785278320312],["▁pedagogik",-14.00182056427002],["▁értékel",-14.001832962036133],["▁fjármála",-14.001842498779297],["▁architecto",-14.001846313476562],["▁fietsen",-14.001855850219728],["上限",-14.001859664916992],["▁Асан",-14.001872062683104],["OWE",-14.001875877380373],["איך",-14.001877784729004],["拯救",-14.001923561096191],["▁immediate",-14.001943588256836],["▁doriti",-14.001954078674316],["▁respecter",-14.001975059509276],["▁grym",-14.001976013183594],["तिहा",-14.001996040344238],["venind",-14.00200653076172],["仗",-14.00203800201416],["▁القادر",-14.002053260803224],["▁тіс",-14.002055168151855],["칙",-14.00205898284912],["▁gastar",-14.002065658569336],["滨",-14.002070426940918],["ประสบ",-14.002071380615234],["Klaipėda",-14.002074241638184],["ויקיפדיה",-14.0020742416
38184],["זיכרון",-14.002074241638184],["ລະບຽບ",-14.002074241638184],["እያንዳንዱ",-14.002074241638184],["អះអាង",-14.002074241638184],["▁Děkuji",-14.002074241638184],["▁Gómez",-14.002074241638184],["▁Kaymakam",-14.002074241638184],["▁Kovács",-14.002074241638184],["▁Multumesc",-14.002074241638184],["▁Podmínky",-14.002074241638184],["▁Uruguay",-14.002074241638184],["▁Xüsusi",-14.002074241638184],["▁indispensabil",-14.002074241638184],["▁kategooria",-14.002074241638184],["▁limpieza",-14.002074241638184],["▁matrícula",-14.002074241638184],["▁menggambarkan",-14.002074241638184],["▁pagtitistis",-14.002074241638184],["▁shtetëror",-14.002074241638184],["▁sjokolade",-14.002074241638184],["▁stosunku",-14.002074241638184],["▁suomalainen",-14.002074241638184],["▁tīmekļa",-14.002074241638184],["▁бүкіл",-14.002074241638184],["▁использовал",-14.002074241638184],["▁олимпиада",-14.002074241638184],["▁চাকরি",-14.002074241638184],["▁ઓનલાઇન",-14.002074241638184],["▁ପ୍ରତିଶତ",-14.002074241638184],["▁அதிர்ச்சி",-14.002074241638184],["▁ಚಿಕಿತ್ಸೆ",-14.002074241638184],["멸",-14.002074241638184],["▁Beautiful",-14.0020751953125],["▁aurkezpen",-14.0020751953125],["▁trưa",-14.0020751953125],["▁байгаль",-14.0020751953125],["▁възползва",-14.0020751953125],["▁захищені",-14.0020751953125],["▁майданчик",-14.0020751953125],["▁رفیق",-14.0020751953125],["▁নম্বর",-14.0020751953125],["▁ਅਹਿਮ",-14.0020751953125],["▁Hakkında",-14.002076148986816],["▁rapaces",-14.002076148986816],["▁کرمانشاه",-14.002076148986816],["▁संस्थे",-14.002076148986816],["▁सिर्जना",-14.002076148986816],["▁бременност",-14.002077102661133],["▁পরিচালক",-14.002077102661133],["▁ਕਿਉਂਕਿ",-14.002077102661133],["▁დაწესებულ",-14.002077102661133],["▁přehled",-14.00207805633545],["▁shaqaalaha",-14.00207805633545],["▁সম্পূর্ণ",-14.00207805633545],["▁Пікір",-14.002079010009766],["▁gondoskod",-14.002079963684082],["▁stosowania",-14.002079963684082],["▁चौक",-14.002080917358398],["▁ուղղակի",-14.002081871032717],["▁ոչինչ",-14.002082824707031],["▁কেমন",-14.00208568572998],["▁الثامن",-14.002086639404297],["▁érdeklődés",-14.002087593078612],["▁ಆದರೂ",-14.002087593078612],["ត្ដ",-14.002089500427246],["gujju",-14.002090454101562],["▁Frankrijk",-14.00209140777588],["▁παρέχει",-14.00209140777588],["▁erfitt",-14.002092361450195],["▁nhánh",-14.002092361450195],["▁କରିପାରିବେ",-14.002092361450195],["quvvatlash",-14.002093315124512],["▁þrjá",-14.002093315124512],["▁укуктуу",-14.002093315124512],["▁považova",-14.002095222473145],["▁pengganti",-14.00209617614746],["▁තේරෙන",-14.00209617614746],["▁nyugodt",-14.002097129821776],["▁үсэг",-14.002097129821776],["▁نمایندگی",-14.002097129821776],["ေက်ာင္းသား",-14.00209903717041],["▁المشاكل",-14.002100944519045],["▁Музеј",-14.002105712890623],["▁эхлэн",-14.002105712890623],["▁Bidh",-14.00210952758789],["▁obvesti",-14.00210952758789],["▁смело",-14.00211238861084],["▁მოკლედ",-14.002117156982422],["▁ዕድል",-14.002119064331056],["doktor",-14.002121925354004],["했으며",-14.002121925354004],["KIP",-14.00212287902832],["▁부르",-14.002126693725586],["▁pauvre",-14.002131462097168],["▁mënyra",-14.002140998840332],["▁Fundusz",-14.00214672088623],["▁showroom",-14.002151489257812],["▁Amazing",-14.002153396606444],["對外",-14.002153396606444],["▁آٹھ",-14.002157211303713],["▁پیسے",-14.002157211303713],["▁постављен",-14.00217056274414],["解消",-14.002174377441406],["▁കാണൂ",-14.002182960510254],["▁اصغر",-14.00218677520752],["ທາງດ້ານ",-14.002187728881836],["▁Војн",-14.002192497253418],["▁اچو",-14.00219440460205],["▁трећи",-14.002196311950684],["▁regény",-14.00221347808838],["▁lielākā",
-14.002217292785645],["▁$100",-14.002220153808594],["ປະເພດ",-14.00222110748291],["▁reasonable",-14.00222396850586],["▁Давайте",-14.002237319946287],["ဝတ်",-14.002252578735352],["▁paktën",-14.002284049987791],["▁màng",-14.002302169799805],["▁създаването",-14.002302169799805],["عزز",-14.002306938171388],["▁príchod",-14.002310752868652],["▁მართლა",-14.002320289611816],["▁լրագրող",-14.00232219696045],["▁громадських",-14.002333641052246],["▁Perform",-14.00233554840088],["▁članice",-14.002345085144045],["▁жестоко",-14.002355575561523],["خوض",-14.002361297607422],["▁proveedor",-14.002364158630373],["更換",-14.002373695373535],["ാലോ",-14.002375602722168],["▁kwento",-14.002375602722168],["▁HII",-14.002389907836914],["▁jatka",-14.002399444580078],["▁הוסף",-14.00240421295166],["▁slovenských",-14.00240993499756],["▁տեղական",-14.00240993499756],["▁2014-2015",-14.002421379089355],["▁Lazio",-14.002429008483888],["▁puke",-14.002432823181152],["▁rêxistin",-14.00244140625],["▁Ilaah",-14.002469062805176],["ுவோம்",-14.002470016479492],["▁железни",-14.002473831176758],["▁Aufbau",-14.002474784851074],["husisha",-14.002483367919922],["vuoden",-14.002486228942873],["▁nefes",-14.002488136291504],["Administr",-14.002490997314451],["典型的",-14.002495765686035],["▁beweeg",-14.002500534057615],["▁Wheel",-14.002511024475098],["就已经",-14.002513885498049],["▁Boshqa",-14.002521514892578],["▁odwiedza",-14.002532958984377],["▁ప్రారంభం",-14.002546310424805],["▁zwrócić",-14.002548217773438],["▁попросил",-14.002554893493652],["យ៉ាងណា",-14.00256633758545],["▁Helaas",-14.002581596374512],["▁נשאר",-14.002581596374512],["▁vendégek",-14.002596855163574],["үүлнэ",-14.002609252929688],["▁تعود",-14.002619743347168],["(6)",-14.00263786315918],["branche",-14.002641677856444],["▁활동을",-14.002647399902344],["ారో",-14.00265121459961],["▁hodnotu",-14.00266456604004],["ポリ",-14.002683639526367],["20°",-14.00269603729248],["▁ליום",-14.002705574035645],["▁dostáva",-14.00273323059082],["▁പുന",-14.002737045288086],["▁ስነ",-14.002758026123049],["atsiyasi",-14.002764701843262],["▁procedemento",-14.002781867980955],["тамыр",-14.002790451049805],["жди",-14.002798080444336],["▁Regulamin",-14.002800941467283],["কৰ",-14.00282096862793],["▁inspekci",-14.002822875976562],["▁Вашия",-14.002824783325195],["▁Yaar",-14.002836227416992],["సీపీ",-14.002854347229004],["▁markalı",-14.002870559692385],["▁ignorant",-14.002873420715332],["▁szczegół",-14.002881050109863],["▁ansøgning",-14.00288200378418],["▁Сүй",-14.002886772155762],["▁esconde",-14.002893447875977],["▁ტექნოლოგიები",-14.002928733825684],["▁качественно",-14.002954483032228],["അബ്ദു",-14.002963066101074],["▁ਬੱਸ",-14.002984046936035],["▁slovenský",-14.00299835205078],["▁aplikazio",-14.003033638000488],["словенск",-14.003037452697754],["▁dependencia",-14.003042221069336],["▁dalies",-14.003057479858398],["▁ведаю",-14.003057479858398],["▁התר",-14.00306797027588],["▁кратки",-14.003095626831056],["▁tomber",-14.003098487854004],["▁publicação",-14.00312328338623],["▁앞에서",-14.003128051757812],["一體",-14.003159523010254],["▁Rebel",-14.003161430358888],["▁susțin",-14.003175735473633],["πος",-14.003180503845217],["ପାରୁ",-14.003216743469238],["ምንም",-14.003246307373049],["▁বাবা",-14.003252029418944],["పడే",-14.003266334533691],["▁perfectly",-14.003331184387209],["▁dubbi",-14.003348350524902],["▁ऋत",-14.00334930419922],["▁dasarnya",-14.003360748291016],["▁cevabı",-14.003378868103027],["▁አበ",-14.00339126586914],["▁재생",-14.003392219543455],["▁Kafe",-14.003403663635254],["vládn",-14.003477096557615],["jäsen",-14.0034818649292],["▁
bestuurder",-14.00350570678711],["מכים",-14.003539085388184],["płacić",-14.003549575805664],["▁Риб",-14.003549575805664],["▁3:0",-14.003560066223145],["▁ietver",-14.003569602966309],["▁داسي",-14.003585815429688],["▁ποια",-14.003620147705078],["▁довело",-14.003620147705078],["ОЮ",-14.003622055053713],["▁fengu",-14.003633499145508],["▁tutun",-14.003640174865724],["▁બીજ",-14.003643989562988],["▁skilja",-14.003662109375],["▁zinātni",-14.003665924072266],["▁20.30",-14.003700256347656],["ๆๆ",-14.003727912902832],["▁mennyt",-14.003731727600098],["ىستى",-14.00374984741211],["▁tertia",-14.00375270843506],["ברג",-14.003792762756348],["▁മകന",-14.003804206848145],["▁Ostatnio",-14.003806114196776],["ైంది",-14.003819465637209],["美方",-14.003861427307127],["▁wychodzi",-14.003873825073242],["▁kontr",-14.003880500793455],["▁primært",-14.003894805908203],["▁termal",-14.003901481628418],["▁uniós",-14.003913879394531],["▁विशाल",-14.003960609436035],["വെച്ചു",-14.003963470458984],["▁десни",-14.003966331481934],["ыңды",-14.00397777557373],["lıyoruz",-14.003986358642578],["▁Relax",-14.003998756408691],["▁żoł",-14.004000663757324],["▁Leif",-14.004008293151855],["ദേവ",-14.004024505615234],["▁PROMO",-14.004029273986816],["සුන",-14.004040718078612],["▁COMO",-14.004050254821776],["အတိုင္း",-14.004067420959473],["▁პოლიტიკურ",-14.004082679748535],["nesite",-14.004088401794434],["▁মহ",-14.004088401794434],["Nkosi",-14.004090309143066],["öön",-14.00410270690918],["所在地",-14.004133224487305],["▁wakazi",-14.00413990020752],["▁signaler",-14.004142761230469],["ចៅ",-14.004178047180176],["▁sarva",-14.004195213317873],["ಲ್ಸ್",-14.004233360290527],["▁MSI",-14.004253387451172],["פסיכו",-14.004254341125488],["행정",-14.00425910949707],["▁folga",-14.004266738891602],["▁Gør",-14.0042724609375],["▁пац",-14.004283905029297],["▁фебруар",-14.00428581237793],["▁подари",-14.004310607910156],["▁онова",-14.004314422607422],["▁tabula",-14.004324913024902],["▁zápis",-14.004327774047852],["看過",-14.00436782836914],["त्रिक",-14.004369735717772],["▁учні",-14.004372596740724],["ໂອ",-14.004387855529783],["evolució",-14.004450798034668],["ልህ",-14.004472732543944],["▁অনুষ্ঠান",-14.00447940826416],["▁Абды",-14.00449562072754],["▁2017-18",-14.004496574401855],["▁cursuri",-14.004510879516602],["brez",-14.004514694213867],["▁percentage",-14.004528999328612],["گال",-14.00453758239746],["▁igényel",-14.00453758239746],["▁ўрад",-14.00454044342041],["AJU",-14.00455379486084],["▁magie",-14.004568099975586],["suunnittelu",-14.004575729370115],["▁вызов",-14.00457763671875],["▁mladim",-14.00460720062256],["▁stanza",-14.004619598388672],["▁πιστεύ",-14.004633903503418],["▁աշխատանքը",-14.004650115966797],["▁sempit",-14.00467014312744],["님께서",-14.004693031311035],["צרכים",-14.004724502563477],["▁правителство",-14.004741668701172],["▁conduz",-14.004742622375488],["▁midten",-14.004749298095703],["شاق",-14.004755973815918],["bavu",-14.00479507446289],["ժե",-14.004799842834473],["▁Chitt",-14.004817962646484],["▁açılışı",-14.004830360412598],["នៅថ្ងៃទី",-14.004864692687988],["▁Rapporter",-14.004913330078123],["session",-14.004915237426758],["▁etxean",-14.004915237426758],["ωτική",-14.004926681518556],["▁естествено",-14.004932403564451],["▁излага",-14.004937171936035],["အိုး",-14.004948616027832],["▁zdrojov",-14.004956245422363],["lapok",-14.004960060119627],["▁ලග",-14.004960060119627],["ARCH",-14.004962921142578],["▁erilaisi",-14.0050048828125],["▁selecció",-14.005008697509766],["▁confirmat",-14.005012512207031],["▁Fisher",-14.00503635406494],["öntö",-14.005041122436523],["▁de
stinacij",-14.00504207611084],["▁remet",-14.005059242248535],["慘",-14.005070686340332],["▁blízko",-14.005080223083496],["统筹",-14.005083084106444],["疊",-14.005088806152344],["霞",-14.00508975982666],["咒",-14.00509548187256],["흔",-14.00509548187256],["Донбас",-14.005098342895508],["▁baharu",-14.005112648010254],["▁κόμμα",-14.005115509033203],["مبي",-14.005119323730469],["黑龙江",-14.005123138427734],["สตรี",-14.005125045776367],["賠償",-14.005136489868164],["▁Məcəllə",-14.005139350891112],["โดดเด่น",-14.005149841308594],["fhoghlaim",-14.005151748657228],["omkostninger",-14.005151748657228],["Δημιουργ",-14.005151748657228],["ทาวน์",-14.005151748657228],["▁Découvrez",-14.005151748657228],["▁Stichting",-14.005151748657228],["▁Suruhanjaya",-14.005151748657228],["▁Vokietijos",-14.005151748657228],["▁amennyiben",-14.005151748657228],["▁arddangos",-14.005151748657228],["▁bilhões",-14.005151748657228],["▁clássico",-14.005151748657228],["▁geleentheid",-14.005151748657228],["▁geschafft",-14.005151748657228],["▁palakihin",-14.005151748657228],["▁speletjies",-14.005151748657228],["▁ytterligere",-14.005151748657228],["▁έλλειψη",-14.005151748657228],["▁Србе",-14.005151748657228],["▁Сұрақ",-14.005151748657228],["▁арганізацый",-14.005151748657228],["▁грамадства",-14.005151748657228],["▁заседании",-14.005151748657228],["▁некоторое",-14.005151748657228],["▁разобраться",-14.005151748657228],["▁ситуацію",-14.005151748657228],["▁історію",-14.005151748657228],["▁אפריל",-14.005151748657228],["▁اپوزیشن",-14.005151748657228],["▁بحاجة",-14.005151748657228],["▁पोहोच",-14.005151748657228],["▁ਕੋਸ਼ਿਸ਼",-14.005151748657228],["▁ਹਮੇਸ਼ਾ",-14.005151748657228],["▁ਹਾਦਸੇ",-14.005151748657228],["▁થોડી",-14.005151748657228],["▁භාණ්ඩ",-14.005151748657228],["▁πολεμ",-14.005152702331545],["▁מאוחר",-14.005152702331545],["▁언급",-14.005152702331545],["အခမဲ့",-14.00515365600586],["▁apkalpo",-14.00515365600586],["▁vuodessa",-14.00515365600586],["▁освіту",-14.00515365600586],["▁الظروف",-14.00515365600586],["▁রাশিয়া",-14.005154609680176],["▁nozīme",-14.005155563354492],["愧",-14.005156517028809],["▁φοιτ",-14.005157470703123],["▁недеље",-14.005157470703123],["▁хүрээлэн",-14.005157470703123],["організатор",-14.00515842437744],["תזונה",-14.00515842437744],["▁בלויז",-14.00515842437744],["▁бағытталған",-14.005159378051758],["▁организацији",-14.005159378051758],["รสชาติ",-14.005160331726074],["▁পুরো",-14.005160331726074],["▁Unternehmer",-14.00516128540039],["▁kilométer",-14.005163192749023],["▁жыхароў",-14.005167007446287],["▁ସାହୁ",-14.005167961120604],["▁spriječi",-14.005168914794922],["▁kyseessä",-14.005170822143556],["▁ditutup",-14.005171775817873],["▁Εγώ",-14.00517463684082],["▁Només",-14.005175590515137],["もう少し",-14.005175590515137],["▁گرافیک",-14.005176544189451],["▁közvetlenül",-14.00517749786377],["▁küpse",-14.00517749786377],["▁שניתן",-14.00517749786377],["▁சந்தித்த",-14.00517749786377],["▁জানিয়েছেন",-14.005178451538086],["▁ڈاٹ",-14.00518035888672],["▁गोपाल",-14.005183219909668],[".08.2017",-14.005184173583984],["▁szintű",-14.0051851272583],["▁длительно",-14.005187034606934],["▁Kolumbi",-14.005189895629885],["▁Huế",-14.005193710327148],["▁müzakirələr",-14.005193710327148],["▁الرأس",-14.005193710327148],["▁raspunde",-14.005194664001465],["▁keskellä",-14.00519847869873],["当地时间",-14.005206108093262],["▁zachwyc",-14.005216598510742],["▁صعود",-14.005216598510742],["▁hitsura",-14.005218505859377],["▁xahiş",-14.005218505859377],["▁ಊಟ",-14.005219459533691],["▁السكان",-14.00522804260254],["▁مياشت",-14.00523853302002],["រូបថត",-14.005244255065918],["▁lai
kyti",-14.005244255065918],["▁drew",-14.005247116088867],["▁némi",-14.005256652832031],["▁следващите",-14.00525951385498],["▁megjegyzés",-14.005261421203612],["▁मनोज",-14.005261421203612],["ឈឺ",-14.00526523590088],["▁آڻي",-14.005266189575195],["شوارع",-14.00527000427246],["▁Jackpot",-14.005271911621094],["▁eisiau",-14.00527572631836],["▁شارژ",-14.00527572631836],["▁permesso",-14.005276679992676],["▁Sakarya",-14.005282402038574],["▁وحشت",-14.005289077758787],["▁진실",-14.005292892456056],["▁අග්",-14.005297660827637],["▁Proszę",-14.00529956817627],["Germain",-14.00530242919922],["ಾಗಿದ್ದು",-14.005303382873535],["iannau",-14.0053071975708],["schot",-14.005309104919434],["▁конверт",-14.005311012268066],["▁متعارف",-14.005311965942385],["▁행복한",-14.00531768798828],["▁hevalê",-14.005325317382812],["bayar",-14.005330085754396],["▁snijeg",-14.005335807800291],["▁američko",-14.00534439086914],["▁Kwanza",-14.005345344543455],["▁pubblicazione",-14.005361557006836],["arbre",-14.005364418029783],["▁बचत",-14.005377769470217],["▁تعجب",-14.005378723144531],["ващите",-14.00538158416748],["▁sprijinul",-14.005382537841797],["чыл",-14.00538444519043],["KAZ",-14.005386352539062],["やりたい",-14.005391120910645],["นักแสดง",-14.00540542602539],["▁pervers",-14.005407333374023],["Ան",-14.005414962768556],["▁යාලුව",-14.005416870117188],["றேன்",-14.005430221557615],["▁teevad",-14.005433082580566],["▁живеят",-14.005433082580566],["▁ජලය",-14.005436897277832],["▁Pasukan",-14.005440711975098],["▁Ένωσης",-14.005443572998049],["▁насеље",-14.005452156066896],["ซื้อขาย",-14.005455017089844],["▁הטובים",-14.005463600158691],["▁Bəzi",-14.005465507507324],["▁vergeten",-14.005470275878906],["▁қызметкерлері",-14.00547695159912],["▁sprach",-14.005491256713867],["▁Veldig",-14.00550365447998],["kór",-14.005510330200195],["▁krajem",-14.005515098571776],["bleib",-14.005529403686523],["▁عابد",-14.005533218383787],["▁konstatera",-14.00554370880127],["ጠቀሙ",-14.005547523498535],["▁минуту",-14.005550384521484],["▁presence",-14.005559921264648],["大臣",-14.005587577819824],["▁трактор",-14.005592346191406],["▁merrni",-14.005617141723633],["▁verzió",-14.005617141723633],["relevant",-14.005622863769531],["▁ეკონომიკური",-14.00564670562744],["▁mleko",-14.005651473999023],["▁مگ",-14.005659103393556],["ようですが",-14.00568389892578],["▁preparación",-14.00568675994873],["agiza",-14.005695343017578],["▁വ്യാജ",-14.005702018737791],["пишите",-14.005712509155272],["▁tratti",-14.005712509155272],["trækning",-14.005719184875488],["Rama",-14.005744934082031],["▁tudott",-14.00575065612793],["▁dæm",-14.005765914916992],["▁dhintay",-14.005778312683104],["▁impedit",-14.005784034729004],["▁burgu",-14.005785942077637],["來電",-14.00579071044922],["▁Afrikaner",-14.005792617797852],["▁vietnam",-14.005805015563965],["▁oplossingen",-14.005806922912598],["▁ettevõtete",-14.005841255187988],["kṣa",-14.00584602355957],["▁jaunimo",-14.005850791931152],["▁želji",-14.005850791931152],["▁voter",-14.005863189697266],["ástica",-14.005866050720217],["▁trenér",-14.005867004394531],["oittanut",-14.00589370727539],["عتاد",-14.005906105041504],["▁истината",-14.005928993225098],["▁Федор",-14.005934715270996],["▁библиотек",-14.00594711303711],["▁وينا",-14.005958557128906],["▁confusion",-14.005961418151855],["▁fogyaszt",-14.005976676940918],["▁мемори",-14.006010055541992],["▁Hinweise",-14.006022453308104],["джээ",-14.00602912902832],["пель",-14.006044387817385],["▁התש",-14.006049156188965],["டெ",-14.006054878234863],["▁шешімі",-14.006056785583496],["▁Biaya",-14.00607681274414],["Total",-14.0060892105
10254],["verat",-14.006102561950684],["पत्ति",-14.006112098693848],["्याच्या",-14.00611400604248],["▁کردید",-14.006118774414062],["▁Apartamento",-14.00612735748291],["ဝ်",-14.00613021850586],["▁حوار",-14.0061674118042],["▁अजय",-14.006217956542969],["кваліфі",-14.006227493286133],["▁desespera",-14.006231307983398],["yhteiskunta",-14.006258010864258],["▁طبقات",-14.006269454956056],["różnego",-14.006275177001951],["▁अर्का",-14.006279945373535],["▁انبار",-14.006291389465332],["▁вечером",-14.006292343139648],["▁الطبيب",-14.006315231323242],["ZAN",-14.006336212158203],["ப்பே",-14.00633716583252],["arbeiter",-14.00635814666748],["ованої",-14.006380081176758],["ရတယ္",-14.006391525268556],["ићев",-14.00640869140625],["▁هنرمندان",-14.006415367126465],["μαντ",-14.006426811218262],["ઠો",-14.006426811218262],["▁jääda",-14.006429672241213],["▁әдістері",-14.006467819213867],["▁Padu",-14.006468772888184],["ുണ്ടെന്ന",-14.006481170654297],["ሏ",-14.006500244140623],["といっても",-14.006515502929688],["▁Danach",-14.006521224975586],["duduk",-14.006547927856444],["▁Ključne",-14.006548881530762],["▁sanāk",-14.00657081604004],["▁먹을",-14.006608963012695],["▁Samtidigt",-14.006622314453123],["алерги",-14.00662612915039],["โซน",-14.006627082824709],["▁leczenie",-14.006659507751465],["▁حلال",-14.00666332244873],["ମୟ",-14.006668090820312],["▁besvare",-14.00667953491211],["▁ryw",-14.006688117980955],["১৪",-14.006699562072754],["सहितको",-14.006710052490234],["▁проживаю",-14.006714820861816],["▁dispus",-14.006720542907717],["▁Turkiga",-14.00672435760498],["▁znalost",-14.00673007965088],["▁tikintisi",-14.006742477416992],["▁Адреса",-14.006743431091309],["▁بالایی",-14.006747245788574],["▁studera",-14.006752014160156],["▁dobrego",-14.0067777633667],["▁zunehmend",-14.00678825378418],["469",-14.00681209564209],["▁טאג",-14.006817817687988],["▁creștere",-14.006841659545898],["▁lùi",-14.006844520568848],["صطف",-14.006855010986328],["▁autentifica",-14.00686264038086],["fjorden",-14.006868362426758],["▁ಅವು",-14.006887435913086],["这时候",-14.006965637207031],["▁apporte",-14.00697422027588],["თავისუფლებ",-14.00697898864746],["バラ",-14.006990432739258],["に対し",-14.00699520111084],["▁Слов",-14.007002830505373],["ovega",-14.00700855255127],["▁선발",-14.007039070129396],["행사",-14.007039070129396],["ώτη",-14.00704574584961],["▁Recent",-14.00705909729004],["▁אונדזער",-14.007067680358888],["INAN",-14.00706958770752],["▁garrantzitsua",-14.007088661193848],["การทดสอบ",-14.007102966308594],["ਲੈਂਡ",-14.007122039794922],["▁খো",-14.00714111328125],["▁ترى",-14.007164001464844],["▁ئەھۋال",-14.007166862487791],["▁сынак",-14.007187843322754],["▁выезд",-14.007208824157717],["naslov",-14.007210731506348],["▁kontaktai",-14.007210731506348],["▁olmasının",-14.007211685180664],["▁तत्त्व",-14.007219314575195],["OWA",-14.007243156433104],["▁ಆವೃತ್ತಿ",-14.007245063781738],["▁osobito",-14.007314682006836],["Kategori",-14.007332801818848],["▁آیه",-14.007354736328123],["▁skull",-14.007373809814451],["▁боре",-14.007427215576172],["eeriv",-14.007436752319336],["▁հիման",-14.007452964782717],["ratibu",-14.007485389709473],["өнд",-14.007518768310549],["▁COMUN",-14.007529258728027],["aicināju",-14.007530212402344],["▁toxin",-14.00753116607666],["▁հին",-14.007556915283203],["▁გამართულ",-14.007568359375],["▁بمب",-14.007580757141112],["▁setup",-14.007593154907228],["ፖለቲካ",-14.007596015930176],["οκομ",-14.007616996765137],["▁মোট",-14.00761890411377],["▁عدن",-14.00762939453125],["▁SPM",-14.007633209228516],["ต่อเนื่อง",-14.007661819458008],["▁продолжает",-14.007673263549805],["▁Terry"
,-14.007680892944336],["geschrieben",-14.007719993591309],["פורסם",-14.007719993591309],["kręt",-14.007735252380373],["أنباء",-14.007736206054688],["ثمان",-14.007740020751951],["TRES",-14.007744789123535],["▁اختلافات",-14.007744789123535],["apertura",-14.007745742797852],["证实",-14.00776481628418],["ıldığını",-14.007765769958496],["business",-14.007769584655762],["▁qayd",-14.007786750793455],["ຮອງ",-14.00782299041748],["▁كېسەللىك",-14.007843017578123],["▁ஆகும்",-14.007877349853516],["▁සංවිධාන",-14.007885932922363],["nčiam",-14.007928848266602],["Нови",-14.007953643798828],["▁τάξη",-14.007956504821776],["▁kritiko",-14.007957458496094],["▁మారు",-14.007988929748535],["して下さい",-14.007996559143066],["▁oblike",-14.008004188537598],["▁Bistri",-14.008031845092772],["▁01.01.",-14.008041381835938],["മരുന്ന",-14.008042335510254],["▁ბიჭი",-14.008056640625],["▁Rahva",-14.008066177368164],["ก้อน",-14.00806999206543],["▁ირან",-14.008070945739746],["棒球",-14.008074760437012],["పాటు",-14.00809097290039],["蒜",-14.008092880249023],["▁kombëtar",-14.008105278015137],["▁crap",-14.00811004638672],["▁ministerija",-14.00812530517578],["▁દર્શન",-14.008139610290527],["▁shenja",-14.008143424987791],["▁പൊലീസ",-14.00814437866211],["शक्ती",-14.00814723968506],["循",-14.00815486907959],["▁وترك",-14.00816249847412],["窝",-14.008180618286133],["liwość",-14.00818157196045],["呼籲",-14.008184432983398],["▁včeraj",-14.008185386657717],["爷爷",-14.008187294006348],["虑",-14.008191108703612],["幽默",-14.008203506469728],["▁государственно",-14.008209228515623],["預測",-14.008213996887209],["スマートフォン",-14.00821590423584],["仪器",-14.008216857910156],["喫",-14.008216857910156],["债券",-14.008220672607422],["疑惑",-14.008223533630373],["レストラン",-14.008225440979004],["쇄",-14.008225440979004],["▁شۇنداق",-14.008227348327637],["မျက်နှာ",-14.00823211669922],["ecchia",-14.008233070373535],["ဒဏ်",-14.0082368850708],["▁ମାଧ୍ୟମ",-14.0082368850708],["novembrī",-14.008237838745115],["ՙ",-14.008237838745115],["အစည်းအဝေး",-14.008237838745115],["ေရြး",-14.008237838745115],["ፆ",-14.008237838745115],["សប្តាហ៍",-14.008237838745115],["▁Betroffene",-14.008237838745115],["▁Fjordane",-14.008237838745115],["▁Tiffany",-14.008237838745115],["▁fantástico",-14.008237838745115],["▁kinnisvara",-14.008237838745115],["▁maravilhoso",-14.008237838745115],["▁mengumumkan",-14.008237838745115],["▁thuyền",-14.008237838745115],["▁διάφορα",-14.008237838745115],["▁εντυπωσιακ",-14.008237838745115],["▁σχηματ",-14.008237838745115],["▁крэдыт",-14.008237838745115],["▁мерзімді",-14.008237838745115],["▁оказывается",-14.008237838745115],["▁първенство",-14.008237838745115],["▁тенденции",-14.008237838745115],["▁בינלאומי",-14.008237838745115],["▁سیکرٹری",-14.008237838745115],["▁پرانستل",-14.008237838745115],["▁सिफारिस",-14.008237838745115],["▁দ্রুত",-14.008237838745115],["▁జరిగే",-14.008237838745115],["▁నమోదు",-14.008237838745115],["▁పాయింట్",-14.008237838745115],["▁విచారణ",-14.008237838745115],["▁ವಿದ್ಯುತ್",-14.008237838745115],["▁ವೇದಿಕೆ",-14.008237838745115],["▁옵션",-14.008237838745115],["멍",-14.008237838745115],["မော်",-14.008238792419434],["▁attīstīt",-14.008238792419434],["▁održano",-14.008238792419434],["▁verarbeitet",-14.008238792419434],["▁пасьля",-14.008238792419434],["▁посматра",-14.008238792419434],["▁verletzt",-14.00823974609375],["▁පිළිබද",-14.008240699768066],["▁የበለጠ",-14.008240699768066],["visit",-14.008241653442385],["αίρεση",-14.008241653442385],["▁Problém",-14.008241653442385],["▁ပြန်လည်",-14.008241653442385],["▁විෂය",-14.0082426071167],["▁úvěr",-14.008243560791016],["évaluation",-14.00824
5468139648],["▁istiyorsanız",-14.008245468139648],["▁صلاة",-14.00824737548828],["▁बेलायत",-14.00824737548828],["▁Jeremy",-14.008248329162598],["▁заходу",-14.008248329162598],["▁అనుమాన",-14.008248329162598],["ဂၤ",-14.00825023651123],["▁antarabangsa",-14.00825023651123],["▁beteiligt",-14.00825023651123],["▁girlfriend",-14.00825023651123],["▁umfassende",-14.00825023651123],["▁wysyła",-14.008251190185549],["▁ውሃ",-14.008251190185549],["▁sandwich",-14.008255004882812],["▁περιόδου",-14.008255004882812],["▁עיניים",-14.008255958557127],["▁রক্ষা",-14.008256912231444],["▁требуют",-14.008258819580078],["勇士",-14.008258819580078],["▁Kaksi",-14.008259773254396],["ပျက်",-14.008262634277344],["▁ლიდერ",-14.008262634277344],["▁huomattavasti",-14.00826358795166],["▁ඉක්ම",-14.00826358795166],["▁pieejams",-14.008264541625977],["▁இணைப்பு",-14.008264541625977],["▁స్థానిక",-14.008264541625977],["▁ಇಬ್ಬರು",-14.008264541625977],["▁রক্ত",-14.008270263671877],["▁ഉദ്ഘാടനം",-14.008271217346191],["▁ശശി",-14.008272171020508],["▁kitiems",-14.008273124694824],["▁כנגד",-14.008275985717772],["ובים",-14.008277893066406],["▁nemokamai",-14.008278846740724],["▁poskuša",-14.008283615112305],["▁tivemos",-14.008283615112305],["▁ହୋଇଥିଲେ",-14.008285522460938],["▁informoval",-14.00828742980957],["▁modtaget",-14.00828742980957],["▁النائب",-14.00829029083252],["▁tanfolyam",-14.008293151855469],["▁nedēļā",-14.008296012878418],["▁אחרונים",-14.008298873901367],["▁lolote",-14.008299827575684],["▁להעביר",-14.008307456970217],["▁खुसी",-14.00831127166748],["▁кызматынын",-14.008316040039062],["適應",-14.00831699371338],["▁xavf",-14.008318901062012],["▁rejäl",-14.008320808410645],["實務",-14.008320808410645],["▁изучава",-14.008326530456545],["действа",-14.008338928222656],["▁лёгка",-14.008338928222656],["▁پڑھنے",-14.008339881896973],["▁kommenttia",-14.008345603942873],["▁آزار",-14.00836181640625],["tarvikkeet",-14.00836944580078],["▁δέκα",-14.008371353149414],["रुवा",-14.008377075195312],["▁látszik",-14.008377075195312],["▁корисно",-14.008389472961426],["réttar",-14.008390426635742],["ਵਿੱ",-14.008390426635742],["OBO",-14.008394241333008],["▁lykkelig",-14.008395195007324],["632",-14.008397102355955],["▁loodud",-14.008403778076172],["▁hjärn",-14.008405685424805],["▁فرن",-14.00841236114502],["▁судалгааны",-14.008419036865234],["▁alegerea",-14.00841999053955],["▁knä",-14.008435249328612],["▁החופש",-14.008435249328612],["▁кафедрасы",-14.008440017700195],["▁beszélt",-14.008451461791992],["guld",-14.008455276489258],["▁האנשים",-14.00845718383789],["▁glaci",-14.00847625732422],["▁Válasz",-14.008490562438965],["▁likumā",-14.008499145507812],["▁domaće",-14.008502006530762],["▁безбедно",-14.008512496948242],["▁občutek",-14.008527755737305],["▁encamê",-14.00854778289795],["▁свечано",-14.008556365966797],["▁ուժի",-14.00856113433838],["▁অবস্থান",-14.008572578430176],["在校",-14.008574485778809],["▁ბუნებრივი",-14.008575439453123],["▁Bằng",-14.00857639312744],["▁Navend",-14.00858211517334],["regnskap",-14.00859546661377],["ၿဖ",-14.008651733398438],["▁Ampli",-14.008654594421388],["▁падыход",-14.0086669921875],["▁Sexual",-14.008674621582031],["▁ئەمەل",-14.00867748260498],["લેટ",-14.00868034362793],["▁delovnih",-14.008686065673828],["്റെ",-14.00869369506836],["▁بڑھا",-14.00869846343994],["teollisuus",-14.008700370788574],["▁ఎదుర",-14.00870132446289],["▁Manau",-14.008706092834473],["▁suplimentare",-14.008718490600586],["▁tappe",-14.008728981018066],["जवळ",-14.008731842041016],["TIVE",-14.008740425109863],["ျဖစ္ေန",-14.008761405944824],["▁zerbitzuak",-14.008773803710938],["▁
Grind",-14.008785247802734],["בוב",-14.008794784545898],["▁fetish",-14.008798599243164],["koloni",-14.00881004333496],["ிருந்தது",-14.008818626403809],["▁العدل",-14.00882053375244],["▁wirtschaftlich",-14.008824348449709],["▁Britt",-14.008832931518556],["တွေ့ဆုံ",-14.008845329284668],["▁Jahon",-14.008851051330566],["ભાગ",-14.008866310119627],["専",-14.008872985839844],["ksana",-14.008874893188477],["▁тяхната",-14.00887966156006],["▁frå",-14.008882522583008],["受歡迎",-14.008889198303224],["biór",-14.008892059326172],["▁bavê",-14.008913040161133],["▁datorita",-14.008918762207031],["▁revisar",-14.008933067321776],["éifeacht",-14.008953094482422],["ځو",-14.008954048156738],["▁мысал",-14.008967399597168],["▁elkövet",-14.008968353271484],["が良く",-14.0089693069458],["ങ",-14.00897216796875],["678",-14.008988380432127],["Τα",-14.00899887084961],["▁ricorre",-14.009016036987305],["▁gengi",-14.00903606414795],["heben",-14.009038925170898],["▁nhâ",-14.00904369354248],["ਟਿੰਗ",-14.009052276611328],["Звезд",-14.009065628051758],["årene",-14.009108543395996],["▁mână",-14.009111404418944],["▁trgovini",-14.009116172790527],["▁подобрување",-14.009117126464844],["▁широка",-14.009127616882324],["ūnų",-14.00913906097412],["▁kanuni",-14.009146690368652],["▁Spille",-14.009195327758787],["▁giga",-14.009220123291016],["▁пипер",-14.009224891662598],["▁լուծ",-14.00924301147461],["▁sıx",-14.009257316589355],["biegł",-14.009265899658203],["上課",-14.009267807006836],["யாள",-14.009281158447266],["गुरु",-14.009284973144531],["րով",-14.009285926818848],["నుంచి",-14.00929069519043],["▁katrs",-14.009302139282228],["ിന്റെയും",-14.009309768676758],["රියා",-14.009322166442873],["▁മേഖലയില",-14.009326934814451],["▁Navîn",-14.009328842163086],["▁zjarr",-14.00933837890625],["1939",-14.009342193603516],["抵达",-14.009347915649414],["סמים",-14.00935173034668],["▁Christum",-14.009416580200195],["ročná",-14.009419441223145],["▁gnó",-14.009441375732422],["导向",-14.00946044921875],["▁lähesty",-14.009471893310549],["▁Niemiec",-14.009476661682127],["▁трето",-14.00949001312256],["▁bawal",-14.009495735168455],["svenska",-14.009513854980469],["▁19%",-14.00952434539795],["016",-14.009528160095217],["▁spojení",-14.009530067443848],["▁నిజం",-14.00953197479248],["▁redelijk",-14.009532928466797],["pustí",-14.00954532623291],["myslí",-14.009600639343262],["▁ضیا",-14.009611129760742],["▁behoud",-14.009657859802246],["▁مواردی",-14.009660720825195],["▁řeč",-14.009679794311523],["ネットで",-14.009683609008787],["640",-14.009703636169434],["▁հոգե",-14.009705543518066],["ക്കാള്",-14.009713172912598],["▁referir",-14.009721755981444],["ลาม",-14.009730339050291],["だけでも",-14.00974178314209],["Caput",-14.009750366210938],["▁инноваци",-14.009765625],["CID",-14.009770393371582],["▁dangerous",-14.009817123413086],["▁Prabu",-14.009820938110352],["μιά",-14.009830474853516],["ημένα",-14.009842872619627],["λλον",-14.009842872619627],["▁Ыр",-14.009858131408691],["▁مستخدم",-14.009869575500488],["ජන්",-14.009881973266602],["▁alabilirsiniz",-14.00991916656494],["sumber",-14.009923934936523],["▁მილიონ",-14.009943008422852],["▁novú",-14.009946823120115],["▁monopoli",-14.009958267211914],["305",-14.009984970092772],["lyasiya",-14.010004043579102],["▁వర్మ",-14.010061264038086],["不懈",-14.010065078735352],["▁partide",-14.01007080078125],["▁първият",-14.010090827941896],["▁elkerül",-14.010122299194336],["▁beceri",-14.01015567779541],["▁bemutató",-14.010164260864258],["Minu",-14.0101900100708],["▁אזרחי",-14.0101957321167],["ಧಾನ",-14.01020050048828],["շու",-14.010210990905762],["▁Ники",-14.010
228157043455],["weynaha",-14.010250091552734],["▁confirmation",-14.010261535644531],["本届",-14.010272979736328],["IMP",-14.010279655456545],["▁Emme",-14.010293006896973],["▁testar",-14.010300636291504],["▁gledati",-14.01032829284668],["会社に",-14.010334968566896],["ούσα",-14.010343551635742],["▁1857",-14.010358810424805],["ševal",-14.01036262512207],["vejte",-14.010397911071776],["Dekho",-14.010416984558104],["ょう",-14.010435104370115],["▁نوین",-14.010462760925291],["家電",-14.01046371459961],["▁નોટ",-14.01047420501709],["▁osnovni",-14.010485649108888],["exercici",-14.01048755645752],["出國",-14.010499954223633],["▁prilika",-14.010506629943848],["▁घरात",-14.010517120361328],["▁Jól",-14.010540962219238],["▁කීවේ",-14.010565757751465],["▁Europian",-14.010597229003906],["▁Ranta",-14.010615348815918],["ականների",-14.0106201171875],["▁мертв",-14.010627746582031],["▁bulmak",-14.010638236999512],["▁Tibor",-14.010649681091309],["▁mówiąc",-14.010660171508787],["▁የሆኑት",-14.010677337646484],["ทราบว่า",-14.01072120666504],["▁funksiya",-14.010729789733888],["▁vurderer",-14.010733604431152],["Това",-14.010737419128418],["▁Τί",-14.010748863220217],["▁მომავალი",-14.010761260986328],["▁sundhed",-14.01077365875244],["▁සාමාජික",-14.010809898376465],["▁ტელეფონი",-14.010818481445312],["सचिव",-14.0108642578125],["▁immobilier",-14.010868072509766],["сіл",-14.010873794555664],["▁megkap",-14.01087474822998],["သိမ်း",-14.01088809967041],["▁සිදුවීම",-14.010893821716309],["▁mahsulotlar",-14.01090145111084],["▁brindar",-14.010931015014648],["מנת",-14.010936737060549],["▁تھر",-14.011006355285645],["ғына",-14.011013984680176],["társaság",-14.011024475097656],["▁qiymətlər",-14.011029243469238],["trabaho",-14.011031150817873],["▁státu",-14.011031150817873],["▁hamı",-14.011046409606934],["aeroport",-14.0110502243042],["நான்",-14.011058807373049],["▁заправо",-14.011069297790527],["өзү",-14.01108169555664],["ປາກ",-14.011088371276855],["▁dejado",-14.011096000671388],["▁മലയാളത്തില",-14.011096954345703],["תושבי",-14.011098861694336],["வதில்லை",-14.011127471923828],["▁Livro",-14.011137008666992],["▁написав",-14.01116180419922],["תוצאות",-14.011163711547852],["▁نظریات",-14.011186599731444],["τόπι",-14.011194229125977],["包容",-14.011213302612305],["ਹੋ",-14.011228561401367],["▁SRL",-14.01124095916748],["ుతున్నా",-14.011241912841797],["▁Demand",-14.011247634887695],["தாம்",-14.011287689208984],["▁Фотограф",-14.0112943649292],["▁utføre",-14.011300086975098],["▁תוכל",-14.011303901672363],["人民幣",-14.011310577392578],["适用于",-14.011314392089844],["貨幣",-14.01131820678711],["▁οφείλ",-14.011322975158691],["▁రంగు",-14.011324882507324],["رىس",-14.011327743530272],["▁మరొక",-14.011327743530272],["戰爭",-14.011329650878906],["ເທິງ",-14.011332511901855],["փառատոն",-14.011333465576172],["ॊ",-14.011333465576172],["পশ্চিম",-14.011333465576172],["បណ្ឌិត",-14.011333465576172],["▁Mediterrane",-14.011333465576172],["▁Thompson",-14.011333465576172],["▁amlwg",-14.011333465576172],["▁galimybes",-14.011333465576172],["▁gevestigd",-14.011333465576172],["▁mảnh",-14.011333465576172],["▁opdrachtgever",-14.011333465576172],["▁tənqid",-14.011333465576172],["▁uğrunda",-14.011333465576172],["▁wątpliwości",-14.011333465576172],["▁švietimo",-14.011333465576172],["▁попробовать",-14.011333465576172],["▁початок",-14.011333465576172],["▁супрацоўніцтва",-14.011333465576172],["▁հանդեպ",-14.011333465576172],["▁بنگلہ",-14.011333465576172],["▁ځانګړي",-14.011333465576172],["▁अक्सर",-14.011333465576172],["▁সংগঠন",-14.011333465576172],["▁સંખ્યા",-14.011333465576172],["▁სამეცნიერო",-14.011333
465576172],["▁መመሪያ",-14.011333465576172],["롬",-14.011333465576172],["សរុប",-14.011334419250488],["▁miniszterelnök",-14.011334419250488],["▁shuffle",-14.011334419250488],["▁гудамж",-14.011334419250488],["▁التقليدي",-14.011334419250488],["▁नव्हे",-14.011334419250488],["▁அதிரடி",-14.011334419250488],["กระเจี๊ยว",-14.011335372924805],["▁cunoaste",-14.011335372924805],["▁preizkus",-14.011335372924805],["▁republice",-14.011335372924805],["▁stærð",-14.011335372924805],["▁Докато",-14.011335372924805],["▁мільён",-14.011335372924805],["▁стадии",-14.011335372924805],["▁ኖሮ",-14.011335372924805],["▁caalamka",-14.01133632659912],["▁potvrde",-14.01133632659912],["▁европейско",-14.01133632659912],["▁Práve",-14.011337280273438],["▁империя",-14.011337280273438],["▁मुख्यपृष्ठ",-14.011337280273438],["▁ಮನರಂಜನಾ",-14.011337280273438],["चरित्र",-14.011338233947754],["▁eleições",-14.011338233947754],["▁întotdeauna",-14.01133918762207],["▁Скачать",-14.01133918762207],["▁नागपूर",-14.01133918762207],["▁Dibdib",-14.011340141296388],["▁perwerde",-14.011340141296388],["▁припинення",-14.011340141296388],["峽",-14.011340141296388],["▁cumplimiento",-14.011343002319336],["▁नगद",-14.011343002319336],["▁organisatsiooni",-14.011343955993652],["MeToo",-14.011344909667969],["็อต",-14.011346817016602],["▁rękaw",-14.011347770690918],["▁καθόλου",-14.011347770690918],["▁ਕੱਪ",-14.01134967803955],["▁makapag",-14.011353492736816],["పోతుంది",-14.011354446411133],["▁interieur",-14.011354446411133],["▁აპირებ",-14.011354446411133],["龜",-14.01135540008545],["▁વિદેશ",-14.011358261108398],["▁Kocaeli",-14.011359214782717],["▁ਅਮਰੀਕੀ",-14.011360168457031],["לאז",-14.011362075805664],["▁تشرين",-14.01136302947998],["▁വേറെ",-14.011363983154297],["彰化",-14.011363983154297],["▁едновременно",-14.01136589050293],["▁еркін",-14.011366844177246],["▁жұрт",-14.011366844177246],["misiones",-14.01136875152588],["▁ancaman",-14.011370658874512],["▁ଖସି",-14.011374473571776],["▁हौ",-14.011375427246094],["▁чытач",-14.011380195617676],["መለሱ",-14.011381149291992],["▁താങ്കള്",-14.011382102966309],["▁urednik",-14.011388778686523],["▁Σαμαρά",-14.011388778686523],["mthetho",-14.011397361755373],["▁가서",-14.01140022277832],["▁árbore",-14.011402130126951],["verkiezing",-14.01140594482422],["▁gelegenheid",-14.011407852172852],["▁İraq",-14.011407852172852],["▁siyaasadeed",-14.011415481567385],["▁හිතන්නේ",-14.011418342590332],["▁muskler",-14.011422157287598],["▁ikdienas",-14.011428833007812],["▁ունենք",-14.011433601379396],["▁Δυτικ",-14.01144313812256],["▁frutto",-14.011446952819824],["▁Каталог",-14.011455535888672],["▁جبهه",-14.011465072631836],["▁المبارك",-14.011481285095217],["▁identificat",-14.01148509979248],["▁boshqalar",-14.011489868164062],["▁жылдык",-14.01149559020996],["bwynt",-14.011497497558594],["▁pysyvä",-14.01149845123291],["▁haween",-14.011500358581545],["シャル",-14.01150131225586],["▁líbí",-14.011502265930176],["▁İdarəsi",-14.01151180267334],["▁произведения",-14.011512756347656],["▁korzystania",-14.011514663696287],["häiriö",-14.011517524719238],["▁Cade",-14.011521339416504],["▁bilimsel",-14.011526107788086],["érzékeny",-14.01152801513672],["▁хөнд",-14.0115385055542],["▁բեռ",-14.011542320251465],["▁المميز",-14.011551856994627],["▁Älä",-14.011556625366213],["▁Cég",-14.011567115783691],["▁sameeyay",-14.01157283782959],["▁scut",-14.011578559875488],["有效地",-14.01158046722412],["▁Afriko",-14.0115966796875],["elezea",-14.011597633361816],["▁fizika",-14.011600494384766],["▁rakk",-14.01160717010498],["ګان",-14.011618614196776],["▁postale",-14.011629104614258],["▁головний"
,-14.011634826660156],["ωτών",-14.011637687683104],["▁megállap",-14.01164436340332],["▁circonda",-14.011655807495115],["явився",-14.01165771484375],["tılar",-14.0116605758667],["▁ابتلا",-14.01168441772461],["▁चरणको",-14.01168441772461],["▁المركزي",-14.011693954467772],["▁dostupný",-14.011706352233888],["将成为",-14.011706352233888],["▁tilpasset",-14.011707305908203],["▁Cukup",-14.011710166931152],["్ని",-14.011717796325684],["inscripció",-14.011723518371582],["▁Wright",-14.01173496246338],["▁πολιτικού",-14.011754035949709],["▁prematur",-14.011754989624023],["▁šādi",-14.011756896972656],["▁ایئر",-14.011757850646973],["0.3",-14.011763572692873],["flam",-14.011770248413086],["基於",-14.011778831481934],["▁rezervēt",-14.01177978515625],["▁belirli",-14.011791229248049],["▁එකා",-14.01180648803711],["▁awayî",-14.011821746826172],["▁majalah",-14.011831283569336],["▁zásah",-14.011842727661133],["▁(54)",-14.01186466217041],["的高度",-14.011880874633787],["▁btw",-14.01189136505127],["▁gennych",-14.011898040771484],["ንጋ",-14.011910438537598],["▁אומרים",-14.011919975280762],["ējām",-14.01192569732666],["▁savaşı",-14.011932373046877],["Это",-14.011938095092772],["هندسة",-14.011956214904783],["▁recommended",-14.011960983276367],["▁tabletas",-14.011964797973633],["▁colegas",-14.01196575164795],["▁სახლის",-14.011981010437012],["▁രാജ്യത്തെ",-14.012001991271973],["▁generazione",-14.012039184570312],["▁ciuda",-14.012042045593262],["▁Мина",-14.01205825805664],["हारी",-14.01207447052002],["▁BTW",-14.012079238891602],["▁очима",-14.01209831237793],["meniť",-14.012110710144045],["▁połączenie",-14.012117385864258],["▁ট্যা",-14.012125968933104],["ቆር",-14.012133598327637],["ვრცელებ",-14.012136459350586],["▁zmienił",-14.01213836669922],["▁राज्यात",-14.012152671813965],["▁uskoro",-14.012176513671877],["▁социални",-14.012177467346191],["▁jeeda",-14.012189865112305],["▁variantları",-14.012198448181152],["▁фебруара",-14.012206077575684],["өсө",-14.012211799621582],["▁திரும்ப",-14.012225151062012],["▁сулуу",-14.012247085571287],["closure",-14.012248992919922],["соедини",-14.01225471496582],["ณฑ",-14.012258529663086],["▁juridik",-14.012269020080566],["可供",-14.012271881103516],["▁suši",-14.012284278869627],["▁мирис",-14.012298583984377],["▁мешка",-14.012299537658691],["ЛОВ",-14.012307167053224],["▁getirme",-14.012310981750488],["▁attenta",-14.01231288909912],["▁вратата",-14.01231575012207],["▁distante",-14.012316703796388],["ящие",-14.012322425842283],["▁дозволено",-14.012322425842283],["▁Európsk",-14.012353897094728],["▁засіб",-14.012389183044434],["▁paskutini",-14.012395858764648],["677",-14.012406349182127],["▁rappelle",-14.012406349182127],["▁Βαλ",-14.012408256530762],["有一次",-14.012412071228027],["▁طبی",-14.01241683959961],["▁അച്ഛന",-14.01243019104004],["น้า",-14.012444496154783],["▁Austin",-14.012454986572266],["patient",-14.012456893920898],["▁Coupe",-14.012471199035645],["▁한글",-14.012492179870604],["技术和",-14.012500762939451],["▁Срок",-14.012502670288086],["▁Kasar",-14.012507438659668],["▁vezetője",-14.01251983642578],["არული",-14.01258945465088],["地产",-14.01258945465088],["allergi",-14.012613296508787],["▁vraj",-14.01262664794922],["yritys",-14.0126314163208],["เซน",-14.012635231018066],["▁vrijedi",-14.01264190673828],["İnter",-14.012643814086914],["452",-14.012653350830078],["▁својом",-14.012653350830078],["▁odvisno",-14.01267147064209],["▁Дунав",-14.012710571289062],["▁monografi",-14.012720108032228],["▁өздері",-14.012734413146973],["మణి",-14.012737274169922],["politic",-14.012763023376465],["Ер",-14.012779235839844],["▁peşə
",-14.012799263000488],["▁рекламы",-14.01282024383545],["▁Predaj",-14.012845993041992],["▁famous",-14.012845993041992],["ಿದಾಗ",-14.012850761413574],["▁considerato",-14.012850761413574],["lweni",-14.012866020202637],["▁huvudet",-14.01288890838623],["حضرت",-14.012896537780762],["を書いて",-14.012900352478027],["▁დოლარი",-14.01291275024414],["608",-14.01292324066162],["▁hozzájárul",-14.012928009033203],["▁වෙනව",-14.012956619262695],["數學",-14.012964248657228],["кард",-14.01296615600586],["459",-14.012974739074709],["량이",-14.013001441955566],["▁prawdziwe",-14.013029098510742],["▁sljedeći",-14.013050079345703],["ฒ",-14.013053894042969],["μπερ",-14.013076782226562],["▁Kadri",-14.013079643249512],["▁instrui",-14.013093948364258],["▁prispeva",-14.013100624084473],["▁xeración",-14.01313591003418],["▁distract",-14.013154029846191],["▁хаана",-14.01315975189209],["ованим",-14.013177871704102],["betjening",-14.013181686401367],["▁desiderio",-14.013195037841797],["▁управа",-14.013195991516112],["▁Lumin",-14.013216018676758],["▁থাকতে",-14.013233184814451],["لىنىشى",-14.013238906860352],["▁jumala",-14.013262748718262],["alueet",-14.01327896118164],["มาตรา",-14.013303756713867],["sessie",-14.013307571411133],["▁Mick",-14.013313293457031],["အမည်",-14.013336181640623],["▁אילו",-14.01334285736084],["▁Харків",-14.013357162475586],["▁учат",-14.013360977172852],["▁Такой",-14.01337432861328],["▁2556",-14.013387680053713],["ورڈ",-14.01340103149414],["ುತ್ತಿದ್ದು",-14.01340103149414],["▁Guardia",-14.01341724395752],["▁위기",-14.013421058654783],["▁ਸ਼ਾਹ",-14.013422966003418],["offen",-14.013429641723633],["市區",-14.01344871520996],["▁rahvusvahelise",-14.01348114013672],["過度",-14.01348114013672],["ଵ",-14.013529777526855],["▁Unidade",-14.013545989990234],["▁విని",-14.013548851013184],["▁genoem",-14.013551712036133],["▁exigent",-14.01355266571045],["ነብ",-14.013566970825195],["華人",-14.013587951660156],["డితే",-14.013588905334473],["▁begynde",-14.013591766357422],["▁hızla",-14.013598442077637],["▁ഉപയോഗിക്കുന്ന",-14.01360321044922],["▁temperaturen",-14.013606071472168],["Raja",-14.013672828674316],["▁sviði",-14.013721466064451],["സേന",-14.013742446899414],["▁kuishi",-14.01377296447754],["▁מרק",-14.013776779174805],["お願いします",-14.013789176940918],["▁zmene",-14.01381778717041],["ခ်စ္သူ",-14.013823509216309],["इल",-14.013826370239258],["ρίων",-14.013839721679688],["२६",-14.013849258422852],["पेट",-14.01386260986328],["▁қосу",-14.013869285583496],["新建",-14.013894081115724],["זאַ",-14.013896942138672],["▁ಹೊಂದಿರುವ",-14.013897895812988],["ωτικό",-14.01390552520752],["▁parametri",-14.013916015625],["ການເມືອງ",-14.013927459716797],["▁هرگون",-14.013927459716797],["▁почистване",-14.013940811157228],["▁Videre",-14.013955116271973],["Бук",-14.0139799118042],["ήκαμε",-14.013991355895996],["ย่า",-14.013991355895996],["▁પ્રાપ્ત",-14.013991355895996],["▁거기",-14.013998031616213],["▁chlor",-14.014004707336426],["▁Promotion",-14.014005661010742],["американ",-14.01400661468506],["▁sorunun",-14.014013290405272],["ことにより",-14.01402187347412],["bewusst",-14.014041900634766],["▁gulang",-14.014052391052246],["முடி",-14.014082908630373],["ငါး",-14.01408576965332],["▁tekme",-14.014141082763672],["▁napęd",-14.014145851135254],["tvena",-14.014151573181152],["▁såvel",-14.014159202575684],["하여야",-14.014208793640137],["▁relata",-14.0142240524292],["ძეს",-14.014235496520996],["기획",-14.014252662658691],["▁yönel",-14.014272689819336],["EDO",-14.014276504516602],["▁Antall",-14.014276504516602],["ผมก็",-14.01427936553955],["チャレンジ",-14.01427936553955],["▁classico",-14.0142
90809631348],["یزې",-14.014294624328612],["קטגוריה",-14.014302253723145],["▁képvisel",-14.014305114746094],["▁বিন",-14.014309883117676],["▁боје",-14.014312744140623],["อยากได้",-14.014318466186523],["▁Geliş",-14.01435375213623],["隱藏",-14.014358520507812],["经贸",-14.01436996459961],["侠",-14.014370918273926],["▁rõhu",-14.014371871948242],["ÁK",-14.01437282562256],["оўку",-14.014389991760254],["ированы",-14.01439094543457],["经销商",-14.01439094543457],["懷疑",-14.014391899108888],["減肥",-14.014392852783203],["▁представяне",-14.01439380645752],["流動",-14.014400482177734],["盾",-14.014413833618164],["ແສງ",-14.014437675476074],["劝",-14.014437675476074],["บัณฑิต",-14.014439582824709],["ฤกษ์",-14.014439582824709],["ၿပိဳင္",-14.014439582824709],["ይሖዋ",-14.014439582824709],["▁Município",-14.014439582824709],["▁Unterkunft",-14.014439582824709],["▁apdrošināšana",-14.014439582824709],["▁circumstances",-14.014439582824709],["▁impossibile",-14.014439582824709],["▁memikirkan",-14.014439582824709],["▁sərnişin",-14.014439582824709],["▁wyjątkowo",-14.014439582824709],["▁yuzasidan",-14.014439582824709],["▁zacząć",-14.014439582824709],["▁անդրադարձ",-14.014439582824709],["▁ճակատ",-14.014439582824709],["▁זייַן",-14.014439582824709],["▁کوئٹہ",-14.014439582824709],["▁झटका",-14.014439582824709],["▁सम्पादक",-14.014439582824709],["▁ইন্টারনেট",-14.014439582824709],["▁ଖୁବ୍",-14.014439582824709],["▁విజయవాడ",-14.014439582824709],["▁පදිංචි",-14.014439582824709],["▁δεκαετία",-14.014440536499023],["▁вечір",-14.014440536499023],["▁ئۇسۇل",-14.014440536499023],["▁مەزگىل",-14.014440536499023],["ลําดับ",-14.01444149017334],["▁минатата",-14.01444149017334],["▁արարող",-14.01444149017334],["▁бөлісіңіз",-14.014442443847656],["▁consequuntur",-14.014444351196287],["▁ගොඩනැ",-14.014444351196287],["▁ଫୁଟ",-14.014445304870604],["칼럼",-14.014445304870604],["ေပမယ့္",-14.014446258544922],["▁Emanuel",-14.014446258544922],["▁নিয়ম",-14.014446258544922],["▁तापमान",-14.014447212219238],["อีเมล์",-14.014449119567873],["▁didakwa",-14.014450073242188],["▁sociálnych",-14.014451026916504],["▁szabályozás",-14.014451026916504],["▁družbi",-14.01445198059082],["▁напитки",-14.014453887939451],["▁nøgle",-14.014455795288086],["▁기다리",-14.014455795288086],["▁Password",-14.014456748962402],["▁Sabadell",-14.014456748962402],["▁יוצרים",-14.01445770263672],["ホール",-14.01445770263672],["▁aðgengi",-14.014458656311035],["▁아빠",-14.014461517333984],["▁Dmitri",-14.0144624710083],["▁Lietuvių",-14.01446533203125],["▁Llibre",-14.01446533203125],["▁Procedur",-14.014470100402832],["▁Sesuai",-14.014470100402832],["▁սպասվում",-14.014471054077148],["▁ਖ਼ਬਰਾਂ",-14.01447296142578],["▁povinnosti",-14.014473915100098],["▁만약",-14.014473915100098],["▁tərəfdaş",-14.014474868774414],["▁سمنڊ",-14.01447868347168],["▁দাবিতে",-14.014481544494627],["▁זאָל",-14.014485359191896],["▁إقامة",-14.014492988586426],["▁скачать",-14.014493942260742],["▁Gjør",-14.014496803283691],["收購",-14.014498710632324],["จิตใจ",-14.01449966430664],["▁поддръжка",-14.01449966430664],["▁uusan",-14.014504432678224],["▁자랑",-14.014506340026855],["▁cúpla",-14.014512062072754],["لصناعة",-14.014513969421388],["放射",-14.01452350616455],["▁загальної",-14.0145263671875],["▁lengua",-14.014531135559082],["▁Curtea",-14.014532089233398],["▁yetenek",-14.01453685760498],["ಸಿಂಹ",-14.014537811279297],["▁ceteris",-14.014552116394045],["▁සිටියේය",-14.014554023742676],["▁itzuli",-14.01456356048584],["▁indipendente",-14.014568328857422],["▁Crazy",-14.014570236206056],["▁مدنی",-14.014572143554688],["▁vorbesc",-14.014578819274902],["好事",-14.01457881927
4902],["▁campuran",-14.014582633972168],["▁गरेकी",-14.01458740234375],["▁particulière",-14.014592170715332],["qizlar",-14.014599800109863],["▁שווער",-14.014601707458496],["95%",-14.014603614807127],["▁nihče",-14.014603614807127],["▁tertulis",-14.014603614807127],["ેન્દ્ર",-14.014605522155762],["▁משא",-14.014609336853027],["▁batay",-14.014626502990724],["▁Ευρω",-14.014628410339355],["▁Serwis",-14.014629364013672],["▁inspirerende",-14.014639854431152],["йцеся",-14.014649391174316],["▁ქალები",-14.014656066894531],["▁ਉੱਚ",-14.01465892791748],["▁parteneriat",-14.014659881591797],["▁sonido",-14.014665603637695],["▁črna",-14.01467990875244],["についても",-14.014680862426758],["▁бизнесмен",-14.01470184326172],["▁بنفسك",-14.014702796936035],["▁Liquid",-14.014707565307615],["fulltrúa",-14.014708518981934],["த்திலும்",-14.014725685119627],["▁እንድት",-14.014726638793944],["放到",-14.014727592468262],["▁kutafuta",-14.014735221862791],["රූප",-14.014742851257324],["老化",-14.014761924743652],["تأثر",-14.014763832092283],["מכירה",-14.01477336883545],["▁električne",-14.01477336883545],["▁огт",-14.014801025390623],["ակների",-14.014805793762209],["▁компоненти",-14.014806747436523],["▁посадов",-14.014816284179688],["▁билдирген",-14.014817237854004],["▁второто",-14.014849662780762],["472",-14.014853477478027],["主人公",-14.014866828918455],["בדיקת",-14.01486873626709],["▁глазами",-14.014873504638672],["主に",-14.014897346496582],["oqibat",-14.014898300170898],["▁framgång",-14.014899253845217],["▁යාලයේ",-14.014899253845217],["▁видях",-14.014909744262695],["▁tvář",-14.014915466308594],["▁ostatnim",-14.01492691040039],["श्रुत",-14.014927864074709],["▁Heikki",-14.014944076538086],["▁раман",-14.014957427978516],["▁बजट",-14.01496124267578],["รับจ้าง",-14.014975547790527],["▁թիվը",-14.015003204345703],["▁Respekt",-14.015037536621094],["▁Consultado",-14.015048027038574],["ലക്ഷ",-14.015057563781738],["▁inspirasjon",-14.015063285827637],["▁წიგნ",-14.0150728225708],["▁Evil",-14.015080451965332],["յական",-14.015082359313965],["იკო",-14.015100479125977],["▁kārtība",-14.01510238647461],["▁resaka",-14.015116691589355],["ගිය",-14.015130996704102],["sleep",-14.015146255493164],["▁spalva",-14.015182495117188],["それから",-14.015192031860352],["НИКА",-14.015203475952148],["▁Bilal",-14.015249252319336],["1964",-14.015259742736816],["▁Маз",-14.015265464782717],["্যে",-14.01529026031494],["▁štátu",-14.0153169631958],["▁tasarımı",-14.015321731567385],["▁Processo",-14.015324592590332],["▁sokeri",-14.015324592590332],["▁prehľad",-14.01533031463623],["▁żywo",-14.015340805053713],["▁brakuje",-14.015351295471191],["ወሰደ",-14.015352249145508],["▁estuvo",-14.015363693237305],["▁kryds",-14.01537799835205],["▁تحب",-14.015388488769531],["▁ბავშვები",-14.015398025512695],["▁હોત",-14.015406608581545],["▁საშუალებას",-14.015416145324709],["▁praten",-14.01542854309082],["▁opiera",-14.015470504760742],["ணர்",-14.015480995178224],["ąsias",-14.015485763549805],["프레",-14.01549243927002],["497",-14.01550006866455],["无限",-14.015509605407717],["▁plafond",-14.01551342010498],["요소",-14.01552677154541],["NUN",-14.015528678894045],["▁duyuru",-14.015533447265623],["▁ausencia",-14.015545845031738],["▁बढ्दै",-14.015548706054688],["ակազմ",-14.0155611038208],["▁направили",-14.015568733215332],["ಗರ",-14.015583992004396],["μιση",-14.015591621398926],["▁dygtig",-14.01559352874756],["▁1872",-14.01560401916504],["មន",-14.015605926513672],["▁Šād",-14.01561164855957],["lishiga",-14.015621185302734],["お腹",-14.015639305114746],["▁процесів",-14.015661239624023],["ਮੁ",-14.015664100646973],["休み",-
14.015670776367188],["ännyt",-14.015685081481934],["▁ਚੀਨ",-14.01568603515625],["▁brinda",-14.015687942504885],["DEV",-14.015691757202148],["▁પ્રકાર",-14.015698432922363],["▁počin",-14.01569938659668],["▁ਤੋ",-14.015734672546388],["▁singgah",-14.015739440917969],["kaffe",-14.015743255615234],["۹۱",-14.015765190124512],["아리",-14.015771865844728],["ജ്ഞാന",-14.015788078308104],["▁Vijana",-14.0158109664917],["▁facilisi",-14.015814781188965],["▁नाका",-14.015816688537598],["▁Arany",-14.01581859588623],["▁nutrisi",-14.015825271606444],["▁Cosmo",-14.015828132629396],["▁thâm",-14.015836715698242],["少數",-14.015840530395508],["▁Сами",-14.015851020812988],["▁خیالات",-14.015851020812988],["▁салган",-14.01586627960205],["▁ölkələri",-14.01587963104248],["мыздың",-14.015880584716797],["ריח",-14.015892028808594],["▁Паў",-14.01590347290039],["▁सकेन",-14.0159273147583],["▁Fungsi",-14.015951156616213],["China",-14.01595973968506],["جتمع",-14.01598834991455],["ష్య",-14.015995025634766],["▁중고",-14.01601791381836],["▁obdar",-14.01602268218994],["れます",-14.016027450561523],["▁ලැබුන",-14.016048431396484],["▁скок",-14.016056060791016],["▁දූෂණ",-14.016079902648926],["▁Winx",-14.016095161437988],["▁கோப",-14.016101837158203],["ందరి",-14.01611042022705],["▁accompagné",-14.016112327575684],["▁Baixa",-14.016120910644531],["jevanja",-14.016128540039062],["▁přední",-14.016151428222656],["774",-14.016170501708984],["odpor",-14.0161714553833],["っていて",-14.016173362731934],["▁հայր",-14.016178131103516],["▁khal",-14.016180038452148],["▁žinia",-14.016183853149414],["комунисти",-14.01618480682373],["▁māja",-14.016193389892578],["หนี้",-14.016194343566896],["ασία",-14.01621150970459],["อยู่กับ",-14.016233444213867],["▁извести",-14.016236305236816],["▁ජො",-14.016240119934082],["วิช",-14.016268730163574],["ેન",-14.01626968383789],["▁ஏற்ற",-14.016291618347168],["بدل",-14.016298294067385],["৬০",-14.01633358001709],["angkut",-14.01642894744873],["▁இப்போ",-14.016438484191896],["▁νέας",-14.016443252563477],["665",-14.016456604003906],["CSI",-14.016459465026855],["を行います",-14.016473770141602],["ອາດຈະ",-14.016481399536133],["▁kõva",-14.016502380371094],["▁колата",-14.016508102416992],["▁rezerváci",-14.01653289794922],["TUAL",-14.016538619995115],["ტებს",-14.016538619995115],["▁trudne",-14.016541481018066],["ປ່າ",-14.01664924621582],["ــــــــــــــــ",-14.016700744628906],["၀ိ",-14.01670265197754],["初めての",-14.016711235046388],["▁koža",-14.016724586486816],["▁prichádza",-14.016728401184082],["778",-14.016735076904297],["ঘাত",-14.016741752624512],["▁uygulamaları",-14.016746520996094],["▁militante",-14.016761779785156],["▁Sklad",-14.016780853271484],["要知道",-14.0167875289917],["▁პერიოდი",-14.016796112060549],["уулан",-14.016818046569824],["kelta",-14.016836166381836],["かったです",-14.016844749450684],["▁madini",-14.016862869262695],["▁elmondta",-14.016868591308594],["▁açılan",-14.01686954498291],["▁довести",-14.016874313354492],["▁معني",-14.016878128051758],["▁имеющих",-14.016889572143556],["▁SAH",-14.016908645629885],["▁cégek",-14.0169095993042],["шките",-14.016918182373049],["流感",-14.016923904418944],["▁skryt",-14.016962051391602],["LASH",-14.016972541809082],["▁देऊन",-14.016980171203612],["INTI",-14.01700496673584],["▁Grin",-14.017005920410156],["▁තොරව",-14.017012596130373],["ලෝක",-14.017013549804688],["▁tartani",-14.017040252685549],["▁jamoasi",-14.017099380493164],["▁ГАЗ",-14.01710319519043],["几个月",-14.01711368560791],["▁जप",-14.01712417602539],["▁dobije",-14.01716423034668],["▁Partizan",-14.017193794250488],["δίων",-14.01720905303955],["▁sapiente
",-14.0172119140625],["betingelser",-14.017213821411133],["▁определена",-14.01721477508545],["Samsung",-14.017229080200195],["▁rămân",-14.017231941223145],["▁võimu",-14.017236709594728],["प्रदेश",-14.017271041870115],["▁이와",-14.017271041870115],["▁vuelo",-14.017271995544434],["▁средње",-14.017281532287598],["பகுதி",-14.017282485961914],["▁ಉದ್",-14.01732063293457],["▁obrir",-14.017343521118164],["▁XXIII",-14.017370223999023],["▁lengan",-14.01738452911377],["▁хүсэл",-14.017388343811035],["decim",-14.01740550994873],["▁సమావేశ",-14.017411231994627],["▁가상",-14.017413139343262],["▁жеңил",-14.01743984222412],["ங்களே",-14.017454147338867],["▁կայքը",-14.017454147338867],["葡萄牙",-14.017459869384766],["▁tenkte",-14.01749038696289],["▁хөдөлмөр",-14.017497062683104],["悶",-14.017498970031738],["ישו",-14.017510414123535],["▁nafto",-14.017513275146484],["▁sufiĉe",-14.017518043518066],["冒險",-14.01753044128418],["▁individuellen",-14.017532348632812],["毫米",-14.017533302307127],["▁замест",-14.017537117004396],["ทุ่ง",-14.017542839050291],["总额",-14.017542839050291],["一会儿",-14.01754379272461],["疯",-14.01755142211914],["久しぶり",-14.017553329467772],["فيسبوك",-14.01755428314209],["▁Bilgisayar",-14.01755428314209],["▁Etazonia",-14.01755428314209],["▁Penelitian",-14.01755428314209],["▁istraživanje",-14.01755428314209],["▁jebkurā",-14.01755428314209],["▁kemerdekaan",-14.01755428314209],["▁menghabiskan",-14.01755428314209],["▁menyelamatkan",-14.01755428314209],["▁runsaasti",-14.01755428314209],["▁tufayli",-14.01755428314209],["▁unglaublich",-14.01755428314209],["▁uruchomi",-14.01755428314209],["▁ενίσχυση",-14.01755428314209],["▁борьбы",-14.01755428314209],["▁будинків",-14.01755428314209],["▁פּלאַץ",-14.01755428314209],["▁جاسکتا",-14.01755428314209],["▁هدايت",-14.01755428314209],["▁फ्रान्स",-14.01755428314209],["▁म्हटले",-14.01755428314209],["▁वैदेशिक",-14.01755428314209],["▁கிரிக்கெட்",-14.01755428314209],["▁ಆನ್ಲೈನ್",-14.01755428314209],["▁සැලසුම්",-14.01755428314209],["▁ຕຸ້ຍ",-14.01755428314209],["꿀",-14.01755428314209],["მართლმადიდებ",-14.017555236816406],["ረጋገጥ",-14.017555236816406],["▁amaterji",-14.017555236816406],["▁erfüllt",-14.017555236816406],["▁raccomanda",-14.017555236816406],["▁жігіт",-14.017555236816406],["▁традиционно",-14.017555236816406],["▁түмний",-14.017555236816406],["▁հրատարակ",-14.017555236816406],["▁آرشیو",-14.017555236816406],["▁কর্মসূচি",-14.017555236816406],["▁ବନ୍ଧୁ",-14.017555236816406],["▁പ്രവേശന",-14.017555236816406],["▁ආරංචි",-14.017555236816406],["▁διαχειρ",-14.017556190490724],["▁допомагає",-14.017556190490724],["▁दिसंबर",-14.017556190490724],["▁కొడుకు",-14.017556190490724],["▁ორგანო",-14.017556190490724],["▁obvykle",-14.01755714416504],["▁изобрет",-14.01755714416504],["▁מחפש",-14.01755714416504],["మూర్తి",-14.017558097839355],["▁erheblich",-14.017558097839355],["▁moitié",-14.017558097839355],["▁лінії",-14.017558097839355],["▁പ്രതികരിക്ക",-14.017558097839355],["▁berättelse",-14.017559051513672],["▁reuşit",-14.017559051513672],["▁watanzania",-14.017559051513672],["▁കഴിവ",-14.017559051513672],["▁Донбасі",-14.017560005187988],["▁prebieha",-14.017560958862305],["အရွယ်",-14.01756191253662],["▁bjuder",-14.01756191253662],["▁бүтэц",-14.01756191253662],["▁липсва",-14.01756191253662],["▁электрондық",-14.01756191253662],["▁הכלכלי",-14.01756191253662],["▁बाइक",-14.01756191253662],["▁хляб",-14.017562866210938],["▁nusikalt",-14.017563819885254],["▁Српској",-14.017563819885254],["ပုံစံ",-14.01756477355957],["▁framework",-14.017565727233888],["▁հույս",-14.017565727233888],["▁విశాఖ",-14.017565727233888],["
▁ಸೆಕ್ಸ್",-14.017565727233888],["▁Novembro",-14.017566680908203],["▁двумя",-14.01756763458252],["▁Brooklyn",-14.017569541931152],["美術館",-14.017573356628418],["▁δήλωση",-14.017574310302734],["▁સવાલ",-14.017574310302734],["▁dobrą",-14.017577171325684],["▁společně",-14.017581939697266],["▁ಅಷ್ಟೇ",-14.017581939697266],["▁отличается",-14.017585754394531],["▁оказались",-14.017589569091797],["▁Музей",-14.017593383789062],["توضيح",-14.01760196685791],["▁Қазір",-14.017602920532228],["abast",-14.017610549926758],["ข้อสอบ",-14.017610549926758],["▁yangiliklar",-14.017610549926758],["▁ruột",-14.01761245727539],["▁እንዲሆን",-14.01761245727539],["▁കാത്തിരിക്ക",-14.017613410949709],["▁trôi",-14.017614364624023],["▁참조",-14.01761531829834],["▁menangkap",-14.017616271972656],["▁pamatyti",-14.017616271972656],["▁ආශ්",-14.017620086669922],["▁ΑΕΚ",-14.017621040344238],["▁ಜನರಿಗೆ",-14.017621040344238],["પરા",-14.017621994018556],["▁دائىر",-14.017623901367188],["▁Նշ",-14.017626762390137],["గొట్ట",-14.017635345458984],["▁regnum",-14.017640113830566],["άτης",-14.017643928527832],["▁بهانه",-14.017644882202148],["ినేని",-14.017659187316896],["▁välkommen",-14.017666816711426],["▁жашап",-14.017667770385742],["▁Jānis",-14.017682075500488],["▁ولري",-14.01768398284912],["▁કયા",-14.017684936523438],["▁lurer",-14.017685890197754],["чаралар",-14.01768684387207],["▁Skoči",-14.017688751220703],["▁Rojhilata",-14.017690658569336],["▁appearance",-14.017701148986816],["מצרים",-14.017704010009766],["▁tillväxt",-14.017704963684082],["▁tuoksu",-14.017706871032717],["▁associazioni",-14.01771068572998],["▁замечательн",-14.017711639404297],["▁وروستي",-14.017711639404297],["සේකර",-14.017712593078612],["▁پہلو",-14.017715454101562],["▁ಕ್ರೀಡಾ",-14.017722129821776],["▁інтересів",-14.017739295959473],["▁serviciilor",-14.017744064331056],["▁പഠിച്ച",-14.017746925354004],["▁hoảng",-14.017748832702637],["▁bygninger",-14.017757415771484],["▁اعلي",-14.01779079437256],["▁проповед",-14.01780605316162],["ਾਉਂਦੇ",-14.017809867858888],["▁استخراج",-14.017809867858888],["▁катал",-14.017839431762695],["▁ثانیه",-14.017842292785645],["चंद",-14.017848014831545],["▁keičia",-14.01784896850586],["▁pijač",-14.017855644226074],["▁Parent",-14.017857551574709],["▁funcionalidad",-14.017858505249023],["▁established",-14.017868041992188],["▁lõuna",-14.017870903015137],["kräm",-14.017873764038086],["▁badhe",-14.0178804397583],["▁pendiente",-14.017881393432615],["ීම්",-14.017884254455566],["spice",-14.017891883850098],["▁හමු",-14.017895698547363],["▁цветя",-14.01791763305664],["▁तराई",-14.017929077148438],["shoq",-14.017932891845703],["▁frygt",-14.017950057983398],["▁কবিতা",-14.017953872680664],["▁hypothe",-14.017971992492676],["節能",-14.017972946166992],["▁montras",-14.017973899841309],["▁Damen",-14.017974853515623],["▁जुड़ी",-14.01798152923584],["▁počne",-14.017991065979004],["这不是",-14.017992973327637],["▁caruur",-14.01800537109375],["▁മഞ്ഞ",-14.018006324768066],["▁grāmatas",-14.018009185791016],["▁pianta",-14.01801872253418],["▁precau",-14.018030166625977],["▁Odgovor",-14.018041610717772],["▁evropske",-14.01804542541504],["▁creampie",-14.01811408996582],["▁කොල",-14.018115997314451],["▁rendben",-14.01811981201172],["▁perturba",-14.018122673034668],["售價",-14.018149375915527],["▁Nasir",-14.01815128326416],["▁içeriği",-14.01815700531006],["地形",-14.018163681030272],["▁кожи",-14.018169403076172],["ચે",-14.018184661865234],["▁zwierz",-14.018208503723145],["▁keyifli",-14.018255233764648],["▁келу",-14.01826286315918],["▁ગુજરાતમાં",-14.018268585205078],["▁жуық",-14.018284797668455],["▁O
lemme",-14.018298149108888],["ប្ប",-14.018299102783203],["ດົນ",-14.018302917480469],["▁Herzlich",-14.01831340789795],["▁границата",-14.018319129943848],["▁bugetul",-14.018354415893556],["▁ΑΓ",-14.01835823059082],["▁disponibilidad",-14.018376350402832],["▁CASA",-14.01838207244873],["▁Serviciul",-14.01838207244873],["▁بلاده",-14.018399238586426],["▁вынікі",-14.01840114593506],["מדד",-14.018412590026855],["▁измам",-14.01842212677002],["mailadres",-14.018426895141602],["▁mitmeid",-14.018449783325195],["aineet",-14.01845932006836],["▁добити",-14.018477439880373],["▁gittiği",-14.018481254577637],["▁katten",-14.018486976623535],["ลัง",-14.01850700378418],["▁doonaya",-14.018509864807127],["ovog",-14.018511772155762],["▁Destek",-14.01852321624756],["същ",-14.018546104431152],["▁policiais",-14.01859188079834],["সী",-14.018593788146973],["▁शांति",-14.018609046936035],["▁చంప",-14.0186128616333],["ligtas",-14.018617630004885],["▁Hadde",-14.018630027770996],["ล็อก",-14.018638610839844],["jälke",-14.018659591674805],["ನಾದ",-14.018662452697754],["▁बॅ",-14.01866626739502],["▁počuti",-14.018680572509766],["▁billed",-14.018686294555664],["▁integrada",-14.018689155578612],["▁कोही",-14.018701553344728],["סיפור",-14.018702507019045],["▁ulaşım",-14.01871109008789],["▁şagirdlər",-14.01871109008789],["▁preocupación",-14.018725395202637],["▁առաջադր",-14.018735885620115],["▁olayı",-14.018750190734863],["种类",-14.018765449523926],["▁Zahar",-14.01877212524414],["▁arbres",-14.018797874450684],["▁hverken",-14.01887035369873],["▁Регистр",-14.018882751464844],["▁925",-14.01897144317627],["▁الذكر",-14.018994331359863],["▁služb",-14.018998146057127],["▁zonă",-14.019025802612305],["ാമോ",-14.01903247833252],["▁слични",-14.019043922424316],["▁farkı",-14.019071578979492],["▁eigenaar",-14.019075393676758],["▁uvoz",-14.019085884094238],["てくれた",-14.019086837768556],["▁nikomu",-14.019089698791504],["▁அவர்களை",-14.019089698791504],["▁intaas",-14.019109725952148],["ዎት",-14.019120216369627],["▁tumma",-14.01913070678711],["▁ബസ്",-14.01915168762207],["▁radica",-14.01918125152588],["▁дыск",-14.01918601989746],["ਫ਼ਾ",-14.019208908081056],["不断地",-14.019210815429688],["▁dalalka",-14.019221305847168],["▁않으면",-14.019227027893066],["▁doprava",-14.019268989562988],["▁выводу",-14.019269943237305],["▁Publikum",-14.019282341003418],["▁పరుగుల",-14.019285202026367],["دني",-14.019288063049316],["▁xukuumadda",-14.019290924072266],["▁arrivato",-14.01930046081543],["නීය",-14.019322395324709],["▁Shoq",-14.01934051513672],["podziewa",-14.019381523132324],["みましょう",-14.01938533782959],["▁pokojni",-14.01939296722412],["▁Plaid",-14.019408226013184],["▁срца",-14.01942253112793],["▁Milk",-14.01943302154541],["▁finansijsk",-14.019436836242676],["▁узима",-14.019452095031738],["법원",-14.019455909729004],["▁RGB",-14.019463539123535],["▁Fahrer",-14.019475936889648],["▁Radyo",-14.019476890563965],["▁неожиданно",-14.01948070526123],["▁Kett",-14.019481658935549],["estudiant",-14.01949405670166],["▁kuchli",-14.01951026916504],["上学",-14.019515991210938],["ሄዱ",-14.019522666931152],["طعن",-14.019529342651367],["▁angļu",-14.01954746246338],["▁percebe",-14.019549369812012],["▁RTL",-14.01955509185791],["▁પકડ",-14.01956844329834],["▁సభ్యులు",-14.019577026367188],["▁ఎత్తు",-14.01958465576172],["▁kiadó",-14.01960563659668],["▁Sensor",-14.019627571105955],["▁Credo",-14.019699096679688],["▁سوره",-14.01970100402832],["▁pjesma",-14.019701957702637],["rechtlichen",-14.019723892211914],["055",-14.019730567932127],["▁establi",-14.019730567932127],["ဝါး",-14.019756317138672],["naskiĝis",-1
4.019757270812988],["▁spørre",-14.01975917816162],["schwer",-14.019781112670898],["▁adiciona",-14.019816398620604],["▁patay",-14.019831657409668],["▁cearn",-14.0198392868042],["▁titlul",-14.019857406616213],["கார்",-14.019861221313477],["▁knip",-14.01986312866211],["മുണ്ടായ",-14.019872665405272],["1965",-14.019893646240234],["Мал",-14.019909858703612],["▁Temer",-14.01991367340088],["▁organizuoja",-14.019942283630373],["▁organizace",-14.01994514465332],["UDE",-14.019949913024902],["▁மேல",-14.020002365112305],["▁akcí",-14.020004272460938],["▁Evropsko",-14.02000904083252],["ឿន",-14.020017623901367],["してます",-14.020063400268556],["▁තනතුර",-14.02007007598877],["கொள்",-14.020081520080566],["מחיר",-14.020090103149414],["▁врача",-14.020090103149414],["▁Sabri",-14.020108222961426],["मंत्र",-14.020124435424805],["▁რუსეთი",-14.020161628723145],["▁konečne",-14.020177841186523],["partij",-14.020184516906738],["▁posljednji",-14.020225524902344],["ตระ",-14.020230293273926],["▁منظوری",-14.020265579223633],["▁හිට",-14.02026653289795],["▁అంశాల",-14.020275115966797],["ახალი",-14.020301818847656],["ძინე",-14.020301818847656],["න්නා",-14.020318031311035],["チェ",-14.020331382751465],["viisi",-14.02033519744873],["▁околна",-14.020350456237791],["▁shaqeeya",-14.020357131958008],["কুর",-14.02036190032959],["▁والتر",-14.020407676696776],["形で",-14.020424842834473],["лууд",-14.020435333251951],["▁tayong",-14.020437240600586],["▁изисквания",-14.020452499389648],["यौं",-14.020467758178713],["Chat",-14.020468711853027],["ינסט",-14.02048397064209],["riyo",-14.020500183105469],["▁убит",-14.020508766174316],["ਗਤ",-14.020516395568848],["马克思主义",-14.020519256591797],["▁છોડી",-14.020523071289062],["▁таңдау",-14.02053928375244],["▁neurologi",-14.020548820495604],["كذب",-14.020552635192873],["នយោបាយ",-14.020562171936035],["▁acabado",-14.020594596862791],["▁gaver",-14.020609855651855],["公务员",-14.020624160766602],["比特币",-14.020639419555664],["685",-14.02066135406494],["ۇنداق",-14.020668029785156],["▁nozīmī",-14.020674705505373],["ทุกข์",-14.020678520202637],["พระบาทสมเด็จพระ",-14.020679473876951],["ពុទ្ធ",-14.020679473876951],["▁Alessandro",-14.020679473876951],["▁Ollscoil",-14.020679473876951],["▁République",-14.020679473876951],["▁Técnico",-14.020679473876951],["▁baxımından",-14.020679473876951],["▁hétvégé",-14.020679473876951],["▁lanzamiento",-14.020679473876951],["▁maždaug",-14.020679473876951],["▁natychmiast",-14.020679473876951],["▁recompensa",-14.020679473876951],["▁stroškov",-14.020679473876951],["▁tarnybos",-14.020679473876951],["▁töhfə",-14.020679473876951],["▁zrakoplov",-14.020679473876951],["▁ανακοίνωσε",-14.020679473876951],["▁πίεση",-14.020679473876951],["▁παίρνει",-14.020679473876951],["▁Жамбыл",-14.020679473876951],["▁Тимошенко",-14.020679473876951],["▁автомобіля",-14.020679473876951],["▁байршуулах",-14.020679473876951],["▁никакъв",-14.020679473876951],["▁поведінки",-14.020679473876951],["▁юстиції",-14.020679473876951],["▁الكهربائي",-14.020679473876951],["▁दूसरा",-14.020679473876951],["▁सन्दर्भ",-14.020679473876951],["▁આયોજન",-14.020679473876951],["▁ଶ୍ରେଷ୍ଠ",-14.020679473876951],["▁ସୁବିଧା",-14.020679473876951],["▁ఫీచర్",-14.020679473876951],["▁సలహా",-14.020679473876951],["▁సీనియర్",-14.020679473876951],["▁ಕ್ಯಾಮೆರಾ",-14.020679473876951],["▁좋습니다",-14.020679473876951],["덜",-14.020679473876951],["ঐ",-14.02068042755127],["▁Nicholas",-14.02068042755127],["▁toimunud",-14.02068042755127],["▁СМІ",-14.02068042755127],["▁आश्चर्य",-14.02068042755127],["▁শরীর",-14.02068042755127],["▁그리스도",-14.02068042755127],["⠀",-14.0206804275512
7],["▁Arkitekt",-14.020681381225586],["▁negdje",-14.020681381225586],["▁paveiksl",-14.020681381225586],["▁यात्रु",-14.020681381225586],["▁hudební",-14.020682334899902],["▁αρχηγ",-14.020682334899902],["▁juridique",-14.02068328857422],["▁ದೇವಾಲಯ",-14.020684242248535],["መስቀል",-14.020685195922852],["▁המבקש",-14.020685195922852],["▁заклік",-14.020686149597168],["▁shkencor",-14.020689010620115],["▁üzemeltet",-14.020689010620115],["▁последица",-14.020689010620115],["▁әзір",-14.020689010620115],["▁цааш",-14.020689964294434],["▁Vetëm",-14.020691871643066],["▁საზოგადოებრივი",-14.020692825317385],["▁ελληνικά",-14.020695686340332],["▁اجاره",-14.020696640014648],["▁comfortabel",-14.020697593688965],["▁menghantar",-14.020697593688965],["▁లెక్క",-14.020697593688965],["▁Viljandi",-14.02069854736328],["▁complejo",-14.02069854736328],["▁زینب",-14.020702362060549],["라운드",-14.020702362060549],["▁знайшли",-14.020706176757812],["▁используют",-14.020711898803713],["▁pričakovan",-14.020713806152344],["▁състои",-14.02071475982666],["▁Китеп",-14.020715713500977],["పడ్డారు",-14.020716667175291],["▁תפריט",-14.020716667175291],["სიამოვნე",-14.020719528198242],["▁Hydref",-14.020722389221191],["▁tevlî",-14.020723342895508],["▁powołan",-14.020729064941406],["▁Zsolt",-14.02073574066162],["▁Florence",-14.020737648010254],["▁זמני",-14.02074146270752],["▁طاہر",-14.02074146270752],["konservativ",-14.020743370056152],["▁augstu",-14.020746231079102],["▁réception",-14.020750045776367],["ലുകള്",-14.020751953125],["▁پنجره",-14.020753860473633],["▁ältere",-14.02075481414795],["▁successivo",-14.020755767822266],["▁käynyt",-14.020758628845217],["▁zarokên",-14.020758628845217],["▁ongeluk",-14.02076244354248],["▁übertragen",-14.020763397216797],["▁संभव",-14.02076816558838],["▁त्यासाठी",-14.020771980285645],["▁сумнів",-14.020776748657228],["▁Väike",-14.020787239074709],["▁boshladi",-14.020790100097656],["▁ovenfor",-14.020790100097656],["▁መኖር",-14.020804405212402],["തിനാ",-14.02080535888672],["▁ysgolion",-14.020806312561035],["▁사이에",-14.020809173583984],["▁alterações",-14.020812034606934],["▁выканаў",-14.020816802978516],["▁teenoor",-14.020817756652832],["ឈាន",-14.02082061767578],["亮相",-14.020827293395996],["▁obviously",-14.020833015441896],["▁εμβ",-14.020833015441896],["חיד",-14.02085018157959],["▁odpowiedź",-14.020855903625488],["gudu",-14.02087116241455],["▁jäänyt",-14.020872116088867],["▁własnego",-14.020873069763184],["▁avantages",-14.020877838134766],["집니다",-14.020882606506348],["▁Einkommen",-14.02088737487793],["▁Everton",-14.020888328552246],["ខាងមុខ",-14.020894050598145],["▁препарати",-14.02089500427246],["▁bạo",-14.020899772644045],["▁Pussy",-14.020904541015623],["▁Orice",-14.02090835571289],["▁жертва",-14.020914077758787],["▁odobren",-14.020926475524902],["▁υλικό",-14.020943641662598],["රුවන්",-14.020944595336914],["▁shalay",-14.020947456359863],["▁Боро",-14.020950317382812],["▁소유",-14.020951271057127],["▁עמד",-14.020960807800291],["~~",-14.02096176147461],["▁Syrien",-14.02096462249756],["▁джерело",-14.020966529846191],["▁χώρους",-14.020968437194824],["▁לחזור",-14.020998001098633],["дё",-14.021005630493164],["▁λαός",-14.021010398864746],["▁ביחס",-14.02103328704834],["▁vjerujem",-14.021045684814451],["ജീവന",-14.021055221557615],["တုိက္",-14.0210599899292],["äiset",-14.021061897277832],["▁Federasiyası",-14.021076202392578],["▁condiţiile",-14.021077156066896],["▁Kurdên",-14.021082878112791],["▁комнате",-14.021089553833008],["▁இருக்கிறார்",-14.021100044250488],["食べた",-14.021103858947754],["MOMO",-14.021106719970703],["veiro",-14.021
11530303955],["▁종교",-14.021135330200195],["▁песні",-14.02113914489746],["autorisation",-14.021143913269045],["២២",-14.021150588989258],["▁ברגע",-14.021162033081056],["▁пище",-14.021166801452637],["▁učenje",-14.021184921264648],["▁scoala",-14.021190643310549],["sérült",-14.02120590209961],["▁Σχολή",-14.021207809448242],["▁кошо",-14.021209716796877],["▁sekian",-14.021222114562988],["▁osobnih",-14.021225929260254],["ااااا",-14.021281242370604],["▁العمليات",-14.021286010742188],["▁devait",-14.021296501159668],["▁Mangler",-14.0213041305542],["▁մարզում",-14.021317481994627],["讓她",-14.021344184875488],["▁Specialist",-14.021350860595703],["ούπολη",-14.021355628967283],["▁RADIO",-14.021361351013184],["käigu",-14.021366119384766],["▁Launch",-14.021377563476562],["▁Rusko",-14.021388053894045],["▁kullanıyor",-14.021404266357422],["▁매년",-14.02140998840332],["の流れ",-14.021411895751951],["▁مخالفین",-14.021437644958496],["▁dedicated",-14.021440505981444],["дарынын",-14.02149486541748],["וטו",-14.021496772766112],["▁bedroom",-14.021510124206545],["창원",-14.02151870727539],["হান",-14.021563529968262],["▁Lionel",-14.021615982055664],["方も多い",-14.021617889404297],["▁fallait",-14.02164363861084],["▁jatkuva",-14.021646499633787],["▁Irina",-14.021653175354004],["受け入れ",-14.021655082702637],["ตึก",-14.0216703414917],["▁Tomasz",-14.021685600280762],["▁అనుమతి",-14.021685600280762],["бужда",-14.021686553955078],["▁դադարեց",-14.021711349487305],["▁Michał",-14.02174186706543],["್ವಾ",-14.021753311157228],["▁රැස්",-14.02176284790039],["▁Збор",-14.021769523620604],["▁trigo",-14.02177906036377],["▁kompetisi",-14.0217866897583],["▁krši",-14.021787643432615],["靈感",-14.02178955078125],["lamaq",-14.021810531616213],["▁њим",-14.021814346313477],["migrant",-14.021821975708008],["बस",-14.021821975708008],["läsning",-14.021824836730955],["▁interdit",-14.021860122680664],["▁മണ",-14.021865844726562],["軍事",-14.021868705749512],["日式",-14.021881103515623],["▁manteniment",-14.02188777923584],["ანად",-14.021893501281738],["▁invitati",-14.021926879882812],["▁истим",-14.021990776062012],["avimui",-14.022003173828123],["▁Tē",-14.022005081176758],["▁Ärzte",-14.022011756896973],["هوية",-14.022014617919922],["跨越",-14.022016525268556],["ագրության",-14.022038459777832],["▁ostiral",-14.02207374572754],["▁аймагында",-14.022130012512209],["▁režimu",-14.022162437438965],["6,9",-14.022177696228027],["▁egyszerre",-14.022187232971191],["▁Kakš",-14.022209167480469],["▁bodoh",-14.022209167480469],["▁Gaar",-14.022214889526367],["▁suallar",-14.022221565246582],["▁gratuïta",-14.022239685058594],["▁حاجي",-14.022239685058594],["▁startade",-14.022241592407228],["▁rekel",-14.022246360778809],["▁безопасност",-14.022263526916504],["▁Bruger",-14.022266387939451],["天気",-14.022275924682615],["比如说",-14.022283554077148],["▁vzduch",-14.02228546142578],["برداری",-14.022292137145996],["▁genul",-14.022302627563477],["เนี่ย",-14.022311210632324],["▁дължина",-14.022315979003906],["▁કહેવાય",-14.02231788635254],["▁Treffen",-14.022348403930664],["ਫ਼ਰ",-14.02234935760498],["ਂਗ",-14.022393226623535],["လံု",-14.0224027633667],["▁Baptist",-14.0224027633667],["▁ഏത്",-14.02241039276123],["Само",-14.022431373596191],["natura",-14.022432327270508],["▁обмеження",-14.022451400756836],["EBC",-14.022459030151367],["boende",-14.022513389587402],["▁городах",-14.022534370422363],["▁Γενική",-14.022550582885742],["ΧΑ",-14.02255916595459],["▁plynu",-14.022573471069336],["基础上",-14.022587776184082],["▁Црној",-14.02259922027588],["ໂຊ",-14.022602081298828],["نسا",-14.022631645202637],["ハード",-14.0226516
7236328],["ङ्गा",-14.022704124450684],["sayang",-14.022712707519531],["۰۲",-14.02271556854248],["▁stacion",-14.02274227142334],["agairt",-14.022745132446287],["ambassade",-14.022757530212402],["መዝ",-14.022764205932615],["▁Kocha",-14.022790908813477],["nošenje",-14.02280044555664],["ענען",-14.022831916809082],["halagang",-14.022851943969728],["▁istəyi",-14.022857666015623],["▁прегледа",-14.022860527038574],["疑问",-14.022894859313965],["אָנ",-14.022915840148926],["ષ્ઠ",-14.02293872833252],["ektől",-14.022951126098633],["▁allà",-14.022954940795898],["РЕК",-14.02296543121338],["▁meklēt",-14.02297306060791],["्दो",-14.023012161254885],["▁sākt",-14.023015022277832],["市区",-14.023059844970703],["duizend",-14.02306079864502],["ീവ",-14.02308464050293],["▁બનાવો",-14.02309513092041],["aseman",-14.023096084594728],["▁رسيد",-14.02310562133789],["▁magister",-14.02312469482422],["émission",-14.023130416870115],["▁Володимира",-14.02315616607666],["敌人",-14.023164749145508],["▁kraujo",-14.023168563842772],["▁elastan",-14.023191452026367],["▁filium",-14.023193359375],["▁netgi",-14.023193359375],["▁Номер",-14.023198127746582],["பட்டு",-14.02321434020996],["▁ovoz",-14.023219108581545],["▁Warren",-14.023240089416504],["ข้า",-14.023253440856934],["izimit",-14.02325439453125],["न्ती",-14.023274421691896],["ক্রম",-14.023298263549805],["▁singola",-14.02331256866455],["▁problemă",-14.023323059082031],["wohl",-14.023334503173828],["▁inscrição",-14.023343086242676],["ટેલ",-14.023355484008787],["▁ලබාදෙන",-14.023357391357422],["▁ryšk",-14.023359298706056],["プレー",-14.023364067077637],["提及",-14.02336597442627],["ေဖာ္ျပ",-14.023370742797852],["▁انگريز",-14.0233736038208],["▁сурах",-14.02338695526123],["ഫു",-14.023393630981444],["pedagógus",-14.023394584655762],["▁kartų",-14.023399353027344],["υθυν",-14.023405075073242],["യെങ്കിലും",-14.023414611816406],["구역",-14.023452758789062],["леныя",-14.023467063903809],["▁فرشت",-14.023476600646973],["▁sälj",-14.023487091064451],["▁constitution",-14.023494720458984],["▁полов",-14.023508071899414],["বিডি",-14.023554801940918],["▁ఆల్",-14.023564338684082],["▁તાજ",-14.02361297607422],["гіп",-14.0236177444458],["жер",-14.023624420166016],["▁Портал",-14.02363109588623],["▁സംവിധാന",-14.023681640625],["▁руског",-14.023685455322266],["ಬಲ್ಲ",-14.023691177368164],["नारायण",-14.02369213104248],["ผ่อน",-14.023693084716797],["うちの",-14.02369785308838],["▁الدولار",-14.02371311187744],["▁Vantaa",-14.023725509643556],["▁उपस्थिति",-14.023728370666504],["sinlər",-14.023730278015137],["▁soumis",-14.023730278015137],["▁rider",-14.023741722106934],["ómico",-14.023743629455566],["虐",-14.023743629455566],["▁33%",-14.023754119873049],["混乱",-14.02377223968506],["方が良い",-14.023776054382324],["批評",-14.023792266845703],["λπ",-14.023794174194336],["犹豫",-14.023794174194336],["▁1867",-14.023796081542969],["氣候",-14.023804664611816],["▁живут",-14.023807525634766],["menjak",-14.023811340332031],["ช้อปปิ้ง",-14.023811340332031],["▁ភេទ",-14.023812294006348],["ຊຸກຍູ້",-14.023813247680664],["ပေါ့",-14.023813247680664],["សាស្រ្ត",-14.023813247680664],["apprentissage",-14.02381420135498],["ɛ",-14.02381420135498],["▁Bealtaine",-14.02381420135498],["▁Chevrolet",-14.02381420135498],["▁Privātum",-14.02381420135498],["▁Swêdê",-14.02381420135498],["▁Thảo",-14.02381420135498],["▁Təhlükəsizlik",-14.02381420135498],["▁absolument",-14.02381420135498],["▁announced",-14.02381420135498],["▁gyventojai",-14.02381420135498],["▁irgendwann",-14.02381420135498],["▁jurídico",-14.02381420135498],["▁kejahatan",-14.02381420135498],["▁mengumpulkan",-14
.02381420135498],["▁pamamaraan",-14.02381420135498],["▁pemandangan",-14.02381420135498],["▁pengeluaran",-14.02381420135498],["▁przypadkach",-14.02381420135498],["▁tecnológico",-14.02381420135498],["▁telèfon",-14.02381420135498],["▁verbessert",-14.02381420135498],["▁vereinbart",-14.02381420135498],["▁viacerých",-14.02381420135498],["▁łatwiej",-14.02381420135498],["▁κράτους",-14.02381420135498],["▁οικονομικά",-14.02381420135498],["▁адбудзецца",-14.02381420135498],["▁договір",-14.02381420135498],["▁поддржува",-14.02381420135498],["▁сістэмы",-14.02381420135498],["▁أنحاء",-14.02381420135498],["▁الأراضي",-14.02381420135498],["▁گھنٹے",-14.02381420135498],["▁ਮੌਜੂਦ",-14.02381420135498],["▁ਵਜੋਂ",-14.02381420135498],["▁ਸੰਪਰਕ",-14.02381420135498],["▁କମ୍ପାନୀ",-14.02381420135498],["▁ଦ୍ୱିତୀୟ",-14.02381420135498],["▁గుండె",-14.02381420135498],["▁ತನಿಖೆ",-14.02381420135498],["▁පෙරේරා",-14.02381420135498],["끈",-14.02381420135498],["▁Mısır",-14.023815155029297],["▁neprijatelj",-14.023815155029297],["▁peňazí",-14.023815155029297],["▁почувствовал",-14.023815155029297],["▁نتوانست",-14.023815155029297],["▁چیلنج",-14.023815155029297],["▁મોબાઇલ",-14.023815155029297],["▁ପଦକ୍ଷେପ",-14.023815155029297],["▁ଯୁବତୀ",-14.023815155029297],["▁සාධාරණ",-14.023815155029297],["▁Antarabangsa",-14.023816108703612],["▁उपाधि",-14.023816108703612],["▁వెల్లడించ",-14.023816108703612],["ថ្នាំ",-14.023818016052246],["▁Konferans",-14.023818016052246],["▁učestvova",-14.023818016052246],["▁शिंदे",-14.023818016052246],["▁Everything",-14.023818969726562],["▁mitjà",-14.023818969726562],["▁இறை",-14.02381992340088],["▁nálunk",-14.023820877075195],["ศิลป์",-14.023821830749512],["▁শব্দ",-14.023821830749512],["▁täpse",-14.023822784423828],["▁ታዲያ",-14.023825645446776],["▁حمايت",-14.023829460144045],["มป์",-14.02383041381836],["▁opplysninger",-14.02383041381836],["▁բանակցություն",-14.023831367492676],["▁ragazzo",-14.023832321166992],["▁표준",-14.023832321166992],["▁lalàna",-14.02383518218994],["▁IKKE",-14.023836135864258],["幅度",-14.02383804321289],["▁چڑھ",-14.023838996887209],["▁biarkan",-14.023839950561523],["정상회담",-14.023839950561523],["▁chuột",-14.02384090423584],["▁görünüyor",-14.023841857910156],["▁Μέσα",-14.023841857910156],["▁మిత్ర",-14.023844718933104],["▁ręce",-14.023845672607422],["▁Landkreis",-14.023847579956056],["▁राखेको",-14.023847579956056],["R",-14.023849487304688],["▁kullanılır",-14.02385139465332],["язкове",-14.023852348327637],["▁Niveau",-14.023852348327637],["▁Pārdod",-14.02385425567627],["▁Hægt",-14.023860931396484],["▁minőségű",-14.023860931396484],["▁Karaoke",-14.023862838745115],["▁చిత్రాన్ని",-14.02386474609375],["▁difesa",-14.0238676071167],["▁Stewart",-14.023870468139648],["▁plaukų",-14.023882865905762],["▁sarcini",-14.02389144897461],["跑步",-14.02389430999756],["▁ζητά",-14.023895263671877],["▁družine",-14.023896217346191],["▁доўга",-14.023896217346191],["▁ফেসবুকে",-14.023896217346191],["▁kategory",-14.023897171020508],["▁занимљив",-14.023900032043455],["▁допомога",-14.023900985717772],["夜晚",-14.02390480041504],["▁اشغال",-14.023905754089355],["▁אבא",-14.023908615112305],["▁klijent",-14.02390956878662],["▁suosikki",-14.02391242980957],["▁Reglament",-14.023921966552734],["▁다양",-14.023933410644531],["▁iekārtas",-14.02393627166748],["şünas",-14.023941040039062],["ինա",-14.02394676208496],["▁Güncel",-14.02395248413086],["▁daardoor",-14.02395725250244],["▁lehetnek",-14.02396011352539],["▁áratug",-14.023967742919922],["แพร่",-14.023968696594238],["ታችሁ",-14.023969650268556],["▁kompreni",-14.023971557617188],["▁konkurrent",-14.0239830017
08984],["▁가능하다",-14.023984909057615],["▁raspored",-14.023985862731934],["بلی",-14.02399444580078],["只不过",-14.02399730682373],["▁Doğan",-14.02401065826416],["▁तरिका",-14.02401065826416],["▁sonuncu",-14.024033546447754],["民主党",-14.02403450012207],["▁kiusa",-14.02403736114502],["▁atsauc",-14.024039268493652],["▁مشترکہ",-14.024039268493652],["सहस्र",-14.024043083190918],["▁preservar",-14.024048805236816],["▁услуге",-14.024048805236816],["▁Lassen",-14.024055480957031],["дић",-14.024059295654297],["умду",-14.024060249328612],["▁بشری",-14.02407455444336],["כּ",-14.024075508117676],["▁okoliša",-14.02409839630127],["▁terlepas",-14.02409839630127],["▁corresponent",-14.024102210998535],["▁සරල",-14.024114608764648],["▁החשוב",-14.02412223815918],["▁euskararen",-14.024128913879396],["▁تنش",-14.02416706085205],["▁Истори",-14.024170875549316],["▁haberá",-14.02417278289795],["▁بازیگران",-14.024191856384276],["▁surpresa",-14.024202346801758],["▁Elämä",-14.024210929870604],["▁убијен",-14.024222373962402],["μένης",-14.024240493774414],["vrez",-14.02424430847168],["▁signál",-14.024261474609377],["▁veranderen",-14.024274826049805],["▁استخوان",-14.02427577972412],["brať",-14.024297714233398],["қара",-14.024311065673828],["▁pescado",-14.024320602416992],["▁populär",-14.024328231811523],["ユニ",-14.024356842041016],["▁सतत",-14.024373054504396],["▁චරිතය",-14.024392127990724],["လိုအပ္",-14.02441120147705],["▁ярих",-14.024433135986328],["ïda",-14.024446487426758],["ULLA",-14.024471282958984],["▁नेतृत्वको",-14.02448844909668],["ჭრა",-14.024502754211426],["▁Vstup",-14.024503707885742],["▁الشعبية",-14.02451229095459],["▁vedľa",-14.024524688720703],["▁коментарів",-14.024526596069336],["642",-14.024548530578612],["possibili",-14.024580001831056],["▁Saada",-14.024590492248535],["스케",-14.024608612060549],["▁элге",-14.024613380432127],["페이스",-14.024613380432127],["حزن",-14.024666786193848],["▁логор",-14.024686813354492],["ေလ့",-14.02469253540039],["▁ඉන්නෙ",-14.024693489074709],["▁నేర్చ",-14.02469539642334],["▁koşulları",-14.024702072143556],["▁najčešće",-14.024718284606934],["▁gospodarstvo",-14.02472972869873],["▁призыв",-14.024738311767578],["▁kusaidia",-14.024744987487791],["▁කොන්",-14.024758338928224],["▁içki",-14.024765968322754],["éshez",-14.024810791015623],["šený",-14.024825096130373],["ତଃ",-14.024832725524902],["▁vielfältig",-14.024834632873535],["▁Година",-14.024842262268066],["▁լսել",-14.0248441696167],["▁halsen",-14.024858474731444],["▁leží",-14.024858474731444],["▁канала",-14.024894714355469],["▁выхода",-14.02490520477295],["ექსი",-14.024922370910645],["ছু",-14.024927139282228],["为期",-14.024932861328123],["▁naravi",-14.02493381500244],["נוב",-14.024953842163086],["▁वर्णन",-14.025006294250488],["▁Лен",-14.025029182434082],["പ്പും",-14.025032997131348],["ತ್ತೆ",-14.025035858154297],["▁Втори",-14.025057792663574],["▁picked",-14.025065422058104],["내기",-14.025065422058104],["▁dráh",-14.025081634521484],["▁Tong",-14.02509307861328],["▁Yog",-14.025108337402344],["ვებული",-14.02511215209961],["▁Asad",-14.02511978149414],["▁küsimuste",-14.02513599395752],["▁aðrir",-14.02515983581543],["קדמת",-14.025166511535645],["▁mentira",-14.025166511535645],["▁yemək",-14.025176048278809],["▁farmer",-14.025191307067873],["▁Iroda",-14.0252046585083],["1940",-14.025208473205566],["▁nevz",-14.02521514892578],["▁mintaqa",-14.02522087097168],["பது",-14.02523422241211],["▁ПОС",-14.025254249572754],["▁ranger",-14.02525520324707],["▁blend",-14.025283813476562],["effect",-14.025287628173828],["ೀಶ್",-14.025288581848145],["▁பாவ",-14.025334358215332],
["▁duplica",-14.02534008026123],["бағ",-14.025347709655762],["▁թուրք",-14.02537727355957],["▁поголеми",-14.025391578674316],["ძლო",-14.025394439697266],["▁vereador",-14.025397300720217],["Бала",-14.025403022766112],["▁содржи",-14.025415420532228],["▁Ciò",-14.025418281555176],["ແຄ",-14.025428771972656],["▁معین",-14.025431632995604],["vernd",-14.02544116973877],["▁формирање",-14.025500297546388],["សម",-14.025506973266602],["▁maža",-14.025540351867676],["aramdaman",-14.025542259216309],["حاب",-14.025555610656738],["▁جوابدار",-14.025559425354004],["เวล",-14.02558708190918],["्नुपर्छ",-14.025623321533203],["▁његовом",-14.025679588317873],["aalaga",-14.025683403015137],["िस्तान",-14.025691986083984],["▁gäst",-14.025697708129885],["▁ئور",-14.025710105895996],["▁ഇവിട",-14.025741577148438],["дигне",-14.025758743286133],["▁γάμο",-14.025764465332031],["izovaný",-14.025766372680664],["▁шесть",-14.025775909423828],["▁addit",-14.02578067779541],["▁kuusi",-14.02578830718994],["▁sôi",-14.025789260864258],["▁hadith",-14.025815963745115],["貫",-14.025819778442385],["рошен",-14.025826454162598],["▁Responsabili",-14.025839805603027],["ränn",-14.025853157043455],["▁vastutus",-14.02585792541504],["▁Tiek",-14.02588939666748],["544",-14.02592945098877],["▁konularda",-14.025934219360352],["▁gemiddelde",-14.025943756103516],["▁ورکړی",-14.025952339172363],["▁അവളെ",-14.025968551635742],["JEL",-14.026006698608398],["▁예수",-14.02602195739746],["▁Dhar",-14.0260648727417],["પતિ",-14.026100158691406],["▁पिक",-14.026143074035645],["▁Бър",-14.02615451812744],["▁romanzo",-14.026187896728516],["什么是",-14.026206016540527],["431",-14.026212692260742],["▁гром",-14.026220321655272],["ทําเล",-14.02622127532959],["兩位",-14.026244163513184],["▁anfon",-14.026252746582031],["▁személyek",-14.02627658843994],["იკის",-14.026284217834473],["feier",-14.026288986206056],["организацион",-14.026321411132812],["ଫୋନ",-14.02633285522461],["سمى",-14.026351928710938],["▁western",-14.026352882385254],["اجتماع",-14.02635669708252],["Flor",-14.026366233825684],["▁okuloj",-14.026368141174316],["▁βοηθήσει",-14.026386260986328],["TAY",-14.026432037353516],["▁armasta",-14.026446342468262],["都不会",-14.026447296142578],["▁MAIS",-14.02647304534912],["Телевиз",-14.02647876739502],["ීමෙන්",-14.026494979858398],["встановлен",-14.02650260925293],["を知って",-14.026507377624512],["▁ມານີ",-14.026508331298828],["▁istədiyi",-14.02651023864746],["ቀደ",-14.026531219482422],["▁зогс",-14.026533126831056],["▁చెల్ల",-14.026537895202637],["ผิวขาว",-14.026546478271484],["取り入れ",-14.026546478271484],["四個",-14.026562690734863],["▁reggeli",-14.026602745056152],["άρη",-14.026616096496582],["ъф",-14.026617050170898],["▁korrik",-14.026631355285645],["▁Аман",-14.026634216308594],["▁започват",-14.02663803100586],["▁પ્રમાણ",-14.026639938354492],["▁Having",-14.026650428771973],["▁axudar",-14.026653289794922],["خسر",-14.026670455932615],["▁مکانی",-14.026700019836426],["▁parameter",-14.026713371276855],["▁Praia",-14.026726722717283],["nasib",-14.026728630065918],["ဇြန္",-14.026798248291016],["▁cyst",-14.026803016662598],["គិត",-14.026814460754396],["ಛ",-14.02681827545166],["أستاذ",-14.02682876586914],["abilmesi",-14.026833534240724],["▁myslim",-14.02684211730957],["▁DAIŞ",-14.026848793029783],["▁көрген",-14.026880264282228],["▁پاپ",-14.026887893676758],["▁Unfall",-14.026898384094238],["ნავ",-14.0269136428833],["香蕉",-14.026935577392578],["волод",-14.026938438415527],["▁잘못",-14.026947021484377],["闘",-14.026948928833008],["fjöl",-14.026952743530272],["介入",-14.026955604553224],["ອະນຸຍາດ",-14.02695655
822754],["กังวล",-14.026957511901855],["บาดเจ็บ",-14.026957511901855],["ແລກປ່ຽນ",-14.026957511901855],["บวม",-14.026958465576172],["ເຜົ່າ",-14.026958465576172],["épreuve",-14.026959419250488],["▁Artículo",-14.026959419250488],["▁Craciun",-14.026959419250488],["▁LnwShop",-14.026959419250488],["▁academia",-14.026959419250488],["▁fhèin",-14.026959419250488],["▁gərgin",-14.026959419250488],["▁mismunandi",-14.026959419250488],["▁ongelmia",-14.026959419250488],["▁pemesanan",-14.026959419250488],["▁poświęcon",-14.026959419250488],["▁specjalnie",-14.026959419250488],["▁təltif",-14.026959419250488],["▁Μεγάλη",-14.026959419250488],["▁εξελίξεις",-14.026959419250488],["▁Прогноз",-14.026959419250488],["▁СРСР",-14.026959419250488],["▁жыйынтыгы",-14.026959419250488],["▁многія",-14.026959419250488],["▁соответствует",-14.026959419250488],["▁удобства",-14.026959419250488],["▁הרפואי",-14.026959419250488],["▁התעופה",-14.026959419250488],["▁المقاومة",-14.026959419250488],["▁तालिम",-14.026959419250488],["▁সূত্র",-14.026959419250488],["▁স্বাধীনতা",-14.026959419250488],["▁વિષે",-14.026959419250488],["▁சிந்தனை",-14.026959419250488],["اكتشاف",-14.026960372924805],["لەندۈر",-14.026960372924805],["▁Nghiên",-14.026960372924805],["▁Wireless",-14.026960372924805],["▁Xesús",-14.026960372924805],["▁dərhal",-14.026960372924805],["▁khilaaf",-14.026960372924805],["▁προστατεύ",-14.026960372924805],["▁Апостол",-14.026960372924805],["▁набагато",-14.026960372924805],["▁تدریس",-14.026960372924805],["▁عديدة",-14.026960372924805],["▁pergerakan",-14.02696132659912],["▁şikayet",-14.02696132659912],["▁இயக்குனர்",-14.02696132659912],["▁встретил",-14.026962280273438],["▁साबित",-14.026962280273438],["▁MAGYAR",-14.026963233947754],["▁לנסות",-14.026963233947754],["▁Джей",-14.02696418762207],["▁отглежда",-14.02696418762207],["บทบาท",-14.026965141296388],["▁ಕೈಗೊಳ್ಳ",-14.02696704864502],["有很大的",-14.026968955993652],["▁ringrazia",-14.026969909667969],["▁Shuningdek",-14.026970863342283],["▁האחרונים",-14.026972770690918],["慾",-14.02697467803955],["▁publicznych",-14.0269775390625],["▁Örebro",-14.0269775390625],["▁독립",-14.026978492736816],["▁denúncia",-14.026983261108398],["▁strategji",-14.026983261108398],["▁учасники",-14.026983261108398],["▁požadovan",-14.026985168457031],["▁הצעיר",-14.026985168457031],["भूमि",-14.026987075805664],["▁някога",-14.026987075805664],["▁excesiv",-14.02698802947998],["▁Монголчууд",-14.02698802947998],["जन्य",-14.026991844177246],["▁Passion",-14.026991844177246],["gefordert",-14.02699375152588],["▁Upload",-14.02699375152588],["▁inkludera",-14.026994705200195],["▁ursprung",-14.026995658874512],["▁പിതാവ",-14.026999473571776],["▁ეხლა",-14.026999473571776],["völgy",-14.027003288269045],["OJI",-14.027005195617676],["^_^",-14.027005195617676],["▁edərkən",-14.02700901031494],["▁Einführung",-14.027024269104004],["▁அறிமுகம்",-14.027027130126951],["▁πηγή",-14.02702808380127],["▁Биздин",-14.027034759521484],["▁везде",-14.0270357131958],["▁mánaða",-14.027043342590332],["▁թե՞",-14.027043342590332],["▁desenvolupar",-14.027044296264648],["ไม่กี่",-14.027045249938965],["▁بریدونو",-14.027045249938965],["▁சென்னையில்",-14.027048110961914],["ኾ",-14.027050971984863],["▁međutim",-14.027050971984863],["ഭൂമി",-14.027054786682127],["隆重",-14.027054786682127],["▁موٹر",-14.027058601379396],["▁Kraków",-14.027059555053713],["▁Kvinnor",-14.027064323425291],["▁употреби",-14.027066230773926],["▁disorder",-14.027067184448242],["▁používaním",-14.027067184448242],["וואַר",-14.027069091796877],["▁መቼ",-14.02707862854004],["▁सिट",-14.027080535888672],["▁FEB"
,-14.027085304260254],["▁gwbl",-14.02708911895752],["▁হাই",-14.027091026306152],["▁Vertrieb",-14.027093887329102],["▁mobitel",-14.027093887329102],["▁fitnah",-14.027097702026367],["வரு",-14.027104377746582],["▁Xildhibaanada",-14.027111053466797],["▁pisz",-14.027114868164062],["▁توانائی",-14.027124404907228],["▁ٿيندڙ",-14.027124404907228],["▁разработка",-14.027138710021973],["▁umýva",-14.02715015411377],["konferanse",-14.027151107788086],["▁راستے",-14.027161598205566],["▁wróci",-14.027178764343262],["▁આશા",-14.027178764343262],["imagina",-14.027179718017578],["ተለያየ",-14.02718448638916],["រើស",-14.027185440063477],["ాలనే",-14.027191162109377],["▁بلخ",-14.02720832824707],["았습니다",-14.027222633361816],["▁Murray",-14.02722454071045],["▁samboer",-14.02722454071045],["Уул",-14.027230262756348],["▁rääkida",-14.027236938476562],["▁backlink",-14.02724266052246],["▁чоловіків",-14.027249336242676],["▁aniversari",-14.027251243591309],["▁облека",-14.027252197265623],["▁guraso",-14.02727222442627],["▁مجددا",-14.02727222442627],["▁ciele",-14.027280807495115],["484",-14.027298927307127],["▁dauguma",-14.027303695678713],["▁Лег",-14.027303695678713],["▁говорила",-14.027303695678713],["をご紹介します",-14.02731227874756],["▁Teach",-14.027318954467772],["と思われる",-14.027325630187988],["▁бібліотеки",-14.027345657348633],["▁שמחה",-14.027361869812012],["▁ukuya",-14.027387619018556],["laşım",-14.027393341064451],["wetenskap",-14.027399063110352],["زاب",-14.027408599853516],["▁מכתב",-14.027417182922363],["אמונה",-14.027446746826172],["▁crew",-14.02744960784912],["層次",-14.027457237243652],["مرض",-14.027458190917969],["▁ফুল",-14.02746868133545],["▁Азийн",-14.027477264404297],["▁Niemand",-14.027495384216309],["▁сферах",-14.027505874633787],["▁ಪುಟ್ಟ",-14.027515411376951],["▁پلس",-14.027521133422852],["精彩的",-14.027527809143066],["हिन",-14.027532577514648],["engatik",-14.02753448486328],["▁bloot",-14.027544975280762],["▁thâ",-14.027546882629396],["ेकर",-14.027552604675291],["▁Semaki",-14.027563095092772],["позиция",-14.027585983276367],["▁inregistrat",-14.02761459350586],["ਡੋ",-14.027645111083984],["▁емоцій",-14.02765941619873],["פּו",-14.027660369873049],["▁মারা",-14.027673721313477],["▁Sdn",-14.027689933776855],["றது",-14.027695655822754],["palkinto",-14.02770709991455],["daiwa",-14.027710914611816],["▁mərkəzində",-14.02772045135498],["▁hafla",-14.027725219726562],["паш",-14.027729988098145],["▁פורנו",-14.02773094177246],["579",-14.027746200561523],["▁dərəcədə",-14.027750968933104],["▁చర్మ",-14.027751922607422],["এসসি",-14.027752876281738],["ക്കുറി",-14.0277681350708],["▁indicator",-14.027782440185549],["tunnus",-14.027793884277344],["ೊಳಗ",-14.027804374694824],["▁ελέγχου",-14.02784538269043],["▁realizó",-14.027862548828123],["taotlus",-14.02786636352539],["▁anuas",-14.027909278869627],["▁любил",-14.027929306030272],["ในโลก",-14.027949333190918],["▁බාල",-14.0279541015625],["簡単な",-14.027960777282717],["ಗತ",-14.027972221374512],["ចាប់ពី",-14.027978897094728],["▁účtu",-14.027981758117676],["▁derecha",-14.027989387512209],["สําหรับเด็ก",-14.028000831604004],["▁peeling",-14.02802276611328],["АСЫ",-14.028023719787598],["新技术",-14.028032302856444],["▁štít",-14.028037071228027],["▁inkluderer",-14.028040885925291],["▁adversari",-14.028045654296877],["▁armastus",-14.02805519104004],["▁MILF",-14.02805995941162],["▁Spis",-14.028077125549316],["▁Hidro",-14.02808952331543],["concert",-14.028094291687012],["▁Phố",-14.028094291687012],["▁portugal",-14.02810764312744],["▁hilsen",-14.028111457824709],["▁лидера",-14.028115272521973],["▁naglala",-14.0
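The entries elided above are ("piece", log-probability) pairs from a sentencepiece-style unigram vocabulary: the tokenizer segments input text into the sequence of pieces whose summed log-probability is highest. Below is a minimal, hedged sketch of that selection rule in Python. The toy vocabulary and its scores are hypothetical stand-ins for the real file's hundreds of thousands of entries; only the Viterbi selection logic is the point.

```python
# Minimal sketch of unigram segmentation (Viterbi over string positions).
# TOY_VOCAB is hypothetical; the real vocabulary is the huge ("piece",
# log-probability) list elided above. "▁" marks a word boundary, as in
# sentencepiece.
import math

TOY_VOCAB = {
    "▁h": -3.0, "▁he": -4.5, "▁hello": -6.0,
    "e": -2.0, "l": -2.5, "ll": -3.5, "llo": -5.0, "o": -2.0,
}

def unigram_tokenize(text: str) -> list[str]:
    """Return the segmentation of `text` with the highest total log-prob."""
    n = len(text)
    best = [-math.inf] * (n + 1)  # best[i]: best score covering text[:i]
    back = [""] * (n + 1)         # back[i]: last piece of that segmentation
    best[0] = 0.0
    for i in range(n):
        if best[i] == -math.inf:
            continue  # position i is unreachable with this vocabulary
        for j in range(i + 1, n + 1):
            score = TOY_VOCAB.get(text[i:j])
            if score is not None and best[i] + score > best[j]:
                best[j], back[j] = best[i] + score, text[i:j]
    pieces, i = [], n
    while i > 0:                  # walk the backpointers to recover pieces
        pieces.append(back[i])
        i -= len(back[i])
    return pieces[::-1]

print(unigram_tokenize("▁hello"))  # ['▁hello']: -6.0 beats every split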
diff --git a/model/sentence-transformer/tokenizer_config.json b/model/sentence-transformer/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..0da3507018a1a1c625ff93179ff60bdb9202cc6c
--- /dev/null
+++ b/model/sentence-transformer/tokenizer_config.json
@@ -0,0 +1 @@
+{"do_lower_case": true, "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "tokenize_chinese_chars": true, "strip_accents": null, "bos_token": "<s>", "eos_token": "</s>", "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "old_models/paraphrase-multilingual-MiniLM-L12-v2/0_Transformer"}
\ No newline at end of file
diff --git a/model/sentence-transformer/unigram.json b/model/sentence-transformer/unigram.json
new file mode 100644
index 0000000000000000000000000000000000000000..81c58e044763c1fa21b08726c8ddebf35cc9be4a
--- /dev/null
+++ b/model/sentence-transformer/unigram.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:71b44701d7efd054205115acfa6ef126c5d2f84bd3affe0c59e48163674d19a6
+size 14763234
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..dea73d4775888e019d5df3f763242178a4061660
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,11 @@
+einops==0.7.0
+protobuf==5.26.1
+llama-index==0.11.20
+llama-index-llms-replicate==0.3.0
+llama-index-llms-openai-like==0.2.0
+llama-index-embeddings-huggingface==0.3.1
+llama-index-embeddings-instructor==0.2.1
+torch==2.5.0
+torchvision==0.20.0
+torchaudio==2.5.0
+streamlit==1.39.0
\ No newline at end of file
diff --git a/space.yaml b/space.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..648697556b73920f235422fa0a1af65a9ef9ac3a
--- /dev/null
+++ b/space.yaml
@@ -0,0 +1,4 @@
+sdk: streamlit
+hardware: "cpu-basic"  # select GPU type
+python_version: 3.10
+timeout: 1000
\ No newline at end of file
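The tokenizer_config.json above fixes lowercasing, XLM-R-style special tokens, and model_max_length=512 for the vendored model/sentence-transformer directory. A hedged smoke test of that directory, assuming the `transformers` package is present in the environment (it is pulled in via the llama-index-embeddings-huggingface dependency chain in requirements.txt) and the script runs from the repo root:

```python
# Smoke-test the vendored tokenizer directory; path is the one added by
# this diff. Assumes `transformers` is installed and CWD is the repo root.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("model/sentence-transformer")
print(tok.model_max_length)        # 512, per tokenizer_config.json
print(tok.tokenize("今天天气不错"))  # sentencepiece pieces from the unigram vocab
```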
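Note that unigram.json is committed only as a git-lfs pointer (the three `version`/`oid`/`size` lines in its diff); a checkout without LFS support leaves that 3-line stub in place and the embedding model cannot load. A small sanity check, assuming it runs from the repo root; the expected byte count is taken directly from the pointer's `size` field:

```python
# Verify unigram.json was materialized by git-lfs rather than left as the
# 3-line pointer shown in the diff above.
from pathlib import Path

path = Path("model/sentence-transformer/unigram.json")
data = path.read_bytes()
if data.startswith(b"version https://git-lfs.github.com/spec/v1"):
    raise RuntimeError(f"{path} is still an LFS pointer; run `git lfs pull`")
assert len(data) == 14763234, f"unexpected size: {len(data)} bytes"
```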
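Because requirements.txt pins every dependency exactly, version drift in the Space runtime can be caught at startup. A hedged sketch (package names and versions are copied from the file above; `importlib.metadata` is standard library, and the equality check assumes the installed wheels report the plain pinned version string):

```python
# Fail fast if installed versions drift from the pins in requirements.txt.
from importlib.metadata import version

PINS = {
    "llama-index": "0.11.20",
    "llama-index-embeddings-huggingface": "0.3.1",
    "streamlit": "1.39.0",
    "torch": "2.5.0",
}
for pkg, want in PINS.items():
    got = version(pkg)
    assert got == want, f"{pkg}: pinned {want}, installed {got}"
```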